import { VideoProcessingProps, AspectRatio, ZoomSegment, ExportSettings, } from "@/types/index"; import { renderVideoFrame } from "@/utils/render-frame"; import { AudioManager } from "./AudioManager"; import { getSupportedMimeType, sleep, aspectRatioMap } from "./export-helper"; import { setupExportMedia } from "./media-setup"; import { processVideoWithFFmpeg } from "./ffmpeg-processer"; import { AiOutlineFundProjectionScreen } from "react-icons/ai"; async function waitForSeek( video: HTMLVideoElement, time: number ): Promise<void> { return new Promise((resolve) => { const handler = () => { video.removeEventListener("seeked", handler); resolve(); }; video.addEventListener("seeked", handler); video.currentTime = time; }); } declare global { interface HTMLVideoElement { captureStream(frameRate?: number): MediaStream; } interface HTMLAudioElement { captureStream(): MediaStream; } } // Helper to combine video and audio streams function combineStreams( videoStream: MediaStream, audioStreams: MediaStream[] ): MediaStream { const combined = new MediaStream(); // Add video tracks videoStream.getVideoTracks().forEach((track) => combined.addTrack(track)); // Add all audio tracks audioStreams.forEach((stream) => { stream.getAudioTracks().forEach((track) => combined.addTrack(track)); }); return combined; } export const exportVideo = async ( props: VideoProcessingProps & { zoomSegments: ZoomSegment[]; musicRef?: React.RefObject<HTMLAudioElement>; options: { audioVolume: number; musicVolume: number; aspectRatio: string; backgroundColor: string; padding: number; borderRadius: number; }; exportSettings?: ExportSettings; playbackSpeed?: number; webcamVideo?: HTMLVideoElement | null; webcamEnabled?: boolean; webcamPosition?: | "top-left" | "top-right" | "bottom-left" | "bottom-right" | "center"; webcamSize?: number; webcamBorderRadius?: number; webcamBlur?: boolean; }, onProgress: (progress: { state: "idle" | "preparing" | "processing" | "finalizing" | "completed"; progress: number; 
processedFrames: number; totalFrames: number; }) => void ): Promise<void> => { // Use export settings if provided or fall back to defaults const exportSettings = props.exportSettings || { frameRate: 60, bitrate: "8M", resolution: aspectRatioMap[props.options.aspectRatio as AspectRatio], }; const FPS = exportSettings.frameRate; const chunks: Blob[] = []; const playbackSpeed = props.playbackSpeed || 1.0; try { const { videoRef, canvasRef, musicRef, options } = props; if (!videoRef.current || !canvasRef.current) { throw new Error("Video or canvas reference not available"); } const video = videoRef.current; const exportCanvas = canvasRef.current; // Prepare video stream from canvas const videoStream = exportCanvas.captureStream( exportSettings?.frameRate || 60 ); // Prepare audio streams const audioStreams: MediaStream[] = []; // Original video audio if (video.captureStream) { const vidStream = video.captureStream(); if (vidStream.getAudioTracks().length > 0) { audioStreams.push(vidStream); } } // Music audio if (musicRef?.current && musicRef.current.captureStream) { const musStream = musicRef.current.captureStream(); if (musStream.getAudioTracks().length > 0) { audioStreams.push(musStream); } } // Combine video and audio streams const combinedStream = combineStreams(videoStream, audioStreams); const mimeType = getSupportedMimeType(); if (!mimeType) throw new Error("No supported video recording mime type found"); const mediaRecorder = new MediaRecorder(combinedStream, { mimeType, videoBitsPerSecond: parseInt(exportSettings.bitrate.replace(/[^0-9]/g, "")) * (exportSettings.bitrate.includes("M") ? 
1000000 : 1000), }); const chunks: Blob[] = []; mediaRecorder.ondataavailable = (e) => { if (e.data && e.data.size > 0) chunks.push(e.data); }; // Start recording return new Promise(async (resolve, reject) => { try { onProgress({ state: "preparing", progress: 5, processedFrames: 0, totalFrames: 0, }); // Reset video and music video.currentTime = 0; video.volume = Math.min(1.0, options.audioVolume); video.playbackRate = 1.0; // Always record at normal speed if (musicRef?.current) { musicRef.current.currentTime = 0; musicRef.current.volume = Math.min(1.0, options.musicVolume); await musicRef.current.play().catch(() => {}); } if (props.webcamVideo) { props.webcamVideo.currentTime = video.currentTime; props.webcamVideo.play(); } // Start recording mediaRecorder.start(); await video.play(); // Render loop (just like preview) let rafId: number; const render = async () => { // --- Webcam sync fix --- if (props.webcamVideo && !props.webcamVideo.ended) { // Only seek if out of sync by more than 1 frame (at 30fps) if (Math.abs(props.webcamVideo.currentTime - video.currentTime) > 0.033) { await waitForSeek(props.webcamVideo, video.currentTime); } // Ensure webcam is playing (for live streams) if (props.webcamVideo.paused) { try { await props.webcamVideo.play(); } catch {} } } // --- End webcam sync fix --- await renderVideoFrame({ video, canvas: exportCanvas, aspectRatio: options.aspectRatio as AspectRatio, backgroundColor: options.backgroundColor, padding: options.padding, borderRadius: options.borderRadius, zoomSegments: props.zoomSegments, currentTime: video.currentTime, outputWidth: aspectRatioMap[props.options.aspectRatio as AspectRatio].width, outputHeight: aspectRatioMap[props.options.aspectRatio as AspectRatio].height, webcamVideo: props.webcamVideo, webcamEnabled: props.webcamEnabled, webcamPosition: props.webcamPosition, webcamSize: props.webcamSize, webcamBorderRadius: props.webcamBorderRadius, webcamBlur: props.webcamBlur, }); // Progress reporting for recording 
phase (0-80%) onProgress({ state: "processing", progress: Math.round((video.currentTime / video.duration) * 80), processedFrames: Math.round(video.currentTime * (exportSettings?.frameRate || 60)), totalFrames: Math.round(video.duration * (exportSettings?.frameRate || 60)), }); if (!video.paused && !video.ended) { rafId = requestAnimationFrame(() => render()); } }; render(); // When video ends, stop recording video.onended = () => { if (musicRef?.current) musicRef.current.pause(); mediaRecorder.stop(); cancelAnimationFrame(rafId); }; mediaRecorder.onstop = async () => { try { onProgress({ state: "finalizing", progress: 85, processedFrames: 0, totalFrames: 0, }); const rawBlob = new Blob(chunks, { type: "video/mp4" }); // Process with FFmpeg to maintain proper FPS and apply playback speed const processedBlob = await processVideoWithFFmpeg( rawBlob, video.duration, options.aspectRatio as AspectRatio, options.audioVolume, options.musicVolume, !!musicRef?.current?.src, musicRef?.current?.src, exportSettings, playbackSpeed ); onProgress({ state: "finalizing", progress: 95, processedFrames: 0, totalFrames: 0, }); // Download the processed video const url = URL.createObjectURL(processedBlob); const link = document.createElement("a"); link.href = url; link.download = `exported-video-${Date.now()}.mp4`; link.click(); URL.revokeObjectURL(url); onProgress({ state: "completed", progress: 100, processedFrames: 0, totalFrames: 0, }); resolve(); } catch (error) { console.error("FFmpeg processing failed:", error); reject(error); } }; } catch (err) { reject(err); } }); } catch (error) { console.error("Export failed:", error); throw error; } };
Write, Run & Share Typescript code online using OneCompiler's Typescript online compiler for free. It's one of the robust, feature-rich online compilers for Typescript language. Getting started with the OneCompiler's Typescript editor is easy and fast. The editor shows sample boilerplate code when you choose language as Typescript and start coding.
TypeScript is a strongly typed programming language that builds on JavaScript, developed and maintained by Microsoft.
Keyword | Description | Scope |
---|---|---|
var | Var is used to declare variables(old way of declaring variables) | Function or global scope |
let | let is also used to declare variables(new way) | Global or block Scope |
const | const is used to declare const values. Once the value is assigned it can not be modified | Global or block Scope |
Operator | Description |
---|---|
?? | nullish coalescing |
?. | optional chaining |
! | non-null assertion |
&&= | used to assign value only if current value is truthy |
||= | used to assign value only if current value is falsy |
??= | used to assign value if current value is null or undefined |
IF is used to execute a block of code based on a condition.
if(condition){
// code
}
Else part is used to execute the block of code when the condition fails.
if(condition){
// code
} else {
// code
}
Switch is used to replace nested If-Else statements.
switch(condition){
case 'value1' :
//code
break;
case 'value2' :
//code
break;
.......
default :
//code
break;
}
For loop is used to iterate a set of statements based on a condition.
for(Initialization; Condition; Increment/decrement){
//code
}
let arr = [1, 2, 3, 4, 5];
for (let ele of arr) {
// code
}
for (let index in arr) {
//code
}
While is also used to iterate a set of statements based on a condition. Usually, while is preferred when the number of iterations is not known in advance.
while (condition) {
// code
}
Do-while is also used to iterate a set of statements based on a condition. It is mostly used when you need to execute the statements at least once.
do {
// code
} while (condition);
Arrow functions help developers write code in a concise way; they were introduced in ES6.
Arrow functions can be written in multiple ways. Below are a couple of ways to use arrow functions, but they can be written in many other ways as well.
() => expression
const numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
const squaresOfEvenNumbers = numbers.filter(ele => ele % 2 == 0)
.map(ele => ele ** 2);
console.log(squaresOfEvenNumbers);
TypeScript provides function overloading, where multiple signatures with the same name but different parameter types and return types are possible. The single implementation signature must be compatible with every overload signature.
function Addition(a:string, b:string):string;
function Addition(a:number, b:number): number;
function Addition(a: any, b:any): any {
return a + b;
}
Addition("Hello ","foo"); // outputs Hello foo
Addition(2,3); // outputs 5