import { renderVideoFrame } from "@/utils/render-frame";
import { ExportSettings, AspectRatio, ZoomSegment } from "@/types/index";
import { muxAudioAndVideo, extractAudioWithFFmpeg,mixAudioTracksWithFFmpeg } from "./muxAudioAndVideo";

/**
 * Export video using WebCodecs API for video + ffmpeg.wasm for audio muxing
 */
/**
 * Export video using the WebCodecs API for video encoding plus ffmpeg.wasm
 * helpers for audio extraction, mixing, and muxing.
 *
 * Pipeline (with approximate progress allocation reported via onProgress):
 *   1. Render + encode a video-only MP4 via WebCodecs        (0–60%)
 *   2. Fetch the original source as a blob for audio work
 *   3. Extract the original audio track with FFmpeg          (60–70%)
 *   4. Optionally mix in a background-music track            (70–80%)
 *   5. Mux the encoded video with the final audio track      (80–100%)
 *
 * @param videoRef     Ref to the source <video> element; its metadata must be
 *                     loaded (duration is read directly).
 * @param canvasRef    Ref to the <canvas> used as the compositing surface;
 *                     it is resized to the export resolution.
 * @param options      Layout, webcam-overlay, and volume options.
 *                     NOTE(review): `playbackSpeed` is accepted but not used
 *                     in this function — confirm whether it should affect
 *                     frame timing here.
 * @param zoomSegments Timed zoom regions applied while rendering frames.
 * @param onProgress   Callback receiving overall export state and progress.
 * @returns The final muxed MP4 as a Blob.
 * @throws Error if the video or canvas ref is not attached.
 */
export async function exportVideoWithWebCodecs(
  videoRef: React.RefObject<HTMLVideoElement>,
  canvasRef: React.RefObject<HTMLCanvasElement>,
  options: {
    aspectRatio: AspectRatio;
    backgroundColor: string;
    padding: number;
    borderRadius: number;
    audioVolume: number;
    musicVolume: number;
    musicRef?: React.RefObject<HTMLAudioElement>;
    playbackSpeed?: number;
    webcamVideo?: HTMLVideoElement | null;
    webcamEnabled?: boolean;
    webcamPosition?: string;
    webcamSize?: number;
    webcamBorderRadius?: number;
    webcamBlur?: boolean;
    exportSettings: ExportSettings;
  },
  zoomSegments: ZoomSegment[],
  onProgress: (progress: {
    state: "idle" | "preparing" | "processing" | "finalizing" | "completed";
    progress: number;
    processedFrames: number;
    totalFrames: number;
  }) => void
): Promise<Blob> {
  if (!videoRef.current || !canvasRef.current) {
    throw new Error("Video or canvas reference not available");
  }

  const video = videoRef.current;
  const canvas = canvasRef.current;
  const { exportSettings } = options;
  const width = exportSettings.resolution.width;
  const height = exportSettings.resolution.height;
  const fps = exportSettings.frameRate || 60;
  const duration = video.duration;
  const totalFrames = Math.floor(duration * fps);

  // Size the compositing canvas to the export resolution.
  canvas.width = width;
  canvas.height = height;

  onProgress({ state: "preparing", progress: 0, processedFrames: 0, totalFrames });

  // Step 1: Generate video-only blob using WebCodecs (0-60% progress)
  const videoOnlyBlob = await generateVideoOnlyBlob(
    video,
    canvas,
    options,
    zoomSegments,
    exportSettings,
    (videoProgress) => {
      onProgress({
        state: "processing",
        progress: Math.round(videoProgress * 0.6),
        // Report whole frames, not a fractional count.
        processedFrames: Math.round((videoProgress * totalFrames) / 100),
        totalFrames
      });
    }
  );

  onProgress({ state: "finalizing", progress: 60, processedFrames: totalFrames, totalFrames });

  // Step 2: Create a blob from the original video for audio extraction
  const originalVideoBlob = await createBlobFromVideo(video);

  // Step 3: Extract audio using FFmpeg (60-70% progress)
  const audioBlob = await extractAudioWithFFmpeg(
    originalVideoBlob,
    options.audioVolume,
    (audioProgress) => {
      onProgress({
        state: "finalizing",
        progress: 60 + Math.round(audioProgress * 0.1),
        processedFrames: totalFrames,
        totalFrames
      });
    }
  );

  // Step 4: Extract music audio if provided
  let mixedAudioBlob = audioBlob;
  if (options.musicRef?.current && options.musicRef.current.src) {
    const musicResponse = await fetch(options.musicRef.current.src);
    const musicBlob = await musicResponse.blob();

    // Mix original audio and music audio (70-80% progress)
    mixedAudioBlob = await mixAudioTracksWithFFmpeg(
      audioBlob,
      musicBlob,
      options.audioVolume,
      options.musicVolume,
      (mixProgress) => {
        onProgress({
          state: "finalizing",
          progress: 70 + Math.round(mixProgress * 0.1),
          processedFrames: totalFrames,
          totalFrames
        });
      }
    );
  }

  // Step 5: Mux audio and video (80-100% progress)
  const finalVideoBlob = await muxAudioAndVideo(
    videoOnlyBlob,
    mixedAudioBlob,
    (muxProgress) => {
      onProgress({
        state: "finalizing",
        progress: 80 + Math.round(muxProgress * 0.2),
        processedFrames: totalFrames,
        totalFrames
      });
    },
    // BUGFIX: pass the defaulted fps (frameRate || 60), not the raw
    // exportSettings.frameRate, which may be undefined and would desync
    // the muxer's timing from the encoded stream.
    fps
  );

  onProgress({ state: "completed", progress: 100, processedFrames: totalFrames, totalFrames });
  return finalVideoBlob;
}

/**
 * Create a blob from the video element's source
 */
/**
 * Fetch the video element's current source URL and return its bytes as a Blob.
 */
function createBlobFromVideo(video: HTMLVideoElement): Promise<Blob> {
  return fetch(video.src).then((response) => response.blob());
}

/**
 * Generate video-only blob using WebCodecs
 */
/**
 * Render every frame of the source video to the canvas and encode the result
 * into a video-only MP4 using WebCodecs (VideoEncoder) + mp4-muxer.
 *
 * The capture loop is driven by requestAnimationFrame while the muted source
 * video plays in real time, so the actual number of captured frames depends on
 * display refresh rate; frame timestamps come from video.currentTime, not from
 * a fixed fps grid.
 *
 * @param video          Source video element; restarted from t=0 and muted.
 * @param canvas         Compositing canvas (already sized by the caller).
 * @param options        Layout and webcam-overlay options.
 * @param zoomSegments   Timed zoom regions applied per frame.
 * @param exportSettings Resolution and frame-rate settings.
 * @param onProgress     Called with 0–100 percent (throttled to every 10th frame).
 * @returns A video-only MP4 blob (audio is muxed in later by the caller).
 */
async function generateVideoOnlyBlob(
  video: HTMLVideoElement,
  canvas: HTMLCanvasElement,
  options: any,
  zoomSegments: ZoomSegment[],
  exportSettings: any,
  onProgress: (progress: number) => void
): Promise<Blob> {
  const width = exportSettings.resolution.width;
  const height = exportSettings.resolution.height;
  const fps = exportSettings.frameRate || 60;
  const duration = video.duration;
  // Guard against a zero-duration source so the progress division below
  // cannot divide by zero.
  const totalFrames = Math.max(1, Math.floor(duration * fps));

  // mp4-muxer is imported lazily so its cost is only paid when exporting.
  const { Muxer, ArrayBufferTarget } = await import("mp4-muxer");

  return new Promise<Blob>((resolve, reject) => {
    // The muxer and encoder are constructed inside the promise executor so
    // that the encoder's error callback can reject this promise.
    // BUGFIX: the previous code threw from the VideoEncoder error callback,
    // which surfaces as an unhandled exception and leaves the returned
    // promise pending forever.
    const muxer = new Muxer({
      target: new ArrayBufferTarget(),
      video: {
        codec: "avc",
        width,
        height,
        frameRate: fps,
      },
      fastStart: "in-memory",
    });

    let settled = false;
    // Reject exactly once; later errors are ignored.
    const fail = (e: unknown) => {
      if (!settled) {
        settled = true;
        reject(e);
      }
    };

    const encoder = new VideoEncoder({
      output: (chunk, meta) => {
        muxer.addVideoChunk(chunk, meta);
      },
      error: (e) => {
        console.error("VideoEncoder error:", e);
        fail(e);
      },
    });

    encoder.configure({
      codec: "avc1.4d0028", // H.264 Main profile @ level 4.0
      width,
      height,
      bitrate: 20_000_000,
      framerate: fps,
    });

    let processedFrames = 0;
    let encodingFinalized = false;
    // Snapshot zoom segments so concurrent UI edits can't affect the export.
    const stableZoomSegments = zoomSegments.map((z) => ({ ...z }));

    // Cache the last webcam frame that was decodable, to bridge gaps while
    // the webcam video seeks/buffers.
    let lastGoodWebcamVideo: HTMLVideoElement | undefined = undefined;

    // Restart the source muted; the real audio track is re-added later.
    video.currentTime = 0;
    video.volume = 0;
    // BUGFIX: play() returns a promise that can reject (e.g. autoplay
    // policy). Previously this rejection was unhandled and the export hung;
    // now it fails the export explicitly.
    video.play().catch(fail);

    const render = async () => {
      try {
        if (settled) {
          return;
        }
        if (encodingFinalized || video.ended) {
          // Finalize: flush pending encodes, close the encoder, and hand
          // back the finished MP4 buffer.
          encodingFinalized = true;
          await encoder.flush();
          encoder.close();

          muxer.finalize();
          // @ts-ignore — ArrayBufferTarget exposes .buffer after finalize()
          const { buffer } = muxer.target;
          settled = true;
          resolve(new Blob([buffer], { type: "video/mp4" }));
          return;
        }

        // Keep the webcam clip within ~1 frame (33 ms) of the main video.
        if (options.webcamVideo && !options.webcamVideo.ended) {
          if (
            Math.abs(options.webcamVideo.currentTime - video.currentTime) >
            0.033
          ) {
            options.webcamVideo.currentTime = video.currentTime;
          }
          if (options.webcamVideo.paused) {
            try {
              await options.webcamVideo.play();
            } catch {
              // Best effort: a blocked webcam play() must not kill the export.
            }
          }
        }

        // Prefer the current webcam frame when it is decodable
        // (readyState >= HAVE_CURRENT_DATA); otherwise reuse the last good
        // frame so the overlay doesn't flicker.
        let webcamVideoToDraw: HTMLVideoElement | undefined = undefined;
        if (
          options.webcamVideo &&
          options.webcamEnabled &&
          options.webcamVideo.readyState >= 2
        ) {
          lastGoodWebcamVideo = options.webcamVideo;
          webcamVideoToDraw = options.webcamVideo;
        } else if (lastGoodWebcamVideo && options.webcamEnabled) {
          webcamVideoToDraw = lastGoodWebcamVideo;
        }

        // Composite this frame (background, padding, zoom, webcam overlay).
        await renderVideoFrame({
          video,
          canvas,
          aspectRatio: options.aspectRatio,
          backgroundColor: options.backgroundColor,
          padding: options.padding,
          borderRadius: options.borderRadius,
          zoomSegments: stableZoomSegments,
          currentTime: video.currentTime,
          outputWidth: width,
          outputHeight: height,
          webcamVideo: webcamVideoToDraw,
          webcamEnabled: options.webcamEnabled,
          webcamSize: options.webcamSize,
          webcamBorderRadius: options.webcamBorderRadius,
          webcamBlur: options.webcamBlur,
        });

        // Encode the composited canvas as one frame, timestamped in µs.
        const bitmap = await createImageBitmap(canvas);
        const frame = new VideoFrame(bitmap, {
          timestamp: Math.round(video.currentTime * 1_000_000),
        });
        encoder.encode(frame);
        frame.close();
        bitmap.close();

        processedFrames++;
        // Throttle progress callbacks to every 10th frame.
        if (processedFrames % 10 === 0) {
          onProgress(Math.round((processedFrames / totalFrames) * 100));
        }

        requestAnimationFrame(() => render());
      } catch (error) {
        fail(error);
      }
    };

    render();
  });
}
 

Typescript Online Compiler

Write, Run & Share Typescript code online using OneCompiler's Typescript online compiler for free. It's one of the robust, feature-rich online compilers for Typescript language. Getting started with the OneCompiler's Typescript editor is easy and fast. The editor shows sample boilerplate code when you choose language as Typescript and start coding.

About Typescript

TypeScript (TS) is a strongly typed programming language that builds on JavaScript, developed and maintained by Microsoft.

Key Features

  • Superset of Javascript
  • portable
  • Strong static typing
  • supports OOPS
  • Language extension to ECMAScript 6 with other features like Type annotations and compile-time type checking, Type inference and Type erasure, Interfaces, Enumerated types, Generics, Namespaces, Tuples
  • .ts extension

Syntax help

variable declaration

KeywordDescriptionScope
varVar is used to declare variables(old way of declaring variables)Function or global scope
letlet is also used to declare variables(new way)Global or block Scope
constconst is used to declare const values. Once the value is assigned it can not be modifiedGlobal or block Scope

Operators

OperatorDescription
??nullish coalescing
?.optional chaining
!non-null assertion
&&=used to assign value only if current value is truthy
||=used to assign value only if current value is falsy
??=used to assign value if current value is null or undefined

Loops

1. If:

IF is used to execute a block of code based on a condition.

Syntax

if(condition){
    // code
}

2. If-Else:

Else part is used to execute the block of code when the condition fails.

Syntax

if(condition){
    // code
} else {
    // code
}

3. Switch:

Switch is used to replace nested If-Else statements.

Syntax

switch(condition){
    case 'value1' :
        //code
        break;
    case 'value2' :
        //code
        break;
    .......
    default :
        //code
        break;
}

4. For

For loop is used to iterate a set of statements based on a condition.

for(Initialization; Condition; Increment/decrement){  
//code  
} 

let arr = [1, 2, 3, 4, 5];
for (let ele of arr) {
 // code
}

for (let index in arr) {
//code
}

5. While

While is also used to iterate a set of statements based on a condition. Usually while is preferred when number of iterations are not known in advance.

while (condition) {  
  // code 
}  

6. Do-While

Do-while is also used to iterate a set of statements based on a condition. It is mostly used when you need to execute the statements at least once.

do {  
  // code 
} while (condition); 

Arrow functions

Arrow functions help developers write code in a concise way; they were introduced in ES6.
Arrow functions can be written in multiple ways. Below are couple of ways to use arrow function but it can be written in many other ways as well.

Syntax:

() => expression

Example:

const numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
const squaresOfEvenNumbers = numbers.filter(ele => ele % 2 == 0)
                                    .map(ele => ele ** 2);
console.log(squaresOfEvenNumbers);

Function Overloading

Typescript provides function overloading where multiple functions with the same name but different parameter types and return type is possible. But, the number of parameters should be the same.

function Addition(a:string, b:string):string;

function Addition(a:number, b:number): number;

function Addition(a: any, b:any): any {
    return a + b;
}
Addition("Hello ","foo"); // outputs Hello foo
Addition(2,3); // outputs 5