diff --git a/v1-com-officielle/index.html b/v1-com-officielle/index.html
index 42ee1a4..492c01c 100644
--- a/v1-com-officielle/index.html
+++ b/v1-com-officielle/index.html
@@ -2,12 +2,404 @@
-
- v1-com-officielle
+ DRAGS AND NERDS #2
+
+
+
+
+
+
+
+
+
+
+
+ drags and nerds
+
+ Drag shows, musique électronique, synthés vidéos et autre performances nerds
+
+ drag
+ nerd
+ drags
+ nerds
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/v1-com-officielle/public/LICENSE.txt b/v1-com-officielle/public/LICENSE.txt
new file mode 100644
index 0000000..7325ff5
--- /dev/null
+++ b/v1-com-officielle/public/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Alan Ang
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/v1-com-officielle/public/README.MD b/v1-com-officielle/public/README.MD
new file mode 100644
index 0000000..1e56e7a
--- /dev/null
+++ b/v1-com-officielle/public/README.MD
@@ -0,0 +1,117 @@
+# Liquid Shape Distortions 🌀
+
+Create psychedelic animations in real-time in the browser.
+
+(P)art of your next trip.
+
+WebGL shader animation created using liquid motion, distorted shapes, shadows, and light.
+
+**Live demo: https://collidingscopes.github.io/liquid-shape-distortions**
+
+
+
+
+The song used is Fahrenheit Fair Enough by Telefon Tel Aviv.
+
+## Overview
+
+Use this to create:
+- Animated backgrounds for music videos
+- Concert / rave posters
+- Stylized assets for creative projects
+
+## Features
+
+- **Real-time rendering** - real-time edits to the animation in the browser
+- **Randomization** - Generate unique patterns with a single click
+- **Extensive controls** - Fine-tune the animation parameters and canvas size
+- **Export options** - Save your creations as images or videos
+- **Completely free** - No paywalls, no premium options
+- **Open source** - MIT licensed for personal and commercial use
+
+## Controls
+
+| Control | Keyboard | Description |
+|---------|----------|-------------|
+| 🎲 | [R] | Randomize all inputs |
+| ⏯️ | [Space] | Pause/play the animation |
+| 📷 | [S] | Save a screenshot image |
+| 🎥 | [V] | Start/stop video export |
+| 🔊 | [M] | Toggle background music |
+
+Additional keyboard shortcuts:
+- [Tab] - Refresh pattern with a new random seed
+- [T] - Restart animation from time = 0
+
+## Technical Details
+
+This project is built using:
+- WebGL shaders for high-performance rendering
+- Vanilla JavaScript for logic and interactivity
+- HTML5 Canvas for display
+- CSS for styling
+- dat.GUI for the control interface
+- MP4 muxer for video encoding and export
+
+The fragment shader uses:
+- 3D simplex noise and fractal Brownian motion for organic patterns / liquid movement
+- Seed-based randomization for unique visuals
+- Real-time shader effects (bloom, saturation, grain)
+
+## Installation
+
+1. Clone the repository:
+ ```
+ git clone https://github.com/collidingScopes/liquid-shape-distortions.git
+ ```
+
+2. Open `index.html` in a web browser.
+
+That's it! No build process, dependencies, or server setup required.
+
+For development, you might want to use a local server:
+```
+npx http-server
+```
+
+## Performance Notes
+
+The WebGL shaders used in this project can be resource-intensive. For optimal performance:
+- Close unnecessary browser tabs
+- Ensure your device has adequate GPU capabilities
+- Reduce the canvas size if experiencing lag
+- Ensure your device is not in battery-saving mode
+
+## Contributing
+
+Contributions are welcome! Feel free to submit issues or pull requests.
+
+## License
+
+This project is licensed under the MIT License - see the [LICENSE.txt](LICENSE.txt) file for details.
+
+## Related Projects
+
+Check out other free and open-source projects by the same developer:
+
+- [Particular Drift](https://collidingScopes.github.io/particular-drift) - Turn photos into flowing particle animations
+- [Liquid Logo](https://collidingScopes.github.io/liquid-logo) - Transform logos and icons into liquid metal animations
+- [Video-to-ASCII](https://collidingScopes.github.io/ascii) - Convert videos into ASCII pixel art
+- [Shape Shimmer](https://collidingScopes.github.io/shimmer) - Turn photos into funky wave animations
+- [Colliding Scopes](https://collidingScopes.github.io) - Turn photos into kaleidoscope animations
+- [Manual Brick Breaker](https://manual-brick-breaker.netlify.app) - Play brick breaker by waving your hands around
+
+## Connect with the Developer
+
+- Instagram: [@stereo.drift](https://www.instagram.com/stereo.drift/)
+- Twitter/X: [@measure_plan](https://x.com/measure_plan)
+- Email: [stereodriftvisuals@gmail.com](mailto:stereodriftvisuals@gmail.com)
+- GitHub: [collidingScopes](https://github.com/collidingScopes)
+
+## Donations
+
+If you found this tool useful, feel free to buy me a coffee.
+
+My name is Alan, and I enjoy building open source software for art, animation, games, and more. This would be much appreciated during late-night coding sessions!
+
+[](https://www.buymeacoffee.com/stereoDrift)
\ No newline at end of file
diff --git a/v1-com-officielle/public/canvasVideoExport.js b/v1-com-officielle/public/canvasVideoExport.js
new file mode 100644
index 0000000..15fd839
--- /dev/null
+++ b/v1-com-officielle/public/canvasVideoExport.js
@@ -0,0 +1,265 @@
+let projectName = "komorebi"; //to be updated
+
+//detect user browser
+var ua = navigator.userAgent;
+var isSafari = false;
+var isFirefox = false;
+var isIOS = false;
+var isAndroid = false;
+if(ua.includes("Safari")){
+ isSafari = true;
+}
+if(ua.includes("Firefox")){
+ isFirefox = true;
+}
+if(ua.includes("iPhone") || ua.includes("iPad") || ua.includes("iPod")){
+ isIOS = true;
+}
+if(ua.includes("Android")){
+ isAndroid = true;
+}
+console.log("isSafari: "+isSafari+", isFirefox: "+isFirefox+", isIOS: "+isIOS+", isAndroid: "+isAndroid);
+
+let useMobileRecord = false;
+if(isIOS || isAndroid || isFirefox){
+ useMobileRecord = true;
+}
+
+var mediaRecorder;
+var recordedChunks;
+var finishedBlob;
+var recordingMessageDiv = document.getElementById("videoRecordingMessageDiv");
+var recordVideoState = false;
+var videoRecordInterval;
+var videoEncoder;
+var muxer;
+var mobileRecorder;
+var videofps = 30;
+let bitrate = 16_000_000;
+
+function saveImage() {
+ console.log("Export png image");
+
+ // Create a temporary canvas with the same dimensions
+ const tempCanvas = document.createElement('canvas');
+ tempCanvas.width = canvas.width;
+ tempCanvas.height = canvas.height;
+ const tempContext = tempCanvas.getContext('2d', {
+ willReadFrequently: true,
+ alpha: true // Enable alpha for transparency
+ });
+
+ // Skip filling the background, leaving it transparent
+
+ // Force a render frame to ensure latest content
+ drawScene();
+ gl.flush();
+ gl.finish();
+
+ // Draw the WebGL canvas onto the temporary canvas
+ tempContext.drawImage(canvas, 0, 0);
+
+ // Create download link
+ const link = document.createElement('a');
+ link.href = tempCanvas.toDataURL('image/png');
+
+ const date = new Date();
+ const filename = projectName + `_${date.toLocaleDateString()}_${date.toLocaleTimeString()}.png`;
+ link.download = filename;
+ link.click();
+
+ // Cleanup
+ tempCanvas.remove();
+}
+
+function toggleVideoRecord(){
+
+ if(recordVideoState == false){
+ recordVideoState = true;
+ chooseRecordingFunction();
+ } else {
+ recordVideoState = false;
+ chooseEndRecordingFunction();
+ }
+}
+
+function chooseRecordingFunction(){
+ //resetAnimation();
+ if(useMobileRecord){
+ startMobileRecording();
+ }else {
+ recordVideoMuxer();
+ }
+}
+
+function chooseEndRecordingFunction(){
+ if(useMobileRecord){
+ mobileRecorder.stop();
+ }else {
+ finalizeVideo();
+ }
+}
+
+//record html canvas element and export as mp4 video
+//source: https://devtails.xyz/adam/how-to-save-html-canvas-to-mp4-using-web-codecs-api
+// Desktop path: a setInterval snapshots the WebGL canvas `videofps` times a
+// second, each snapshot is encoded via WebCodecs' VideoEncoder, and the
+// encoded chunks are muxed into an in-memory MP4. Stopped by finalizeVideo().
+async function recordVideoMuxer() {
+console.log("start muxer video recording");
+
+// H.264 rejects odd dimensions: round width down to a multiple of 2 and
+// height down to a multiple of 4.
+var videoWidth = Math.floor(canvas.width/2)*2;
+var videoHeight = Math.floor(canvas.height/4)*4; //force a number which is divisible by 4
+
+console.log("Video dimensions: "+videoWidth+", "+videoHeight);
+
+//display user message
+recordingMessageDiv.classList.remove("hidden");
+
+recordVideoState = true;
+// NOTE(review): this canvas already holds a WebGL context, so getContext("2d")
+// returns null here, and `ctx` is never used afterwards — confirm whether this
+// block is leftover from a 2D-canvas original and can be removed.
+const ctx = canvas.getContext("2d", {
+    // This forces the use of a software (instead of hardware accelerated) 2D canvas
+    // This isn't necessary, but produces quicker results
+    willReadFrequently: true,
+    // Desynchronizes the canvas paint cycle from the event loop
+    // Should be less necessary with OffscreenCanvas, but with a real canvas you will want this
+    desynchronized: true,
+});
+
+muxer = new Mp4Muxer.Muxer({
+    target: new Mp4Muxer.ArrayBufferTarget(),
+    video: {
+        // If you change this, make sure to change the VideoEncoder codec as well
+        codec: "avc",
+        width: videoWidth,
+        height: videoHeight,
+    },
+
+    // See mp4-muxer's firstTimestampBehavior option in its README.
+    firstTimestampBehavior: 'offset',
+
+    // mp4-muxer docs claim you should always use this with ArrayBufferTarget
+    fastStart: "in-memory",
+});
+
+// Encoded chunks flow straight from the encoder into the muxer.
+videoEncoder = new VideoEncoder({
+    output: (chunk, meta) => muxer.addVideoChunk(chunk, meta),
+    error: (e) => console.error(e),
+});
+
+// This codec should work in most browsers
+// See https://dmnsgn.github.io/media-codecs for list of codecs and see if your browser supports
+videoEncoder.configure({
+    codec: "avc1.4d0032",
+    width: videoWidth,
+    height: videoHeight,
+    bitrate: bitrate,
+    bitrateMode: "variable",
+});
+//NEW codec: "avc1.4d0032",
+//ORIGINAL codec: "avc1.42003e",
+
+var frameNumber = 0;
+
+//take a snapshot of the canvas every x miliseconds and encode to video
+// The interval keeps running until finalizeVideo() clears it; the state
+// flag gates the work so a stop request takes effect immediately.
+videoRecordInterval = setInterval(
+    function(){
+        if(recordVideoState == true){
+            //gl.flush();
+            //gl.finish();
+            drawScene();
+            renderCanvasToVideoFrameAndEncode({
+                canvas,
+                videoEncoder,
+                frameNumber,
+                videofps
+            })
+            frameNumber++;
+        }else{
+        }
+    } , 1000/videofps);
+
+}
+
+//finish and export video
+// Stop the muxer recording: halt the capture interval, flush pending
+// encodes, finalize the MP4 container, and trigger the download.
+async function finalizeVideo(){
+    console.log("finalize muxer video");
+    // Pause rendering while the file is assembled (resumed at the bottom).
+    togglePlayPause();
+    clearInterval(videoRecordInterval);
+    //playAnimationToggle = false;
+    recordVideoState = false;
+
+    // Forces all pending encodes to complete
+    await videoEncoder.flush();
+    muxer.finalize();
+    let buffer = muxer.target.buffer;
+    // The finished video is kept in module state (finishedBlob) and a copy
+    // is handed to downloadBlob for the actual download.
+    finishedBlob = new Blob([buffer]);
+    downloadBlob(new Blob([buffer]));
+
+    //hide user message
+    recordingMessageDiv.classList.add("hidden");
+    togglePlayPause();
+}
+
+async function renderCanvasToVideoFrameAndEncode({
+ canvas,
+ videoEncoder,
+ frameNumber,
+ videofps,
+ }) {
+ let frame = new VideoFrame(canvas, {
+ // Equally spaces frames out depending on frames per second
+ timestamp: (frameNumber * 1e6) / videofps,
+ });
+
+ // The encode() method of the VideoEncoder interface asynchronously encodes a VideoFrame
+ videoEncoder.encode(frame);
+
+ // The close() method of the VideoFrame interface clears all states and releases the reference to the media resource.
+ frame.close();
+}
+
+function downloadBlob() {
+ console.log("download video");
+ let url = window.URL.createObjectURL(finishedBlob);
+ let a = document.createElement("a");
+ a.style.display = "none";
+ a.href = url;
+ const date = new Date();
+ const filename = projectName+`_${date.toLocaleDateString()}_${date.toLocaleTimeString()}.mp4`;
+ a.download = filename;
+ document.body.appendChild(a);
+ a.click();
+ window.URL.revokeObjectURL(url);
+}
+
+//record and download videos on mobile devices
+function startMobileRecording(){
+ var stream = canvas.captureStream(videofps);
+ mobileRecorder = new MediaRecorder(stream, { 'type': 'video/mp4' });
+ mobileRecorder.addEventListener('dataavailable', finalizeMobileVideo);
+
+ console.log("start simple video recording");
+ console.log("Video dimensions: "+canvas.width+", "+canvas.height);
+
+ recordingMessageDiv.classList.remove("hidden");
+
+ recordVideoState = true;
+ mobileRecorder.start(); //start mobile video recording
+
+}
+
+// dataavailable handler for the MediaRecorder path: packages the recorded
+// data into a blob and triggers the download.
+function finalizeMobileVideo(e) {
+    // NOTE(review): the 500 ms delay looks empirical (letting the recorder
+    // settle before packaging) — confirm it is still needed.
+    setTimeout(function(){
+        console.log("finish simple video recording");
+        // Pause rendering while packaging, resumed below.
+        togglePlayPause();
+        recordVideoState = false;
+        /*
+        mobileRecorder.stop();*/
+        var videoData = [ e.data ];
+        finishedBlob = new Blob(videoData, { 'type': 'video/mp4' });
+        downloadBlob(finishedBlob);
+
+        //hide user message
+        recordingMessageDiv.classList.add("hidden");
+        togglePlayPause();
+
+    },500);
+}
\ No newline at end of file
diff --git a/v1-com-officielle/public/dnn-screen.png b/v1-com-officielle/public/dnn-screen.png
new file mode 100644
index 0000000..81a4f2e
Binary files /dev/null and b/v1-com-officielle/public/dnn-screen.png differ
diff --git a/v1-com-officielle/public/helperFunctions.js b/v1-com-officielle/public/helperFunctions.js
new file mode 100644
index 0000000..60fce30
--- /dev/null
+++ b/v1-com-officielle/public/helperFunctions.js
@@ -0,0 +1,79 @@
+// Toggle play/pause
+function togglePlayPause() {
+
+ if (isPlaying) {
+ cancelAnimationFrame(animationID);
+ isPlaying = false;
+ } else {
+ isPlaying = true;
+ animationID = requestAnimationFrame(render);
+ }
+}
+
+// Function to refresh the pattern with a new random seed
+// NOTE(review): despite the name, the seed is currently pinned — seedCount
+// stays at 2, so selectedSeeds[2] (506) is uploaded every time and the
+// Math.random() line is commented out. Confirm the pinning is intentional.
+const selectedSeeds = [53, 118, 506];
+var seedCount = 2;
+function refreshPattern() {
+    // Rebase the animation clock so the pattern restarts from t = 0.
+    timeOffset = performance.now();
+    //randomSeed = Math.floor(Math.random() * 1000,0);
+    randomSeed = selectedSeeds[seedCount];
+    gl.uniform1f(seedLocation, randomSeed);
+    // Make sure the render loop is running after the refresh.
+    if(!isPlaying){
+        isPlaying = true;
+        animationID = requestAnimationFrame(render);
+    }
+    console.log('seed:', randomSeed);
+}
+
+function startFromZeroTime(){
+ console.log("Restarting animation from time = 0");
+
+ // Cancel current animation if running
+ if (animationID) {
+ cancelAnimationFrame(animationID);
+ }
+
+ // Set the time offset to the current time
+ // This will be subtracted in the render function
+ timeOffset = performance.now();
+
+ // Reset frame counter for FPS calculation
+ frameCount = 0;
+ lastTime = performance.now();
+
+ // Make sure all other uniforms are updated
+ updateUniforms();
+
+ // Ensure animation is playing
+ isPlaying = true;
+
+ // Start the animation loop from the beginning
+ animationID = requestAnimationFrame(render);
+}
+
+// Add this function to handle canvas resizing
+function updateCanvasSize() {
+ // Update canvas dimensions to window size
+ canvas.width = window.innerWidth;
+ canvas.height = window.innerHeight;
+
+ // Update the WebGL viewport to match
+ gl.viewport(0, 0, canvas.width, canvas.height);
+
+ // Re-render if not already playing
+ if (!isPlaying) {
+ drawScene();
+ }
+
+ // If recording is active, we need to handle that
+ if (recordVideoState) {
+ stopRecording();
+ startRecording();
+ }
+}
+
+//intro overlay info screen
+
+// NOTE(review): these flags appear unused in the visible files — presumably
+// toggled by UI/keyboard handlers elsewhere ([M] for music, zen mode).
+let musicPlaying = false;
+
+let isZenMode = false;
diff --git a/v1-com-officielle/public/main.js b/v1-com-officielle/public/main.js
new file mode 100644
index 0000000..8c54f5a
--- /dev/null
+++ b/v1-com-officielle/public/main.js
@@ -0,0 +1,200 @@
+/*
+To do:
+Press z for zen mode (hides all control and other display on top of the canvas)
+Ability to add this shader effect on top of an image?
+Presets / seed choice??
+Allow user to upload a song, and then it becomes audio reactive?
+Generate perfect loops in x seconds
+*/
+
+// Initialize WebGL context
+// The canvas starts at the full window size; updateCanvasSize() keeps it
+// in sync on resize.
+const canvas = document.getElementById('canvas');
+let startingWidth = window.innerWidth;
+let startingHeight = window.innerHeight;
+canvas.width = startingWidth;
+canvas.height = startingHeight;
+console.log("canvas width/height: "+canvas.width+" / "+canvas.height);
+
+// Prefer a standard WebGL 1 context, falling back to the legacy
+// "experimental-webgl" name used by very old browsers.
+const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
+let isPlaying = false;   // render-loop gate, toggled by togglePlayPause()
+let animationID = null;  // requestAnimationFrame handle
+let randomSeed;          // shader seed value, set by refreshPattern()
+let time;                // last raw rAF timestamp
+let timeOffset = 0;      // subtracted from timestamps so t can restart at 0
+
+// FPS tracking variables
+let frameCount = 0;
+let lastTime = 0;
+let fps = 0;
+
+if (!gl) {
+    // NOTE(review): execution continues after this alert, so the gl.* calls
+    // below would throw on an unsupported browser — consider bailing out.
+    alert('WebGL not supported');
+}
+
+// Compile shaders
+function compileShader(source, type) {
+ const shader = gl.createShader(type);
+ gl.shaderSource(shader, source);
+ gl.compileShader(shader);
+
+ if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
+ console.error('Shader compilation error:', gl.getShaderInfoLog(shader));
+ gl.deleteShader(shader);
+ return null;
+ }
+
+ return shader;
+}
+
+// Create program
+// The shader sources are read from elements with ids vertexShader and
+// fragmentShader — presumably inline <script> tags in index.html (the HTML
+// is not visible here; confirm).
+const vertexShader = compileShader(document.getElementById('vertexShader').textContent, gl.VERTEX_SHADER);
+const fragmentShader = compileShader(document.getElementById('fragmentShader').textContent, gl.FRAGMENT_SHADER);
+
+const program = gl.createProgram();
+gl.attachShader(program, vertexShader);
+gl.attachShader(program, fragmentShader);
+gl.linkProgram(program);
+
+if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
+    console.error('Program linking error:', gl.getProgramInfoLog(program));
+}
+
+gl.useProgram(program);
+
+// Create rectangle covering the entire canvas
+// Four clip-space vertices, drawn as a TRIANGLE_STRIP in drawScene().
+const positionBuffer = gl.createBuffer();
+gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
+gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
+    -1.0, -1.0,
+    1.0, -1.0,
+    -1.0, 1.0,
+    1.0, 1.0
+]), gl.STATIC_DRAW);
+
+// Set up attributes and uniforms
+const positionLocation = gl.getAttribLocation(program, 'position');
+gl.enableVertexAttribArray(positionLocation);
+gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
+
+// Core uniforms written every frame / on pattern refresh.
+const timeLocation = gl.getUniformLocation(program, 'time');
+const resolutionLocation = gl.getUniformLocation(program, 'resolution');
+const seedLocation = gl.getUniformLocation(program, 'seed');
+
+// GUI-controlled uniform locations
+// Written by updateUniforms() from the params object below.
+const timeScaleLocation = gl.getUniformLocation(program, 'timeScale');
+const bloomStrengthLocation = gl.getUniformLocation(program, 'bloomStrength');
+const saturationLocation = gl.getUniformLocation(program, 'saturation');
+const grainAmountLocation = gl.getUniformLocation(program, 'grainAmount');
+const colorTintLocation = gl.getUniformLocation(program, 'colorTint');
+const minCircleSizeLocation = gl.getUniformLocation(program, 'minCircleSize');
+const circleStrengthLocation = gl.getUniformLocation(program, 'circleStrength');
+const distortXLocation = gl.getUniformLocation(program, 'distortX');
+const distortYLocation = gl.getUniformLocation(program, 'distortY');
+
+const patternAmpLocation = gl.getUniformLocation(program, 'patternAmp');
+const patternFreqLocation = gl.getUniformLocation(program, 'patternFreq');
+
+// Initialize parameters object for dat.gui
+// Defaults for every GUI-exposed control; updateUniforms() pushes these
+// values into the shader uniforms of the same names, and the colorTintR/G/B
+// scalars are combined into the colorTint vec3.
+const params = {
+    canvasWidth: startingWidth,
+    canvasHeight: startingHeight,
+    timeScale: .666,
+    patternAmp: 2,
+    patternFreq: 0.4,
+    bloomStrength: 0.5,
+    saturation: 1.74,
+    grainAmount: 0.161,
+    colorTintR: 1.5,
+    colorTintG: 1.0,
+    colorTintB: 1.0,
+    minCircleSize: 2.8,
+    circleStrength: 0,
+    distortX: 1,
+    distortY: 1,
+};
+
+// Also refresh on page load
+window.addEventListener('load', refreshPattern);
+window.addEventListener('resize', updateCanvasSize);
+
+// Initialize dat.gui
+const gui = new dat.GUI({ autoplace: false });
+gui.close();
+
+// Add GUI controls with folders for organization
+// Each controller edits a field of `params` and re-uploads uniforms (or
+// resizes the canvas) through its onChange handler.
+const canvasFolder = gui.addFolder('Canvas Size');
+canvasFolder.add(params, 'canvasWidth', 100, 4000).step(10).name('Width').onChange(updateCanvasSize);
+canvasFolder.add(params, 'canvasHeight', 100, 4000).step(10).name('Height').onChange(updateCanvasSize);
+canvasFolder.open();
+
+const timeFolder = gui.addFolder('Animation');
+timeFolder.add(params, 'timeScale', 0.1, 3.0).name('Speed').onChange(updateUniforms);
+timeFolder.open();
+
+const patternFolder = gui.addFolder('Pattern');
+patternFolder.add(params, 'patternAmp', 1.0, 50.0).step(0.1).name('Pattern Amp').onChange(updateUniforms);
+patternFolder.add(params, 'patternFreq', 0.2, 10.0).step(0.1).name('Pattern Freq').onChange(updateUniforms);
+patternFolder.open();
+
+const visualFolder = gui.addFolder('Visual Effects');
+visualFolder.add(params, 'bloomStrength', 0.0, 5.0).name('Bloom').onChange(updateUniforms);
+visualFolder.add(params, 'saturation', 0.0, 2.0).name('Saturation').onChange(updateUniforms);
+visualFolder.add(params, 'grainAmount', 0.0, 0.5).name('Grain').onChange(updateUniforms);
+visualFolder.add(params, 'minCircleSize', 0.0, 10.0).name('Circle Size').onChange(updateUniforms);
+visualFolder.add(params, 'circleStrength', 0.0, 3.0).name('Circle Strength').onChange(updateUniforms);
+visualFolder.add(params, 'distortX', 0.0, 50.0).name('Distort-X').onChange(updateUniforms);
+visualFolder.add(params, 'distortY', 0.0, 50.0).name('Distort-Y').onChange(updateUniforms);
+
+visualFolder.open();
+
+// The three tint sliders feed the single colorTint vec3 uniform.
+const colorFolder = gui.addFolder('Color Tint');
+colorFolder.add(params, 'colorTintR', 0.0, 1.5).name('Red').onChange(updateUniforms);
+colorFolder.add(params, 'colorTintG', 0.0, 1.5).name('Green').onChange(updateUniforms);
+colorFolder.add(params, 'colorTintB', 0.0, 1.5).name('Blue').onChange(updateUniforms);
+colorFolder.open();
+
+// Function to update shader uniforms from GUI values
+function updateUniforms() {
+ gl.uniform1f(timeScaleLocation, params.timeScale);
+ gl.uniform1f(patternAmpLocation, params.patternAmp);
+ gl.uniform1f(patternFreqLocation, params.patternFreq);
+ gl.uniform1f(bloomStrengthLocation, params.bloomStrength);
+ gl.uniform1f(saturationLocation, params.saturation);
+ gl.uniform1f(grainAmountLocation, params.grainAmount);
+ gl.uniform3f(colorTintLocation, params.colorTintR, params.colorTintG, params.colorTintB);
+ gl.uniform1f(minCircleSizeLocation, params.minCircleSize);
+ gl.uniform1f(circleStrengthLocation, params.circleStrength);
+ gl.uniform1f(distortXLocation, params.distortX);
+ gl.uniform1f(distortYLocation, params.distortY);
+}
+
+// Draw the full-screen quad (the 4-vertex TRIANGLE_STRIP set up above).
+function drawScene(){
+    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+}
+
+// Animation loop
+// rAF callback: advances the time uniform, keeps resolution in sync, and
+// redraws unless the muxer recording path is already driving drawScene().
+function render(timestamp) {
+    if (isPlaying) {
+        // Calculate adjusted time by subtracting the offset
+        const adjustedTime = timestamp - timeOffset;
+        time = timestamp;
+
+        // 0.0035 scales milliseconds to shader time units
+        // (3.5 units per real second).
+        const timeInSeconds = adjustedTime * 0.0035;
+        gl.uniform1f(timeLocation, timeInSeconds);
+        gl.uniform2f(resolutionLocation, canvas.width, canvas.height);
+
+
+        // If video recording is ongoing, drawScene is called already
+        if (!recordVideoState || useMobileRecord) {
+            drawScene();
+        }
+
+        animationID = requestAnimationFrame(render);
+    }
+}
+
+// Start the animation loop
+// Kick off: upload the seed via refreshPattern(), push initial uniform
+// values, then enter the rAF loop.
+isPlaying = true;
+refreshPattern();
+updateUniforms();
+animationID = requestAnimationFrame(render);
diff --git a/v1-com-officielle/public/mp4-muxer-main/LICENSE b/v1-com-officielle/public/mp4-muxer-main/LICENSE
new file mode 100644
index 0000000..06808a5
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Vanilagy
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/v1-com-officielle/public/mp4-muxer-main/LICENSE.txt b/v1-com-officielle/public/mp4-muxer-main/LICENSE.txt
new file mode 100644
index 0000000..7325ff5
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Alan Ang
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/v1-com-officielle/public/mp4-muxer-main/README.md b/v1-com-officielle/public/mp4-muxer-main/README.md
new file mode 100644
index 0000000..c097a89
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/README.md
@@ -0,0 +1,358 @@
+# mp4-muxer - JavaScript MP4 multiplexer
+
+[](https://www.npmjs.com/package/mp4-muxer)
+[](https://bundlephobia.com/package/mp4-muxer)
+[](https://www.npmjs.com/package/mp4-muxer)
+
+The WebCodecs API provides low-level access to media codecs, but provides no way of actually packaging (multiplexing)
+the encoded media into a playable file. This project implements an MP4 multiplexer in pure TypeScript, which is
+high-quality, fast and tiny, and supports both video and audio as well as various internal layouts such as Fast Start or
+fragmented MP4.
+
+[Demo: Muxing into a file](https://vanilagy.github.io/mp4-muxer/demo/)
+
+[Demo: Live streaming](https://vanilagy.github.io/mp4-muxer/demo-streaming)
+
+> **Note:** If you're looking to create **WebM** files, check out [webm-muxer](https://github.com/Vanilagy/webm-muxer),
+the sister library to mp4-muxer.
+
+> Consider [donating](https://ko-fi.com/vanilagy) if you've found this library useful and wish to support it ❤️
+
+## Quick start
+The following is an example for a common usage of this library:
+```js
+import { Muxer, ArrayBufferTarget } from 'mp4-muxer';
+
+let muxer = new Muxer({
+ target: new ArrayBufferTarget(),
+ video: {
+ codec: 'avc',
+ width: 1280,
+ height: 720
+ },
+ fastStart: 'in-memory'
+});
+
+let videoEncoder = new VideoEncoder({
+ output: (chunk, meta) => muxer.addVideoChunk(chunk, meta),
+ error: e => console.error(e)
+});
+videoEncoder.configure({
+ codec: 'avc1.42001f',
+ width: 1280,
+ height: 720,
+ bitrate: 1e6
+});
+
+/* Encode some frames... */
+
+await videoEncoder.flush();
+muxer.finalize();
+
+let { buffer } = muxer.target; // Buffer contains final MP4 file
+```
+
+## Motivation
+After [webm-muxer](https://github.com/Vanilagy/webm-muxer) gained traction for its ease of use and integration with the
+WebCodecs API, this library was created to now also allow the creation of MP4 files while maintaining the same DX.
+While WebM is a more modern format, MP4 is an established standard and supported on way more devices.
+
+## Installation
+Using NPM, simply install this package using
+```
+npm install mp4-muxer
+```
+You can import all exported classes like so:
+```js
+import * as Mp4Muxer from 'mp4-muxer';
+// Or, using CommonJS:
+const Mp4Muxer = require('mp4-muxer');
+```
+Alternatively, you can simply include the library as a script in your HTML, which will add an `Mp4Muxer` object,
+containing all the exported classes, to the global object, like so:
+```html
+
+```
+
+## Usage
+### Initialization
+For each MP4 file you wish to create, create an instance of `Muxer` like so:
+```js
+import { Muxer } from 'mp4-muxer';
+
+let muxer = new Muxer(options);
+```
+The available options are defined by the following interface:
+```ts
+interface MuxerOptions {
+ target:
+ | ArrayBufferTarget
+ | StreamTarget
+ | FileSystemWritableFileStreamTarget,
+
+ video?: {
+ codec: 'avc' | 'hevc' | 'vp9' | 'av1',
+ width: number,
+ height: number,
+
+ // Adds rotation metadata to the file
+ rotation?: 0 | 90 | 180 | 270 | TransformationMatrix
+ },
+
+ audio?: {
+ codec: 'aac' | 'opus',
+ numberOfChannels: number,
+ sampleRate: number
+ },
+
+ fastStart:
+ | false
+ | 'in-memory'
+ | 'fragmented'
+ | { expectedVideoChunks?: number, expectedAudioChunks?: number }
+
+ firstTimestampBehavior?: 'strict' | 'offset' | 'cross-track-offset'
+}
+```
+Codecs currently supported by this library are AVC/H.264, HEVC/H.265, VP9 and AV1 for video, and AAC and Opus for audio.
+#### `target` (required)
+This option specifies where the data created by the muxer will be written. The options are:
+- `ArrayBufferTarget`: The file data will be written into a single large buffer, which is then stored in the target.
+
+ ```js
+ import { Muxer, ArrayBufferTarget } from 'mp4-muxer';
+
+ let muxer = new Muxer({
+ target: new ArrayBufferTarget(),
+ fastStart: 'in-memory',
+ // ...
+ });
+
+ // ...
+
+ muxer.finalize();
+ let { buffer } = muxer.target;
+ ```
+- `StreamTarget`: This target defines callbacks that will get called whenever there is new data available - this is
+ useful if you want to stream the data, e.g. pipe it somewhere else. The constructor has the following signature:
+
+ ```ts
+ constructor(options: {
+ onData?: (data: Uint8Array, position: number) => void,
+ chunked?: boolean,
+ chunkSize?: number
+ });
+ ```
+
+ `onData` is called for each new chunk of available data. The `position` argument specifies the offset in bytes at
+ which the data has to be written. Since the data written by the muxer is not always sequential, **make sure to
+ respect this argument**.
+
+    When using `chunked: true`, data created by the muxer will first be accumulated and only written out once it has
+    reached sufficient size. This is useful for reducing the total amount of writes, at the cost of latency. It uses a
+    default chunk size of 16 MiB, which can be overridden by manually setting `chunkSize` to the desired byte length.
+
+ If you want to use this target for *live-streaming*, i.e. playback before muxing has finished, you also need to set
+ `fastStart: 'fragmented'`.
+
+ Usage example:
+ ```js
+ import { Muxer, StreamTarget } from 'mp4-muxer';
+
+ let muxer = new Muxer({
+ target: new StreamTarget({
+ onData: (data, position) => { /* Do something with the data */ }
+ }),
+ fastStart: false,
+ // ...
+ });
+ ```
+- `FileSystemWritableFileStreamTarget`: This is essentially a wrapper around a chunked `StreamTarget` with the intention
+ of simplifying the use of this library with the File System Access API. Writing the file directly to disk as it's
+ being created comes with many benefits, such as creating files way larger than the available RAM.
+
+ You can optionally override the default `chunkSize` of 16 MiB.
+ ```ts
+ constructor(
+ stream: FileSystemWritableFileStream,
+ options?: { chunkSize?: number }
+ );
+ ```
+
+ Usage example:
+ ```js
+ import { Muxer, FileSystemWritableFileStreamTarget } from 'mp4-muxer';
+
+ let fileHandle = await window.showSaveFilePicker({
+ suggestedName: `video.mp4`,
+ types: [{
+ description: 'Video File',
+ accept: { 'video/mp4': ['.mp4'] }
+ }],
+ });
+ let fileStream = await fileHandle.createWritable();
+ let muxer = new Muxer({
+ target: new FileSystemWritableFileStreamTarget(fileStream),
+ fastStart: false,
+ // ...
+ });
+
+ // ...
+
+ muxer.finalize();
+ await fileStream.close(); // Make sure to close the stream
+ ```
+#### `fastStart` (required)
+By default, MP4 metadata (track info, sample timing, etc.) is stored at the end of the file - this makes writing the
+file faster and easier. However, placing this metadata at the _start_ of the file instead (known as "Fast Start")
+provides certain benefits: The file becomes easier to stream over the web without range requests, and sites like YouTube
+can start processing the video while it's uploading. This library provides full control over the placement of metadata
+by setting `fastStart` to one of these options:
+- `false`: Disables Fast Start, placing all metadata at the end of the file. This option is the fastest and uses the
+ least memory. This option is recommended for large, unbounded files that are streamed directly to disk.
+- `'in-memory'`: Produces a file with Fast Start by keeping all media chunks in memory until the file is finalized. This
+ option produces the most compact output possible at the cost of a more expensive finalization step and higher memory
+ requirements. This is the preferred option when using `ArrayBufferTarget` as it will result in a higher-quality
+ output with no change in memory footprint.
+- `'fragmented'`: Produces a _fragmented MP4 (fMP4)_ file, evenly placing sample metadata throughout the file by
+ grouping it into "fragments" (short sections of media), while placing general metadata at the beginning of the file.
+ Fragmented files are ideal for streaming, as they are optimized for random access with minimal to no seeking.
+ Furthermore, they remain lightweight to create no matter how large the file becomes, as they don't require media to
+ be kept in memory for very long. While fragmented files are not as widely supported as regular MP4 files, this
+ option provides powerful benefits with very few downsides. Further details
+ [here](#additional-notes-about-fragmented-mp4-files).
+- `object`: Produces a file with Fast Start by reserving space for metadata when muxing begins. To know
+ how many bytes need to be reserved to be safe, you'll have to provide the following data:
+ ```ts
+ {
+ expectedVideoChunks?: number,
+ expectedAudioChunks?: number
+ }
+ ```
+ Note that the property `expectedVideoChunks` is _required_ if you have a video track - the same goes for audio. With
+ this option set, you cannot mux more chunks than the number you've specified (although fewer is fine).
+
+ This option is faster than `'in-memory'` and uses no additional memory, but results in a slightly larger output,
+ making it useful for when you want to stream the file to disk while still retaining Fast Start.
+#### `firstTimestampBehavior` (optional)
+Specifies how to deal with the first chunk in each track having a non-zero timestamp. In the default strict mode,
+timestamps must start with 0 to ensure proper playback. However, when directly piping video frames or audio data
+from a MediaTrackStream into the encoder and then the muxer, the timestamps are usually relative to the age of
+the document or the computer's clock, which is typically not what we want. Handling of these timestamps must be
+set explicitly:
+- Use `'offset'` to offset the timestamp of each track by that track's first chunk's timestamp. This way, it
+starts at 0.
+- Use `'cross-track-offset'` to offset the timestamp of each track by the _minimum of all tracks' first chunk timestamp_.
+This works like `'offset'`, but should be used when all tracks use the same clock.
+
+### Muxing media chunks
+Then, with VideoEncoder and AudioEncoder set up, send encoded chunks to the muxer using the following methods:
+```ts
+addVideoChunk(
+ chunk: EncodedVideoChunk,
+ meta?: EncodedVideoChunkMetadata,
+ timestamp?: number,
+ compositionTimeOffset?: number
+): void;
+
+addAudioChunk(
+ chunk: EncodedAudioChunk,
+ meta?: EncodedAudioChunkMetadata,
+ timestamp?: number
+): void;
+```
+
+Both methods accept an optional third argument `timestamp` (microseconds) which, if specified, overrides
+the `timestamp` property of the passed-in chunk.
+
+The metadata comes from the second parameter of the `output` callback given to the
+VideoEncoder or AudioEncoder's constructor and needs to be passed into the muxer, like so:
+```js
+let videoEncoder = new VideoEncoder({
+ output: (chunk, meta) => muxer.addVideoChunk(chunk, meta),
+ error: e => console.error(e)
+});
+videoEncoder.configure(/* ... */);
+```
+
+The optional field `compositionTimeOffset` can be used when the decode time of the chunk doesn't equal its presentation
+time; this is the case when [B-frames](https://en.wikipedia.org/wiki/Video_compression_picture_types) are present.
+B-frames don't occur when using the WebCodecs API for encoding. The decode time is calculated by subtracting
+`compositionTimeOffset` from `timestamp`, meaning `timestamp` dictates the presentation time.
+
+Should you have obtained your encoded media data from a source other than the WebCodecs API, you can use these following
+methods to directly send your raw data to the muxer:
+```ts
+addVideoChunkRaw(
+ data: Uint8Array,
+ type: 'key' | 'delta',
+ timestamp: number, // in microseconds
+ duration: number, // in microseconds
+ meta?: EncodedVideoChunkMetadata,
+ compositionTimeOffset?: number // in microseconds
+): void;
+
+addAudioChunkRaw(
+ data: Uint8Array,
+ type: 'key' | 'delta',
+ timestamp: number, // in microseconds
+ duration: number, // in microseconds
+ meta?: EncodedAudioChunkMetadata
+): void;
+```
+
+### Finishing up
+When encoding is finished and all the encoders have been flushed, call `finalize` on the `Muxer` instance to finalize
+the MP4 file:
+```js
+muxer.finalize();
+```
+When using an ArrayBufferTarget, the final buffer will be accessible through it:
+```js
+let { buffer } = muxer.target;
+```
+When using a FileSystemWritableFileStreamTarget, make sure to close the stream after calling `finalize`:
+```js
+await fileStream.close();
+```
+
+## Details
+### Variable frame rate
+MP4 files support variable frame rate, however some players (such as QuickTime) have been observed not to behave well
+when the timestamps are irregular. Therefore, whenever possible, try aiming for a fixed frame rate.
+
+### Additional notes about fragmented MP4 files
+By breaking up the media and related metadata into small fragments, fMP4 files optimize for random access and are ideal
+for streaming, while remaining cheap to write even for long files. However, you should keep these things in mind:
+- **Media chunk buffering:**
+ When muxing a file with a video **and** an audio track, the muxer needs to wait for the chunks from _both_ media
+ to finalize any given fragment. In other words, it must buffer chunks of one medium if the other medium has not yet
+ encoded chunks up to that timestamp. For example, should you first encode all your video frames and then encode the
+ audio afterward, the multiplexer will have to hold all those video frames in memory until the audio chunks start
+ coming in. This might lead to memory exhaustion should your video be very long. When there is only one media track,
+ this issue does not arise. So, when muxing a multimedia file, make sure it is somewhat limited in size or the chunks
+ are encoded in a somewhat interleaved way (like is the case for live media). This will keep memory usage at a
+ constant low.
+- **Video key frame frequency:**
+ Every track's first sample in a fragment must be a key frame in order to be able to play said fragment without the
+ knowledge of previous ones. However, this means that the muxer needs to wait for a video key frame to begin a new
+ fragment. If these key frames are too infrequent, fragments become too large, harming random access. Therefore,
+ every 5–10 seconds, you should force a video key frame like so:
+ ```js
+ videoEncoder.encode(frame, { keyFrame: true });
+ ```
+
+## Implementation & development
+MP4 files are based on the ISO Base Media Format, which structures its files as a hierarchy of boxes (or atoms). The
+standards used to implement this library were
+[ISO/IEC 14496-1](http://netmedia.zju.edu.cn/multimedia2013/mpeg-4/ISO%20IEC%2014496-1%20MPEG-4%20System%20Standard.pdf),
+[ISO/IEC 14496-12](https://web.archive.org/web/20231123030701/https://b.goeswhere.com/ISO_IEC_14496-12_2015.pdf)
+and
+[ISO/IEC 14496-14](https://github.com/OpenAnsible/rust-mp4/raw/master/docs/ISO_IEC_14496-14_2003-11-15.pdf).
+Additionally, the
+[QuickTime MP4 Specification](https://developer.apple.com/library/archive/documentation/QuickTime/QTFF/QTFFPreface/qtffPreface.html)
+was a very useful resource.
+
+For development, clone this repository, install everything with `npm install`, then run `npm run watch` to bundle the
+code into the `build` directory. Run `npm run check` to run the TypeScript type checker, and `npm run lint` to run
+ESLint.
diff --git a/v1-com-officielle/public/mp4-muxer-main/background_styles.css b/v1-com-officielle/public/mp4-muxer-main/background_styles.css
new file mode 100644
index 0000000..2042f68
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/background_styles.css
@@ -0,0 +1,28 @@
+html, body{
+ font-size: 16px;
+ font-family: Helvetica;
+ margin: 0;
+ padding: 0;
+ position: absolute;
+ background-color: #000000;
+ width: 100%;
+ overflow-x: hidden;
+ display: block;
+}
+
+canvas {
+ display: block;
+ max-width: 100%;
+ margin: 0 auto;
+ padding: 0;
+ /* margin-top: 0vh; */
+ text-align: center;
+ /* height: 100vh; */
+}
+
+
+.dg{
+ display: none !important;
+ height: 0;
+ overflow: hidden;
+}
diff --git a/v1-com-officielle/public/mp4-muxer-main/build.mjs b/v1-com-officielle/public/mp4-muxer-main/build.mjs
new file mode 100644
index 0000000..70e4c7f
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/build.mjs
@@ -0,0 +1,46 @@
+import * as esbuild from 'esbuild';
+
+const baseConfig = {
+ entryPoints: ['src/index.ts'],
+ bundle: true,
+ logLevel: 'info'
+};
+
+const umdConfig = {
+ ...baseConfig,
+ format: 'iife',
+
+ // The following are hacks to basically make this a UMD module. No native support for that in esbuild as of today
+ globalName: 'Mp4Muxer',
+
+ footer: {
+ js:
+`if (typeof module === "object" && typeof module.exports === "object") Object.assign(module.exports, Mp4Muxer)`
+ }
+};
+
+const esmConfig = {
+ ...baseConfig,
+ format: 'esm'
+};
+
+let ctxUmd = await esbuild.context({
+ ...umdConfig,
+ outfile: 'build/mp4-muxer.js'
+});
+let ctxEsm = await esbuild.context({
+ ...esmConfig,
+ outfile: 'build/mp4-muxer.mjs'
+});
+let ctxUmdMinified = await esbuild.context({
+ ...umdConfig,
+ outfile: 'build/mp4-muxer.min.js',
+ minify: true
+});
+let ctxEsmMinified = await esbuild.context({
+ ...esmConfig,
+ outfile: 'build/mp4-muxer.min.mjs',
+ minify: true
+});
+
+await Promise.all([ctxUmd.watch(), ctxEsm.watch(), ctxUmdMinified.watch(), ctxEsmMinified.watch()]);
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.d.ts b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.d.ts
new file mode 100644
index 0000000..303f545
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.d.ts
@@ -0,0 +1,226 @@
+declare type TransformationMatrix = [number, number, number, number, number, number, number, number, number];
+
+declare interface VideoOptions {
+ /**
+ * The codec of the encoded video chunks.
+ */
+ codec: 'avc' | 'hevc' | 'vp9' | 'av1',
+ /**
+ * The width of the video in pixels.
+ */
+ width: number,
+ /**
+ * The height of the video in pixels.
+ */
+ height: number,
+ /**
+ * The clockwise rotation of the video in degrees, or a transformation matrix.
+ */
+ rotation?: 0 | 90 | 180 | 270 | TransformationMatrix
+}
+
+declare interface AudioOptions {
+ /**
+ * The codec of the encoded audio chunks.
+ */
+ codec: 'aac' | 'opus',
+ /**
+ * The number of audio channels in the audio track.
+ */
+ numberOfChannels: number,
+ /**
+ * The sample rate of the audio track in samples per second per channel.
+ */
+ sampleRate: number
+}
+
+/**
+ * Describes the properties used to configure an instance of `Muxer`.
+ */
+declare type MuxerOptions<T extends Target> = {
+ /**
+ * Specifies what happens with the data created by the muxer.
+ */
+ target: T,
+
+ /**
+ * When set, declares the existence of a video track in the MP4 file and configures that video track.
+ */
+ video?: VideoOptions,
+
+ /**
+ * When set, declares the existence of an audio track in the MP4 file and configures that audio track.
+ */
+ audio?: AudioOptions,
+
+ /**
+ * Controls the placement of metadata in the file. Placing metadata at the start of the file is known as "Fast
+ * Start", which results in better playback at the cost of more required processing or memory.
+ *
+ * Use `false` to disable Fast Start, placing the metadata at the end of the file. Fastest and uses the least
+ * memory.
+ *
+ * Use `'in-memory'` to produce a file with Fast Start by keeping all media chunks in memory until the file is
+ * finalized. This produces a high-quality and compact output at the cost of a more expensive finalization step and
+ * higher memory requirements.
+ *
+ * Use `'fragmented'` to place metadata at the start of the file by creating a fragmented "fMP4" file. In a
+ * fragmented file, chunks of media and their metadata are written to the file in "fragments", eliminating the need
+ * to put all metadata in one place. Fragmented files are useful for streaming, as they allow for better random
+ * access. Furthermore, they remain lightweight to create even for very large files, as they don't require all media
+ * to be kept in memory. However, fragmented files are not as widely supported as regular MP4 files.
+ *
+ * Use an object to produce a file with Fast Start by reserving space for metadata when muxing starts. In order to
+ * know how much space needs to be reserved, you'll need to tell it the upper bound of how many media chunks will be
+ * muxed. Do this by setting `expectedVideoChunks` and/or `expectedAudioChunks`.
+ */
+ fastStart: false | 'in-memory' | 'fragmented' | {
+ expectedVideoChunks?: number,
+ expectedAudioChunks?: number
+ },
+
+ /**
+ * Specifies how to deal with the first chunk in each track having a non-zero timestamp. In the default strict mode,
+ * timestamps must start with 0 to ensure proper playback. However, when directly piping video frames or audio data
+ * from a MediaTrackStream into the encoder and then the muxer, the timestamps are usually relative to the age of
+ * the document or the computer's clock, which is typically not what we want. Handling of these timestamps must be
+ * set explicitly:
+ *
+ * Use `'offset'` to offset the timestamp of each video track by that track's first chunk's timestamp. This way, it
+ * starts at 0.
+ *
+ * Use `'cross-track-offset'` to offset the timestamp of _both_ tracks by whichever track's first chunk timestamp is
+ * earliest. This is designed for cases when both tracks' timestamps come from the same clock source.
+ */
+ firstTimestampBehavior?: 'strict' | 'offset' | 'cross-track-offset'
+};
+
+declare type Target = ArrayBufferTarget | StreamTarget | FileSystemWritableFileStreamTarget;
+
+/** The file data will be written into a single large buffer, which is then stored in `buffer` upon finalization. */
+declare class ArrayBufferTarget {
+ buffer: ArrayBuffer;
+}
+
+/**
+ * This target defines callbacks that will get called whenever there is new data available - this is useful if
+ * you want to stream the data, e.g. pipe it somewhere else.
+ *
+ * When using `chunked: true` in the options, data created by the muxer will first be accumulated and only written out
+ * once it has reached sufficient size, using a default chunk size of 16 MiB. This is useful for reducing the total
+ * amount of writes, at the cost of latency.
+ */
+declare class StreamTarget {
+ constructor(options: {
+ onData?: (data: Uint8Array, position: number) => void,
+ chunked?: boolean,
+ chunkSize?: number
+ });
+}
+
+/**
+ * This is essentially a wrapper around a chunked `StreamTarget` with the intention of simplifying the use of this
+ * library with the File System Access API. Writing the file directly to disk as it's being created comes with many
+ * benefits, such as creating files way larger than the available RAM.
+ */
+declare class FileSystemWritableFileStreamTarget {
+ constructor(
+ stream: FileSystemWritableFileStream,
+ options?: { chunkSize?: number }
+ );
+}
+
+/**
+ * Used to multiplex video and audio chunks into a single MP4 file. For each MP4 file you want to create, create
+ * one instance of `Muxer`.
+ */
+declare class Muxer<T extends Target> {
+ target: T;
+
+ /**
+ * Creates a new instance of `Muxer`.
+ * @param options Specifies configuration and metadata for the MP4 file.
+ */
+ constructor(options: MuxerOptions<T>);
+
+ /**
+ * Adds a new, encoded video chunk to the MP4 file.
+ * @param chunk The encoded video chunk. Can be obtained through a `VideoEncoder`.
+ * @param meta The metadata about the encoded video, also provided by `VideoEncoder`.
+ * @param timestamp Optionally, the presentation timestamp to use for the video chunk. When not provided, it will
+ * use the one specified in `chunk`.
+ * @param compositionTimeOffset Optionally, the composition time offset (i.e. presentation timestamp minus decode
+ * timestamp) to use for the video chunk. When not provided, it will be zero.
+ */
+ addVideoChunk(
+ chunk: EncodedVideoChunk,
+ meta?: EncodedVideoChunkMetadata,
+ timestamp?: number,
+ compositionTimeOffset?: number
+ ): void;
+ /**
+ * Adds a new, encoded audio chunk to the MP4 file.
+ * @param chunk The encoded audio chunk. Can be obtained through an `AudioEncoder`.
+ * @param meta The metadata about the encoded audio, also provided by `AudioEncoder`.
+ * @param timestamp Optionally, the timestamp to use for the audio chunk. When not provided, it will use the one
+ * specified in `chunk`.
+ */
+ addAudioChunk(chunk: EncodedAudioChunk, meta?: EncodedAudioChunkMetadata, timestamp?: number): void;
+
+ /**
+ * Adds a raw video chunk to the MP4 file. This method should be used when the encoded video is not obtained
+ * through a `VideoEncoder` but through some other means, where no instance of `EncodedVideoChunk` is available.
+ * @param data The raw data of the video chunk.
+ * @param type Whether the video chunk is a keyframe or delta frame.
+ * @param timestamp The timestamp of the video chunk.
+ * @param duration The duration of the video chunk.
+ * @param meta Optionally, any encoder metadata.
+ * @param compositionTimeOffset The composition time offset (i.e. presentation timestamp minus decode timestamp) of
+ * the video chunk.
+ */
+ addVideoChunkRaw(
+ data: Uint8Array,
+ type: 'key' | 'delta',
+ timestamp: number,
+ duration: number,
+ meta?: EncodedVideoChunkMetadata,
+ compositionTimeOffset?: number
+ ): void;
+ /**
+ * Adds a raw audio chunk to the MP4 file. This method should be used when the encoded audio is not obtained
+ * through an `AudioEncoder` but through some other means, where no instance of `EncodedAudioChunk` is available.
+ * @param data The raw data of the audio chunk.
+ * @param type Whether the audio chunk is a keyframe or delta frame.
+ * @param timestamp The timestamp of the audio chunk.
+ * @param duration The duration of the audio chunk.
+ * @param meta Optionally, any encoder metadata.
+ */
+ addAudioChunkRaw(
+ data: Uint8Array,
+ type: 'key' | 'delta',
+ timestamp: number,
+ duration: number,
+ meta?: EncodedAudioChunkMetadata
+ ): void;
+
+ /**
+ * Is to be called after all media chunks have been added to the muxer. Make sure to call and await the `flush`
+ * method on your `VideoEncoder` and/or `AudioEncoder` before calling this method to ensure all encoding has
+ * finished. This method will then finish up the writing process of the MP4 file.
+ */
+ finalize(): void;
+}
+
+declare global {
+ let Mp4Muxer: typeof Mp4Muxer;
+}
+
+export {
+ Muxer,
+ MuxerOptions,
+ ArrayBufferTarget,
+ StreamTarget,
+ FileSystemWritableFileStreamTarget,
+ TransformationMatrix
+};
+export as namespace Mp4Muxer;
diff --git a/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.js b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.js
new file mode 100644
index 0000000..67b8c8d
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.js
@@ -0,0 +1,1718 @@
+"use strict";
+var Mp4Muxer = (() => {
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var __accessCheck = (obj, member, msg) => {
+ if (!member.has(obj))
+ throw TypeError("Cannot " + msg);
+ };
+ var __privateGet = (obj, member, getter) => {
+ __accessCheck(obj, member, "read from private field");
+ return getter ? getter.call(obj) : member.get(obj);
+ };
+ var __privateAdd = (obj, member, value) => {
+ if (member.has(obj))
+ throw TypeError("Cannot add the same private member more than once");
+ member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ };
+ var __privateSet = (obj, member, value, setter) => {
+ __accessCheck(obj, member, "write to private field");
+ setter ? setter.call(obj, value) : member.set(obj, value);
+ return value;
+ };
+ var __privateWrapper = (obj, member, setter, getter) => ({
+ set _(value) {
+ __privateSet(obj, member, value, setter);
+ },
+ get _() {
+ return __privateGet(obj, member, getter);
+ }
+ });
+ var __privateMethod = (obj, member, method) => {
+ __accessCheck(obj, member, "access private method");
+ return method;
+ };
+
+ // src/index.ts
+ var src_exports = {};
+ __export(src_exports, {
+ ArrayBufferTarget: () => ArrayBufferTarget,
+ FileSystemWritableFileStreamTarget: () => FileSystemWritableFileStreamTarget,
+ Muxer: () => Muxer,
+ StreamTarget: () => StreamTarget
+ });
+
+ // src/misc.ts
+ var bytes = new Uint8Array(8);
+ var view = new DataView(bytes.buffer);
+ var u8 = (value) => {
+ return [(value % 256 + 256) % 256];
+ };
+ var u16 = (value) => {
+ view.setUint16(0, value, false);
+ return [bytes[0], bytes[1]];
+ };
+ var i16 = (value) => {
+ view.setInt16(0, value, false);
+ return [bytes[0], bytes[1]];
+ };
+ var u24 = (value) => {
+ view.setUint32(0, value, false);
+ return [bytes[1], bytes[2], bytes[3]];
+ };
+ var u32 = (value) => {
+ view.setUint32(0, value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
+ };
+ var i32 = (value) => {
+ view.setInt32(0, value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
+ };
+ var u64 = (value) => {
+ view.setUint32(0, Math.floor(value / 2 ** 32), false);
+ view.setUint32(4, value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7]];
+ };
+ var fixed_8_8 = (value) => {
+ view.setInt16(0, 2 ** 8 * value, false);
+ return [bytes[0], bytes[1]];
+ };
+ var fixed_16_16 = (value) => {
+ view.setInt32(0, 2 ** 16 * value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
+ };
+ var fixed_2_30 = (value) => {
+ view.setInt32(0, 2 ** 30 * value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
+ };
+ var ascii = (text, nullTerminated = false) => {
+ let bytes2 = Array(text.length).fill(null).map((_, i) => text.charCodeAt(i));
+ if (nullTerminated)
+ bytes2.push(0);
+ return bytes2;
+ };
+ var last = (arr) => {
+ return arr && arr[arr.length - 1];
+ };
+ var lastPresentedSample = (samples) => {
+ let result = void 0;
+ for (let sample of samples) {
+ if (!result || sample.presentationTimestamp > result.presentationTimestamp) {
+ result = sample;
+ }
+ }
+ return result;
+ };
+ var intoTimescale = (timeInSeconds, timescale, round = true) => {
+ let value = timeInSeconds * timescale;
+ return round ? Math.round(value) : value;
+ };
+ var rotationMatrix = (rotationInDegrees) => {
+ let theta = rotationInDegrees * (Math.PI / 180);
+ let cosTheta = Math.cos(theta);
+ let sinTheta = Math.sin(theta);
+ return [
+ cosTheta,
+ sinTheta,
+ 0,
+ -sinTheta,
+ cosTheta,
+ 0,
+ 0,
+ 0,
+ 1
+ ];
+ };
+ var IDENTITY_MATRIX = rotationMatrix(0);
+ var matrixToBytes = (matrix) => {
+ return [
+ fixed_16_16(matrix[0]),
+ fixed_16_16(matrix[1]),
+ fixed_2_30(matrix[2]),
+ fixed_16_16(matrix[3]),
+ fixed_16_16(matrix[4]),
+ fixed_2_30(matrix[5]),
+ fixed_16_16(matrix[6]),
+ fixed_16_16(matrix[7]),
+ fixed_2_30(matrix[8])
+ ];
+ };
+ var deepClone = (x) => {
+ if (!x)
+ return x;
+ if (typeof x !== "object")
+ return x;
+ if (Array.isArray(x))
+ return x.map(deepClone);
+ return Object.fromEntries(Object.entries(x).map(([key, value]) => [key, deepClone(value)]));
+ };
+ var isU32 = (value) => {
+ return value >= 0 && value < 2 ** 32;
+ };
+
+ // src/box.ts
+ var box = (type, contents, children) => ({
+ type,
+ contents: contents && new Uint8Array(contents.flat(10)),
+ children
+ });
+ var fullBox = (type, version, flags, contents, children) => box(
+ type,
+ [u8(version), u24(flags), contents ?? []],
+ children
+ );
+ var ftyp = (details) => {
+ let minorVersion = 512;
+ if (details.fragmented)
+ return box("ftyp", [
+ ascii("iso5"),
+ // Major brand
+ u32(minorVersion),
+ // Minor version
+ // Compatible brands
+ ascii("iso5"),
+ ascii("iso6"),
+ ascii("mp41")
+ ]);
+ return box("ftyp", [
+ ascii("isom"),
+ // Major brand
+ u32(minorVersion),
+ // Minor version
+ // Compatible brands
+ ascii("isom"),
+ details.holdsAvc ? ascii("avc1") : [],
+ ascii("mp41")
+ ]);
+ };
+ var mdat = (reserveLargeSize) => ({ type: "mdat", largeSize: reserveLargeSize });
+ var free = (size) => ({ type: "free", size });
+ var moov = (tracks, creationTime, fragmented = false) => box("moov", null, [
+ mvhd(creationTime, tracks),
+ ...tracks.map((x) => trak(x, creationTime)),
+ fragmented ? mvex(tracks) : null
+ ]);
+ var mvhd = (creationTime, tracks) => {
+ let duration = intoTimescale(Math.max(
+ 0,
+ ...tracks.filter((x) => x.samples.length > 0).map((x) => {
+ const lastSample = lastPresentedSample(x.samples);
+ return lastSample.presentationTimestamp + lastSample.duration;
+ })
+ ), GLOBAL_TIMESCALE);
+ let nextTrackId = Math.max(...tracks.map((x) => x.id)) + 1;
+ let needsU64 = !isU32(creationTime) || !isU32(duration);
+ let u32OrU64 = needsU64 ? u64 : u32;
+ return fullBox("mvhd", +needsU64, 0, [
+ u32OrU64(creationTime),
+ // Creation time
+ u32OrU64(creationTime),
+ // Modification time
+ u32(GLOBAL_TIMESCALE),
+ // Timescale
+ u32OrU64(duration),
+ // Duration
+ fixed_16_16(1),
+ // Preferred rate
+ fixed_8_8(1),
+ // Preferred volume
+ Array(10).fill(0),
+ // Reserved
+ matrixToBytes(IDENTITY_MATRIX),
+ // Matrix
+ Array(24).fill(0),
+ // Pre-defined
+ u32(nextTrackId)
+ // Next track ID
+ ]);
+ };
+ var trak = (track, creationTime) => box("trak", null, [
+ tkhd(track, creationTime),
+ mdia(track, creationTime)
+ ]);
+ var tkhd = (track, creationTime) => {
+ let lastSample = lastPresentedSample(track.samples);
+ let durationInGlobalTimescale = intoTimescale(
+ lastSample ? lastSample.presentationTimestamp + lastSample.duration : 0,
+ GLOBAL_TIMESCALE
+ );
+ let needsU64 = !isU32(creationTime) || !isU32(durationInGlobalTimescale);
+ let u32OrU64 = needsU64 ? u64 : u32;
+ let matrix;
+ if (track.info.type === "video") {
+ matrix = typeof track.info.rotation === "number" ? rotationMatrix(track.info.rotation) : track.info.rotation;
+ } else {
+ matrix = IDENTITY_MATRIX;
+ }
+ return fullBox("tkhd", +needsU64, 3, [
+ u32OrU64(creationTime),
+ // Creation time
+ u32OrU64(creationTime),
+ // Modification time
+ u32(track.id),
+ // Track ID
+ u32(0),
+ // Reserved
+ u32OrU64(durationInGlobalTimescale),
+ // Duration
+ Array(8).fill(0),
+ // Reserved
+ u16(0),
+ // Layer
+ u16(0),
+ // Alternate group
+ fixed_8_8(track.info.type === "audio" ? 1 : 0),
+ // Volume
+ u16(0),
+ // Reserved
+ matrixToBytes(matrix),
+ // Matrix
+ fixed_16_16(track.info.type === "video" ? track.info.width : 0),
+ // Track width
+ fixed_16_16(track.info.type === "video" ? track.info.height : 0)
+ // Track height
+ ]);
+ };
+ var mdia = (track, creationTime) => box("mdia", null, [
+ mdhd(track, creationTime),
+ hdlr(track.info.type === "video" ? "vide" : "soun"),
+ minf(track)
+ ]);
+ var mdhd = (track, creationTime) => {
+ let lastSample = lastPresentedSample(track.samples);
+ let localDuration = intoTimescale(
+ lastSample ? lastSample.presentationTimestamp + lastSample.duration : 0,
+ track.timescale
+ );
+ let needsU64 = !isU32(creationTime) || !isU32(localDuration);
+ let u32OrU64 = needsU64 ? u64 : u32;
+ return fullBox("mdhd", +needsU64, 0, [
+ u32OrU64(creationTime),
+ // Creation time
+ u32OrU64(creationTime),
+ // Modification time
+ u32(track.timescale),
+ // Timescale
+ u32OrU64(localDuration),
+ // Duration
+ u16(21956),
+ // Language ("und", undetermined)
+ u16(0)
+ // Quality
+ ]);
+ };
+ var hdlr = (componentSubtype) => fullBox("hdlr", 0, 0, [
+ ascii("mhlr"),
+ // Component type
+ ascii(componentSubtype),
+ // Component subtype
+ u32(0),
+ // Component manufacturer
+ u32(0),
+ // Component flags
+ u32(0),
+ // Component flags mask
+ ascii("mp4-muxer-hdlr", true)
+ // Component name
+ ]);
+ var minf = (track) => box("minf", null, [
+ track.info.type === "video" ? vmhd() : smhd(),
+ dinf(),
+ stbl(track)
+ ]);
+ var vmhd = () => fullBox("vmhd", 0, 1, [
+ u16(0),
+ // Graphics mode
+ u16(0),
+ // Opcolor R
+ u16(0),
+ // Opcolor G
+ u16(0)
+ // Opcolor B
+ ]);
+ var smhd = () => fullBox("smhd", 0, 0, [
+ u16(0),
+ // Balance
+ u16(0)
+ // Reserved
+ ]);
+ var dinf = () => box("dinf", null, [
+ dref()
+ ]);
+ var dref = () => fullBox("dref", 0, 0, [
+ u32(1)
+ // Entry count
+ ], [
+ url()
+ ]);
+ var url = () => fullBox("url ", 0, 1);
+ var stbl = (track) => {
+ const needsCtts = track.compositionTimeOffsetTable.length > 1 || track.compositionTimeOffsetTable.some((x) => x.sampleCompositionTimeOffset !== 0);
+ return box("stbl", null, [
+ stsd(track),
+ stts(track),
+ stss(track),
+ stsc(track),
+ stsz(track),
+ stco(track),
+ needsCtts ? ctts(track) : null
+ ]);
+ };
+ var stsd = (track) => fullBox("stsd", 0, 0, [
+ u32(1)
+ // Entry count
+ ], [
+ track.info.type === "video" ? videoSampleDescription(
+ VIDEO_CODEC_TO_BOX_NAME[track.info.codec],
+ track
+ ) : soundSampleDescription(
+ AUDIO_CODEC_TO_BOX_NAME[track.info.codec],
+ track
+ )
+ ]);
+ var videoSampleDescription = (compressionType, track) => box(compressionType, [
+ Array(6).fill(0),
+ // Reserved
+ u16(1),
+ // Data reference index
+ u16(0),
+ // Pre-defined
+ u16(0),
+ // Reserved
+ Array(12).fill(0),
+ // Pre-defined
+ u16(track.info.width),
+ // Width
+ u16(track.info.height),
+ // Height
+ u32(4718592),
+ // Horizontal resolution
+ u32(4718592),
+ // Vertical resolution
+ u32(0),
+ // Reserved
+ u16(1),
+ // Frame count
+ Array(32).fill(0),
+ // Compressor name
+ u16(24),
+ // Depth
+ i16(65535)
+ // Pre-defined
+ ], [
+ VIDEO_CODEC_TO_CONFIGURATION_BOX[track.info.codec](track)
+ ]);
var avcC = (track) => {
  // The WebCodecs `description` for AVC is already a complete
  // AVCDecoderConfigurationRecord, so its bytes are copied in verbatim.
  const config = track.info.decoderConfig;
  return config && box("avcC", [...new Uint8Array(config.description)]);
};
var hvcC = (track) => {
  // The WebCodecs `description` for HEVC is already a complete
  // HEVCDecoderConfigurationRecord, so its bytes are copied in verbatim.
  const config = track.info.decoderConfig;
  return config && box("hvcC", [...new Uint8Array(config.description)]);
};
// VP9 configuration box (vpcC, version 1 VPCodecConfigurationRecord). The
// record is derived entirely from the codec string ("vp09.PP.LL.DD"), since
// WebCodecs provides no description buffer for VP9. Returns null when no
// decoder config is available; throws on malformed input.
var vpcC = (track) => {
  if (!track.info.decoderConfig) {
    return null;
  }
  let decoderConfig = track.info.decoderConfig;
  if (!decoderConfig.colorSpace) {
    throw new Error(`'colorSpace' is required in the decoder config for VP9.`);
  }
  let parts = decoderConfig.codec.split(".");
  let profile = Number(parts[1]);
  let level = Number(parts[2]);
  let bitDepth = Number(parts[3]);
  // Guard against malformed codec strings: Number(...) yields NaN for missing
  // or non-numeric parts, and DataView would silently coerce NaN to 0,
  // corrupting the output file without any error.
  if (!Number.isInteger(profile) || !Number.isInteger(level) || !Number.isInteger(bitDepth)) {
    throw new Error(`Invalid VP9 codec string '${decoderConfig.codec}'; expected the form 'vp09.PP.LL.DD'.`);
  }
  // Chroma subsampling is hardcoded to 0 — presumably 4:2:0 content only;
  // TODO confirm against the VP Codec ISO Media binding for other modes.
  let chromaSubsampling = 0;
  let thirdByte = (bitDepth << 4) + (chromaSubsampling << 1) + Number(decoderConfig.colorSpace.fullRange);
  // 2 = "unspecified" for primaries/transfer/matrix (ISO/IEC 23091-2 codes).
  let colourPrimaries = 2;
  let transferCharacteristics = 2;
  let matrixCoefficients = 2;
  return fullBox("vpcC", 1, 0, [
    u8(profile),
    // Profile
    u8(level),
    // Level
    u8(thirdByte),
    // Bit depth, chroma subsampling, full range
    u8(colourPrimaries),
    // Colour primaries
    u8(transferCharacteristics),
    // Transfer characteristics
    u8(matrixCoefficients),
    // Matrix coefficients
    u16(0)
    // Codec initialization data size
  ]);
};
var av1C = () => {
  // Minimal AV1CodecConfigurationRecord: marker bit set, version 1, and all
  // remaining fields zeroed. No config OBUs are appended.
  const MARKER = 1;
  const VERSION = 1;
  const firstByte = (MARKER << 7) | VERSION;
  return box("av1C", [firstByte, 0, 0, 0]);
};
// Audio sample description entry (AudioSampleEntry, ISO/IEC 14496-12).
// `compressionType` is the box name (mp4a or Opus) and the codec-specific
// configuration box (esds / dOps) is appended as the only child.
var soundSampleDescription = (compressionType, track) => box(compressionType, [
  Array(6).fill(0),
  // Reserved
  u16(1),
  // Data reference index
  u16(0),
  // Version
  u16(0),
  // Revision level
  u32(0),
  // Vendor
  u16(track.info.numberOfChannels),
  // Number of channels
  u16(16),
  // Sample size (bits)
  u16(0),
  // Compression ID
  u16(0),
  // Packet size
  fixed_16_16(track.info.sampleRate)
  // Sample rate (16.16 fixed-point)
], [
  AUDIO_CODEC_TO_CONFIGURATION_BOX[track.info.codec](track)
]);
// Elementary stream descriptor box (esds) for AAC. Wraps the raw
// AudioSpecificConfig from the WebCodecs decoder config in the MPEG-4
// descriptor hierarchy. Each u32 tag value is a descriptor tag byte followed
// by three 0x80 extended-length prefix bytes (e.g. 58753152 = 0x03808080).
var esds = (track) => {
  let description = new Uint8Array(track.info.decoderConfig.description);
  return fullBox("esds", 0, 0, [
    // https://stackoverflow.com/a/54803118
    u32(58753152),
    // TAG(3) = Object Descriptor ([2]) (0x03808080)
    u8(32 + description.byteLength),
    // length of this OD (which includes the next 2 tags)
    u16(1),
    // ES_ID = 1
    u8(0),
    // flags etc = 0
    u32(75530368),
    // TAG(4) = ES Descriptor ([2]) embedded in above OD (0x04808080)
    u8(18 + description.byteLength),
    // length of this ESD
    u8(64),
    // MPEG-4 Audio
    u8(21),
    // stream type(6bits)=5 audio, flags(2bits)=1
    u24(0),
    // 24bit buffer size
    u32(130071),
    // max bitrate
    u32(130071),
    // avg bitrate
    u32(92307584),
    // TAG(5) = ASC ([2],[3]) embedded in above OD (0x05808080)
    u8(description.byteLength),
    // length
    ...description,
    u32(109084800),
    // TAG(6) (0x06808080)
    u8(1),
    // length
    u8(2)
    // data
  ]);
};
// Opus decoder configuration box (dOps), per the "Opus in ISO BMFF" binding.
var dOps = (track) => box("dOps", [
  u8(0),
  // Version
  u8(track.info.numberOfChannels),
  // OutputChannelCount
  u16(3840),
  // PreSkip, should be at least 80 milliseconds worth of playback, measured in 48000 Hz samples
  u32(track.info.sampleRate),
  // InputSampleRate
  fixed_8_8(0),
  // OutputGain
  u8(0)
  // ChannelMappingFamily
  // NOTE(review): family 0 only covers mono/stereo; tracks with more than two
  // channels would additionally require a channel mapping table — confirm.
]);
var stts = (track) => {
  // Time-to-sample box: run-length encoded sample durations.
  const entries = track.timeToSampleTable.map((run) => [
    u32(run.sampleCount), // Sample count
    u32(run.sampleDelta)  // Sample duration
  ]);
  return fullBox("stts", 0, 0, [
    u32(track.timeToSampleTable.length), // Number of entries
    entries
  ]);
};
var stss = (track) => {
  // Sync sample box. Omitted entirely when every sample is a key frame, which
  // by convention means all samples are sync samples.
  if (track.samples.every((sample) => sample.type === "key")) {
    return null;
  }
  const keyIndices = [];
  for (const [index, sample] of track.samples.entries()) {
    if (sample.type === "key") {
      keyIndices.push(index);
    }
  }
  return fullBox("stss", 0, 0, [
    u32(keyIndices.length),                   // Number of entries
    keyIndices.map((index) => u32(index + 1)) // Sync sample table (1-indexed)
  ]);
};
var stsc = (track) => {
  // Sample-to-chunk box, in its compact run-length form.
  const table = track.compactlyCodedChunkTable.map((run) => [
    u32(run.firstChunk),      // First chunk of this run (1-indexed)
    u32(run.samplesPerChunk), // Samples in each chunk of the run
    u32(1)                    // Sample description index
  ]);
  return fullBox("stsc", 0, 0, [
    u32(track.compactlyCodedChunkTable.length), // Number of entries
    table
  ]);
};
var stsz = (track) => {
  // Sample size box; a fixed size of 0 signals that per-sample sizes follow.
  const sizes = track.samples.map((sample) => u32(sample.size));
  return fullBox("stsz", 0, 0, [
    u32(0),                    // Sample size (0 means non-constant size)
    u32(track.samples.length), // Number of entries
    sizes                      // Sample size table
  ]);
};
var stco = (track) => {
  // Chunk offset box. Switches to 64-bit offsets (co64) as soon as any chunk
  // starts at or beyond 2^32 bytes.
  const chunks = track.finalizedChunks;
  const needs64Bit = chunks.length > 0 && last(chunks).offset >= 2 ** 32;
  if (needs64Bit) {
    return fullBox("co64", 0, 0, [
      u32(chunks.length),                      // Number of entries
      chunks.map((chunk) => u64(chunk.offset)) // Chunk offset table
    ]);
  }
  return fullBox("stco", 0, 0, [
    u32(chunks.length),                      // Number of entries
    chunks.map((chunk) => u32(chunk.offset)) // Chunk offset table
  ]);
};
var ctts = (track) => {
  // Composition time offset box: run-length encoded offsets between
  // presentation and decode timestamps.
  const entries = track.compositionTimeOffsetTable.map((run) => [
    u32(run.sampleCount),                // Sample count
    u32(run.sampleCompositionTimeOffset) // Sample offset
  ]);
  return fullBox("ctts", 0, 0, [
    u32(track.compositionTimeOffsetTable.length), // Number of entries
    entries
  ]);
};
var mvex = (tracks) => {
  // Movie extends box: one trex per track marks the file as fragmented.
  return box("mvex", null, tracks.map((track) => trex(track)));
};
var trex = (track) => {
  // Track extends box; all defaults are zero since each moof overrides them.
  const fields = [
    u32(track.id), // Track ID
    u32(1),        // Default sample description index
    u32(0),        // Default sample duration
    u32(0),        // Default sample size
    u32(0)         // Default sample flags
  ];
  return fullBox("trex", 0, 0, fields);
};
var moof = (sequenceNumber, tracks) => {
  // Movie fragment box: a header plus one traf per track.
  const trackFragments = tracks.map((track) => traf(track));
  return box("moof", null, [mfhd(sequenceNumber), ...trackFragments]);
};
var mfhd = (sequenceNumber) => {
  // Movie fragment header; the sequence number increases with each fragment.
  return fullBox("mfhd", 0, 0, [
    u32(sequenceNumber) // Sequence number
  ]);
};
var fragmentSampleFlags = (sample) => {
  // Builds the 32-bit sample flags value used by tfhd/trun. Delta (non-key)
  // samples are marked as depending on other samples and as non-sync; key
  // samples are marked as depending on no other sample.
  const isDelta = sample.type === "delta";
  const dependsOn = isDelta ? 1 : 2; // sample_depends_on field
  const nonSync = isDelta ? 1 : 0;   // sample_is_non_sync_sample flag
  return dependsOn << 24 | nonSync << 16 | 0 << 8 | 0;
};
var traf = (track) => {
  // Track fragment box: header, decode-time anchor, then the sample run.
  return box("traf", null, [tfhd(track), tfdt(track), trun(track)]);
};
// Track fragment header box. The tf_flags bits set here are, per ISO/IEC
// 14496-12: 0x8 default-sample-duration-present, 0x10 default-sample-size-
// present, 0x20 default-sample-flags-present, 0x20000 default-base-is-moof.
var tfhd = (track) => {
  let tfFlags = 0;
  tfFlags |= 8;
  // Default sample duration present (0x8)
  tfFlags |= 16;
  // Default sample size present (0x10)
  tfFlags |= 32;
  // Default sample flags present (0x20)
  tfFlags |= 131072;
  // Default base is moof (0x20000)
  // The second sample is used as the reference, falling back to the first —
  // presumably because the first sample's flags often differ and are emitted
  // via first-sample-flags in trun instead; TODO confirm.
  let referenceSample = track.currentChunk.samples[1] ?? track.currentChunk.samples[0];
  let referenceSampleInfo = {
    duration: referenceSample.timescaleUnitsToNextSample,
    size: referenceSample.size,
    flags: fragmentSampleFlags(referenceSample)
  };
  return fullBox("tfhd", 0, tfFlags, [
    u32(track.id),
    // Track ID
    u32(referenceSampleInfo.duration),
    // Default sample duration
    u32(referenceSampleInfo.size),
    // Default sample size
    u32(referenceSampleInfo.flags)
    // Default sample flags
  ]);
};
var tfdt = (track) => {
  // Track fragment decode time box; version 1 gives a 64-bit time field.
  const baseDecodeTime = intoTimescale(track.currentChunk.startTimestamp, track.timescale);
  return fullBox("tfdt", 1, 0, [
    u64(baseDecodeTime) // Base Media Decode Time
  ]);
};
// Track run box (version 1, so composition time offsets are signed). Each
// per-sample field is emitted only when it actually varies across the run;
// otherwise the defaults declared in tfhd apply.
var trun = (track) => {
  let allSampleDurations = track.currentChunk.samples.map((x) => x.timescaleUnitsToNextSample);
  let allSampleSizes = track.currentChunk.samples.map((x) => x.size);
  let allSampleFlags = track.currentChunk.samples.map(fragmentSampleFlags);
  let allSampleCompositionTimeOffsets = track.currentChunk.samples.map((x) => intoTimescale(x.presentationTimestamp - x.decodeTimestamp, track.timescale));
  let uniqueSampleDurations = new Set(allSampleDurations);
  let uniqueSampleSizes = new Set(allSampleSizes);
  let uniqueSampleFlags = new Set(allSampleFlags);
  let uniqueSampleCompositionTimeOffsets = new Set(allSampleCompositionTimeOffsets);
  // Use the compact first-sample-flags form when only the first sample's
  // flags differ from the rest (typically a leading key frame).
  let firstSampleFlagsPresent = uniqueSampleFlags.size === 2 && allSampleFlags[0] !== allSampleFlags[1];
  let sampleDurationPresent = uniqueSampleDurations.size > 1;
  let sampleSizePresent = uniqueSampleSizes.size > 1;
  let sampleFlagsPresent = !firstSampleFlagsPresent && uniqueSampleFlags.size > 1;
  let sampleCompositionTimeOffsetsPresent = uniqueSampleCompositionTimeOffsets.size > 1 || [...uniqueSampleCompositionTimeOffsets].some((x) => x !== 0);
  let flags = 0;
  flags |= 1;
  // Data offset present (0x1)
  flags |= 4 * +firstSampleFlagsPresent;
  // First sample flags present (0x4)
  flags |= 256 * +sampleDurationPresent;
  // Sample duration present (0x100)
  flags |= 512 * +sampleSizePresent;
  // Sample size present (0x200)
  flags |= 1024 * +sampleFlagsPresent;
  // Sample flags present (0x400)
  flags |= 2048 * +sampleCompositionTimeOffsetsPresent;
  // Sample composition time offsets present (0x800)
  return fullBox("trun", 1, flags, [
    u32(track.currentChunk.samples.length),
    // Sample count
    u32(track.currentChunk.offset - track.currentChunk.moofOffset || 0),
    // Data offset (the `|| 0` guards against NaN before offsets are known)
    firstSampleFlagsPresent ? u32(allSampleFlags[0]) : [],
    track.currentChunk.samples.map((_, i) => [
      sampleDurationPresent ? u32(allSampleDurations[i]) : [],
      // Sample duration
      sampleSizePresent ? u32(allSampleSizes[i]) : [],
      // Sample size
      sampleFlagsPresent ? u32(allSampleFlags[i]) : [],
      // Sample flags
      // Sample composition time offsets
      sampleCompositionTimeOffsetsPresent ? i32(allSampleCompositionTimeOffsets[i]) : []
    ])
  ]);
};
var mfra = (tracks) => {
  // Movie fragment random access box: one tfra entry per track, followed by
  // the mfro box that records the total mfra size.
  const trackBoxes = tracks.map((track, index) => tfra(track, index));
  return box("mfra", null, [...trackBoxes, mfro()]);
};
// Track fragment random access box. Version 1 means the time and moof-offset
// fields are 64-bit. `trackIndex` comes from the tracks.map() call in mfra.
var tfra = (track, trackIndex) => {
  let version = 1;
  // 64-bit time and moof offset
  return fullBox("tfra", version, 0, [
    u32(track.id),
    // Track ID
    u32(63),
    // This specifies that traf number, trun number and sample number are 32-bit ints
    u32(track.finalizedChunks.length),
    // Number of entries
    track.finalizedChunks.map((chunk) => [
      u64(intoTimescale(chunk.startTimestamp, track.timescale)),
      // Time
      u64(chunk.moofOffset),
      // moof offset
      u32(trackIndex + 1),
      // traf number (1-indexed)
      u32(1),
      // trun number
      u32(1)
      // Sample number
    ])
  ]);
};
var mfro = () => {
  // Movie fragment random access offset box. The size value written here is a
  // placeholder that gets patched from the outside once the size of the
  // enclosing mfra box is known.
  return fullBox("mfro", 0, 0, [
    u32(0) // Size (patched later)
  ]);
};
// Maps the muxer's codec identifiers to the four-character sample description
// box names used in stsd.
var VIDEO_CODEC_TO_BOX_NAME = {
  "avc": "avc1",
  "hevc": "hvc1",
  "vp9": "vp09",
  "av1": "av01"
};
// Maps each video codec to the function building its configuration child box.
var VIDEO_CODEC_TO_CONFIGURATION_BOX = {
  "avc": avcC,
  "hevc": hvcC,
  "vp9": vpcC,
  "av1": av1C
};
// Audio counterpart of VIDEO_CODEC_TO_BOX_NAME.
var AUDIO_CODEC_TO_BOX_NAME = {
  "aac": "mp4a",
  "opus": "Opus"
};
// Audio counterpart of VIDEO_CODEC_TO_CONFIGURATION_BOX.
var AUDIO_CODEC_TO_CONFIGURATION_BOX = {
  "aac": esds,
  "opus": dOps
};
+
// src/target.ts
// Target that collects the whole finished file into a single ArrayBuffer;
// `buffer` stays null until the muxer is finalized.
var ArrayBufferTarget = class {
  constructor() {
    this.buffer = null;
  }
};
// Target that streams written data out through callbacks in `options`
// (onData(data, position), plus chunking settings) instead of buffering.
var StreamTarget = class {
  constructor(options) {
    this.options = options;
  }
};
// Target that pipes output into a FileSystemWritableFileStream (File System
// Access API), with optional chunk-size options.
var FileSystemWritableFileStreamTarget = class {
  constructor(stream, options) {
    this.stream = stream;
    this.options = options;
  }
};
+
// src/writer.ts
var _helper, _helperView;
// Abstract base for all writers: tracks the current write position, provides
// big-endian integer/ASCII primitives, and knows how to serialize, measure
// and patch MP4 boxes. Subclasses implement write() and finalize().
var Writer = class {
  constructor() {
    // Absolute write position from the start of the file.
    this.pos = 0;
    // 8-byte scratch buffer (with a DataView over it) shared by the integer
    // and ASCII writing primitives below.
    __privateAdd(this, _helper, new Uint8Array(8));
    __privateAdd(this, _helperView, new DataView(__privateGet(this, _helper).buffer));
    /**
     * Stores the position from the start of the file to where boxes elements have been written. This is used to
     * rewrite/edit elements that were already added before, and to measure sizes of things.
     */
    this.offsets = /* @__PURE__ */ new WeakMap();
  }
  /** Sets the current position for future writes to a new one. */
  seek(newPos) {
    this.pos = newPos;
  }
  /** Writes a 32-bit unsigned integer, big-endian. */
  writeU32(value) {
    __privateGet(this, _helperView).setUint32(0, value, false);
    this.write(__privateGet(this, _helper).subarray(0, 4));
  }
  /** Writes a 64-bit unsigned integer, big-endian, as two 32-bit halves. */
  writeU64(value) {
    __privateGet(this, _helperView).setUint32(0, Math.floor(value / 2 ** 32), false);
    __privateGet(this, _helperView).setUint32(4, value, false);
    this.write(__privateGet(this, _helper).subarray(0, 8));
  }
  /** Writes an ASCII string, buffering up to 8 characters at a time. */
  writeAscii(text) {
    for (let i = 0; i < text.length; i++) {
      __privateGet(this, _helperView).setUint8(i % 8, text.charCodeAt(i));
      if (i % 8 === 7)
        this.write(__privateGet(this, _helper));
    }
    if (text.length % 8 !== 0) {
      this.write(__privateGet(this, _helper).subarray(0, text.length % 8));
    }
  }
  /**
   * Serializes a box at the current position. Leaf boxes (contents only) get
   * their size written up front; container boxes get a placeholder size that
   * is patched via seek() once the end position is known.
   */
  writeBox(box2) {
    this.offsets.set(box2, this.pos);
    if (box2.contents && !box2.children) {
      this.writeBoxHeader(box2, box2.size ?? box2.contents.byteLength + 8);
      this.write(box2.contents);
    } else {
      let startPos = this.pos;
      this.writeBoxHeader(box2, 0);
      if (box2.contents)
        this.write(box2.contents);
      if (box2.children) {
        for (let child of box2.children)
          if (child)
            this.writeBox(child);
      }
      let endPos = this.pos;
      let size = box2.size ?? endPos - startPos;
      this.seek(startPos);
      this.writeBoxHeader(box2, size);
      this.seek(endPos);
    }
  }
  /** Writes a box header; a 32-bit size of 1 signals a 64-bit "largesize". */
  writeBoxHeader(box2, size) {
    this.writeU32(box2.largeSize ? 1 : size);
    this.writeAscii(box2.type);
    if (box2.largeSize)
      this.writeU64(size);
  }
  /** Returns the header size in bytes (8, or 16 with a 64-bit size). */
  measureBoxHeader(box2) {
    return 8 + (box2.largeSize ? 8 : 0);
  }
  /** Re-serializes an already-written box in place, restoring the position. */
  patchBox(box2) {
    let endPos = this.pos;
    this.seek(this.offsets.get(box2));
    this.writeBox(box2);
    this.seek(endPos);
  }
  /** Computes the total serialized size of a box without writing anything. */
  measureBox(box2) {
    if (box2.contents && !box2.children) {
      let headerSize = this.measureBoxHeader(box2);
      return headerSize + box2.contents.byteLength;
    } else {
      let result = this.measureBoxHeader(box2);
      if (box2.contents)
        result += box2.contents.byteLength;
      if (box2.children) {
        for (let child of box2.children)
          if (child)
            result += this.measureBox(child);
      }
      return result;
    }
  }
};
_helper = new WeakMap();
_helperView = new WeakMap();
var _target, _buffer, _bytes, _maxPos, _ensureSize, ensureSize_fn;
// Writer backing an ArrayBufferTarget: accumulates everything into a growable
// in-memory buffer and hands the trimmed result to the target on finalize.
var ArrayBufferTargetWriter = class extends Writer {
  constructor(target) {
    super();
    __privateAdd(this, _ensureSize);
    __privateAdd(this, _target, void 0);
    __privateAdd(this, _buffer, new ArrayBuffer(2 ** 16));
    __privateAdd(this, _bytes, new Uint8Array(__privateGet(this, _buffer)));
    __privateAdd(this, _maxPos, 0);
    __privateSet(this, _target, target);
  }
  write(data) {
    __privateMethod(this, _ensureSize, ensureSize_fn).call(this, this.pos + data.byteLength);
    __privateGet(this, _bytes).set(data, this.pos);
    this.pos += data.byteLength;
    // Track the high-water mark; seek() may move pos backwards for patching.
    __privateSet(this, _maxPos, Math.max(__privateGet(this, _maxPos), this.pos));
  }
  finalize() {
    __privateMethod(this, _ensureSize, ensureSize_fn).call(this, this.pos);
    __privateGet(this, _target).buffer = __privateGet(this, _buffer).slice(0, Math.max(__privateGet(this, _maxPos), this.pos));
  }
};
_target = new WeakMap();
_buffer = new WeakMap();
_bytes = new WeakMap();
_maxPos = new WeakMap();
_ensureSize = new WeakSet();
// Grows the backing buffer (doubling) until it can hold `size` bytes.
ensureSize_fn = function(size) {
  let newLength = __privateGet(this, _buffer).byteLength;
  while (newLength < size)
    newLength *= 2;
  if (newLength === __privateGet(this, _buffer).byteLength)
    return;
  let newBuffer = new ArrayBuffer(newLength);
  let newBytes = new Uint8Array(newBuffer);
  newBytes.set(__privateGet(this, _bytes), 0);
  __privateSet(this, _buffer, newBuffer);
  __privateSet(this, _bytes, newBytes);
};
var _target2, _sections;
// Writer backing a non-chunked StreamTarget: buffers every write() as a
// (data, start) section, then on flush merges overlapping/adjacent sections
// into contiguous chunks and emits them via options.onData.
var StreamTargetWriter = class extends Writer {
  constructor(target) {
    super();
    __privateAdd(this, _target2, void 0);
    __privateAdd(this, _sections, []);
    __privateSet(this, _target2, target);
  }
  write(data) {
    __privateGet(this, _sections).push({
      // Copy the bytes, since the caller may reuse the underlying buffer.
      data: data.slice(),
      start: this.pos
    });
    this.pos += data.byteLength;
  }
  flush() {
    if (__privateGet(this, _sections).length === 0)
      return;
    let chunks = [];
    let sorted = [...__privateGet(this, _sections)].sort((a, b) => a.start - b.start);
    chunks.push({
      start: sorted[0].start,
      size: sorted[0].data.byteLength
    });
    // Merge sections that touch or overlap into single contiguous spans.
    for (let i = 1; i < sorted.length; i++) {
      let lastChunk = chunks[chunks.length - 1];
      let section = sorted[i];
      if (section.start <= lastChunk.start + lastChunk.size) {
        lastChunk.size = Math.max(lastChunk.size, section.start + section.data.byteLength - lastChunk.start);
      } else {
        chunks.push({
          start: section.start,
          size: section.data.byteLength
        });
      }
    }
    // Materialize each chunk's bytes — later sections overwrite earlier ones,
    // since iteration follows insertion order — and hand them to the target.
    for (let chunk of chunks) {
      chunk.data = new Uint8Array(chunk.size);
      for (let section of __privateGet(this, _sections)) {
        if (chunk.start <= section.start && section.start < chunk.start + chunk.size) {
          chunk.data.set(section.data, section.start - chunk.start);
        }
      }
      __privateGet(this, _target2).options.onData?.(chunk.data, chunk.start);
    }
    __privateGet(this, _sections).length = 0;
  }
  finalize() {
  }
};
_target2 = new WeakMap();
_sections = new WeakMap();
var DEFAULT_CHUNK_SIZE = 2 ** 24;
// Maximum number of partially-filled chunks kept in RAM at the same time.
var MAX_CHUNKS_AT_ONCE = 2;
var _target3, _chunkSize, _chunks, _writeDataIntoChunks, writeDataIntoChunks_fn, _insertSectionIntoChunk, insertSectionIntoChunk_fn, _createChunk, createChunk_fn, _flushChunks, flushChunks_fn;
// Writer backing a chunked StreamTarget: output is gathered into fixed-size
// in-RAM chunks which are flushed to options.onData once fully written (or
// unconditionally on finalize).
var ChunkedStreamTargetWriter = class extends Writer {
  constructor(target) {
    super();
    __privateAdd(this, _writeDataIntoChunks);
    __privateAdd(this, _insertSectionIntoChunk);
    __privateAdd(this, _createChunk);
    __privateAdd(this, _flushChunks);
    __privateAdd(this, _target3, void 0);
    __privateAdd(this, _chunkSize, void 0);
    /**
     * The data is divided up into fixed-size chunks, whose contents are first filled in RAM and then flushed out.
     * A chunk is flushed if all of its contents have been written.
     */
    __privateAdd(this, _chunks, []);
    __privateSet(this, _target3, target);
    __privateSet(this, _chunkSize, target.options?.chunkSize ?? DEFAULT_CHUNK_SIZE);
    if (!Number.isInteger(__privateGet(this, _chunkSize)) || __privateGet(this, _chunkSize) < 2 ** 10) {
      throw new Error("Invalid StreamTarget options: chunkSize must be an integer not smaller than 1024.");
    }
  }
  write(data) {
    __privateMethod(this, _writeDataIntoChunks, writeDataIntoChunks_fn).call(this, data, this.pos);
    __privateMethod(this, _flushChunks, flushChunks_fn).call(this);
    this.pos += data.byteLength;
  }
  finalize() {
    // Force-flush whatever is still buffered, complete or not.
    __privateMethod(this, _flushChunks, flushChunks_fn).call(this, true);
  }
};
_target3 = new WeakMap();
_chunkSize = new WeakMap();
_chunks = new WeakMap();
_writeDataIntoChunks = new WeakSet();
// Writes `data` at absolute `position`: finds (or creates) the chunk covering
// that position, copies in as much as fits, and recurses for any remainder
// spilling into the next chunk.
writeDataIntoChunks_fn = function(data, position) {
  let chunkIndex = __privateGet(this, _chunks).findIndex((x) => x.start <= position && position < x.start + __privateGet(this, _chunkSize));
  if (chunkIndex === -1)
    chunkIndex = __privateMethod(this, _createChunk, createChunk_fn).call(this, position);
  let chunk = __privateGet(this, _chunks)[chunkIndex];
  let relativePosition = position - chunk.start;
  let toWrite = data.subarray(0, Math.min(__privateGet(this, _chunkSize) - relativePosition, data.byteLength));
  chunk.data.set(toWrite, relativePosition);
  let section = {
    start: relativePosition,
    end: relativePosition + toWrite.byteLength
  };
  __privateMethod(this, _insertSectionIntoChunk, insertSectionIntoChunk_fn).call(this, chunk, section);
  // A chunk whose entire range has been written is ready to be flushed.
  if (chunk.written[0].start === 0 && chunk.written[0].end === __privateGet(this, _chunkSize)) {
    chunk.shouldFlush = true;
  }
  // Cap RAM usage: if too many chunks are held, flush all but the newest.
  if (__privateGet(this, _chunks).length > MAX_CHUNKS_AT_ONCE) {
    for (let i = 0; i < __privateGet(this, _chunks).length - 1; i++) {
      __privateGet(this, _chunks)[i].shouldFlush = true;
    }
    __privateMethod(this, _flushChunks, flushChunks_fn).call(this);
  }
  if (toWrite.byteLength < data.byteLength) {
    __privateMethod(this, _writeDataIntoChunks, writeDataIntoChunks_fn).call(this, data.subarray(toWrite.byteLength), position + toWrite.byteLength);
  }
};
_insertSectionIntoChunk = new WeakSet();
// Inserts a written range into the chunk's sorted `written` list (binary
// search by start position), then merges ranges that now touch or overlap.
insertSectionIntoChunk_fn = function(chunk, section) {
  let low = 0;
  let high = chunk.written.length - 1;
  let index = -1;
  // Binary search for the last entry whose start is <= the new section's.
  while (low <= high) {
    let mid = Math.floor(low + (high - low + 1) / 2);
    if (chunk.written[mid].start <= section.start) {
      low = mid + 1;
      index = mid;
    } else {
      high = mid - 1;
    }
  }
  chunk.written.splice(index + 1, 0, section);
  if (index === -1 || chunk.written[index].end < section.start)
    index++;
  // Coalesce with following entries while they overlap or are adjacent.
  while (index < chunk.written.length - 1 && chunk.written[index].end >= chunk.written[index + 1].start) {
    chunk.written[index].end = Math.max(chunk.written[index].end, chunk.written[index + 1].end);
    chunk.written.splice(index + 1, 1);
  }
};
_createChunk = new WeakSet();
// Allocates the chunk whose range contains `includesPosition` (chunk starts
// are aligned to multiples of the chunk size) and returns its index in the
// position-sorted chunk list.
createChunk_fn = function(includesPosition) {
  let start = Math.floor(includesPosition / __privateGet(this, _chunkSize)) * __privateGet(this, _chunkSize);
  let chunk = {
    start,
    data: new Uint8Array(__privateGet(this, _chunkSize)),
    // Written (start, end) ranges within this chunk, kept sorted and merged.
    written: [],
    shouldFlush: false
  };
  __privateGet(this, _chunks).push(chunk);
  __privateGet(this, _chunks).sort((a, b) => a.start - b.start);
  return __privateGet(this, _chunks).indexOf(chunk);
};
_flushChunks = new WeakSet();
// Emits every chunk marked shouldFlush (or all chunks when `force` is true)
// through the target's onData callback, then drops it from memory. Only the
// ranges actually written are emitted, not the whole chunk buffer.
flushChunks_fn = function(force = false) {
  for (let i = 0; i < __privateGet(this, _chunks).length; i++) {
    let chunk = __privateGet(this, _chunks)[i];
    if (!chunk.shouldFlush && !force)
      continue;
    for (let section of chunk.written) {
      __privateGet(this, _target3).options.onData?.(
        chunk.data.subarray(section.start, section.end),
        chunk.start + section.start
      );
    }
    // Remove the flushed chunk and compensate the loop index.
    __privateGet(this, _chunks).splice(i--, 1);
  }
};
// Adapter over ChunkedStreamTargetWriter that writes chunks into a
// FileSystemWritableFileStream by translating onData callbacks into
// stream.write() calls carrying an explicit byte position.
var FileSystemWritableFileStreamTargetWriter = class extends ChunkedStreamTargetWriter {
  constructor(target) {
    super(new StreamTarget({
      onData: (data, position) => target.stream.write({
        type: "write",
        data,
        position
      }),
      chunkSize: target.options?.chunkSize
    }));
  }
};
+
// src/muxer.ts
// Timescale of the movie header (units per second).
var GLOBAL_TIMESCALE = 1e3;
var SUPPORTED_VIDEO_CODECS2 = ["avc", "hevc", "vp9", "av1"];
var SUPPORTED_AUDIO_CODECS2 = ["aac", "opus"];
// Seconds between the MP4 epoch (1904-01-01) and the Unix epoch (1970-01-01);
// MP4 creation/modification times are counted from 1904.
var TIMESTAMP_OFFSET = 2082844800;
var FIRST_TIMESTAMP_BEHAVIORS = ["strict", "offset", "cross-track-offset"];
var _options, _writer, _ftypSize, _mdat, _videoTrack, _audioTrack, _creationTime, _finalizedChunks, _nextFragmentNumber, _videoSampleQueue, _audioSampleQueue, _finalized, _validateOptions, validateOptions_fn, _writeHeader, writeHeader_fn, _computeMoovSizeUpperBound, computeMoovSizeUpperBound_fn, _prepareTracks, prepareTracks_fn, _generateMpeg4AudioSpecificConfig, generateMpeg4AudioSpecificConfig_fn, _createSampleForTrack, createSampleForTrack_fn, _addSampleToTrack, addSampleToTrack_fn, _validateTimestamp, validateTimestamp_fn, _finalizeCurrentChunk, finalizeCurrentChunk_fn, _finalizeFragment, finalizeFragment_fn, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn, _ensureNotFinalized, ensureNotFinalized_fn;
+ var Muxer = class {
+ constructor(options) {
+ __privateAdd(this, _validateOptions);
+ __privateAdd(this, _writeHeader);
+ __privateAdd(this, _computeMoovSizeUpperBound);
+ __privateAdd(this, _prepareTracks);
+ // https://wiki.multimedia.cx/index.php/MPEG-4_Audio
+ __privateAdd(this, _generateMpeg4AudioSpecificConfig);
+ __privateAdd(this, _createSampleForTrack);
+ __privateAdd(this, _addSampleToTrack);
+ __privateAdd(this, _validateTimestamp);
+ __privateAdd(this, _finalizeCurrentChunk);
+ __privateAdd(this, _finalizeFragment);
+ __privateAdd(this, _maybeFlushStreamingTargetWriter);
+ __privateAdd(this, _ensureNotFinalized);
+ __privateAdd(this, _options, void 0);
+ __privateAdd(this, _writer, void 0);
+ __privateAdd(this, _ftypSize, void 0);
+ __privateAdd(this, _mdat, void 0);
+ __privateAdd(this, _videoTrack, null);
+ __privateAdd(this, _audioTrack, null);
+ __privateAdd(this, _creationTime, Math.floor(Date.now() / 1e3) + TIMESTAMP_OFFSET);
+ __privateAdd(this, _finalizedChunks, []);
+ // Fields for fragmented MP4:
+ __privateAdd(this, _nextFragmentNumber, 1);
+ __privateAdd(this, _videoSampleQueue, []);
+ __privateAdd(this, _audioSampleQueue, []);
+ __privateAdd(this, _finalized, false);
+ __privateMethod(this, _validateOptions, validateOptions_fn).call(this, options);
+ options.video = deepClone(options.video);
+ options.audio = deepClone(options.audio);
+ options.fastStart = deepClone(options.fastStart);
+ this.target = options.target;
+ __privateSet(this, _options, {
+ firstTimestampBehavior: "strict",
+ ...options
+ });
+ if (options.target instanceof ArrayBufferTarget) {
+ __privateSet(this, _writer, new ArrayBufferTargetWriter(options.target));
+ } else if (options.target instanceof StreamTarget) {
+ __privateSet(this, _writer, options.target.options?.chunked ? new ChunkedStreamTargetWriter(options.target) : new StreamTargetWriter(options.target));
+ } else if (options.target instanceof FileSystemWritableFileStreamTarget) {
+ __privateSet(this, _writer, new FileSystemWritableFileStreamTargetWriter(options.target));
+ } else {
+ throw new Error(`Invalid target: ${options.target}`);
+ }
+ __privateMethod(this, _prepareTracks, prepareTracks_fn).call(this);
+ __privateMethod(this, _writeHeader, writeHeader_fn).call(this);
+ }
+ addVideoChunk(sample, meta, timestamp, compositionTimeOffset) {
+ let data = new Uint8Array(sample.byteLength);
+ sample.copyTo(data);
+ this.addVideoChunkRaw(
+ data,
+ sample.type,
+ timestamp ?? sample.timestamp,
+ sample.duration,
+ meta,
+ compositionTimeOffset
+ );
+ }
+ addVideoChunkRaw(data, type, timestamp, duration, meta, compositionTimeOffset) {
+ __privateMethod(this, _ensureNotFinalized, ensureNotFinalized_fn).call(this);
+ if (!__privateGet(this, _options).video)
+ throw new Error("No video track declared.");
+ if (typeof __privateGet(this, _options).fastStart === "object" && __privateGet(this, _videoTrack).samples.length === __privateGet(this, _options).fastStart.expectedVideoChunks) {
+ throw new Error(`Cannot add more video chunks than specified in 'fastStart' (${__privateGet(this, _options).fastStart.expectedVideoChunks}).`);
+ }
+ let videoSample = __privateMethod(this, _createSampleForTrack, createSampleForTrack_fn).call(this, __privateGet(this, _videoTrack), data, type, timestamp, duration, meta, compositionTimeOffset);
+ if (__privateGet(this, _options).fastStart === "fragmented" && __privateGet(this, _audioTrack)) {
+ while (__privateGet(this, _audioSampleQueue).length > 0 && __privateGet(this, _audioSampleQueue)[0].decodeTimestamp <= videoSample.decodeTimestamp) {
+ let audioSample = __privateGet(this, _audioSampleQueue).shift();
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
+ }
+ if (videoSample.decodeTimestamp <= __privateGet(this, _audioTrack).lastDecodeTimestamp) {
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
+ } else {
+ __privateGet(this, _videoSampleQueue).push(videoSample);
+ }
+ } else {
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
+ }
+ }
+ addAudioChunk(sample, meta, timestamp) {
+ let data = new Uint8Array(sample.byteLength);
+ sample.copyTo(data);
+ this.addAudioChunkRaw(data, sample.type, timestamp ?? sample.timestamp, sample.duration, meta);
+ }
+ addAudioChunkRaw(data, type, timestamp, duration, meta) {
+ __privateMethod(this, _ensureNotFinalized, ensureNotFinalized_fn).call(this);
+ if (!__privateGet(this, _options).audio)
+ throw new Error("No audio track declared.");
+ if (typeof __privateGet(this, _options).fastStart === "object" && __privateGet(this, _audioTrack).samples.length === __privateGet(this, _options).fastStart.expectedAudioChunks) {
+ throw new Error(`Cannot add more audio chunks than specified in 'fastStart' (${__privateGet(this, _options).fastStart.expectedAudioChunks}).`);
+ }
+ let audioSample = __privateMethod(this, _createSampleForTrack, createSampleForTrack_fn).call(this, __privateGet(this, _audioTrack), data, type, timestamp, duration, meta);
+ if (__privateGet(this, _options).fastStart === "fragmented" && __privateGet(this, _videoTrack)) {
+ while (__privateGet(this, _videoSampleQueue).length > 0 && __privateGet(this, _videoSampleQueue)[0].decodeTimestamp <= audioSample.decodeTimestamp) {
+ let videoSample = __privateGet(this, _videoSampleQueue).shift();
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
+ }
+ if (audioSample.decodeTimestamp <= __privateGet(this, _videoTrack).lastDecodeTimestamp) {
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
+ } else {
+ __privateGet(this, _audioSampleQueue).push(audioSample);
+ }
+ } else {
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
+ }
+ }
+ /** Finalizes the file, making it ready for use. Must be called after all video and audio chunks have been added. */
+ finalize() {
+ if (__privateGet(this, _finalized)) {
+ throw new Error("Cannot finalize a muxer more than once.");
+ }
+ if (__privateGet(this, _options).fastStart === "fragmented") {
+ for (let videoSample of __privateGet(this, _videoSampleQueue))
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
+ for (let audioSample of __privateGet(this, _audioSampleQueue))
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
+ __privateMethod(this, _finalizeFragment, finalizeFragment_fn).call(this, false);
+ } else {
+ if (__privateGet(this, _videoTrack))
+ __privateMethod(this, _finalizeCurrentChunk, finalizeCurrentChunk_fn).call(this, __privateGet(this, _videoTrack));
+ if (__privateGet(this, _audioTrack))
+ __privateMethod(this, _finalizeCurrentChunk, finalizeCurrentChunk_fn).call(this, __privateGet(this, _audioTrack));
+ }
+ let tracks = [__privateGet(this, _videoTrack), __privateGet(this, _audioTrack)].filter(Boolean);
+ if (__privateGet(this, _options).fastStart === "in-memory") {
+ let mdatSize;
+ for (let i = 0; i < 2; i++) {
+ let movieBox2 = moov(tracks, __privateGet(this, _creationTime));
+ let movieBoxSize = __privateGet(this, _writer).measureBox(movieBox2);
+ mdatSize = __privateGet(this, _writer).measureBox(__privateGet(this, _mdat));
+ let currentChunkPos = __privateGet(this, _writer).pos + movieBoxSize + mdatSize;
+ for (let chunk of __privateGet(this, _finalizedChunks)) {
+ chunk.offset = currentChunkPos;
+ for (let { data } of chunk.samples) {
+ currentChunkPos += data.byteLength;
+ mdatSize += data.byteLength;
+ }
+ }
+ if (currentChunkPos < 2 ** 32)
+ break;
+ if (mdatSize >= 2 ** 32)
+ __privateGet(this, _mdat).largeSize = true;
+ }
+ let movieBox = moov(tracks, __privateGet(this, _creationTime));
+ __privateGet(this, _writer).writeBox(movieBox);
+ __privateGet(this, _mdat).size = mdatSize;
+ __privateGet(this, _writer).writeBox(__privateGet(this, _mdat));
+ for (let chunk of __privateGet(this, _finalizedChunks)) {
+ for (let sample of chunk.samples) {
+ __privateGet(this, _writer).write(sample.data);
+ sample.data = null;
+ }
+ }
+ } else if (__privateGet(this, _options).fastStart === "fragmented") {
+ let startPos = __privateGet(this, _writer).pos;
+ let mfraBox = mfra(tracks);
+ __privateGet(this, _writer).writeBox(mfraBox);
+ let mfraBoxSize = __privateGet(this, _writer).pos - startPos;
+ __privateGet(this, _writer).seek(__privateGet(this, _writer).pos - 4);
+ __privateGet(this, _writer).writeU32(mfraBoxSize);
+ } else {
+ let mdatPos = __privateGet(this, _writer).offsets.get(__privateGet(this, _mdat));
+ let mdatSize = __privateGet(this, _writer).pos - mdatPos;
+ __privateGet(this, _mdat).size = mdatSize;
+ __privateGet(this, _mdat).largeSize = mdatSize >= 2 ** 32;
+ __privateGet(this, _writer).patchBox(__privateGet(this, _mdat));
+ let movieBox = moov(tracks, __privateGet(this, _creationTime));
+ if (typeof __privateGet(this, _options).fastStart === "object") {
+ __privateGet(this, _writer).seek(__privateGet(this, _ftypSize));
+ __privateGet(this, _writer).writeBox(movieBox);
+ let remainingBytes = mdatPos - __privateGet(this, _writer).pos;
+ __privateGet(this, _writer).writeBox(free(remainingBytes));
+ } else {
+ __privateGet(this, _writer).writeBox(movieBox);
+ }
+ }
+ __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
+ __privateGet(this, _writer).finalize();
+ __privateSet(this, _finalized, true);
+ }
+ };
// Backing stores for the Muxer's lowered private members. The bundler
// rewrites each `#field` into a WeakMap keyed by the instance; the
// `__privateGet`/`__privateSet` helpers used throughout read and write
// these maps on behalf of the class.
_options = new WeakMap();
_writer = new WeakMap();
_ftypSize = new WeakMap();
_mdat = new WeakMap();
_videoTrack = new WeakMap();
_audioTrack = new WeakMap();
_creationTime = new WeakMap();
_finalizedChunks = new WeakMap();
_nextFragmentNumber = new WeakMap();
_videoSampleQueue = new WeakMap();
_audioSampleQueue = new WeakMap();
_finalized = new WeakMap();
_validateOptions = new WeakSet();
/**
 * Validates a muxer options object, throwing a descriptive Error on the
 * first invalid setting encountered. Pure validator: never mutates input.
 */
validateOptions_fn = function(options) {
  const { video, audio, firstTimestampBehavior, fastStart } = options;
  if (video) {
    if (!SUPPORTED_VIDEO_CODECS2.includes(video.codec)) {
      throw new Error(`Unsupported video codec: ${video.codec}`);
    }
    const rotation = video.rotation;
    if (typeof rotation === "number" && ![0, 90, 180, 270].includes(rotation)) {
      throw new Error(`Invalid video rotation: ${rotation}. Has to be 0, 90, 180 or 270.`);
    } else if (Array.isArray(rotation)) {
      // A matrix rotation must be exactly nine numeric entries.
      const wellFormedMatrix = rotation.length === 9 && rotation.every((entry) => typeof entry === "number");
      if (!wellFormedMatrix) {
        throw new Error(`Invalid video transformation matrix: ${rotation.join()}`);
      }
    }
  }
  if (audio && !SUPPORTED_AUDIO_CODECS2.includes(audio.codec)) {
    throw new Error(`Unsupported audio codec: ${audio.codec}`);
  }
  if (firstTimestampBehavior && !FIRST_TIMESTAMP_BEHAVIORS.includes(firstTimestampBehavior)) {
    throw new Error(`Invalid first timestamp behavior: ${firstTimestampBehavior}`);
  }
  if (typeof fastStart === "object") {
    // Object form requires an expected chunk count for each configured track.
    if (video && fastStart.expectedVideoChunks === void 0) {
      throw new Error(`'fastStart' is an object but is missing property 'expectedVideoChunks'.`);
    }
    if (audio && fastStart.expectedAudioChunks === void 0) {
      throw new Error(`'fastStart' is an object but is missing property 'expectedAudioChunks'.`);
    }
  } else if (![false, "in-memory", "fragmented"].includes(fastStart)) {
    throw new Error(`'fastStart' option must be false, 'in-memory', 'fragmented' or an object.`);
  }
};
_writeHeader = new WeakSet();
// Writes the file-level header: the `ftyp` box and — except in fragmented
// mode — the opening `mdat` box. For the object form of `fastStart`
// (expected chunk counts given up front), space for the eventual `moov` box
// is reserved right after `ftyp` by seeking forward before opening `mdat`.
writeHeader_fn = function() {
  __privateGet(this, _writer).writeBox(ftyp({
    holdsAvc: __privateGet(this, _options).video?.codec === "avc",
    fragmented: __privateGet(this, _options).fastStart === "fragmented"
  }));
  // Record where `ftyp` ends; finalization seeks back here to place `moov`
  // when the reserved-space fastStart variant is used.
  __privateSet(this, _ftypSize, __privateGet(this, _writer).pos);
  if (__privateGet(this, _options).fastStart === "in-memory") {
    // Only create the mdat descriptor; nothing is written until finalize().
    __privateSet(this, _mdat, mdat(false));
  } else if (__privateGet(this, _options).fastStart === "fragmented") {
    // Fragmented files carry media in per-fragment mdat boxes, so no global
    // mdat is opened here.
  } else {
    if (typeof __privateGet(this, _options).fastStart === "object") {
      // Reserve room for the movie box (upper bound) ahead of the media data.
      let moovSizeUpperBound = __privateMethod(this, _computeMoovSizeUpperBound, computeMoovSizeUpperBound_fn).call(this);
      __privateGet(this, _writer).seek(__privateGet(this, _writer).pos + moovSizeUpperBound);
    }
    // Open an mdat in its 64-bit (largeSize) form; patched during finalize.
    __privateSet(this, _mdat, mdat(true));
    __privateGet(this, _writer).writeBox(__privateGet(this, _mdat));
  }
  __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
};
_computeMoovSizeUpperBound = new WeakSet();
/**
 * Estimates an upper bound (in bytes) for the final `moov` box, based on the
 * expected video/audio chunk counts supplied in the object form of the
 * `fastStart` option. Returns undefined for any other `fastStart` value.
 */
computeMoovSizeUpperBound_fn = function() {
  const fastStart = __privateGet(this, _options).fastStart;
  if (typeof fastStart !== "object")
    return;
  // Per-track bound over the sample tables that grow with chunk count.
  const boundForCount = (n) => {
    if (!n)
      return 0;
    const twoThirdsCeil = Math.ceil(2 / 3 * n);
    return (4 + 4) * twoThirdsCeil + 4 * n + (4 + 4 + 4) * twoThirdsCeil + 4 * n + 8 * n;
  };
  // Fixed slack for all the constant-size boxes and headers.
  return boundForCount(fastStart.expectedVideoChunks) + boundForCount(fastStart.expectedAudioChunks) + 4096;
};
_prepareTracks = new WeakSet();
/**
 * Instantiates the internal mutable state object for each configured track.
 * The video track (when configured) gets id 1; the audio track gets id 2
 * when video exists, otherwise id 1.
 */
prepareTracks_fn = function() {
  const opts = __privateGet(this, _options);
  // Bookkeeping fields common to both track kinds; must be a fresh object
  // per track, hence a factory rather than a shared constant.
  const freshTrackState = () => ({
    samples: [],
    finalizedChunks: [],
    currentChunk: null,
    firstDecodeTimestamp: void 0,
    lastDecodeTimestamp: -1,
    timeToSampleTable: [],
    compositionTimeOffsetTable: [],
    lastTimescaleUnits: null,
    lastSample: null,
    compactlyCodedChunkTable: []
  });
  if (opts.video) {
    __privateSet(this, _videoTrack, {
      id: 1,
      info: {
        type: "video",
        codec: opts.video.codec,
        width: opts.video.width,
        height: opts.video.height,
        rotation: opts.video.rotation ?? 0,
        decoderConfig: null
      },
      // Timescale used by FFmpeg, contains many common frame rates as factors
      timescale: 11520,
      ...freshTrackState()
    });
  }
  if (opts.audio) {
    // No decoder config has arrived yet, so seed one with a guessed MPEG-4
    // AudioSpecificConfig built from the declared rate and channel count.
    let guessedCodecPrivate = __privateMethod(this, _generateMpeg4AudioSpecificConfig, generateMpeg4AudioSpecificConfig_fn).call(
      this,
      2,
      // Object type for AAC-LC, since it's the most common
      opts.audio.sampleRate,
      opts.audio.numberOfChannels
    );
    __privateSet(this, _audioTrack, {
      id: opts.video ? 2 : 1,
      info: {
        type: "audio",
        codec: opts.audio.codec,
        numberOfChannels: opts.audio.numberOfChannels,
        sampleRate: opts.audio.sampleRate,
        decoderConfig: {
          codec: opts.audio.codec,
          description: guessedCodecPrivate,
          numberOfChannels: opts.audio.numberOfChannels,
          sampleRate: opts.audio.sampleRate
        }
      },
      timescale: opts.audio.sampleRate,
      ...freshTrackState()
    });
  }
};
var _generateMpeg4AudioSpecificConfig = new WeakSet();
/**
 * Builds a best-guess MPEG-4 AudioSpecificConfig (ISO/IEC 14496-3) payload.
 *
 * Bug fix: a sample rate absent from the standard frequency table made
 * `indexOf` return -1, and `(-1).toString(2)` ("-1") leaked into the bit
 * string, yielding a malformed config. Such rates now use the escape value
 * 15 and write the exact rate as an explicit 24-bit field, as the spec
 * prescribes (the `frequencyIndex === 15` branch was previously unreachable).
 *
 * @param objectType Audio Object Type (2 = AAC-LC).
 * @param sampleRate Sampling rate in Hz.
 * @param numberOfChannels Channel configuration value.
 * @returns The packed configuration bytes.
 */
var generateMpeg4AudioSpecificConfig_fn = function(objectType, sampleRate, numberOfChannels) {
  let frequencyIndices = [96e3, 88200, 64e3, 48e3, 44100, 32e3, 24e3, 22050, 16e3, 12e3, 11025, 8e3, 7350];
  let frequencyIndex = frequencyIndices.indexOf(sampleRate);
  if (frequencyIndex === -1) {
    // Escape value: the exact sample rate is written explicitly below.
    frequencyIndex = 15;
  }
  let channelConfig = numberOfChannels;
  let configBits = "";
  configBits += objectType.toString(2).padStart(5, "0");
  configBits += frequencyIndex.toString(2).padStart(4, "0");
  if (frequencyIndex === 15)
    configBits += sampleRate.toString(2).padStart(24, "0");
  configBits += channelConfig.toString(2).padStart(4, "0");
  // Zero-pad to a whole number of bytes, then pack 8 bits per byte.
  let paddingLength = Math.ceil(configBits.length / 8) * 8;
  configBits = configBits.padEnd(paddingLength, "0");
  let configBytes = new Uint8Array(configBits.length / 8);
  for (let i = 0; i < configBits.length; i += 8) {
    configBytes[i / 8] = parseInt(configBits.slice(i, i + 8), 2);
  }
  return configBytes;
};
_createSampleForTrack = new WeakSet();
/**
 * Converts one encoded chunk (timestamps and duration in microseconds) into
 * the muxer's internal sample representation with times in seconds, and
 * folds any decoder configuration delivered in `meta` into the track info.
 * The decode timestamp is recovered by subtracting `compositionTimeOffset`
 * from the presentation timestamp.
 */
createSampleForTrack_fn = function(track, data, type, timestamp, duration, meta, compositionTimeOffset) {
  let presentationTimestampInSeconds = timestamp / 1e6;
  let decodeTimestampInSeconds = (timestamp - (compositionTimeOffset ?? 0)) / 1e6;
  let durationInSeconds = duration / 1e6;
  // May throw or shift both timestamps depending on firstTimestampBehavior.
  let adjusted = __privateMethod(this, _validateTimestamp, validateTimestamp_fn).call(this, presentationTimestampInSeconds, decodeTimestampInSeconds, track);
  presentationTimestampInSeconds = adjusted.presentationTimestamp;
  decodeTimestampInSeconds = adjusted.decodeTimestamp;
  if (meta?.decoderConfig) {
    if (track.info.decoderConfig === null) {
      track.info.decoderConfig = meta.decoderConfig;
    } else {
      // Merge, so later (possibly partial) configs update the existing one.
      Object.assign(track.info.decoderConfig, meta.decoderConfig);
    }
  }
  let sample = {
    presentationTimestamp: presentationTimestampInSeconds,
    decodeTimestamp: decodeTimestampInSeconds,
    duration: durationInSeconds,
    data,
    size: data.byteLength,
    type,
    // Will be refined once the next sample comes in
    timescaleUnitsToNextSample: intoTimescale(durationInSeconds, track.timescale)
  };
  return sample;
};
_addSampleToTrack = new WeakSet();
/**
 * Registers a sample with its track: updates the run-length-encoded
 * time-to-sample and composition-offset tables (non-fragmented mode only),
 * refines the previous sample's duration now that this sample's decode time
 * is known, and groups samples into chunks. Non-fragmented mode cuts a new
 * chunk roughly every 0.5 s; fragmented mode cuts a fragment on a keyframe
 * of the leading track once the current chunk spans at least 1 s.
 */
addSampleToTrack_fn = function(track, sample) {
  if (__privateGet(this, _options).fastStart !== "fragmented") {
    // Non-fragmented files need the full sample list to build `moov` later.
    track.samples.push(sample);
  }
  const sampleCompositionTimeOffset = intoTimescale(sample.presentationTimestamp - sample.decodeTimestamp, track.timescale);
  if (track.lastTimescaleUnits !== null) {
    // The previous sample's true duration is the delta between consecutive
    // decode timestamps, in timescale units.
    let timescaleUnits = intoTimescale(sample.decodeTimestamp, track.timescale, false);
    let delta = Math.round(timescaleUnits - track.lastTimescaleUnits);
    // Accumulate the rounded delta (not the raw position) to avoid drift.
    track.lastTimescaleUnits += delta;
    track.lastSample.timescaleUnitsToNextSample = delta;
    if (__privateGet(this, _options).fastStart !== "fragmented") {
      // Run-length encode deltas into the time-to-sample table; the previous
      // sample's provisional single-entry run is corrected in place.
      let lastTableEntry = last(track.timeToSampleTable);
      if (lastTableEntry.sampleCount === 1) {
        lastTableEntry.sampleDelta = delta;
        lastTableEntry.sampleCount++;
      } else if (lastTableEntry.sampleDelta === delta) {
        lastTableEntry.sampleCount++;
      } else {
        // Delta changed: the previous sample leaves the old run and starts a
        // new run of two together with the current sample.
        lastTableEntry.sampleCount--;
        track.timeToSampleTable.push({
          sampleCount: 2,
          sampleDelta: delta
        });
      }
      // Composition offsets are run-length encoded the same way.
      const lastCompositionTimeOffsetTableEntry = last(track.compositionTimeOffsetTable);
      if (lastCompositionTimeOffsetTableEntry.sampleCompositionTimeOffset === sampleCompositionTimeOffset) {
        lastCompositionTimeOffsetTableEntry.sampleCount++;
      } else {
        track.compositionTimeOffsetTable.push({
          sampleCount: 1,
          sampleCompositionTimeOffset
        });
      }
    }
  } else {
    // Very first sample of the track: start the tables.
    track.lastTimescaleUnits = 0;
    if (__privateGet(this, _options).fastStart !== "fragmented") {
      track.timeToSampleTable.push({
        sampleCount: 1,
        sampleDelta: intoTimescale(sample.duration, track.timescale)
      });
      track.compositionTimeOffsetTable.push({
        sampleCount: 1,
        sampleCompositionTimeOffset
      });
    }
  }
  track.lastSample = sample;
  let beginNewChunk = false;
  if (!track.currentChunk) {
    beginNewChunk = true;
  } else {
    let currentChunkDuration = sample.presentationTimestamp - track.currentChunk.startTimestamp;
    if (__privateGet(this, _options).fastStart === "fragmented") {
      let mostImportantTrack = __privateGet(this, _videoTrack) ?? __privateGet(this, _audioTrack);
      // Only the leading track (video, if present) may cut a fragment, and
      // only on a keyframe, so every fragment starts decodable.
      if (track === mostImportantTrack && sample.type === "key" && currentChunkDuration >= 1) {
        beginNewChunk = true;
        __privateMethod(this, _finalizeFragment, finalizeFragment_fn).call(this);
      }
    } else {
      beginNewChunk = currentChunkDuration >= 0.5;
    }
  }
  if (beginNewChunk) {
    if (track.currentChunk) {
      __privateMethod(this, _finalizeCurrentChunk, finalizeCurrentChunk_fn).call(this, track);
    }
    track.currentChunk = {
      startTimestamp: sample.presentationTimestamp,
      samples: []
    };
  }
  track.currentChunk.samples.push(sample);
};
_validateTimestamp = new WeakSet();
/**
 * Normalizes and validates a sample's timestamps (seconds) according to the
 * configured `firstTimestampBehavior`:
 *  - 'strict': the very first sample of a track must have DTS 0;
 *  - 'offset': shift all of this track's timestamps so its first DTS is 0;
 *  - 'cross-track-offset': shift by the earliest first DTS of either track.
 * Also enforces monotonically increasing decode timestamps.
 *
 * Fixes the previously garbled 'strict' error message (missing spaces and a
 * dropped "not" that inverted the sentence's meaning).
 *
 * @returns The (possibly shifted) { presentationTimestamp, decodeTimestamp }.
 * @throws On a non-zero first timestamp in 'strict' mode, or DTS regression.
 */
validateTimestamp_fn = function(presentationTimestamp, decodeTimestamp, track) {
  const strictTimestampBehavior = __privateGet(this, _options).firstTimestampBehavior === "strict";
  const noLastDecodeTimestamp = track.lastDecodeTimestamp === -1;
  const timestampNonZero = decodeTimestamp !== 0;
  if (strictTimestampBehavior && noLastDecodeTimestamp && timestampNonZero) {
    throw new Error(
      `The first chunk for your media track must have a timestamp of 0 (received DTS=${decodeTimestamp}). Non-zero first timestamps are often caused by directly piping frames or audio data from a MediaStreamTrack into the encoder. Their timestamps are typically relative to the age of the document, which is probably not what you want.

If you want to offset all timestamps of a track such that the first one is zero, set firstTimestampBehavior: 'offset' in the options.
`
    );
  } else if (__privateGet(this, _options).firstTimestampBehavior === "offset" || __privateGet(this, _options).firstTimestampBehavior === "cross-track-offset") {
    if (track.firstDecodeTimestamp === void 0) {
      track.firstDecodeTimestamp = decodeTimestamp;
    }
    let baseDecodeTimestamp;
    if (__privateGet(this, _options).firstTimestampBehavior === "offset") {
      baseDecodeTimestamp = track.firstDecodeTimestamp;
    } else {
      // Use the earliest first timestamp of either track so A/V stay aligned.
      baseDecodeTimestamp = Math.min(
        __privateGet(this, _videoTrack)?.firstDecodeTimestamp ?? Infinity,
        __privateGet(this, _audioTrack)?.firstDecodeTimestamp ?? Infinity
      );
    }
    decodeTimestamp -= baseDecodeTimestamp;
    presentationTimestamp -= baseDecodeTimestamp;
  }
  if (decodeTimestamp < track.lastDecodeTimestamp) {
    throw new Error(
      `Timestamps must be monotonically increasing (DTS went from ${track.lastDecodeTimestamp * 1e6} to ${decodeTimestamp * 1e6}).`
    );
  }
  track.lastDecodeTimestamp = decodeTimestamp;
  return { presentationTimestamp, decodeTimestamp };
};
_finalizeCurrentChunk = new WeakSet();
/**
 * Closes `track.currentChunk` (non-fragmented modes only): records it in the
 * chunk lists, extends the compactly coded samples-per-chunk table, and —
 * unless building fully in memory — writes the chunk's sample data out and
 * records its byte offset.
 *
 * Fixes the ungrammatical guard message (was missing the word "when").
 *
 * @throws If called while 'fastStart' is 'fragmented'.
 */
finalizeCurrentChunk_fn = function(track) {
  if (__privateGet(this, _options).fastStart === "fragmented") {
    throw new Error("Can't finalize individual chunks when 'fastStart' is set to 'fragmented'.");
  }
  if (!track.currentChunk)
    return;
  track.finalizedChunks.push(track.currentChunk);
  __privateGet(this, _finalizedChunks).push(track.currentChunk);
  // Only start a new run in the samples-per-chunk table when the count
  // changes (compact run-length form).
  if (track.compactlyCodedChunkTable.length === 0 || last(track.compactlyCodedChunkTable).samplesPerChunk !== track.currentChunk.samples.length) {
    track.compactlyCodedChunkTable.push({
      firstChunk: track.finalizedChunks.length,
      // 1-indexed
      samplesPerChunk: track.currentChunk.samples.length
    });
  }
  if (__privateGet(this, _options).fastStart === "in-memory") {
    // Placeholder offset; the in-memory path lays out and writes all chunk
    // data during finalization instead.
    track.currentChunk.offset = 0;
    return;
  }
  // Write the chunk's samples immediately and release their buffers.
  track.currentChunk.offset = __privateGet(this, _writer).pos;
  for (let sample of track.currentChunk.samples) {
    __privateGet(this, _writer).write(sample.data);
    sample.data = null;
  }
  __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
};
_finalizeFragment = new WeakSet();
/**
 * Flushes every track's pending chunk as one fragment: moov (only before
 * fragment number 1), then moof + mdat + sample data. The moof box is
 * written twice — once to learn its size so the chunks' data offsets are
 * known, then rewritten in place with those offsets filled in.
 *
 * @param flushStreamingWriter When false, the streaming-target flush at the
 *   end is skipped, letting a caller batch several writes into one flush.
 */
finalizeFragment_fn = function(flushStreamingWriter = true) {
  if (__privateGet(this, _options).fastStart !== "fragmented") {
    throw new Error("Can't finalize a fragment unless 'fastStart' is set to 'fragmented'.");
  }
  // Only tracks that actually have pending samples take part in the fragment.
  let tracks = [__privateGet(this, _videoTrack), __privateGet(this, _audioTrack)].filter((track) => track && track.currentChunk);
  if (tracks.length === 0)
    return;
  // Post-increment: use the current fragment number, then advance it.
  let fragmentNumber = __privateWrapper(this, _nextFragmentNumber)._++;
  if (fragmentNumber === 1) {
    // The movie box is emitted once, ahead of the first fragment.
    let movieBox = moov(tracks, __privateGet(this, _creationTime), true);
    __privateGet(this, _writer).writeBox(movieBox);
  }
  let moofOffset = __privateGet(this, _writer).pos;
  let moofBox = moof(fragmentNumber, tracks);
  __privateGet(this, _writer).writeBox(moofBox);
  {
    // Size the fragment's mdat up front: header plus all samples of all tracks.
    let mdatBox = mdat(false);
    let totalTrackSampleSize = 0;
    for (let track of tracks) {
      for (let sample of track.currentChunk.samples) {
        totalTrackSampleSize += sample.size;
      }
    }
    let mdatSize = __privateGet(this, _writer).measureBox(mdatBox) + totalTrackSampleSize;
    if (mdatSize >= 2 ** 32) {
      // Switching to the 64-bit size form enlarges the header, so re-measure.
      mdatBox.largeSize = true;
      mdatSize = __privateGet(this, _writer).measureBox(mdatBox) + totalTrackSampleSize;
    }
    mdatBox.size = mdatSize;
    __privateGet(this, _writer).writeBox(mdatBox);
  }
  for (let track of tracks) {
    // Record where this chunk's data lives; consumed by the moof rewrite.
    track.currentChunk.offset = __privateGet(this, _writer).pos;
    track.currentChunk.moofOffset = moofOffset;
    for (let sample of track.currentChunk.samples) {
      __privateGet(this, _writer).write(sample.data);
      sample.data = null; // release sample memory once written
    }
  }
  // Second pass: rebuild and rewrite the moof now that offsets are known.
  let endPos = __privateGet(this, _writer).pos;
  __privateGet(this, _writer).seek(__privateGet(this, _writer).offsets.get(moofBox));
  let newMoofBox = moof(fragmentNumber, tracks);
  __privateGet(this, _writer).writeBox(newMoofBox);
  __privateGet(this, _writer).seek(endPos);
  for (let track of tracks) {
    track.finalizedChunks.push(track.currentChunk);
    __privateGet(this, _finalizedChunks).push(track.currentChunk);
    track.currentChunk = null;
  }
  if (flushStreamingWriter) {
    __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
  }
};
_maybeFlushStreamingTargetWriter = new WeakSet();
// Flushes the underlying writer, but only for streaming targets; other
// writer kinds need no explicit flushing.
maybeFlushStreamingTargetWriter_fn = function() {
  const writer = __privateGet(this, _writer);
  if (writer instanceof StreamTargetWriter) {
    writer.flush();
  }
};
_ensureNotFinalized = new WeakSet();
// Guard used by the chunk-adding entry points: a finalized muxer is sealed.
ensureNotFinalized_fn = function() {
  if (!__privateGet(this, _finalized))
    return;
  throw new Error("Cannot add new video or audio chunks after the file has been finalized.");
};
+ return __toCommonJS(src_exports);
+})();
// CommonJS interop: when loaded via require(), mirror the bundle's exports
// onto module.exports in addition to the global `Mp4Muxer` variable.
if (typeof module === "object" && typeof module.exports === "object") Object.assign(module.exports, Mp4Muxer)
diff --git a/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.min.js b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.min.js
new file mode 100644
index 0000000..4ac985f
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.min.js
@@ -0,0 +1,5 @@
+"use strict";var Mp4Muxer=(()=>{var Ne=Object.defineProperty;var dt=Object.getOwnPropertyDescriptor;var pt=Object.getOwnPropertyNames;var ct=Object.prototype.hasOwnProperty;var Tt=(t,e)=>{for(var s in e)Ne(t,s,{get:e[s],enumerable:!0})},Ct=(t,e,s,r)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of pt(e))!ct.call(t,n)&&n!==s&&Ne(t,n,{get:()=>e[n],enumerable:!(r=dt(e,n))||r.enumerable});return t};var bt=t=>Ct(Ne({},"__esModule",{value:!0}),t);var Re=(t,e,s)=>{if(!e.has(t))throw TypeError("Cannot "+s)};var i=(t,e,s)=>(Re(t,e,"read from private field"),s?s.call(t):e.get(t)),f=(t,e,s)=>{if(e.has(t))throw TypeError("Cannot add the same private member more than once");e instanceof WeakSet?e.add(t):e.set(t,s)},S=(t,e,s,r)=>(Re(t,e,"write to private field"),r?r.call(t,s):e.set(t,s),s),Ye=(t,e,s,r)=>({set _(n){S(t,e,n,s)},get _(){return i(t,e,r)}}),p=(t,e,s)=>(Re(t,e,"access private method"),s);var fs={};Tt(fs,{ArrayBufferTarget:()=>re,FileSystemWritableFileStreamTarget:()=>ne,Muxer:()=>Ue,StreamTarget:()=>$});var c=new Uint8Array(8),E=new DataView(c.buffer),g=t=>[(t%256+256)%256],C=t=>(E.setUint16(0,t,!1),[c[0],c[1]]),Je=t=>(E.setInt16(0,t,!1),[c[0],c[1]]),Fe=t=>(E.setUint32(0,t,!1),[c[1],c[2],c[3]]),o=t=>(E.setUint32(0,t,!1),[c[0],c[1],c[2],c[3]]),et=t=>(E.setInt32(0,t,!1),[c[0],c[1],c[2],c[3]]),M=t=>(E.setUint32(0,Math.floor(t/2**32),!1),E.setUint32(4,t,!1),[c[0],c[1],c[2],c[3],c[4],c[5],c[6],c[7]]),ge=t=>(E.setInt16(0,2**8*t,!1),[c[0],c[1]]),U=t=>(E.setInt32(0,2**16*t,!1),[c[0],c[1],c[2],c[3]]),Le=t=>(E.setInt32(0,2**30*t,!1),[c[0],c[1],c[2],c[3]]),k=(t,e=!1)=>{let s=Array(t.length).fill(null).map((r,n)=>t.charCodeAt(n));return e&&s.push(0),s},K=t=>t&&t[t.length-1],ye=t=>{let e;for(let s of t)(!e||s.presentationTimestamp>e.presentationTimestamp)&&(e=s);return e},B=(t,e,s=!0)=>{let r=t*e;return s?Math.round(r):r},je=t=>{let 
e=t*(Math.PI/180),s=Math.cos(e),r=Math.sin(e);return[s,r,0,-r,s,0,0,0,1]},He=je(0),$e=t=>[U(t[0]),U(t[1]),Le(t[2]),U(t[3]),U(t[4]),Le(t[5]),U(t[6]),U(t[7]),Le(t[8])],Z=t=>!t||typeof t!="object"?t:Array.isArray(t)?t.map(Z):Object.fromEntries(Object.entries(t).map(([e,s])=>[e,Z(s)])),H=t=>t>=0&&t<2**32;var y=(t,e,s)=>({type:t,contents:e&&new Uint8Array(e.flat(10)),children:s}),b=(t,e,s,r,n)=>y(t,[g(e),Fe(s),r??[]],n),tt=t=>{let e=512;return t.fragmented?y("ftyp",[k("iso5"),o(e),k("iso5"),k("iso6"),k("mp41")]):y("ftyp",[k("isom"),o(e),k("isom"),t.holdsAvc?k("avc1"):[],k("mp41")])},we=t=>({type:"mdat",largeSize:t}),st=t=>({type:"free",size:t}),ie=(t,e,s=!1)=>y("moov",null,[St(e,t),...t.map(r=>gt(r,e)),s?Xt(t):null]),St=(t,e)=>{let s=B(Math.max(0,...e.filter(l=>l.samples.length>0).map(l=>{let m=ye(l.samples);return m.presentationTimestamp+m.duration})),xe),r=Math.max(...e.map(l=>l.id))+1,n=!H(t)||!H(s),a=n?M:o;return b("mvhd",+n,0,[a(t),a(t),o(xe),a(s),U(1),ge(1),Array(10).fill(0),$e(He),Array(24).fill(0),o(r)])},gt=(t,e)=>y("trak",null,[yt(t,e),xt(t,e)]),yt=(t,e)=>{let s=ye(t.samples),r=B(s?s.presentationTimestamp+s.duration:0,xe),n=!H(e)||!H(r),a=n?M:o,l;return t.info.type==="video"?l=typeof t.info.rotation=="number"?je(t.info.rotation):t.info.rotation:l=He,b("tkhd",+n,3,[a(e),a(e),o(t.id),o(0),a(r),Array(8).fill(0),C(0),C(0),ge(t.info.type==="audio"?1:0),C(0),$e(l),U(t.info.type==="video"?t.info.width:0),U(t.info.type==="video"?t.info.height:0)])},xt=(t,e)=>y("mdia",null,[wt(t,e),vt(t.info.type==="video"?"vide":"soun"),Ot(t)]),wt=(t,e)=>{let s=ye(t.samples),r=B(s?s.presentationTimestamp+s.duration:0,t.timescale),n=!H(e)||!H(r),a=n?M:o;return 
b("mdhd",+n,0,[a(e),a(e),o(t.timescale),a(r),C(21956),C(0)])},vt=t=>b("hdlr",0,0,[k("mhlr"),k(t),o(0),o(0),o(0),k("mp4-muxer-hdlr",!0)]),Ot=t=>y("minf",null,[t.info.type==="video"?At():kt(),Bt(),Dt(t)]),At=()=>b("vmhd",0,1,[C(0),C(0),C(0),C(0)]),kt=()=>b("smhd",0,0,[C(0),C(0)]),Bt=()=>y("dinf",null,[zt()]),zt=()=>b("dref",0,0,[o(1)],[Ut()]),Ut=()=>b("url ",0,1),Dt=t=>{let e=t.compositionTimeOffsetTable.length>1||t.compositionTimeOffsetTable.some(s=>s.sampleCompositionTimeOffset!==0);return y("stbl",null,[Et(t),Ft(t),jt(t),Ht(t),$t(t),qt(t),e?Wt(t):null])},Et=t=>b("stsd",0,0,[o(1)],[t.info.type==="video"?_t(ss[t.info.codec],t):Nt(rs[t.info.codec],t)]),_t=(t,e)=>y(t,[Array(6).fill(0),C(1),C(0),C(0),Array(12).fill(0),C(e.info.width),C(e.info.height),o(4718592),o(4718592),o(0),C(1),Array(32).fill(0),C(24),Je(65535)],[is[e.info.codec](e)]),It=t=>t.info.decoderConfig&&y("avcC",[...new Uint8Array(t.info.decoderConfig.description)]),Mt=t=>t.info.decoderConfig&&y("hvcC",[...new Uint8Array(t.info.decoderConfig.description)]),Pt=t=>{if(!t.info.decoderConfig)return null;let e=t.info.decoderConfig;if(!e.colorSpace)throw new Error("'colorSpace' is required in the decoder config for VP9.");let s=e.codec.split("."),r=Number(s[1]),n=Number(s[2]),a=Number(s[3]),l=0,m=(a<<4)+(l<<1)+Number(e.colorSpace.fullRange),d=2,T=2,v=2;return b("vpcC",1,0,[g(r),g(n),g(m),g(d),g(T),g(v),C(0)])},Vt=()=>{let t=1,e=1,s=(t<<7)+e;return y("av1C",[s,0,0,0])},Nt=(t,e)=>y(t,[Array(6).fill(0),C(1),C(0),C(0),o(0),C(e.info.numberOfChannels),C(16),C(0),C(0),U(e.info.sampleRate)],[ns[e.info.codec](e)]),Rt=t=>{let e=new Uint8Array(t.info.decoderConfig.description);return 
b("esds",0,0,[o(58753152),g(32+e.byteLength),C(1),g(0),o(75530368),g(18+e.byteLength),g(64),g(21),Fe(0),o(130071),o(130071),o(92307584),g(e.byteLength),...e,o(109084800),g(1),g(2)])},Lt=t=>y("dOps",[g(0),g(t.info.numberOfChannels),C(3840),o(t.info.sampleRate),ge(0),g(0)]),Ft=t=>b("stts",0,0,[o(t.timeToSampleTable.length),t.timeToSampleTable.map(e=>[o(e.sampleCount),o(e.sampleDelta)])]),jt=t=>{if(t.samples.every(s=>s.type==="key"))return null;let e=[...t.samples.entries()].filter(([,s])=>s.type==="key");return b("stss",0,0,[o(e.length),e.map(([s])=>o(s+1))])},Ht=t=>b("stsc",0,0,[o(t.compactlyCodedChunkTable.length),t.compactlyCodedChunkTable.map(e=>[o(e.firstChunk),o(e.samplesPerChunk),o(1)])]),$t=t=>b("stsz",0,0,[o(0),o(t.samples.length),t.samples.map(e=>o(e.size))]),qt=t=>t.finalizedChunks.length>0&&K(t.finalizedChunks).offset>=2**32?b("co64",0,0,[o(t.finalizedChunks.length),t.finalizedChunks.map(e=>M(e.offset))]):b("stco",0,0,[o(t.finalizedChunks.length),t.finalizedChunks.map(e=>o(e.offset))]),Wt=t=>b("ctts",0,0,[o(t.compositionTimeOffsetTable.length),t.compositionTimeOffsetTable.map(e=>[o(e.sampleCount),o(e.sampleCompositionTimeOffset)])]),Xt=t=>y("mvex",null,t.map(Gt)),Gt=t=>b("trex",0,0,[o(t.id),o(1),o(0),o(0),o(0)]),qe=(t,e)=>y("moof",null,[Zt(t),...e.map(Kt)]),Zt=t=>b("mfhd",0,0,[o(t)]),it=t=>{let e=0,s=0,r=0,n=0,a=t.type==="delta";return s|=+a,a?e|=1:e|=2,e<<24|s<<16|r<<8|n},Kt=t=>y("traf",null,[Qt(t),Yt(t),Jt(t)]),Qt=t=>{let e=0;e|=8,e|=16,e|=32,e|=131072;let s=t.currentChunk.samples[1]??t.currentChunk.samples[0],r={duration:s.timescaleUnitsToNextSample,size:s.size,flags:it(s)};return b("tfhd",0,e,[o(t.id),o(r.duration),o(r.size),o(r.flags)])},Yt=t=>b("tfdt",1,0,[M(B(t.currentChunk.startTimestamp,t.timescale))]),Jt=t=>{let e=t.currentChunk.samples.map(_=>_.timescaleUnitsToNextSample),s=t.currentChunk.samples.map(_=>_.size),r=t.currentChunk.samples.map(it),n=t.currentChunk.samples.map(_=>B(_.presentationTimestamp-_.decodeTimestamp,t.timescale)),a=new 
Set(e),l=new Set(s),m=new Set(r),d=new Set(n),T=m.size===2&&r[0]!==r[1],v=a.size>1,F=l.size>1,se=!T&&m.size>1,Qe=d.size>1||[...d].some(_=>_!==0),j=0;return j|=1,j|=4*+T,j|=256*+v,j|=512*+F,j|=1024*+se,j|=2048*+Qe,b("trun",1,j,[o(t.currentChunk.samples.length),o(t.currentChunk.offset-t.currentChunk.moofOffset||0),T?o(r[0]):[],t.currentChunk.samples.map((_,Se)=>[v?o(e[Se]):[],F?o(s[Se]):[],se?o(r[Se]):[],Qe?et(n[Se]):[]])])},rt=t=>y("mfra",null,[...t.map(es),ts()]),es=(t,e)=>b("tfra",1,0,[o(t.id),o(63),o(t.finalizedChunks.length),t.finalizedChunks.map(r=>[M(B(r.startTimestamp,t.timescale)),M(r.moofOffset),o(e+1),o(1),o(1)])]),ts=()=>b("mfro",0,0,[o(0)]),ss={avc:"avc1",hevc:"hvc1",vp9:"vp09",av1:"av01"},is={avc:It,hevc:Mt,vp9:Pt,av1:Vt},rs={aac:"mp4a",opus:"Opus"},ns={aac:Rt,opus:Lt};var re=class{constructor(){this.buffer=null}},$=class{constructor(e){this.options=e}},ne=class{constructor(e,s){this.stream=e;this.options=s}};var P,q,ae=class{constructor(){this.pos=0;f(this,P,new Uint8Array(8));f(this,q,new DataView(i(this,P).buffer));this.offsets=new WeakMap}seek(e){this.pos=e}writeU32(e){i(this,q).setUint32(0,e,!1),this.write(i(this,P).subarray(0,4))}writeU64(e){i(this,q).setUint32(0,Math.floor(e/2**32),!1),i(this,q).setUint32(4,e,!1),this.write(i(this,P).subarray(0,8))}writeAscii(e){for(let s=0;sn.start-a.start);s.push({start:r[0].start,size:r[0].data.byteLength});for(let n=1;nT.start<=r&&ros){for(let T=0;T=s.written[l+1].start;)s.written[l].end=Math.max(s.written[l].end,s.written[l+1].end),s.written.splice(l+1,1)},Be=new WeakSet,at=function(s){let n={start:Math.floor(s/i(this,z))*i(this,z),data:new Uint8Array(i(this,z)),written:[],shouldFlush:!1};return i(this,O).push(n),i(this,O).sort((a,l)=>a.start-l.start),i(this,O).indexOf(n)},J=new WeakSet,ve=function(s=!1){for(let r=0;re.stream.write({type:"write",data:s,position:r}),chunkSize:e.options?.chunkSize}))}};var 
xe=1e3,ls=["avc","hevc","vp9","av1"],ms=["aac","opus"],hs=2082844800,us=["strict","offset","cross-track-offset"],h,u,ce,A,x,w,W,X,De,R,L,ee,Ee,ot,_e,lt,Ie,mt,Me,ht,Pe,ut,Te,Ge,D,I,Ve,ft,te,ze,Ce,Ze,G,pe,be,Ke,Ue=class{constructor(e){f(this,Ee);f(this,_e);f(this,Ie);f(this,Me);f(this,Pe);f(this,Te);f(this,D);f(this,Ve);f(this,te);f(this,Ce);f(this,G);f(this,be);f(this,h,void 0);f(this,u,void 0);f(this,ce,void 0);f(this,A,void 0);f(this,x,null);f(this,w,null);f(this,W,Math.floor(Date.now()/1e3)+hs);f(this,X,[]);f(this,De,1);f(this,R,[]);f(this,L,[]);f(this,ee,!1);if(p(this,Ee,ot).call(this,e),e.video=Z(e.video),e.audio=Z(e.audio),e.fastStart=Z(e.fastStart),this.target=e.target,S(this,h,{firstTimestampBehavior:"strict",...e}),e.target instanceof re)S(this,u,new Oe(e.target));else if(e.target instanceof $)S(this,u,e.target.options?.chunked?new le(e.target):new oe(e.target));else if(e.target instanceof ne)S(this,u,new Ae(e.target));else throw new Error(`Invalid target: ${e.target}`);p(this,Me,ht).call(this),p(this,_e,lt).call(this)}addVideoChunk(e,s,r,n){let a=new Uint8Array(e.byteLength);e.copyTo(a),this.addVideoChunkRaw(a,e.type,r??e.timestamp,e.duration,s,n)}addVideoChunkRaw(e,s,r,n,a,l){if(p(this,be,Ke).call(this),!i(this,h).video)throw new Error("No video track declared.");if(typeof i(this,h).fastStart=="object"&&i(this,x).samples.length===i(this,h).fastStart.expectedVideoChunks)throw new Error(`Cannot add more video chunks than specified in 'fastStart' (${i(this,h).fastStart.expectedVideoChunks}).`);let m=p(this,Te,Ge).call(this,i(this,x),e,s,r,n,a,l);if(i(this,h).fastStart==="fragmented"&&i(this,w)){for(;i(this,L).length>0&&i(this,L)[0].decodeTimestamp<=m.decodeTimestamp;){let d=i(this,L).shift();p(this,D,I).call(this,i(this,w),d)}m.decodeTimestamp<=i(this,w).lastDecodeTimestamp?p(this,D,I).call(this,i(this,x),m):i(this,R).push(m)}else p(this,D,I).call(this,i(this,x),m)}addAudioChunk(e,s,r){let n=new 
Uint8Array(e.byteLength);e.copyTo(n),this.addAudioChunkRaw(n,e.type,r??e.timestamp,e.duration,s)}addAudioChunkRaw(e,s,r,n,a){if(p(this,be,Ke).call(this),!i(this,h).audio)throw new Error("No audio track declared.");if(typeof i(this,h).fastStart=="object"&&i(this,w).samples.length===i(this,h).fastStart.expectedAudioChunks)throw new Error(`Cannot add more audio chunks than specified in 'fastStart' (${i(this,h).fastStart.expectedAudioChunks}).`);let l=p(this,Te,Ge).call(this,i(this,w),e,s,r,n,a);if(i(this,h).fastStart==="fragmented"&&i(this,x)){for(;i(this,R).length>0&&i(this,R)[0].decodeTimestamp<=l.decodeTimestamp;){let m=i(this,R).shift();p(this,D,I).call(this,i(this,x),m)}l.decodeTimestamp<=i(this,x).lastDecodeTimestamp?p(this,D,I).call(this,i(this,w),l):i(this,L).push(l)}else p(this,D,I).call(this,i(this,w),l)}finalize(){if(i(this,ee))throw new Error("Cannot finalize a muxer more than once.");if(i(this,h).fastStart==="fragmented"){for(let s of i(this,R))p(this,D,I).call(this,i(this,x),s);for(let s of i(this,L))p(this,D,I).call(this,i(this,w),s);p(this,Ce,Ze).call(this,!1)}else i(this,x)&&p(this,te,ze).call(this,i(this,x)),i(this,w)&&p(this,te,ze).call(this,i(this,w));let e=[i(this,x),i(this,w)].filter(Boolean);if(i(this,h).fastStart==="in-memory"){let s;for(let n=0;n<2;n++){let a=ie(e,i(this,W)),l=i(this,u).measureBox(a);s=i(this,u).measureBox(i(this,A));let m=i(this,u).pos+l+s;for(let d of i(this,X)){d.offset=m;for(let{data:T}of d.samples)m+=T.byteLength,s+=T.byteLength}if(m<2**32)break;s>=2**32&&(i(this,A).largeSize=!0)}let r=ie(e,i(this,W));i(this,u).writeBox(r),i(this,A).size=s,i(this,u).writeBox(i(this,A));for(let n of i(this,X))for(let a of n.samples)i(this,u).write(a.data),a.data=null}else if(i(this,h).fastStart==="fragmented"){let s=i(this,u).pos,r=rt(e);i(this,u).writeBox(r);let n=i(this,u).pos-s;i(this,u).seek(i(this,u).pos-4),i(this,u).writeU32(n)}else{let 
s=i(this,u).offsets.get(i(this,A)),r=i(this,u).pos-s;i(this,A).size=r,i(this,A).largeSize=r>=2**32,i(this,u).patchBox(i(this,A));let n=ie(e,i(this,W));if(typeof i(this,h).fastStart=="object"){i(this,u).seek(i(this,ce)),i(this,u).writeBox(n);let a=s-i(this,u).pos;i(this,u).writeBox(st(a))}else i(this,u).writeBox(n)}p(this,G,pe).call(this),i(this,u).finalize(),S(this,ee,!0)}};h=new WeakMap,u=new WeakMap,ce=new WeakMap,A=new WeakMap,x=new WeakMap,w=new WeakMap,W=new WeakMap,X=new WeakMap,De=new WeakMap,R=new WeakMap,L=new WeakMap,ee=new WeakMap,Ee=new WeakSet,ot=function(e){if(e.video){if(!ls.includes(e.video.codec))throw new Error(`Unsupported video codec: ${e.video.codec}`);let s=e.video.rotation;if(typeof s=="number"&&![0,90,180,270].includes(s))throw new Error(`Invalid video rotation: ${s}. Has to be 0, 90, 180 or 270.`);if(Array.isArray(s)&&(s.length!==9||s.some(r=>typeof r!="number")))throw new Error(`Invalid video transformation matrix: ${s.join()}`)}if(e.audio&&!ms.includes(e.audio.codec))throw new Error(`Unsupported audio codec: ${e.audio.codec}`);if(e.firstTimestampBehavior&&!us.includes(e.firstTimestampBehavior))throw new Error(`Invalid first timestamp behavior: ${e.firstTimestampBehavior}`);if(typeof e.fastStart=="object"){if(e.video&&e.fastStart.expectedVideoChunks===void 0)throw new Error("'fastStart' is an object but is missing property 'expectedVideoChunks'.");if(e.audio&&e.fastStart.expectedAudioChunks===void 0)throw new Error("'fastStart' is an object but is missing property 'expectedAudioChunks'.")}else if(![!1,"in-memory","fragmented"].includes(e.fastStart))throw new Error("'fastStart' option must be false, 'in-memory', 'fragmented' or an object.")},_e=new WeakSet,lt=function(){if(i(this,u).writeBox(tt({holdsAvc:i(this,h).video?.codec==="avc",fragmented:i(this,h).fastStart==="fragmented"})),S(this,ce,i(this,u).pos),i(this,h).fastStart==="in-memory")S(this,A,we(!1));else if(i(this,h).fastStart!=="fragmented"){if(typeof 
i(this,h).fastStart=="object"){let e=p(this,Ie,mt).call(this);i(this,u).seek(i(this,u).pos+e)}S(this,A,we(!0)),i(this,u).writeBox(i(this,A))}p(this,G,pe).call(this)},Ie=new WeakSet,mt=function(){if(typeof i(this,h).fastStart!="object")return;let e=0,s=[i(this,h).fastStart.expectedVideoChunks,i(this,h).fastStart.expectedAudioChunks];for(let r of s)r&&(e+=(4+4)*Math.ceil(2/3*r),e+=4*r,e+=(4+4+4)*Math.ceil(2/3*r),e+=4*r,e+=8*r);return e+=4096,e},Me=new WeakSet,ht=function(){if(i(this,h).video&&S(this,x,{id:1,info:{type:"video",codec:i(this,h).video.codec,width:i(this,h).video.width,height:i(this,h).video.height,rotation:i(this,h).video.rotation??0,decoderConfig:null},timescale:11520,samples:[],finalizedChunks:[],currentChunk:null,firstDecodeTimestamp:void 0,lastDecodeTimestamp:-1,timeToSampleTable:[],compositionTimeOffsetTable:[],lastTimescaleUnits:null,lastSample:null,compactlyCodedChunkTable:[]}),i(this,h).audio){let e=p(this,Pe,ut).call(this,2,i(this,h).audio.sampleRate,i(this,h).audio.numberOfChannels);S(this,w,{id:i(this,h).video?2:1,info:{type:"audio",codec:i(this,h).audio.codec,numberOfChannels:i(this,h).audio.numberOfChannels,sampleRate:i(this,h).audio.sampleRate,decoderConfig:{codec:i(this,h).audio.codec,description:e,numberOfChannels:i(this,h).audio.numberOfChannels,sampleRate:i(this,h).audio.sampleRate}},timescale:i(this,h).audio.sampleRate,samples:[],finalizedChunks:[],currentChunk:null,firstDecodeTimestamp:void 0,lastDecodeTimestamp:-1,timeToSampleTable:[],compositionTimeOffsetTable:[],lastTimescaleUnits:null,lastSample:null,compactlyCodedChunkTable:[]})}},Pe=new WeakSet,ut=function(e,s,r){let a=[96e3,88200,64e3,48e3,44100,32e3,24e3,22050,16e3,12e3,11025,8e3,7350].indexOf(s),l=r,m="";m+=e.toString(2).padStart(5,"0"),m+=a.toString(2).padStart(4,"0"),a===15&&(m+=s.toString(2).padStart(24,"0")),m+=l.toString(2).padStart(4,"0");let d=Math.ceil(m.length/8)*8;m=m.padEnd(d,"0");let T=new Uint8Array(m.length/8);for(let 
v=0;v=1&&(n=!0,p(this,Ce,Ze).call(this))}else n=a>=.5}n&&(e.currentChunk&&p(this,te,ze).call(this,e),e.currentChunk={startTimestamp:s.presentationTimestamp,samples:[]}),e.currentChunk.samples.push(s)},Ve=new WeakSet,ft=function(e,s,r){let n=i(this,h).firstTimestampBehavior==="strict",a=r.lastDecodeTimestamp===-1;if(n&&a&&s!==0)throw new Error(`The first chunk for your media track must have a timestamp of 0 (received DTS=${s}).Non-zero first timestamps are often caused by directly piping frames or audio data from a MediaStreamTrack into the encoder. Their timestamps are typically relative to the age of thedocument, which is probably what you want.
+
+If you want to offset all timestamps of a track such that the first one is zero, set firstTimestampBehavior: 'offset' in the options.
+`);if(i(this,h).firstTimestampBehavior==="offset"||i(this,h).firstTimestampBehavior==="cross-track-offset"){r.firstDecodeTimestamp===void 0&&(r.firstDecodeTimestamp=s);let m;i(this,h).firstTimestampBehavior==="offset"?m=r.firstDecodeTimestamp:m=Math.min(i(this,x)?.firstDecodeTimestamp??1/0,i(this,w)?.firstDecodeTimestamp??1/0),s-=m,e-=m}if(sd&&d.currentChunk);if(s.length===0)return;let r=Ye(this,De)._++;if(r===1){let d=ie(s,i(this,W),!0);i(this,u).writeBox(d)}let n=i(this,u).pos,a=qe(r,s);i(this,u).writeBox(a);{let d=we(!1),T=0;for(let F of s)for(let se of F.currentChunk.samples)T+=se.size;let v=i(this,u).measureBox(d)+T;v>=2**32&&(d.largeSize=!0,v=i(this,u).measureBox(d)+T),d.size=v,i(this,u).writeBox(d)}for(let d of s){d.currentChunk.offset=i(this,u).pos,d.currentChunk.moofOffset=n;for(let T of d.currentChunk.samples)i(this,u).write(T.data),T.data=null}let l=i(this,u).pos;i(this,u).seek(i(this,u).offsets.get(a));let m=qe(r,s);i(this,u).writeBox(m),i(this,u).seek(l);for(let d of s)d.finalizedChunks.push(d.currentChunk),i(this,X).push(d.currentChunk),d.currentChunk=null;e&&p(this,G,pe).call(this)},G=new WeakSet,pe=function(){i(this,u)instanceof oe&&i(this,u).flush()},be=new WeakSet,Ke=function(){if(i(this,ee))throw new Error("Cannot add new video or audio chunks after the file has been finalized.")};return bt(fs);})();
+if (typeof module === "object" && typeof module.exports === "object") Object.assign(module.exports, Mp4Muxer)
diff --git a/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.min.mjs b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.min.mjs
new file mode 100644
index 0000000..e1bdb7f
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.min.mjs
@@ -0,0 +1,4 @@
+var Ve=(t,e,s)=>{if(!e.has(t))throw TypeError("Cannot "+s)};var i=(t,e,s)=>(Ve(t,e,"read from private field"),s?s.call(t):e.get(t)),f=(t,e,s)=>{if(e.has(t))throw TypeError("Cannot add the same private member more than once");e instanceof WeakSet?e.add(t):e.set(t,s)},S=(t,e,s,r)=>(Ve(t,e,"write to private field"),r?r.call(t,s):e.set(t,s),s),Qe=(t,e,s,r)=>({set _(n){S(t,e,n,s)},get _(){return i(t,e,r)}}),p=(t,e,s)=>(Ve(t,e,"access private method"),s);var c=new Uint8Array(8),E=new DataView(c.buffer),g=t=>[(t%256+256)%256],C=t=>(E.setUint16(0,t,!1),[c[0],c[1]]),Ye=t=>(E.setInt16(0,t,!1),[c[0],c[1]]),Re=t=>(E.setUint32(0,t,!1),[c[1],c[2],c[3]]),o=t=>(E.setUint32(0,t,!1),[c[0],c[1],c[2],c[3]]),Je=t=>(E.setInt32(0,t,!1),[c[0],c[1],c[2],c[3]]),M=t=>(E.setUint32(0,Math.floor(t/2**32),!1),E.setUint32(4,t,!1),[c[0],c[1],c[2],c[3],c[4],c[5],c[6],c[7]]),be=t=>(E.setInt16(0,2**8*t,!1),[c[0],c[1]]),U=t=>(E.setInt32(0,2**16*t,!1),[c[0],c[1],c[2],c[3]]),Ne=t=>(E.setInt32(0,2**30*t,!1),[c[0],c[1],c[2],c[3]]),k=(t,e=!1)=>{let s=Array(t.length).fill(null).map((r,n)=>t.charCodeAt(n));return e&&s.push(0),s},Z=t=>t&&t[t.length-1],Se=t=>{let e;for(let s of t)(!e||s.presentationTimestamp>e.presentationTimestamp)&&(e=s);return e},B=(t,e,s=!0)=>{let r=t*e;return s?Math.round(r):r},Le=t=>{let e=t*(Math.PI/180),s=Math.cos(e),r=Math.sin(e);return[s,r,0,-r,s,0,0,0,1]},Fe=Le(0),je=t=>[U(t[0]),U(t[1]),Ne(t[2]),U(t[3]),U(t[4]),Ne(t[5]),U(t[6]),U(t[7]),Ne(t[8])],G=t=>!t||typeof t!="object"?t:Array.isArray(t)?t.map(G):Object.fromEntries(Object.entries(t).map(([e,s])=>[e,G(s)])),H=t=>t>=0&&t<2**32;var y=(t,e,s)=>({type:t,contents:e&&new Uint8Array(e.flat(10)),children:s}),b=(t,e,s,r,n)=>y(t,[g(e),Re(s),r??[]],n),et=t=>{let e=512;return 
t.fragmented?y("ftyp",[k("iso5"),o(e),k("iso5"),k("iso6"),k("mp41")]):y("ftyp",[k("isom"),o(e),k("isom"),t.holdsAvc?k("avc1"):[],k("mp41")])},ye=t=>({type:"mdat",largeSize:t}),tt=t=>({type:"free",size:t}),ie=(t,e,s=!1)=>y("moov",null,[ft(e,t),...t.map(r=>dt(r,e)),s?Lt(t):null]),ft=(t,e)=>{let s=B(Math.max(0,...e.filter(l=>l.samples.length>0).map(l=>{let m=Se(l.samples);return m.presentationTimestamp+m.duration})),ge),r=Math.max(...e.map(l=>l.id))+1,n=!H(t)||!H(s),a=n?M:o;return b("mvhd",+n,0,[a(t),a(t),o(ge),a(s),U(1),be(1),Array(10).fill(0),je(Fe),Array(24).fill(0),o(r)])},dt=(t,e)=>y("trak",null,[pt(t,e),ct(t,e)]),pt=(t,e)=>{let s=Se(t.samples),r=B(s?s.presentationTimestamp+s.duration:0,ge),n=!H(e)||!H(r),a=n?M:o,l;return t.info.type==="video"?l=typeof t.info.rotation=="number"?Le(t.info.rotation):t.info.rotation:l=Fe,b("tkhd",+n,3,[a(e),a(e),o(t.id),o(0),a(r),Array(8).fill(0),C(0),C(0),be(t.info.type==="audio"?1:0),C(0),je(l),U(t.info.type==="video"?t.info.width:0),U(t.info.type==="video"?t.info.height:0)])},ct=(t,e)=>y("mdia",null,[Tt(t,e),Ct(t.info.type==="video"?"vide":"soun"),bt(t)]),Tt=(t,e)=>{let s=Se(t.samples),r=B(s?s.presentationTimestamp+s.duration:0,t.timescale),n=!H(e)||!H(r),a=n?M:o;return b("mdhd",+n,0,[a(e),a(e),o(t.timescale),a(r),C(21956),C(0)])},Ct=t=>b("hdlr",0,0,[k("mhlr"),k(t),o(0),o(0),o(0),k("mp4-muxer-hdlr",!0)]),bt=t=>y("minf",null,[t.info.type==="video"?St():gt(),yt(),vt(t)]),St=()=>b("vmhd",0,1,[C(0),C(0),C(0),C(0)]),gt=()=>b("smhd",0,0,[C(0),C(0)]),yt=()=>y("dinf",null,[xt()]),xt=()=>b("dref",0,0,[o(1)],[wt()]),wt=()=>b("url ",0,1),vt=t=>{let e=t.compositionTimeOffsetTable.length>1||t.compositionTimeOffsetTable.some(s=>s.sampleCompositionTimeOffset!==0);return 
y("stbl",null,[Ot(t),It(t),Mt(t),Pt(t),Vt(t),Nt(t),e?Rt(t):null])},Ot=t=>b("stsd",0,0,[o(1)],[t.info.type==="video"?At(Zt[t.info.codec],t):Dt(Qt[t.info.codec],t)]),At=(t,e)=>y(t,[Array(6).fill(0),C(1),C(0),C(0),Array(12).fill(0),C(e.info.width),C(e.info.height),o(4718592),o(4718592),o(0),C(1),Array(32).fill(0),C(24),Ye(65535)],[Kt[e.info.codec](e)]),kt=t=>t.info.decoderConfig&&y("avcC",[...new Uint8Array(t.info.decoderConfig.description)]),Bt=t=>t.info.decoderConfig&&y("hvcC",[...new Uint8Array(t.info.decoderConfig.description)]),zt=t=>{if(!t.info.decoderConfig)return null;let e=t.info.decoderConfig;if(!e.colorSpace)throw new Error("'colorSpace' is required in the decoder config for VP9.");let s=e.codec.split("."),r=Number(s[1]),n=Number(s[2]),a=Number(s[3]),l=0,m=(a<<4)+(l<<1)+Number(e.colorSpace.fullRange),d=2,T=2,v=2;return b("vpcC",1,0,[g(r),g(n),g(m),g(d),g(T),g(v),C(0)])},Ut=()=>{let t=1,e=1,s=(t<<7)+e;return y("av1C",[s,0,0,0])},Dt=(t,e)=>y(t,[Array(6).fill(0),C(1),C(0),C(0),o(0),C(e.info.numberOfChannels),C(16),C(0),C(0),U(e.info.sampleRate)],[Yt[e.info.codec](e)]),Et=t=>{let e=new Uint8Array(t.info.decoderConfig.description);return b("esds",0,0,[o(58753152),g(32+e.byteLength),C(1),g(0),o(75530368),g(18+e.byteLength),g(64),g(21),Re(0),o(130071),o(130071),o(92307584),g(e.byteLength),...e,o(109084800),g(1),g(2)])},_t=t=>y("dOps",[g(0),g(t.info.numberOfChannels),C(3840),o(t.info.sampleRate),be(0),g(0)]),It=t=>b("stts",0,0,[o(t.timeToSampleTable.length),t.timeToSampleTable.map(e=>[o(e.sampleCount),o(e.sampleDelta)])]),Mt=t=>{if(t.samples.every(s=>s.type==="key"))return null;let e=[...t.samples.entries()].filter(([,s])=>s.type==="key");return 
b("stss",0,0,[o(e.length),e.map(([s])=>o(s+1))])},Pt=t=>b("stsc",0,0,[o(t.compactlyCodedChunkTable.length),t.compactlyCodedChunkTable.map(e=>[o(e.firstChunk),o(e.samplesPerChunk),o(1)])]),Vt=t=>b("stsz",0,0,[o(0),o(t.samples.length),t.samples.map(e=>o(e.size))]),Nt=t=>t.finalizedChunks.length>0&&Z(t.finalizedChunks).offset>=2**32?b("co64",0,0,[o(t.finalizedChunks.length),t.finalizedChunks.map(e=>M(e.offset))]):b("stco",0,0,[o(t.finalizedChunks.length),t.finalizedChunks.map(e=>o(e.offset))]),Rt=t=>b("ctts",0,0,[o(t.compositionTimeOffsetTable.length),t.compositionTimeOffsetTable.map(e=>[o(e.sampleCount),o(e.sampleCompositionTimeOffset)])]),Lt=t=>y("mvex",null,t.map(Ft)),Ft=t=>b("trex",0,0,[o(t.id),o(1),o(0),o(0),o(0)]),He=(t,e)=>y("moof",null,[jt(t),...e.map(Ht)]),jt=t=>b("mfhd",0,0,[o(t)]),st=t=>{let e=0,s=0,r=0,n=0,a=t.type==="delta";return s|=+a,a?e|=1:e|=2,e<<24|s<<16|r<<8|n},Ht=t=>y("traf",null,[$t(t),qt(t),Wt(t)]),$t=t=>{let e=0;e|=8,e|=16,e|=32,e|=131072;let s=t.currentChunk.samples[1]??t.currentChunk.samples[0],r={duration:s.timescaleUnitsToNextSample,size:s.size,flags:st(s)};return b("tfhd",0,e,[o(t.id),o(r.duration),o(r.size),o(r.flags)])},qt=t=>b("tfdt",1,0,[M(B(t.currentChunk.startTimestamp,t.timescale))]),Wt=t=>{let e=t.currentChunk.samples.map(_=>_.timescaleUnitsToNextSample),s=t.currentChunk.samples.map(_=>_.size),r=t.currentChunk.samples.map(st),n=t.currentChunk.samples.map(_=>B(_.presentationTimestamp-_.decodeTimestamp,t.timescale)),a=new Set(e),l=new Set(s),m=new Set(r),d=new Set(n),T=m.size===2&&r[0]!==r[1],v=a.size>1,F=l.size>1,se=!T&&m.size>1,Ke=d.size>1||[...d].some(_=>_!==0),j=0;return 
j|=1,j|=4*+T,j|=256*+v,j|=512*+F,j|=1024*+se,j|=2048*+Ke,b("trun",1,j,[o(t.currentChunk.samples.length),o(t.currentChunk.offset-t.currentChunk.moofOffset||0),T?o(r[0]):[],t.currentChunk.samples.map((_,Ce)=>[v?o(e[Ce]):[],F?o(s[Ce]):[],se?o(r[Ce]):[],Ke?Je(n[Ce]):[]])])},it=t=>y("mfra",null,[...t.map(Xt),Gt()]),Xt=(t,e)=>b("tfra",1,0,[o(t.id),o(63),o(t.finalizedChunks.length),t.finalizedChunks.map(r=>[M(B(r.startTimestamp,t.timescale)),M(r.moofOffset),o(e+1),o(1),o(1)])]),Gt=()=>b("mfro",0,0,[o(0)]),Zt={avc:"avc1",hevc:"hvc1",vp9:"vp09",av1:"av01"},Kt={avc:kt,hevc:Bt,vp9:zt,av1:Ut},Qt={aac:"mp4a",opus:"Opus"},Yt={aac:Et,opus:_t};var xe=class{constructor(){this.buffer=null}},K=class{constructor(e){this.options=e}},we=class{constructor(e,s){this.stream=e;this.options=s}};var P,$,re=class{constructor(){this.pos=0;f(this,P,new Uint8Array(8));f(this,$,new DataView(i(this,P).buffer));this.offsets=new WeakMap}seek(e){this.pos=e}writeU32(e){i(this,$).setUint32(0,e,!1),this.write(i(this,P).subarray(0,4))}writeU64(e){i(this,$).setUint32(0,Math.floor(e/2**32),!1),i(this,$).setUint32(4,e,!1),this.write(i(this,P).subarray(0,8))}writeAscii(e){for(let s=0;sn.start-a.start);s.push({start:r[0].start,size:r[0].data.byteLength});for(let n=1;nT.start<=r&&res){for(let T=0;T=s.written[l+1].start;)s.written[l].end=Math.max(s.written[l].end,s.written[l+1].end),s.written.splice(l+1,1)},Be=new WeakSet,nt=function(s){let n={start:Math.floor(s/i(this,z))*i(this,z),data:new Uint8Array(i(this,z)),written:[],shouldFlush:!1};return i(this,O).push(n),i(this,O).sort((a,l)=>a.start-l.start),i(this,O).indexOf(n)},J=new WeakSet,ve=function(s=!1){for(let r=0;re.stream.write({type:"write",data:s,position:r}),chunkSize:e.options?.chunkSize}))}};var 
ge=1e3,ts=["avc","hevc","vp9","av1"],ss=["aac","opus"],is=2082844800,rs=["strict","offset","cross-track-offset"],h,u,de,A,x,w,q,W,Ue,R,L,ee,De,at,Ee,ot,_e,lt,Ie,mt,Me,ht,pe,Xe,D,I,Pe,ut,te,ze,ce,Ge,X,fe,Te,Ze,We=class{constructor(e){f(this,De);f(this,Ee);f(this,_e);f(this,Ie);f(this,Me);f(this,pe);f(this,D);f(this,Pe);f(this,te);f(this,ce);f(this,X);f(this,Te);f(this,h,void 0);f(this,u,void 0);f(this,de,void 0);f(this,A,void 0);f(this,x,null);f(this,w,null);f(this,q,Math.floor(Date.now()/1e3)+is);f(this,W,[]);f(this,Ue,1);f(this,R,[]);f(this,L,[]);f(this,ee,!1);if(p(this,De,at).call(this,e),e.video=G(e.video),e.audio=G(e.audio),e.fastStart=G(e.fastStart),this.target=e.target,S(this,h,{firstTimestampBehavior:"strict",...e}),e.target instanceof xe)S(this,u,new Oe(e.target));else if(e.target instanceof K)S(this,u,e.target.options?.chunked?new ae(e.target):new ne(e.target));else if(e.target instanceof we)S(this,u,new Ae(e.target));else throw new Error(`Invalid target: ${e.target}`);p(this,Ie,mt).call(this),p(this,Ee,ot).call(this)}addVideoChunk(e,s,r,n){let a=new Uint8Array(e.byteLength);e.copyTo(a),this.addVideoChunkRaw(a,e.type,r??e.timestamp,e.duration,s,n)}addVideoChunkRaw(e,s,r,n,a,l){if(p(this,Te,Ze).call(this),!i(this,h).video)throw new Error("No video track declared.");if(typeof i(this,h).fastStart=="object"&&i(this,x).samples.length===i(this,h).fastStart.expectedVideoChunks)throw new Error(`Cannot add more video chunks than specified in 'fastStart' (${i(this,h).fastStart.expectedVideoChunks}).`);let m=p(this,pe,Xe).call(this,i(this,x),e,s,r,n,a,l);if(i(this,h).fastStart==="fragmented"&&i(this,w)){for(;i(this,L).length>0&&i(this,L)[0].decodeTimestamp<=m.decodeTimestamp;){let d=i(this,L).shift();p(this,D,I).call(this,i(this,w),d)}m.decodeTimestamp<=i(this,w).lastDecodeTimestamp?p(this,D,I).call(this,i(this,x),m):i(this,R).push(m)}else p(this,D,I).call(this,i(this,x),m)}addAudioChunk(e,s,r){let n=new 
Uint8Array(e.byteLength);e.copyTo(n),this.addAudioChunkRaw(n,e.type,r??e.timestamp,e.duration,s)}addAudioChunkRaw(e,s,r,n,a){if(p(this,Te,Ze).call(this),!i(this,h).audio)throw new Error("No audio track declared.");if(typeof i(this,h).fastStart=="object"&&i(this,w).samples.length===i(this,h).fastStart.expectedAudioChunks)throw new Error(`Cannot add more audio chunks than specified in 'fastStart' (${i(this,h).fastStart.expectedAudioChunks}).`);let l=p(this,pe,Xe).call(this,i(this,w),e,s,r,n,a);if(i(this,h).fastStart==="fragmented"&&i(this,x)){for(;i(this,R).length>0&&i(this,R)[0].decodeTimestamp<=l.decodeTimestamp;){let m=i(this,R).shift();p(this,D,I).call(this,i(this,x),m)}l.decodeTimestamp<=i(this,x).lastDecodeTimestamp?p(this,D,I).call(this,i(this,w),l):i(this,L).push(l)}else p(this,D,I).call(this,i(this,w),l)}finalize(){if(i(this,ee))throw new Error("Cannot finalize a muxer more than once.");if(i(this,h).fastStart==="fragmented"){for(let s of i(this,R))p(this,D,I).call(this,i(this,x),s);for(let s of i(this,L))p(this,D,I).call(this,i(this,w),s);p(this,ce,Ge).call(this,!1)}else i(this,x)&&p(this,te,ze).call(this,i(this,x)),i(this,w)&&p(this,te,ze).call(this,i(this,w));let e=[i(this,x),i(this,w)].filter(Boolean);if(i(this,h).fastStart==="in-memory"){let s;for(let n=0;n<2;n++){let a=ie(e,i(this,q)),l=i(this,u).measureBox(a);s=i(this,u).measureBox(i(this,A));let m=i(this,u).pos+l+s;for(let d of i(this,W)){d.offset=m;for(let{data:T}of d.samples)m+=T.byteLength,s+=T.byteLength}if(m<2**32)break;s>=2**32&&(i(this,A).largeSize=!0)}let r=ie(e,i(this,q));i(this,u).writeBox(r),i(this,A).size=s,i(this,u).writeBox(i(this,A));for(let n of i(this,W))for(let a of n.samples)i(this,u).write(a.data),a.data=null}else if(i(this,h).fastStart==="fragmented"){let s=i(this,u).pos,r=it(e);i(this,u).writeBox(r);let n=i(this,u).pos-s;i(this,u).seek(i(this,u).pos-4),i(this,u).writeU32(n)}else{let 
s=i(this,u).offsets.get(i(this,A)),r=i(this,u).pos-s;i(this,A).size=r,i(this,A).largeSize=r>=2**32,i(this,u).patchBox(i(this,A));let n=ie(e,i(this,q));if(typeof i(this,h).fastStart=="object"){i(this,u).seek(i(this,de)),i(this,u).writeBox(n);let a=s-i(this,u).pos;i(this,u).writeBox(tt(a))}else i(this,u).writeBox(n)}p(this,X,fe).call(this),i(this,u).finalize(),S(this,ee,!0)}};h=new WeakMap,u=new WeakMap,de=new WeakMap,A=new WeakMap,x=new WeakMap,w=new WeakMap,q=new WeakMap,W=new WeakMap,Ue=new WeakMap,R=new WeakMap,L=new WeakMap,ee=new WeakMap,De=new WeakSet,at=function(e){if(e.video){if(!ts.includes(e.video.codec))throw new Error(`Unsupported video codec: ${e.video.codec}`);let s=e.video.rotation;if(typeof s=="number"&&![0,90,180,270].includes(s))throw new Error(`Invalid video rotation: ${s}. Has to be 0, 90, 180 or 270.`);if(Array.isArray(s)&&(s.length!==9||s.some(r=>typeof r!="number")))throw new Error(`Invalid video transformation matrix: ${s.join()}`)}if(e.audio&&!ss.includes(e.audio.codec))throw new Error(`Unsupported audio codec: ${e.audio.codec}`);if(e.firstTimestampBehavior&&!rs.includes(e.firstTimestampBehavior))throw new Error(`Invalid first timestamp behavior: ${e.firstTimestampBehavior}`);if(typeof e.fastStart=="object"){if(e.video&&e.fastStart.expectedVideoChunks===void 0)throw new Error("'fastStart' is an object but is missing property 'expectedVideoChunks'.");if(e.audio&&e.fastStart.expectedAudioChunks===void 0)throw new Error("'fastStart' is an object but is missing property 'expectedAudioChunks'.")}else if(![!1,"in-memory","fragmented"].includes(e.fastStart))throw new Error("'fastStart' option must be false, 'in-memory', 'fragmented' or an object.")},Ee=new WeakSet,ot=function(){if(i(this,u).writeBox(et({holdsAvc:i(this,h).video?.codec==="avc",fragmented:i(this,h).fastStart==="fragmented"})),S(this,de,i(this,u).pos),i(this,h).fastStart==="in-memory")S(this,A,ye(!1));else if(i(this,h).fastStart!=="fragmented"){if(typeof 
i(this,h).fastStart=="object"){let e=p(this,_e,lt).call(this);i(this,u).seek(i(this,u).pos+e)}S(this,A,ye(!0)),i(this,u).writeBox(i(this,A))}p(this,X,fe).call(this)},_e=new WeakSet,lt=function(){if(typeof i(this,h).fastStart!="object")return;let e=0,s=[i(this,h).fastStart.expectedVideoChunks,i(this,h).fastStart.expectedAudioChunks];for(let r of s)r&&(e+=(4+4)*Math.ceil(2/3*r),e+=4*r,e+=(4+4+4)*Math.ceil(2/3*r),e+=4*r,e+=8*r);return e+=4096,e},Ie=new WeakSet,mt=function(){if(i(this,h).video&&S(this,x,{id:1,info:{type:"video",codec:i(this,h).video.codec,width:i(this,h).video.width,height:i(this,h).video.height,rotation:i(this,h).video.rotation??0,decoderConfig:null},timescale:11520,samples:[],finalizedChunks:[],currentChunk:null,firstDecodeTimestamp:void 0,lastDecodeTimestamp:-1,timeToSampleTable:[],compositionTimeOffsetTable:[],lastTimescaleUnits:null,lastSample:null,compactlyCodedChunkTable:[]}),i(this,h).audio){let e=p(this,Me,ht).call(this,2,i(this,h).audio.sampleRate,i(this,h).audio.numberOfChannels);S(this,w,{id:i(this,h).video?2:1,info:{type:"audio",codec:i(this,h).audio.codec,numberOfChannels:i(this,h).audio.numberOfChannels,sampleRate:i(this,h).audio.sampleRate,decoderConfig:{codec:i(this,h).audio.codec,description:e,numberOfChannels:i(this,h).audio.numberOfChannels,sampleRate:i(this,h).audio.sampleRate}},timescale:i(this,h).audio.sampleRate,samples:[],finalizedChunks:[],currentChunk:null,firstDecodeTimestamp:void 0,lastDecodeTimestamp:-1,timeToSampleTable:[],compositionTimeOffsetTable:[],lastTimescaleUnits:null,lastSample:null,compactlyCodedChunkTable:[]})}},Me=new WeakSet,ht=function(e,s,r){let a=[96e3,88200,64e3,48e3,44100,32e3,24e3,22050,16e3,12e3,11025,8e3,7350].indexOf(s),l=r,m="";m+=e.toString(2).padStart(5,"0"),m+=a.toString(2).padStart(4,"0"),a===15&&(m+=s.toString(2).padStart(24,"0")),m+=l.toString(2).padStart(4,"0");let d=Math.ceil(m.length/8)*8;m=m.padEnd(d,"0");let T=new Uint8Array(m.length/8);for(let 
v=0;v=1&&(n=!0,p(this,ce,Ge).call(this))}else n=a>=.5}n&&(e.currentChunk&&p(this,te,ze).call(this,e),e.currentChunk={startTimestamp:s.presentationTimestamp,samples:[]}),e.currentChunk.samples.push(s)},Pe=new WeakSet,ut=function(e,s,r){let n=i(this,h).firstTimestampBehavior==="strict",a=r.lastDecodeTimestamp===-1;if(n&&a&&s!==0)throw new Error(`The first chunk for your media track must have a timestamp of 0 (received DTS=${s}).Non-zero first timestamps are often caused by directly piping frames or audio data from a MediaStreamTrack into the encoder. Their timestamps are typically relative to the age of thedocument, which is probably what you want.
+
+If you want to offset all timestamps of a track such that the first one is zero, set firstTimestampBehavior: 'offset' in the options.
+`);if(i(this,h).firstTimestampBehavior==="offset"||i(this,h).firstTimestampBehavior==="cross-track-offset"){r.firstDecodeTimestamp===void 0&&(r.firstDecodeTimestamp=s);let m;i(this,h).firstTimestampBehavior==="offset"?m=r.firstDecodeTimestamp:m=Math.min(i(this,x)?.firstDecodeTimestamp??1/0,i(this,w)?.firstDecodeTimestamp??1/0),s-=m,e-=m}if(sd&&d.currentChunk);if(s.length===0)return;let r=Qe(this,Ue)._++;if(r===1){let d=ie(s,i(this,q),!0);i(this,u).writeBox(d)}let n=i(this,u).pos,a=He(r,s);i(this,u).writeBox(a);{let d=ye(!1),T=0;for(let F of s)for(let se of F.currentChunk.samples)T+=se.size;let v=i(this,u).measureBox(d)+T;v>=2**32&&(d.largeSize=!0,v=i(this,u).measureBox(d)+T),d.size=v,i(this,u).writeBox(d)}for(let d of s){d.currentChunk.offset=i(this,u).pos,d.currentChunk.moofOffset=n;for(let T of d.currentChunk.samples)i(this,u).write(T.data),T.data=null}let l=i(this,u).pos;i(this,u).seek(i(this,u).offsets.get(a));let m=He(r,s);i(this,u).writeBox(m),i(this,u).seek(l);for(let d of s)d.finalizedChunks.push(d.currentChunk),i(this,W).push(d.currentChunk),d.currentChunk=null;e&&p(this,X,fe).call(this)},X=new WeakSet,fe=function(){i(this,u)instanceof ne&&i(this,u).flush()},Te=new WeakSet,Ze=function(){if(i(this,ee))throw new Error("Cannot add new video or audio chunks after the file has been finalized.")};export{xe as ArrayBufferTarget,we as FileSystemWritableFileStreamTarget,We as Muxer,K as StreamTarget};
diff --git a/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.mjs b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.mjs
new file mode 100644
index 0000000..0bb9f6b
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/build/mp4-muxer.mjs
@@ -0,0 +1,1693 @@
// esbuild-generated runtime helpers that emulate ES private class members
// using WeakMap (fields) / WeakSet (methods) brands.

/** Throws unless `obj` carries the private-member brand `member`. */
var __accessCheck = (obj, member, msg) => {
  if (!member.has(obj)) throw new TypeError("Cannot " + msg);
};

/** Reads a private field, going through `getter` when one exists. */
var __privateGet = (obj, member, getter) => {
  __accessCheck(obj, member, "read from private field");
  if (getter) return getter.call(obj);
  return member.get(obj);
};

/** Installs a private-member brand on `obj`; rejects duplicates. */
var __privateAdd = (obj, member, value) => {
  if (member.has(obj)) {
    throw new TypeError("Cannot add the same private member more than once");
  }
  if (member instanceof WeakSet) {
    member.add(obj);
  } else {
    member.set(obj, value);
  }
};

/** Writes a private field (via `setter` when present); returns the value. */
var __privateSet = (obj, member, value, setter) => {
  __accessCheck(obj, member, "write to private field");
  if (setter) {
    setter.call(obj, value);
  } else {
    member.set(obj, value);
  }
  return value;
};

/** Accessor pair exposing a private field through a `_` property. */
var __privateWrapper = (obj, member, setter, getter) => ({
  set _(value) {
    __privateSet(obj, member, value, setter);
  },
  get _() {
    return __privateGet(obj, member, getter);
  }
});

/** Checks the method brand, then hands back the method itself. */
var __privateMethod = (obj, member, method) => {
  __accessCheck(obj, member, "access private method");
  return method;
};
+
+// src/misc.ts
// Big-endian byte-serialization helpers. All numeric helpers share one
// 8-byte scratch buffer: they write into `view` and return the relevant
// bytes as a plain number array.
var bytes = new Uint8Array(8);
var view = new DataView(bytes.buffer);

/** Single byte, wrapped into the 0-255 range (handles negative input). */
var u8 = (value) => [((value % 256) + 256) % 256];

/** Unsigned 16-bit integer, big-endian. */
var u16 = (value) => {
  view.setUint16(0, value, false);
  return [...bytes.subarray(0, 2)];
};

/** Signed 16-bit integer, big-endian. */
var i16 = (value) => {
  view.setInt16(0, value, false);
  return [...bytes.subarray(0, 2)];
};

/** Unsigned 24-bit integer, big-endian (low three bytes of a u32 write). */
var u24 = (value) => {
  view.setUint32(0, value, false);
  return [...bytes.subarray(1, 4)];
};

/** Unsigned 32-bit integer, big-endian. */
var u32 = (value) => {
  view.setUint32(0, value, false);
  return [...bytes.subarray(0, 4)];
};

/** Signed 32-bit integer, big-endian. */
var i32 = (value) => {
  view.setInt32(0, value, false);
  return [...bytes.subarray(0, 4)];
};

/** Unsigned 64-bit integer, big-endian, written as two 32-bit halves. */
var u64 = (value) => {
  view.setUint32(0, Math.floor(value / 2 ** 32), false);
  view.setUint32(4, value, false);
  return [...bytes.subarray(0, 8)];
};

/** Signed fixed-point 8.8 number, big-endian. */
var fixed_8_8 = (value) => {
  view.setInt16(0, 2 ** 8 * value, false);
  return [...bytes.subarray(0, 2)];
};

/** Signed fixed-point 16.16 number, big-endian. */
var fixed_16_16 = (value) => {
  view.setInt32(0, 2 ** 16 * value, false);
  return [...bytes.subarray(0, 4)];
};

/** Signed fixed-point 2.30 number, big-endian. */
var fixed_2_30 = (value) => {
  view.setInt32(0, 2 ** 30 * value, false);
  return [...bytes.subarray(0, 4)];
};

/** UTF-16 code units of `text` as bytes, optionally null-terminated. */
var ascii = (text, nullTerminated = false) => {
  const codes = [];
  for (let i = 0; i < text.length; i++) codes.push(text.charCodeAt(i));
  if (nullTerminated) codes.push(0);
  return codes;
};
/** Final element of `arr`; yields `arr` itself when it is falsy. */
var last = (arr) => arr && arr[arr.length - 1];

/** The sample with the greatest presentation timestamp, or undefined if empty. */
var lastPresentedSample = (samples) => {
  let best;
  for (const sample of samples) {
    if (best === undefined || sample.presentationTimestamp > best.presentationTimestamp) {
      best = sample;
    }
  }
  return best;
};

/** Converts seconds into timescale units, rounding by default. */
var intoTimescale = (timeInSeconds, timescale, round = true) => {
  const scaled = timeInSeconds * timescale;
  return round ? Math.round(scaled) : scaled;
};
/** 3x3 row-major rotation matrix for a rotation of `rotationInDegrees`. */
var rotationMatrix = (rotationInDegrees) => {
  const theta = rotationInDegrees * (Math.PI / 180);
  const cosTheta = Math.cos(theta);
  const sinTheta = Math.sin(theta);
  return [
    cosTheta, sinTheta, 0,
    -sinTheta, cosTheta, 0,
    0, 0, 1
  ];
};
var IDENTITY_MATRIX = rotationMatrix(0);

/**
 * Serializes a 3x3 matrix for MP4 header boxes: fixed 16.16 for most cells,
 * fixed 2.30 for the last column (u, v, w), matching ISO 14496-12 layout.
 */
var matrixToBytes = (matrix) => [
  fixed_16_16(matrix[0]), fixed_16_16(matrix[1]), fixed_2_30(matrix[2]),
  fixed_16_16(matrix[3]), fixed_16_16(matrix[4]), fixed_2_30(matrix[5]),
  fixed_16_16(matrix[6]), fixed_16_16(matrix[7]), fixed_2_30(matrix[8])
];
/**
 * Recursively clones plain data (primitives, arrays, plain objects).
 * Falsy values and non-objects are returned as-is; note that Dates, Maps,
 * etc. are not handled specially — only plain structures are expected here.
 */
var deepClone = (x) => {
  if (!x || typeof x !== "object") return x;
  if (Array.isArray(x)) return x.map(deepClone);
  const copy = {};
  for (const [key, value] of Object.entries(x)) copy[key] = deepClone(value);
  return copy;
};

/** True when `value` fits in an unsigned 32-bit integer. */
var isU32 = (value) => value >= 0 && value < 2 ** 32;
+
+// src/box.ts
/**
 * Generic MP4 box descriptor. `contents` is a (possibly deeply nested)
 * array of byte values that gets flattened into a single Uint8Array;
 * `children` are nested boxes serialized after the contents.
 */
var box = (type, contents, children) => {
  const flattened = contents && new Uint8Array(contents.flat(10));
  return { type, contents: flattened, children };
};

/** "Full" box variant: one version byte plus a 24-bit flags field up front. */
var fullBox = (type, version, flags, contents, children) =>
  box(type, [u8(version), u24(flags), contents ?? []], children);
/**
 * File Type box. Fragmented output advertises the 'iso5' brand family;
 * regular output uses 'isom', adding 'avc1' when the file holds AVC video.
 */
var ftyp = (details) => {
  const minorVersion = 512;
  if (details.fragmented) {
    return box("ftyp", [
      ascii("iso5"), // Major brand
      u32(minorVersion), // Minor version
      // Compatible brands:
      ascii("iso5"),
      ascii("iso6"),
      ascii("mp41")
    ]);
  }
  return box("ftyp", [
    ascii("isom"), // Major brand
    u32(minorVersion), // Minor version
    // Compatible brands:
    ascii("isom"),
    details.holdsAvc ? ascii("avc1") : [],
    ascii("mp41")
  ]);
};
/** Media Data box stub; `largeSize` reserves an 8-byte (64-bit) size field. */
var mdat = (reserveLargeSize) => ({ type: "mdat", largeSize: reserveLargeSize });

/** Free-space box of the given size, used to pad reserved regions. */
var free = (size) => ({ type: "free", size });

/** Movie box: movie header, one 'trak' per track, plus 'mvex' when fragmented. */
var moov = (tracks, creationTime, fragmented = false) => {
  const children = [mvhd(creationTime, tracks)];
  for (const track of tracks) children.push(trak(track, creationTime));
  children.push(fragmented ? mvex(tracks) : null);
  return box("moov", null, children);
};
/**
 * Movie Header box: file-wide timing metadata. The duration is the latest
 * end time over all tracks that contain samples, in the global timescale.
 * 64-bit fields are used when the creation time or duration overflows u32.
 */
var mvhd = (creationTime, tracks) => {
  let latestEnd = 0;
  for (const track of tracks) {
    if (track.samples.length === 0) continue;
    const finalSample = lastPresentedSample(track.samples);
    latestEnd = Math.max(latestEnd, finalSample.presentationTimestamp + finalSample.duration);
  }
  const duration = intoTimescale(latestEnd, GLOBAL_TIMESCALE);
  const nextTrackId = Math.max(...tracks.map((track) => track.id)) + 1;
  const needsU64 = !isU32(creationTime) || !isU32(duration);
  const u32OrU64 = needsU64 ? u64 : u32;
  return fullBox("mvhd", +needsU64, 0, [
    u32OrU64(creationTime), // Creation time
    u32OrU64(creationTime), // Modification time
    u32(GLOBAL_TIMESCALE), // Timescale
    u32OrU64(duration), // Duration
    fixed_16_16(1), // Preferred rate
    fixed_8_8(1), // Preferred volume
    Array(10).fill(0), // Reserved
    matrixToBytes(IDENTITY_MATRIX), // Matrix
    Array(24).fill(0), // Pre-defined
    u32(nextTrackId) // Next track ID
  ]);
};
/** Track box: track header ('tkhd') plus media information ('mdia'). */
var trak = (track, creationTime) => box("trak", null, [
  tkhd(track, creationTime),
  mdia(track, creationTime)
]);

/**
 * Track Header box. The duration is expressed in the movie's global
 * timescale; 64-bit fields kick in when creation time or duration
 * overflows u32. Video tracks carry their rotation in the transformation
 * matrix, audio tracks always use the identity matrix.
 */
var tkhd = (track, creationTime) => {
  const finalSample = lastPresentedSample(track.samples);
  const durationInGlobalTimescale = intoTimescale(
    finalSample ? finalSample.presentationTimestamp + finalSample.duration : 0,
    GLOBAL_TIMESCALE
  );
  const needsU64 = !isU32(creationTime) || !isU32(durationInGlobalTimescale);
  const u32OrU64 = needsU64 ? u64 : u32;
  let matrix = IDENTITY_MATRIX;
  if (track.info.type === "video") {
    // A numeric rotation is converted; otherwise it is already a matrix.
    matrix = typeof track.info.rotation === "number"
      ? rotationMatrix(track.info.rotation)
      : track.info.rotation;
  }
  return fullBox("tkhd", +needsU64, 3, [
    u32OrU64(creationTime), // Creation time
    u32OrU64(creationTime), // Modification time
    u32(track.id), // Track ID
    u32(0), // Reserved
    u32OrU64(durationInGlobalTimescale), // Duration
    Array(8).fill(0), // Reserved
    u16(0), // Layer
    u16(0), // Alternate group
    fixed_8_8(track.info.type === "audio" ? 1 : 0), // Volume
    u16(0), // Reserved
    matrixToBytes(matrix), // Matrix
    fixed_16_16(track.info.type === "video" ? track.info.width : 0), // Track width
    fixed_16_16(track.info.type === "video" ? track.info.height : 0) // Track height
  ]);
};
/** Media box: media header, handler reference and media information. */
var mdia = (track, creationTime) => box("mdia", null, [
  mdhd(track, creationTime),
  hdlr(track.info.type === "video" ? "vide" : "soun"),
  minf(track)
]);

/**
 * Media Header box. Unlike 'tkhd', the duration here is in the track's own
 * timescale. 64-bit fields are used on u32 overflow.
 */
var mdhd = (track, creationTime) => {
  const finalSample = lastPresentedSample(track.samples);
  const localDuration = intoTimescale(
    finalSample ? finalSample.presentationTimestamp + finalSample.duration : 0,
    track.timescale
  );
  const needsU64 = !isU32(creationTime) || !isU32(localDuration);
  const u32OrU64 = needsU64 ? u64 : u32;
  return fullBox("mdhd", +needsU64, 0, [
    u32OrU64(creationTime), // Creation time
    u32OrU64(creationTime), // Modification time
    u32(track.timescale), // Timescale
    u32OrU64(localDuration), // Duration
    u16(21956), // Language ("und", undetermined)
    u16(0) // Quality
  ]);
};

/** Handler Reference box naming the media handler for this track. */
var hdlr = (componentSubtype) => fullBox("hdlr", 0, 0, [
  ascii("mhlr"), // Component type
  ascii(componentSubtype), // Component subtype
  u32(0), // Component manufacturer
  u32(0), // Component flags
  u32(0), // Component flags mask
  ascii("mp4-muxer-hdlr", true) // Component name
]);
/** Media Information box: per-type header, data information, sample table. */
var minf = (track) => box("minf", null, [
  track.info.type === "video" ? vmhd() : smhd(),
  dinf(),
  stbl(track)
]);

/** Video Media Information Header (all fields zero, flags = 1). */
var vmhd = () => fullBox("vmhd", 0, 1, [
  u16(0), // Graphics mode
  u16(0), // Opcolor R
  u16(0), // Opcolor G
  u16(0) // Opcolor B
]);

/** Sound Media Information Header (balance centered). */
var smhd = () => fullBox("smhd", 0, 0, [
  u16(0), // Balance
  u16(0) // Reserved
]);

/** Data Information box wrapping a single data-reference box. */
var dinf = () => box("dinf", null, [dref()]);

/** Data Reference box with exactly one 'url ' entry. */
var dref = () => fullBox("dref", 0, 0, [
  u32(1) // Entry count
], [
  url()
]);

// flags = 1 — presumably marks the media data as self-contained in this
// file (ISO 14496-12); confirm against the spec if changing.
var url = () => fullBox("url ", 0, 1);
/**
 * Sample Table box. The 'ctts' child is only emitted when the composition
 * time offset table is non-trivial (more than one entry, or any non-zero
 * sample composition offset).
 */
var stbl = (track) => {
  const table = track.compositionTimeOffsetTable;
  const needsCtts = table.length > 1 || table.some((entry) => entry.sampleCompositionTimeOffset !== 0);
  return box("stbl", null, [
    stsd(track),
    stts(track),
    stss(track),
    stsc(track),
    stsz(track),
    stco(track),
    needsCtts ? ctts(track) : null
  ]);
};

/** Sample Description box with one entry, chosen by track type and codec. */
var stsd = (track) => fullBox("stsd", 0, 0, [
  u32(1) // Entry count
], [
  track.info.type === "video"
    ? videoSampleDescription(VIDEO_CODEC_TO_BOX_NAME[track.info.codec], track)
    : soundSampleDescription(AUDIO_CODEC_TO_BOX_NAME[track.info.codec], track)
]);
/** Visual sample entry plus the codec-specific configuration child box. */
var videoSampleDescription = (compressionType, track) => box(compressionType, [
  Array(6).fill(0), // Reserved
  u16(1), // Data reference index
  u16(0), // Pre-defined
  u16(0), // Reserved
  Array(12).fill(0), // Pre-defined
  u16(track.info.width), // Width
  u16(track.info.height), // Height
  u32(4718592), // Horizontal resolution (0x00480000 = 72 in fixed 16.16)
  u32(4718592), // Vertical resolution
  u32(0), // Reserved
  u16(1), // Frame count
  Array(32).fill(0), // Compressor name
  u16(24), // Depth
  i16(65535) // Pre-defined
], [
  VIDEO_CODEC_TO_CONFIGURATION_BOX[track.info.codec](track)
]);

/**
 * AVC configuration box. The decoder config description already is an
 * AVCDecoderConfigurationRecord, so its bytes are copied verbatim.
 * Falls through (nullish) when no decoder config has been set.
 */
var avcC = (track) => {
  const config = track.info.decoderConfig;
  return config && box("avcC", [...new Uint8Array(config.description)]);
};

/**
 * HEVC configuration box. The description is a HEVCDecoderConfigurationRecord
 * and is copied verbatim, as above.
 */
var hvcC = (track) => {
  const config = track.info.decoderConfig;
  return config && box("hvcC", [...new Uint8Array(config.description)]);
};
/**
 * VP9 configuration box. Profile, level and bit depth are parsed out of the
 * codec string ("vp09.PP.LL.DD"). Requires `colorSpace` on the decoder
 * config; returns null when no decoder config has been set.
 */
var vpcC = (track) => {
  const decoderConfig = track.info.decoderConfig;
  if (!decoderConfig) return null;
  if (!decoderConfig.colorSpace) {
    throw new Error(`'colorSpace' is required in the decoder config for VP9.`);
  }
  const [, profileStr, levelStr, bitDepthStr] = decoderConfig.codec.split(".");
  const profile = Number(profileStr);
  const level = Number(levelStr);
  const bitDepth = Number(bitDepthStr);
  const chromaSubsampling = 0;
  const thirdByte = (bitDepth << 4) + (chromaSubsampling << 1) + Number(decoderConfig.colorSpace.fullRange);
  // 2 = "unspecified" for all three colour fields — TODO confirm against
  // the VP9 codec ISO spec if these ever need to carry real values.
  const colourPrimaries = 2;
  const transferCharacteristics = 2;
  const matrixCoefficients = 2;
  return fullBox("vpcC", 1, 0, [
    u8(profile), // Profile
    u8(level), // Level
    u8(thirdByte), // Bit depth, chroma subsampling, full range
    u8(colourPrimaries), // Colour primaries
    u8(transferCharacteristics), // Transfer characteristics
    u8(matrixCoefficients), // Matrix coefficients
    u16(0) // Codec initialization data size
  ]);
};

/** Minimal AV1 configuration box (marker + version byte, rest zeroed). */
var av1C = () => {
  const marker = 1;
  const version = 1;
  return box("av1C", [(marker << 7) + version, 0, 0, 0]);
};
/** Audio sample entry plus the codec-specific configuration child box. */
var soundSampleDescription = (compressionType, track) => box(compressionType, [
  Array(6).fill(0), // Reserved
  u16(1), // Data reference index
  u16(0), // Version
  u16(0), // Revision level
  u32(0), // Vendor
  u16(track.info.numberOfChannels), // Number of channels
  u16(16), // Sample size (bits)
  u16(0), // Compression ID
  u16(0), // Packet size
  fixed_16_16(track.info.sampleRate) // Sample rate
], [
  AUDIO_CODEC_TO_CONFIGURATION_BOX[track.info.codec](track)
]);

/**
 * Elementary Stream Descriptor box for AAC. Descriptor tag layout follows
 * https://stackoverflow.com/a/54803118 — the embedded AudioSpecificConfig
 * bytes come from the decoder config description.
 */
var esds = (track) => {
  const description = new Uint8Array(track.info.decoderConfig.description);
  const descLen = description.byteLength;
  const payload = [
    u32(58753152), // TAG(3) = Object Descriptor ([2])
    u8(32 + descLen), // length of this OD (which includes the next 2 tags)
    u16(1), // ES_ID = 1
    u8(0), // flags etc = 0
    u32(75530368), // TAG(4) = ES Descriptor ([2]) embedded in above OD
    u8(18 + descLen), // length of this ESD
    u8(64), // MPEG-4 Audio
    u8(21), // stream type(6bits)=5 audio, flags(2bits)=1
    u24(0), // 24bit buffer size
    u32(130071), // max bitrate
    u32(130071), // avg bitrate
    u32(92307584), // TAG(5) = ASC ([2],[3]) embedded in above OD
    u8(descLen), // length
    ...description,
    u32(109084800), // TAG(6)
    u8(1), // length
    u8(2) // data
  ];
  return fullBox("esds", 0, 0, payload);
};

/** Opus decoder configuration ('dOps') box. */
var dOps = (track) => box("dOps", [
  u8(0), // Version
  u8(track.info.numberOfChannels), // OutputChannelCount
  u16(3840), // PreSkip, should be at least 80 milliseconds worth of playback, measured in 48000 Hz samples
  u32(track.info.sampleRate), // InputSampleRate
  fixed_8_8(0), // OutputGain
  u8(0) // ChannelMappingFamily
]);
// Builds the decoding time-to-sample box ('stts'): run-length encoded sample
// durations in the track's timescale.
var stts = (track) => fullBox("stts", 0, 0, [
  u32(track.timeToSampleTable.length), // Number of entries
  track.timeToSampleTable.map((entry) => [
    u32(entry.sampleCount), // Sample count
    u32(entry.sampleDelta) // Sample duration
  ])
]);
// Builds the sync sample box ('stss') listing 1-based sample numbers of key
// samples. Returns null (box omitted) when every sample is a key sample.
var stss = (track) => {
  if (!track.samples.some((sample) => sample.type !== "key"))
    return null;
  let syncSampleNumbers = [];
  for (let [index, sample] of track.samples.entries()) {
    if (sample.type === "key")
      syncSampleNumbers.push(index + 1); // sample numbers are 1-based
  }
  return fullBox("stss", 0, 0, [
    u32(syncSampleNumbers.length), // Number of entries
    syncSampleNumbers.map((n) => u32(n)) // Sync sample table
  ]);
};
// Builds the sample-to-chunk box ('stsc'): runs of consecutive chunks that
// share the same samples-per-chunk count.
var stsc = (track) => fullBox("stsc", 0, 0, [
  u32(track.compactlyCodedChunkTable.length), // Number of entries
  track.compactlyCodedChunkTable.map((entry) => [
    u32(entry.firstChunk), // First chunk
    u32(entry.samplesPerChunk), // Samples per chunk
    u32(1) // Sample description index
  ])
]);
// Builds the sample size box ('stsz'). A leading 0 signals that sample sizes
// are not constant, so a full per-sample size table follows.
var stsz = (track) => {
  const sizeTable = track.samples.map((sample) => u32(sample.size));
  return fullBox("stsz", 0, 0, [
    u32(0), // Sample size (0 means non-constant size)
    u32(track.samples.length), // Number of entries
    sizeTable // Sample size table
  ]);
};
// Builds the chunk offset box. Offsets that don't fit in 32 bits require the
// 64-bit variant ('co64') instead of the plain 'stco'.
var stco = (track) => {
  const chunks = track.finalizedChunks;
  const needsCo64 = chunks.length > 0 && last(chunks).offset >= 2 ** 32;
  return fullBox(needsCo64 ? "co64" : "stco", 0, 0, [
    u32(chunks.length), // Number of entries
    chunks.map((chunk) => needsCo64 ? u64(chunk.offset) : u32(chunk.offset)) // Chunk offset table
  ]);
};
// Builds the composition time offset box ('ctts'): run-length encoded offsets
// between presentation and decode timestamps.
var ctts = (track) => fullBox("ctts", 0, 0, [
  u32(track.compositionTimeOffsetTable.length), // Number of entries
  track.compositionTimeOffsetTable.map((run) => [
    u32(run.sampleCount), // Sample count
    u32(run.sampleCompositionTimeOffset) // Sample offset
  ])
]);
// Builds the movie extends box ('mvex'), which marks the file as fragmented
// and contains one 'trex' child per track.
var mvex = (tracks) => {
  const trexBoxes = tracks.map((track) => trex(track));
  return box("mvex", null, trexBoxes);
};
// Builds the track extends box ('trex') holding per-track fragment defaults.
var trex = (track) => fullBox("trex", 0, 0, [
  u32(track.id), // Track ID
  u32(1), // Default sample description index
  u32(0), // Default sample duration
  u32(0), // Default sample size
  u32(0) // Default sample flags
]);
// Builds a movie fragment box ('moof'): the fragment header followed by one
// track fragment per track.
var moof = (sequenceNumber, tracks) => box("moof", null, [
  mfhd(sequenceNumber),
  ...tracks.map((track) => traf(track))
]);
// Builds the movie fragment header box ('mfhd') carrying the 1-based fragment
// sequence number.
var mfhd = (sequenceNumber) => fullBox("mfhd", 0, 0, [
  u32(sequenceNumber) // Sequence number
]);
// Computes the 32-bit ISOBMFF sample flags word for a fragment sample.
// Delta samples get sample_depends_on = 1 and the non-sync-sample bit set;
// key samples get sample_depends_on = 2 and are marked as sync samples.
var fragmentSampleFlags = (sample) => {
  const isDeltaSample = sample.type === "delta";
  const byte1 = isDeltaSample ? 1 : 2; // sample_depends_on
  const byte2 = isDeltaSample ? 1 : 0; // sample_is_non_sync_sample bit
  return (byte1 << 24) | (byte2 << 16); // lower two bytes are always 0
};
// Builds a track fragment box ('traf'): header, decode-time anchor, then the
// sample run for the track's current chunk.
var traf = (track) => {
  const children = [tfhd(track), tfdt(track), trun(track)];
  return box("traf", null, children);
};
// Builds the track fragment header box ('tfhd') with per-fragment defaults.
var tfhd = (track) => {
  // tf_flags: 0x8 default-sample-duration, 0x10 default-sample-size,
  // 0x20 default-sample-flags, 0x20000 default-base-is-moof.
  const tfFlags = 8 | 16 | 32 | 131072;
  // Prefer the second sample as the source of the defaults when available;
  // falls back to the first for single-sample chunks.
  const referenceSample = track.currentChunk.samples[1] ?? track.currentChunk.samples[0];
  return fullBox("tfhd", 0, tfFlags, [
    u32(track.id), // Track ID
    u32(referenceSample.timescaleUnitsToNextSample), // Default sample duration
    u32(referenceSample.size), // Default sample size
    u32(fragmentSampleFlags(referenceSample)) // Default sample flags
  ]);
};
// Builds the track fragment decode time box ('tfdt'). Version 1 is used so the
// base media decode time is stored as a 64-bit value.
var tfdt = (track) => {
  const baseMediaDecodeTime = intoTimescale(track.currentChunk.startTimestamp, track.timescale);
  return fullBox("tfdt", 1, 0, [
    u64(baseMediaDecodeTime) // Base Media Decode Time
  ]);
};
// Builds the track fragment run box ('trun') describing every sample of the
// current chunk. Per-sample fields (duration, size, flags, composition time
// offset) are only emitted when they actually vary across samples; otherwise
// the defaults from the tfhd box apply.
var trun = (track) => {
  let allSampleDurations = track.currentChunk.samples.map((x) => x.timescaleUnitsToNextSample);
  let allSampleSizes = track.currentChunk.samples.map((x) => x.size);
  let allSampleFlags = track.currentChunk.samples.map(fragmentSampleFlags);
  let allSampleCompositionTimeOffsets = track.currentChunk.samples.map((x) => intoTimescale(x.presentationTimestamp - x.decodeTimestamp, track.timescale));
  let uniqueSampleDurations = new Set(allSampleDurations);
  let uniqueSampleSizes = new Set(allSampleSizes);
  let uniqueSampleFlags = new Set(allSampleFlags);
  let uniqueSampleCompositionTimeOffsets = new Set(allSampleCompositionTimeOffsets);
  // "First sample flags" covers the common case where only the very first
  // sample differs (e.g. the key frame starting the fragment).
  let firstSampleFlagsPresent = uniqueSampleFlags.size === 2 && allSampleFlags[0] !== allSampleFlags[1];
  let sampleDurationPresent = uniqueSampleDurations.size > 1;
  let sampleSizePresent = uniqueSampleSizes.size > 1;
  let sampleFlagsPresent = !firstSampleFlagsPresent && uniqueSampleFlags.size > 1;
  let sampleCompositionTimeOffsetsPresent = uniqueSampleCompositionTimeOffsets.size > 1 || [...uniqueSampleCompositionTimeOffsets].some((x) => x !== 0);
  // tr_flags bits: 0x1 data-offset-present, 0x4 first-sample-flags-present,
  // 0x100 sample-duration, 0x200 sample-size, 0x400 sample-flags,
  // 0x800 sample-composition-time-offsets.
  let flags = 0;
  flags |= 1;
  flags |= 4 * +firstSampleFlagsPresent;
  flags |= 256 * +sampleDurationPresent;
  flags |= 512 * +sampleSizePresent;
  flags |= 1024 * +sampleFlagsPresent;
  flags |= 2048 * +sampleCompositionTimeOffsetsPresent;
  // Version 1 => composition time offsets are written as signed 32-bit ints.
  return fullBox("trun", 1, flags, [
    u32(track.currentChunk.samples.length),
    // Sample count
    u32(track.currentChunk.offset - track.currentChunk.moofOffset || 0),
    // Data offset
    firstSampleFlagsPresent ? u32(allSampleFlags[0]) : [],
    track.currentChunk.samples.map((_, i) => [
      sampleDurationPresent ? u32(allSampleDurations[i]) : [],
      // Sample duration
      sampleSizePresent ? u32(allSampleSizes[i]) : [],
      // Sample size
      sampleFlagsPresent ? u32(allSampleFlags[i]) : [],
      // Sample flags
      // Sample composition time offsets
      sampleCompositionTimeOffsetsPresent ? i32(allSampleCompositionTimeOffsets[i]) : []
    ])
  ]);
};
// Builds the movie fragment random access box ('mfra'): one 'tfra' per track
// (tfra receives the track index via map), followed by the trailing 'mfro'
// whose size field is patched in from the outside.
var mfra = (tracks) => box("mfra", null, [
  ...tracks.map((track, index) => tfra(track, index)),
  mfro()
]);
// Builds the track fragment random access box ('tfra') with one entry per
// finalized chunk. Version 1 => 64-bit time and moof-offset fields.
var tfra = (track, trackIndex) => {
  const version = 1;
  const entries = track.finalizedChunks.map((chunk) => [
    u64(intoTimescale(chunk.startTimestamp, track.timescale)), // Time
    u64(chunk.moofOffset), // moof offset
    u32(trackIndex + 1), // traf number
    u32(1), // trun number
    u32(1) // Sample number
  ]);
  return fullBox("tfra", version, 0, [
    u32(track.id), // Track ID
    u32(63), // This specifies that traf number, trun number and sample number are 32-bit ints
    u32(track.finalizedChunks.length), // Number of entries
    entries
  ]);
};
// Builds the movie fragment random access offset box ('mfro').
var mfro = () => fullBox("mfro", 0, 0, [
  // This value needs to be overwritten manually from the outside, where the actual size of the enclosing mfra box
  // is known
  u32(0) // Size
]);
// Maps codec identifiers to the four-character sample entry box names used in
// the sample description ('stsd') box.
var VIDEO_CODEC_TO_BOX_NAME = {
  "avc": "avc1",
  "hevc": "hvc1",
  "vp9": "vp09",
  "av1": "av01"
};
// Maps codec identifiers to the builder for that codec's configuration box.
var VIDEO_CODEC_TO_CONFIGURATION_BOX = {
  "avc": avcC,
  "hevc": hvcC,
  "vp9": vpcC,
  "av1": av1C
};
var AUDIO_CODEC_TO_BOX_NAME = {
  "aac": "mp4a",
  "opus": "Opus"
};
var AUDIO_CODEC_TO_CONFIGURATION_BOX = {
  "aac": esds,
  "opus": dOps
};
+
// src/target.ts
// Target that collects the finished file into a single in-memory ArrayBuffer.
var ArrayBufferTarget = class {
  constructor() {
    // Filled in with the completed file bytes once the muxer is finalized.
    this.buffer = null;
  }
};
// Target that streams output via callbacks supplied in `options`.
var StreamTarget = class {
  constructor(options) {
    // Stored as-is; consumed by the stream writer implementations.
    this.options = options;
  }
};
// Target that writes directly into a FileSystemWritableFileStream.
var FileSystemWritableFileStreamTarget = class {
  constructor(stream, options) {
    // Both are stored as-is; consumed by the corresponding writer.
    this.stream = stream;
    this.options = options;
  }
};
+
// src/writer.ts
// Shared 8-byte scratch buffer + DataView used to serialize fixed-width
// big-endian integers without re-allocating per write.
var _helper, _helperView;
// Abstract base for all output writers. Subclasses implement write(data) and
// finalize(); this class provides position tracking and box serialization.
var Writer = class {
  constructor() {
    // Absolute byte position within the output file for the next write.
    this.pos = 0;
    __privateAdd(this, _helper, new Uint8Array(8));
    __privateAdd(this, _helperView, new DataView(__privateGet(this, _helper).buffer));
    /**
     * Stores the position from the start of the file to where boxes elements have been written. This is used to
     * rewrite/edit elements that were already added before, and to measure sizes of things.
     */
    this.offsets = /* @__PURE__ */ new WeakMap();
  }
  /** Sets the current position for future writes to a new one. */
  seek(newPos) {
    this.pos = newPos;
  }
  /** Writes an unsigned 32-bit big-endian integer at the current position. */
  writeU32(value) {
    __privateGet(this, _helperView).setUint32(0, value, false);
    this.write(__privateGet(this, _helper).subarray(0, 4));
  }
  /** Writes an unsigned 64-bit big-endian integer as two 32-bit halves. */
  writeU64(value) {
    __privateGet(this, _helperView).setUint32(0, Math.floor(value / 2 ** 32), false);
    __privateGet(this, _helperView).setUint32(4, value, false);
    this.write(__privateGet(this, _helper).subarray(0, 8));
  }
  /** Writes one byte per char code, flushing the 8-byte scratch buffer as it fills. */
  writeAscii(text) {
    for (let i = 0; i < text.length; i++) {
      __privateGet(this, _helperView).setUint8(i % 8, text.charCodeAt(i));
      if (i % 8 === 7)
        this.write(__privateGet(this, _helper));
    }
    if (text.length % 8 !== 0) {
      this.write(__privateGet(this, _helper).subarray(0, text.length % 8));
    }
  }
  /**
   * Serializes a box (and, recursively, its children) at the current position.
   * For boxes with children, the header is first written with a placeholder
   * size of 0, then rewritten once the true end position is known.
   */
  writeBox(box2) {
    this.offsets.set(box2, this.pos);
    if (box2.contents && !box2.children) {
      this.writeBoxHeader(box2, box2.size ?? box2.contents.byteLength + 8);
      this.write(box2.contents);
    } else {
      let startPos = this.pos;
      this.writeBoxHeader(box2, 0);
      if (box2.contents)
        this.write(box2.contents);
      if (box2.children) {
        for (let child of box2.children)
          if (child)
            this.writeBox(child);
      }
      let endPos = this.pos;
      let size = box2.size ?? endPos - startPos;
      this.seek(startPos);
      this.writeBoxHeader(box2, size);
      this.seek(endPos);
    }
  }
  /**
   * Writes the box header. Large-size boxes store 1 in the 32-bit size field
   * and the real size as a u64 after the 4-character type.
   */
  writeBoxHeader(box2, size) {
    this.writeU32(box2.largeSize ? 1 : size);
    this.writeAscii(box2.type);
    if (box2.largeSize)
      this.writeU64(size);
  }
  /** Returns the header size in bytes: 8, plus 8 more for the large-size extension. */
  measureBoxHeader(box2) {
    return 8 + (box2.largeSize ? 8 : 0);
  }
  /** Re-serializes an already-written box in place, using the position recorded in `offsets`. */
  patchBox(box2) {
    let endPos = this.pos;
    this.seek(this.offsets.get(box2));
    this.writeBox(box2);
    this.seek(endPos);
  }
  /** Computes the total serialized size of a box (children included) without writing it. */
  measureBox(box2) {
    if (box2.contents && !box2.children) {
      let headerSize = this.measureBoxHeader(box2);
      return headerSize + box2.contents.byteLength;
    } else {
      let result = this.measureBoxHeader(box2);
      if (box2.contents)
        result += box2.contents.byteLength;
      if (box2.children) {
        for (let child of box2.children)
          if (child)
            result += this.measureBox(child);
      }
      return result;
    }
  }
};
_helper = new WeakMap();
_helperView = new WeakMap();
var _target, _buffer, _bytes, _maxPos, _ensureSize, ensureSize_fn;
// Writer backing an ArrayBufferTarget: accumulates the whole file in a
// geometrically-growing in-memory buffer and copies it into the target's
// `buffer` on finalize.
var ArrayBufferTargetWriter = class extends Writer {
  constructor(target) {
    super();
    __privateAdd(this, _ensureSize);
    __privateAdd(this, _target, void 0);
    __privateAdd(this, _buffer, new ArrayBuffer(2 ** 16));
    __privateAdd(this, _bytes, new Uint8Array(__privateGet(this, _buffer)));
    // Highest position ever written; `pos` can move backwards via seek().
    __privateAdd(this, _maxPos, 0);
    __privateSet(this, _target, target);
  }
  write(data) {
    __privateMethod(this, _ensureSize, ensureSize_fn).call(this, this.pos + data.byteLength);
    __privateGet(this, _bytes).set(data, this.pos);
    this.pos += data.byteLength;
    __privateSet(this, _maxPos, Math.max(__privateGet(this, _maxPos), this.pos));
  }
  finalize() {
    __privateMethod(this, _ensureSize, ensureSize_fn).call(this, this.pos);
    __privateGet(this, _target).buffer = __privateGet(this, _buffer).slice(0, Math.max(__privateGet(this, _maxPos), this.pos));
  }
};
_target = new WeakMap();
_buffer = new WeakMap();
_bytes = new WeakMap();
_maxPos = new WeakMap();
_ensureSize = new WeakSet();
// Doubles the backing buffer's length until it can hold `size` bytes, then
// copies over the existing contents.
ensureSize_fn = function(size) {
  let newLength = __privateGet(this, _buffer).byteLength;
  while (newLength < size)
    newLength *= 2;
  if (newLength === __privateGet(this, _buffer).byteLength)
    return;
  let newBuffer = new ArrayBuffer(newLength);
  let newBytes = new Uint8Array(newBuffer);
  newBytes.set(__privateGet(this, _bytes), 0);
  __privateSet(this, _buffer, newBuffer);
  __privateSet(this, _bytes, newBytes);
};
var _target2, _sections;
// Writer backing a non-chunked StreamTarget: buffers every write as a
// (data, start) section and, on flush(), merges overlapping/adjacent sections
// into contiguous chunks before invoking the target's onData callback.
var StreamTargetWriter = class extends Writer {
  constructor(target) {
    super();
    __privateAdd(this, _target2, void 0);
    __privateAdd(this, _sections, []);
    __privateSet(this, _target2, target);
  }
  write(data) {
    __privateGet(this, _sections).push({
      // Copy the bytes: the caller may reuse its buffer after this call.
      data: data.slice(),
      start: this.pos
    });
    this.pos += data.byteLength;
  }
  flush() {
    if (__privateGet(this, _sections).length === 0)
      return;
    let chunks = [];
    let sorted = [...__privateGet(this, _sections)].sort((a, b) => a.start - b.start);
    chunks.push({
      start: sorted[0].start,
      size: sorted[0].data.byteLength
    });
    // Coalesce sections that touch or overlap the previous chunk.
    for (let i = 1; i < sorted.length; i++) {
      let lastChunk = chunks[chunks.length - 1];
      let section = sorted[i];
      if (section.start <= lastChunk.start + lastChunk.size) {
        lastChunk.size = Math.max(lastChunk.size, section.start + section.data.byteLength - lastChunk.start);
      } else {
        chunks.push({
          start: section.start,
          size: section.data.byteLength
        });
      }
    }
    // Materialize each chunk in insertion order so later writes to the same
    // region overwrite earlier ones, then hand it to the target.
    for (let chunk of chunks) {
      chunk.data = new Uint8Array(chunk.size);
      for (let section of __privateGet(this, _sections)) {
        if (chunk.start <= section.start && section.start < chunk.start + chunk.size) {
          chunk.data.set(section.data, section.start - chunk.start);
        }
      }
      __privateGet(this, _target2).options.onData?.(chunk.data, chunk.start);
    }
    __privateGet(this, _sections).length = 0;
  }
  finalize() {
  }
};
_target2 = new WeakMap();
_sections = new WeakMap();
// Default chunk size (16 MiB) and the cap on chunks held in RAM at once.
var DEFAULT_CHUNK_SIZE = 2 ** 24;
var MAX_CHUNKS_AT_ONCE = 2;
var _target3, _chunkSize, _chunks, _writeDataIntoChunks, writeDataIntoChunks_fn, _insertSectionIntoChunk, insertSectionIntoChunk_fn, _createChunk, createChunk_fn, _flushChunks, flushChunks_fn;
// Writer backing a chunked StreamTarget: output is divided into fixed-size
// chunks that are filled in RAM and flushed to the target's onData callback.
var ChunkedStreamTargetWriter = class extends Writer {
  constructor(target) {
    super();
    __privateAdd(this, _writeDataIntoChunks);
    __privateAdd(this, _insertSectionIntoChunk);
    __privateAdd(this, _createChunk);
    __privateAdd(this, _flushChunks);
    __privateAdd(this, _target3, void 0);
    __privateAdd(this, _chunkSize, void 0);
    /**
     * The data is divided up into fixed-size chunks, whose contents are first filled in RAM and then flushed out.
     * A chunk is flushed if all of its contents have been written.
     */
    __privateAdd(this, _chunks, []);
    __privateSet(this, _target3, target);
    __privateSet(this, _chunkSize, target.options?.chunkSize ?? DEFAULT_CHUNK_SIZE);
    if (!Number.isInteger(__privateGet(this, _chunkSize)) || __privateGet(this, _chunkSize) < 2 ** 10) {
      throw new Error("Invalid StreamTarget options: chunkSize must be an integer not smaller than 1024.");
    }
  }
  write(data) {
    __privateMethod(this, _writeDataIntoChunks, writeDataIntoChunks_fn).call(this, data, this.pos);
    __privateMethod(this, _flushChunks, flushChunks_fn).call(this);
    this.pos += data.byteLength;
  }
  finalize() {
    // Force-flush everything still held in RAM.
    __privateMethod(this, _flushChunks, flushChunks_fn).call(this, true);
  }
};
_target3 = new WeakMap();
_chunkSize = new WeakMap();
_chunks = new WeakMap();
_writeDataIntoChunks = new WeakSet();
// Copies `data` into the chunk covering `position`, creating the chunk if
// needed, and recurses for the remainder that spills into the next chunk.
writeDataIntoChunks_fn = function(data, position) {
  let chunkIndex = __privateGet(this, _chunks).findIndex((x) => x.start <= position && position < x.start + __privateGet(this, _chunkSize));
  if (chunkIndex === -1)
    chunkIndex = __privateMethod(this, _createChunk, createChunk_fn).call(this, position);
  let chunk = __privateGet(this, _chunks)[chunkIndex];
  let relativePosition = position - chunk.start;
  let toWrite = data.subarray(0, Math.min(__privateGet(this, _chunkSize) - relativePosition, data.byteLength));
  chunk.data.set(toWrite, relativePosition);
  let section = {
    start: relativePosition,
    end: relativePosition + toWrite.byteLength
  };
  __privateMethod(this, _insertSectionIntoChunk, insertSectionIntoChunk_fn).call(this, chunk, section);
  // Once the chunk's written sections have merged into one full-size span,
  // it is complete and can be flushed.
  if (chunk.written[0].start === 0 && chunk.written[0].end === __privateGet(this, _chunkSize)) {
    chunk.shouldFlush = true;
  }
  // Cap memory use: mark all but the newest chunk for flushing.
  if (__privateGet(this, _chunks).length > MAX_CHUNKS_AT_ONCE) {
    for (let i = 0; i < __privateGet(this, _chunks).length - 1; i++) {
      __privateGet(this, _chunks)[i].shouldFlush = true;
    }
    __privateMethod(this, _flushChunks, flushChunks_fn).call(this);
  }
  if (toWrite.byteLength < data.byteLength) {
    __privateMethod(this, _writeDataIntoChunks, writeDataIntoChunks_fn).call(this, data.subarray(toWrite.byteLength), position + toWrite.byteLength);
  }
};
_insertSectionIntoChunk = new WeakSet();
// Inserts a written section into the chunk's sorted section list (binary
// search by start), then merges any sections that now touch or overlap.
insertSectionIntoChunk_fn = function(chunk, section) {
  let low = 0;
  let high = chunk.written.length - 1;
  let index = -1;
  while (low <= high) {
    let mid = Math.floor(low + (high - low + 1) / 2);
    if (chunk.written[mid].start <= section.start) {
      low = mid + 1;
      index = mid;
    } else {
      high = mid - 1;
    }
  }
  chunk.written.splice(index + 1, 0, section);
  if (index === -1 || chunk.written[index].end < section.start)
    index++;
  while (index < chunk.written.length - 1 && chunk.written[index].end >= chunk.written[index + 1].start) {
    chunk.written[index].end = Math.max(chunk.written[index].end, chunk.written[index + 1].end);
    chunk.written.splice(index + 1, 1);
  }
};
_createChunk = new WeakSet();
// Allocates the chunk whose aligned range contains `includesPosition` and
// returns its index in the (kept sorted) chunk list.
createChunk_fn = function(includesPosition) {
  let start = Math.floor(includesPosition / __privateGet(this, _chunkSize)) * __privateGet(this, _chunkSize);
  let chunk = {
    start,
    data: new Uint8Array(__privateGet(this, _chunkSize)),
    written: [],
    shouldFlush: false
  };
  __privateGet(this, _chunks).push(chunk);
  __privateGet(this, _chunks).sort((a, b) => a.start - b.start);
  return __privateGet(this, _chunks).indexOf(chunk);
};
_flushChunks = new WeakSet();
// Emits each flushable chunk's written sections via onData and drops the
// chunk; with `force`, flushes every chunk regardless of its flag.
flushChunks_fn = function(force = false) {
  for (let i = 0; i < __privateGet(this, _chunks).length; i++) {
    let chunk = __privateGet(this, _chunks)[i];
    if (!chunk.shouldFlush && !force)
      continue;
    for (let section of chunk.written) {
      __privateGet(this, _target3).options.onData?.(
        chunk.data.subarray(section.start, section.end),
        chunk.start + section.start
      );
    }
    __privateGet(this, _chunks).splice(i--, 1);
  }
};
// Adapts a FileSystemWritableFileStream to the chunked stream writer: each
// flushed chunk becomes a positioned 'write' command on the stream.
var FileSystemWritableFileStreamTargetWriter = class extends ChunkedStreamTargetWriter {
  constructor(target) {
    const forwardChunk = (data, position) => target.stream.write({
      type: "write",
      data,
      position
    });
    super(new StreamTarget({
      onData: forwardChunk,
      chunkSize: target.options?.chunkSize
    }));
  }
};
+
// src/muxer.ts
// Timescale used for movie-level (non-media) time values.
var GLOBAL_TIMESCALE = 1e3;
var SUPPORTED_VIDEO_CODECS2 = ["avc", "hevc", "vp9", "av1"];
var SUPPORTED_AUDIO_CODECS2 = ["aac", "opus"];
// Seconds between 1904-01-01 and 1970-01-01 (24107 days * 86400 s); added to
// Unix time to produce MP4-epoch creation timestamps.
var TIMESTAMP_OFFSET = 2082844800;
var FIRST_TIMESTAMP_BEHAVIORS = ["strict", "offset", "cross-track-offset"];
var _options, _writer, _ftypSize, _mdat, _videoTrack, _audioTrack, _creationTime, _finalizedChunks, _nextFragmentNumber, _videoSampleQueue, _audioSampleQueue, _finalized, _validateOptions, validateOptions_fn, _writeHeader, writeHeader_fn, _computeMoovSizeUpperBound, computeMoovSizeUpperBound_fn, _prepareTracks, prepareTracks_fn, _generateMpeg4AudioSpecificConfig, generateMpeg4AudioSpecificConfig_fn, _createSampleForTrack, createSampleForTrack_fn, _addSampleToTrack, addSampleToTrack_fn, _validateTimestamp, validateTimestamp_fn, _finalizeCurrentChunk, finalizeCurrentChunk_fn, _finalizeFragment, finalizeFragment_fn, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn, _ensureNotFinalized, ensureNotFinalized_fn;
// The public muxer: accepts encoded video/audio chunks and writes an MP4 file
// to the configured target, supporting regular, in-memory-fastStart,
// reserved-space-fastStart and fragmented output modes.
var Muxer = class {
  constructor(options) {
    __privateAdd(this, _validateOptions);
    __privateAdd(this, _writeHeader);
    __privateAdd(this, _computeMoovSizeUpperBound);
    __privateAdd(this, _prepareTracks);
    // https://wiki.multimedia.cx/index.php/MPEG-4_Audio
    __privateAdd(this, _generateMpeg4AudioSpecificConfig);
    __privateAdd(this, _createSampleForTrack);
    __privateAdd(this, _addSampleToTrack);
    __privateAdd(this, _validateTimestamp);
    __privateAdd(this, _finalizeCurrentChunk);
    __privateAdd(this, _finalizeFragment);
    __privateAdd(this, _maybeFlushStreamingTargetWriter);
    __privateAdd(this, _ensureNotFinalized);
    __privateAdd(this, _options, void 0);
    __privateAdd(this, _writer, void 0);
    __privateAdd(this, _ftypSize, void 0);
    __privateAdd(this, _mdat, void 0);
    __privateAdd(this, _videoTrack, null);
    __privateAdd(this, _audioTrack, null);
    // Creation time in the MP4 epoch (seconds since 1904-01-01).
    __privateAdd(this, _creationTime, Math.floor(Date.now() / 1e3) + TIMESTAMP_OFFSET);
    __privateAdd(this, _finalizedChunks, []);
    // Fields for fragmented MP4:
    __privateAdd(this, _nextFragmentNumber, 1);
    __privateAdd(this, _videoSampleQueue, []);
    __privateAdd(this, _audioSampleQueue, []);
    __privateAdd(this, _finalized, false);
    __privateMethod(this, _validateOptions, validateOptions_fn).call(this, options);
    // Clone the option sub-objects so later caller mutation can't affect us.
    options.video = deepClone(options.video);
    options.audio = deepClone(options.audio);
    options.fastStart = deepClone(options.fastStart);
    this.target = options.target;
    __privateSet(this, _options, {
      firstTimestampBehavior: "strict",
      ...options
    });
    // Pick the writer implementation matching the target type.
    if (options.target instanceof ArrayBufferTarget) {
      __privateSet(this, _writer, new ArrayBufferTargetWriter(options.target));
    } else if (options.target instanceof StreamTarget) {
      __privateSet(this, _writer, options.target.options?.chunked ? new ChunkedStreamTargetWriter(options.target) : new StreamTargetWriter(options.target));
    } else if (options.target instanceof FileSystemWritableFileStreamTarget) {
      __privateSet(this, _writer, new FileSystemWritableFileStreamTargetWriter(options.target));
    } else {
      throw new Error(`Invalid target: ${options.target}`);
    }
    __privateMethod(this, _prepareTracks, prepareTracks_fn).call(this);
    __privateMethod(this, _writeHeader, writeHeader_fn).call(this);
  }
  /** Adds an EncodedVideoChunk; copies its data out and forwards to addVideoChunkRaw. */
  addVideoChunk(sample, meta, timestamp, compositionTimeOffset) {
    let data = new Uint8Array(sample.byteLength);
    sample.copyTo(data);
    this.addVideoChunkRaw(
      data,
      sample.type,
      timestamp ?? sample.timestamp,
      sample.duration,
      meta,
      compositionTimeOffset
    );
  }
  /** Adds a raw video sample. In fragmented mode, samples are interleaved with audio by decode timestamp. */
  addVideoChunkRaw(data, type, timestamp, duration, meta, compositionTimeOffset) {
    __privateMethod(this, _ensureNotFinalized, ensureNotFinalized_fn).call(this);
    if (!__privateGet(this, _options).video)
      throw new Error("No video track declared.");
    if (typeof __privateGet(this, _options).fastStart === "object" && __privateGet(this, _videoTrack).samples.length === __privateGet(this, _options).fastStart.expectedVideoChunks) {
      throw new Error(`Cannot add more video chunks than specified in 'fastStart' (${__privateGet(this, _options).fastStart.expectedVideoChunks}).`);
    }
    let videoSample = __privateMethod(this, _createSampleForTrack, createSampleForTrack_fn).call(this, __privateGet(this, _videoTrack), data, type, timestamp, duration, meta, compositionTimeOffset);
    if (__privateGet(this, _options).fastStart === "fragmented" && __privateGet(this, _audioTrack)) {
      // Drain queued audio samples that come before this video sample, then
      // either emit this sample or queue it until audio catches up.
      while (__privateGet(this, _audioSampleQueue).length > 0 && __privateGet(this, _audioSampleQueue)[0].decodeTimestamp <= videoSample.decodeTimestamp) {
        let audioSample = __privateGet(this, _audioSampleQueue).shift();
        __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
      }
      if (videoSample.decodeTimestamp <= __privateGet(this, _audioTrack).lastDecodeTimestamp) {
        __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
      } else {
        __privateGet(this, _videoSampleQueue).push(videoSample);
      }
    } else {
      __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
    }
  }
  /** Adds an EncodedAudioChunk; copies its data out and forwards to addAudioChunkRaw. */
  addAudioChunk(sample, meta, timestamp) {
    let data = new Uint8Array(sample.byteLength);
    sample.copyTo(data);
    this.addAudioChunkRaw(data, sample.type, timestamp ?? sample.timestamp, sample.duration, meta);
  }
  /** Adds a raw audio sample. Mirror image of addVideoChunkRaw's interleaving logic. */
  addAudioChunkRaw(data, type, timestamp, duration, meta) {
    __privateMethod(this, _ensureNotFinalized, ensureNotFinalized_fn).call(this);
    if (!__privateGet(this, _options).audio)
      throw new Error("No audio track declared.");
    if (typeof __privateGet(this, _options).fastStart === "object" && __privateGet(this, _audioTrack).samples.length === __privateGet(this, _options).fastStart.expectedAudioChunks) {
      throw new Error(`Cannot add more audio chunks than specified in 'fastStart' (${__privateGet(this, _options).fastStart.expectedAudioChunks}).`);
    }
    let audioSample = __privateMethod(this, _createSampleForTrack, createSampleForTrack_fn).call(this, __privateGet(this, _audioTrack), data, type, timestamp, duration, meta);
    if (__privateGet(this, _options).fastStart === "fragmented" && __privateGet(this, _videoTrack)) {
      while (__privateGet(this, _videoSampleQueue).length > 0 && __privateGet(this, _videoSampleQueue)[0].decodeTimestamp <= audioSample.decodeTimestamp) {
        let videoSample = __privateGet(this, _videoSampleQueue).shift();
        __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
      }
      if (audioSample.decodeTimestamp <= __privateGet(this, _videoTrack).lastDecodeTimestamp) {
        __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
      } else {
        __privateGet(this, _audioSampleQueue).push(audioSample);
      }
    } else {
      __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
    }
  }
  /** Finalizes the file, making it ready for use. Must be called after all video and audio chunks have been added. */
  finalize() {
    if (__privateGet(this, _finalized)) {
      throw new Error("Cannot finalize a muxer more than once.");
    }
    if (__privateGet(this, _options).fastStart === "fragmented") {
      // Flush any samples still queued for interleaving, then the last fragment.
      for (let videoSample of __privateGet(this, _videoSampleQueue))
        __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
      for (let audioSample of __privateGet(this, _audioSampleQueue))
        __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
      __privateMethod(this, _finalizeFragment, finalizeFragment_fn).call(this, false);
    } else {
      if (__privateGet(this, _videoTrack))
        __privateMethod(this, _finalizeCurrentChunk, finalizeCurrentChunk_fn).call(this, __privateGet(this, _videoTrack));
      if (__privateGet(this, _audioTrack))
        __privateMethod(this, _finalizeCurrentChunk, finalizeCurrentChunk_fn).call(this, __privateGet(this, _audioTrack));
    }
    let tracks = [__privateGet(this, _videoTrack), __privateGet(this, _audioTrack)].filter(Boolean);
    if (__privateGet(this, _options).fastStart === "in-memory") {
      let mdatSize;
      // Chunk offsets depend on the moov size, and the moov size can change if
      // offsets cross 2^32 (forcing co64/large mdat) — hence up to two passes.
      for (let i = 0; i < 2; i++) {
        let movieBox2 = moov(tracks, __privateGet(this, _creationTime));
        let movieBoxSize = __privateGet(this, _writer).measureBox(movieBox2);
        mdatSize = __privateGet(this, _writer).measureBox(__privateGet(this, _mdat));
        let currentChunkPos = __privateGet(this, _writer).pos + movieBoxSize + mdatSize;
        for (let chunk of __privateGet(this, _finalizedChunks)) {
          chunk.offset = currentChunkPos;
          for (let { data } of chunk.samples) {
            currentChunkPos += data.byteLength;
            mdatSize += data.byteLength;
          }
        }
        if (currentChunkPos < 2 ** 32)
          break;
        if (mdatSize >= 2 ** 32)
          __privateGet(this, _mdat).largeSize = true;
      }
      let movieBox = moov(tracks, __privateGet(this, _creationTime));
      __privateGet(this, _writer).writeBox(movieBox);
      __privateGet(this, _mdat).size = mdatSize;
      __privateGet(this, _writer).writeBox(__privateGet(this, _mdat));
      for (let chunk of __privateGet(this, _finalizedChunks)) {
        for (let sample of chunk.samples) {
          __privateGet(this, _writer).write(sample.data);
          sample.data = null;
        }
      }
    } else if (__privateGet(this, _options).fastStart === "fragmented") {
      // Append the random-access index, then patch the mfro size field (the
      // final u32 of the mfra box) with the box's true size.
      let startPos = __privateGet(this, _writer).pos;
      let mfraBox = mfra(tracks);
      __privateGet(this, _writer).writeBox(mfraBox);
      let mfraBoxSize = __privateGet(this, _writer).pos - startPos;
      __privateGet(this, _writer).seek(__privateGet(this, _writer).pos - 4);
      __privateGet(this, _writer).writeU32(mfraBoxSize);
    } else {
      let mdatPos = __privateGet(this, _writer).offsets.get(__privateGet(this, _mdat));
      let mdatSize = __privateGet(this, _writer).pos - mdatPos;
      __privateGet(this, _mdat).size = mdatSize;
      __privateGet(this, _mdat).largeSize = mdatSize >= 2 ** 32;
      __privateGet(this, _writer).patchBox(__privateGet(this, _mdat));
      let movieBox = moov(tracks, __privateGet(this, _creationTime));
      if (typeof __privateGet(this, _options).fastStart === "object") {
        // Write moov into the space reserved at the start; pad the leftover
        // reserved bytes with a free box.
        __privateGet(this, _writer).seek(__privateGet(this, _ftypSize));
        __privateGet(this, _writer).writeBox(movieBox);
        let remainingBytes = mdatPos - __privateGet(this, _writer).pos;
        __privateGet(this, _writer).writeBox(free(remainingBytes));
      } else {
        __privateGet(this, _writer).writeBox(movieBox);
      }
    }
    __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
    __privateGet(this, _writer).finalize();
    __privateSet(this, _finalized, true);
  }
};
_options = new WeakMap();
_writer = new WeakMap();
_ftypSize = new WeakMap();
_mdat = new WeakMap();
_videoTrack = new WeakMap();
_audioTrack = new WeakMap();
_creationTime = new WeakMap();
_finalizedChunks = new WeakMap();
_nextFragmentNumber = new WeakMap();
_videoSampleQueue = new WeakMap();
_audioSampleQueue = new WeakMap();
_finalized = new WeakMap();
_validateOptions = new WeakSet();
// Validates the user-supplied muxer options, throwing a descriptive error for
// anything unsupported. Performs no mutation.
validateOptions_fn = function(options) {
  const { video, audio, firstTimestampBehavior, fastStart } = options;
  if (video) {
    if (!SUPPORTED_VIDEO_CODECS2.includes(video.codec)) {
      throw new Error(`Unsupported video codec: ${video.codec}`);
    }
    const videoRotation = video.rotation;
    const isPlainAngle = typeof videoRotation === "number";
    if (isPlainAngle && ![0, 90, 180, 270].includes(videoRotation)) {
      throw new Error(`Invalid video rotation: ${videoRotation}. Has to be 0, 90, 180 or 270.`);
    } else if (Array.isArray(videoRotation) && (videoRotation.length !== 9 || videoRotation.some((value) => typeof value !== "number"))) {
      // A rotation may alternatively be given as a full 3x3 transformation matrix.
      throw new Error(`Invalid video transformation matrix: ${videoRotation.join()}`);
    }
  }
  if (audio && !SUPPORTED_AUDIO_CODECS2.includes(audio.codec)) {
    throw new Error(`Unsupported audio codec: ${audio.codec}`);
  }
  if (firstTimestampBehavior && !FIRST_TIMESTAMP_BEHAVIORS.includes(firstTimestampBehavior)) {
    throw new Error(`Invalid first timestamp behavior: ${firstTimestampBehavior}`);
  }
  if (typeof fastStart === "object") {
    // Object form requires expected chunk counts for every declared track so
    // the moov reservation can be sized.
    if (video && fastStart.expectedVideoChunks === void 0) {
      throw new Error(`'fastStart' is an object but is missing property 'expectedVideoChunks'.`);
    }
    if (audio && fastStart.expectedAudioChunks === void 0) {
      throw new Error(`'fastStart' is an object but is missing property 'expectedAudioChunks'.`);
    }
  } else if (![false, "in-memory", "fragmented"].includes(fastStart)) {
    throw new Error(`'fastStart' option must be false, 'in-memory', 'fragmented' or an object.`);
  }
};
_writeHeader = new WeakSet();
// Writes the ftyp box and, depending on the fastStart mode, reserves moov
// space and/or opens the mdat box.
writeHeader_fn = function() {
  __privateGet(this, _writer).writeBox(ftyp({
    holdsAvc: __privateGet(this, _options).video?.codec === "avc",
    fragmented: __privateGet(this, _options).fastStart === "fragmented"
  }));
  __privateSet(this, _ftypSize, __privateGet(this, _writer).pos);
  if (__privateGet(this, _options).fastStart === "in-memory") {
    __privateSet(this, _mdat, mdat(false));
  } else if (__privateGet(this, _options).fastStart === "fragmented") {
    // Fragmented files write moof/mdat pairs later; nothing to set up here.
  } else {
    if (typeof __privateGet(this, _options).fastStart === "object") {
      // Reserve room at the front of the file so moov can be written there on
      // finalize; the leftover gap is later padded with a free box.
      let moovSizeUpperBound = __privateMethod(this, _computeMoovSizeUpperBound, computeMoovSizeUpperBound_fn).call(this);
      __privateGet(this, _writer).seek(__privateGet(this, _writer).pos + moovSizeUpperBound);
    }
    __privateSet(this, _mdat, mdat(true));
    __privateGet(this, _writer).writeBox(__privateGet(this, _mdat));
  }
  __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
};
_computeMoovSizeUpperBound = new WeakSet();
// Conservatively over-estimates the finalized moov box size from the expected
// chunk counts, so enough space can be reserved at the start of the file.
// Returns undefined when fastStart is not the object form.
computeMoovSizeUpperBound_fn = function() {
  if (typeof __privateGet(this, _options).fastStart !== "object")
    return;
  let upperBound = 0;
  let sampleCounts = [
    __privateGet(this, _options).fastStart.expectedVideoChunks,
    __privateGet(this, _options).fastStart.expectedAudioChunks
  ];
  for (let n of sampleCounts) {
    if (!n)
      continue;
    // Worst-case per-track table sizes; presumably stts, stss, stsc, stsz and
    // the 64-bit chunk offset tables respectively — TODO confirm against the
    // corresponding box builders.
    upperBound += (4 + 4) * Math.ceil(2 / 3 * n);
    upperBound += 4 * n;
    upperBound += (4 + 4 + 4) * Math.ceil(2 / 3 * n);
    upperBound += 4 * n;
    upperBound += 8 * n;
  }
  // Headroom for fixed-size boxes and headers.
  upperBound += 4096;
  return upperBound;
};
// Initializes the internal track state objects from the muxer options. Called
// once up front; the tracks then accumulate samples and chunks as chunks are
// added to the muxer.
_prepareTracks = new WeakSet();
prepareTracks_fn = function() {
  if (__privateGet(this, _options).video) {
    __privateSet(this, _videoTrack, {
      id: 1,
      info: {
        type: "video",
        codec: __privateGet(this, _options).video.codec,
        width: __privateGet(this, _options).video.width,
        height: __privateGet(this, _options).video.height,
        rotation: __privateGet(this, _options).video.rotation ?? 0,
        decoderConfig: null // filled in from encoder metadata with the first chunk
      },
      timescale: 11520,
      // Timescale used by FFmpeg, contains many common frame rates as factors
      samples: [],
      finalizedChunks: [],
      currentChunk: null,
      firstDecodeTimestamp: void 0,
      lastDecodeTimestamp: -1, // -1 sentinel: no sample seen yet
      timeToSampleTable: [],
      compositionTimeOffsetTable: [],
      lastTimescaleUnits: null,
      lastSample: null,
      compactlyCodedChunkTable: []
    });
  }
  if (__privateGet(this, _options).audio) {
    // Guess a decoder config up front in case the encoder never provides one.
    let guessedCodecPrivate = __privateMethod(this, _generateMpeg4AudioSpecificConfig, generateMpeg4AudioSpecificConfig_fn).call(
      this,
      2,
      // Object type for AAC-LC, since it's the most common
      __privateGet(this, _options).audio.sampleRate,
      __privateGet(this, _options).audio.numberOfChannels
    );
    __privateSet(this, _audioTrack, {
      id: __privateGet(this, _options).video ? 2 : 1, // audio is track 2 when video exists
      info: {
        type: "audio",
        codec: __privateGet(this, _options).audio.codec,
        numberOfChannels: __privateGet(this, _options).audio.numberOfChannels,
        sampleRate: __privateGet(this, _options).audio.sampleRate,
        decoderConfig: {
          codec: __privateGet(this, _options).audio.codec,
          description: guessedCodecPrivate,
          numberOfChannels: __privateGet(this, _options).audio.numberOfChannels,
          sampleRate: __privateGet(this, _options).audio.sampleRate
        }
      },
      timescale: __privateGet(this, _options).audio.sampleRate, // one unit per audio sample
      samples: [],
      finalizedChunks: [],
      currentChunk: null,
      firstDecodeTimestamp: void 0,
      lastDecodeTimestamp: -1,
      timeToSampleTable: [],
      compositionTimeOffsetTable: [],
      lastTimescaleUnits: null,
      lastSample: null,
      compactlyCodedChunkTable: []
    });
  }
};
// Builds an MPEG-4 AudioSpecificConfig (ISO/IEC 14496-3) for the given audio
// object type, sample rate and channel count. Used as the guessed decoder
// description when the audio encoder doesn't supply one.
// Returns the packed config as a Uint8Array (MSB-first bit packing).
var _generateMpeg4AudioSpecificConfig = new WeakSet();
var generateMpeg4AudioSpecificConfig_fn = function(objectType, sampleRate, numberOfChannels) {
  let frequencyIndices = [96e3, 88200, 64e3, 48e3, 44100, 32e3, 24e3, 22050, 16e3, 12e3, 11025, 8e3, 7350];
  let frequencyIndex = frequencyIndices.indexOf(sampleRate);
  if (frequencyIndex === -1) {
    // Fix: non-standard sample rates previously produced a corrupt config,
    // because indexOf returned -1 and (-1).toString(2) is "-1". The spec's
    // escape value 15 signals that an explicit 24-bit sample rate follows.
    frequencyIndex = 15;
  }
  let channelConfig = numberOfChannels;
  let configBits = "";
  configBits += objectType.toString(2).padStart(5, "0");
  configBits += frequencyIndex.toString(2).padStart(4, "0");
  if (frequencyIndex === 15)
    configBits += sampleRate.toString(2).padStart(24, "0");
  configBits += channelConfig.toString(2).padStart(4, "0");
  // Zero-pad to a whole number of bytes, then pack bits MSB-first.
  let paddingLength = Math.ceil(configBits.length / 8) * 8;
  configBits = configBits.padEnd(paddingLength, "0");
  let configBytes = new Uint8Array(configBits.length / 8);
  for (let i = 0; i < configBits.length; i += 8) {
    configBytes[i / 8] = parseInt(configBits.slice(i, i + 8), 2);
  }
  return configBytes;
};
// Builds the internal sample record for an encoded chunk: converts the
// microsecond timestamps to seconds, applies the firstTimestampBehavior
// adjustment, and merges any decoder config delivered in the chunk metadata
// into the track info.
_createSampleForTrack = new WeakSet();
createSampleForTrack_fn = function(track, data, type, timestamp, duration, meta, compositionTimeOffset) {
  let presentationTimestampInSeconds = timestamp / 1e6;
  // DTS = PTS minus the composition (B-frame reordering) offset, if any.
  let decodeTimestampInSeconds = (timestamp - (compositionTimeOffset ?? 0)) / 1e6;
  let durationInSeconds = duration / 1e6;
  let adjusted = __privateMethod(this, _validateTimestamp, validateTimestamp_fn).call(this, presentationTimestampInSeconds, decodeTimestampInSeconds, track);
  presentationTimestampInSeconds = adjusted.presentationTimestamp;
  decodeTimestampInSeconds = adjusted.decodeTimestamp;
  if (meta?.decoderConfig) {
    if (track.info.decoderConfig === null) {
      track.info.decoderConfig = meta.decoderConfig;
    } else {
      // Later metadata refines (not replaces) the stored config.
      Object.assign(track.info.decoderConfig, meta.decoderConfig);
    }
  }
  let sample = {
    presentationTimestamp: presentationTimestampInSeconds,
    decodeTimestamp: decodeTimestampInSeconds,
    duration: durationInSeconds,
    data,
    size: data.byteLength,
    type,
    // Will be refined once the next sample comes in
    timescaleUnitsToNextSample: intoTimescale(durationInSeconds, track.timescale)
  };
  return sample;
};
// Appends a sample to a track: maintains the run-length-encoded timing tables
// (non-fragmented mode only), groups samples into chunks, and cuts a new
// chunk or fragment once the current one is long enough.
_addSampleToTrack = new WeakSet();
addSampleToTrack_fn = function(track, sample) {
  if (__privateGet(this, _options).fastStart !== "fragmented") {
    track.samples.push(sample);
  }
  const sampleCompositionTimeOffset = intoTimescale(sample.presentationTimestamp - sample.decodeTimestamp, track.timescale);
  if (track.lastTimescaleUnits !== null) {
    // Not the first sample: compute the integer timescale delta to the
    // previous sample and fix up that sample's provisional value.
    let timescaleUnits = intoTimescale(sample.decodeTimestamp, track.timescale, false);
    let delta = Math.round(timescaleUnits - track.lastTimescaleUnits);
    track.lastTimescaleUnits += delta;
    track.lastSample.timescaleUnitsToNextSample = delta;
    if (__privateGet(this, _options).fastStart !== "fragmented") {
      // Maintain the RLE time-to-sample table.
      let lastTableEntry = last(track.timeToSampleTable);
      if (lastTableEntry.sampleCount === 1) {
        // The previous entry's delta was provisional (duration-based);
        // replace it with the measured delta and absorb this sample.
        lastTableEntry.sampleDelta = delta;
        lastTableEntry.sampleCount++;
      } else if (lastTableEntry.sampleDelta === delta) {
        lastTableEntry.sampleCount++;
      } else {
        // Delta changed: move the previous sample into a fresh 2-sample entry.
        lastTableEntry.sampleCount--;
        track.timeToSampleTable.push({
          sampleCount: 2,
          sampleDelta: delta
        });
      }
      // Maintain the RLE composition time offset table.
      const lastCompositionTimeOffsetTableEntry = last(track.compositionTimeOffsetTable);
      if (lastCompositionTimeOffsetTableEntry.sampleCompositionTimeOffset === sampleCompositionTimeOffset) {
        lastCompositionTimeOffsetTableEntry.sampleCount++;
      } else {
        track.compositionTimeOffsetTable.push({
          sampleCount: 1,
          sampleCompositionTimeOffset
        });
      }
    }
  } else {
    // First sample of this track.
    track.lastTimescaleUnits = 0;
    if (__privateGet(this, _options).fastStart !== "fragmented") {
      track.timeToSampleTable.push({
        sampleCount: 1,
        sampleDelta: intoTimescale(sample.duration, track.timescale)
      });
      track.compositionTimeOffsetTable.push({
        sampleCount: 1,
        sampleCompositionTimeOffset
      });
    }
  }
  track.lastSample = sample;
  let beginNewChunk = false;
  if (!track.currentChunk) {
    beginNewChunk = true;
  } else {
    let currentChunkDuration = sample.presentationTimestamp - track.currentChunk.startTimestamp;
    if (__privateGet(this, _options).fastStart === "fragmented") {
      // Fragments are cut on a key frame of the primary track, at >= 1 s spacing.
      let mostImportantTrack = __privateGet(this, _videoTrack) ?? __privateGet(this, _audioTrack);
      if (track === mostImportantTrack && sample.type === "key" && currentChunkDuration >= 1) {
        beginNewChunk = true;
        __privateMethod(this, _finalizeFragment, finalizeFragment_fn).call(this);
      }
    } else {
      // Regular mode: cap chunks at half a second.
      beginNewChunk = currentChunkDuration >= 0.5;
    }
  }
  if (beginNewChunk) {
    if (track.currentChunk) {
      __privateMethod(this, _finalizeCurrentChunk, finalizeCurrentChunk_fn).call(this, track);
    }
    track.currentChunk = {
      startTimestamp: sample.presentationTimestamp,
      samples: []
    };
  }
  track.currentChunk.samples.push(sample);
};
// Validates and (per the firstTimestampBehavior option) adjusts a sample's
// timestamps, enforcing that decode timestamps increase monotonically.
// Timestamps are in seconds. Returns the possibly-offset
// { presentationTimestamp, decodeTimestamp }.
_validateTimestamp = new WeakSet();
validateTimestamp_fn = function(presentationTimestamp, decodeTimestamp, track) {
  const strictTimestampBehavior = __privateGet(this, _options).firstTimestampBehavior === "strict";
  const noLastDecodeTimestamp = track.lastDecodeTimestamp === -1; // -1 = no sample seen yet
  const timestampNonZero = decodeTimestamp !== 0;
  if (strictTimestampBehavior && noLastDecodeTimestamp && timestampNonZero) {
    // Error text fixed: missing spaces ("...).Non-zero", "thedocument") and a
    // dropped "not" that inverted the intended meaning.
    throw new Error(
      `The first chunk for your media track must have a timestamp of 0 (received DTS=${decodeTimestamp}). Non-zero first timestamps are often caused by directly piping frames or audio data from a MediaStreamTrack into the encoder. Their timestamps are typically relative to the age of the document, which is probably not what you want.

If you want to offset all timestamps of a track such that the first one is zero, set firstTimestampBehavior: 'offset' in the options.
`
    );
  } else if (__privateGet(this, _options).firstTimestampBehavior === "offset" || __privateGet(this, _options).firstTimestampBehavior === "cross-track-offset") {
    if (track.firstDecodeTimestamp === void 0) {
      track.firstDecodeTimestamp = decodeTimestamp;
    }
    let baseDecodeTimestamp;
    if (__privateGet(this, _options).firstTimestampBehavior === "offset") {
      // Offset this track by its own first timestamp.
      baseDecodeTimestamp = track.firstDecodeTimestamp;
    } else {
      // Offset all tracks by the earliest first timestamp across tracks.
      baseDecodeTimestamp = Math.min(
        __privateGet(this, _videoTrack)?.firstDecodeTimestamp ?? Infinity,
        __privateGet(this, _audioTrack)?.firstDecodeTimestamp ?? Infinity
      );
    }
    decodeTimestamp -= baseDecodeTimestamp;
    presentationTimestamp -= baseDecodeTimestamp;
  }
  if (decodeTimestamp < track.lastDecodeTimestamp) {
    throw new Error(
      `Timestamps must be monotonically increasing (DTS went from ${track.lastDecodeTimestamp * 1e6} to ${decodeTimestamp * 1e6}).`
    );
  }
  track.lastDecodeTimestamp = decodeTimestamp;
  return { presentationTimestamp, decodeTimestamp };
};
// Finalizes a track's current chunk (non-fragmented modes only): records it
// in the chunk tables and, unless media data is held in memory, writes its
// sample data to the output.
_finalizeCurrentChunk = new WeakSet();
finalizeCurrentChunk_fn = function(track) {
  if (__privateGet(this, _options).fastStart === "fragmented") {
    // Message fixed: the original was missing the word "if".
    throw new Error("Can't finalize individual chunks if 'fastStart' is set to 'fragmented'.");
  }
  if (!track.currentChunk)
    return;
  track.finalizedChunks.push(track.currentChunk);
  __privateGet(this, _finalizedChunks).push(track.currentChunk);
  // Extend the compact sample-to-chunk table only when samples-per-chunk changes.
  if (track.compactlyCodedChunkTable.length === 0 || last(track.compactlyCodedChunkTable).samplesPerChunk !== track.currentChunk.samples.length) {
    track.compactlyCodedChunkTable.push({
      firstChunk: track.finalizedChunks.length,
      // 1-indexed
      samplesPerChunk: track.currentChunk.samples.length
    });
  }
  if (__privateGet(this, _options).fastStart === "in-memory") {
    // Offsets are computed later, once the in-memory layout is finalized.
    track.currentChunk.offset = 0;
    return;
  }
  track.currentChunk.offset = __privateGet(this, _writer).pos;
  for (let sample of track.currentChunk.samples) {
    __privateGet(this, _writer).write(sample.data);
    sample.data = null; // free the sample payload once written
  }
  __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
};
// Writes out the current fragment: a moof box followed by an mdat containing
// every track's current chunk. On the first fragment the moov box is written
// first. The moof is written twice over the same bytes — once to reserve
// space, then again after the data offsets are known.
_finalizeFragment = new WeakSet();
finalizeFragment_fn = function(flushStreamingWriter = true) {
  if (__privateGet(this, _options).fastStart !== "fragmented") {
    throw new Error("Can't finalize a fragment unless 'fastStart' is set to 'fragmented'.");
  }
  let tracks = [__privateGet(this, _videoTrack), __privateGet(this, _audioTrack)].filter((track) => track && track.currentChunk);
  if (tracks.length === 0)
    return;
  let fragmentNumber = __privateWrapper(this, _nextFragmentNumber)._++;
  if (fragmentNumber === 1) {
    // First fragment: emit the movie box (marked as fragmented) up front.
    let movieBox = moov(tracks, __privateGet(this, _creationTime), true);
    __privateGet(this, _writer).writeBox(movieBox);
  }
  let moofOffset = __privateGet(this, _writer).pos;
  // First moof pass: placeholder, data offsets not yet known.
  let moofBox = moof(fragmentNumber, tracks);
  __privateGet(this, _writer).writeBox(moofBox);
  {
    // Size the mdat box: header plus all sample payloads of this fragment.
    let mdatBox = mdat(false);
    let totalTrackSampleSize = 0;
    for (let track of tracks) {
      for (let sample of track.currentChunk.samples) {
        totalTrackSampleSize += sample.size;
      }
    }
    let mdatSize = __privateGet(this, _writer).measureBox(mdatBox) + totalTrackSampleSize;
    if (mdatSize >= 2 ** 32) {
      // Switch to a 64-bit ("large") size field and re-measure the header.
      mdatBox.largeSize = true;
      mdatSize = __privateGet(this, _writer).measureBox(mdatBox) + totalTrackSampleSize;
    }
    mdatBox.size = mdatSize;
    __privateGet(this, _writer).writeBox(mdatBox);
  }
  for (let track of tracks) {
    track.currentChunk.offset = __privateGet(this, _writer).pos;
    track.currentChunk.moofOffset = moofOffset;
    for (let sample of track.currentChunk.samples) {
      __privateGet(this, _writer).write(sample.data);
      sample.data = null; // free the sample payload once written
    }
  }
  // Second moof pass: rewrite it in place now that chunk offsets are known.
  let endPos = __privateGet(this, _writer).pos;
  __privateGet(this, _writer).seek(__privateGet(this, _writer).offsets.get(moofBox));
  let newMoofBox = moof(fragmentNumber, tracks);
  __privateGet(this, _writer).writeBox(newMoofBox);
  __privateGet(this, _writer).seek(endPos);
  for (let track of tracks) {
    track.finalizedChunks.push(track.currentChunk);
    __privateGet(this, _finalizedChunks).push(track.currentChunk);
    track.currentChunk = null;
  }
  if (flushStreamingWriter) {
    __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
  }
};
// Flushes buffered output when writing to a streaming target; other writer
// kinds need no explicit flush.
_maybeFlushStreamingTargetWriter = new WeakSet();
maybeFlushStreamingTargetWriter_fn = function() {
  const writer = __privateGet(this, _writer);
  if (!(writer instanceof StreamTargetWriter)) return;
  writer.flush();
};
// Guard invoked before mutating operations: adding chunks to an already
// finalized file is an error.
_ensureNotFinalized = new WeakSet();
ensureNotFinalized_fn = function() {
  if (!__privateGet(this, _finalized)) return;
  throw new Error("Cannot add new video or audio chunks after the file has been finalized.");
};
+export {
+ ArrayBufferTarget,
+ FileSystemWritableFileStreamTarget,
+ Muxer,
+ StreamTarget
+};
diff --git a/v1-com-officielle/public/mp4-muxer-main/canvasVideoExport.js b/v1-com-officielle/public/mp4-muxer-main/canvasVideoExport.js
new file mode 100644
index 0000000..15fd839
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/canvasVideoExport.js
@@ -0,0 +1,265 @@
let projectName = "komorebi"; //to be updated

// Detect the user's browser/platform from the user agent string.
var ua = navigator.userAgent;
var isSafari = ua.includes("Safari");
var isFirefox = ua.includes("Firefox");
var isIOS = ua.includes("iPhone") || ua.includes("iPad") || ua.includes("iPod");
var isAndroid = ua.includes("Android");
console.log("isSafari: "+isSafari+", isFirefox: "+isFirefox+", isIOS: "+isIOS+", isAndroid: "+isAndroid);

// On these platforms the WebCodecs muxer path is not used; fall back to
// the simpler MediaRecorder-based recording.
let useMobileRecord = isIOS || isAndroid || isFirefox;

// Shared recording state, used by the functions below.
var mediaRecorder;
var recordedChunks;
var finishedBlob;
var recordingMessageDiv = document.getElementById("videoRecordingMessageDiv");
var recordVideoState = false;
var videoRecordInterval;
var videoEncoder;
var muxer;
var mobileRecorder;
var videofps = 30;
let bitrate = 16_000_000;
+
// Exports the current canvas content as a PNG download with transparency.
// Relies on globals: `canvas`, `gl`, `drawScene`, `projectName`.
function saveImage() {
  console.log("Export png image");

  // Create a temporary canvas with the same dimensions
  const tempCanvas = document.createElement('canvas');
  tempCanvas.width = canvas.width;
  tempCanvas.height = canvas.height;
  const tempContext = tempCanvas.getContext('2d', {
    willReadFrequently: true,
    alpha: true // Enable alpha for transparency
  });

  // Skip filling the background, leaving it transparent

  // Force a render frame to ensure latest content
  drawScene();
  gl.flush();
  gl.finish();

  // Draw the WebGL canvas onto the temporary canvas
  tempContext.drawImage(canvas, 0, 0);

  // Create download link
  const link = document.createElement('a');
  link.href = tempCanvas.toDataURL('image/png');

  // NOTE(review): toLocaleDateString/TimeString may contain '/' and ':' —
  // browsers sanitize these in download names, but confirm the resulting
  // filenames are acceptable on all target platforms.
  const date = new Date();
  const filename = projectName + `_${date.toLocaleDateString()}_${date.toLocaleTimeString()}.png`;
  link.download = filename;
  link.click();

  // Cleanup
  tempCanvas.remove();
}
+
// Flips the recording flag and dispatches to the matching start/stop routine.
function toggleVideoRecord(){
  const wasRecording = recordVideoState;
  recordVideoState = !wasRecording;
  if (wasRecording) {
    chooseEndRecordingFunction();
  } else {
    chooseRecordingFunction();
  }
}
+
// Starts recording via the platform-appropriate path: MediaRecorder on
// mobile/Firefox, the WebCodecs muxer elsewhere.
function chooseRecordingFunction(){
  //resetAnimation();
  (useMobileRecord ? startMobileRecording : recordVideoMuxer)();
}
+
// Stops recording via the platform-appropriate path.
function chooseEndRecordingFunction(){
  if (useMobileRecord) {
    mobileRecorder.stop();
    return;
  }
  finalizeVideo();
}
+
//record html canvas element and export as mp4 video
//source: https://devtails.xyz/adam/how-to-save-html-canvas-to-mp4-using-web-codecs-api
/**
 * Starts WebCodecs-based recording of the global `canvas`: configures an
 * H.264 VideoEncoder feeding an in-memory MP4 muxer, then samples the canvas
 * on an interval at `videofps` frames per second until finalizeVideo() runs.
 */
async function recordVideoMuxer() {
  console.log("start muxer video recording");

  // H.264 needs even dimensions; height is additionally forced to a multiple of 4.
  var videoWidth = Math.floor(canvas.width / 2) * 2;
  var videoHeight = Math.floor(canvas.height / 4) * 4;

  console.log("Video dimensions: " + videoWidth + ", " + videoHeight);

  //display user message
  recordingMessageDiv.classList.remove("hidden");

  recordVideoState = true;

  // NOTE(review): the original grabbed a 2D context here, but `canvas` is a
  // WebGL canvas (see saveImage/drawScene), so getContext("2d") returned null
  // and the result was never used — removed as dead code.

  muxer = new Mp4Muxer.Muxer({
    target: new Mp4Muxer.ArrayBufferTarget(),
    video: {
      // If you change this, make sure to change the VideoEncoder codec as well
      codec: "avc",
      width: videoWidth,
      height: videoHeight,
    },

    firstTimestampBehavior: 'offset',

    // mp4-muxer docs claim you should always use this with ArrayBufferTarget
    fastStart: "in-memory",
  });

  videoEncoder = new VideoEncoder({
    output: (chunk, meta) => muxer.addVideoChunk(chunk, meta),
    error: (e) => console.error(e),
  });

  // This codec should work in most browsers
  // See https://dmnsgn.github.io/media-codecs for list of codecs and see if your browser supports
  videoEncoder.configure({
    codec: "avc1.4d0032",
    width: videoWidth,
    height: videoHeight,
    bitrate: bitrate,
    bitrateMode: "variable",
  });
  //NEW codec: "avc1.4d0032",
  //ORIGINAL codec: "avc1.42003e",

  var frameNumber = 0;

  // Take a snapshot of the canvas every 1000/videofps ms and encode it.
  videoRecordInterval = setInterval(function () {
    if (recordVideoState == true) {
      drawScene();
      renderCanvasToVideoFrameAndEncode({
        canvas,
        videoEncoder,
        frameNumber,
        videofps
      });
      frameNumber++;
    }
  }, 1000 / videofps);
}
+
//finish and export video
/**
 * Stops the interval-driven capture, flushes the encoder, finalizes the MP4
 * muxer and triggers a download of the resulting file.
 */
async function finalizeVideo(){
  console.log("finalize muxer video");
  togglePlayPause();
  clearInterval(videoRecordInterval);
  //playAnimationToggle = false;
  recordVideoState = false;

  // Forces all pending encodes to complete
  await videoEncoder.flush();
  muxer.finalize();
  let buffer = muxer.target.buffer;
  // Build the blob once and reuse it (the original constructed an identical
  // second Blob just to pass to downloadBlob).
  finishedBlob = new Blob([buffer]);
  downloadBlob(finishedBlob);

  //hide user message
  recordingMessageDiv.classList.add("hidden");
  togglePlayPause();
}
+
/**
 * Captures the canvas into a VideoFrame, hands it to the encoder, and
 * releases the frame's media resource immediately.
 */
async function renderCanvasToVideoFrameAndEncode({ canvas, videoEncoder, frameNumber, videofps }) {
  // Timestamp is in microseconds; frames are spaced evenly at videofps.
  const frame = new VideoFrame(canvas, {
    timestamp: (frameNumber * 1e6) / videofps,
  });
  videoEncoder.encode(frame);
  frame.close();
}
+
/**
 * Triggers a browser download of a recorded video.
 * Fix: both call sites pass a Blob argument, but the function was declared
 * parameterless and always read the global — the parameter (defaulting to
 * `finishedBlob`) keeps both styles working. Also removes the leaked anchor.
 * @param {Blob} [blob] - Blob to download; defaults to the global finishedBlob.
 */
function downloadBlob(blob = finishedBlob) {
  console.log("download video");
  let url = window.URL.createObjectURL(blob);
  let a = document.createElement("a");
  a.style.display = "none";
  a.href = url;
  const date = new Date();
  const filename = projectName+`_${date.toLocaleDateString()}_${date.toLocaleTimeString()}.mp4`;
  a.download = filename;
  document.body.appendChild(a);
  a.click();
  a.remove(); // the anchor was previously left attached to the document
  window.URL.revokeObjectURL(url);
}
+
//record and download videos on mobile devices
// MediaRecorder-based fallback path (iOS/Android/Firefox).
function startMobileRecording(){
  var stream = canvas.captureStream(videofps);
  // NOTE(review): MediaRecorder options use `mimeType`, not `type`, so this
  // option is silently ignored and the browser's default container/codec is
  // used. Confirm before "fixing": 'video/mp4' is not supported by
  // MediaRecorder everywhere and would throw where unsupported.
  mobileRecorder = new MediaRecorder(stream, { 'type': 'video/mp4' });
  mobileRecorder.addEventListener('dataavailable', finalizeMobileVideo);

  console.log("start simple video recording");
  console.log("Video dimensions: "+canvas.width+", "+canvas.height);

  // Show the "recording" message.
  recordingMessageDiv.classList.remove("hidden");

  recordVideoState = true;
  mobileRecorder.start(); //start mobile video recording

}
+
// 'dataavailable' handler for the MediaRecorder fallback: wraps the recorded
// data in a Blob and triggers the download.
function finalizeMobileVideo(e) {
  // Small delay so the recorder has fully stopped before packaging the data.
  setTimeout(function(){
    console.log("finish simple video recording");
    togglePlayPause();
    recordVideoState = false;
    /*
    mobileRecorder.stop();*/
    var videoData = [ e.data ];
    // NOTE(review): the data is labeled video/mp4, but the actual container
    // depends on the browser's MediaRecorder default (see startMobileRecording).
    finishedBlob = new Blob(videoData, { 'type': 'video/mp4' });
    downloadBlob(finishedBlob);

    //hide user message
    recordingMessageDiv.classList.add("hidden");
    togglePlayPause();

  },500);
}
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/demo-streaming/index.html b/v1-com-officielle/public/mp4-muxer-main/demo-streaming/index.html
new file mode 100644
index 0000000..8313554
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/demo-streaming/index.html
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+ MP4 muxer streaming demo
+
+
+
+
+
+
+ MP4 muxer streaming demo - draw something!
+ The live canvas state and your microphone input will be recorded,
muxed into a fragmented MP4 stream and shown live in the <video> element.
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/demo-streaming/script.js b/v1-com-officielle/public/mp4-muxer-main/demo-streaming/script.js
new file mode 100644
index 0000000..249ebca
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/demo-streaming/script.js
@@ -0,0 +1,204 @@
const canvas = document.querySelector('canvas');
// desynchronized: decouples canvas painting from the event loop for lower latency.
const ctx = canvas.getContext('2d', { desynchronized: true });
const streamPreview = document.querySelector('#stream-preview');
const startRecordingButton = document.querySelector('#start-recording');
const endRecordingButton = document.querySelector('#end-recording');
const recordingStatus = document.querySelector('#recording-status');

/** RECORDING & MUXING STUFF */

// Per-recording session state, reset between recordings.
let muxer = null;
let videoEncoder = null;
let audioEncoder = null;
let startTime = null;     // document.timeline time (ms) when recording started
let recording = false;
let audioTrack = null;    // microphone MediaStreamTrack, if acquired
let intervalId = null;    // interval driving encodeVideoFrame
let lastKeyFrame = null;  // elapsed ms of the last forced key frame
let framesGenerated = 0;
+
/**
 * Starts recording: acquires the microphone (when possible), attaches a
 * MediaSource to the <video> element for live playback of the muxed stream,
 * then configures WebCodecs encoders feeding a fragmented-MP4 muxer.
 */
const startRecording = async () => {
  // Check for VideoEncoder availability
  if (typeof VideoEncoder === 'undefined') {
    alert("Looks like your user agent doesn't support VideoEncoder / WebCodecs API yet.");
    return;
  }

  startRecordingButton.style.display = 'none';

  // Check for AudioEncoder availability
  if (typeof AudioEncoder !== 'undefined') {
    // Try to get access to the user's microphone
    try {
      let userMedia = await navigator.mediaDevices.getUserMedia({ video: false, audio: true });
      audioTrack = userMedia.getAudioTracks()[0];
    } catch (e) {} // permission denied / no mic: fall through and record video-only
    if (!audioTrack) console.warn("Couldn't acquire a user media audio track.");
  } else {
    console.warn('AudioEncoder not available; no need to acquire a user media audio track.');
  }

  // Live preview: the muxer's output is streamed into this MediaSource.
  let mediaSource = new MediaSource();
  streamPreview.src = URL.createObjectURL(mediaSource);
  streamPreview.play();

  await new Promise(resolve => mediaSource.onsourceopen = resolve);

  // We'll append ArrayBuffers to this as the muxer starts to spit out chunks
  let sourceBuffer = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.64001F, mp4a.40.2"');

  endRecordingButton.style.display = 'block';

  // NOTE(review): assumes the track exposes sampleRate capabilities — verify
  // this holds on all target browsers.
  let audioSampleRate = audioTrack?.getCapabilities().sampleRate.max;

  // Create an MP4 muxer with a video track and maybe an audio track
  muxer = new Mp4Muxer.Muxer({
    target: new Mp4Muxer.StreamTarget({
      onData: buffer => sourceBuffer.appendBuffer(buffer)
    }),

    video: {
      codec: 'avc',
      width: canvas.width,
      height: canvas.height
    },
    audio: audioTrack ? {
      codec: 'aac',
      sampleRate: audioSampleRate,
      numberOfChannels: 1
    } : undefined,

    // Fragmented MP4 lets the muxer emit self-contained chunks as it goes,
    // which is what allows streaming them into the SourceBuffer above.
    // (Comment corrected: this demo uses StreamTarget, not ArrayBufferTarget.)
    fastStart: 'fragmented',

    // Because we're directly pumping a MediaStreamTrack's data into it, which doesn't start at timestamp = 0
    firstTimestampBehavior: 'offset'
  });

  videoEncoder = new VideoEncoder({
    output: (chunk, meta) => muxer.addVideoChunk(chunk, meta),
    error: e => console.error(e)
  });
  videoEncoder.configure({
    codec: 'avc1.64001F',
    width: canvas.width,
    height: canvas.height,
    bitrate: 1e6
  });

  if (audioTrack) {
    audioEncoder = new AudioEncoder({
      output: (chunk, meta) => muxer.addAudioChunk(chunk, meta),
      error: e => console.error(e)
    });
    audioEncoder.configure({
      codec: 'mp4a.40.2', // AAC-LC
      numberOfChannels: 1,
      sampleRate: audioSampleRate,
      bitrate: 128000
    });

    // Create a MediaStreamTrackProcessor to get AudioData chunks from the audio track
    let trackProcessor = new MediaStreamTrackProcessor({ track: audioTrack });
    let consumer = new WritableStream({
      write(audioData) {
        if (!recording) return; // drop mic data that arrives before/after recording
        audioEncoder.encode(audioData);
        audioData.close();
      }
    });
    trackProcessor.readable.pipeTo(consumer);
  }

  startTime = document.timeline.currentTime;
  recording = true;
  lastKeyFrame = -Infinity;
  framesGenerated = 0;

  // Encode one frame immediately, then ~30 fps on an interval.
  encodeVideoFrame();
  intervalId = setInterval(encodeVideoFrame, 1000/30);
};
startRecordingButton.addEventListener('click', startRecording);
+
// Captures one canvas frame, forcing a key frame at least every 0.5 s, and
// updates the on-screen recording status line.
const encodeVideoFrame = () => {
  const elapsedTime = document.timeline.currentTime - startTime;
  const frameIndex = framesGenerated++;
  // Equally spaced 30 fps frames; timestamps/durations are in microseconds.
  const frame = new VideoFrame(canvas, {
    timestamp: frameIndex * 1e6 / 30,
    duration: 1e6 / 30
  });

  let needsKeyFrame = false;
  if (elapsedTime - lastKeyFrame >= 500) {
    lastKeyFrame = elapsedTime;
    needsKeyFrame = true;
  }

  videoEncoder.encode(frame, { keyFrame: needsKeyFrame });
  frame.close();

  const indicator = elapsedTime % 1000 < 500 ? '🔴' : '⚫';
  recordingStatus.textContent =
    `${indicator} Recording - ${(elapsedTime / 1000).toFixed(1)} s`;
};
+
/**
 * Stops recording: halts the frame interval and the mic track, flushes both
 * encoders, finalizes the fragmented MP4 stream and resets session state.
 */
const endRecording = async () => {
  endRecordingButton.style.display = 'none';
  recordingStatus.textContent = '';
  recording = false;

  clearInterval(intervalId);
  audioTrack?.stop();

  // Flush pending encodes before finalizing the stream.
  await videoEncoder?.flush();
  await audioEncoder?.flush();
  muxer.finalize();

  videoEncoder = null;
  audioEncoder = null;
  muxer = null;
  startTime = null;
  // Removed `firstAudioTimestamp = null;` — that variable is never declared
  // or read anywhere in this script; the assignment only created a stray
  // global (and would throw a ReferenceError in strict/module context).

  startRecordingButton.style.display = 'block';
};
endRecordingButton.addEventListener('click', endRecording);
+
/** CANVAS DRAWING STUFF */

// White background so the recording doesn't start on a transparent canvas.
ctx.fillStyle = 'white';
ctx.fillRect(0, 0, canvas.width, canvas.height);

let drawing = false;
let lastPos = { x: 0, y: 0 };

// Converts a pointer event's client coordinates to canvas-local coordinates.
const getRelativeMousePos = (e) => {
  let rect = canvas.getBoundingClientRect();
  return { x: e.clientX - rect.x, y: e.clientY - rect.y };
};

// Strokes a single segment between two canvas-local points.
const drawLine = (from, to) => {
  ctx.beginPath();
  ctx.moveTo(from.x, from.y);
  ctx.lineTo(to.x, to.y);
  ctx.strokeStyle = 'black';
  ctx.lineWidth = 3;
  ctx.lineCap = 'round';
  ctx.stroke();
};

canvas.addEventListener('pointerdown', (e) => {
  if (e.button !== 0) return; // primary button only

  drawing = true;
  lastPos = getRelativeMousePos(e);
  drawLine(lastPos, lastPos); // draw a dot on click
});
window.addEventListener('pointerup', () => {
  drawing = false;
});
// Fix: was 'mousemove'. Down/up already use pointer events, so touch/pen
// could start a stroke but never extend it; 'pointermove' keeps all three
// handlers consistent across input types.
window.addEventListener('pointermove', (e) => {
  if (!drawing) return;

  let newPos = getRelativeMousePos(e);
  drawLine(lastPos, newPos);
  lastPos = newPos;
});
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/demo-streaming/style.css b/v1-com-officielle/public/mp4-muxer-main/demo-streaming/style.css
new file mode 100644
index 0000000..fbf448f
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/demo-streaming/style.css
@@ -0,0 +1,68 @@
/* Full-viewport dark layout; the body centers the demo UI. */
html, body {
  margin: 0;
  width: 100%;
  height: 100%;
  background: #120d17;
  color: white;
  font-family: monospace;
}

body {
  display: flex;
  align-items: center;
  justify-content: center;
}

/* Prevent accidental text selection while drawing on the canvas. */
* {
  user-select: none;
}

main {
  width: 100%;
}

h1 {
  margin: 0;
  font-weight: normal;
  text-align: center;
  margin-bottom: 10px;
}

h2 {
  margin: 0;
  font-weight: normal;
  text-align: center;
  font-size: 14px;
  margin-bottom: 20px;
}

canvas {
  border-radius: 10px;
  outline: 3px solid rgb(202, 202, 202);
}

/* Fixed-height row so the layout doesn't jump when buttons toggle. */
#controls {
  margin-bottom: 20px;
  display: flex;
  justify-content: center;
  height: 38px;
}

#center {
  display: flex;
  justify-content: center;
  align-items: center;
  gap: 10px;
}

button {
  font-size: 20px;
  padding: 5px 8px;
}

/* Recording status line; fixed height to avoid layout shift. */
p {
  margin: 0;
  text-align: center;
  margin-top: 20px;
  height: 20px;
}
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/demo/index.html b/v1-com-officielle/public/mp4-muxer-main/demo/index.html
new file mode 100644
index 0000000..3f3d20c
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/demo/index.html
@@ -0,0 +1,24 @@
+
+
+
+
+
+
+ MP4 muxer demo
+
+
+
+
+
+
+ MP4 muxer demo - draw something!
+ The live canvas state and your microphone input will be recorded
and muxed into an MP4 file.
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/demo/script.js b/v1-com-officielle/public/mp4-muxer-main/demo/script.js
new file mode 100644
index 0000000..e9baa4f
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/demo/script.js
@@ -0,0 +1,206 @@
+const canvas = document.querySelector('canvas');
+const ctx = canvas.getContext('2d', { desynchronized: true });
+const startRecordingButton = document.querySelector('#start-recording');
+const endRecordingButton = document.querySelector('#end-recording');
+const recordingStatus = document.querySelector('#recording-status');
+
+/** RECORDING & MUXING STUFF */
+
+let muxer = null;
+let videoEncoder = null;
+let audioEncoder = null;
+let startTime = null;
+let recording = false;
+let audioTrack = null;
+let intervalId = null;
+let lastKeyFrame = null;
+let framesGenerated = 0;
+
+const startRecording = async () => {
+	// Check for VideoEncoder availability
+	if (typeof VideoEncoder === 'undefined') {
+		alert("Looks like your user agent doesn't support VideoEncoder / WebCodecs API yet.");
+		return;
+	}
+
+	startRecordingButton.style.display = 'none';
+
+	// Check for AudioEncoder availability
+	if (typeof AudioEncoder !== 'undefined') {
+		// Try to get access to the user's microphone
+		try {
+			let userMedia = await navigator.mediaDevices.getUserMedia({ video: false, audio: true });
+			audioTrack = userMedia.getAudioTracks()[0];
+		} catch (e) {} // deliberate best-effort: a denied/missing mic just means a video-only recording (warned below)
+		if (!audioTrack) console.warn("Couldn't acquire a user media audio track.");
+	} else {
+		console.warn('AudioEncoder not available; no need to acquire a user media audio track.');
+	}
+
+	endRecordingButton.style.display = 'block';
+
+	let audioSampleRate = audioTrack?.getCapabilities().sampleRate.max; // NOTE(review): assumes getCapabilities() reports sampleRate — not guaranteed on every browser, TODO confirm
+
+	// Create an MP4 muxer with a video track and maybe an audio track
+	muxer = new Mp4Muxer.Muxer({
+		target: new Mp4Muxer.ArrayBufferTarget(),
+
+		video: {
+			codec: 'avc',
+			width: canvas.width,
+			height: canvas.height
+		},
+		audio: audioTrack ? {
+			codec: 'aac',
+			sampleRate: audioSampleRate,
+			numberOfChannels: 1
+		} : undefined,
+
+		// Puts metadata to the start of the file. Since we're using ArrayBufferTarget anyway, this makes no difference
+		// to memory footprint.
+		fastStart: 'in-memory',
+
+		// Because we're directly pumping a MediaStreamTrack's data into it, which doesn't start at timestamp = 0
+		firstTimestampBehavior: 'offset'
+	});
+
+	videoEncoder = new VideoEncoder({
+		output: (chunk, meta) => muxer.addVideoChunk(chunk, meta), // encoder output feeds straight into the muxer
+		error: e => console.error(e)
+	});
+	videoEncoder.configure({
+		codec: 'avc1.42001f', // H.264 Baseline profile, level 3.1
+		width: canvas.width,
+		height: canvas.height,
+		bitrate: 1e6
+	});
+
+	if (audioTrack) {
+		audioEncoder = new AudioEncoder({
+			output: (chunk, meta) => muxer.addAudioChunk(chunk, meta),
+			error: e => console.error(e)
+		});
+		audioEncoder.configure({
+			codec: 'mp4a.40.2', // AAC-LC
+			numberOfChannels: 1,
+			sampleRate: audioSampleRate,
+			bitrate: 128000
+		});
+
+		// Create a MediaStreamTrackProcessor to get AudioData chunks from the audio track
+		let trackProcessor = new MediaStreamTrackProcessor({ track: audioTrack });
+		let consumer = new WritableStream({
+			write(audioData) {
+				if (!recording) return; // drop microphone samples once recording has ended
+				audioEncoder.encode(audioData);
+				audioData.close();
+			}
+		});
+		trackProcessor.readable.pipeTo(consumer); // NOTE(review): floating promise — pipe rejections are unhandled, consider a .catch
+	}
+
+	startTime = document.timeline.currentTime;
+	recording = true;
+	lastKeyFrame = -Infinity; // forces a key frame on the very first encoded frame
+	framesGenerated = 0;
+
+	encodeVideoFrame();
+	intervalId = setInterval(encodeVideoFrame, 1000/30); // ~30 fps capture cadence
+};
+startRecordingButton.addEventListener('click', startRecording);
+
+const encodeVideoFrame = () => {
+	let elapsedTime = document.timeline.currentTime - startTime; // wall-clock ms since recording started (drives key-frame cadence + status UI)
+	let frame = new VideoFrame(canvas, {
+		timestamp: framesGenerated * 1e6 / 30, // Ensure equally-spaced frames every 1/30th of a second
+		duration: 1e6 / 30
+	});
+	framesGenerated++;
+
+	// Ensure a video key frame at least every 5 seconds for good scrubbing
+	let needsKeyFrame = elapsedTime - lastKeyFrame >= 5000;
+	if (needsKeyFrame) lastKeyFrame = elapsedTime;
+
+	videoEncoder.encode(frame, { keyFrame: needsKeyFrame });
+	frame.close(); // release the frame's backing resources promptly
+
+	recordingStatus.textContent =
+		`${elapsedTime % 1000 < 500 ? '🔴' : '⚫'} Recording - ${(elapsedTime / 1000).toFixed(1)} s`;
+};
+
+const endRecording = async () => {
+ endRecordingButton.style.display = 'none';
+ recordingStatus.textContent = '';
+ recording = false;
+
+ clearInterval(intervalId);
+ audioTrack?.stop();
+
+ await videoEncoder?.flush();
+ await audioEncoder?.flush();
+ muxer.finalize();
+
+ let buffer = muxer.target.buffer;
+ downloadBlob(new Blob([buffer]));
+
+ videoEncoder = null;
+ audioEncoder = null;
+ muxer = null;
+ startTime = null;
+ firstAudioTimestamp = null;
+
+ startRecordingButton.style.display = 'block';
+};
+endRecordingButton.addEventListener('click', endRecording);
+
+const downloadBlob = (blob) => {
+ let url = window.URL.createObjectURL(blob);
+ let a = document.createElement('a');
+ a.style.display = 'none';
+ a.href = url;
+ a.download = 'davinci.mp4';
+ document.body.appendChild(a);
+ a.click();
+ window.URL.revokeObjectURL(url);
+};
+
+/** CANVAS DRAWING STUFF */
+
+ctx.fillStyle = 'white';
+ctx.fillRect(0, 0, canvas.width, canvas.height); // start from an opaque white canvas
+
+let drawing = false;
+let lastPos = { x: 0, y: 0 };
+
+const getRelativeMousePos = (e) => { // maps an event's viewport coordinates to canvas-local coordinates
+	let rect = canvas.getBoundingClientRect();
+	return { x: e.clientX - rect.x, y: e.clientY - rect.y }; // NOTE(review): ignores CSS scaling — assumes 1:1 canvas pixel mapping, TODO confirm
+};
+
+const drawLine = (from, to) => { // stroke a 3px round-capped black segment from `from` to `to`
+	ctx.beginPath();
+	ctx.moveTo(from.x, from.y);
+	ctx.lineTo(to.x, to.y);
+	ctx.strokeStyle = 'black';
+	ctx.lineWidth = 3;
+	ctx.lineCap = 'round';
+	ctx.stroke();
+};
+
+canvas.addEventListener('pointerdown', (e) => {
+ if (e.button !== 0) return;
+
+ drawing = true;
+ lastPos = getRelativeMousePos(e);
+ drawLine(lastPos, lastPos);
+});
+window.addEventListener('pointerup', () => {
+ drawing = false;
+});
+window.addEventListener('mousemove', (e) => {
+ if (!drawing) return;
+
+ let newPos = getRelativeMousePos(e);
+ drawLine(lastPos, newPos);
+ lastPos = newPos;
+});
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/demo/style.css b/v1-com-officielle/public/mp4-muxer-main/demo/style.css
new file mode 100644
index 0000000..0a2bbf7
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/demo/style.css
@@ -0,0 +1,61 @@
+html, body {
+ margin: 0;
+ width: 100%;
+ height: 100%;
+ background: #120d17;
+ color: white;
+ font-family: monospace;
+}
+
+body {
+ display: flex;
+ align-items: center;
+ justify-content: center;
+}
+
+* {
+ user-select: none;
+}
+
+main {
+ width: 640px;
+}
+
+h1 {
+ margin: 0;
+ font-weight: normal;
+ text-align: center;
+ margin-bottom: 10px;
+}
+
+h2 {
+ margin: 0;
+ font-weight: normal;
+ text-align: center;
+ font-size: 14px;
+ margin-bottom: 20px;
+}
+
+canvas {
+ border-radius: 10px;
+ outline: 3px solid rgb(202, 202, 202);
+}
+
+#controls {
+ margin-bottom: 20px;
+ display: flex;
+ justify-content: center;
+ height: 38px;
+}
+
+button {
+ font-size: 20px;
+ padding: 5px 8px;
+}
+
+p {
+ margin: 0;
+ text-align: center;
+ margin-top: 20px;
+ height: 20px;
+}
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/helperFunctions.js b/v1-com-officielle/public/mp4-muxer-main/helperFunctions.js
new file mode 100644
index 0000000..60fce30
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/helperFunctions.js
@@ -0,0 +1,79 @@
+// Toggle play/pause
+function togglePlayPause() {
+
+	if (isPlaying) {
+		cancelAnimationFrame(animationID); // pause: stop the rAF loop where it is
+		isPlaying = false;
+	} else {
+		isPlaying = true;
+		animationID = requestAnimationFrame(render); // resume the loop
+	}
+}
+
+// Function to refresh the pattern with a new random seed
+const selectedSeeds = [53, 118, 506];
+var seedCount = 2; // NOTE(review): never modified in this file, so selectedSeeds[2] (506) is always chosen — confirm intended
+function refreshPattern() {
+	timeOffset = performance.now(); // rebase animation time so the pattern restarts from t = 0
+	//randomSeed = Math.floor(Math.random() * 1000,0);
+	randomSeed = selectedSeeds[seedCount];
+	gl.uniform1f(seedLocation, randomSeed);
+	if(!isPlaying){
+		isPlaying = true;
+		animationID = requestAnimationFrame(render); // resume the render loop if it was paused
+	}
+	console.log('seed:', randomSeed);
+}
+
+function startFromZeroTime(){
+	console.log("Restarting animation from time = 0");
+
+	// Cancel current animation if running
+	if (animationID) {
+		cancelAnimationFrame(animationID);
+	}
+
+	// Set the time offset to the current time
+	// This will be subtracted in the render function
+	timeOffset = performance.now(); // NOTE(review): render() subtracts this from the rAF timestamp — both are on the same performance.now() timeline
+
+	// Reset frame counter for FPS calculation
+	frameCount = 0;
+	lastTime = performance.now();
+
+	// Make sure all other uniforms are updated
+	updateUniforms();
+
+	// Ensure animation is playing
+	isPlaying = true;
+
+	// Start the animation loop from the beginning
+	animationID = requestAnimationFrame(render);
+}
+
+// Add this function to handle canvas resizing
+function updateCanvasSize() {
+	// Update canvas dimensions to window size
+	canvas.width = window.innerWidth; // NOTE(review): ignores params.canvasWidth/Height even though the GUI wires its sliders' onChange here — confirm intended
+	canvas.height = window.innerHeight;
+
+	// Update the WebGL viewport to match
+	gl.viewport(0, 0, canvas.width, canvas.height);
+
+	// Re-render if not already playing
+	if (!isPlaying) {
+		drawScene();
+	}
+
+	// If recording is active, we need to handle that
+	if (recordVideoState) { // NOTE(review): recordVideoState/stopRecording/startRecording come from another script — verify load order
+		stopRecording();
+		startRecording();
+	}
+}
+
+//intro overlay info screen
+
+let musicPlaying = false;
+
+let isZenMode = false;
diff --git a/v1-com-officielle/public/mp4-muxer-main/main.js b/v1-com-officielle/public/mp4-muxer-main/main.js
new file mode 100644
index 0000000..8c54f5a
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/main.js
@@ -0,0 +1,200 @@
+/*
+To do:
+Press z for zen mode (hides all control and other display on top of the canvas)
+Ability to add this shader effect on top of an image?
+Presets / seed choice??
+Allow user to upload a song, and then it becomes audio reactive?
+Generate perfect loops in x seconds
+*/
+
+// Initialize WebGL context
+const canvas = document.getElementById('canvas');
+let startingWidth = window.innerWidth;
+let startingHeight = window.innerHeight;
+canvas.width = startingWidth;
+canvas.height = startingHeight;
+console.log("canvas width/height: "+canvas.width+" / "+canvas.height);
+
+const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
+let isPlaying = false;
+let animationID = null;
+let randomSeed;
+let time;
+let timeOffset = 0;
+
+// FPS tracking variables
+let frameCount = 0;
+let lastTime = 0;
+let fps = 0;
+
+if (!gl) {
+ alert('WebGL not supported');
+}
+
+// Compile shaders
+function compileShader(source, type) {
+	const shader = gl.createShader(type);
+	gl.shaderSource(shader, source);
+	gl.compileShader(shader);
+
+	if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
+		console.error('Shader compilation error:', gl.getShaderInfoLog(shader));
+		gl.deleteShader(shader);
+		return null; // NOTE(review): the call sites below attach the result without a null check — a failed compile cascades into attachShader(null)
+	}
+
+	return shader;
+}
+
+// Create program
+const vertexShader = compileShader(document.getElementById('vertexShader').textContent, gl.VERTEX_SHADER);
+const fragmentShader = compileShader(document.getElementById('fragmentShader').textContent, gl.FRAGMENT_SHADER);
+
+const program = gl.createProgram();
+gl.attachShader(program, vertexShader);
+gl.attachShader(program, fragmentShader);
+gl.linkProgram(program);
+
+if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
+ console.error('Program linking error:', gl.getProgramInfoLog(program));
+}
+
+gl.useProgram(program);
+
+// Create rectangle covering the entire canvas
+const positionBuffer = gl.createBuffer();
+gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
+gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
+ -1.0, -1.0,
+ 1.0, -1.0,
+ -1.0, 1.0,
+ 1.0, 1.0
+]), gl.STATIC_DRAW);
+
+// Set up attributes and uniforms
+const positionLocation = gl.getAttribLocation(program, 'position');
+gl.enableVertexAttribArray(positionLocation);
+gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
+
+const timeLocation = gl.getUniformLocation(program, 'time');
+const resolutionLocation = gl.getUniformLocation(program, 'resolution');
+const seedLocation = gl.getUniformLocation(program, 'seed');
+
+// GUI-controlled uniform locations
+const timeScaleLocation = gl.getUniformLocation(program, 'timeScale');
+const bloomStrengthLocation = gl.getUniformLocation(program, 'bloomStrength');
+const saturationLocation = gl.getUniformLocation(program, 'saturation');
+const grainAmountLocation = gl.getUniformLocation(program, 'grainAmount');
+const colorTintLocation = gl.getUniformLocation(program, 'colorTint');
+const minCircleSizeLocation = gl.getUniformLocation(program, 'minCircleSize');
+const circleStrengthLocation = gl.getUniformLocation(program, 'circleStrength');
+const distortXLocation = gl.getUniformLocation(program, 'distortX');
+const distortYLocation = gl.getUniformLocation(program, 'distortY');
+
+const patternAmpLocation = gl.getUniformLocation(program, 'patternAmp');
+const patternFreqLocation = gl.getUniformLocation(program, 'patternFreq');
+
+// Initialize parameters object for dat.gui
+const params = {
+ canvasWidth: startingWidth,
+ canvasHeight: startingHeight,
+ timeScale: .666,
+ patternAmp: 2,
+ patternFreq: 0.4,
+ bloomStrength: 0.5,
+ saturation: 1.74,
+ grainAmount: 0.161,
+ colorTintR: 1.5,
+ colorTintG: 1.0,
+ colorTintB: 1.0,
+ minCircleSize: 2.8,
+ circleStrength: 0,
+ distortX: 1,
+ distortY: 1,
+};
+
+// Also refresh on page load
+window.addEventListener('load', refreshPattern);
+window.addEventListener('resize', updateCanvasSize);
+
+// Initialize dat.gui
+const gui = new dat.GUI({ autoplace: false });
+gui.close();
+
+// Add GUI controls with folders for organization
+const canvasFolder = gui.addFolder('Canvas Size');
+canvasFolder.add(params, 'canvasWidth', 100, 4000).step(10).name('Width').onChange(updateCanvasSize);
+canvasFolder.add(params, 'canvasHeight', 100, 4000).step(10).name('Height').onChange(updateCanvasSize);
+canvasFolder.open();
+
+const timeFolder = gui.addFolder('Animation');
+timeFolder.add(params, 'timeScale', 0.1, 3.0).name('Speed').onChange(updateUniforms);
+timeFolder.open();
+
+const patternFolder = gui.addFolder('Pattern');
+patternFolder.add(params, 'patternAmp', 1.0, 50.0).step(0.1).name('Pattern Amp').onChange(updateUniforms);
+patternFolder.add(params, 'patternFreq', 0.2, 10.0).step(0.1).name('Pattern Freq').onChange(updateUniforms);
+patternFolder.open();
+
+const visualFolder = gui.addFolder('Visual Effects');
+visualFolder.add(params, 'bloomStrength', 0.0, 5.0).name('Bloom').onChange(updateUniforms);
+visualFolder.add(params, 'saturation', 0.0, 2.0).name('Saturation').onChange(updateUniforms);
+visualFolder.add(params, 'grainAmount', 0.0, 0.5).name('Grain').onChange(updateUniforms);
+visualFolder.add(params, 'minCircleSize', 0.0, 10.0).name('Circle Size').onChange(updateUniforms);
+visualFolder.add(params, 'circleStrength', 0.0, 3.0).name('Circle Strength').onChange(updateUniforms);
+visualFolder.add(params, 'distortX', 0.0, 50.0).name('Distort-X').onChange(updateUniforms);
+visualFolder.add(params, 'distortY', 0.0, 50.0).name('Distort-Y').onChange(updateUniforms);
+
+visualFolder.open();
+
+const colorFolder = gui.addFolder('Color Tint');
+colorFolder.add(params, 'colorTintR', 0.0, 1.5).name('Red').onChange(updateUniforms);
+colorFolder.add(params, 'colorTintG', 0.0, 1.5).name('Green').onChange(updateUniforms);
+colorFolder.add(params, 'colorTintB', 0.0, 1.5).name('Blue').onChange(updateUniforms);
+colorFolder.open();
+
+// Function to update shader uniforms from GUI values
+function updateUniforms() { // pushes every GUI-controlled param in `params` to its shader uniform
+	gl.uniform1f(timeScaleLocation, params.timeScale);
+	gl.uniform1f(patternAmpLocation, params.patternAmp);
+	gl.uniform1f(patternFreqLocation, params.patternFreq);
+	gl.uniform1f(bloomStrengthLocation, params.bloomStrength);
+	gl.uniform1f(saturationLocation, params.saturation);
+	gl.uniform1f(grainAmountLocation, params.grainAmount);
+	gl.uniform3f(colorTintLocation, params.colorTintR, params.colorTintG, params.colorTintB); // three floats packed into a vec3 tint
+	gl.uniform1f(minCircleSizeLocation, params.minCircleSize);
+	gl.uniform1f(circleStrengthLocation, params.circleStrength);
+	gl.uniform1f(distortXLocation, params.distortX);
+	gl.uniform1f(distortYLocation, params.distortY);
+}
+
+function drawScene(){ // draw the full-screen quad (TRIANGLE_STRIP over the 4 verts in positionBuffer)
+	gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+}
+
+// Animation loop
+function render(timestamp) {
+	if (isPlaying) {
+		// Calculate adjusted time by subtracting the offset
+		const adjustedTime = timestamp - timeOffset;
+		time = timestamp; // NOTE(review): `time` is assigned but never read anywhere visible in this file — confirm it is used elsewhere
+
+		const timeInSeconds = adjustedTime * 0.0035; // ms * 0.0035, i.e. ~3.5x real-time speed, not literal seconds
+		gl.uniform1f(timeLocation, timeInSeconds);
+		gl.uniform2f(resolutionLocation, canvas.width, canvas.height);
+
+
+		// If video recording is ongoing, drawScene is called already
+		if (!recordVideoState || useMobileRecord) { // NOTE(review): recordVideoState/useMobileRecord are defined in another script — verify load order
+			drawScene();
+		}
+
+		animationID = requestAnimationFrame(render); // keep looping until isPlaying goes false
+	}
+}
+
+// Start the animation loop
+isPlaying = true;
+refreshPattern();
+updateUniforms();
+animationID = requestAnimationFrame(render);
diff --git a/v1-com-officielle/public/mp4-muxer-main/package-lock.json b/v1-com-officielle/public/mp4-muxer-main/package-lock.json
new file mode 100644
index 0000000..4786905
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/package-lock.json
@@ -0,0 +1,2051 @@
+{
+ "name": "mp4-muxer",
+ "version": "3.0.5",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "mp4-muxer",
+ "version": "3.0.5",
+ "license": "MIT",
+ "dependencies": {
+ "@types/dom-webcodecs": "^0.1.6",
+ "@types/wicg-file-system-access": "^2020.9.5"
+ },
+ "devDependencies": {
+ "@types/node": "^18.15.0",
+ "@typescript-eslint/eslint-plugin": "^5.54.1",
+ "@typescript-eslint/parser": "^5.54.1",
+ "esbuild": "^0.17.11",
+ "eslint": "^8.36.0",
+ "typescript": "^4.9.5"
+ }
+ },
+ "node_modules/@esbuild/android-arm": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.17.11.tgz",
+ "integrity": "sha512-CdyX6sRVh1NzFCsf5vw3kULwlAhfy9wVt8SZlrhQ7eL2qBjGbFhRBWkkAzuZm9IIEOCKJw4DXA6R85g+qc8RDw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-arm64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.17.11.tgz",
+ "integrity": "sha512-QnK4d/zhVTuV4/pRM4HUjcsbl43POALU2zvBynmrrqZt9LPcLA3x1fTZPBg2RRguBQnJcnU059yKr+bydkntjg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-x64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.17.11.tgz",
+ "integrity": "sha512-3PL3HKtsDIXGQcSCKtWD/dy+mgc4p2Tvo2qKgKHj9Yf+eniwFnuoQ0OUhlSfAEpKAFzF9N21Nwgnap6zy3L3MQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/darwin-arm64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.17.11.tgz",
+ "integrity": "sha512-pJ950bNKgzhkGNO3Z9TeHzIFtEyC2GDQL3wxkMApDEghYx5Qers84UTNc1bAxWbRkuJOgmOha5V0WUeh8G+YGw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/darwin-x64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.17.11.tgz",
+ "integrity": "sha512-iB0dQkIHXyczK3BZtzw1tqegf0F0Ab5texX2TvMQjiJIWXAfM4FQl7D909YfXWnB92OQz4ivBYQ2RlxBJrMJOw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.11.tgz",
+ "integrity": "sha512-7EFzUADmI1jCHeDRGKgbnF5sDIceZsQGapoO6dmw7r/ZBEKX7CCDnIz8m9yEclzr7mFsd+DyasHzpjfJnmBB1Q==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/freebsd-x64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.17.11.tgz",
+ "integrity": "sha512-iPgenptC8i8pdvkHQvXJFzc1eVMR7W2lBPrTE6GbhR54sLcF42mk3zBOjKPOodezzuAz/KSu8CPyFSjcBMkE9g==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-arm": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.17.11.tgz",
+ "integrity": "sha512-M9iK/d4lgZH0U5M1R2p2gqhPV/7JPJcRz+8O8GBKVgqndTzydQ7B2XGDbxtbvFkvIs53uXTobOhv+RyaqhUiMg==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-arm64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.17.11.tgz",
+ "integrity": "sha512-Qxth3gsWWGKz2/qG2d5DsW/57SeA2AmpSMhdg9TSB5Svn2KDob3qxfQSkdnWjSd42kqoxIPy3EJFs+6w1+6Qjg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-ia32": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.17.11.tgz",
+ "integrity": "sha512-dB1nGaVWtUlb/rRDHmuDQhfqazWE0LMro/AIbT2lWM3CDMHJNpLckH+gCddQyhhcLac2OYw69ikUMO34JLt3wA==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-loong64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.17.11.tgz",
+ "integrity": "sha512-aCWlq70Q7Nc9WDnormntGS1ar6ZFvUpqr8gXtO+HRejRYPweAFQN615PcgaSJkZjhHp61+MNLhzyVALSF2/Q0g==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-mips64el": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.17.11.tgz",
+ "integrity": "sha512-cGeGNdQxqY8qJwlYH1BP6rjIIiEcrM05H7k3tR7WxOLmD1ZxRMd6/QIOWMb8mD2s2YJFNRuNQ+wjMhgEL2oCEw==",
+ "cpu": [
+ "mips64el"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-ppc64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.17.11.tgz",
+ "integrity": "sha512-BdlziJQPW/bNe0E8eYsHB40mYOluS+jULPCjlWiHzDgr+ZBRXPtgMV1nkLEGdpjrwgmtkZHEGEPaKdS/8faLDA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-riscv64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.17.11.tgz",
+ "integrity": "sha512-MDLwQbtF+83oJCI1Cixn68Et/ME6gelmhssPebC40RdJaect+IM+l7o/CuG0ZlDs6tZTEIoxUe53H3GmMn8oMA==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-s390x": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.17.11.tgz",
+ "integrity": "sha512-4N5EMESvws0Ozr2J94VoUD8HIRi7X0uvUv4c0wpTHZyZY9qpaaN7THjosdiW56irQ4qnJ6Lsc+i+5zGWnyqWqQ==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-x64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.17.11.tgz",
+ "integrity": "sha512-rM/v8UlluxpytFSmVdbCe1yyKQd/e+FmIJE2oPJvbBo+D0XVWi1y/NQ4iTNx+436WmDHQBjVLrbnAQLQ6U7wlw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/netbsd-x64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.17.11.tgz",
+ "integrity": "sha512-4WaAhuz5f91h3/g43VBGdto1Q+X7VEZfpcWGtOFXnggEuLvjV+cP6DyLRU15IjiU9fKLLk41OoJfBFN5DhPvag==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/openbsd-x64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.17.11.tgz",
+ "integrity": "sha512-UBj135Nx4FpnvtE+C8TWGp98oUgBcmNmdYgl5ToKc0mBHxVVqVE7FUS5/ELMImOp205qDAittL6Ezhasc2Ev/w==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/sunos-x64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.17.11.tgz",
+ "integrity": "sha512-1/gxTifDC9aXbV2xOfCbOceh5AlIidUrPsMpivgzo8P8zUtczlq1ncFpeN1ZyQJ9lVs2hILy1PG5KPp+w8QPPg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-arm64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.17.11.tgz",
+ "integrity": "sha512-vtSfyx5yRdpiOW9yp6Ax0zyNOv9HjOAw8WaZg3dF5djEHKKm3UnoohftVvIJtRh0Ec7Hso0RIdTqZvPXJ7FdvQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-ia32": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.17.11.tgz",
+ "integrity": "sha512-GFPSLEGQr4wHFTiIUJQrnJKZhZjjq4Sphf+mM76nQR6WkQn73vm7IsacmBRPkALfpOCHsopSvLgqdd4iUW2mYw==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-x64": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.17.11.tgz",
+ "integrity": "sha512-N9vXqLP3eRL8BqSy8yn4Y98cZI2pZ8fyuHx6lKjiG2WABpT2l01TXdzq5Ma2ZUBzfB7tx5dXVhge8X9u0S70ZQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@eslint-community/eslint-utils": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.2.0.tgz",
+ "integrity": "sha512-gB8T4H4DEfX2IV9zGDJPOBgP1e/DbfCPDTtEqUMckpvzS1OYtva8JdFYBqMwYk7xAQ429WGF/UPqn8uQ//h2vQ==",
+ "dev": true,
+ "dependencies": {
+ "eslint-visitor-keys": "^3.3.0"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "peerDependencies": {
+ "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0"
+ }
+ },
+ "node_modules/@eslint-community/regexpp": {
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.4.0.tgz",
+ "integrity": "sha512-A9983Q0LnDGdLPjxyXQ00sbV+K+O+ko2Dr+CZigbHWtX9pNfxlaBkMR8X1CztI73zuEyEBXTVjx7CE+/VSwDiQ==",
+ "dev": true,
+ "engines": {
+ "node": "^12.0.0 || ^14.0.0 || >=16.0.0"
+ }
+ },
+ "node_modules/@eslint/eslintrc": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.1.tgz",
+ "integrity": "sha512-eFRmABvW2E5Ho6f5fHLqgena46rOj7r7OKHYfLElqcBfGFHHpjBhivyi5+jOEQuSpdc/1phIZJlbC2te+tZNIw==",
+ "dev": true,
+ "dependencies": {
+ "ajv": "^6.12.4",
+ "debug": "^4.3.2",
+ "espree": "^9.5.0",
+ "globals": "^13.19.0",
+ "ignore": "^5.2.0",
+ "import-fresh": "^3.2.1",
+ "js-yaml": "^4.1.0",
+ "minimatch": "^3.1.2",
+ "strip-json-comments": "^3.1.1"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
+ "node_modules/@eslint/js": {
+ "version": "8.36.0",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.36.0.tgz",
+ "integrity": "sha512-lxJ9R5ygVm8ZWgYdUweoq5ownDlJ4upvoWmO4eLxBYHdMo+vZ/Rx0EN6MbKWDJOSUGrqJy2Gt+Dyv/VKml0fjg==",
+ "dev": true,
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ }
+ },
+ "node_modules/@humanwhocodes/config-array": {
+ "version": "0.11.8",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz",
+ "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==",
+ "dev": true,
+ "dependencies": {
+ "@humanwhocodes/object-schema": "^1.2.1",
+ "debug": "^4.1.1",
+ "minimatch": "^3.0.5"
+ },
+ "engines": {
+ "node": ">=10.10.0"
+ }
+ },
+ "node_modules/@humanwhocodes/module-importer": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
+ "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
+ "dev": true,
+ "engines": {
+ "node": ">=12.22"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/nzakas"
+ }
+ },
+ "node_modules/@humanwhocodes/object-schema": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz",
+ "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==",
+ "dev": true
+ },
+ "node_modules/@nodelib/fs.scandir": {
+ "version": "2.1.5",
+ "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
+ "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
+ "dev": true,
+ "dependencies": {
+ "@nodelib/fs.stat": "2.0.5",
+ "run-parallel": "^1.1.9"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/@nodelib/fs.stat": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
+ "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
+ "dev": true,
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/@nodelib/fs.walk": {
+ "version": "1.2.8",
+ "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
+ "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
+ "dev": true,
+ "dependencies": {
+ "@nodelib/fs.scandir": "2.1.5",
+ "fastq": "^1.6.0"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/@types/dom-webcodecs": {
+ "version": "0.1.6",
+ "resolved": "https://registry.npmjs.org/@types/dom-webcodecs/-/dom-webcodecs-0.1.6.tgz",
+ "integrity": "sha512-m+Y2WRIKvLwFzyToNGA5XV+sfihtrfcOaMrXzsZULVdqYyKy4yn0XZ8lES9RRwZTzW7TKcz84xed3e5FBk3wCg=="
+ },
+ "node_modules/@types/json-schema": {
+ "version": "7.0.11",
+ "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz",
+ "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==",
+ "dev": true
+ },
+ "node_modules/@types/node": {
+ "version": "18.15.0",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.0.tgz",
+ "integrity": "sha512-z6nr0TTEOBGkzLGmbypWOGnpSpSIBorEhC4L+4HeQ2iezKCi4f77kyslRwvHeNitymGQ+oFyIWGP96l/DPSV9w==",
+ "dev": true
+ },
+ "node_modules/@types/semver": {
+ "version": "7.3.13",
+ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz",
+ "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==",
+ "dev": true
+ },
+ "node_modules/@types/wicg-file-system-access": {
+ "version": "2020.9.5",
+ "resolved": "https://registry.npmjs.org/@types/wicg-file-system-access/-/wicg-file-system-access-2020.9.5.tgz",
+ "integrity": "sha512-UYK244awtmcUYQfs7FR8710MJcefL2WvkyHMjA8yJzxd1mo0Gfn88sRZ1Bls7hiUhA2w7ne1gpJ9T5g3G0wOyA=="
+ },
+ "node_modules/@typescript-eslint/eslint-plugin": {
+ "version": "5.54.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.54.1.tgz",
+ "integrity": "sha512-a2RQAkosH3d3ZIV08s3DcL/mcGc2M/UC528VkPULFxR9VnVPT8pBu0IyBAJJmVsCmhVfwQX1v6q+QGnmSe1bew==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/scope-manager": "5.54.1",
+ "@typescript-eslint/type-utils": "5.54.1",
+ "@typescript-eslint/utils": "5.54.1",
+ "debug": "^4.3.4",
+ "grapheme-splitter": "^1.0.4",
+ "ignore": "^5.2.0",
+ "natural-compare-lite": "^1.4.0",
+ "regexpp": "^3.2.0",
+ "semver": "^7.3.7",
+ "tsutils": "^3.21.0"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "@typescript-eslint/parser": "^5.0.0",
+ "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/parser": {
+ "version": "5.54.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.54.1.tgz",
+ "integrity": "sha512-8zaIXJp/nG9Ff9vQNh7TI+C3nA6q6iIsGJ4B4L6MhZ7mHnTMR4YP5vp2xydmFXIy8rpyIVbNAG44871LMt6ujg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/scope-manager": "5.54.1",
+ "@typescript-eslint/types": "5.54.1",
+ "@typescript-eslint/typescript-estree": "5.54.1",
+ "debug": "^4.3.4"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/scope-manager": {
+ "version": "5.54.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.54.1.tgz",
+ "integrity": "sha512-zWKuGliXxvuxyM71UA/EcPxaviw39dB2504LqAmFDjmkpO8qNLHcmzlh6pbHs1h/7YQ9bnsO8CCcYCSA8sykUg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "5.54.1",
+ "@typescript-eslint/visitor-keys": "5.54.1"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/type-utils": {
+ "version": "5.54.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.54.1.tgz",
+ "integrity": "sha512-WREHsTz0GqVYLIbzIZYbmUUr95DKEKIXZNH57W3s+4bVnuF1TKe2jH8ZNH8rO1CeMY3U4j4UQeqPNkHMiGem3g==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/typescript-estree": "5.54.1",
+ "@typescript-eslint/utils": "5.54.1",
+ "debug": "^4.3.4",
+ "tsutils": "^3.21.0"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "eslint": "*"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/types": {
+ "version": "5.54.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.54.1.tgz",
+ "integrity": "sha512-G9+1vVazrfAfbtmCapJX8jRo2E4MDXxgm/IMOF4oGh3kq7XuK3JRkOg6y2Qu1VsTRmWETyTkWt1wxy7X7/yLkw==",
+ "dev": true,
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/typescript-estree": {
+ "version": "5.54.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.54.1.tgz",
+ "integrity": "sha512-bjK5t+S6ffHnVwA0qRPTZrxKSaFYocwFIkZx5k7pvWfsB1I57pO/0M0Skatzzw1sCkjJ83AfGTL0oFIFiDX3bg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "5.54.1",
+ "@typescript-eslint/visitor-keys": "5.54.1",
+ "debug": "^4.3.4",
+ "globby": "^11.1.0",
+ "is-glob": "^4.0.3",
+ "semver": "^7.3.7",
+ "tsutils": "^3.21.0"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@typescript-eslint/utils": {
+ "version": "5.54.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.54.1.tgz",
+ "integrity": "sha512-IY5dyQM8XD1zfDe5X8jegX6r2EVU5o/WJnLu/znLPWCBF7KNGC+adacXnt5jEYS9JixDcoccI6CvE4RCjHMzCQ==",
+ "dev": true,
+ "dependencies": {
+ "@types/json-schema": "^7.0.9",
+ "@types/semver": "^7.3.12",
+ "@typescript-eslint/scope-manager": "5.54.1",
+ "@typescript-eslint/types": "5.54.1",
+ "@typescript-eslint/typescript-estree": "5.54.1",
+ "eslint-scope": "^5.1.1",
+ "eslint-utils": "^3.0.0",
+ "semver": "^7.3.7"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0"
+ }
+ },
+ "node_modules/@typescript-eslint/visitor-keys": {
+ "version": "5.54.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.54.1.tgz",
+ "integrity": "sha512-q8iSoHTgwCfgcRJ2l2x+xCbu8nBlRAlsQ33k24Adj8eoVBE0f8dUeI+bAa8F84Mv05UGbAx57g2zrRsYIooqQg==",
+ "dev": true,
+ "dependencies": {
+ "@typescript-eslint/types": "5.54.1",
+ "eslint-visitor-keys": "^3.3.0"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/acorn": {
+ "version": "8.8.2",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz",
+ "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==",
+ "dev": true,
+ "bin": {
+ "acorn": "bin/acorn"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/acorn-jsx": {
+ "version": "5.3.2",
+ "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
+ "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
+ "dev": true,
+ "peerDependencies": {
+ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
+ }
+ },
+ "node_modules/ajv": {
+ "version": "6.12.6",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
+ "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+ "dev": true,
+ "dependencies": {
+ "fast-deep-equal": "^3.1.1",
+ "fast-json-stable-stringify": "^2.0.0",
+ "json-schema-traverse": "^0.4.1",
+ "uri-js": "^4.2.2"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/epoberezkin"
+ }
+ },
+ "node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "dependencies": {
+ "color-convert": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/argparse": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
+ "dev": true
+ },
+ "node_modules/array-union": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz",
+ "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true
+ },
+ "node_modules/brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "dev": true,
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "node_modules/braces": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
+ "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+ "dev": true,
+ "dependencies": {
+ "fill-range": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/callsites": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
+ "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/chalk": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
+ "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
+ "dev": true,
+ "dependencies": {
+ "ansi-styles": "^4.1.0",
+ "supports-color": "^7.1.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/chalk?sponsor=1"
+ }
+ },
+ "node_modules/color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "dependencies": {
+ "color-name": "~1.1.4"
+ },
+ "engines": {
+ "node": ">=7.0.0"
+ }
+ },
+ "node_modules/color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true
+ },
+ "node_modules/concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
+ "dev": true
+ },
+ "node_modules/cross-spawn": {
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
+ "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+ "dev": true,
+ "dependencies": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/debug": {
+ "version": "4.3.4",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
+ "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
+ "dev": true,
+ "dependencies": {
+ "ms": "2.1.2"
+ },
+ "engines": {
+ "node": ">=6.0"
+ },
+ "peerDependenciesMeta": {
+ "supports-color": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/deep-is": {
+ "version": "0.1.4",
+ "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
+ "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
+ "dev": true
+ },
+ "node_modules/dir-glob": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
+ "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
+ "dev": true,
+ "dependencies": {
+ "path-type": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/doctrine": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
+ "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
+ "dev": true,
+ "dependencies": {
+ "esutils": "^2.0.2"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/esbuild": {
+ "version": "0.17.11",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.17.11.tgz",
+ "integrity": "sha512-pAMImyokbWDtnA/ufPxjQg0fYo2DDuzAlqwnDvbXqHLphe+m80eF++perYKVm8LeTuj2zUuFXC+xgSVxyoHUdg==",
+ "dev": true,
+ "hasInstallScript": true,
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "optionalDependencies": {
+ "@esbuild/android-arm": "0.17.11",
+ "@esbuild/android-arm64": "0.17.11",
+ "@esbuild/android-x64": "0.17.11",
+ "@esbuild/darwin-arm64": "0.17.11",
+ "@esbuild/darwin-x64": "0.17.11",
+ "@esbuild/freebsd-arm64": "0.17.11",
+ "@esbuild/freebsd-x64": "0.17.11",
+ "@esbuild/linux-arm": "0.17.11",
+ "@esbuild/linux-arm64": "0.17.11",
+ "@esbuild/linux-ia32": "0.17.11",
+ "@esbuild/linux-loong64": "0.17.11",
+ "@esbuild/linux-mips64el": "0.17.11",
+ "@esbuild/linux-ppc64": "0.17.11",
+ "@esbuild/linux-riscv64": "0.17.11",
+ "@esbuild/linux-s390x": "0.17.11",
+ "@esbuild/linux-x64": "0.17.11",
+ "@esbuild/netbsd-x64": "0.17.11",
+ "@esbuild/openbsd-x64": "0.17.11",
+ "@esbuild/sunos-x64": "0.17.11",
+ "@esbuild/win32-arm64": "0.17.11",
+ "@esbuild/win32-ia32": "0.17.11",
+ "@esbuild/win32-x64": "0.17.11"
+ }
+ },
+ "node_modules/escape-string-regexp": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/eslint": {
+ "version": "8.36.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.36.0.tgz",
+ "integrity": "sha512-Y956lmS7vDqomxlaaQAHVmeb4tNMp2FWIvU/RnU5BD3IKMD/MJPr76xdyr68P8tV1iNMvN2mRK0yy3c+UjL+bw==",
+ "dev": true,
+ "dependencies": {
+ "@eslint-community/eslint-utils": "^4.2.0",
+ "@eslint-community/regexpp": "^4.4.0",
+ "@eslint/eslintrc": "^2.0.1",
+ "@eslint/js": "8.36.0",
+ "@humanwhocodes/config-array": "^0.11.8",
+ "@humanwhocodes/module-importer": "^1.0.1",
+ "@nodelib/fs.walk": "^1.2.8",
+ "ajv": "^6.10.0",
+ "chalk": "^4.0.0",
+ "cross-spawn": "^7.0.2",
+ "debug": "^4.3.2",
+ "doctrine": "^3.0.0",
+ "escape-string-regexp": "^4.0.0",
+ "eslint-scope": "^7.1.1",
+ "eslint-visitor-keys": "^3.3.0",
+ "espree": "^9.5.0",
+ "esquery": "^1.4.2",
+ "esutils": "^2.0.2",
+ "fast-deep-equal": "^3.1.3",
+ "file-entry-cache": "^6.0.1",
+ "find-up": "^5.0.0",
+ "glob-parent": "^6.0.2",
+ "globals": "^13.19.0",
+ "grapheme-splitter": "^1.0.4",
+ "ignore": "^5.2.0",
+ "import-fresh": "^3.0.0",
+ "imurmurhash": "^0.1.4",
+ "is-glob": "^4.0.0",
+ "is-path-inside": "^3.0.3",
+ "js-sdsl": "^4.1.4",
+ "js-yaml": "^4.1.0",
+ "json-stable-stringify-without-jsonify": "^1.0.1",
+ "levn": "^0.4.1",
+ "lodash.merge": "^4.6.2",
+ "minimatch": "^3.1.2",
+ "natural-compare": "^1.4.0",
+ "optionator": "^0.9.1",
+ "strip-ansi": "^6.0.1",
+ "strip-json-comments": "^3.1.0",
+ "text-table": "^0.2.0"
+ },
+ "bin": {
+ "eslint": "bin/eslint.js"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
+ "node_modules/eslint-scope": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
+ "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==",
+ "dev": true,
+ "dependencies": {
+ "esrecurse": "^4.3.0",
+ "estraverse": "^4.1.1"
+ },
+ "engines": {
+ "node": ">=8.0.0"
+ }
+ },
+ "node_modules/eslint-utils": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz",
+ "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==",
+ "dev": true,
+ "dependencies": {
+ "eslint-visitor-keys": "^2.0.0"
+ },
+ "engines": {
+ "node": "^10.0.0 || ^12.0.0 || >= 14.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/mysticatea"
+ },
+ "peerDependencies": {
+ "eslint": ">=5"
+ }
+ },
+ "node_modules/eslint-utils/node_modules/eslint-visitor-keys": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz",
+ "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/eslint-visitor-keys": {
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz",
+ "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==",
+ "dev": true,
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ }
+ },
+ "node_modules/eslint/node_modules/eslint-scope": {
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz",
+ "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==",
+ "dev": true,
+ "dependencies": {
+ "esrecurse": "^4.3.0",
+ "estraverse": "^5.2.0"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ }
+ },
+ "node_modules/eslint/node_modules/estraverse": {
+ "version": "5.3.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
+ "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
+ "dev": true,
+ "engines": {
+ "node": ">=4.0"
+ }
+ },
+ "node_modules/espree": {
+ "version": "9.5.0",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.0.tgz",
+ "integrity": "sha512-JPbJGhKc47++oo4JkEoTe2wjy4fmMwvFpgJT9cQzmfXKp22Dr6Hf1tdCteLz1h0P3t+mGvWZ+4Uankvh8+c6zw==",
+ "dev": true,
+ "dependencies": {
+ "acorn": "^8.8.0",
+ "acorn-jsx": "^5.3.2",
+ "eslint-visitor-keys": "^3.3.0"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
+ }
+ },
+ "node_modules/esquery": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz",
+ "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==",
+ "dev": true,
+ "dependencies": {
+ "estraverse": "^5.1.0"
+ },
+ "engines": {
+ "node": ">=0.10"
+ }
+ },
+ "node_modules/esquery/node_modules/estraverse": {
+ "version": "5.3.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
+ "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
+ "dev": true,
+ "engines": {
+ "node": ">=4.0"
+ }
+ },
+ "node_modules/esrecurse": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
+ "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
+ "dev": true,
+ "dependencies": {
+ "estraverse": "^5.2.0"
+ },
+ "engines": {
+ "node": ">=4.0"
+ }
+ },
+ "node_modules/esrecurse/node_modules/estraverse": {
+ "version": "5.3.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
+ "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
+ "dev": true,
+ "engines": {
+ "node": ">=4.0"
+ }
+ },
+ "node_modules/estraverse": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
+ "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
+ "dev": true,
+ "engines": {
+ "node": ">=4.0"
+ }
+ },
+ "node_modules/esutils": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
+ "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/fast-deep-equal": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+ "dev": true
+ },
+ "node_modules/fast-glob": {
+ "version": "3.2.12",
+ "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz",
+ "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==",
+ "dev": true,
+ "dependencies": {
+ "@nodelib/fs.stat": "^2.0.2",
+ "@nodelib/fs.walk": "^1.2.3",
+ "glob-parent": "^5.1.2",
+ "merge2": "^1.3.0",
+ "micromatch": "^4.0.4"
+ },
+ "engines": {
+ "node": ">=8.6.0"
+ }
+ },
+ "node_modules/fast-glob/node_modules/glob-parent": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+ "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
+ "dev": true,
+ "dependencies": {
+ "is-glob": "^4.0.1"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/fast-json-stable-stringify": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
+ "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
+ "dev": true
+ },
+ "node_modules/fast-levenshtein": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
+ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
+ "dev": true
+ },
+ "node_modules/fastq": {
+ "version": "1.15.0",
+ "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz",
+ "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==",
+ "dev": true,
+ "dependencies": {
+ "reusify": "^1.0.4"
+ }
+ },
+ "node_modules/file-entry-cache": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
+ "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
+ "dev": true,
+ "dependencies": {
+ "flat-cache": "^3.0.4"
+ },
+ "engines": {
+ "node": "^10.12.0 || >=12.0.0"
+ }
+ },
+ "node_modules/fill-range": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
+ "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+ "dev": true,
+ "dependencies": {
+ "to-regex-range": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/find-up": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+ "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+ "dev": true,
+ "dependencies": {
+ "locate-path": "^6.0.0",
+ "path-exists": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/flat-cache": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
+ "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==",
+ "dev": true,
+ "dependencies": {
+ "flatted": "^3.1.0",
+ "rimraf": "^3.0.2"
+ },
+ "engines": {
+ "node": "^10.12.0 || >=12.0.0"
+ }
+ },
+ "node_modules/flatted": {
+ "version": "3.2.7",
+ "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
+ "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==",
+ "dev": true
+ },
+ "node_modules/fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
+ "dev": true
+ },
+ "node_modules/glob": {
+ "version": "7.2.3",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+ "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+ "dev": true,
+ "dependencies": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.1.1",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/glob-parent": {
+ "version": "6.0.2",
+ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
+ "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
+ "dev": true,
+ "dependencies": {
+ "is-glob": "^4.0.3"
+ },
+ "engines": {
+ "node": ">=10.13.0"
+ }
+ },
+ "node_modules/globals": {
+ "version": "13.20.0",
+ "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz",
+ "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==",
+ "dev": true,
+ "dependencies": {
+ "type-fest": "^0.20.2"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/globby": {
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz",
+ "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==",
+ "dev": true,
+ "dependencies": {
+ "array-union": "^2.1.0",
+ "dir-glob": "^3.0.1",
+ "fast-glob": "^3.2.9",
+ "ignore": "^5.2.0",
+ "merge2": "^1.4.1",
+ "slash": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/grapheme-splitter": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz",
+ "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==",
+ "dev": true
+ },
+ "node_modules/has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/ignore": {
+ "version": "5.2.4",
+ "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz",
+ "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==",
+ "dev": true,
+ "engines": {
+ "node": ">= 4"
+ }
+ },
+ "node_modules/import-fresh": {
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz",
+ "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==",
+ "dev": true,
+ "dependencies": {
+ "parent-module": "^1.0.0",
+ "resolve-from": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/imurmurhash": {
+ "version": "0.1.4",
+ "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
+ "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.8.19"
+ }
+ },
+ "node_modules/inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
+ "dev": true,
+ "dependencies": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "node_modules/inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
+ "dev": true
+ },
+ "node_modules/is-extglob": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+ "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-glob": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
+ "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
+ "dev": true,
+ "dependencies": {
+ "is-extglob": "^2.1.1"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-number": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.12.0"
+ }
+ },
+ "node_modules/is-path-inside": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
+ "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+ "dev": true
+ },
+ "node_modules/js-sdsl": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz",
+ "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==",
+ "dev": true,
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/js-sdsl"
+ }
+ },
+ "node_modules/js-yaml": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
+ "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+ "dev": true,
+ "dependencies": {
+ "argparse": "^2.0.1"
+ },
+ "bin": {
+ "js-yaml": "bin/js-yaml.js"
+ }
+ },
+ "node_modules/json-schema-traverse": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
+ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
+ "dev": true
+ },
+ "node_modules/json-stable-stringify-without-jsonify": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
+ "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
+ "dev": true
+ },
+ "node_modules/levn": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
+ "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
+ "dev": true,
+ "dependencies": {
+ "prelude-ls": "^1.2.1",
+ "type-check": "~0.4.0"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/locate-path": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+ "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+ "dev": true,
+ "dependencies": {
+ "p-locate": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/lodash.merge": {
+ "version": "4.6.2",
+ "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
+ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
+ "dev": true
+ },
+ "node_modules/lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "dev": true,
+ "dependencies": {
+ "yallist": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/merge2": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
+ "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
+ "dev": true,
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/micromatch": {
+ "version": "4.0.5",
+ "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz",
+ "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==",
+ "dev": true,
+ "dependencies": {
+ "braces": "^3.0.2",
+ "picomatch": "^2.3.1"
+ },
+ "engines": {
+ "node": ">=8.6"
+ }
+ },
+ "node_modules/minimatch": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+ "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+ "dev": true,
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "dev": true
+ },
+ "node_modules/natural-compare": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
+ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
+ "dev": true
+ },
+ "node_modules/natural-compare-lite": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz",
+ "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==",
+ "dev": true
+ },
+ "node_modules/once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
+ "dev": true,
+ "dependencies": {
+ "wrappy": "1"
+ }
+ },
+ "node_modules/optionator": {
+ "version": "0.9.1",
+ "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz",
+ "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==",
+ "dev": true,
+ "dependencies": {
+ "deep-is": "^0.1.3",
+ "fast-levenshtein": "^2.0.6",
+ "levn": "^0.4.1",
+ "prelude-ls": "^1.2.1",
+ "type-check": "^0.4.0",
+ "word-wrap": "^1.2.3"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/p-limit": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+ "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+ "dev": true,
+ "dependencies": {
+ "yocto-queue": "^0.1.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/p-locate": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+ "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+ "dev": true,
+ "dependencies": {
+ "p-limit": "^3.0.2"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/parent-module": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
+ "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
+ "dev": true,
+ "dependencies": {
+ "callsites": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/path-exists": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/path-type": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
+ "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/picomatch": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+ "dev": true,
+ "engines": {
+ "node": ">=8.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/prelude-ls": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
+ "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
+ "dev": true,
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/punycode": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
+ "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==",
+ "dev": true,
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/queue-microtask": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
+ "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+ },
+ "node_modules/regexpp": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz",
+ "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/mysticatea"
+ }
+ },
+ "node_modules/resolve-from": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
+ "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
+ "dev": true,
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/reusify": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz",
+ "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==",
+ "dev": true,
+ "engines": {
+ "iojs": ">=1.0.0",
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/rimraf": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+ "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+ "dev": true,
+ "dependencies": {
+ "glob": "^7.1.3"
+ },
+ "bin": {
+ "rimraf": "bin.js"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/run-parallel": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
+ "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ],
+ "dependencies": {
+ "queue-microtask": "^1.2.2"
+ }
+ },
+ "node_modules/semver": {
+ "version": "7.3.8",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
+ "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
+ "dev": true,
+ "dependencies": {
+ "lru-cache": "^6.0.0"
+ },
+ "bin": {
+ "semver": "bin/semver.js"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
+ "dependencies": {
+ "shebang-regex": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-json-comments": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+ "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/supports-color": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+ "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+ "dev": true,
+ "dependencies": {
+ "has-flag": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/text-table": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
+ "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
+ "dev": true
+ },
+ "node_modules/to-regex-range": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+ "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+ "dev": true,
+ "dependencies": {
+ "is-number": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=8.0"
+ }
+ },
+ "node_modules/tslib": {
+ "version": "1.14.1",
+ "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
+ "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==",
+ "dev": true
+ },
+ "node_modules/tsutils": {
+ "version": "3.21.0",
+ "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz",
+ "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==",
+ "dev": true,
+ "dependencies": {
+ "tslib": "^1.8.1"
+ },
+ "engines": {
+ "node": ">= 6"
+ },
+ "peerDependencies": {
+ "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta"
+ }
+ },
+ "node_modules/type-check": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
+ "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
+ "dev": true,
+ "dependencies": {
+ "prelude-ls": "^1.2.1"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "node_modules/type-fest": {
+ "version": "0.20.2",
+ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
+ "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/typescript": {
+ "version": "4.9.5",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz",
+ "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==",
+ "dev": true,
+ "bin": {
+ "tsc": "bin/tsc",
+ "tsserver": "bin/tsserver"
+ },
+ "engines": {
+ "node": ">=4.2.0"
+ }
+ },
+ "node_modules/uri-js": {
+ "version": "4.4.1",
+ "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
+ "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
+ "dev": true,
+ "dependencies": {
+ "punycode": "^2.1.0"
+ }
+ },
+ "node_modules/which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "dependencies": {
+ "isexe": "^2.0.0"
+ },
+ "bin": {
+ "node-which": "bin/node-which"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/word-wrap": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
+ "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
+ "dev": true
+ },
+ "node_modules/yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
+ "dev": true
+ },
+ "node_modules/yocto-queue": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+ "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ }
+ }
+}
diff --git a/v1-com-officielle/public/mp4-muxer-main/package.json b/v1-com-officielle/public/mp4-muxer-main/package.json
new file mode 100644
index 0000000..93350ef
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/package.json
@@ -0,0 +1,59 @@
+{
+ "name": "mp4-muxer",
+ "version": "4.3.3",
+ "description": "MP4 multiplexer in pure TypeScript with support for WebCodecs API, video & audio.",
+ "main": "./build/mp4-muxer.js",
+ "module": "./build/mp4-muxer.mjs",
+ "types": "./build/mp4-muxer.d.ts",
+ "exports": {
+ "types": "./build/mp4-muxer.d.ts",
+ "import": "./build/mp4-muxer.mjs",
+ "require": "./build/mp4-muxer.js"
+ },
+ "files": [
+ "README.md",
+ "package.json",
+ "LICENSE",
+ "build/mp4-muxer.js",
+ "build/mp4-muxer.mjs",
+ "build/mp4-muxer.d.ts"
+ ],
+ "scripts": {
+ "watch": "node build.mjs",
+ "check": "npx tsc --noEmit --skipLibCheck",
+ "lint": "npx eslint src demo build"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/Vanilagy/mp4-muxer.git"
+ },
+ "author": "Vanilagy",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/Vanilagy/mp4-muxer/issues"
+ },
+ "homepage": "https://github.com/Vanilagy/mp4-muxer#readme",
+ "dependencies": {
+ "@types/dom-webcodecs": "^0.1.6",
+ "@types/wicg-file-system-access": "^2020.9.5"
+ },
+ "devDependencies": {
+ "@types/node": "^18.15.0",
+ "@typescript-eslint/eslint-plugin": "^5.54.1",
+ "@typescript-eslint/parser": "^5.54.1",
+ "esbuild": "^0.17.11",
+ "eslint": "^8.36.0",
+ "typescript": "^4.9.5"
+ },
+ "keywords": [
+ "mp4",
+ "fmp4",
+ "muxer",
+ "muxing",
+ "multiplexer",
+ "video",
+ "audio",
+ "media",
+ "webcodecs"
+ ]
+}
diff --git a/v1-com-officielle/public/mp4-muxer-main/paramList4Background.png b/v1-com-officielle/public/mp4-muxer-main/paramList4Background.png
new file mode 100644
index 0000000..64281f1
Binary files /dev/null and b/v1-com-officielle/public/mp4-muxer-main/paramList4Background.png differ
diff --git a/v1-com-officielle/public/mp4-muxer-main/src/box.ts b/v1-com-officielle/public/mp4-muxer-main/src/box.ts
new file mode 100644
index 0000000..48f8037
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/src/box.ts
@@ -0,0 +1,740 @@
+import {
+ AudioTrack,
+ GLOBAL_TIMESCALE,
+ SUPPORTED_AUDIO_CODECS,
+ SUPPORTED_VIDEO_CODECS,
+ Sample,
+ Track,
+ VideoTrack
+} from './muxer';
+import {
+ ascii,
+ i16,
+ i32,
+ intoTimescale,
+ last,
+ lastPresentedSample,
+ u16,
+ u64,
+ u8,
+ u32,
+ fixed_16_16,
+ fixed_8_8,
+ u24,
+ IDENTITY_MATRIX,
+ matrixToBytes,
+ rotationMatrix,
+ isU32,
+ TransformationMatrix
+} from './misc';
+
+export interface Box {
+ type: string,
+ contents?: Uint8Array,
+ children?: Box[],
+ size?: number,
+ largeSize?: boolean
+}
+
+type NestedNumberArray = (number | NestedNumberArray)[];
+
+export const box = (type: string, contents?: NestedNumberArray, children?: Box[]): Box => ({
+ type,
+ contents: contents && new Uint8Array(contents.flat(10) as number[]),
+ children
+});
+
+/** A FullBox always starts with a version byte, followed by three flag bytes. */
+export const fullBox = (
+ type: string,
+ version: number,
+ flags: number,
+ contents?: NestedNumberArray,
+ children?: Box[]
+) => box(
+ type,
+ [u8(version), u24(flags), contents ?? []],
+ children
+);
+
+/**
+ * File Type Compatibility Box: Allows the reader to determine whether this is a type of file that the
+ * reader understands.
+ */
+export const ftyp = (details: {
+ holdsAvc: boolean,
+ fragmented: boolean
+}) => {
+ // You can find the full logic for this at
+ // https://github.com/FFmpeg/FFmpeg/blob/de2fb43e785773738c660cdafb9309b1ef1bc80d/libavformat/movenc.c#L5518
+ // Obviously, this lib only needs a small subset of that logic.
+
+ let minorVersion = 0x200;
+
+ if (details.fragmented) return box('ftyp', [
+ ascii('iso5'), // Major brand
+ u32(minorVersion), // Minor version
+ // Compatible brands
+ ascii('iso5'),
+ ascii('iso6'),
+ ascii('mp41')
+ ]);
+
+ return box('ftyp', [
+ ascii('isom'), // Major brand
+ u32(minorVersion), // Minor version
+ // Compatible brands
+ ascii('isom'),
+ details.holdsAvc ? ascii('avc1') : [],
+ ascii('mp41')
+ ]);
+};
+
+/** Movie Sample Data Box. Contains the actual frames/samples of the media. */
+export const mdat = (reserveLargeSize: boolean): Box => ({ type: 'mdat', largeSize: reserveLargeSize });
+
+/** Free Space Box: A box that designates unused space in the movie data file. */
+export const free = (size: number): Box => ({ type: 'free', size });
+
+/**
+ * Movie Box: Used to specify the information that defines a movie - that is, the information that allows
+ * an application to interpret the sample data that is stored elsewhere.
+ */
+export const moov = (tracks: Track[], creationTime: number, fragmented = false) => box('moov', null, [
+ mvhd(creationTime, tracks),
+ ...tracks.map(x => trak(x, creationTime)),
+ fragmented ? mvex(tracks) : null
+]);
+
+/** Movie Header Box: Used to specify the characteristics of the entire movie, such as timescale and duration. */
+export const mvhd = (
+ creationTime: number,
+ tracks: Track[]
+) => {
+ let duration = intoTimescale(Math.max(
+ 0,
+ ...tracks.
+ filter(x => x.samples.length > 0).
+ map(x => {
+ const lastSample = lastPresentedSample(x.samples);
+ return lastSample.presentationTimestamp + lastSample.duration;
+ })
+ ), GLOBAL_TIMESCALE);
+ let nextTrackId = Math.max(...tracks.map(x => x.id)) + 1;
+
+ // Conditionally use u64 if u32 isn't enough
+ let needsU64 = !isU32(creationTime) || !isU32(duration);
+ let u32OrU64 = needsU64 ? u64 : u32;
+
+ return fullBox('mvhd', +needsU64, 0, [
+ u32OrU64(creationTime), // Creation time
+ u32OrU64(creationTime), // Modification time
+ u32(GLOBAL_TIMESCALE), // Timescale
+ u32OrU64(duration), // Duration
+ fixed_16_16(1), // Preferred rate
+ fixed_8_8(1), // Preferred volume
+ Array(10).fill(0), // Reserved
+ matrixToBytes(IDENTITY_MATRIX), // Matrix
+ Array(24).fill(0), // Pre-defined
+ u32(nextTrackId) // Next track ID
+ ]);
+};
+
+/**
+ * Track Box: Defines a single track of a movie. A movie may consist of one or more tracks. Each track is
+ * independent of the other tracks in the movie and carries its own temporal and spatial information. Each Track Box
+ * contains its associated Media Box.
+ */
+export const trak = (track: Track, creationTime: number) => box('trak', null, [
+ tkhd(track, creationTime),
+ mdia(track, creationTime)
+]);
+
+/** Track Header Box: Specifies the characteristics of a single track within a movie. */
+export const tkhd = (
+ track: Track,
+ creationTime: number
+) => {
+ let lastSample = lastPresentedSample(track.samples);
+ let durationInGlobalTimescale = intoTimescale(
+ lastSample ? lastSample.presentationTimestamp + lastSample.duration : 0,
+ GLOBAL_TIMESCALE
+ );
+
+ let needsU64 = !isU32(creationTime) || !isU32(durationInGlobalTimescale);
+ let u32OrU64 = needsU64 ? u64 : u32;
+
+ let matrix: TransformationMatrix;
+ if (track.info.type === 'video') {
+ matrix = typeof track.info.rotation === 'number' ? rotationMatrix(track.info.rotation) : track.info.rotation;
+ } else {
+ matrix = IDENTITY_MATRIX;
+ }
+
+ return fullBox('tkhd', +needsU64, 3, [
+ u32OrU64(creationTime), // Creation time
+ u32OrU64(creationTime), // Modification time
+ u32(track.id), // Track ID
+ u32(0), // Reserved
+ u32OrU64(durationInGlobalTimescale), // Duration
+ Array(8).fill(0), // Reserved
+ u16(0), // Layer
+ u16(0), // Alternate group
+ fixed_8_8(track.info.type === 'audio' ? 1 : 0), // Volume
+ u16(0), // Reserved
+ matrixToBytes(matrix), // Matrix
+ fixed_16_16(track.info.type === 'video' ? track.info.width : 0), // Track width
+ fixed_16_16(track.info.type === 'video' ? track.info.height : 0) // Track height
+ ]);
+};
+
+/** Media Box: Describes and define a track's media type and sample data. */
+export const mdia = (track: Track, creationTime: number) => box('mdia', null, [
+ mdhd(track, creationTime),
+ hdlr(track.info.type === 'video' ? 'vide' : 'soun'),
+ minf(track)
+]);
+
+/** Media Header Box: Specifies the characteristics of a media, including timescale and duration. */
+export const mdhd = (
+ track: Track,
+ creationTime: number
+) => {
+ let lastSample = lastPresentedSample(track.samples);
+ let localDuration = intoTimescale(
+ lastSample ? lastSample.presentationTimestamp + lastSample.duration : 0,
+ track.timescale
+ );
+
+ let needsU64 = !isU32(creationTime) || !isU32(localDuration);
+ let u32OrU64 = needsU64 ? u64 : u32;
+
+ return fullBox('mdhd', +needsU64, 0, [
+ u32OrU64(creationTime), // Creation time
+ u32OrU64(creationTime), // Modification time
+ u32(track.timescale), // Timescale
+ u32OrU64(localDuration), // Duration
+ u16(0b01010101_11000100), // Language ("und", undetermined)
+ u16(0) // Quality
+ ]);
+};
+
+/** Handler Reference Box: Specifies the media handler component that is to be used to interpret the media's data. */
+export const hdlr = (componentSubtype: string) => fullBox('hdlr', 0, 0, [
+ ascii('mhlr'), // Component type
+ ascii(componentSubtype), // Component subtype
+ u32(0), // Component manufacturer
+ u32(0), // Component flags
+ u32(0), // Component flags mask
+ ascii('mp4-muxer-hdlr', true) // Component name
+]);
+
+/**
+ * Media Information Box: Stores handler-specific information for a track's media data. The media handler uses this
+ * information to map from media time to media data and to process the media data.
+ */
+export const minf = (track: Track) => box('minf', null, [
+ track.info.type === 'video' ? vmhd() : smhd(),
+ dinf(),
+ stbl(track)
+]);
+
+/** Video Media Information Header Box: Defines specific color and graphics mode information. */
+export const vmhd = () => fullBox('vmhd', 0, 1, [
+ u16(0), // Graphics mode
+ u16(0), // Opcolor R
+ u16(0), // Opcolor G
+ u16(0) // Opcolor B
+]);
+
+/** Sound Media Information Header Box: Stores the sound media's control information, such as balance. */
+export const smhd = () => fullBox('smhd', 0, 0, [
+ u16(0), // Balance
+ u16(0) // Reserved
+]);
+
+/**
+ * Data Information Box: Contains information specifying the data handler component that provides access to the
+ * media data. The data handler component uses the Data Information Box to interpret the media's data.
+ */
+export const dinf = () => box('dinf', null, [
+ dref()
+]);
+
+/**
+ * Data Reference Box: Contains tabular data that instructs the data handler component how to access the media's data.
+ */
+export const dref = () => fullBox('dref', 0, 0, [
+ u32(1) // Entry count
+], [
+ url()
+]);
+
+export const url = () => fullBox('url ', 0, 1); // Self-reference flag enabled
+
+/**
+ * Sample Table Box: Contains information for converting from media time to sample number to sample location. This box
+ * also indicates how to interpret the sample (for example, whether to decompress the video data and, if so, how).
+ */
+export const stbl = (track: Track) => {
+ const needsCtts = track.compositionTimeOffsetTable.length > 1 ||
+ track.compositionTimeOffsetTable.some((x) => x.sampleCompositionTimeOffset !== 0);
+
+ return box('stbl', null, [
+ stsd(track),
+ stts(track),
+ stss(track),
+ stsc(track),
+ stsz(track),
+ stco(track),
+ needsCtts ? ctts(track) : null
+ ]);
+};
+
+/**
+ * Sample Description Box: Stores information that allows you to decode samples in the media. The data stored in the
+ * sample description varies, depending on the media type.
+ */
+export const stsd = (track: Track) => fullBox('stsd', 0, 0, [
+ u32(1) // Entry count
+], [
+ track.info.type === 'video'
+ ? videoSampleDescription(
+ VIDEO_CODEC_TO_BOX_NAME[track.info.codec],
+ track as VideoTrack
+ )
+ : soundSampleDescription(
+ AUDIO_CODEC_TO_BOX_NAME[track.info.codec],
+ track as AudioTrack
+ )
+]);
+
+/** Video Sample Description Box: Contains information that defines how to interpret video media data. */
+export const videoSampleDescription = (
+ compressionType: string,
+ track: VideoTrack
+) => box(compressionType, [
+ Array(6).fill(0), // Reserved
+ u16(1), // Data reference index
+ u16(0), // Pre-defined
+ u16(0), // Reserved
+ Array(12).fill(0), // Pre-defined
+ u16(track.info.width), // Width
+ u16(track.info.height), // Height
+ u32(0x00480000), // Horizontal resolution
+ u32(0x00480000), // Vertical resolution
+ u32(0), // Reserved
+ u16(1), // Frame count
+ Array(32).fill(0), // Compressor name
+ u16(0x0018), // Depth
+ i16(0xffff) // Pre-defined
+], [
+ VIDEO_CODEC_TO_CONFIGURATION_BOX[track.info.codec](track)
+]);
+
+/** AVC Configuration Box: Provides additional information to the decoder. */
+export const avcC = (track: VideoTrack) => track.info.decoderConfig && box('avcC', [
+ // For AVC, description is an AVCDecoderConfigurationRecord, so nothing else to do here
+ ...new Uint8Array(track.info.decoderConfig.description as ArrayBuffer)
+]);
+
+/** HEVC Configuration Box: Provides additional information to the decoder. */
+export const hvcC = (track: VideoTrack) => track.info.decoderConfig && box('hvcC', [
+ // For HEVC, description is a HEVCDecoderConfigurationRecord, so nothing else to do here
+ ...new Uint8Array(track.info.decoderConfig.description as ArrayBuffer)
+]);
+
+/** VP9 Configuration Box: Provides additional information to the decoder. */
+export const vpcC = (track: VideoTrack) => {
+ // Reference: https://www.webmproject.org/vp9/mp4/
+
+ if (!track.info.decoderConfig) {
+ return null;
+ }
+
+ let decoderConfig = track.info.decoderConfig;
+ if (!decoderConfig.colorSpace) {
+ throw new Error(`'colorSpace' is required in the decoder config for VP9.`);
+ }
+
+ let parts = decoderConfig.codec.split('.');
+ let profile = Number(parts[1]);
+ let level = Number(parts[2]);
+
+ let bitDepth = Number(parts[3]);
+ let chromaSubsampling = 0;
+ let thirdByte = (bitDepth << 4) + (chromaSubsampling << 1) + Number(decoderConfig.colorSpace.fullRange);
+
+ // Set all to undetermined. We could determine them using the codec color space info, but there's no need.
+ let colourPrimaries = 2;
+ let transferCharacteristics = 2;
+ let matrixCoefficients = 2;
+
+ return fullBox('vpcC', 1, 0, [
+ u8(profile), // Profile
+ u8(level), // Level
+ u8(thirdByte), // Bit depth, chroma subsampling, full range
+ u8(colourPrimaries), // Colour primaries
+ u8(transferCharacteristics), // Transfer characteristics
+ u8(matrixCoefficients), // Matrix coefficients
+ u16(0) // Codec initialization data size
+ ]);
+};
+
+/** AV1 Configuration Box: Provides additional information to the decoder. */
+export const av1C = () => {
+ // Reference: https://aomediacodec.github.io/av1-isobmff/
+
+ let marker = 1;
+ let version = 1;
+ let firstByte = (marker << 7) + version;
+
+ // The box contents are not correct like this, but its length is. Getting the values for the last three bytes
+ // requires peeking into the bitstream of the coded chunks. Might come back later.
+ return box('av1C', [
+ firstByte,
+ 0,
+ 0,
+ 0
+ ]);
+};
+
+/** Sound Sample Description Box: Contains information that defines how to interpret sound media data. */
+export const soundSampleDescription = (
+ compressionType: string,
+ track: AudioTrack
+) => box(compressionType, [
+ Array(6).fill(0), // Reserved
+ u16(1), // Data reference index
+ u16(0), // Version
+ u16(0), // Revision level
+ u32(0), // Vendor
+ u16(track.info.numberOfChannels), // Number of channels
+ u16(16), // Sample size (bits)
+ u16(0), // Compression ID
+ u16(0), // Packet size
+ fixed_16_16(track.info.sampleRate) // Sample rate
+], [
+ // Codec-specific configuration child box (esds for AAC, dOps for Opus - see the map at the bottom of this file)
+ AUDIO_CODEC_TO_CONFIGURATION_BOX[track.info.codec](track)
+]);
+
+/** MPEG-4 Elementary Stream Descriptor Box. */
+export const esds = (track: Track) => {
+ // Raw codec-specific configuration (AudioSpecificConfig) as provided by the encoder's decoderConfig
+ let description = new Uint8Array(track.info.decoderConfig.description as ArrayBuffer);
+
+ // NOTE(review): the single-byte descriptor lengths below (0x20 + n, 0x12 + n) silently overflow for
+ // descriptions longer than 223 bytes - confirm inputs stay small.
+ return fullBox('esds', 0, 0, [
+ // https://stackoverflow.com/a/54803118
+ u32(0x03808080), // TAG(3) = Object Descriptor ([2])
+ u8(0x20 + description.byteLength), // length of this OD (which includes the next 2 tags)
+ u16(1), // ES_ID = 1
+ u8(0x00), // flags etc = 0
+ u32(0x04808080), // TAG(4) = ES Descriptor ([2]) embedded in above OD
+ u8(0x12 + description.byteLength), // length of this ESD
+ u8(0x40), // MPEG-4 Audio
+ u8(0x15), // stream type(6bits)=5 audio, flags(2bits)=1
+ u24(0), // 24bit buffer size
+ u32(0x0001FC17), // max bitrate
+ u32(0x0001FC17), // avg bitrate
+ u32(0x05808080), // TAG(5) = ASC ([2],[3]) embedded in above OD
+ u8(description.byteLength), // length
+ ...description,
+ u32(0x06808080), // TAG(6)
+ u8(0x01), // length
+ u8(0x02) // data
+ ]);
+};
+
+/** Opus Specific Box. */
+export const dOps = (track: AudioTrack) => box('dOps', [
+ u8(0), // Version
+ u8(track.info.numberOfChannels), // OutputChannelCount
+ u16(3840), // PreSkip, should be at least 80 milliseconds worth of playback, measured in 48000 Hz samples
+ u32(track.info.sampleRate), // InputSampleRate
+ fixed_8_8(0), // OutputGain
+ u8(0) // ChannelMappingFamily
+]);
+
+/**
+ * Time-To-Sample Box: Stores duration information for a media's samples, providing a mapping from a time in a media
+ * to the corresponding data sample. The table is compact, meaning that consecutive samples with the same time delta
+ * will be grouped.
+ */
+export const stts = (track: Track) => {
+ // The entries mirror track.timeToSampleTable, which is maintained incrementally as samples are added
+ return fullBox('stts', 0, 0, [
+ u32(track.timeToSampleTable.length), // Number of entries
+ track.timeToSampleTable.map(x => [ // Time-to-sample table
+ u32(x.sampleCount), // Sample count
+ u32(x.sampleDelta) // Sample duration
+ ])
+ ]);
+};
+
+/** Sync Sample Box: Identifies the key frames in the media, marking the random access points within a stream. */
+export const stss = (track: Track) => {
+ if (track.samples.every(x => x.type === 'key')) return null; // No stss box -> every frame is a key frame
+
+ let keySamples = [...track.samples.entries()].filter(([, sample]) => sample.type === 'key');
+ return fullBox('stss', 0, 0, [
+ u32(keySamples.length), // Number of entries
+ keySamples.map(([index]) => u32(index + 1)) // Sync sample table
+ ]);
+};
+
+/**
+ * Sample-To-Chunk Box: As samples are added to a media, they are collected into chunks that allow optimized data
+ * access. A chunk contains one or more samples. Chunks in a media may have different sizes, and the samples within a
+ * chunk may have different sizes. The Sample-To-Chunk Box stores chunk information for the samples in a media, stored
+ * in a compactly-coded fashion.
+ */
+export const stsc = (track: Track) => {
+ return fullBox('stsc', 0, 0, [
+ u32(track.compactlyCodedChunkTable.length), // Number of entries
+ track.compactlyCodedChunkTable.map(x => [ // Sample-to-chunk table
+ u32(x.firstChunk), // First chunk
+ u32(x.samplesPerChunk), // Samples per chunk
+ u32(1) // Sample description index
+ ])
+ ]);
+};
+
+/** Sample Size Box: Specifies the byte size of each sample in the media. */
+export const stsz = (track: Track) => fullBox('stsz', 0, 0, [
+ u32(0), // Sample size (0 means non-constant size)
+ u32(track.samples.length), // Number of entries
+ track.samples.map(x => u32(x.size)) // Sample size table
+]);
+
+/** Chunk Offset Box: Identifies the location of each chunk of data in the media's data stream, relative to the file. */
+export const stco = (track: Track) => {
+ // Despite this function's name, a 64-bit 'co64' box is emitted instead of 'stco' whenever the last
+ // chunk's offset no longer fits into an unsigned 32-bit integer
+ if (track.finalizedChunks.length > 0 && last(track.finalizedChunks).offset >= 2**32) {
+ // If the file is large, use the co64 box
+ return fullBox('co64', 0, 0, [
+ u32(track.finalizedChunks.length), // Number of entries
+ track.finalizedChunks.map(x => u64(x.offset)) // Chunk offset table
+ ]);
+ }
+
+ return fullBox('stco', 0, 0, [
+ u32(track.finalizedChunks.length), // Number of entries
+ track.finalizedChunks.map(x => u32(x.offset)) // Chunk offset table
+ ]);
+};
+
+/** Composition Time to Sample Box: Stores composition time offset information (PTS-DTS) for a
+ * media's samples. The table is compact, meaning that consecutive samples with the same time
+ * composition time offset will be grouped. */
+export const ctts = (track: Track) => {
+ return fullBox('ctts', 0, 0, [
+ u32(track.compositionTimeOffsetTable.length), // Number of entries
+ track.compositionTimeOffsetTable.map(x => [ // Time-to-sample table
+ u32(x.sampleCount), // Sample count
+ u32(x.sampleCompositionTimeOffset) // Sample offset
+ ])
+ ]);
+};
+
+/**
+ * Movie Extends Box: This box signals to readers that the file is fragmented. Contains a single Track Extends Box
+ * for each track in the movie.
+ */
+export const mvex = (tracks: Track[]) => box('mvex', null, tracks.map(trex));
+
+/** Track Extends Box: Contains the default values used by the movie fragments. */
+export const trex = (track: Track) => {
+ // All defaults are zeroed here; the actual per-fragment defaults are carried by each fragment's tfhd box
+ return fullBox('trex', 0, 0, [
+ u32(track.id), // Track ID
+ u32(1), // Default sample description index
+ u32(0), // Default sample duration
+ u32(0), // Default sample size
+ u32(0) // Default sample flags
+ ]);
+};
+
+/**
+ * Movie Fragment Box: The movie fragments extend the presentation in time. They provide the information that would
+ * previously have been in the Movie Box.
+ */
+export const moof = (sequenceNumber: number, tracks: Track[]) => box('moof', null, [
+    mfhd(sequenceNumber), // One header per fragment, carrying its sequence number...
+    ...tracks.map(traf) // ...followed by one Track Fragment Box per track
+]);
+
+/** Movie Fragment Header Box: Contains a sequence number as a safety check. */
+export const mfhd = (sequenceNumber: number) => fullBox('mfhd', 0, 0, [
+    u32(sequenceNumber) // Sequence number
+]);
+
+/** Computes the 32-bit sample flags value used in the fragment boxes (tfhd/trun) for a single sample. */
+const fragmentSampleFlags = (sample: Sample) => {
+    let isDeltaSample = sample.type === 'delta';
+
+    // Highest byte carries the sample_depends_on field: 1 = this sample depends on others (delta frame),
+    // 2 = it does not depend on others (key frame)
+    let dependencyByte = isDeltaSample ? 1 : 2;
+    // The next byte's lowest bit is the "non-sync sample" flag, set for delta frames only
+    let nonSyncByte = isDeltaSample ? 1 : 0;
+
+    // Note that there are a lot of other flags to potentially set here, but most are irrelevant / non-necessary
+    return dependencyByte << 24 | nonSyncByte << 16;
+};
+
+/** Track Fragment Box */
+export const traf = (track: Track) => {
+ return box('traf', null, [
+ tfhd(track),
+ tfdt(track),
+ trun(track)
+ ]);
+};
+
+/** Track Fragment Header Box: Provides a reference to the extended track, and flags. */
+export const tfhd = (track: Track) => {
+ let tfFlags = 0;
+ tfFlags |= 0x00008; // Default sample duration present
+ tfFlags |= 0x00010; // Default sample size present
+ tfFlags |= 0x00020; // Default sample flags present
+ tfFlags |= 0x20000; // Default base is moof
+
+ // Prefer the second sample over the first one, as the first one is a sync sample and therefore the "odd one out"
+ // (falls back to the first sample when the chunk holds only one)
+ let referenceSample = track.currentChunk.samples[1] ?? track.currentChunk.samples[0];
+ let referenceSampleInfo = {
+ duration: referenceSample.timescaleUnitsToNextSample,
+ size: referenceSample.size,
+ flags: fragmentSampleFlags(referenceSample)
+ };
+
+ return fullBox('tfhd', 0, tfFlags, [
+ u32(track.id), // Track ID
+ u32(referenceSampleInfo.duration), // Default sample duration
+ u32(referenceSampleInfo.size), // Default sample size
+ u32(referenceSampleInfo.flags) // Default sample flags
+ ]);
+};
+
+/**
+ * Track Fragment Decode Time Box: Provides the absolute decode time of the first sample of the fragment. This is
+ * useful for performing random access on the media file.
+ */
+export const tfdt = (track: Track) => {
+ // Version 1 is used so the decode time can be written as a 64-bit value
+ return fullBox('tfdt', 1, 0, [
+ u64(intoTimescale(track.currentChunk.startTimestamp, track.timescale)) // Base Media Decode Time
+ ]);
+};
+
+/** Track Run Box: Specifies a run of contiguous samples for a given track. */
+export const trun = (track: Track) => {
+ let allSampleDurations = track.currentChunk.samples.map(x => x.timescaleUnitsToNextSample);
+ let allSampleSizes = track.currentChunk.samples.map(x => x.size);
+ let allSampleFlags = track.currentChunk.samples.map(fragmentSampleFlags);
+ let allSampleCompositionTimeOffsets = track.currentChunk.samples.
+ map(x => intoTimescale(x.presentationTimestamp - x.decodeTimestamp, track.timescale));
+
+ // Each per-sample field is only written out if it actually varies; otherwise the tfhd defaults apply
+ let uniqueSampleDurations = new Set(allSampleDurations);
+ let uniqueSampleSizes = new Set(allSampleSizes);
+ let uniqueSampleFlags = new Set(allSampleFlags);
+ let uniqueSampleCompositionTimeOffsets = new Set(allSampleCompositionTimeOffsets);
+
+ // If exactly the first sample's flags differ (the typical key-frame-at-fragment-start case), the compact
+ // first-sample-flags field is used instead of a full per-sample flags array
+ let firstSampleFlagsPresent = uniqueSampleFlags.size === 2 && allSampleFlags[0] !== allSampleFlags[1];
+ let sampleDurationPresent = uniqueSampleDurations.size > 1;
+ let sampleSizePresent = uniqueSampleSizes.size > 1;
+ let sampleFlagsPresent = !firstSampleFlagsPresent && uniqueSampleFlags.size > 1;
+ let sampleCompositionTimeOffsetsPresent =
+ uniqueSampleCompositionTimeOffsets.size > 1 || [...uniqueSampleCompositionTimeOffsets].some(x => x !== 0);
+
+ let flags = 0;
+ flags |= 0x0001; // Data offset present
+ flags |= 0x0004 * +firstSampleFlagsPresent; // First sample flags present
+ flags |= 0x0100 * +sampleDurationPresent; // Sample duration present
+ flags |= 0x0200 * +sampleSizePresent; // Sample size present
+ flags |= 0x0400 * +sampleFlagsPresent; // Sample flags present
+ flags |= 0x0800 * +sampleCompositionTimeOffsetsPresent; // Sample composition time offsets present
+
+ return fullBox('trun', 1, flags, [
+ u32(track.currentChunk.samples.length), // Sample count
+ // Chunk offsets are optional on the Chunk type; (offset - moofOffset) is NaN while they are unset,
+ // in which case `|| 0` writes a placeholder of 0 - presumably patched later by the caller, confirm
+ u32(track.currentChunk.offset - track.currentChunk.moofOffset || 0), // Data offset
+ firstSampleFlagsPresent ? u32(allSampleFlags[0]) : [],
+ track.currentChunk.samples.map((_, i) => [
+ sampleDurationPresent ? u32(allSampleDurations[i]) : [], // Sample duration
+ sampleSizePresent ? u32(allSampleSizes[i]) : [], // Sample size
+ sampleFlagsPresent ? u32(allSampleFlags[i]) : [], // Sample flags
+ // Sample composition time offsets
+ sampleCompositionTimeOffsetsPresent ? i32(allSampleCompositionTimeOffsets[i]) : []
+ ])
+ ]);
+};
+
+/**
+ * Movie Fragment Random Access Box: For each track, provides pointers to sync samples within the file
+ * for random access.
+ */
+export const mfra = (tracks: Track[]) => box('mfra', null, [...tracks.map(tfra), mfro()]);
+
+/** Track Fragment Random Access Box: Provides pointers to sync samples within the file for random access. */
+// Note: the trackIndex parameter is supplied by Array.prototype.map at the call site in mfra above
+export const tfra = (track: Track, trackIndex: number) => {
+ let version = 1; // Using this version allows us to use 64-bit time and offset values
+
+ return fullBox('tfra', version, 0, [
+ u32(track.id), // Track ID
+ u32(0b111111), // This specifies that traf number, trun number and sample number are 32-bit ints
+ u32(track.finalizedChunks.length), // Number of entries
+ track.finalizedChunks.map(chunk => [
+ u64(intoTimescale(chunk.startTimestamp, track.timescale)), // Time
+ u64(chunk.moofOffset), // moof offset
+ u32(trackIndex + 1), // traf number
+ u32(1), // trun number
+ u32(1) // Sample number
+ ])
+ ]);
+};
+
+/**
+ * Movie Fragment Random Access Offset Box: Provides the size of the enclosing mfra box. This box can be used by readers
+ * to quickly locate the mfra box by searching from the end of the file.
+ */
+export const mfro = () => {
+ return fullBox('mfro', 0, 0, [
+ // This value needs to be overwritten manually from the outside, where the actual size of the enclosing mfra box
+ // is known
+ u32(0) // Size
+ ]);
+};
+
+// The generic arguments of these Record types were missing (leaving bare `Record`, which does not
+// compile); restored with types compatible with the lookups performed above.
+
+/** Maps each supported video codec to the four-character sample entry box name. */
+const VIDEO_CODEC_TO_BOX_NAME: Record<string, string> = {
+    'avc': 'avc1',
+    'hevc': 'hvc1',
+    'vp9': 'vp09',
+    'av1': 'av01'
+};
+
+/** Maps each supported video codec to the function that builds its codec-specific configuration box. */
+const VIDEO_CODEC_TO_CONFIGURATION_BOX: Record<string, (track: VideoTrack) => Box> = {
+    'avc': avcC,
+    'hevc': hvcC,
+    'vp9': vpcC,
+    'av1': av1C
+};
+
+/** Maps each supported audio codec to the four-character sample entry box name. */
+const AUDIO_CODEC_TO_BOX_NAME: Record<string, string> = {
+    'aac': 'mp4a',
+    'opus': 'Opus'
+};
+
+/** Maps each supported audio codec to the function that builds its codec-specific configuration box. */
+const AUDIO_CODEC_TO_CONFIGURATION_BOX: Record<string, (track: AudioTrack) => Box> = {
+    'aac': esds,
+    'opus': dOps
+};
diff --git a/v1-com-officielle/public/mp4-muxer-main/src/index.ts b/v1-com-officielle/public/mp4-muxer-main/src/index.ts
new file mode 100644
index 0000000..0c1811b
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/src/index.ts
@@ -0,0 +1,2 @@
+export { Muxer } from './muxer';
+export * from './target';
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/src/misc.ts b/v1-com-officielle/public/mp4-muxer-main/src/misc.ts
new file mode 100644
index 0000000..f08fa96
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/src/misc.ts
@@ -0,0 +1,117 @@
+import { Sample } from './muxer';
+
+// Shared 8-byte scratch buffer used by all numeric serializers below. This avoids an allocation per
+// call, at the cost of making these helpers non-reentrant.
+let bytes = new Uint8Array(8);
+let view = new DataView(bytes.buffer);
+
+// Each helper returns the big-endian bytes of the given value as a plain number array.
+
+// Unsigned 8-bit; the double modulo wraps negative values into [0, 256)
+export const u8 = (value: number) => {
+ return [(value % 0x100 + 0x100) % 0x100];
+};
+
+// Unsigned 16-bit, big-endian
+export const u16 = (value: number) => {
+ view.setUint16(0, value, false);
+ return [bytes[0], bytes[1]];
+};
+
+// Signed 16-bit, big-endian
+export const i16 = (value: number) => {
+ view.setInt16(0, value, false);
+ return [bytes[0], bytes[1]];
+};
+
+// Unsigned 24-bit, big-endian (writes 32 bits, keeps the low three bytes)
+export const u24 = (value: number) => {
+ view.setUint32(0, value, false);
+ return [bytes[1], bytes[2], bytes[3]];
+};
+
+// Unsigned 32-bit, big-endian
+export const u32 = (value: number) => {
+ view.setUint32(0, value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
+};
+
+// Signed 32-bit, big-endian
+export const i32 = (value: number) => {
+ view.setInt32(0, value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
+};
+
+// Unsigned 64-bit, big-endian, split into two 32-bit halves (setUint32 truncates the low half modulo 2^32)
+export const u64 = (value: number) => {
+ view.setUint32(0, Math.floor(value / 2**32), false);
+ view.setUint32(4, value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7]];
+};
+
+// Signed fixed-point 8.8 (8 integer bits, 8 fractional bits)
+export const fixed_8_8 = (value: number) => {
+ view.setInt16(0, 2**8 * value, false);
+ return [bytes[0], bytes[1]];
+};
+
+// Signed fixed-point 16.16
+export const fixed_16_16 = (value: number) => {
+ view.setInt32(0, 2**16 * value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
+};
+
+// Signed fixed-point 2.30
+export const fixed_2_30 = (value: number) => {
+ view.setInt32(0, 2**30 * value, false);
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
+};
+
+/** Encodes a string as an array of byte values (one per UTF-16 code unit), optionally null-terminated. */
+export const ascii = (text: string, nullTerminated = false) => {
+    let codes = text.split('').map(char => char.charCodeAt(0));
+    if (nullTerminated) codes.push(0x00);
+    return codes;
+};
+
+/**
+ * Returns the last element of an array, or `undefined` for an empty array (a falsy `arr` is returned
+ * as-is). The generic parameter `<T>` was missing from the declaration, leaving `T` undeclared; restored.
+ */
+export const last = <T>(arr: T[]) => {
+    return arr && arr[arr.length - 1];
+};
+
+/** Returns the sample with the highest presentation timestamp, or `undefined` for an empty list. */
+export const lastPresentedSample = (samples: Sample[]): Sample | undefined => {
+    let latest: Sample | undefined = undefined;
+
+    for (let candidate of samples) {
+        if (latest === undefined || candidate.presentationTimestamp > latest.presentationTimestamp) {
+            latest = candidate;
+        }
+    }
+
+    return latest;
+};
+
+/** Converts a duration in seconds into timescale units, rounding to the nearest integer by default. */
+export const intoTimescale = (timeInSeconds: number, timescale: number, round = true) => {
+    let scaled = timeInSeconds * timescale;
+    if (!round) return scaled;
+    return Math.round(scaled);
+};
+
+// 3x3 transformation matrix in row-major order, as stored in MP4 track/movie headers
+export type TransformationMatrix = [number, number, number, number, number, number, number, number, number];
+
+/** Builds the MP4 transformation matrix for a clockwise rotation by the given angle (degrees). */
+export const rotationMatrix = (rotationInDegrees: number): TransformationMatrix => {
+ let theta = rotationInDegrees * (Math.PI / 180);
+ let cosTheta = Math.cos(theta);
+ let sinTheta = Math.sin(theta);
+
+ // Matrices are post-multiplied in MP4, meaning this is the transpose of your typical rotation matrix
+ return [
+ cosTheta, sinTheta, 0,
+ -sinTheta, cosTheta, 0,
+ 0, 0, 1
+ ];
+};
+
+// The no-op transform (0-degree rotation)
+export const IDENTITY_MATRIX = rotationMatrix(0);
+
+/** Serializes a transformation matrix into its MP4 byte representation (16.16 fixed-point, last column 2.30). */
+export const matrixToBytes = (matrix: TransformationMatrix) => {
+ return [
+ fixed_16_16(matrix[0]), fixed_16_16(matrix[1]), fixed_2_30(matrix[2]),
+ fixed_16_16(matrix[3]), fixed_16_16(matrix[4]), fixed_2_30(matrix[5]),
+ fixed_16_16(matrix[6]), fixed_16_16(matrix[7]), fixed_2_30(matrix[8])
+ ];
+};
+
+/**
+ * Recursively clones plain objects and arrays; primitives and falsy values are returned as-is.
+ * NOTE(review): class instances (Date, Map, typed arrays, ...) are flattened into plain objects by
+ * Object.entries - only use this on JSON-like data.
+ * The generic parameter `<T>` was missing from the declaration, leaving `T` undeclared; restored.
+ */
+export const deepClone = <T>(x: T): T => {
+    if (!x) return x;
+    if (typeof x !== 'object') return x;
+    if (Array.isArray(x)) return x.map(deepClone) as T;
+    return Object.fromEntries(Object.entries(x).map(([key, value]) => [key, deepClone(value)])) as T;
+};
+
+/** Checks whether a number lies within the unsigned 32-bit integer range [0, 2^32). */
+export const isU32 = (value: number) => value >= 0 && value < 2 ** 32;
diff --git a/v1-com-officielle/public/mp4-muxer-main/src/muxer.ts b/v1-com-officielle/public/mp4-muxer-main/src/muxer.ts
new file mode 100644
index 0000000..a99bacd
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/src/muxer.ts
@@ -0,0 +1,842 @@
+import { Box, free, ftyp, mdat, mfra, moof, moov } from './box';
+import { deepClone, intoTimescale, last, TransformationMatrix } from './misc';
+import { ArrayBufferTarget, FileSystemWritableFileStreamTarget, StreamTarget, Target } from './target';
+import {
+ Writer,
+ ArrayBufferTargetWriter,
+ StreamTargetWriter,
+ ChunkedStreamTargetWriter,
+ FileSystemWritableFileStreamTargetWriter
+} from './writer';
+
+export const GLOBAL_TIMESCALE = 1000; // Movie-level timescale (units per second)
+export const SUPPORTED_VIDEO_CODECS = ['avc', 'hevc', 'vp9', 'av1'] as const;
+export const SUPPORTED_AUDIO_CODECS = ['aac', 'opus'] as const;
+const TIMESTAMP_OFFSET = 2_082_844_800; // Seconds between Jan 1 1904 and Jan 1 1970
+const FIRST_TIMESTAMP_BEHAVIORS = ['strict', 'offset', 'cross-track-offset'] as const;
+
+/** Options describing the video track to be muxed. */
+interface VideoOptions {
+ codec: typeof SUPPORTED_VIDEO_CODECS[number],
+ width: number,
+ height: number,
+ rotation?: 0 | 90 | 180 | 270 | TransformationMatrix
+}
+
+/** Options describing the audio track to be muxed. */
+interface AudioOptions {
+ codec: typeof SUPPORTED_AUDIO_CODECS[number],
+ numberOfChannels: number,
+ sampleRate: number
+}
+
+/**
+ * Options accepted by the Muxer constructor. The generic parameter list was missing (the declaration
+ * referenced an undeclared `T`); restored with a default so existing bare `Mp4MuxerOptions` references
+ * remain valid.
+ */
+type Mp4MuxerOptions<T extends Target = Target> = {
+    target: T,
+    video?: VideoOptions,
+    audio?: AudioOptions,
+    fastStart: false | 'in-memory' | 'fragmented' | {
+        expectedVideoChunks?: number,
+        expectedAudioChunks?: number
+    },
+    firstTimestampBehavior?: typeof FIRST_TIMESTAMP_BEHAVIORS[number]
+};
+
+/** Internal, mutable state kept for each track (video or audio) while muxing. */
+export interface Track {
+ id: number,
+ info: {
+ type: 'video',
+ codec: VideoOptions['codec'],
+ width: number,
+ height: number,
+ rotation: 0 | 90 | 180 | 270 | TransformationMatrix,
+ decoderConfig: VideoDecoderConfig
+ } | {
+ type: 'audio',
+ codec: AudioOptions['codec'],
+ numberOfChannels: number,
+ sampleRate: number,
+ decoderConfig: AudioDecoderConfig
+ },
+ timescale: number,
+ samples: Sample[],
+
+ // Decode timestamps in seconds; firstDecodeTimestamp stays undefined until the first sample arrives
+ firstDecodeTimestamp: number,
+ lastDecodeTimestamp: number,
+
+ // Compact run-length tables backing the stts and ctts boxes, maintained as samples are added
+ timeToSampleTable: { sampleCount: number, sampleDelta: number }[];
+ compositionTimeOffsetTable: { sampleCount: number, sampleCompositionTimeOffset: number }[];
+ lastTimescaleUnits: number,
+ lastSample: Sample,
+
+ finalizedChunks: Chunk[],
+ currentChunk: Chunk,
+ // Backs the stsc box
+ compactlyCodedChunkTable: {
+ firstChunk: number,
+ samplesPerChunk: number
+ }[]
+}
+
+// Narrowed track types, discriminated on info.type
+export type VideoTrack = Track & { info: { type: 'video' } };
+export type AudioTrack = Track & { info: { type: 'audio' } };
+
+/** One encoded media sample; timestamps and duration are in seconds. */
+export interface Sample {
+ presentationTimestamp: number,
+ decodeTimestamp: number,
+ duration: number,
+ data: Uint8Array,
+ size: number,
+ type: 'key' | 'delta',
+ timescaleUnitsToNextSample: number
+}
+
+/** A group of consecutive samples written out together. */
+interface Chunk {
+ startTimestamp: number,
+ samples: Sample[],
+ offset?: number,
+ // In the case of a fragmented file, this indicates the position of the moof box pointing to the data in this chunk
+ moofOffset?: number
+}
+
+export class Muxer {
+ // NOTE(review): `T` is referenced below but this class declares no generic parameter list - the
+ // original `class Muxer<T extends Target>` appears to have lost its angle-bracketed generics in
+ // transit; restore them.
+ target: T;
+
+ #options: Mp4MuxerOptions;
+ #writer: Writer;
+ #ftypSize: number;
+ #mdat: Box;
+
+ #videoTrack: Track = null;
+ #audioTrack: Track = null;
+ #creationTime = Math.floor(Date.now() / 1000) + TIMESTAMP_OFFSET; // Seconds since the 1904 MP4 epoch
+ #finalizedChunks: Chunk[] = [];
+
+ // Fields for fragmented MP4:
+ #nextFragmentNumber = 1; // Sequence number written into each fragment's mfhd box
+ #videoSampleQueue: Sample[] = [];
+ #audioSampleQueue: Sample[] = [];
+
+ #finalized = false;
+
+ /**
+ * Creates a new Muxer. Validates and clones the options (to guard against outside mutation),
+ * selects the writer implementation matching the target's type, then writes the file header.
+ */
+ constructor(options: Mp4MuxerOptions) {
+ this.#validateOptions(options);
+
+ // Don't want these to be modified from the outside while processing:
+ options.video = deepClone(options.video);
+ options.audio = deepClone(options.audio);
+ options.fastStart = deepClone(options.fastStart);
+
+ this.target = options.target;
+ this.#options = {
+ firstTimestampBehavior: 'strict',
+ ...options
+ };
+
+ // Pick the writer based on the concrete target class
+ if (options.target instanceof ArrayBufferTarget) {
+ this.#writer = new ArrayBufferTargetWriter(options.target);
+ } else if (options.target instanceof StreamTarget) {
+ this.#writer = options.target.options?.chunked
+ ? new ChunkedStreamTargetWriter(options.target)
+ : new StreamTargetWriter(options.target);
+ } else if (options.target instanceof FileSystemWritableFileStreamTarget) {
+ this.#writer = new FileSystemWritableFileStreamTargetWriter(options.target);
+ } else {
+ throw new Error(`Invalid target: ${options.target}`);
+ }
+
+ this.#prepareTracks();
+ this.#writeHeader();
+ }
+
+ /** Validates the constructor options, throwing a descriptive error for any unsupported configuration. */
+ #validateOptions(options: Mp4MuxerOptions) {
+ if (options.video) {
+ if (!SUPPORTED_VIDEO_CODECS.includes(options.video.codec)) {
+ throw new Error(`Unsupported video codec: ${options.video.codec}`);
+ }
+
+ // rotation may be a right-angle number or a full 3x3 matrix
+ const videoRotation = options.video.rotation;
+ if (typeof videoRotation === 'number' && ![0, 90, 180, 270].includes(videoRotation)) {
+ throw new Error(`Invalid video rotation: ${videoRotation}. Has to be 0, 90, 180 or 270.`);
+ } else if (
+ Array.isArray(videoRotation) &&
+ (videoRotation.length !== 9 || videoRotation.some(value => typeof value !== 'number'))
+ ) {
+ throw new Error(`Invalid video transformation matrix: ${videoRotation.join()}`);
+ }
+ }
+
+ if (options.audio && !SUPPORTED_AUDIO_CODECS.includes(options.audio.codec)) {
+ throw new Error(`Unsupported audio codec: ${options.audio.codec}`);
+ }
+
+ if (options.firstTimestampBehavior && !FIRST_TIMESTAMP_BEHAVIORS.includes(options.firstTimestampBehavior)) {
+ throw new Error(`Invalid first timestamp behavior: ${options.firstTimestampBehavior}`);
+ }
+
+ // Object-form fastStart requires the expected chunk count for every declared track
+ if (typeof options.fastStart === 'object') {
+ if (options.video && options.fastStart.expectedVideoChunks === undefined) {
+ throw new Error(`'fastStart' is an object but is missing property 'expectedVideoChunks'.`);
+ }
+
+ if (options.audio && options.fastStart.expectedAudioChunks === undefined) {
+ throw new Error(`'fastStart' is an object but is missing property 'expectedAudioChunks'.`);
+ }
+ } else if (![false, 'in-memory', 'fragmented'].includes(options.fastStart)) {
+ throw new Error(`'fastStart' option must be false, 'in-memory', 'fragmented' or an object.`);
+ }
+ }
+
+ /** Writes the ftyp box and sets up the mdat box (or defers it, depending on the fastStart mode). */
+ #writeHeader() {
+ this.#writer.writeBox(ftyp({
+ holdsAvc: this.#options.video?.codec === 'avc',
+ fragmented: this.#options.fastStart === 'fragmented'
+ }));
+
+ this.#ftypSize = this.#writer.pos;
+
+ if (this.#options.fastStart === 'in-memory') {
+ this.#mdat = mdat(false);
+ } else if (this.#options.fastStart === 'fragmented') {
+ // We write the moov box once we write out the first fragment to make sure we get the decoder configs
+ } else {
+ if (typeof this.#options.fastStart === 'object') {
+ // Reserve space up front so the moov box can later be placed before the media data
+ let moovSizeUpperBound = this.#computeMoovSizeUpperBound();
+ this.#writer.seek(this.#writer.pos + moovSizeUpperBound);
+ }
+
+ this.#mdat = mdat(true); // Reserve large size by default, can refine this when finalizing.
+ this.#writer.writeBox(this.#mdat);
+ }
+
+ this.#maybeFlushStreamingTargetWriter();
+ }
+
+ /** Conservatively estimates the final moov box size so that space can be reserved ahead of the mdat box. */
+ #computeMoovSizeUpperBound() {
+ if (typeof this.#options.fastStart !== 'object') return;
+
+ let upperBound = 0;
+ let sampleCounts = [
+ this.#options.fastStart.expectedVideoChunks,
+ this.#options.fastStart.expectedAudioChunks
+ ];
+
+ for (let n of sampleCounts) {
+ if (!n) continue;
+
+ // Given the max allowed sample count, compute the space they'll take up in the Sample Table Box, assuming
+ // the worst case for each individual box:
+
+ // stts box - since it is compactly coded, the maximum length of this table will be 2/3n
+ upperBound += (4 + 4) * Math.ceil(2/3 * n);
+ // stss box - 1 entry per sample
+ upperBound += 4 * n;
+ // stsc box - since it is compactly coded, the maximum length of this table will be 2/3n
+ upperBound += (4 + 4 + 4) * Math.ceil(2/3 * n);
+ // stsz box - 1 entry per sample
+ upperBound += 4 * n;
+ // co64 box - we assume 1 sample per chunk and 64-bit chunk offsets
+ upperBound += 8 * n;
+ }
+
+ upperBound += 4096; // Assume a generous 4 kB for everything else: Track metadata, codec descriptors, etc.
+
+ return upperBound;
+ }
+
+ /** Initializes the internal Track state objects from the video/audio options. */
+ #prepareTracks() {
+ if (this.#options.video) {
+ this.#videoTrack = {
+ id: 1,
+ info: {
+ type: 'video',
+ codec: this.#options.video.codec,
+ width: this.#options.video.width,
+ height: this.#options.video.height,
+ rotation: this.#options.video.rotation ?? 0,
+ decoderConfig: null
+ },
+ timescale: 11520, // Timescale used by FFmpeg, contains many common frame rates as factors
+ samples: [],
+ finalizedChunks: [],
+ currentChunk: null,
+ firstDecodeTimestamp: undefined,
+ lastDecodeTimestamp: -1,
+ timeToSampleTable: [],
+ compositionTimeOffsetTable: [],
+ lastTimescaleUnits: null,
+ lastSample: null,
+ compactlyCodedChunkTable: []
+ };
+ }
+
+ if (this.#options.audio) {
+ // For the case that we don't get any further decoder details, we can still make a pretty educated guess:
+ let guessedCodecPrivate = this.#generateMpeg4AudioSpecificConfig(
+ 2, // Object type for AAC-LC, since it's the most common
+ this.#options.audio.sampleRate,
+ this.#options.audio.numberOfChannels
+ );
+
+ this.#audioTrack = {
+ id: this.#options.video ? 2 : 1, // Audio gets ID 2 when a video track exists
+ info: {
+ type: 'audio',
+ codec: this.#options.audio.codec,
+ numberOfChannels: this.#options.audio.numberOfChannels,
+ sampleRate: this.#options.audio.sampleRate,
+ decoderConfig: {
+ codec: this.#options.audio.codec,
+ description: guessedCodecPrivate,
+ numberOfChannels: this.#options.audio.numberOfChannels,
+ sampleRate: this.#options.audio.sampleRate
+ }
+ },
+ timescale: this.#options.audio.sampleRate, // One timescale unit per audio sample frame
+ samples: [],
+ finalizedChunks: [],
+ currentChunk: null,
+ firstDecodeTimestamp: undefined,
+ lastDecodeTimestamp: -1,
+ timeToSampleTable: [],
+ compositionTimeOffsetTable: [],
+ lastTimescaleUnits: null,
+ lastSample: null,
+ compactlyCodedChunkTable: []
+ };
+ }
+ }
+
+    // https://wiki.multimedia.cx/index.php/MPEG-4_Audio
+    /**
+     * Builds an MPEG-4 AudioSpecificConfig byte sequence for the given object type, sample rate and
+     * channel count.
+     */
+    #generateMpeg4AudioSpecificConfig(objectType: number, sampleRate: number, numberOfChannels: number) {
+        let frequencyIndices =
+            [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
+        let frequencyIndex = frequencyIndices.indexOf(sampleRate);
+        // Fix: for sample rates not in the table, indexOf returns -1, which would previously have been
+        // serialized as the garbage bitstring "-1". Use the spec's escape value 15 instead, which
+        // signals that the exact sample rate follows explicitly in the next 24 bits.
+        if (frequencyIndex === -1) frequencyIndex = 15;
+        let channelConfig = numberOfChannels;
+
+        let configBits = '';
+        configBits += objectType.toString(2).padStart(5, '0');
+
+        configBits += frequencyIndex.toString(2).padStart(4, '0');
+        if (frequencyIndex === 15) configBits += sampleRate.toString(2).padStart(24, '0');
+
+        configBits += channelConfig.toString(2).padStart(4, '0');
+
+        // Pad with 0 bits to fit into a multiple of bytes
+        let paddingLength = Math.ceil(configBits.length / 8) * 8;
+        configBits = configBits.padEnd(paddingLength, '0');
+
+        // Pack the bitstring into bytes, 8 bits at a time
+        let configBytes = new Uint8Array(configBits.length / 8);
+        for (let i = 0; i < configBits.length; i += 8) {
+            configBytes[i / 8] = parseInt(configBits.slice(i, i + 8), 2);
+        }
+
+        return configBytes;
+    }
+
+    /**
+     * Adds a new, encoded video chunk to the MP4 file. Copies the chunk's payload into a fresh buffer
+     * and forwards everything to addVideoChunkRaw.
+     */
+    addVideoChunk(
+        sample: EncodedVideoChunk,
+        meta?: EncodedVideoChunkMetadata,
+        timestamp?: number,
+        compositionTimeOffset?: number
+    ) {
+        // Copy the chunk's payload out into our own buffer
+        let chunkData = new Uint8Array(sample.byteLength);
+        sample.copyTo(chunkData);
+
+        this.addVideoChunkRaw(
+            chunkData,
+            sample.type,
+            timestamp ?? sample.timestamp, // An explicit timestamp overrides the chunk's own
+            sample.duration,
+            meta,
+            compositionTimeOffset
+        );
+    }
+
+ /**
+ * Adds a raw video chunk to the file.
+ * @param data - The encoded bitstream bytes.
+ * @param timestamp - Presentation timestamp in microseconds.
+ * @param duration - Duration in microseconds.
+ * @param compositionTimeOffset - Optional PTS-DTS offset in microseconds.
+ */
+ addVideoChunkRaw(
+ data: Uint8Array,
+ type: 'key' | 'delta',
+ timestamp: number,
+ duration: number,
+ meta?: EncodedVideoChunkMetadata,
+ compositionTimeOffset?: number
+ ) {
+ this.#ensureNotFinalized();
+ if (!this.#options.video) throw new Error('No video track declared.');
+
+ if (
+ typeof this.#options.fastStart === 'object' &&
+ this.#videoTrack.samples.length === this.#options.fastStart.expectedVideoChunks
+ ) {
+ throw new Error(`Cannot add more video chunks than specified in 'fastStart' (${
+ this.#options.fastStart.expectedVideoChunks
+ }).`);
+ }
+
+ let videoSample = this.#createSampleForTrack(
+ this.#videoTrack, data, type, timestamp, duration, meta, compositionTimeOffset
+ );
+
+ // Check if we need to interleave the samples in the case of a fragmented file
+ if (this.#options.fastStart === 'fragmented' && this.#audioTrack) {
+ // Add all audio samples with a timestamp smaller than the incoming video sample
+ while (
+ this.#audioSampleQueue.length > 0 &&
+ this.#audioSampleQueue[0].decodeTimestamp <= videoSample.decodeTimestamp
+ ) {
+ let audioSample = this.#audioSampleQueue.shift();
+ this.#addSampleToTrack(this.#audioTrack, audioSample);
+ }
+
+ // Depending on the last audio sample, either add the video sample to the file or enqueue it
+ if (videoSample.decodeTimestamp <= this.#audioTrack.lastDecodeTimestamp) {
+ this.#addSampleToTrack(this.#videoTrack, videoSample);
+ } else {
+ this.#videoSampleQueue.push(videoSample);
+ }
+ } else {
+ this.#addSampleToTrack(this.#videoTrack, videoSample);
+ }
+ }
+
+    /**
+     * Adds a new, encoded audio chunk to the MP4 file. Copies the chunk's payload into a fresh buffer
+     * and forwards everything to addAudioChunkRaw.
+     */
+    addAudioChunk(sample: EncodedAudioChunk, meta?: EncodedAudioChunkMetadata, timestamp?: number) {
+        // Copy the chunk's payload out into our own buffer
+        let chunkData = new Uint8Array(sample.byteLength);
+        sample.copyTo(chunkData);
+
+        // An explicit timestamp overrides the chunk's own
+        this.addAudioChunkRaw(chunkData, sample.type, timestamp ?? sample.timestamp, sample.duration, meta);
+    }
+
+ /**
+ * Adds a raw audio chunk to the file.
+ * @param data - The encoded bitstream bytes.
+ * @param timestamp - Presentation timestamp in microseconds.
+ * @param duration - Duration in microseconds.
+ */
+ addAudioChunkRaw(
+ data: Uint8Array,
+ type: 'key' | 'delta',
+ timestamp: number,
+ duration: number,
+ meta?: EncodedAudioChunkMetadata
+ ) {
+ this.#ensureNotFinalized();
+ if (!this.#options.audio) throw new Error('No audio track declared.');
+
+ if (
+ typeof this.#options.fastStart === 'object' &&
+ this.#audioTrack.samples.length === this.#options.fastStart.expectedAudioChunks
+ ) {
+ throw new Error(`Cannot add more audio chunks than specified in 'fastStart' (${
+ this.#options.fastStart.expectedAudioChunks
+ }).`);
+ }
+
+ let audioSample = this.#createSampleForTrack(this.#audioTrack, data, type, timestamp, duration, meta);
+
+ // Check if we need to interleave the samples in the case of a fragmented file
+ if (this.#options.fastStart === 'fragmented' && this.#videoTrack) {
+ // Add all video samples with a timestamp smaller than the incoming audio sample
+ while (
+ this.#videoSampleQueue.length > 0 &&
+ this.#videoSampleQueue[0].decodeTimestamp <= audioSample.decodeTimestamp
+ ) {
+ let videoSample = this.#videoSampleQueue.shift();
+ this.#addSampleToTrack(this.#videoTrack, videoSample);
+ }
+
+ // Depending on the last video sample, either add the audio sample to the file or enqueue it
+ if (audioSample.decodeTimestamp <= this.#videoTrack.lastDecodeTimestamp) {
+ this.#addSampleToTrack(this.#audioTrack, audioSample);
+ } else {
+ this.#audioSampleQueue.push(audioSample);
+ }
+ } else {
+ this.#addSampleToTrack(this.#audioTrack, audioSample);
+ }
+ }
+
+ /**
+ * Converts a raw chunk (microsecond timestamps) into an internal Sample (second-based timestamps),
+ * merging any newly arrived decoder config metadata into the track.
+ */
+ #createSampleForTrack(
+ track: Track,
+ data: Uint8Array,
+ type: 'key' | 'delta',
+ timestamp: number,
+ duration: number,
+ meta?: EncodedVideoChunkMetadata | EncodedAudioChunkMetadata,
+ compositionTimeOffset?: number
+ ) {
+ // Convert microseconds to seconds; the decode timestamp is the presentation timestamp minus the offset
+ let presentationTimestampInSeconds = timestamp / 1e6;
+ let decodeTimestampInSeconds = (timestamp - (compositionTimeOffset ?? 0)) / 1e6;
+ let durationInSeconds = duration / 1e6;
+
+ let adjusted = this.#validateTimestamp(presentationTimestampInSeconds, decodeTimestampInSeconds, track);
+ presentationTimestampInSeconds = adjusted.presentationTimestamp;
+ decodeTimestampInSeconds = adjusted.decodeTimestamp;
+
+ if (meta?.decoderConfig) {
+ if (track.info.decoderConfig === null) {
+ track.info.decoderConfig = meta.decoderConfig;
+ } else {
+ // Merge into the existing config so earlier fields (e.g. a guessed description) survive
+ Object.assign(track.info.decoderConfig, meta.decoderConfig);
+ }
+ }
+
+ let sample: Sample = {
+ presentationTimestamp: presentationTimestampInSeconds,
+ decodeTimestamp: decodeTimestampInSeconds,
+ duration: durationInSeconds,
+ data: data,
+ size: data.byteLength,
+ type: type,
+ // Will be refined once the next sample comes in
+ timescaleUnitsToNextSample: intoTimescale(durationInSeconds, track.timescale)
+ };
+
+ return sample;
+ }
+
+ /**
+ * Adds a sample to the track's bookkeeping tables (stts/ctts run-length tables) and to its current
+ * chunk, starting a new chunk - and, for fragmented files, finalizing the fragment - when needed.
+ */
+ #addSampleToTrack(
+ track: Track,
+ sample: Sample
+ ) {
+ if (this.#options.fastStart !== 'fragmented') {
+ track.samples.push(sample);
+ }
+
+ const sampleCompositionTimeOffset =
+ intoTimescale(sample.presentationTimestamp - sample.decodeTimestamp, track.timescale);
+
+ if (track.lastTimescaleUnits !== null) {
+ // Refine the previous sample's duration now that this sample's decode time is known
+ let timescaleUnits = intoTimescale(sample.decodeTimestamp, track.timescale, false);
+ let delta = Math.round(timescaleUnits - track.lastTimescaleUnits);
+ track.lastTimescaleUnits += delta;
+ track.lastSample.timescaleUnitsToNextSample = delta;
+
+ if (this.#options.fastStart !== 'fragmented') {
+ let lastTableEntry = last(track.timeToSampleTable);
+ if (lastTableEntry.sampleCount === 1) {
+ // If we hit this case, we're the second sample
+ lastTableEntry.sampleDelta = delta;
+ lastTableEntry.sampleCount++;
+ } else if (lastTableEntry.sampleDelta === delta) {
+ // Simply increment the count
+ lastTableEntry.sampleCount++;
+ } else {
+ // The delta has changed, subtract one from the previous run and create a new run with the new delta
+ lastTableEntry.sampleCount--;
+ track.timeToSampleTable.push({
+ sampleCount: 2,
+ sampleDelta: delta
+ });
+ }
+
+ const lastCompositionTimeOffsetTableEntry = last(track.compositionTimeOffsetTable);
+ if (lastCompositionTimeOffsetTableEntry.sampleCompositionTimeOffset === sampleCompositionTimeOffset) {
+ // Simply increment the count
+ lastCompositionTimeOffsetTableEntry.sampleCount++;
+ } else {
+ // The composition time offset has changed, so create a new entry with the new composition time
+ // offset
+ track.compositionTimeOffsetTable.push({
+ sampleCount: 1,
+ sampleCompositionTimeOffset: sampleCompositionTimeOffset
+ });
+ }
+ }
+ } else {
+ // This is the track's very first sample - start both run-length tables
+ track.lastTimescaleUnits = 0;
+
+ if (this.#options.fastStart !== 'fragmented') {
+ track.timeToSampleTable.push({
+ sampleCount: 1,
+ sampleDelta: intoTimescale(sample.duration, track.timescale)
+ });
+ track.compositionTimeOffsetTable.push({
+ sampleCount: 1,
+ sampleCompositionTimeOffset: sampleCompositionTimeOffset
+ });
+ }
+ }
+
+ track.lastSample = sample;
+
+ let beginNewChunk = false;
+ if (!track.currentChunk) {
+ beginNewChunk = true;
+ } else {
+ let currentChunkDuration = sample.presentationTimestamp - track.currentChunk.startTimestamp;
+
+ if (this.#options.fastStart === 'fragmented') {
+ // Fragments are only cut at key frames of the most important track, and only once the
+ // current fragment is at least one second long
+ let mostImportantTrack = this.#videoTrack ?? this.#audioTrack;
+ if (track === mostImportantTrack && sample.type === 'key' && currentChunkDuration >= 1.0) {
+ beginNewChunk = true;
+ this.#finalizeFragment();
+ }
+ } else {
+ beginNewChunk = currentChunkDuration >= 0.5; // Chunk is long enough, we need a new one
+ }
+ }
+
+ if (beginNewChunk) {
+ if (track.currentChunk) {
+ this.#finalizeCurrentChunk(track);
+ }
+
+ track.currentChunk = {
+ startTimestamp: sample.presentationTimestamp,
+ samples: []
+ };
+ }
+
+ track.currentChunk.samples.push(sample);
+ }
+
+ /**
+ * Validates and (depending on `firstTimestampBehavior`) adjusts a sample's
+ * timestamps (in seconds) before it is added to `track`.
+ *
+ * - 'strict': the very first sample must have a DTS of 0, otherwise throws.
+ * - 'offset': shifts all timestamps so the track's first DTS becomes 0.
+ * - 'cross-track-offset': like 'offset', but shifts by the earliest first DTS
+ *   across both tracks so relative A/V timing is preserved.
+ *
+ * Also enforces monotonically increasing DTS and records the last DTS seen.
+ * Returns the (possibly shifted) presentation and decode timestamps.
+ */
+ #validateTimestamp(presentationTimestamp: number, decodeTimestamp: number, track: Track) {
+ // Check first timestamp behavior
+ const strictTimestampBehavior = this.#options.firstTimestampBehavior === 'strict';
+ const noLastDecodeTimestamp = track.lastDecodeTimestamp === -1;
+ const timestampNonZero = decodeTimestamp !== 0;
+ if (strictTimestampBehavior && noLastDecodeTimestamp && timestampNonZero) {
+ // Each template segment ends with a space so the concatenated message reads correctly
+ throw new Error(
+ `The first chunk for your media track must have a timestamp of 0 (received DTS=${decodeTimestamp}). ` +
+ `Non-zero first timestamps are often caused by directly piping frames or audio data from a ` +
+ `MediaStreamTrack into the encoder. Their timestamps are typically relative to the age of the ` +
+ `document, which is probably not what you want.\n\nIf you want to offset all timestamps of a track such ` +
+ `that the first one is zero, set firstTimestampBehavior: 'offset' in the options.\n`
+ );
+ } else if (
+ this.#options.firstTimestampBehavior === 'offset' ||
+ this.#options.firstTimestampBehavior === 'cross-track-offset'
+ ) {
+ // Remember the first DTS ever seen on this track; it defines the offset base
+ if (track.firstDecodeTimestamp === undefined) {
+ track.firstDecodeTimestamp = decodeTimestamp;
+ }
+
+ let baseDecodeTimestamp: number;
+ if (this.#options.firstTimestampBehavior === 'offset') {
+ baseDecodeTimestamp = track.firstDecodeTimestamp;
+ } else {
+ // Since each track may have its firstDecodeTimestamp set independently, but the tracks' timestamps come
+ // from the same clock, we should subtract the earlier of the (up to) two tracks' first timestamps to
+ // ensure A/V sync.
+ baseDecodeTimestamp = Math.min(
+ this.#videoTrack?.firstDecodeTimestamp ?? Infinity,
+ this.#audioTrack?.firstDecodeTimestamp ?? Infinity
+ );
+ }
+
+ decodeTimestamp -= baseDecodeTimestamp;
+ presentationTimestamp -= baseDecodeTimestamp;
+ }
+
+ if (decodeTimestamp < track.lastDecodeTimestamp) {
+ throw new Error(
+ `Timestamps must be monotonically increasing ` +
+ `(DTS went from ${track.lastDecodeTimestamp * 1e6} to ${decodeTimestamp * 1e6}).`
+ );
+ }
+
+ track.lastDecodeTimestamp = decodeTimestamp;
+
+ return { presentationTimestamp, decodeTimestamp };
+ }
+
+ /**
+ * Closes `track`'s current chunk: records it in the finalized-chunk lists,
+ * extends the compactly-coded chunk table, and — except in 'in-memory' mode,
+ * where layout happens during finalize() — writes the chunk's sample data at
+ * the current writer position. Not usable in 'fragmented' mode, where data is
+ * emitted per fragment instead.
+ */
+ #finalizeCurrentChunk(track: Track) {
+ if (this.#options.fastStart === 'fragmented') {
+ throw new Error("Can't finalize individual chunks if 'fastStart' is set to 'fragmented'.");
+ }
+
+ if (!track.currentChunk) return;
+
+ track.finalizedChunks.push(track.currentChunk);
+ this.#finalizedChunks.push(track.currentChunk);
+
+ // Only add a new table entry when the samples-per-chunk value changes (run-length coding)
+ if (
+ track.compactlyCodedChunkTable.length === 0
+ || last(track.compactlyCodedChunkTable).samplesPerChunk !== track.currentChunk.samples.length
+ ) {
+ track.compactlyCodedChunkTable.push({
+ firstChunk: track.finalizedChunks.length, // 1-indexed
+ samplesPerChunk: track.currentChunk.samples.length
+ });
+ }
+
+ if (this.#options.fastStart === 'in-memory') {
+ track.currentChunk.offset = 0; // We'll compute the proper offset when finalizing
+ return;
+ }
+
+ // Write out the data
+ track.currentChunk.offset = this.#writer.pos;
+ for (let sample of track.currentChunk.samples) {
+ this.#writer.write(sample.data);
+ sample.data = null; // Can be GC'd
+ }
+
+ this.#maybeFlushStreamingTargetWriter();
+ }
+
+ /**
+ * Writes out one complete fragment: a moof box plus an mdat box containing the
+ * current chunk of every track that has one. The moof is written twice — first
+ * with placeholder values, then re-written in place once the actual data
+ * offsets are known. On the first fragment, the moov box is emitted beforehand.
+ */
+ #finalizeFragment(flushStreamingWriter = true) {
+ if (this.#options.fastStart !== 'fragmented') {
+ throw new Error("Can't finalize a fragment unless 'fastStart' is set to 'fragmented'.");
+ }
+
+ let tracks = [this.#videoTrack, this.#audioTrack].filter((track) => track && track.currentChunk);
+ if (tracks.length === 0) return;
+
+ let fragmentNumber = this.#nextFragmentNumber++;
+
+ if (fragmentNumber === 1) {
+ // Write the moov box now that we have all decoder configs
+ let movieBox = moov(tracks, this.#creationTime, true);
+ this.#writer.writeBox(movieBox);
+ }
+
+ // Write out an initial moof box; will be overwritten later once actual chunk offsets are known
+ let moofOffset = this.#writer.pos;
+ let moofBox = moof(fragmentNumber, tracks);
+ this.#writer.writeBox(moofBox);
+
+ // Create the mdat box
+ {
+ let mdatBox = mdat(false); // Initially assume no fragment is larger than 4 GiB
+ let totalTrackSampleSize = 0;
+
+ // Compute the size of the mdat box
+ for (let track of tracks) {
+ for (let sample of track.currentChunk.samples) {
+ totalTrackSampleSize += sample.size;
+ }
+ }
+
+ let mdatSize = this.#writer.measureBox(mdatBox) + totalTrackSampleSize;
+ if (mdatSize >= 2**32) {
+ // Fragment is larger than 4 GiB, we need to use the large size
+ mdatBox.largeSize = true;
+ // Re-measure: the large-size header is bigger, which changes the total
+ mdatSize = this.#writer.measureBox(mdatBox) + totalTrackSampleSize;
+ }
+
+ mdatBox.size = mdatSize;
+ this.#writer.writeBox(mdatBox);
+ }
+
+ // Write sample data
+ for (let track of tracks) {
+ track.currentChunk.offset = this.#writer.pos;
+ track.currentChunk.moofOffset = moofOffset;
+
+ for (let sample of track.currentChunk.samples) {
+ this.#writer.write(sample.data);
+ sample.data = null; // Can be GC'd
+ }
+ }
+
+ // Now that we set the actual chunk offsets, fix the moof box
+ let endPos = this.#writer.pos;
+ this.#writer.seek(this.#writer.offsets.get(moofBox));
+ let newMoofBox = moof(fragmentNumber, tracks);
+ this.#writer.writeBox(newMoofBox);
+ this.#writer.seek(endPos);
+
+ // Retire each track's current chunk into the finalized lists
+ for (let track of tracks) {
+ track.finalizedChunks.push(track.currentChunk);
+ this.#finalizedChunks.push(track.currentChunk);
+ track.currentChunk = null;
+ }
+
+ if (flushStreamingWriter) {
+ this.#maybeFlushStreamingTargetWriter();
+ }
+ }
+
+ /** Flushes pending output when the underlying writer is stream-backed; no-op otherwise. */
+ #maybeFlushStreamingTargetWriter() {
+ const writer = this.#writer;
+ if (!(writer instanceof StreamTargetWriter)) return;
+
+ writer.flush();
+ }
+
+ /** Guard: throws if chunks are being added after finalize() has already run. */
+ #ensureNotFinalized() {
+ if (!this.#finalized) return;
+
+ throw new Error('Cannot add new video or audio chunks after the file has been finalized.');
+ }
+
+ /** Finalizes the file, making it ready for use. Must be called after all video and audio chunks have been added. */
+ finalize() {
+ if (this.#finalized) {
+ throw new Error('Cannot finalize a muxer more than once.');
+ }
+
+ if (this.#options.fastStart === 'fragmented') {
+ // Drain any samples still held in the interleaving queues, then emit the final fragment
+ for (let videoSample of this.#videoSampleQueue) this.#addSampleToTrack(this.#videoTrack, videoSample);
+ for (let audioSample of this.#audioSampleQueue) this.#addSampleToTrack(this.#audioTrack, audioSample);
+
+ this.#finalizeFragment(false); // Don't flush the last fragment as we will flush it with the mfra box soon
+ } else {
+ // Close out whatever chunk each track was accumulating
+ if (this.#videoTrack) this.#finalizeCurrentChunk(this.#videoTrack);
+ if (this.#audioTrack) this.#finalizeCurrentChunk(this.#audioTrack);
+ }
+
+ let tracks = [this.#videoTrack, this.#audioTrack].filter(Boolean);
+
+ if (this.#options.fastStart === 'in-memory') {
+ let mdatSize: number;
+
+ // We know how many chunks there are, but computing the chunk positions requires an iterative approach:
+ // In order to know where the first chunk should go, we first need to know the size of the moov box. But we
+ // cannot write a proper moov box without first knowing all chunk positions. So, we generate a tentative
+ // moov box with placeholder values (0) for the chunk offsets to be able to compute its size. If it then
+ // turns out that appending all chunks exceeds 4 GiB, we need to repeat this process, now with the co64 box
+ // being used in the moov box instead, which will make it larger. After that, we definitely know the final
+ // size of the moov box and can compute the proper chunk positions.
+
+ for (let i = 0; i < 2; i++) {
+ let movieBox = moov(tracks, this.#creationTime);
+ let movieBoxSize = this.#writer.measureBox(movieBox);
+ mdatSize = this.#writer.measureBox(this.#mdat);
+ let currentChunkPos = this.#writer.pos + movieBoxSize + mdatSize;
+
+ // Lay out all chunks sequentially after the moov box and mdat header
+ for (let chunk of this.#finalizedChunks) {
+ chunk.offset = currentChunkPos;
+ for (let { data } of chunk.samples) {
+ currentChunkPos += data.byteLength;
+ mdatSize += data.byteLength;
+ }
+ }
+
+ if (currentChunkPos < 2**32) break;
+ if (mdatSize >= 2**32) this.#mdat.largeSize = true;
+ }
+
+ let movieBox = moov(tracks, this.#creationTime);
+ this.#writer.writeBox(movieBox);
+
+ this.#mdat.size = mdatSize;
+ this.#writer.writeBox(this.#mdat);
+
+ for (let chunk of this.#finalizedChunks) {
+ for (let sample of chunk.samples) {
+ this.#writer.write(sample.data);
+ sample.data = null;
+ }
+ }
+ } else if (this.#options.fastStart === 'fragmented') {
+ // Append the mfra box to the end of the file for better random access
+ let startPos = this.#writer.pos;
+ let mfraBox = mfra(tracks);
+ this.#writer.writeBox(mfraBox);
+
+ // Patch the 'size' field of the mfro box at the end of the mfra box now that we know its actual size
+ let mfraBoxSize = this.#writer.pos - startPos;
+ this.#writer.seek(this.#writer.pos - 4);
+ this.#writer.writeU32(mfraBoxSize);
+ } else {
+ // Regular (non-fragmented) file: patch the mdat box's size, then write the moov box
+ let mdatPos = this.#writer.offsets.get(this.#mdat);
+ let mdatSize = this.#writer.pos - mdatPos;
+ this.#mdat.size = mdatSize;
+ this.#mdat.largeSize = mdatSize >= 2**32; // Only use the large size if we need it
+ this.#writer.patchBox(this.#mdat);
+
+ let movieBox = moov(tracks, this.#creationTime);
+
+ if (typeof this.#options.fastStart === 'object') {
+ // Space was reserved at the start of the file: place moov right after ftyp,
+ // then pad the remaining reserved bytes up to the mdat with a free box
+ this.#writer.seek(this.#ftypSize);
+ this.#writer.writeBox(movieBox);
+
+ let remainingBytes = mdatPos - this.#writer.pos;
+ this.#writer.writeBox(free(remainingBytes));
+ } else {
+ this.#writer.writeBox(movieBox);
+ }
+ }
+
+ this.#maybeFlushStreamingTargetWriter();
+ this.#writer.finalize();
+
+ this.#finalized = true;
+ }
+}
diff --git a/v1-com-officielle/public/mp4-muxer-main/src/target.ts b/v1-com-officielle/public/mp4-muxer-main/src/target.ts
new file mode 100644
index 0000000..a4f76e4
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/src/target.ts
@@ -0,0 +1,20 @@
+/** Any supported output destination for the muxer. */
+export type Target = ArrayBufferTarget | StreamTarget | FileSystemWritableFileStreamTarget;
+
+/** Collects the entire muxed file into a single in-memory ArrayBuffer. */
+export class ArrayBufferTarget {
+ // Populated by the writer when muxing finalizes; null until then
+ buffer: ArrayBuffer = null;
+}
+
+/** Streams written data out through the `onData` callback. */
+export class StreamTarget {
+ constructor(public options: {
+ // Receives each piece of output data together with its byte position in the file
+ onData?: (data: Uint8Array, position: number) => void,
+ // presumably selects the chunked writer path — confirm against the muxer's writer selection
+ chunked?: boolean,
+ chunkSize?: number
+ }) {}
+}
+
+/** Writes directly to disk through the File System Access API's writable stream. */
+export class FileSystemWritableFileStreamTarget {
+ constructor(
+ public stream: FileSystemWritableFileStream,
+ public options?: { chunkSize?: number }
+ ) {}
+}
diff --git a/v1-com-officielle/public/mp4-muxer-main/src/writer.ts b/v1-com-officielle/public/mp4-muxer-main/src/writer.ts
new file mode 100644
index 0000000..96ba200
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/src/writer.ts
@@ -0,0 +1,380 @@
+import { Box } from './box';
+import { ArrayBufferTarget, FileSystemWritableFileStreamTarget, StreamTarget } from './target';
+
+export abstract class Writer {
+ // Current write head, in bytes from the start of the file
+ pos = 0;
+ // Scratch buffer shared by the writeU32/writeU64/writeAscii helpers below
+ #helper = new Uint8Array(8);
+ #helperView = new DataView(this.#helper.buffer);
+
+ /**
+ * Stores the position from the start of the file to where boxes elements have been written. This is used to
+ * rewrite/edit elements that were already added before, and to measure sizes of things.
+ */
+ offsets = new WeakMap();
+
+ /** Writes the given data to the target, at the current position. */
+ abstract write(data: Uint8Array): void;
+ /** Called after muxing has finished. */
+ abstract finalize(): void;
+
+ /** Sets the current position for future writes to a new one. */
+ seek(newPos: number) {
+ this.pos = newPos;
+ }
+
+ /** Writes a 32-bit unsigned integer, big-endian. */
+ writeU32(value: number) {
+ this.#helperView.setUint32(0, value, false);
+ this.write(this.#helper.subarray(0, 4));
+ }
+
+ /** Writes a 64-bit unsigned integer, big-endian, as two 32-bit halves. */
+ writeU64(value: number) {
+ this.#helperView.setUint32(0, Math.floor(value / 2**32), false);
+ this.#helperView.setUint32(4, value, false);
+ this.write(this.#helper.subarray(0, 8));
+ }
+
+ /** Writes a string as raw character codes, buffering through the 8-byte helper. */
+ writeAscii(text: string) {
+ for (let i = 0; i < text.length; i++) {
+ this.#helperView.setUint8(i % 8, text.charCodeAt(i));
+ // Flush the helper every time it fills up
+ if (i % 8 === 7) this.write(this.#helper);
+ }
+
+ // Flush whatever is left over from the last partial fill
+ if (text.length % 8 !== 0) {
+ this.write(this.#helper.subarray(0, text.length % 8));
+ }
+ }
+
+ /**
+ * Writes a box (recursively including its children) at the current position and records its offset for later
+ * patching. For leaf boxes the size is known up front; otherwise a placeholder size is written first and then
+ * overwritten once the end position is known.
+ */
+ writeBox(box: Box) {
+ this.offsets.set(box, this.pos);
+
+ if (box.contents && !box.children) {
+ this.writeBoxHeader(box, box.size ?? box.contents.byteLength + 8);
+ this.write(box.contents);
+ } else {
+ let startPos = this.pos;
+ this.writeBoxHeader(box, 0);
+
+ if (box.contents) this.write(box.contents);
+ if (box.children) for (let child of box.children) if (child) this.writeBox(child);
+
+ let endPos = this.pos;
+ let size = box.size ?? endPos - startPos;
+ this.seek(startPos);
+ this.writeBoxHeader(box, size);
+ this.seek(endPos);
+ }
+ }
+
+ /** Writes a box header: 32-bit size (or the value 1 followed later by a 64-bit size) and the box type. */
+ writeBoxHeader(box: Box, size: number) {
+ this.writeU32(box.largeSize ? 1 : size);
+ this.writeAscii(box.type);
+ if (box.largeSize) this.writeU64(size);
+ }
+
+ /** Returns the header size in bytes: 8, plus 8 more when the 64-bit large size is used. */
+ measureBoxHeader(box: Box) {
+ return 8 + (box.largeSize ? 8 : 0);
+ }
+
+ /** Re-writes a previously written box in place, then restores the write head. */
+ patchBox(box: Box) {
+ let endPos = this.pos;
+ this.seek(this.offsets.get(box));
+ this.writeBox(box);
+ this.seek(endPos);
+ }
+
+ /** Computes a box's total size in bytes without writing it. */
+ measureBox(box: Box) {
+ if (box.contents && !box.children) {
+ let headerSize = this.measureBoxHeader(box);
+ return headerSize + box.contents.byteLength;
+ } else {
+ let result = this.measureBoxHeader(box);
+ if (box.contents) result += box.contents.byteLength;
+ if (box.children) for (let child of box.children) if (child) result += this.measureBox(child);
+
+ return result;
+ }
+ }
+}
+
+/**
+ * Writes into a growable in-memory buffer during muxing; once muxing finishes, the written
+ * region is copied out into the ArrayBufferTarget's `buffer`.
+ */
+export class ArrayBufferTargetWriter extends Writer {
+ #target: ArrayBufferTarget;
+ #backing = new ArrayBuffer(2**16);
+ #view = new Uint8Array(this.#backing);
+ // Highest byte position ever written, so seeking backwards never shrinks the output
+ #highWater = 0;
+
+ constructor(target: ArrayBufferTarget) {
+ super();
+
+ this.#target = target;
+ }
+
+ /** Grows the backing buffer (by doubling) until it can hold at least `size` bytes. */
+ #ensureSize(size: number) {
+ const oldLength = this.#backing.byteLength;
+ if (oldLength >= size) return;
+
+ let newLength = oldLength;
+ do {
+ newLength *= 2;
+ } while (newLength < size);
+
+ const grown = new ArrayBuffer(newLength);
+ const grownView = new Uint8Array(grown);
+ grownView.set(this.#view, 0);
+
+ this.#backing = grown;
+ this.#view = grownView;
+ }
+
+ write(data: Uint8Array) {
+ this.#ensureSize(this.pos + data.byteLength);
+
+ this.#view.set(data, this.pos);
+ this.pos += data.byteLength;
+
+ this.#highWater = Math.max(this.#highWater, this.pos);
+ }
+
+ finalize() {
+ this.#ensureSize(this.pos);
+ this.#target.buffer = this.#backing.slice(0, Math.max(this.#highWater, this.pos));
+ }
+}
+
+/**
+ * Writes to a StreamTarget every time it is flushed, sending out all of the new data written since the
+ * last flush. This is useful for streaming applications, like piping the output to disk.
+ */
+export class StreamTargetWriter extends Writer {
+ #target: StreamTarget;
+ // Every write is recorded as a (data, start) section; flush() coalesces and emits them
+ #sections: {
+ data: Uint8Array,
+ start: number
+ }[] = [];
+
+ constructor(target: StreamTarget) {
+ super();
+
+ this.#target = target;
+ }
+
+ write(data: Uint8Array) {
+ // Defensive copy (slice) so later mutation of the caller's buffer can't corrupt the queue
+ this.#sections.push({
+ data: data.slice(),
+ start: this.pos
+ });
+ this.pos += data.byteLength;
+ }
+
+ /** Coalesces all queued sections into contiguous chunks and emits them via onData. */
+ flush() {
+ if (this.#sections.length === 0) return;
+
+ let chunks: {
+ start: number,
+ size: number,
+ data?: Uint8Array
+ }[] = [];
+ // Sort a copy by file position; #sections itself keeps insertion (write) order
+ let sorted = [...this.#sections].sort((a, b) => a.start - b.start);
+
+ chunks.push({
+ start: sorted[0].start,
+ size: sorted[0].data.byteLength
+ });
+
+ // Figure out how many contiguous chunks we have
+ for (let i = 1; i < sorted.length; i++) {
+ let lastChunk = chunks[chunks.length - 1];
+ let section = sorted[i];
+
+ if (section.start <= lastChunk.start + lastChunk.size) {
+ // Overlapping or adjacent: extend the current chunk
+ lastChunk.size = Math.max(lastChunk.size, section.start + section.data.byteLength - lastChunk.start);
+ } else {
+ chunks.push({
+ start: section.start,
+ size: section.data.byteLength
+ });
+ }
+ }
+
+ for (let chunk of chunks) {
+ chunk.data = new Uint8Array(chunk.size);
+
+ // Make sure to write the data in the correct order for correct overwriting
+ for (let section of this.#sections) {
+ // Check if the section is in the chunk
+ if (chunk.start <= section.start && section.start < chunk.start + chunk.size) {
+ chunk.data.set(section.data, section.start - chunk.start);
+ }
+ }
+
+ this.#target.options.onData?.(chunk.data, chunk.start);
+ }
+
+ this.#sections.length = 0;
+ }
+
+ finalize() {}
+}
+
+// Default in-RAM chunk size for the chunked writers (2**24 = 16 MiB)
+const DEFAULT_CHUNK_SIZE = 2**24;
+// Upper bound on chunks kept in memory before older ones are force-flushed
+const MAX_CHUNKS_AT_ONCE = 2;
+
+// One fixed-size in-RAM window of the output file
+interface Chunk {
+ start: number,
+ written: ChunkSection[],
+ data: Uint8Array,
+ shouldFlush: boolean
+}
+
+// A contiguous written region inside a chunk; positions are relative to the chunk's start
+interface ChunkSection {
+ start: number,
+ end: number
+}
+
+/**
+ * Writes to a StreamTarget using a chunked approach: Data is first buffered in memory until it reaches a large enough
+ * size, which is when it is piped to the StreamTarget. This is helpful for reducing the total amount of writes.
+ */
+export class ChunkedStreamTargetWriter extends Writer {
+ #target: StreamTarget;
+ #chunkSize: number;
+ /**
+ * The data is divided up into fixed-size chunks, whose contents are first filled in RAM and then flushed out.
+ * A chunk is flushed if all of its contents have been written.
+ */
+ #chunks: Chunk[] = [];
+
+ constructor(target: StreamTarget) {
+ super();
+
+ this.#target = target;
+ this.#chunkSize = target.options?.chunkSize ?? DEFAULT_CHUNK_SIZE;
+
+ if (!Number.isInteger(this.#chunkSize) || this.#chunkSize < 2**10) {
+ throw new Error('Invalid StreamTarget options: chunkSize must be an integer not smaller than 1024.');
+ }
+ }
+
+ write(data: Uint8Array) {
+ this.#writeDataIntoChunks(data, this.pos);
+ this.#flushChunks();
+
+ this.pos += data.byteLength;
+ }
+
+ /** Writes `data` at absolute file `position`, recursing when the data spans chunk boundaries. */
+ #writeDataIntoChunks(data: Uint8Array, position: number) {
+ // First, find the chunk to write the data into, or create one if none exists
+ let chunkIndex = this.#chunks.findIndex(x => x.start <= position && position < x.start + this.#chunkSize);
+ if (chunkIndex === -1) chunkIndex = this.#createChunk(position);
+ let chunk = this.#chunks[chunkIndex];
+
+ // Figure out how much to write to the chunk, and then write to the chunk
+ let relativePosition = position - chunk.start;
+ let toWrite = data.subarray(0, Math.min(this.#chunkSize - relativePosition, data.byteLength));
+ chunk.data.set(toWrite, relativePosition);
+
+ // Create a section describing the region of data that was just written to
+ let section: ChunkSection = {
+ start: relativePosition,
+ end: relativePosition + toWrite.byteLength
+ };
+ this.#insertSectionIntoChunk(chunk, section);
+
+ // Queue chunk for flushing to target if it has been fully written to
+ if (chunk.written[0].start === 0 && chunk.written[0].end === this.#chunkSize) {
+ chunk.shouldFlush = true;
+ }
+
+ // Make sure we don't hold too many chunks in memory at once to keep memory usage down
+ if (this.#chunks.length > MAX_CHUNKS_AT_ONCE) {
+ // Flush all but the last chunk
+ for (let i = 0; i < this.#chunks.length-1; i++) {
+ this.#chunks[i].shouldFlush = true;
+ }
+ this.#flushChunks();
+ }
+
+ // If the data didn't fit in one chunk, recurse with the remaining datas
+ if (toWrite.byteLength < data.byteLength) {
+ this.#writeDataIntoChunks(data.subarray(toWrite.byteLength), position + toWrite.byteLength);
+ }
+ }
+
+ /**
+ * Inserts `section` into the chunk's sorted list of written regions, merging any
+ * regions that now touch or overlap so the list stays sorted and disjoint.
+ */
+ #insertSectionIntoChunk(chunk: Chunk, section: ChunkSection) {
+ let low = 0;
+ let high = chunk.written.length - 1;
+ let index = -1;
+
+ // Do a binary search to find the last section with a start not larger than `section`'s start
+ while (low <= high) {
+ let mid = Math.floor(low + (high - low + 1) / 2);
+
+ if (chunk.written[mid].start <= section.start) {
+ low = mid + 1;
+ index = mid;
+ } else {
+ high = mid - 1;
+ }
+ }
+
+ // Insert the new section
+ chunk.written.splice(index + 1, 0, section);
+ // Start merging from the predecessor only if it reaches the new section
+ if (index === -1 || chunk.written[index].end < section.start) index++;
+
+ // Merge overlapping sections
+ while (index < chunk.written.length - 1 && chunk.written[index].end >= chunk.written[index + 1].start) {
+ chunk.written[index].end = Math.max(chunk.written[index].end, chunk.written[index + 1].end);
+ chunk.written.splice(index + 1, 1);
+ }
+ }
+
+ /** Allocates the chunk whose fixed-size window contains `includesPosition`; returns its index. */
+ #createChunk(includesPosition: number) {
+ // Chunk windows are aligned to multiples of the chunk size
+ let start = Math.floor(includesPosition / this.#chunkSize) * this.#chunkSize;
+ let chunk: Chunk = {
+ start,
+ data: new Uint8Array(this.#chunkSize),
+ written: [],
+ shouldFlush: false
+ };
+ this.#chunks.push(chunk);
+ this.#chunks.sort((a, b) => a.start - b.start);
+
+ return this.#chunks.indexOf(chunk);
+ }
+
+ /** Emits the written regions of flagged chunks (all chunks when `force`) and drops them from memory. */
+ #flushChunks(force = false) {
+ for (let i = 0; i < this.#chunks.length; i++) {
+ let chunk = this.#chunks[i];
+ if (!chunk.shouldFlush && !force) continue;
+
+ for (let section of chunk.written) {
+ this.#target.options.onData?.(
+ chunk.data.subarray(section.start, section.end),
+ chunk.start + section.start
+ );
+ }
+ // Remove the flushed chunk; decrement i to account for the splice
+ this.#chunks.splice(i--, 1);
+ }
+ }
+
+ finalize() {
+ // Push out everything still buffered, regardless of completeness
+ this.#flushChunks(true);
+ }
+}
+
+/**
+ * Essentially a wrapper around ChunkedStreamTargetWriter, writing directly to disk using the File System Access API.
+ * This is useful for large files, as available RAM is no longer a bottleneck.
+ */
+export class FileSystemWritableFileStreamTargetWriter extends ChunkedStreamTargetWriter {
+ constructor(target: FileSystemWritableFileStreamTarget) {
+ // Pipe every flushed chunk straight into the file stream at its byte position
+ const pipeToStream = (data: Uint8Array, position: number) => target.stream.write({
+ type: 'write',
+ data,
+ position
+ });
+
+ super(new StreamTarget({
+ onData: pipeToStream,
+ chunkSize: target.options?.chunkSize
+ }));
+ }
+}
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/test/CantinaBand60.wav b/v1-com-officielle/public/mp4-muxer-main/test/CantinaBand60.wav
new file mode 100644
index 0000000..47842ed
Binary files /dev/null and b/v1-com-officielle/public/mp4-muxer-main/test/CantinaBand60.wav differ
diff --git a/v1-com-officielle/public/mp4-muxer-main/test/deconstruct.html b/v1-com-officielle/public/mp4-muxer-main/test/deconstruct.html
new file mode 100644
index 0000000..f0c8cde
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/test/deconstruct.html
@@ -0,0 +1,199 @@
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/test/test.html b/v1-com-officielle/public/mp4-muxer-main/test/test.html
new file mode 100644
index 0000000..dcf704c
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/test/test.html
@@ -0,0 +1,121 @@
+
+
+
\ No newline at end of file
diff --git a/v1-com-officielle/public/mp4-muxer-main/tsconfig.json b/v1-com-officielle/public/mp4-muxer-main/tsconfig.json
new file mode 100644
index 0000000..4979124
--- /dev/null
+++ b/v1-com-officielle/public/mp4-muxer-main/tsconfig.json
@@ -0,0 +1,14 @@
+{
+ "compilerOptions": {
+ "target": "ES2021",
+ "strict": true,
+ "strictNullChecks": false,
+ "noImplicitAny": true,
+ "noImplicitOverride": true,
+ "types": ["@types/wicg-file-system-access", "@types/dom-webcodecs"]
+ },
+ "include": [
+ "src/**/*",
+ "build/**/*.ts"
+ ]
+}
\ No newline at end of file
diff --git a/v1-com-officielle/public/paramList4Background.png b/v1-com-officielle/public/paramList4Background.png
new file mode 100644
index 0000000..64281f1
Binary files /dev/null and b/v1-com-officielle/public/paramList4Background.png differ
diff --git a/v1-com-officielle/src/App.vue b/v1-com-officielle/src/App.vue
index 81c28ec..06f389b 100644
--- a/v1-com-officielle/src/App.vue
+++ b/v1-com-officielle/src/App.vue
@@ -1,30 +1,22 @@
-
-
+
+
+
+
+
diff --git a/v1-com-officielle/src/assets/style.css b/v1-com-officielle/src/assets/style.css
new file mode 100644
index 0000000..44e72b8
--- /dev/null
+++ b/v1-com-officielle/src/assets/style.css
@@ -0,0 +1,85 @@
+/*============= Color palette */
+
+:root[data-theme="dark"]{
+ --dark-color: #380052;
+ --light-color: #C303FF;
+ --accent-color: #00FF76;
+}
+
+/* NOTE(review): light theme currently uses the exact same values as dark — confirm intended */
+:root[data-theme="light"]{
+ --dark-color: #380052;
+ --light-color: #C303FF;
+ --accent-color: #00FF76;
+}
+
+/*============= Typography */
+@font-face {
+ font-family: 'lineal';
+ src: url('./typo/Lineal-Heavy.ttf');
+}
+
+@font-face {
+ font-family: 'pressStart2P';
+ src: url('./typo/PressStart2P-Regular.ttf');
+}
+
+@font-face {
+ font-family: 'velvelyne';
+ src: url('./typo/Velvelyne-Light.ttf') format('truetype');
+ font-weight:lighter;
+}
+
+@font-face {
+ font-family: 'velvelyne';
+ src:url('./typo/Velvelyne-Bold.ttf') format('truetype');
+ font-weight: bold;
+}
+
+/*============= Disable browser defaults */
+input {
+ -webkit-appearance: none;
+ appearance: none;
+}
+
+a{
+ text-decoration: none;
+}
+
+/*===================== Reference text (behind the canvas; presumably SEO keywords — confirm) */
+.referenceText{
+ position:absolute;
+ z-index: -33;
+ color: var(--light-color);
+}
+
+/*============= Body & background */
+html, body{
+ margin: 0;
+ padding: 0;
+ /* NOTE(review): 'position' is declared twice (absolute, then fixed below); the later 'fixed' wins */
+ position: absolute;
+ background-color: #000000;
+ width: 100%;
+ height:100%;
+ position: fixed;
+ inset: 0;
+ overflow-x: hidden;
+ display: block;
+}
+
+canvas {
+ display: block;
+ max-width: 100%;
+ position: fixed;
+ margin: 0 auto;
+ padding: 0;
+ /* margin-top: 0vh; */
+ text-align: center;
+ /* height: 100vh; */
+}
+
+/* Hide the dat.GUI panel entirely */
+.dg{
+ display: none !important;
+ height: 0;
+ overflow: hidden;
+}
diff --git a/v1-com-officielle/src/assets/vue.svg b/v1-com-officielle/src/assets/vue.svg
deleted file mode 100644
index 770e9d3..0000000
--- a/v1-com-officielle/src/assets/vue.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/v1-com-officielle/src/main.js b/v1-com-officielle/src/main.js
index 2425c0f..c433840 100644
--- a/v1-com-officielle/src/main.js
+++ b/v1-com-officielle/src/main.js
@@ -1,5 +1,16 @@
import { createApp } from 'vue'
-import './style.css'
+// Global CSS import
+import './assets/style.css'
+// Root Vue app import
import App from './App.vue'
+// Component imports (currently disabled)
+//import MainContent from './components/MainContent.vue'
-createApp(App).mount('#app')
+// Create the root app
+const app = createApp(App);
+
+// Components (global registration, currently disabled)
+//app.component('mainDiv',MainContent);
+
+// Mount into the #app div of index.html
+app.mount('#app');
diff --git a/v1-com-officielle/src/style.css b/v1-com-officielle/src/style.css
deleted file mode 100644
index f691315..0000000
--- a/v1-com-officielle/src/style.css
+++ /dev/null
@@ -1,79 +0,0 @@
-:root {
- font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;
- line-height: 1.5;
- font-weight: 400;
-
- color-scheme: light dark;
- color: rgba(255, 255, 255, 0.87);
- background-color: #242424;
-
- font-synthesis: none;
- text-rendering: optimizeLegibility;
- -webkit-font-smoothing: antialiased;
- -moz-osx-font-smoothing: grayscale;
-}
-
-a {
- font-weight: 500;
- color: #646cff;
- text-decoration: inherit;
-}
-a:hover {
- color: #535bf2;
-}
-
-body {
- margin: 0;
- display: flex;
- place-items: center;
- min-width: 320px;
- min-height: 100vh;
-}
-
-h1 {
- font-size: 3.2em;
- line-height: 1.1;
-}
-
-button {
- border-radius: 8px;
- border: 1px solid transparent;
- padding: 0.6em 1.2em;
- font-size: 1em;
- font-weight: 500;
- font-family: inherit;
- background-color: #1a1a1a;
- cursor: pointer;
- transition: border-color 0.25s;
-}
-button:hover {
- border-color: #646cff;
-}
-button:focus,
-button:focus-visible {
- outline: 4px auto -webkit-focus-ring-color;
-}
-
-.card {
- padding: 2em;
-}
-
-#app {
- max-width: 1280px;
- margin: 0 auto;
- padding: 2rem;
- text-align: center;
-}
-
-@media (prefers-color-scheme: light) {
- :root {
- color: #213547;
- background-color: #ffffff;
- }
- a:hover {
- color: #747bff;
- }
- button {
- background-color: #f9f9f9;
- }
-}