add: PASSAGE V1 COM OFFICIELLE
préparation du template : intégration du fond animé, fusion du style background et du style pcp
This commit is contained in:
@@ -0,0 +1,24 @@
|
||||
<!DOCTYPE html>
<html lang="en" translate="no">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>MP4 muxer demo</title>
<link rel="stylesheet" href="./style.css">
<!-- mp4-muxer library build; `defer` keeps HTML parsing non-blocking -->
<script src="../build/mp4-muxer.js" defer></script>
<!-- Demo logic: recording, encoding, and canvas drawing -->
<script src="./script.js" defer></script>
</head>
<body>
<main>
<h1>MP4 muxer demo - draw something!</h1>
<h2>The live canvas state and your microphone input will be recorded<br>and muxed into an MP4 file.</h2>
<!-- Start/End buttons are toggled by script.js during a session -->
<div id="controls">
<button id="start-recording">Start recording</button>
<button id="end-recording" style="display: none;">End recording</button>
</div>
<!-- Drawing surface; its pixels are captured as the video track -->
<canvas width="640" height="480"></canvas>
<!-- Live status line (blinking dot + elapsed time), filled by script.js -->
<p id="recording-status"></p>
</main>
</body>
</html>
@@ -0,0 +1,206 @@
|
||||
// DOM references for the drawing surface and the recording UI.
const canvas = document.querySelector('canvas');
// `desynchronized` hints the browser to lower canvas present latency.
const ctx = canvas.getContext('2d', { desynchronized: true });
const startRecordingButton = document.querySelector('#start-recording');
const endRecordingButton = document.querySelector('#end-recording');
const recordingStatus = document.querySelector('#recording-status');

/** RECORDING & MUXING STUFF */

// Mutable per-session state; reset between recordings.
let muxer = null;          // Mp4Muxer.Muxer for the current session
let videoEncoder = null;   // WebCodecs VideoEncoder
let audioEncoder = null;   // WebCodecs AudioEncoder (only when a mic was acquired)
let startTime = null;      // document.timeline timestamp when recording began
let recording = false;     // true while incoming audio chunks should be encoded
let audioTrack = null;     // MediaStreamTrack from getUserMedia, if granted
let intervalId = null;     // setInterval handle driving ~30 fps frame capture
let lastKeyFrame = null;   // elapsed ms at which the last key frame was forced
let framesGenerated = 0;   // count of video frames produced so far
||||
/**
 * Starts a recording session: acquires the microphone (when possible),
 * sets up the MP4 muxer and the WebCodecs video/audio encoders, then
 * begins capturing canvas frames at ~30 fps via encodeVideoFrame().
 */
const startRecording = async () => {
	// Check for VideoEncoder availability
	if (typeof VideoEncoder === 'undefined') {
		alert("Looks like your user agent doesn't support VideoEncoder / WebCodecs API yet.");
		return;
	}

	startRecordingButton.style.display = 'none';

	// Check for AudioEncoder availability
	if (typeof AudioEncoder !== 'undefined') {
		// Try to get access to the user's microphone
		try {
			let userMedia = await navigator.mediaDevices.getUserMedia({ video: false, audio: true });
			audioTrack = userMedia.getAudioTracks()[0];
		} catch (e) {} // Permission denied / no device: fall through and record video only
		if (!audioTrack) console.warn("Couldn't acquire a user media audio track.");
	} else {
		console.warn('AudioEncoder not available; no need to acquire a user media audio track.');
	}

	endRecordingButton.style.display = 'block';

	// NOTE(review): MediaStreamTrack.getCapabilities() may omit `sampleRate`
	// in some browsers, which would make this undefined — confirm against
	// the supported-browser matrix.
	let audioSampleRate = audioTrack?.getCapabilities().sampleRate.max;

	// Create an MP4 muxer with a video track and maybe an audio track
	muxer = new Mp4Muxer.Muxer({
		target: new Mp4Muxer.ArrayBufferTarget(),

		video: {
			codec: 'avc',
			width: canvas.width,
			height: canvas.height
		},
		audio: audioTrack ? {
			codec: 'aac',
			sampleRate: audioSampleRate,
			numberOfChannels: 1
		} : undefined,

		// Puts metadata to the start of the file. Since we're using ArrayBufferTarget anyway, this makes no difference
		// to memory footprint.
		fastStart: 'in-memory',

		// Because we're directly pumping a MediaStreamTrack's data into it, which doesn't start at timestamp = 0
		firstTimestampBehavior: 'offset'
	});

	// Video encoder: H.264 (avc1.42001f) at 1 Mbps, sized to the canvas.
	videoEncoder = new VideoEncoder({
		output: (chunk, meta) => muxer.addVideoChunk(chunk, meta),
		error: e => console.error(e)
	});
	videoEncoder.configure({
		codec: 'avc1.42001f',
		width: canvas.width,
		height: canvas.height,
		bitrate: 1e6
	});

	if (audioTrack) {
		// Audio encoder: AAC-LC (mp4a.40.2), mono, 128 kbps.
		audioEncoder = new AudioEncoder({
			output: (chunk, meta) => muxer.addAudioChunk(chunk, meta),
			error: e => console.error(e)
		});
		audioEncoder.configure({
			codec: 'mp4a.40.2',
			numberOfChannels: 1,
			sampleRate: audioSampleRate,
			bitrate: 128000
		});

		// Create a MediaStreamTrackProcessor to get AudioData chunks from the audio track
		let trackProcessor = new MediaStreamTrackProcessor({ track: audioTrack });
		let consumer = new WritableStream({
			write(audioData) {
				if (!recording) return; // Drop chunks arriving outside the session
				audioEncoder.encode(audioData);
				audioData.close(); // Release the frame's media resource promptly
			}
		});
		trackProcessor.readable.pipeTo(consumer);
	}

	// Reset per-session state, then drive frame capture at ~30 fps.
	startTime = document.timeline.currentTime;
	recording = true;
	lastKeyFrame = -Infinity;
	framesGenerated = 0;

	encodeVideoFrame();
	intervalId = setInterval(encodeVideoFrame, 1000/30);
};
startRecordingButton.addEventListener('click', startRecording);
|
||||
|
||||
/**
 * Captures the current canvas contents as one video frame, encodes it,
 * and refreshes the on-screen recording status line.
 */
const encodeVideoFrame = () => {
	const elapsedTime = document.timeline.currentTime - startTime;

	// Frames are timestamped by frame count, not wall clock, so they stay
	// equally spaced at exactly 1/30th of a second apart.
	const frame = new VideoFrame(canvas, {
		timestamp: framesGenerated * 1e6 / 30,
		duration: 1e6 / 30
	});
	framesGenerated += 1;

	// Force a video key frame at least every 5 seconds for good scrubbing.
	const needsKeyFrame = elapsedTime - lastKeyFrame >= 5000;
	if (needsKeyFrame) {
		lastKeyFrame = elapsedTime;
	}

	videoEncoder.encode(frame, { keyFrame: needsKeyFrame });
	frame.close();

	// Blinking indicator: red during the first half of each second.
	const dot = elapsedTime % 1000 < 500 ? '🔴' : '⚫';
	recordingStatus.textContent = `${dot} Recording - ${(elapsedTime / 1000).toFixed(1)} s`;
};
|
||||
|
||||
/**
 * Ends the recording session: flushes both encoders, finalizes the MP4
 * container, triggers a download, and resets all session state so a new
 * recording can be started cleanly.
 */
const endRecording = async () => {
	endRecordingButton.style.display = 'none';
	recordingStatus.textContent = '';
	recording = false; // Stops the audio consumer from encoding further chunks

	clearInterval(intervalId);
	audioTrack?.stop(); // Release the microphone

	// Drain any queued frames/chunks before finalizing the container.
	await videoEncoder?.flush();
	await audioEncoder?.flush();
	muxer.finalize();

	let buffer = muxer.target.buffer;
	// Tag the blob with its MIME type so the download is a proper MP4.
	downloadBlob(new Blob([buffer], { type: 'video/mp4' }));

	// Reset session state. (The previous version also assigned to an
	// undeclared `firstAudioTimestamp`, creating an implicit global; that
	// leftover has been removed. It also never cleared `audioTrack`, so a
	// stopped track could leak into the next session.)
	videoEncoder = null;
	audioEncoder = null;
	muxer = null;
	startTime = null;
	audioTrack = null;
	intervalId = null;

	startRecordingButton.style.display = 'block';
};
endRecordingButton.addEventListener('click', endRecording);
|
||||
|
||||
/**
 * Triggers a browser download of `blob` under the name "davinci.mp4".
 * @param {Blob} blob - The finalized MP4 file contents.
 */
const downloadBlob = (blob) => {
	let url = window.URL.createObjectURL(blob);
	let a = document.createElement('a');
	a.style.display = 'none';
	a.href = url;
	a.download = 'davinci.mp4';
	document.body.appendChild(a);
	a.click();
	// Fix: detach the anchor again — the original leaked one hidden <a>
	// element into <body> per download.
	a.remove();
	window.URL.revokeObjectURL(url);
};
|
||||
|
||||
/** CANVAS DRAWING STUFF */

// Start with an all-white canvas so recorded frames aren't transparent/black.
ctx.fillStyle = 'white';
ctx.fillRect(0, 0, canvas.width, canvas.height);

// Freehand-drawing state.
let drawing = false;           // true while the primary button is held down
let lastPos = { x: 0, y: 0 };  // last pointer position, canvas-relative
|
||||
|
||||
/**
 * Converts a pointer event's viewport coordinates into coordinates
 * relative to the canvas's top-left corner.
 */
const getRelativeMousePos = (e) => {
	const { x: left, y: top } = canvas.getBoundingClientRect();
	return {
		x: e.clientX - left,
		y: e.clientY - top
	};
};
|
||||
|
||||
/**
 * Strokes a single 3px, round-capped black segment between two
 * canvas-relative points.
 */
const drawLine = (from, to) => {
	// Stroke configuration; only needs to be set before stroke() runs.
	ctx.strokeStyle = 'black';
	ctx.lineWidth = 3;
	ctx.lineCap = 'round';

	ctx.beginPath();
	ctx.moveTo(from.x, from.y);
	ctx.lineTo(to.x, to.y);
	ctx.stroke();
};
|
||||
|
||||
// Begin a stroke on primary-button press; paint a dot at the start point.
canvas.addEventListener('pointerdown', (e) => {
	if (e.button !== 0) return; // primary button / touch contact only

	drawing = true;
	lastPos = getRelativeMousePos(e);
	drawLine(lastPos, lastPos); // zero-length segment renders a round dot
});
// Listen on window so the stroke ends even when released off-canvas.
window.addEventListener('pointerup', () => {
	drawing = false;
});
// Fix: this was 'mousemove', which never fires for touch/pen input even
// though 'pointerdown' above does — touch users could tap dots but never
// draw a line. 'pointermove' covers mouse, touch and pen uniformly.
window.addEventListener('pointermove', (e) => {
	if (!drawing) return;

	let newPos = getRelativeMousePos(e);
	drawLine(lastPos, newPos);
	lastPos = newPos;
});
|
||||
@@ -0,0 +1,61 @@
|
||||
/* Full-viewport dark page with monospace text. */
html, body {
	width: 100%;
	height: 100%;
	margin: 0;
	background: #120d17;
	color: white;
	font-family: monospace;
}

/* Center <main> both horizontally and vertically. */
body {
	display: flex;
	align-items: center;
	justify-content: center;
}

/* Prevent accidental text selection while drawing. */
* {
	user-select: none;
}

/* Matches the canvas width. */
main {
	width: 640px;
}

h1 {
	margin: 0 0 10px;
	font-weight: normal;
	text-align: center;
}

h2 {
	margin: 0 0 20px;
	font-weight: normal;
	font-size: 14px;
	text-align: center;
}

canvas {
	border-radius: 10px;
	outline: 3px solid rgb(202, 202, 202);
}

/* Fixed height so swapping Start/End buttons doesn't shift the layout. */
#controls {
	display: flex;
	justify-content: center;
	height: 38px;
	margin-bottom: 20px;
}

button {
	font-size: 20px;
	padding: 5px 8px;
}

/* Recording status line; fixed height to avoid layout shift when empty. */
p {
	margin: 20px 0 0;
	text-align: center;
	height: 20px;
}
|
||||
Reference in New Issue
Block a user