CSS code:
canvas, video {
width: 300px;
height: 200px;
}
HTML code:
<canvas id="canvas" width="600" height="400"></canvas>
<h4>Audio material</h4>
<p>
<audio id="audio" src="./happy-and-bright.mp3" controls preload="auto"></audio>
</p>
<p class="flex">
<button id="generate">Generate MP4 video</button>
<a id="download" download="muxer-with-audio.mp4">Download</a>
</p>
<div class="view">
<video id="video" width="600" height="400" controls></video>
<p class="time">视频绘制时间:<output id="output1"></output>s,生成时间和播放一致(等音频流完整播放):<output id="output2"></output>s</p>
</div>
JS code:
// In-page animation preview
handleDraw(document.getElementById('canvas'));
// The muxer and the audio/video encoder objects
var muxer = null;
var videoEncoder = null;
var audioEncoder = null;
// Finish encoding
const endEncoding = async () => {
await videoEncoder?.flush();
await audioEncoder?.flush();
muxer.finalize();
let { buffer } = muxer.target;
var blobUrl = URL.createObjectURL(new Blob([buffer]));
video.src = blobUrl;
download.href = blobUrl;
videoEncoder = null;
audioEncoder = null;
muxer = null;
myAudio.pause();
};
// Create an off-screen canvas
var canvas = document.createElement('canvas');
canvas.width = 600;
canvas.height = 400;
// Construct the muxer (the MP4 container writer)
muxer = new Mp4Muxer.Muxer({
target: new Mp4Muxer.ArrayBufferTarget(),
video: {
codec: 'avc',
width: canvas.width,
height: canvas.height,
frameRate: 30
},
audio: {
codec: 'aac',
// These values must match the AudioEncoder configuration below
sampleRate: 44100,
numberOfChannels: 2
},
firstTimestampBehavior: 'offset'
});
// Audio and video encoders, built on the WebCodecs API
videoEncoder = new VideoEncoder({
output: (chunk, meta) => muxer.addVideoChunk(chunk, meta),
error: e => console.error(e)
});
videoEncoder.configure({
codec: 'avc1.42001f',
width: canvas.width,
height: canvas.height,
bitrate: 1e6
});
// Audio encoder
audioEncoder = new AudioEncoder({
output: (chunk, meta) => muxer.addAudioChunk(chunk, meta),
error: e => console.error(e)
});
audioEncoder.configure({
codec: 'mp4a.40.2',
numberOfChannels: 2,
sampleRate: 44100,
bitrate: 128000
});
// Fetch the audio resource
const myAudio = new Audio();
fetch(audio.src).then(res => {
// res.body.getReader().read() would only return the first chunk,
// so read the whole response as a Blob instead
return res.blob();
}).then(blob => {
var blobUrl = URL.createObjectURL(blob);
// Point the audio element at the blob
myAudio.src = blobUrl;
// Keep it hidden
myAudio.hidden = true;
// Mute it if needed, to avoid audible interference
// myAudio.muted = true;
// Append it to the page so it can be played
document.body.append(myAudio);
});
// Generate the MP4 when the button is clicked
generate.onclick = async function () {
const audioTrack = myAudio.captureStream().getAudioTracks()[0];
// MediaStreamTrackProcessor turns the track into a stream of media frames
let trackProcessor = new MediaStreamTrackProcessor({
track: audioTrack
});
// Play the audio and capture its audio stream in real time,
// handing the frames to the WebCodecs API for encoding
myAudio.play();
// Encode the audio data
let consumer = new WritableStream({
write(audioData) {
// Uncomment to inspect the sample rate and channel count:
// console.dir(audioData);
if (!audioEncoder) {
return;
}
audioEncoder.encode(audioData);
audioData.close();
}
});
trackProcessor.readable.pipeTo(consumer);
// Encode the video data
var startTime = document.timeline.currentTime;
var frameCounter = 0;
// handleDraw drives the animation (see the sketch after this listing; the original source is on the demo page)
handleDraw(canvas, function () {
let frame = new VideoFrame(canvas, {
timestamp: (frameCounter * 1000 / 30) * 1000
});
// Grab one frame to use as the video's poster image
if (frameCounter == 30) {
canvas.toBlob(function (blob) {
video.poster = URL.createObjectURL(blob);
}, 'image/jpeg', 0.95);
}
frameCounter++;
videoEncoder.encode(frame, { keyFrame: frameCounter % 30 === 0 });
frame.close();
}, function () {
// Expected end time:
// audio and video encoding do not finish at the same moment,
// so wait until the audio has played through
// before finalizing the encoding
const timeUsed = document.timeline.currentTime - startTime;
const timerActualEnd = frameCounter * 1000 / 30;
setTimeout(function () {
endEncoding();
// Update the button label
generate.innerHTML = 'Generation complete';
}, timerActualEnd - timeUsed);
// Display the timings
output1.innerHTML = Math.round(timeUsed / 10) / 100;
output2.innerHTML = Math.round(timerActualEnd / 10) / 100;
});
// Allow only a single click
this.disabled = true;
this.textContent = 'Generating...';
};
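The handleDraw function is provided by the demo page and is not listed above. Below is a minimal sketch of what it might look like, assuming it draws a simple animation at roughly 30 fps for a fixed number of frames and accepts an optional per-frame callback plus an optional completion callback; the sliding-square animation, the frame count, and the timer-based pacing are illustrative assumptions, not the original implementation.

// Sketch only: the signature matches how handleDraw is called above
function handleDraw(canvas, onFrame, onDone) {
    var context = canvas.getContext('2d');
    var totalFrames = 90; // assumed length, about 3 seconds at 30 fps
    var frame = 0;
    var step = function () {
        // Placeholder animation: a square sliding across the canvas
        context.clearRect(0, 0, canvas.width, canvas.height);
        context.fillStyle = 'deepskyblue';
        context.fillRect((frame * 5) % canvas.width, canvas.height / 2 - 25, 50, 50);
        // Per-frame hook, used above to encode a VideoFrame
        if (typeof onFrame === 'function') {
            onFrame();
        }
        frame++;
        if (frame < totalFrames) {
            // Pace at ~30 fps to match the frame timestamps used during encoding
            setTimeout(step, 1000 / 30);
        } else if (typeof onDone === 'function') {
            // Completion hook, used above to finish the encoding
            onDone();
        }
    };
    step();
}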