I'm working on a project in which, by adapting this WebRTC demo code (https://webrtc.github.io/samples/src/content/capture/canvas-record/), I'm able to display a recording of the canvas in a video element.
That said, I'm at a loss as to how to record the video's audio alongside the canvas. Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
According to the MediaStream API specification, there should theoretically be some way to accomplish this: https://w3c.github.io/mediacapture-main/#introduction
"The two main components in the MediaStream API are the MediaStreamTrack and MediaStream interfaces. The MediaStreamTrack object represents media of a single type that originates from one media source in the User Agent, e.g. video produced by a web camera. A MediaStream is used to group several MediaStreamTrack objects into one unit that can be recorded or rendered in a media element."
Posted on 2016-09-03 03:37:16
"Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?"
Yes, you can do it using the MediaStream.addTrack() method, or new MediaStream([track1, track2]).
The OP already knew how to get all of this, but here is a reminder for future readers:

To get a video stream track from a <canvas>, you can call the canvas.captureStream(framerate) method.

To get an audio stream track from a <video> element, you can use the Web Audio API and its createMediaStreamDestination method. This returns a MediaStreamAudioDestinationNode (dest) containing our audio stream. You then have to connect a MediaElementAudioSourceNode, created from your <video> element, to this dest. If you need to add more audio tracks to the stream, connect all of those sources to dest.

Now that we have two streams, one for the <canvas> video and one for the audio, we can either add the audio track to the canvas stream before initializing the recorder:
canvasStream.addTrack(audioStream.getAudioTracks()[0]);
const recorder = new MediaRecorder(canvasStream);
Or we can create a third MediaStream object from these two tracks:
const [videoTrack] = canvasStream.getVideoTracks();
const [audioTrack] = audioStream.getAudioTracks();
const recordedStream = new MediaStream([videoTrack, audioTrack]);
const recorder = new MediaRecorder(recordedStream);
Here is a complete example:
var btn = document.querySelector("button"),
    canvas,
    cStream,
    aStream,
    vid,
    recorder,
    analyser,
    dataArray,
    bufferLength,
    chunks = [];
function clickHandler() {
  btn.textContent = 'stop recording';
  if (!aStream) {
    initAudioStream();
  }
  // capture the canvas at 30 FPS and append the video's audio track
  cStream = canvas.captureStream(30);
  cStream.addTrack(aStream.getAudioTracks()[0]);
  recorder = new MediaRecorder(cStream);
  recorder.start();
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportStream;
  btn.onclick = stopRecording;
}
function exportStream(e) {
  if (chunks.length) {
    var blob = new Blob(chunks, { type: chunks[0].type });
    var vidURL = URL.createObjectURL(blob);
    var vid = document.createElement('video');
    vid.controls = true;
    vid.src = vidURL;
    vid.onended = function() {
      URL.revokeObjectURL(vidURL);
    };
    document.body.insertBefore(vid, canvas);
  } else {
    document.body.insertBefore(document.createTextNode('no data saved'), canvas);
  }
}
function saveChunks(e) {
  // ignore empty data events
  e.data.size && chunks.push(e.data);
}
function stopRecording() {
  vid.pause();
  btn.remove();
  recorder.stop();
}
function initAudioStream() {
  var audioCtx = new AudioContext();
  // create a stream from our AudioContext
  var dest = audioCtx.createMediaStreamDestination();
  aStream = dest.stream;
  // connect our video element's output to the stream
  var sourceNode = audioCtx.createMediaElementSource(vid);
  sourceNode.connect(dest);
  // start the video
  vid.play();
  // just for the fancy canvas drawings
  analyser = audioCtx.createAnalyser();
  sourceNode.connect(analyser);
  analyser.fftSize = 2048;
  bufferLength = analyser.frequencyBinCount;
  dataArray = new Uint8Array(bufferLength);
  analyser.getByteTimeDomainData(dataArray);
  // output to our headphones
  sourceNode.connect(audioCtx.destination);
  startCanvasAnim();
}
function enableButton() {
  vid.oncanplay = null;
  btn.onclick = clickHandler;
  btn.disabled = false;
}
var loadVideo = function() {
  vid = document.createElement('video');
  // the server must allow CORS, or WebAudio will read only silence from it
  vid.crossOrigin = 'anonymous';
  vid.oncanplay = enableButton;
  vid.src = 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4';
};
function startCanvasAnim() {
  // from MDN https://developer.mozilla.org/en/docs/Web/API/AnalyserNode#Examples
  canvas = Object.assign(document.createElement("canvas"), { width: 500, height: 200 });
  document.body.prepend(canvas);
  var canvasCtx = canvas.getContext('2d');
  canvasCtx.fillStyle = 'rgb(200, 200, 200)';
  canvasCtx.lineWidth = 2;
  canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
  var draw = function() {
    requestAnimationFrame(draw);
    analyser.getByteTimeDomainData(dataArray);
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
    canvasCtx.beginPath();
    var sliceWidth = canvas.width * 1.0 / bufferLength;
    var x = 0;
    for (var i = 0; i < bufferLength; i++) {
      var v = dataArray[i] / 128.0;
      var y = v * canvas.height / 2;
      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }
      x += sliceWidth;
    }
    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();
  };
  draw();
}
loadVideo();

/* CSS */
button { vertical-align: top }

<!-- HTML -->
<button disabled>record</button>
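One caveat worth adding: the example above derives the Blob's type from whatever the browser chose to record into. If you want a specific container/codec, the MediaRecorder constructor accepts a mimeType option, and the static MediaRecorder.isTypeSupported method lets you check support first. A minimal sketch, under the assumption that a WebM recording is wanted (the candidate strings below are illustrative, not exhaustive, and support varies by browser):

function createRecorder(stream) {
  // try a few candidate containers/codecs, falling back to the browser default
  var candidates = [
    'video/webm;codecs=vp9,opus',
    'video/webm;codecs=vp8,opus',
    'video/webm'
  ];
  for (var i = 0; i < candidates.length; i++) {
    if (MediaRecorder.isTypeSupported(candidates[i])) {
      return new MediaRecorder(stream, { mimeType: candidates[i] });
    }
  }
  return new MediaRecorder(stream);
}

You would then call createRecorder(cStream) in clickHandler in place of new MediaRecorder(cStream).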
Posted on 2018-09-19 07:04:39
Kaiido's demo is brilliant. For those just looking for the tl;dr code to add an audio stream to their existing canvas stream:
let videoOrAudioElement = /* your audio source element */;
// get the audio track:
let ctx = new AudioContext();
let dest = ctx.createMediaStreamDestination();
let sourceNode = ctx.createMediaElementSource(videoOrAudioElement);
sourceNode.connect(dest);
sourceNode.connect(ctx.destination); // keep playing the audio out loud as well
let audioTrack = dest.stream.getAudioTracks()[0];
// add it to your canvas stream:
canvasStream.addTrack(audioTrack);
// use your canvas stream like you would normally:
let recorder = new MediaRecorder(canvasStream);
// ...
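A note on wiring this up, since it trips people up in practice: browsers typically create an AudioContext in the "suspended" state until a user gesture occurs, and createMediaElementSource can only be called once per media element. A minimal usage sketch, reusing ctx and canvasStream from the snippet above (the 'start' button id is hypothetical):

// assumes `ctx` and `canvasStream` from the snippet above
document.getElementById('start').onclick = async () => {
  // an AudioContext often starts "suspended"; resume it on a user gesture
  if (ctx.state === 'suspended') {
    await ctx.resume();
  }
  let recorder = new MediaRecorder(canvasStream);
  let chunks = [];
  recorder.ondataavailable = (e) => { if (e.data.size) chunks.push(e.data); };
  recorder.onstop = () => {
    let blob = new Blob(chunks, { type: chunks[0].type });
    // do something with the blob, e.g. URL.createObjectURL(blob)
  };
  recorder.start();
  setTimeout(() => recorder.stop(), 5000); // record for 5 seconds
};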
Source: https://stackoverflow.com/questions/39302814