Asked by: Kurizu Asked: 11/7/2023 Last edited by: Kurizu Updated: 11/7/2023 Views: 38
Record and download the video of a client using their stream or canvas
Q:
I'm trying to record and download a client's live video using a canvas or its stream.
Here is the code:
const mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
let mediaRecorder = []; // this is an array because I will have multiple media recorders for multiple clients
let recordedBlobs = []; // this is an array because I will have multiple recorded blobs for multiple clients
let sourceBuffer = []; // this is an array because I will have multiple source buffers for multiple clients
let recording = false;
let canvasArray = [{
username: "",
socketId: "",
canvas: null
}];
let streams = [{
username: "",
socketId: "",
stream: null
}];
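// note: index 0 of canvasArray/streams is a placeholder entry, which is why the loops below start at i = 1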
function setStream() {
console.log(connections)
for (let i = 1; i < canvasArray.length; i++) {
console.log(canvasArray[i].username, canvasArray[i].socketId, canvasArray[i].canvas, canvasArray[i].clstream);
console.log("canvasArray", canvasArray);
streams.push({
username: canvasArray[i].username,
socketId: canvasArray[i].socketId,
stream: canvasArray[i].canvas
});
console.log('Started stream capture from canvas element: ', streams[i].stream, streams[i].username, streams[i].socketId);
console.log("hi", connections[canvasArray[i].socketId].getLocalStreams())
}
console.log("streams", streams);
console.log("canvasArray", canvasArray);
}
function handleSourceOpen(event, username) {
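// note: addEventListener('sourceopen', handleSourceOpen) only passes the event, so username is undefined here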
console.log('MediaSource opened for: ', username);
sourceBuffer = [];
for (let i = 1; i < streams.length; i++) {
sourceBuffer.push(mediaSource.addSourceBuffer('video/webm; codecs="vp9"'));
console.log('Source buffer: ', sourceBuffer[i]);
}
}
function handleDataAvailable(event, socketId, username) {
if (event.data && event.data.size > 0) {
for (let i = 1; i < streams.length; i++) {
if (streams[i].socketId === socketId) {
recordedBlobs[i].push(event.data);
console.log('Recorded Blobs: ', recordedBlobs[i]);
break;
}
}
console.log("For socketId: ", socketId, "with username: ", username, "recordedBlobs", recordedBlobs);
} else {
console.log('No data available');
}
}
function startRecording() {
const options = { mimeType: 'video/webm; codecs="vp9"' }; // note: codecs="vp9", not codecs: "vp9"
for (let i = 1; i < streams.length; i++) {
const username = streams[i].username;
const socketId = streams[i].socketId;
// Check if there's an existing MediaRecorder and stop it
if (mediaRecorder[i] && mediaRecorder[i].state !== 'inactive') {
mediaRecorder[i].stop();
}
console.log('Started stream capture from canvas element:', username, socketId);
if (streams[i].stream) {
recordedBlobs[i] = [];
try {
console.log("streams[i].stream", streams[i].stream);
mediaRecorder[i] = new MediaRecorder(streams[i].stream, options);
console.log('Created MediaRecorder', mediaRecorder[i], 'with options', options);
mediaRecorder[i].onstop = handleStop;
mediaRecorder[i].ondataavailable = (event) => handleDataAvailable(event, socketId, username);
try {
mediaRecorder[i].start(100); // collect 100ms of data
} catch (e) {
console.error('Exception while starting MediaRecorder:', e);
alert('Exception while starting MediaRecorder: ' + e + '.\n\n' +
'See console logs for more details.');
return;
}
console.log('MediaRecorder started', mediaRecorder[i]);
} catch (e) {
console.error('Exception while creating MediaRecorder:', e);
alert('MediaRecorder is not supported by this browser.\n\n' +
'Try Firefox 29 or later, or Chrome 47 or later, ' +
'with Enable experimental Web Platform features enabled from chrome://flags.');
return;
}
}
}
}
function handleStop(event) {
console.log('Recorder stopped: ', event);
}
function stopRecording() {
for (let i = 0; i < mediaRecorder.length; i++) {
if (mediaRecorder[i]) {
mediaRecorder[i].stop();
console.log('Recorded Blobs: ', recordedBlobs[i]);
}
}
}
function download() {
for (let i = 1; i < recordedBlobs.length; i++) {
const blob = new Blob(recordedBlobs[i], { type: 'video/webm' });
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = `${streams[i].username}.webm`;
document.body.appendChild(a);
a.click();
setTimeout(() => {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 100);
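// note: this unconditional break means only the first client's recording is downloaded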
break;
}
console.log("recordedBlobs", recordedBlobs);
}
This sits above my component:
socket.on('user-joined', (id, clients, connectionsWithNames) => {
clients.forEach((socketListId) => {
connections[socketListId] = new RTCPeerConnection(peerConnectionConfig)
// Wait for their ice candidate
connections[socketListId].onicecandidate = function (event) {
if (event.candidate != null) {
socket.emit('signal', socketListId, JSON.stringify({ 'ice': event.candidate }))
}
}
// Wait for their video stream
connections[socketListId].onaddstream = (event) => {
var searchVideo = document.querySelector(`[data-socket="${socketListId}"]`)
if (searchVideo !== null) {
searchVideo.srcObject = event.stream
} else {
elms = clients.length
let main = document.getElementById('main')
let cssMesure = changeCssVideos({ main, elms });
let videoWrapper = document.createElement('div')
let video = document.createElement('video')
videoWrapper.id = socketListId
videoWrapper.className = "videoWrapper"
const idDisplay = document.createElement('div');
idDisplay.innerText = connectionsWithNames[socketListId];
videoWrapper.style.display = "flex";
videoWrapper.style.flexDirection = "column";
videoWrapper.style.justifyContent = "flex-start";
videoWrapper.style.alignItems = "flex-start";
videoWrapper.style.position = "relative";
videoWrapper.style.zIndex = "1";
idDisplay.style.position = "absolute";
idDisplay.style.top = "0px";
idDisplay.style.left = "0px";
idDisplay.style.zIndex = "2";
idDisplay.style.padding = "5px";
idDisplay.style.backgroundColor = "rgba(0, 0, 0, 0.5)";
idDisplay.style.color = "white";
idDisplay.style.fontWeight = "bold";
idDisplay.style.fontSize = "10px";
let css = {
minWidth: cssMesure.minWidth,
minHeight: cssMesure.minHeight,
maxHeight: "100%",
margin: "10px",
borderStyle: "solid",
borderColor: "#bdbdbd",
objectFit: "fill"
}
for (let prop in css) {
videoWrapper.style[prop] = css[prop];
}
videoWrapper.style.setProperty("width", cssMesure.width)
videoWrapper.style.setProperty("height", cssMesure.height)
// for (let i in css) video.style[i] = css[i]
// video.style.setProperty("width", cssMesure.width)
// video.style.setProperty("height", cssMesure.height)
video.style.width = "100%";
video.style.height = "100%";
video.style.objectFit = "fill";
// add classname to video element
video.className = `client-video-${socketListId}`
video.setAttribute('data-socket', socketListId)
video.srcObject = event.stream
video.autoplay = true
video.playsInline = true
let canvas = document.createElement("canvas");
canvas.style.width = "100%"
canvas.style.height = "100%"
canvas.style.objectFit = "fill";
canvas.style.position = "absolute";
canvas.style.top = "0px";
canvas.style.left = "0px";
canvas.style.zIndex = "3";
canvas.style.pointerEvents = "none";
// canvas.style.backgroundColor = "red";
canvas.className = "clientcanvas";
canvasArray.push({
username: connectionsWithNames[socketListId],
socketId: socketListId,
// canvas: canvas.captureStream(25).getVideoTracks()[0],
// I don't want tracks, I want the stream
canvas: canvas.captureStream(25),
// but then the stream doesn't have a handleDataAvailable function,
// so I need to attach handleDataAvailable to the stream
// but how do I do that? (see the draw-loop note after this handler)
clstream: connections[socketListId].getLocalStreams()[0] // this is the stream of the user who joined the call
});
console.log("canvasArray", canvasArray);
videoWrapper.appendChild(idDisplay);
videoWrapper.appendChild(canvas);
videoWrapper.appendChild(video)
main.appendChild(videoWrapper)
setStream();
}
}
// Add the local video stream
if (window.localStream !== undefined && window.localStream !== null) {
connections[socketListId].addStream(window.localStream)
} else {
let blackSilence = (...args) => new MediaStream([black(...args), silence()])
window.localStream = blackSilence()
connections[socketListId].addStream(window.localStream)
}
})
if (id === socketId) {
for (let id2 in connections) {
if (id2 === socketId) continue
try {
connections[id2].addStream(window.localStream)
} catch (e) { }
connections[id2].createOffer().then((description) => {
connections[id2].setLocalDescription(description)
.then(() => {
socket.emit('signal', id2, JSON.stringify({ 'sdp': connections[id2].localDescription }))
})
.catch(e => console.log(e))
})
}
}
})
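A side note on the questions in the comments inside the handler above: ondataavailable / handleDataAvailable belongs to the MediaRecorder, not to the stream, so passing the captureStream() result into startRecording() is already the right shape. Separately, captureStream() only produces frames while something is actually painted onto the canvas, and this canvas is never drawn to. A minimal draw loop would look like this (a sketch; the rAF loop is my addition, not in the original code):
// captureStream() records whatever is painted on the canvas, so the
// remote <video> has to be copied onto it continuously:
const ctx = canvas.getContext('2d');
video.addEventListener('loadedmetadata', () => {
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  (function drawLoop() {
    ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
    requestAnimationFrame(drawLoop);
  })();
});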
This is where I add clstream to canvasArray.
Now, if I use the stream instead of the canvas:
streams.push({
username: canvasArray[i].username,
socketId: canvasArray[i].socketId,
stream: canvasArray[i].clstream
});
The code works fine, but it downloads the video of my own stream rather than the client's. I don't know what I'm doing wrong, even though clstream is supposed to be the client's stream.
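One likely culprit worth flagging: getLocalStreams() returns the streams this peer attached with addStream(), i.e. your own media, which would explain recording yourself. The remote client's stream is the one delivered to onaddstream (event.stream), or, staying in the same deprecated API family, a sketch:
// getLocalStreams() -> streams *we* added with addStream() (our own video).
// The remote client's stream comes from onaddstream / getRemoteStreams():
clstream: connections[socketListId].getRemoteStreams()[0]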
And if I use the canvas:
canvasArray.push({
username: connectionsWithNames[socketListId],
socketId: socketListId,
canvas: canvas.captureStream(25).getVideoTracks()[0],
clstream: connections[socketListId].getLocalStreams()[0] // this is the stream of the user who joined the call
});
it returns a CanvasCaptureMediaStreamTrack, which is not a MediaStream, so recording and downloading don't work.
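For this second variant: MediaRecorder expects a MediaStream, and a lone CanvasCaptureMediaStreamTrack can be wrapped back into one (a sketch):
// Re-wrap a bare track so MediaRecorder can consume it:
const track = canvas.captureStream(25).getVideoTracks()[0];
const wrappedStream = new MediaStream([track]);
const recorder = new MediaRecorder(wrappedStream, { mimeType: 'video/webm; codecs="vp9"' });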
I don't know what I'm doing wrong, so any help would be appreciated. Thanks!
A:
0 votes
Kurizu
11/7/2023
#1
Solved!
Thanks - https://github.com/webrtc/samples/blob/gh-pages/src/content/insertable-streams/video-crop/js/main.js
Found a demo on their GitHub!
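For anyone landing here: that sample is built on Chrome's insertable streams API (MediaStreamTrackProcessor / MediaStreamTrackGenerator). The core pattern looks roughly like the following sketch (my paraphrase, not the demo verbatim; remoteStream stands in for the client's incoming stream):
// Rough shape of the insertable-streams pipeline from the linked sample.
// 'remoteStream' is assumed to be the client's incoming MediaStream.
const [track] = remoteStream.getVideoTracks();
const processor = new MediaStreamTrackProcessor({ track });
const generator = new MediaStreamTrackGenerator({ kind: 'video' });
const transformer = new TransformStream({
  transform(videoFrame, controller) {
    // crop/transform the VideoFrame here; pass-through shown for brevity
    controller.enqueue(videoFrame);
  }
});
processor.readable.pipeThrough(transformer).pipeTo(generator.writable);
// The generator is itself a video track, so it can feed MediaRecorder:
const processedStream = new MediaStream([generator]);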