WebRTC replaceTrack won't work for audio unless I replace video too

Asked by Etohimself · Asked 2/13/2023 · Updated 2/13/2023 · Viewed 409 times

Question:

I'm building a video chat application with WebRTC. Basically, users exchange SDP and ICE candidates through a Node.js socket server. Users initially exchange dummy video and audio tracks, then replace those tracks as they toggle their audio and video. A user can toggle their video first and then add audio without any problem. The problem is that you cannot share audio first: for some reason, audio won't work without video.

The flow is as follows:

  1. All users connect to the WebSocket server when they load the page.

  2. Existing users send an SDP offer to the newcomer and exchange ICE candidates through the socket server.

  3. Existing users send dummy video and audio tracks, unless they are already sharing video/audio.

  4. The newcomer sends dummy video and audio along with the answer (they can't already be sharing anything at the moment they join).

  5. When a user shares their audio or video, replaceTrack replaces the corresponding audio or video track.

  • I can share video without any problem ✅

  • I can share video and then add audio without any problem ✅

  • I cannot share audio first ❌

  • Even though it doesn't work on its own, I can share audio first, then add video, and the audio starts working too ✅

RTCRtpSender.replaceTrack() only seems to work once the video has also been replaced. This has been driving me crazy for a month.
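
As a sanity check while debugging, something like the following can be used to dump the state of each sender/transceiver on a connection (just a diagnostic sketch; `pc` stands for one of the `peer` objects stored in `activePeerConnections`):

    // Diagnostic sketch: log each transceiver's negotiated direction and the
    // state of the track its sender is currently sending.
    function dumpTransceivers(pc) {
      pc.getTransceivers().forEach((transceiver, i) => {
        const track = transceiver.sender.track;
        console.log(
          "transceiver", i,
          "mid:", transceiver.mid,
          "direction:", transceiver.direction,
          "currentDirection:", transceiver.currentDirection,
          "track:",
          track
            ? track.kind +
              " enabled=" + track.enabled +
              " muted=" + track.muted +
              " readyState=" + track.readyState
            : null
        );
      });
    }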

When someone else joins:


    socketObj.current.on("userJoined", (data) => {
      addUser(data);

      //Establish peer connection
      if (data.userid != myUserID.current) {
        const establishPeerConnection = async () => {
          var peerConnection = new RTCPeerConnection({
            iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
          });
          console.log("Creating new peer connection");

          //Transfer ICE Candidates
          peerConnection.addEventListener("icecandidate", (event) => {
            if (event.candidate !== null) {
              socketObj.current.emit("peerData", {
                sender: myUserID.current,
                target: data.userid,
                type: "ICE",
                data: event.candidate,
              });
              console.log("Sent ICE Candidate (Initiator)");
            }
          });

          //Handle incoming stream tracks
          peerConnection.addEventListener("track", async (event) => {
            console.log("Incoming track!!! (initiator)");
            console.log(event && event.track && event.track.kind);

            let foundConnection = activePeerConnections.current.filter(
              (x) => x.userid == data.userid
            );
            if (foundConnection && foundConnection.length) {
              if (foundConnection[0].stream) {
                //There is already a stream for this person, lets update the tracks
                if (event.track.kind == "video") {
                  activePeerConnections.current.forEach(
                    (eachConnection, index) => {
                      if (eachConnection.userid == data.userid) {
                        activePeerConnections.current[index].stream =
                          new MediaStream([
                            event.track,
                            activePeerConnections.current[
                              index
                            ].stream.getAudioTracks()[0],
                          ]);
                        activePeerConnections.current[index].streamID = uuid();
                      }
                    }
                  );
                } else if (event.track.kind == "audio") {
                  activePeerConnections.current.forEach(
                    (eachConnection, index) => {
                      if (eachConnection.userid == data.userid) {
                        activePeerConnections.current[index].stream =
                          new MediaStream([
                            activePeerConnections.current[
                              index
                            ].stream.getVideoTracks()[0],
                            event.track,
                          ]);
                        activePeerConnections.current[index].streamID = uuid();
                      }
                    }
                  );
                }
              } else {
                //We are adding the first track to this peers stream
                if (event.track.kind == "video") {
                  activePeerConnections.current.forEach(
                    (eachConnection, index) => {
                      if (eachConnection.userid == data.userid) {
                        activePeerConnections.current[index].stream =
                          new MediaStream([event.track, silence()]);
                        activePeerConnections.current[index].streamID = uuid();
                      }
                    }
                  );
                } else if (event.track.kind == "audio") {
                  activePeerConnections.current.forEach(
                    (eachConnection, index) => {
                      if (eachConnection.userid == data.userid) {
                        activePeerConnections.current[index].stream =
                          new MediaStream([black(), event.track]);
                        activePeerConnections.current[index].streamID = uuid();
                      }
                    }
                  );
                }
              }
            }
          });

          activePeerConnections.current.push({
            userid: data.userid,
            peer: peerConnection,
            stream: null,
            streamID: uuid(),
          });

          let foundMyVideo = activeVideos.current.filter(
            (x) => x.userid == myUserID.current
          );
          if (foundMyVideo && foundMyVideo.length) {
            //We are already streaming video, lets send that to the new comer
            console.log("ALREADY STREAMING");
            foundMyVideo[0].stream
              .getTracks()
              .forEach((track) => peerConnection.addTrack(track));
          } else {
            console.log("NOT STREAMING, SENDING DUMMY INITIALLY");

            let dummyStream = blackSilence();
            dummyStream
              .getTracks()
              .forEach((track) => peerConnection.addTrack(track));

            console.log("Created dummy stream, lets add it");
            console.log(dummyStream);
          }

          //Create offer
          let offer = await peerConnection.createOffer();
          //Set local description
          await peerConnection.setLocalDescription(offer);
          console.log("Set local description : ");
          console.log(peerConnection);

          //Transfer SDP
          socketObj.current.emit("peerData", {
            sender: myUserID.current,
            target: data.userid,
            type: "SDP",
            data: offer,
          });

          console.log(data.username + " has joined, sending SDP...");
        };
        establishPeerConnection();
      }
    });
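
A side note on the join handler above: `addTrack` is called without a stream argument, so on the remote side `event.streams` is empty and streams have to be assembled by hand in the `track` listener. Purely as an illustrative sketch (not the code above; `remoteVideoElement` is a placeholder), passing a stream to `addTrack` makes the grouping arrive with the track event:

    // Sketch: associate local tracks with a stream when adding them,
    // so the remote "track" event exposes the grouping via event.streams[0].
    const localStream = blackSilence();
    localStream
      .getTracks()
      .forEach((track) => peerConnection.addTrack(track, localStream));

    // Remote side: event.streams[0] then groups this peer's tracks together.
    peerConnection.addEventListener("track", (event) => {
      if (event.streams && event.streams[0]) {
        remoteVideoElement.srcObject = event.streams[0]; // placeholder element
      }
    });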

When toggling audio:

//User clicked toggle audio button
  const handleToggleAudio = () => {
    if (!socketConnected) return;
    if (!socketObj.current) return;
    if (!myUserID.current) return;
    if (!navigator) return;

    if (audioEnabled) {
      //User is trying to stop the audio, lets see if audio has started yet
      let getMyStream = activeVideos.current.filter(
        (x) => x.userid == myUserID.current && x.stream
      );
      if (!getMyStream) return;
      if (!getMyStream.length) return;

      //Replace our tracks here..
      activePeerConnections.current.forEach((eachConnection) => {
        eachConnection.peer.getSenders().forEach((eachSender) => {
          if (eachSender.track.kind == "audio") {
            eachSender.replaceTrack(silence());
            console.log("Replaced with dummy " + eachSender.track.kind);
          }
        });
      });

      //Stop the audio track
      getMyStream[0].stream
        .getTracks()
        .forEach((track) => track.kind == "audio" && track.stop());

      //If video stays, replace audio with silence  in activeVideos
      if (videoEnabled) {
        activeVideos.current.forEach((eachVideo, index) => {
          if (eachVideo.userid == myUserID.current) {
            let existingVideo = activeVideos.current[index].stream
              .getTracks()
              .find((eachTrack) => eachTrack.kind == "video");
            activeVideos.current[index].stream = new MediaStream([
              existingVideo, //existing video
              silence(), //silence
            ]);
          }
        });
      } else {
        //Otherwise, remove the activeVideo completely
        activeVideos.current = [
          ...activeVideos.current.filter(
            (eachStream) => eachStream.userid != myUserID.current
          ),
        ];

        //Remove video reference from the video ref list
        videoRefs.current = videoRefs.current.filter(
          (eachRef) => eachRef.getAttribute("data-userid") != myUserID.current
        );
      }

      //Trigger re-render of video elements
      setAvUpdate((prevData) => prevData + 1);

      //Broadcast to all users that our audio has stopped via socket server
      socketObj.current.emit("audioStopped", {
        userid: myUserID.current,
      });

      setAudioEnabled(0);
    } else {
      //Start user audio, broadcast it
      setAudioEnabled(1);
      if (navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices
          .getUserMedia({ video: false, audio: true })
          .then(function (stream) {
            let audioTrack = stream
              .getTracks()
              .find((eachTrack) => eachTrack.kind == "audio");
            if (!audioTrack) return;

            //Replace our audio tracks with connected users here..
            activePeerConnections.current.forEach((eachConnection) => {
              eachConnection.peer.getSenders().forEach((eachSender) => {
                stream.getTracks().forEach((eachStreamTrack) => {
                  if (
                    eachSender.track.kind == eachStreamTrack.kind &&
                    eachStreamTrack.kind == "audio"
                  ) {
                    console.log(eachSender.replaceTrack(eachStreamTrack));
                    console.log("Replaced " + eachSender.track.kind);
                  }
                });
              });
            });

            //If video exists, add the audio to activeVideos so we can stop it later or stream to newcomers
            let foundVideo = 0;
            activeVideos.current.forEach((eachVideo, index) => {
              if (eachVideo.userid == myUserID.current) {
                foundVideo = 1;
                let existingVideo = activeVideos.current[index].stream
                  .getTracks()
                  .find((eachTrack) => eachTrack.kind == "video");
                activeVideos.current[index].stream = new MediaStream([
                  existingVideo, //existing video
                  audioTrack, //new audio
                ]);
                activeVideos.current[index].streamID = uuid();
              }
            });

            //If there is not an active video stream, user is starting the audio first, therefore push it to activeVideos
            if (!foundVideo) {
              activeVideos.current.push({
                userid: myUserID.current,
                stream: stream,
                streamID: uuid(),
              });
            }

            //Trigger re-rendering of video boxes
            setAvUpdate((prevData) => prevData + 1);

            //Broadcast it
            socketObj.current.emit("audioStarted", {
              userid: myUserID.current,
            });
          })
          .catch(function (error) {
            console.log(error);
          });
      }
    }
  };
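
One detail in the audio toggle above: `replaceTrack()` returns a promise that is mostly fired and forgotten, so a rejection (for example a kind mismatch) would go unnoticed. A small sketch of a hypothetical helper that awaits it and logs failures:

    // Sketch: replace the matching-kind sender track and surface any rejection.
    async function replaceSenderTrack(peer, newTrack) {
      for (const sender of peer.getSenders()) {
        if (sender.track && sender.track.kind === newTrack.kind) {
          try {
            await sender.replaceTrack(newTrack);
            console.log("Replaced", newTrack.kind, "track");
          } catch (err) {
            console.log("replaceTrack failed for", newTrack.kind, err);
          }
        }
      }
    }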

When toggling video:

//User clicked video button
  const handleToggleVideo = () => {
    if (!socketConnected) return;
    if (!socketObj.current) return;
    if (!myUserID.current) return;
    if (!navigator) return;

    if (videoEnabled) {
      //User is trying to stop the video, lets see if video has started yet
      let getMyStream = activeVideos.current.filter(
        (x) => x.userid == myUserID.current && x.stream
      );
      if (!getMyStream) return;
      if (!getMyStream.length) return;

      //Replace our tracks here..
      activePeerConnections.current.forEach((eachConnection) => {
        eachConnection.peer.getSenders().forEach((eachSender) => {
          if (eachSender.track.kind == "video") {
            eachSender.replaceTrack(black());
            console.log("Replaced with dummy " + eachSender.track.kind);
          }
        });
      });

      //Stop the video track
      getMyStream[0].stream
        .getTracks()
        .forEach((track) => track.kind == "video" && track.stop());

      //If audio stays, replace video with black in activeVideos
      if (audioEnabled) {
        activeVideos.current.forEach((eachVideo, index) => {
          if (eachVideo.userid == myUserID.current) {
            let existingAudio = activeVideos.current[index].stream
              .getTracks()
              .find((eachTrack) => eachTrack.kind == "audio");
            activeVideos.current[index].stream = new MediaStream([
              black(), //new video
              existingAudio, //existing audio
            ]);
          }
        });
      } else {
        //Otherwise, remove the activeVideo completely
        activeVideos.current = [
          ...activeVideos.current.filter(
            (eachStream) => eachStream.userid != myUserID.current
          ),
        ];

        //Remove video reference from the video ref list
        videoRefs.current = videoRefs.current.filter(
          (eachRef) => eachRef.getAttribute("data-userid") != myUserID.current
        );
      }

      //Trigger re-render of video elements
      setAvUpdate((prevData) => prevData + 1);

      //Broadcast to all users that our video has stopped via socket server
      socketObj.current.emit("videoStopped", {
        userid: myUserID.current,
      });

      setVideoEnabled(0);
    } else {
      //User is trying to share his video
      setVideoEnabled(1);
      //Attempt to get video stream from mediaDevices
      if (navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices
          .getUserMedia({ video: true, audio: false })
          .then(function (stream) {
            let videoTrack = stream
              .getTracks()
              .find((eachTrack) => eachTrack.kind == "video");
            if (!videoTrack) return;

            //Replace our video tracks with connected users here..
            activePeerConnections.current.forEach((eachConnection) => {
              eachConnection.peer.getSenders().forEach((eachSender) => {
                stream.getTracks().forEach((eachStreamTrack) => {
                  if (
                    eachSender.track.kind == eachStreamTrack.kind &&
                    eachStreamTrack.kind == "video"
                  ) {
                    eachSender.replaceTrack(eachStreamTrack);
                    console.log("Replaced " + eachSender.track.kind);
                  }
                });
              });
            });

            //Also find our activeVideo stream and update the video track,
            //so when a new user joins, we can send this from stored stream or later stop tracks
            let foundVideo = 0;
            activeVideos.current.forEach((eachVideo, index) => {
              if (eachVideo.userid == myUserID.current) {
                foundVideo = 1;
                let existingAudio = activeVideos.current[index].stream
                  .getTracks()
                  .find((eachTrack) => eachTrack.kind == "audio");
                activeVideos.current[index].stream = new MediaStream([
                  videoTrack, //new video
                  existingAudio, //existing audio
                ]);
                activeVideos.current[index].streamID = uuid();
              }
            });

            //If there is not an active audio stream, simply add our stream to activeVideos
            if (!foundVideo) {
              activeVideos.current.push({
                userid: myUserID.current,
                stream: stream,
                streamID: uuid(),
              });
            }

            //Trigger re-rendering of video boxes
            setAvUpdate((prevData) => prevData + 1);

            //Broadcast to other users that our video has started
            socketObj.current.emit("videoStarted", {
              userid: myUserID.current,
            });
          })
          .catch(function (error) {
            console.log(error);
          });
      }
    }
  };
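
The video toggle uses the same fire-and-forget pattern; with the hypothetical `replaceSenderTrack` helper sketched in the audio section, the replacement step could be condensed roughly like this (illustrative only):

    // Sketch: video path using the hypothetical replaceSenderTrack helper.
    navigator.mediaDevices
      .getUserMedia({ video: true, audio: false })
      .then(async (stream) => {
        const videoTrack = stream.getVideoTracks()[0];
        if (!videoTrack) return;
        for (const eachConnection of activePeerConnections.current) {
          await replaceSenderTrack(eachConnection.peer, videoTrack);
        }
      })
      .catch((error) => console.log(error));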

Dummy video and audio generator functions:

var silence = () => {
    let ctx = new AudioContext(),
      oscillator = ctx.createOscillator();
    let dst = oscillator.connect(ctx.createMediaStreamDestination());
    oscillator.start();
    return Object.assign(dst.stream.getAudioTracks()[0], { enabled: false });
  };

  var black = ({ width = 640, height = 480 } = {}) => {
    let canvas = Object.assign(document.createElement("canvas"), {
      width,
      height,
    });
    let ctx = canvas.getContext("2d");
    ctx.fillStyle = "red";
    ctx.fillRect(0, 0, width, height);

    let stream = canvas.captureStream();
    return Object.assign(stream.getVideoTracks()[0], { enabled: false });
  };

  var blackSilence = () =>
    new MediaStream([black({ width: 640, height: 480 }), silence()]);
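
One assumption worth verifying with `silence()`: when `blackSilence()` runs while answering a newcomer, the `AudioContext` is created outside a user gesture and may start in the "suspended" state, in which case the oscillator produces no samples. A purely illustrative variant that checks and tries to resume it (not a confirmed fix):

    var silenceChecked = () => {
      let ctx = new AudioContext();
      // Autoplay policies can leave a context created without a user gesture suspended.
      if (ctx.state === "suspended") {
        ctx.resume().catch((err) => console.log("AudioContext resume() failed", err));
      }
      let oscillator = ctx.createOscillator();
      let dst = oscillator.connect(ctx.createMediaStreamDestination());
      oscillator.start();
      return Object.assign(dst.stream.getAudioTracks()[0], { enabled: false });
    };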

JavaScript · ReactJS · WebRTC · video-chat · MediaStreamTrack

Comments

0 votes · Rob · 7/18/2023
Did you ever figure this out?

Answer: No answers yet