GStreamer 只向多个 WebRTC 客户端发送

GStreamer sendonly to multiple WebRTC clients

提问人:Arjan 提问时间:2/19/2022 最后编辑:Arjan 更新时间:2/19/2022 访问量:1358

问:

我一直在尝试使用 GStreamer 设置一个简单的仅发送 WebRTC 客户端,但我在让实际视频显示在 WebRTC 接收器端时遇到了问题。我是 GStreamer 和 WebRTC 的新手。

我正在使用 https://gitlab.freedesktop.org/gstreamer/gst-examples/-/tree/master/webrtc 示例,尝试把其中某些部分组合起来。我已经实现了 1:1 的通信,但我想引入"房间"机制,这样就可以让多个客户端观看来自 GStreamer 的"仅观看"(view-only)流。

我当前的代码基于 multiparty-sendrecv 示例,我在该示例中将音频换成了视频。此外,我正在使用信令服务器的修改版本和 javascript webrtc 客户端的修改版本。如有必要,我可以为上述所有内容提供代码,但为了简单起见,我不会。这是因为我认为问题不在于信令服务器或webrtc客户端,因为ICE候选者已经与SDP的报价和答案一起成功协商。见下图。chrome://webrtc-internals/

ICE candidates successfully negotiated

为了弄清楚发生了什么,我导出了一个图表,该图表显示了用户加入房间并添加到管道后的 GStreamer 管道。见下图。

enter image description here

据我所知,前端应该能收到视频数据,但实际上没有。我曾经遇到过一次奇怪的情况:videotestsrc 的画面确实显示出来了,但之后再也无法复现。正因为如此,我认为管道本身不一定有错,而更可能是存在某种竞态条件(race condition)。

我在下面添加了修改后的 multiparty-sendrecv 示例,请看一下。由于 Stackoverflow 的字符限制,大多数方法都被故意省略了。

主要功能

/* Attach a decoded media pad to a local playback branch:
 * queue -> <convert_name> -> <sink_name> (e.g. videoconvert ! autovideosink).
 * The new elements are added to @pipe and synced to its state so this can be
 * called while the pipeline is already PLAYING. */
static void
handle_media_stream(GstPad* pad, GstElement* pipe, const char* convert_name,
    const char* sink_name)
{
    GstPad* qpad;
    GstElement* q, * conv, * sink;
    GstPadLinkReturn ret;

    q = gst_element_factory_make("queue", NULL);
    g_assert_nonnull(q);
    conv = gst_element_factory_make(convert_name, NULL);
    g_assert_nonnull(conv);
    sink = gst_element_factory_make(sink_name, NULL);
    g_assert_nonnull(sink);
    gst_bin_add_many(GST_BIN(pipe), q, conv, sink, NULL);
    /* Sync states before linking so data can flow as soon as the link exists. */
    gst_element_sync_state_with_parent(q);
    gst_element_sync_state_with_parent(conv);
    gst_element_sync_state_with_parent(sink);
    gst_element_link_many(q, conv, sink, NULL);

    qpad = gst_element_get_static_pad(q, "sink");

    ret = gst_pad_link(pad, qpad);
    g_assert_cmpint(ret, == , GST_PAD_LINK_OK);

    /* FIX: gst_element_get_static_pad() returns a new reference; release it
     * after linking, otherwise the pad leaks once per incoming stream. */
    gst_object_unref(qpad);
}

/* "pad-added" handler for the per-stream decodebin: inspect the caps of the
 * freshly decoded pad and route it to a video or audio playback branch. */
static void
on_incoming_decodebin_stream(GstElement* decodebin, GstPad* pad,
    GstElement* pipe)
{
    GstCaps* caps;
    const gchar* name;

    if (!gst_pad_has_current_caps(pad)) {
        g_printerr("Pad '%s' has no caps, can't do anything, ignoring\n",
            GST_PAD_NAME(pad));
        return;
    }

    caps = gst_pad_get_current_caps(pad);
    /* `name` points into `caps`; keep the caps ref alive until we're done. */
    name = gst_structure_get_name(gst_caps_get_structure(caps, 0));

    if (g_str_has_prefix(name, "video")) {
        handle_media_stream(pad, pipe, "videoconvert", "autovideosink");
    }
    else if (g_str_has_prefix(name, "audio")) {
        handle_media_stream(pad, pipe, "audioconvert", "autoaudiosink");
    }
    else {
        g_printerr("Unknown pad %s, ignoring", GST_PAD_NAME(pad));
    }

    /* FIX: gst_pad_get_current_caps() returns a reference that was never
     * released — unref it to avoid leaking caps on every decoded pad. */
    gst_caps_unref(caps);
}

/* "pad-added" handler on webrtcbin: each incoming (depayloaded RTP) stream is
 * fed into its own decodebin, whose decoded pads are handled by
 * on_incoming_decodebin_stream(). */
static void
on_incoming_stream(GstElement* webrtc, GstPad* pad, GstElement* pipe)
{
    GstElement* decoder;
    GstPad* decoder_sink;

    /* Only source pads carry incoming media; ignore everything else. */
    if (GST_PAD_DIRECTION(pad) != GST_PAD_SRC)
        return;

    decoder = gst_element_factory_make("decodebin", NULL);
    g_signal_connect(decoder, "pad-added",
        G_CALLBACK(on_incoming_decodebin_stream), pipe);
    gst_bin_add(GST_BIN(pipe), decoder);
    gst_element_sync_state_with_parent(decoder);

    decoder_sink = gst_element_get_static_pad(decoder, "sink");
    gst_pad_link(pad, decoder_sink);
    gst_object_unref(decoder_sink);
}

/* Add one remote peer to the shared pipeline: creates a dedicated
 * queue ! webrtcbin branch named after @peer_id and links it to a tee
 * element named "videotee" that must already exist in the pipeline.
 * If @offer is TRUE, this side will generate the SDP offer once
 * negotiation is needed.
 * NOTE(review): the relative order of request-pad linking, signal
 * connection and state sync below is deliberate (see the XXX comment)
 * — do not reorder. */
static void
add_peer_to_pipeline(const gchar* peer_id, gboolean offer)
{
    int ret;
    gchar* tmp;
    GstElement* tee, * webrtc, * q;
    GstPad* srcpad, * sinkpad;

    /* Per-peer queue decouples this branch from the shared tee. */
    tmp = g_strdup_printf("queue-%s", peer_id);
    q = gst_element_factory_make("queue", tmp);
    g_free(tmp);
    /* The webrtcbin is named after the peer so it can be looked up later. */
    webrtc = gst_element_factory_make("webrtcbin", peer_id);
    g_object_set(webrtc, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, NULL);

    gst_bin_add_many(GST_BIN(pipeline), q, webrtc, NULL);

    /* Link queue src -> webrtcbin request sink pad. */
    srcpad = gst_element_get_static_pad(q, "src");
    g_assert_nonnull(srcpad);
    sinkpad = gst_element_get_request_pad(webrtc, "sink_%u");
    g_assert_nonnull(sinkpad);
    ret = gst_pad_link(srcpad, sinkpad);
    g_assert_cmpint(ret, == , GST_PAD_LINK_OK);
    gst_object_unref(srcpad);
    gst_object_unref(sinkpad);

    /* Link a fresh tee request src pad -> queue sink, so this peer
     * receives a copy of the shared encoded video stream. */
    tee = gst_bin_get_by_name(GST_BIN(pipeline), "videotee");
    g_assert_nonnull(tee);
    srcpad = gst_element_get_request_pad(tee, "src_%u");
    g_assert_nonnull(srcpad);
    gst_object_unref(tee);
    sinkpad = gst_element_get_static_pad(q, "sink");
    g_assert_nonnull(sinkpad);
    ret = gst_pad_link(srcpad, sinkpad);
    g_assert_cmpint(ret, == , GST_PAD_LINK_OK);
    gst_object_unref(srcpad);
    gst_object_unref(sinkpad);

    /* This is the gstwebrtc entry point where we create the offer and so on. It
     * will be called when the pipeline goes to PLAYING.
     * XXX: We must connect this after webrtcbin has been linked to a source via
     * get_request_pad() and before we go from NULL->READY otherwise webrtcbin
     * will create an SDP offer with no media lines in it. */
    if (offer)
        g_signal_connect(webrtc, "on-negotiation-needed",
            G_CALLBACK(on_negotiation_needed), (gpointer)peer_id);

    /* We need to transmit this ICE candidate to the browser via the websockets
     * signalling server. Incoming ice candidates from the browser need to be
     * added by us too, see on_server_message() */
    g_signal_connect(webrtc, "on-ice-candidate",
        G_CALLBACK(send_ice_candidate_message), (gpointer)peer_id);
    /* Incoming streams will be exposed via this signal */
    g_signal_connect(webrtc, "pad-added", G_CALLBACK(on_incoming_stream),
        pipeline);

    /* Set to pipeline branch to PLAYING */
    ret = gst_element_sync_state_with_parent(q);
    g_assert_true(ret);
    ret = gst_element_sync_state_with_parent(webrtc);
    g_assert_true(ret);

    /* Dump the current pipeline graph for debugging (requires
     * GST_DEBUG_DUMP_DOT_DIR to be set in the environment). */
    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
}

/* Build and start the shared sending pipeline:
 * videotestsrc -> vp8enc -> rtpvp8pay -> tee (named "videotee"), with the tee
 * initially drained into a fakesink so the pipeline can preroll before any
 * peer is connected. Returns TRUE on success, FALSE on failure (in which case
 * the global `pipeline` is cleared). */
static gboolean
start_pipeline(void)
{
    GstStateChangeReturn ret;
    GError* error = NULL;

    /* NOTE: webrtcbin currently does not support dynamic addition/removal of
     * streams, so we use a separate webrtcbin for each peer, but all of them are
     * inside the same pipeline. We start by connecting it to a fakesink so that
     * we can preroll early. */
    pipeline = gst_parse_launch("tee name=videotee ! queue ! fakesink "
        "videotestsrc is-live=true pattern=ball ! videoconvert ! queue ! vp8enc deadline=1 ! rtpvp8pay ! "
        "queue ! " RTP_CAPS_VP8 "96 ! videotee. ", &error);

    if (error) {
        /* FIX: report the parse failure as such instead of falling through to
         * a misleading "State change failure" message. */
        g_printerr("Failed to parse launch: %s\n", error->message);
        g_error_free(error);
        goto err;
    }

    g_print("Starting pipeline, not transmitting yet\n");
    ret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("State change failure\n");
        goto err;
    }

    return TRUE;

err:
    if (pipeline)
        g_clear_object(&pipeline);
    return FALSE;
}

/*
 * When we join a room, we are responsible for starting negotiation with each
 * peer already in it, by sending each one an SDP offer and ICE candidates.
 */
static void
do_join_room(const gchar* text)
{
    gint ii, len;
    gchar** peer_ids;

    if (app_state != ROOM_JOINING) {
        cleanup_and_quit_loop("ERROR: Received ROOM_OK when not calling",
            ROOM_JOIN_ERROR);
        return;
    }

    app_state = ROOM_JOINED;
    g_print("Room joined\n");
    /* Start recording, but not transmitting */
    if (!start_pipeline()) {
        cleanup_and_quit_loop("ERROR: Failed to start pipeline", ROOM_CALL_ERROR);
        return;
    }

    peer_ids = g_strsplit(text, " ", -1);
    g_assert_cmpstr(peer_ids[0], == , "ROOM_OK");
    len = g_strv_length(peer_ids);
    /* There are peers in the room already. We need to start negotiation
     * (exchange SDP and ICE candidates) and transmission of media. */
    if (len > 1 && strlen(peer_ids[1]) > 0) {
        g_print("Found %i peers already in room\n", len - 1);
        app_state = ROOM_CALL_OFFERING;
        for (ii = 1; ii < len; ii++) {
            gchar* peer_id = g_strdup(peer_ids[ii]);
            g_print("Negotiating with peer %s\n", peer_id);
            /* This might fail asynchronously */
            call_peer(peer_id);
            peers = g_list_prepend(peers, peer_id);
        }
    }

    g_strfreev(peer_ids);
    return;
}

int
main(int argc, char* argv[])
{
    GOptionContext* context;
    GError* error = NULL;

    context = g_option_context_new("- gstreamer webrtc sendrecv demo");
    g_option_context_add_main_entries(context, entries, NULL);
    g_option_context_add_group(context, gst_init_get_option_group());
    if (!g_option_context_parse(context, &argc, &argv, &error)) {
        g_printerr("Error initializing: %s\n", error->message);
        return -1;
    }

    if (!check_plugins())
        return -1;

    if (!room_id) {
        g_printerr("--room-id is a required argument\n");
        return -1;
    }

    if (!local_id)
        local_id = g_strdup_printf("%s-%i", g_get_user_name(),
            g_random_int_range(10, 10000));
    /* Sanitize by removing whitespace, modifies string in-place */
    g_strdelimit(local_id, " \t\n\r", '-');

    g_print("Our local id is %s\n", local_id);

    if (!server_url)
        server_url = g_strdup(default_server_url);

    /* Don't use strict ssl when running a localhost server, because
     * it's probably a test server with a self-signed certificate */
    {
        GstUri* uri = gst_uri_from_string(server_url);
        if (g_strcmp0("localhost", gst_uri_get_host(uri)) == 0 ||
            g_strcmp0("127.0.0.1", gst_uri_get_host(uri)) == 0)
            strict_ssl = FALSE;
        gst_uri_unref(uri);
    }

    loop = g_main_loop_new(NULL, FALSE);

    connect_to_websocket_server_async();

    g_main_loop_run(loop);

    gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
    g_print("Pipeline stopped\n");

    gst_object_unref(pipeline);
    g_free(server_url);
    g_free(local_id);
    g_free(room_id);

    return 0;
}

C++ WebRTC 视频流 gstreamer

评论

0赞 RSATom 2/19/2022
在我看来,例子有点不对劲。我基于它做了自己的视频实现:github.com/WebRTSP/RtStreaming/blob/master/GstRtStreaming/...... github.com/WebRTSP/RtStreaming/blob/master/GstRtStreaming/......multiparty-sendrecv
0赞 RSATom 2/19/2022
基于该代码的演示:clock.webrtsp.org:5080。二进制文件 snapcraft.io/rtsp-to-webrtsp snapcraft.io/webrtsp-clock
0赞 RSATom 2/19/2022
而且我认为不调试就不容易发现问题......可能的原因太多了......
0赞 Arjan 2/20/2022
@RSATom感谢您提供的信息,我将检查您自己的实现
0赞 Usama 3/30/2022
您能否使用 GST_DEBUG=3,webrtc:7 共享调试日志

答: 暂无答案