如何使用 gstreamer 和 OpenCV 为 RTSP 组播服务器提供服务?

How to serve RTSP multicast server with gstreamer and OpenCV?

提问人:Cynki 提问时间:11/1/2023 最后编辑:toyota SupraCynki 更新时间:11/2/2023 访问量:108

问:

我想编写一个程序,通过多播或广播 rtsp/rtp 服务器流式传输 cv2 帧。

我在下面写了一段参考了一些示例的代码，但是使用此代码，只有第一个客户端可以进行流式传输，在它之后启动的所有其他客户端都无法获取图像流（已在 ffplay 和 vlc 上测试，url 是 rtsp://host_url:5000/stream）。

此代码中是否有任何需要改进的部分?

我认为第一个客户端会接受所有发出的块......

class SensorFactory(GstRtspServer.RTSPMediaFactory):
    """RTSP media factory that feeds cv2 frames into an appsrc-based
    H.264 pipeline.

    Parameters
    ----------
    image_type : int
        Index into the shared ``frame_list`` selecting which frame
        stream this factory serves.
    """

    def __init__(self, image_type):
        super(SensorFactory, self).__init__()
        self.number_frames = 0
        self.fps = 30.0
        self.duration = 1.0 / self.fps * Gst.SECOND  # duration of a frame in gst units (ns)
        # key-int-max=30 forces a keyframe at least once per second and
        # config-interval=1 re-sends SPS/PPS with every keyframe.  Without
        # these, clients that join a *shared* stream after the first one
        # never receive the H.264 parameter sets and cannot start decoding
        # (the symptom described in the question).
        self.launch_string = 'appsrc name=source is-live=true format=GST_FORMAT_TIME ' \
                             'caps=video/x-raw,format=BGR,width=640,height=360,framerate=30/1 ' \
                             '! videoconvert ! video/x-raw,format=I420 ' \
                             '! x264enc speed-preset=ultrafast tune=zerolatency key-int-max=30 ' \
                             '! rtph264pay name=pay0 pt=96 config-interval=1'
        self.image_type = image_type

    def do_create_element(self, url):
        # Called by the server to build the pipeline for a new media.
        return Gst.parse_launch(self.launch_string)

    def on_need_data(self, src, length):
        """Push one BGR frame into appsrc whenever it asks for data.

        ``frame_list`` is a Manager.list() containing cv2 frames shared
        across processes.
        """
        data = cv2.resize(frame_list[self.image_type], (640, 360),
                          interpolation=cv2.INTER_LINEAR)
        data = data.tobytes()
        buf = Gst.Buffer.new_allocate(None, len(data), None)
        buf.fill(0, data)
        # GstBuffer timing fields are unsigned 64-bit integers; self.duration
        # is a Python float, so convert explicitly (assigning a float here
        # is an error in PyGObject).
        buf.duration = int(self.duration)
        timestamp = self.number_frames * self.duration
        buf.pts = buf.dts = int(timestamp)
        # offset is conventionally the frame index for video (also guint64,
        # so it must be an int, not the float timestamp).
        buf.offset = self.number_frames
        self.number_frames += 1
        retval = src.emit('push-buffer', buf)
        if retval != Gst.FlowReturn.OK:
            # Downstream refused the buffer (e.g. the pipeline is shutting
            # down); surface it instead of silently dropping frames.
            print('push-buffer returned', retval)

    # attaching the source element to the rtsp media
    def do_configure(self, rtsp_media):
        self.number_frames = 0
        appsrc = rtsp_media.get_element().get_child_by_name('source')
        appsrc.connect('need-data', self.on_need_data)


class MulticastServer(GstRtspServer.RTSPServer):
    """RTSP server exposing one shared, multicast-capable stream at
    rtsp://host:5000/stream."""

    def __init__(self):
        GstRtspServer.RTSPServer.__init__(self)
        # Port for the RTSP control connection.
        self.set_service("5000")

        self.factory0 = SensorFactory(0)
        # One pipeline shared by all clients instead of one per client.
        self.factory0.set_shared(True)
        self.factory0.set_eos_shutdown(True)

        # Multicast address setup.  IPv4 multicast is 224.0.0.0-239.255.255.255;
        # the original upper bound 240.0.0.10 lies outside that range, and
        # 224.0.0.0/24 is reserved for local network control traffic, so use
        # a small range well inside the valid space (ttl=5).
        self.address_pool = GstRtspServer.RTSPAddressPool.new()
        self.address_pool.add_range("224.3.0.1", "224.3.0.10", 5000, 5010, 5)
        self.factory0.set_address_pool(self.address_pool)
        # Offer only multicast transport; otherwise clients negotiate plain
        # unicast UDP and the address pool above is never used.
        self.factory0.set_protocols(GstRtspServer.RTSPLowerTrans.UDP_MCAST)
        self.get_mount_points().add_factory("/stream", self.factory0)

# Entry point: initialise GStreamer, attach the RTSP server to the default
# GLib main context, then service requests forever.
Gst.init(None)

server = MulticastServer()
server.attach(None)

loop = GLib.MainLoop()
loop.run()
python opencv gstreamer rtsp 组播

评论

0赞 Christoph Rackwitz 11/1/2023
我能看到的 opencv 的唯一用途是 cv2.resize()。我敢肯定，无论你遇到的是什么问题，这都不是问题所在。
0赞 Diego Rodriguez 12/1/2023
您的 RTSP 服务器可能没有把 SDP 发送给后续的客户端；可以在 x264enc 之后加一个 h264parse，在每一帧上都发送 SDP 信息。您还可以使用 config-interval 属性进一步配置。

答:

0赞 SeB 11/2/2023 #1

这可能不是最佳解决方案,但这个简单的方案可能适用于您的情况:

1. 使用带有 gstreamer 后端的 openCV VideoWriter 编码到 H264 并使用 UDP/5000 将RTPH264流式传输到本地主机:

import cv2

# Simulated frame source: a gstreamer videotestsrc pipeline delivering BGR
# frames (the layout opencv's appsink expects).
cap = cv2.VideoCapture('videotestsrc ! video/x-raw,format=BGR,width=640,height=480 ! queue ! appsink drop=1', cv2.CAP_GSTREAMER)
if not cap.isOpened():
    print('Error: failed to open capture')
    exit(-1)

width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = float(cap.get(cv2.CAP_PROP_FPS))
print('Capture opened, framing %dx%d@%f' % (width, height, fps))

# VideoWriter with gstreamer backend (4CC code 0 means RAW input): converts
# the BGR frames, encodes to H264 and streams RTPH264 to localhost over
# UDP/5000.
writer = cv2.VideoWriter('appsrc ! video/x-raw,format=BGR ! queue ! videoconvert ! x264enc key-int-max=30 insert-vui=1 speed-preset=ultrafast tune=zerolatency ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5000', cv2.CAP_GSTREAMER, 0, fps, (width, height))
if not writer.isOpened():
    print('Error: failed to open rtph264')
    exit(-2)

# Pump frames from the capture into the RTP writer until a read fails.
while True:
    ok, frame = cap.read()
    if not ok:
        print('Read frame failed')
        break
    writer.write(frame)

writer.release()
cap.release()

2. 在同一主机上使用另一个线程或进程来运行 RTSP 服务器，从 UDP/5000 读取 RTPH264，然后只做解负载（depay）和重新打包（pay）：

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import GLib, Gst, GstVideo, GstRtspServer

# RTSP server that ingests the RTPH264 stream from UDP/5000 and simply
# depays/re-pays it for each RTSP client -- no re-encoding happens here.
Gst.init(None)

server = GstRtspServer.RTSPServer()
factory = GstRtspServer.RTSPMediaFactory()
factory.set_launch('( udpsrc address=127.0.0.1 port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! rtph264pay name=pay0 )')
server.get_mount_points().add_factory("/test", factory)
server.attach(None)

print("stream ready at rtsp://127.0.0.1:8554/test")
GLib.MainLoop().run()

3. 测试:

从 localhost:

gst-play-1.0 -v rtsp://127.0.0.1:8554/test

从另一台主机:

gst-play-1.0 -v rtsp://<rtsp_server_ip>:8554/test

# Or (0 latency may not be the best choice, you would adjust for your case):
gst-launch-1.0 rtspsrc location=rtsp://<rtsp_server_ip>:8554/test latency=0 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

# Or disabling sync:
gst-launch-1.0 rtspsrc location=rtsp://<rtsp_server_ip>:8554/test latency=0 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink sync=0