
Capture gstreamer network video with Python

I am trying to capture and display a network video stream with Python. The stream is created (on my laptop) with the following command:

gst-launch-1.0 v4l2src ! videorate ! video/x-raw,framerate=2/1,width=640,height=480 ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay config-interval=10 pt=96 ! udpsink host=127.0.0.1 port=5000

It takes the webcam input and streams it over a UDP port. I can capture and display the stream with the following command:

gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp, payload=127" ! rtph264depay ! avdec_h264 ! xvimagesink sync=false

Now I am trying to do the same thing (capturing) with a Python script, but without luck. Here is my code:

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst

udpPipe = Gst.pipeline("player")
source = Gst.ElementFactory.make('udpsrc', None)
source.set_property("port", 5000)
source.set_property("host", "127.0.0.1")

rdepay = Gst.ElementFactory.make('rtph264depay', 'rdepay')
vdecode = Gst.ElementFactory.make('avdec_h264', 'vdecode')
sink = Gst.ElementFactory.make('xvimagesink', None)

udpPipe.add(source, rdepay, vdecode, sink)
gst.element_link_many(source, rdepay, vdecode, sink)
udpPipe.set_state(gst.STATE_PLAYING)

The error I get is:

/usr/lib/python2.7/dist-packages/gi/overrides/Gst.py:56: Warning: /build/glib2.0-prJhLS/glib2.0-2.48.2/./gobject/gsignal.c:1674: parameter 1 of type '<invalid>' for signal "GstBus::sync_message" is not a value type
  Gst.Bin.__init__(self, name=name)
/usr/lib/python2.7/dist-packages/gi/overrides/Gst.py:56: Warning: /build/glib2.0-prJhLS/glib2.0-2.48.2/./gobject/gsignal.c:1674: parameter 1 of type '<invalid>' for signal "GstBus::message" is not a value type
  Gst.Bin.__init__(self, name=name)
Traceback (most recent call last):
  File "getUdp.py", line 13, in <module>
    source = Gst.ElementFactory.make('udpsrc', None)
  File "/usr/lib/python2.7/dist-packages/gi/overrides/Gst.py", line 217, in make
    return Gst.ElementFactory.make(factory_name, instance_name)
TypeError: unbound method fake_method() must be called with ElementFactory instance as first argument (got str instance instead) 

Any ideas? :-(

I ran into the same error today on Debian 9.3 (stretch). Calling Gst.init explicitly solved the problem.

The following code pops up an xvimagesink window on my system, under both Python 2.7 and 3.5.

#!/usr/bin/python
import sys
import gi
gi.require_version('GLib', '2.0')
gi.require_version('Gst', '1.0')
from gi.repository import GLib, Gst

Gst.init(sys.argv)

udpPipe = Gst.Pipeline("player")
source = Gst.ElementFactory.make('udpsrc', None)
source.set_property("port", 5000)
# Note: udpsrc has no "host" property (udpsink has one), so this stays disabled
#source.set_property("host", "127.0.0.1")
caps = Gst.caps_from_string("application/x-rtp, payload=127")
source.set_property("caps", caps)

rdepay = Gst.ElementFactory.make('rtph264depay', 'rdepay')
vdecode = Gst.ElementFactory.make('avdec_h264', 'vdecode')
sink = Gst.ElementFactory.make('xvimagesink', None)
sink.set_property("sync", False)

udpPipe.add(source, rdepay, vdecode, sink)

#Gst.element_link_many(source, rdepay, vdecode, sink)
source.link(rdepay)
rdepay.link(vdecode)
vdecode.link(sink)

udpPipe.set_state(Gst.State.PLAYING)

GLib.MainLoop().run()

I think calling Gst.init and running a main loop are the pieces needed to translate a gst-launch command line into a Python script with PyGObject.
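
As an aside (my addition, not part of the original answer): once Gst.init has been called, the gst-launch pipeline string can also be handed directly to Gst.parse_launch instead of building and linking each element by hand. A minimal sketch, using the same receive caps as the working gst-launch command in the question:

#!/usr/bin/python
import sys
import gi
gi.require_version('GLib', '2.0')
gi.require_version('Gst', '1.0')
from gi.repository import GLib, Gst

# Gst.init must run before any pipeline is parsed or any element is created
Gst.init(sys.argv)

# Same receive pipeline as the working gst-launch command in the question
pipeline = Gst.parse_launch(
    'udpsrc port=5000 caps="application/x-rtp, payload=127" '
    '! rtph264depay ! avdec_h264 ! xvimagesink sync=false')
pipeline.set_state(Gst.State.PLAYING)

GLib.MainLoop().run()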

You can use an "mjpeg" stream as follows:

gst-launch-1.0 videotestsrc ! videoconvert ! videoscale ! video/x-raw,format=I420,width=800,height=600,framerate=25/1 ! jpegenc ! rtpjpegpay ! udpsink host=127.0.0.1 port=5000
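
To sanity-check the stream before involving Python, a matching gst-launch receiver can be used (my addition; payload=26 is the static RTP payload type for JPEG):

gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp, payload=26" ! rtpjpegdepay ! jpegdec ! autovideosink sync=false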

In Python 3 you can get the frames like this:

#!/usr/bin/env python

import cv2
import gi
import numpy as np

gi.require_version('Gst', '1.0')
from gi.repository import Gst


class Video():
    """BlueRov video capture class constructor

    Attributes:
        port (int): Video UDP port
        video_codec (string): Source h264 parser
        video_decode (string): Transform YUV (12bits) to BGR (24bits)
        video_pipe (object): GStreamer top-level pipeline
        video_sink (object): Gstreamer sink element
        video_sink_conf (string): Sink configuration
        video_source (string): Udp source ip and port
    """

    def __init__(self, port=5000):
        """Summary

        Args:
            port (int, optional): UDP port
        """

        Gst.init(None)

        self.port = port
        self._frame = None

        # [Software component diagram](https://www.ardusub.com/software/components.html)
        # UDP video stream (:5000)
        self.video_source = 'udpsrc port={}'.format(self.port)
        # [Rasp raw image](http://picamera.readthedocs.io/en/release-0.7/recipes2.html#raw-image-capture-yuv-format)
        # Cam -> CSI-2 -> H264 Raw (YUV 4:2:0 (12 bits per pixel), I420)
        # self.video_codec = '! application/x-rtp, payload=96 ! rtph264depay ! h264parse ! avdec_h264'
        self.video_codec = '! application/x-rtp, payload=26 ! rtpjpegdepay ! jpegdec'
        # Convert the decoded YUV frames to the 8-bit-per-channel BGR layout that OpenCV expects
        self.video_decode = \
            '! decodebin ! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert'
        # Create a sink to get data
        self.video_sink_conf = \
            '! appsink emit-signals=true sync=false max-buffers=2 drop=true'

        self.video_pipe = None
        self.video_sink = None

        self.run()

    def start_gst(self, config=None):
        """ Start gstreamer pipeline and sink
        Pipeline description list e.g:
            [
                'videotestsrc ! decodebin', \
                '! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert',
                '! appsink'
            ]

        Args:
            config (list, optional): GStreamer pipeline description list
        """

        if not config:
            config = \
                [
                    'videotestsrc ! decodebin',
                    '! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert',
                    '! appsink'
                ]

        command = ' '.join(config)
        self.video_pipe = Gst.parse_launch(command)
        self.video_pipe.set_state(Gst.State.PLAYING)
        self.video_sink = self.video_pipe.get_by_name('appsink0')

    @staticmethod
    def gst_to_opencv(sample):
        """Transform byte array into np array

        Args:
            sample (TYPE): Description

        Returns:
            TYPE: Description
        """
        buf = sample.get_buffer()
        caps = sample.get_caps()
        array = np.ndarray(
            (
                caps.get_structure(0).get_value('height'),
                caps.get_structure(0).get_value('width'),
                3
            ),
            buffer=buf.extract_dup(0, buf.get_size()), dtype=np.uint8)
        return array

    def frame(self):
        """ Get Frame

        Returns:
            iterable: bool and image frame, cap.read() output
        """
        return self._frame

    def frame_available(self):
        """Check if frame is available

        Returns:
            bool: true if frame is available
        """
        return self._frame is not None

    def run(self):
        """ Get frame to update _frame
        """

        self.start_gst(
            [
                self.video_source,
                self.video_codec,
                self.video_decode,
                self.video_sink_conf
            ])

        self.video_sink.connect('new-sample', self.callback)

    def callback(self, sink):
        sample = sink.emit('pull-sample')
        new_frame = self.gst_to_opencv(sample)
        self._frame = new_frame

        return Gst.FlowReturn.OK


if __name__ == '__main__':
    # Create the video object
    # Pass port= if you need a different UDP port
    video = Video(port=5000)

    while True:
        # Wait for the next frame
        if not video.frame_available():
            continue

        frame = video.frame()
        cv2.imshow('frame', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
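
A usage note (my addition): start the videotestsrc sender pipeline above before running the script, otherwise frame_available() never becomes true. Also note that the while True loop busy-waits at full CPU until a frame arrives; a short sleep keeps the polling cheap, for example:

import time

while True:
    if not video.frame_available():
        # Poll gently instead of spinning at 100% CPU
        time.sleep(0.01)
        continue

    frame = video.frame()
    cv2.imshow('frame', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break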

References

1: http://www.einarsundgren.se/gstreamer-basic-real-time-streaming-tutorial/

2: https://gist.github.com/patrickelectric/443645bb0fd6e71b34c504d20d475d5a
