[英]Basler Pylon to GStreamer to multicast UDP
我正在嘗試使用 GStreamer 通過 UDP 多播將來自 Basler acA720-290gm GigE 相機的捕獲圖像發送到多個客戶端。我已經能夠使用 Pylon 7.1.0.25066 API 獲取圖像,並通過由 appsrc、videoconvert 和 ximagesink 組成的 GStreamer 管道發送它們。但是,如果我嘗試添加元素以將串流轉換為 RTP 再轉換回來,那麼我只會得到一張靜態圖像。
此代碼在 Debian 11 上成功顯示串流。我不確定 appsrc 的 caps 格式是否可以使用 GRAY8 以外的其他格式。
Test.cpp(Makefile 中 NAME := Test 要求此文件名)
#include <pylon/PylonIncludes.h>
#include <gst/gst.h>
#include "unistd.h"
#include "pthread.h"
// Worker thread handles: camera grab loop and GStreamer pipeline.
pthread_t thread_1;
pthread_t thread_2;
// Latest grabbed frame; written by pylon_thread, read by push_data.
// NOTE(review): shared across threads with no mutex/atomic — data race;
// TODO confirm intended synchronization strategy.
Pylon::CPylonImage image;
// Set once the first frame has been grabbed (main() polls this).
bool image_valid = false;
// Set by the GStreamer thread on exit to stop the grab loop.
bool gstreamer_thread_done = false;
// Grab thread: continuously pulls frames from the Basler GigE camera at
// 192.168.1.109 and deep-copies the newest frame into the global `image`.
// Exits when the GStreamer thread sets `gstreamer_thread_done`.
// NOTE(review): `image`, `image_valid`, `gstreamer_thread_done` are plain
// non-atomic globals shared with other threads — TODO confirm this is safe.
void* pylon_thread(void*) {
Pylon::CDeviceInfo device_info;
Pylon::CGrabResultPtr ptrGrabResult;
// RAII init/terminate of the pylon runtime for this thread's lifetime.
Pylon::PylonAutoInitTerm autoInitTerm;
device_info.SetIpAddress("192.168.1.109");
while (1) {
if (gstreamer_thread_done) { break; }
try {
Pylon::CInstantCamera camera(Pylon::CTlFactory::GetInstance().CreateDevice(device_info));
// Keep only the most recent frame; stale frames are dropped.
camera.StartGrabbing(Pylon::GrabStrategy_LatestImageOnly);
while (camera.IsGrabbing()) {
if (gstreamer_thread_done) { break; }
// 5000 ms timeout; throws on timeout (handled by the catch below).
camera.RetrieveResult(5000, ptrGrabResult, Pylon::TimeoutHandling_ThrowException);
if (ptrGrabResult->GrabSucceeded()) {
// Deep copy into the shared global frame buffer.
image.CopyImage(ptrGrabResult);
image_valid = true;
}
else {
fprintf(stderr, "Error: %u %s\n", ptrGrabResult->GetErrorCode(), ptrGrabResult->GetErrorDescription().c_str());
}
}
}
catch (const Pylon::GenericException &e) {
fprintf(stderr, "An exception occurred.\n");
fprintf(stderr, "%s\n", e.GetDescription());
// Back off before retrying the camera connection.
sleep(1);
}
}
pthread_exit(NULL);
}
// Shared state for the GStreamer thread: the pipeline elements, the GLib
// main loop, and the id of the idle source feeding appsrc (0 = inactive).
struct gstreamer_data {
GstElement* appsrc1;       // application-fed raw video source
GstElement* videoconvert1; // GRAY8 -> sink-supported format
GstElement* ximagesink1;   // X11 display sink
GstElement* pipeline;
GMainLoop* main_loop;
guint source_id;           // g_idle_add() id for push_data, 0 when stopped
};
// Idle callback scheduled by start_feed(): pushes the current camera frame
// into appsrc. Returns G_SOURCE_CONTINUE to stay scheduled, G_SOURCE_REMOVE
// on flow error (which also drops data->source_id out of sync — stop_feed
// only runs on "enough-data", so errors simply end the feed).
static gboolean push_data(gstreamer_data* data) {
    GstFlowReturn ret;
    size_t size = image.GetImageSize();
    // Copy the frame instead of wrapping image.GetBuffer(): the pylon
    // thread overwrites (and may reallocate) that buffer concurrently,
    // so a zero-copy wrap risks torn frames and use-after-free once the
    // buffer travels downstream.
    GstBuffer* buffer = gst_buffer_new_allocate(NULL, size, NULL);
    gst_buffer_fill(buffer, 0, image.GetBuffer(), size);
    g_signal_emit_by_name(data->appsrc1, "push-buffer", buffer, &ret);
    // The "push-buffer" action signal does NOT take ownership (unlike
    // gst_app_src_push_buffer); the original leaked one buffer per frame.
    gst_buffer_unref(buffer);
    if (ret != GST_FLOW_OK) {
        printf("Error\n");
        return G_SOURCE_REMOVE;
    }
    return G_SOURCE_CONTINUE;
}
// "need-data" handler: schedule push_data() as an idle source so frames
// start flowing into appsrc; no-op if the feeder is already running.
static void start_feed(GstElement* source, guint size, gstreamer_data* data) {
    (void) source;
    (void) size;
    if (data->source_id != 0) {
        return; // feeder already scheduled
    }
    data->source_id = g_idle_add((GSourceFunc) push_data, data);
}
// "enough-data" handler: appsrc's queue is full, so unschedule the idle
// feeder; no-op if it is not running.
static void stop_feed(GstElement* source, gstreamer_data* data) {
    (void) source;
    if (data->source_id == 0) {
        return; // nothing scheduled
    }
    g_source_remove(data->source_id);
    data->source_id = 0;
}
// Bus "message::error" handler: print the element error plus debug detail,
// then quit the main loop so gstreamer_thread can unwind.
static void error_cb(GstBus* bus, GstMessage* msg, gstreamer_data* data) {
    (void) bus;
    GError* err = NULL;
    gchar* debug_info = NULL;
    gst_message_parse_error(msg, &err, &debug_info);
    g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
    g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
    g_free(debug_info);
    g_clear_error(&err);
    g_main_loop_quit(data->main_loop);
}
void* gstreamer_thread(void*) {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
data.source_id = 0;
gst_init(NULL, NULL);
data.appsrc1 = gst_element_factory_make("appsrc", "appsrc1");
g_object_set(
G_OBJECT(data.appsrc1),
"stream-type", 0,
"format", GST_FORMAT_TIME,
"is-live", TRUE,
NULL
);
g_object_set(
G_OBJECT(data.appsrc1),
"caps", gst_caps_new_simple(
"video/x-raw",
"format", G_TYPE_STRING, "GRAY8",
"width", G_TYPE_INT, image.GetWidth(),
"height", G_TYPE_INT, image.GetHeight(),
"framerate", GST_TYPE_FRACTION, 0, 1,
NULL
),
NULL
);
g_signal_connect(data.appsrc1, "need-data", G_CALLBACK(start_feed), &data);
g_signal_connect(data.appsrc1, "enough-data", G_CALLBACK(stop_feed), &data);
data.videoconvert1 = gst_element_factory_make("videoconvert", "videoconvert1");
data.ximagesink1 = gst_element_factory_make("ximagesink", "ximagesink1");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.appsrc1 ||
!data.videoconvert1 ||
!data.ximagesink1
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many (
GST_BIN(data.pipeline),
data.appsrc1,
data.videoconvert1,
data.ximagesink1,
NULL
);
if (
gst_element_link_many (
data.appsrc1,
data.videoconvert1,
data.ximagesink1,
NULL
) != TRUE
)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
printf("Exiting.\n");
gstreamer_thread_done = true;
pthread_exit(NULL);
}
int main() {
int error;
error = pthread_create(&thread_1, NULL, pylon_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
while (image_valid == false) {
sleep(1);
}
error = pthread_create(&thread_2, NULL, gstreamer_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
pthread_join(thread_1, NULL);
pthread_join(thread_2, NULL);
return 0;
}
Makefile
# Makefile for Basler pylon + GStreamer sample program
.PHONY: all clean

# The program to build (expects $(NAME).cpp in this directory)
NAME := Test

# Installation directory for pylon
PYLON_ROOT ?= /opt/pylon

# Build tools and flags
LD := $(CXX)
# NOTE(review): OpenCV flags/libs are requested although the sample code
# does not visibly use OpenCV — confirm before removing.
CPPFLAGS := $(shell $(PYLON_ROOT)/bin/pylon-config --cflags) $(shell pkg-config --cflags gstreamer-1.0) $(shell pkg-config --cflags opencv4) -DUSE_GIGE
CXXFLAGS := #e.g., CXXFLAGS=-g -O0 for debugging
LDFLAGS := $(shell $(PYLON_ROOT)/bin/pylon-config --libs-rpath)
# Use pkg-config for the OpenCV libraries so they stay consistent with the
# opencv4 cflags above (the original hard-coded -lopencv_core -lopencv_imgproc).
LDLIBS := $(shell $(PYLON_ROOT)/bin/pylon-config --libs) $(shell pkg-config --libs gstreamer-1.0) $(shell pkg-config --libs opencv4) -lpthread

# Rules for building
all: $(NAME)

$(NAME): $(NAME).o
	$(LD) $(LDFLAGS) -o $@ $^ $(LDLIBS)

$(NAME).o: $(NAME).cpp
	$(CXX) $(CPPFLAGS) $(CXXFLAGS) -c -o $@ $<

clean:
	$(RM) $(NAME).o $(NAME)
此代碼僅顯示一個圖像:
Test.cpp
#include <pylon/PylonIncludes.h>
#include <gst/gst.h>
#include "unistd.h"
#include "pthread.h"
// Worker thread handles: camera grab loop and GStreamer pipeline.
pthread_t thread_1;
pthread_t thread_2;
// Latest grabbed frame; written by pylon_thread, read by push_data.
// NOTE(review): shared across threads with no mutex/atomic — data race;
// TODO confirm intended synchronization.
Pylon::CPylonImage image;
// Set once the first frame has been grabbed (main() polls this).
bool image_valid = false;
// Set by the GStreamer thread on exit to stop the grab loop.
bool gstreamer_thread_done = false;
// Grab thread: continuously pulls frames from the Basler GigE camera at
// 192.168.1.109 and deep-copies the newest frame into the global `image`.
// Exits when the GStreamer thread sets `gstreamer_thread_done`.
// NOTE(review): the shared globals are unsynchronized — TODO confirm safety.
void* pylon_thread(void*) {
Pylon::CDeviceInfo device_info;
Pylon::CGrabResultPtr ptrGrabResult;
// RAII init/terminate of the pylon runtime for this thread's lifetime.
Pylon::PylonAutoInitTerm autoInitTerm;
device_info.SetIpAddress("192.168.1.109");
while (1) {
if (gstreamer_thread_done) { break; }
try {
Pylon::CInstantCamera camera(Pylon::CTlFactory::GetInstance().CreateDevice(device_info));
// Keep only the most recent frame; stale frames are dropped.
camera.StartGrabbing(Pylon::GrabStrategy_LatestImageOnly);
while (camera.IsGrabbing()) {
if (gstreamer_thread_done) { break; }
// 5000 ms timeout; throws on timeout (handled by the catch below).
camera.RetrieveResult(5000, ptrGrabResult, Pylon::TimeoutHandling_ThrowException);
if (ptrGrabResult->GrabSucceeded()) {
// Deep copy into the shared global frame buffer.
image.CopyImage(ptrGrabResult);
image_valid = true;
}
else {
fprintf(stderr, "Error: %u %s\n", ptrGrabResult->GetErrorCode(), ptrGrabResult->GetErrorDescription().c_str());
}
}
}
catch (const Pylon::GenericException &e) {
fprintf(stderr, "An exception occurred.\n");
fprintf(stderr, "%s\n", e.GetDescription());
// Back off before retrying the camera connection.
sleep(1);
}
}
pthread_exit(NULL);
}
// Shared state for the GStreamer thread: the full encode/RTP/decode chain,
// the GLib main loop, and the idle-source id feeding appsrc (0 = inactive).
struct gstreamer_data {
GstElement* appsrc1;        // application-fed raw video source
GstElement* videoconvert1;  // GRAY8 -> encoder-supported format
GstElement* x264enc1;       // H.264 encoder
GstElement* rtph264pay1;    // H.264 -> RTP payloader
GstElement* rtph264depay1;  // RTP -> H.264 depayloader
GstElement* avdec_h2641;    // H.264 decoder
GstElement* videoconvert2;  // decoder output -> sink-supported format
GstElement* ximagesink1;    // X11 display sink
GstElement* pipeline;
GMainLoop* main_loop;
guint source_id;            // g_idle_add() id for push_data, 0 when stopped
};
// Idle callback scheduled by start_feed(): pushes the current camera frame
// into appsrc. Returns G_SOURCE_CONTINUE to stay scheduled, G_SOURCE_REMOVE
// on flow error.
static gboolean push_data(gstreamer_data* data) {
    GstFlowReturn ret;
    size_t size = image.GetImageSize();
    // Copy the frame instead of wrapping image.GetBuffer(): the pylon
    // thread overwrites (and may reallocate) that buffer concurrently,
    // so a zero-copy wrap risks torn frames and use-after-free once the
    // buffer is queued inside the encoder.
    GstBuffer* buffer = gst_buffer_new_allocate(NULL, size, NULL);
    gst_buffer_fill(buffer, 0, image.GetBuffer(), size);
    g_signal_emit_by_name(data->appsrc1, "push-buffer", buffer, &ret);
    // The "push-buffer" action signal does NOT take ownership (unlike
    // gst_app_src_push_buffer); the original leaked one buffer per frame.
    gst_buffer_unref(buffer);
    if (ret != GST_FLOW_OK) {
        printf("Error\n");
        return G_SOURCE_REMOVE;
    }
    return G_SOURCE_CONTINUE;
}
// "need-data" handler: schedule push_data() as an idle source so frames
// start flowing into appsrc; no-op if the feeder is already running.
static void start_feed(GstElement* source, guint size, gstreamer_data* data) {
    (void) source;
    (void) size;
    if (data->source_id != 0) {
        return; // feeder already scheduled
    }
    g_print("Start feeding\n");
    data->source_id = g_idle_add((GSourceFunc) push_data, data);
}
// "enough-data" handler: appsrc's queue is full, so unschedule the idle
// feeder; no-op if it is not running.
static void stop_feed(GstElement* source, gstreamer_data* data) {
    (void) source;
    if (data->source_id == 0) {
        return; // nothing scheduled
    }
    g_print("Stop feeding\n");
    g_source_remove(data->source_id);
    data->source_id = 0;
}
// Bus "message::error" handler: print the element error plus debug detail,
// then quit the main loop so gstreamer_thread can unwind.
static void error_cb(GstBus* bus, GstMessage* msg, gstreamer_data* data) {
    (void) bus;
    GError* err = NULL;
    gchar* debug_info = NULL;
    gst_message_parse_error(msg, &err, &debug_info);
    g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
    g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
    g_free(debug_info);
    g_clear_error(&err);
    g_main_loop_quit(data->main_loop);
}
void* gstreamer_thread(void*) {
gstreamer_data data;
GstStateChangeReturn ret;
GstBus* bus;
data.source_id = 0;
gst_init(NULL, NULL);
data.appsrc1 = gst_element_factory_make("appsrc", "appsrc1");
g_object_set(
G_OBJECT(data.appsrc1),
"stream-type", 0,
"format", GST_FORMAT_TIME,
"is-live", TRUE,
NULL
);
g_object_set(
G_OBJECT(data.appsrc1),
"caps", gst_caps_new_simple(
"video/x-raw",
"format", G_TYPE_STRING, "GRAY8",
"width", G_TYPE_INT, image.GetWidth(),
"height", G_TYPE_INT, image.GetHeight(),
"framerate", GST_TYPE_FRACTION, 0, 1,
NULL
),
NULL
);
g_signal_connect(data.appsrc1, "need-data", G_CALLBACK(start_feed), &data);
g_signal_connect(data.appsrc1, "enough-data", G_CALLBACK(stop_feed), &data);
data.videoconvert1 = gst_element_factory_make("videoconvert", "videoconvert1");
data.x264enc1 = gst_element_factory_make("x264enc", "x264enc1");
data.rtph264pay1 = gst_element_factory_make("rtph264pay", "rtph264pay1");
data.rtph264depay1 = gst_element_factory_make("rtph264depay", "rtph264depay1");
data.avdec_h2641 = gst_element_factory_make("avdec_h264", "avdec_h2641");
data.videoconvert2 = gst_element_factory_make("videoconvert", "videoconvert2");
data.ximagesink1 = gst_element_factory_make("ximagesink", "ximagesink1");
data.pipeline = gst_pipeline_new("pipeline");
if (
!data.pipeline ||
!data.appsrc1 ||
!data.videoconvert1 ||
!data.x264enc1 ||
!data.rtph264pay1 ||
!data.rtph264depay1 ||
!data.avdec_h2641 ||
!data.videoconvert2 ||
!data.ximagesink1
)
{
g_printerr("Not all elements could be created.\n");
exit(-1);
}
gst_bin_add_many (
GST_BIN(data.pipeline),
data.appsrc1,
data.videoconvert1,
data.x264enc1,
data.avdec_h2641,
data.videoconvert2,
data.ximagesink1,
NULL
);
if (
gst_element_link_many (
data.appsrc1,
data.videoconvert1,
data.x264enc1,
data.avdec_h2641,
data.videoconvert2,
data.ximagesink1,
NULL
) != TRUE
)
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
bus = gst_element_get_bus(data.pipeline);
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
gst_object_unref(bus);
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(data.pipeline);
exit(-1);
}
data.main_loop = g_main_loop_new(NULL, FALSE);
g_main_loop_run(data.main_loop);
printf("Exiting.\n");
gstreamer_thread_done = true;
pthread_exit(NULL);
}
int main() {
int error;
error = pthread_create(&thread_1, NULL, pylon_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
while (image_valid == false) {
sleep(1);
}
error = pthread_create(&thread_2, NULL, gstreamer_thread, NULL);
if (error) {
printf("Error: pthread_create: %i\n", error);
exit(EXIT_FAILURE);
}
pthread_join(thread_1, NULL);
pthread_join(thread_2, NULL);
return 0;
}
也許不要將圖像從 ptrGrabResult 復制到 CPylonImage,而是嘗試接收緩衝區的替代方法:使用 image.AttachGrabResultBuffer() 方法。它在 Pylon SDK 的 GUI_ImageWindow 示例項目中得到了很好的應用。
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.