简体   繁体   English

Gstreamer用音频录制视频

[英]Gstreamer recording video with audio


I'm trying to record on a file a video from my webcam along with audio using Gstreamer on my Ubuntu 16 machine through glib library. 我正在尝试通过glib库在Ubuntu 16机器上使用Gstreamer在网络摄像头中录制视频以及音频。
I'm able to watch the video streaming from the webcam through these code lines 我可以通过这些代码行观看网络摄像头中的视频流

#include <gst/gst.h>

/* Preview the webcam on screen: v4l2src -> videoconvert -> autovideosink.
 * Blocks until an ERROR or EOS message appears on the pipeline bus,
 * then tears the pipeline down and exits. */
int main(int argc, char *argv[]) {
    GstElement *pipeline, *source, *sink, *convert;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    /* Create the elements */
    source  = gst_element_factory_make ("v4l2src", "source");
    convert = gst_element_factory_make ("videoconvert", "convert");
    sink    = gst_element_factory_make ("autovideosink", "sink");

    /* Create the empty pipeline */
    pipeline = gst_pipeline_new ("test-pipeline");

    if (!pipeline || !source || !sink || !convert) {
        g_printerr ("Not all elements could be created.\n");
        return -1;
    }

    /* Select the capture device */
    g_object_set (source, "device", "/dev/video0", NULL);

    /* Build the pipeline: add all elements, then link them in stream order.
     * One checked link_many call replaces the original two separate links
     * (whose failure message contained the typo "confert"). */
    gst_bin_add_many (GST_BIN (pipeline), source, convert, sink, NULL);
    if (!gst_element_link_many (source, convert, sink, NULL)) {
        g_printerr ("Elements could not be linked: source -> convert -> sink.\n");
        gst_object_unref (pipeline);
        return -1;
    }

    /* Start playing */
    ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the playing state.\n");
        gst_object_unref (pipeline);
        return -1;
    }

    /* Wait (blocking) until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        (GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Parse message */
    if (msg != NULL) {
        GError *err;
        gchar *debug_info;

        switch (GST_MESSAGE_TYPE (msg)) {
            case GST_MESSAGE_ERROR:
                gst_message_parse_error (msg, &err, &debug_info);
                g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
                g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
                g_clear_error (&err);
                g_free (debug_info);
                break;
            case GST_MESSAGE_EOS:
                g_print ("End-Of-Stream reached.\n");
                break;
            default:
                /* We should not reach here because we only asked for ERRORs and EOS */
                g_printerr ("Unexpected message received.\n");
                break;
        }
        gst_message_unref (msg);
    }

    /* Free resources */
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}


and to capture audio from microphone and listen it through the speakers using these code lines 并使用这些代码线从麦克风捕获音频并通过扬声器收听

#include <gst/gst.h>
#include <glib.h>

/* Bus watch callback: stops the main loop when the stream ends or a
 * fatal error is reported. Always returns TRUE to keep the watch alive. */
static gboolean
bus_call (GstBus     *bus,
          GstMessage *msg,
          gpointer    data){
  GMainLoop *loop = (GMainLoop *) data;
  GstMessageType type = GST_MESSAGE_TYPE (msg);

  if (type == GST_MESSAGE_EOS) {
    g_print ("End of stream\n");
    g_main_loop_quit (loop);
  } else if (type == GST_MESSAGE_ERROR) {
    GError *error = NULL;
    gchar *debug = NULL;

    /* Extract and report the error, then shut the loop down. */
    gst_message_parse_error (msg, &error, &debug);
    g_free (debug);

    g_printerr ("Error: %s\n", error->message);
    g_error_free (error);

    g_main_loop_quit (loop);
  }

  return TRUE;
}

/* Audio loopback pipeline: alsasrc -> (fixed caps) -> alsasink.
 * Captures from the ALSA device and plays it back until bus_call()
 * stops the main loop on EOS or error. */
gint
main (gint argc, gchar **argv) {
    GMainLoop *loop;
    GstElement *pipeline, *audio_source, *sink;
    GstBus *bus;
    guint bus_watch_id;
    GstCaps *caps;
    gboolean ret;

    /* Initialization of gstreamer */
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    /* Elements creation */
    pipeline     = gst_pipeline_new ("audio_stream");
    audio_source = gst_element_factory_make ("alsasrc", "audio_source");
    sink         = gst_element_factory_make ("alsasink", "audio_sink");

    if (!pipeline) {
        g_printerr ("Audio: Pipeline couldn't be created\n");
        return -1;
    }
    if (!audio_source) {
        g_printerr ("Audio: alsasrc couldn't be created\n");
        return -1;
    }
    if (!sink) {
        g_printerr ("Audio: Output file couldn't be created\n");
        return -1;
    }

    /* NOTE(review): assumes mic and speakers are on ALSA card 1 —
     * verify with `arecord -l` / `aplay -l`. */
    g_object_set (G_OBJECT (audio_source), "device", "hw:1,0", NULL);
    g_object_set (G_OBJECT (sink), "device", "hw:1,0", NULL);

    /* Install the bus watch so bus_call() can end the loop. */
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

    gst_bin_add_many (GST_BIN(pipeline), audio_source, sink, NULL);

    /* Force a fixed raw-audio format on the source->sink link. */
    caps = gst_caps_new_simple ("audio/x-raw",
        "format", G_TYPE_STRING, "S16LE",
        "layout", G_TYPE_STRING, "interleaved",
        "rate", G_TYPE_INT, 44100,
        "channels", G_TYPE_INT, 2, NULL);
    ret = gst_element_link_filtered (audio_source, sink, caps);
    /* gst_element_link_filtered() does not take ownership of the caps,
     * so unref them on BOTH paths (the original leaked them on success). */
    gst_caps_unref (caps);
    if (!ret) {
        g_print ("audio_source and sink couldn't be linked\n");
        gst_object_unref (pipeline);  /* was leaked here */
        return -1;  /* was `return FALSE` (0): reported success on failure */
    }

    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    g_print ("streaming...\n");
    g_main_loop_run (loop);

    g_print ("Returned, stopping stream\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);

    g_print ("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (pipeline));
    g_source_remove (bus_watch_id);
    g_main_loop_unref (loop);

    return 0;
}


What i really don't understand is how to get video from the webcam and audio from my alsa hw at the same time and save them into a file (such as .mp4 for ex). 我真正不明白的是如何同时从网络摄像头获取视频和从alsa hw获得音频,并将其保存到文件中(例如.mp4 for ex)。 Can anyone help me? 谁能帮我? I tried to find something useful, but there's nothing on the board. 我试图找到有用的东西,但是板上却什么也没有。 In addition, it would be really appreciate also how to save just the video stream or just the audio stream in separated files. 此外,也非常感谢您如何仅将视频流或音频流保存在单独的文件中。

UPDATE UPDATE
I looked again to the tutorials and to the git link gave by @nayana, so i tried myself to code something. 我再次查看了教程和@nayana提供的git链接,因此我尝试自己编写一些代码。 I have two results: 我有两个结果:

#include <string.h>
#include <gst/gst.h>
#include <signal.h>
#include <unistd.h>
#include <stdlib.h>

/* File-scope state shared between main() and the bus/signal callbacks. */
static GMainLoop *loop;
static GstElement *pipeline;
static GstElement *muxer, *sink;                            /* mp4mux -> filesink */
static GstElement *src_video, *encoder_video, *queue_video; /* v4l2src -> x264enc -> queue */
static GstElement *src_audio, *encoder_audio, *queue_audio; /* alsasrc -> lamemp3enc -> queue */
static GstBus *bus;

/* Bus callback: logs errors and warnings; on ERROR stops the main loop,
 * on EOS tears the pipeline down and exits so the muxed file is finalized. */
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_error (message, &err, &debug);

      g_printerr ("ERROR: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);

      g_error_free (err);
      g_free (debug);
      g_free (name);

      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_WARNING:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_warning (message, &err, &debug);

      /* Fix: this branch handles warnings but printed "ERROR:" (copy-paste). */
      g_printerr ("WARNING: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);

      g_error_free (err);
      g_free (debug);
      g_free (name);
      break;
    }
    case GST_MESSAGE_EOS:{
      g_print ("Got EOS\n");
      g_main_loop_quit (loop);
      gst_element_set_state (pipeline, GST_STATE_NULL);
      g_main_loop_unref (loop);
      gst_object_unref (pipeline);
      exit (0);
      break;
    }
    default:
      break;
  }

  return TRUE;
}

/* SIGINT handler: request a clean shutdown instead of killing the process.
 * Sending EOS through the pipeline lets mp4mux finish writing the file;
 * the resulting EOS bus message is then handled by message_cb(). */
void sigintHandler(int unused) {
  g_print("You ctrl-c-ed! Sending EoS");
  gst_element_send_event(pipeline, gst_event_new_eos()); 
}

/* Record webcam video (H.264) and ALSA audio (MP3) muxed into an MP4 file.
 * Ctrl-C invokes sigintHandler(), which sends EOS so mp4mux can finalize
 * the file before message_cb() tears everything down. */
int main(int argc, char *argv[])
{
  signal(SIGINT, sigintHandler);
  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new(NULL);

  /* Video branch: v4l2src -> x264enc -> queue -> muxer */
  src_video = gst_element_factory_make("v4l2src", NULL);
  encoder_video = gst_element_factory_make("x264enc", NULL);
  queue_video = gst_element_factory_make("queue", NULL);

  /* Audio branch: alsasrc -> lamemp3enc -> queue -> muxer */
  src_audio = gst_element_factory_make ("alsasrc", NULL);
  encoder_audio = gst_element_factory_make("lamemp3enc", NULL);
  queue_audio = gst_element_factory_make("queue", NULL);

  muxer = gst_element_factory_make("mp4mux", NULL);
  sink = gst_element_factory_make("filesink", NULL);

  if (!pipeline || !src_video || !encoder_video || !src_audio || !encoder_audio
        || !queue_video || !queue_audio || !muxer || !sink) {
    g_error("Failed to create elements");
    return -1;
  }

  /* NOTE(review): assumes the microphone is ALSA card 1 — verify with `arecord -l`. */
  g_object_set(src_audio, "device", "hw:1,0", NULL);
  g_object_set(sink, "location", "video_audio_test.mp4", NULL);

  gst_bin_add_many(GST_BIN(pipeline), src_video, encoder_video, queue_video,
    src_audio, encoder_audio, queue_audio, muxer, sink, NULL);

  /* Fix: the original ignored the return values of the two branch links,
   * so a failed link was silently accepted and only surfaced at runtime. */
  if (!gst_element_link_many (src_video, encoder_video, queue_video, muxer, NULL)) {
    g_error("Failed to link video elements");
    return -2;
  }

  if (!gst_element_link_many (src_audio, encoder_audio, queue_audio, muxer, NULL)) {
    g_error("Failed to link audio elements");
    return -2;
  }

  if (!gst_element_link_many(muxer, sink, NULL)) {
    g_error("Failed to link elements");
    return -2;
  }

  /* NOTE(review): x264enc's defaults buffer many frames before output, which
   * can starve alsasrc ("Can't record audio fast enough"); consider setting
   * tune=zerolatency on x264enc — confirm with `gst-inspect-1.0 x264enc`. */

  loop = g_main_loop_new(NULL, FALSE);

  bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch(bus);
  g_signal_connect(G_OBJECT(bus), "message", G_CALLBACK(message_cb), NULL);
  gst_object_unref(GST_OBJECT(bus));

  gst_element_set_state(pipeline, GST_STATE_PLAYING);

  g_print("Starting loop");
  g_main_loop_run(loop);

  return 0;
}

With this I am able to record video from the camera, but the audio is recorded for only about one second at a random point during the recording, and it gives me this error 有了这个,我就可以从摄像头录制视频,但是在录制过程中,音频只在某个随机时刻录制了大约一秒钟,并给了我这个错误

ERROR: from element /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0: Can't record audio fast enough
Additional debug info:
gstaudiobasesrc.c(869): gst_audio_base_src_create (): /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0:
Dropped 206388 samples. This is most likely because downstream can't keep up and is consuming samples too slowly.

So i tried to add some setting and queues 所以我试图添加一些设置和队列

#include <string.h>
#include <gst/gst.h>
#include <signal.h>
#include <unistd.h>
#include <stdlib.h>

/* File-scope state shared between main() and the bus/signal callbacks. */
static GMainLoop *loop;
static GstElement *pipeline;
static GstElement *muxer, *sink;  /* mp4mux -> filesink */
static GstElement *src_video, *encoder_video, *queue_video, *rate_video, *scale_video, *capsfilter_video; 
static GstElement *src_audio, *encoder_audio, *queue_audio, *queue_audio2, *capsfilter_audio, *rate_audio;
static GstBus *bus;
static GstCaps *caps;  /* reused temporarily for both capsfilters */

/* Bus callback: logs errors and warnings; on ERROR stops the main loop,
 * on EOS tears the pipeline down and exits so the muxed file is finalized. */
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_error (message, &err, &debug);

      g_printerr ("ERROR: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);

      g_error_free (err);
      g_free (debug);
      g_free (name);

      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_WARNING:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_warning (message, &err, &debug);

      /* Fix: this branch handles warnings but printed "ERROR:" (copy-paste). */
      g_printerr ("WARNING: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);

      g_error_free (err);
      g_free (debug);
      g_free (name);
      break;
    }
    case GST_MESSAGE_EOS:{
      g_print ("Got EOS\n");
      g_main_loop_quit (loop);
      gst_element_set_state (pipeline, GST_STATE_NULL);
      g_main_loop_unref (loop);
      gst_object_unref (pipeline);
      exit (0);
      break;
    }
    default:
      break;
  }

  return TRUE;
}

/* SIGINT handler: request a clean shutdown instead of killing the process.
 * Sending EOS through the pipeline lets mp4mux finish writing the file;
 * the resulting EOS bus message is then handled by message_cb(). */
void sigintHandler(int unused) {
  g_print("You ctrl-c-ed! Sending EoS");
  gst_element_send_event(pipeline, gst_event_new_eos()); 
}

int main(int argc, char *argv[])
{
  signal(SIGINT, sigintHandler);
  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new(NULL);

  src_video = gst_element_factory_make("v4l2src", NULL);
  rate_video = gst_element_factory_make ("videorate", NULL);
  scale_video = gst_element_factory_make ("videoscale", NULL);
  capsfilter_video = gst_element_factory_make ("capsfilter", NULL);
  queue_video = gst_element_factory_make("queue", NULL);
  encoder_video = gst_element_factory_make("x264enc", NULL);

  src_audio = gst_element_factory_make ("alsasrc", NULL);
  capsfilter_audio = gst_element_factory_make ("capsfilter", NULL);
  queue_audio = gst_element_factory_make("queue", NULL);
  rate_audio = gst_element_factory_make ("audiorate", NULL);
  queue_audio2 = gst_element_factory_make("queue", NULL);
  encoder_audio = gst_element_factory_make("lamemp3enc", NULL);

  muxer = gst_element_factory_make("mp4mux", NULL);
  sink = gst_element_factory_make("filesink", NULL);

  if (!pipeline || !src_video || !rate_video || !scale_video || !capsfilter_video 
     || !queue_video || !encoder_video || !src_audio || !capsfilter_audio 
     || !queue_audio || !rate_audio || !queue_audio2 || !encoder_audio 
     || !muxer || !sink) {
    g_error("Failed to create elements");
    return -1;
  }

  // Set up the pipeline
  g_object_set(src_video, "device", "/dev/video0", NULL); 
  g_object_set(src_audio, "device", "hw:1,0", NULL);
  g_object_set(sink, "location", "video_audio_test.mp4", NULL);

  // video settings
  caps = gst_caps_from_string("video/x-raw,format=(string)I420,width=480,height=384,framerate=(fraction)25/1");
  g_object_set (G_OBJECT (capsfilter_video), "caps", caps, NULL);
  gst_caps_unref (caps); 

  // audio settings
  caps = gst_caps_from_string("audio/x-raw,rate=44100,channels=1");
  g_object_set (G_OBJECT (capsfilter_audio), "caps", caps, NULL);
  gst_caps_unref (caps);

  // add all elements into the pipeline 
  gst_bin_add_many(GST_BIN(pipeline), src_video, rate_video, scale_video, capsfilter_video, 
    queue_video, encoder_video, src_audio, capsfilter_audio, queue_audio, rate_audio, 
    queue_audio2, encoder_audio, muxer, sink, NULL);

  if (!gst_element_link_many (src_video,rate_video,scale_video, capsfilter_video,
    queue_video, encoder_video, muxer,NULL))
  {
    g_error("Failed to link video elements");
    return -2;
  }

  if (!gst_element_link_many (src_audio, capsfilter_audio, queue_audio, rate_audio, 
    queue_audio2, encoder_audio, muxer,NULL))
  {
    g_error("Failed to link audio elements");
    return -2;
  }

  if (!gst_element_link_many(muxer, sink, NULL))
  {
    g_error("Failed to link elements");
    return -2;
  }

  loop = g_main_loop_new(NULL, FALSE);

  bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch(bus);
  g_signal_connect(G_OBJECT(bus), "message", G_CALLBACK(message_cb), NULL);
  gst_object_unref(GST_OBJECT(bus));

  gst_element_set_state(pipeline, GST_STATE_PLAYING);

  g_print("Starting loop");
  g_main_loop_run(loop);

  return 0;
}

This time the code doesnt record anything and give me the following error 这次代码没有记录任何内容,并给我以下错误

   ERROR: from element /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0: Internal data flow error.
Additional debug info:
gstbasesrc.c(2948): gst_base_src_loop (): /GstPipeline:pipeline0/GstAlsaSrc:alsasrc0:
streaming task paused, reason not-negotiated (-4)

Can you address me to fix the error? 您能解决我的问题吗?
Thanks in advance 提前致谢

What you need is the multiplexer - such GStreamer element that can merge two streams into one. 您需要的是多路复用器-这样的GStreamer元素可以将两个流合并为一个。

mp4, mkv, avi.. are just a container formats which contains multiple "data streams", which can be audio, video, subtitles (not all formats support this). mp4,mkv,avi ..只是一种包含多个“数据流”的容器格式,可以是音频,视频,字幕(并非所有格式都支持)。

I don't know about your use case, but you don't need C code for what you do. 我不知道您的用例,但是您不需要C代码即可完成工作。 You can just use gst-launch-1.0 tool which has its own GStreamer kind-of-scripting language. 您可以只使用gst-launch-1.0工具,它具有自己的GStreamer脚本语言。

For simplicity I will use the debugging elements videotestsrc and audiotestsrc to simulate the input (instead of an actual camera etc). 为简单起见,我将使用调试元素videotestsrc和audiotestsrc来模拟输入(而不是实际的摄像头等)。

gst-launch-1.0 -e videotestsrc ! x264enc ! mp4mux name=mux ! filesink location="bla.mp4"  audiotestsrc ! lamemp3enc ! mux.

videotestsrc --> x264enc -----\
                               >---> mp4mux ---> filesink
audiotestsrc --> lamemp3enc --/ 

Explanation: 说明:

Videotestsrc generates raw video which is in GStreamer terms called "video/x-raw". Videotestsrc生成原始视频,以GStreamer术语称为“ video / x-raw”。

However mp4 cannot hold raw video, so we need to encode it with for example x264enc which makes our data "video/x-h264". 但是mp4无法保存原始视频,因此我们需要使用例如x264enc对其进行编码,这会使我们的数据为“ video / x-h264”。

Then we can finally mux this into our mp4 with mp4mux element. 然后,我们最终可以使用mp4mux元素将其混入我们的mp4中。

When we take a look into GStreamer docs using gst-inspect-1.0 mp4mux we see that this element supports various formats amongst which there is also video/x-h264 . 当我们使用gst-inspect-1.0 mp4mux查看GStreamer文档时,我们看到此元素支持多种格式,其中还包括video/x-h264

We do the same thing with the audio, using either faac for AAC format or lamemp3enc for mp3. 我们对音频做同样的事情:使用faac生成AAC格式,或使用lamemp3enc生成mp3。

With gst-launch-1.0 I did two tricks and one bonus trick: 使用gst-launch-1.0我做了两个技巧和一个奖励技巧:

  1. ability to have separate branches in one line. 在一行中具有独立分支的能力。 This is achieved by just separating those branches with space instead of ! 这是通过仅用空格而不是!分隔这些分支来实现的!
  2. ability to make alias with name=mux and later on using it with adding dot right at the end of name like mux. 可以使用name=mux来创建别名,之后再在名称末尾添加点(例如mux.来使用别名mux. . You can make up any name for that element you like. 您可以为自己喜欢的元素命名。
  3. Write EOS after hitting ctrl+c to stop the recording. 按下ctrl + c后停止录制,请写EOS。 This is achieved with parameter -e 这是通过参数-e实现的

Finally the output goes to filesink which just writes anything you give it to file. 最终,输出进入filesink,它只会将您提供的任何内容写入文件。

Now for a homework you: 现在开始做作业了:

  1. Use your elements for what you need - v4l2, alsasrc 根据需要使用元素-v4l2,alsasrc

  2. Add queue elements to add buffering and thread separation 添加队列元素以添加缓冲和线程分离

声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM