
I'm new to GStreamer, and I want to get both the audio and the video buffers from a 3gp file and do some processing on them in callbacks.

(I start the pipeline on a separate thread. The pipeline should deliver audio buffers to the callback AudioCallback and video buffers to VideoCallback.)

This is what my pipeline looks like:

GstElement* audioQueue; //global variable, needed in on_pad_added (can't pass both queues through the single user-data pointer)
GstElement* videoQueue; //global variable, needed in on_pad_added (can't pass both queues through the single user-data pointer)


//static functions
static gboolean
bus_call (GstBus* bus, GstMessage* msg, gpointer    data)
{
  GMainLoop* loop = (GMainLoop*) data;

  switch (GST_MESSAGE_TYPE (msg))
  {
  case GST_MESSAGE_EOS:
      g_main_loop_quit (loop);
      break;

  case GST_MESSAGE_ERROR: {
      gchar  *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
  default:
      break;
  }
  return TRUE;
}

static void link_two_elements(GstElement* src_element, GstElement* sink_element)
{
  if(!gst_element_link(src_element, sink_element))
      g_printerr ("Failed to link %s -> %s\n", GST_ELEMENT_NAME (src_element), GST_ELEMENT_NAME (sink_element));

}

static void
on_pad_added (GstElement *element,
              GstPad     *pad,
              gpointer    data)
{
  GstCaps *caps;
  GstStructure *str;
  gchar *tex;
  GstPad* sinkpad;

  /* check media type */
  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);
  tex = (gchar*)gst_structure_get_name(str);

  if(g_strrstr(tex,"audio"))
  {
    //GstElement *audioQueue = (GstElement *) data;
    sinkpad = gst_element_get_static_pad (audioQueue, "sink");

    if(sinkpad)
    {
        GstPadLinkReturn linkReturn = gst_pad_link (pad, sinkpad);
        if (linkReturn != GST_PAD_LINK_OK)
            g_printerr ("Audio pad link failed: %d\n", (int) linkReturn);
        gst_object_unref (sinkpad);
    }
  }

  if(g_strrstr(tex,"video"))
  {
    //GstElement *videoQueue = (GstElement *) data;
    sinkpad = gst_element_get_static_pad (videoQueue, "sink");

    if(sinkpad)
    {
        GstPadLinkReturn linkReturn = gst_pad_link (pad, sinkpad);
        if (linkReturn != GST_PAD_LINK_OK)
            g_printerr ("Video pad link failed: %d\n", (int) linkReturn);
        gst_object_unref (sinkpad);
    }
  gst_caps_unref (caps); //gst_pad_get_caps() returns a reference that must be dropped
}

void runPipeline()
{
    GMainLoop *loop;
    GstElement *__pPipeline, *source, *demuxer, *audioDecoder, *audioConverter, *audioresample, /**audioQueue,*/ *audioSink, *videoDecoder, *videoSink, /**videoQueue,*/ *ffmpegcolorspace, *videoscale;
    GstBus* bus;

     //Initialisation
    gst_init (NULL, NULL);

    loop = g_main_loop_new (NULL, FALSE);

    // Create gstreamer elements
    __pPipeline = gst_pipeline_new("test_appsink");
    source = gst_element_factory_make ("filesrc", "file-source");
    demuxer = gst_element_factory_make("qtdemux", "demuxer");
    //audioDecoder = gst_element_factory_make("ffdec_mp3", "audioDecoder");
    audioDecoder = gst_element_factory_make("decodebin", "audioDecoder");
    audioConverter = gst_element_factory_make("audioconvert", "audioConverter");
    audioresample = gst_element_factory_make("audioresample", "audioresample");
    audioSink = gst_element_factory_make("appsink", "audioSink");
    audioQueue = gst_element_factory_make("queue2", "audioQueue");
    //videoDecoder = gst_element_factory_make("ffdec_h264", "videoDecoder");
    videoQueue = gst_element_factory_make("queue2", "videoQueue");
    videoDecoder = gst_element_factory_make("decodebin ", "videoDecoder");
    ffmpegcolorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace");
    videoscale = gst_element_factory_make("videoscale", "videoscale");
    videoSink = gst_element_factory_make("appsink", "videoSink");
    //appsink = gst_element_factory_make("appsink", "sink-buffer");

    if (!__pPipeline || !source || !demuxer || !audioDecoder || !audioConverter || !audioresample || !audioSink || !videoSink || !audioQueue || !videoQueue || !videoDecoder || !ffmpegcolorspace || !videoscale)
    {
      g_printerr ("One or more elements could not be created\n");
      return;
    }

    //we set the input filename to the source element
    g_object_set (G_OBJECT (source), "location", "/etc/20000101-161404.3gp", NULL);

    //Make appsink emit the "new-preroll" and "new-buffer" signals.
    gst_app_sink_set_emit_signals ((GstAppSink*) audioSink, TRUE);
    gst_app_sink_set_emit_signals ((GstAppSink*) videoSink, TRUE);

    //we add a message handler
    bus = gst_pipeline_get_bus (GST_PIPELINE (__pPipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

     //we add all elements into the pipeline
    gst_bin_add_many (GST_BIN (__pPipeline),
                    source, demuxer, videoDecoder, audioDecoder, audioConverter, audioresample, audioSink, videoSink,
                    audioQueue, videoQueue, ffmpegcolorspace, videoscale, NULL);


    //link source and demuxer separately
    link_two_elements(source, demuxer);

    //link rest of the elements
    gboolean retValVideoLinking = gst_element_link_many (videoQueue, videoDecoder, ffmpegcolorspace, videoscale, videoSink, NULL);
    gboolean retValAudioLinking = gst_element_link_many (audioQueue, audioDecoder, audioConverter, audioresample, audioSink, NULL);
    if (!retValVideoLinking || !retValAudioLinking)
        g_printerr ("Static linking of the audio or video branch failed\n");

    gulong sigConRet = g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), NULL);

    _ArAudioIn audioInstance = _ArAudioIn::GetArAudioInstance();

    g_signal_connect (audioSink, "new-buffer",  G_CALLBACK (AudioCallback), NULL);//AudioCallback static API
    g_signal_connect (videoSink, "new-buffer",  G_CALLBACK (VideoCallback), NULL);//VideoCallback static API

     //Set the pipeline to "playing" state
    GstStateChangeReturn state = gst_element_set_state (__pPipeline, GST_STATE_PLAYING);
    if (state == GST_STATE_CHANGE_FAILURE)
        g_printerr ("Failed to set the pipeline to PLAYING\n");

    g_main_loop_run (loop);

    gst_element_set_state (__pPipeline, GST_STATE_NULL);
    gst_object_unref (__pPipeline);
}
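
(For reference, the two callbacks are ordinary handlers for appsink's "new-buffer" signal; in GStreamer 0.10 the handler pulls the buffer itself. A minimal sketch of the shape — not my real code — looks like this:)

static void
AudioCallback (GstAppSink* sink, gpointer user_data)
{
  //pull the buffer that triggered the signal, use it, then drop the reference
  GstBuffer* buffer = gst_app_sink_pull_buffer (sink);
  if (buffer)
  {
    g_print ("audio buffer: %u bytes\n", GST_BUFFER_SIZE (buffer));
    gst_buffer_unref (buffer);
  }
}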


I'm getting only a single video buffer in my VideoCallback, and in on_pad_added the audio pad link fails with:
GST_PAD_LINK_NOFORMAT         = -4,

I'm trying to link the queue's sink pad to the pad received in on_pad_added; the same approach works for video but not for audio.
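
One way to see why the audio link is refused is to dump the caps of both pads just before calling gst_pad_link (a sketch using stock 0.10 calls, dropped into on_pad_added; pad and sinkpad are the variables from above):

    //Sketch: print the caps on both sides of the failing link
    GstCaps* srcCaps  = gst_pad_get_caps (pad);
    GstCaps* sinkCaps = gst_pad_get_caps (sinkpad);
    gchar* srcStr  = gst_caps_to_string (srcCaps);
    gchar* sinkStr = gst_caps_to_string (sinkCaps);
    g_print ("src caps:  %s\nsink caps: %s\n", srcStr, sinkStr);
    g_free (srcStr);
    g_free (sinkStr);
    gst_caps_unref (srcCaps);
    gst_caps_unref (sinkCaps);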

If anybody has any idea about this, please give me some pointers to get rid of this error and make the pipeline work.

1 Answer


Before asking for debugging help, it is useful to clean up the code. As general advice: check the return values and log warnings, or simply exit(1), so that you can verify that the pipeline setup actually works (the pad_added handler and so on). Also, use normal video and audio sinks first to confirm that the file plays.
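For example, something along these lines should confirm that the file decodes and plays before any appsink code is involved (a sketch for GStreamer 0.10; adjust the file location to yours):

gst-launch-0.10 filesrc location=/etc/20000101-161404.3gp ! decodebin name=d \
    d. ! queue ! audioconvert ! audioresample ! autoaudiosink \
    d. ! queue ! ffmpegcolorspace ! videoscale ! autovideosink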

Finally, pulling data out of the pipeline is usually a bad idea. If you can tell us what you want to do with the data once you have it in the callback, we can probably give you better advice.
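For instance, if you only need to peek at buffers as they flow by, a fakesink with handoffs lets the pipeline keep driving itself instead of you pulling from an appsink (a sketch against the 0.10 API; on_handoff is a hypothetical name):

static void
on_handoff (GstElement* fakesink, GstBuffer* buffer, GstPad* pad, gpointer user_data)
{
  //called for every buffer that reaches the sink; data keeps flowing normally
  g_print ("handoff: %u bytes\n", GST_BUFFER_SIZE (buffer));
}

/* in the pipeline setup: */
GstElement* sink = gst_element_factory_make ("fakesink", "videoSink");
g_object_set (sink, "signal-handoffs", TRUE, NULL);
g_signal_connect (sink, "handoff", G_CALLBACK (on_handoff), NULL);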

answered 2012-06-25 at 16:06