
I have a C program that plays video with GStreamer. I need to integrate the GUI without playbin2, and I probably made some mistakes when I tried to remove playbin2 and use a hand-built pipeline instead. I don't really understand how it works... Here is the link where I got the code from: GStreamer GUI with playbin2

Here is the code I made:

#include <string.h>

#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/interfaces/xoverlay.h>

#include <gdk/gdk.h>
#if defined (GDK_WINDOWING_X11)
#include <gdk/gdkx.h>
#elif defined (GDK_WINDOWING_WIN32)
#include <gdk/gdkwin32.h>
#elif defined (GDK_WINDOWING_QUARTZ)
#include <gdk/gdkquartz.h>
#endif

/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *pipeline;           /* Our one and only pipeline */

  GtkWidget *slider;              /* Slider widget to keep track of current position */
  GtkWidget *streams_list;        /* Text widget to display info about the streams */
  gulong slider_update_signal_id; /* Signal ID for the slider update signal */

  GstState state;                 /* Current state of the pipeline */
  gint64 duration;                /* Duration of the clip, in nanoseconds */
} CustomData;

/* This function is called when the GUI toolkit creates the physical window that will hold the video.
 * At this point we can retrieve its handler (which has a different meaning depending on the windowing system)
 * and pass it to GStreamer through the XOverlay interface. */
static void realize_cb (GtkWidget *widget, CustomData *data) {
  GdkWindow *window = gtk_widget_get_window (widget);
  guintptr window_handle;

  if (!gdk_window_ensure_native (window))
    g_error ("Couldn't create native window needed for GstXOverlay!");

  /* Retrieve window handler from GDK */
#if defined (GDK_WINDOWING_WIN32)
  window_handle = (guintptr)GDK_WINDOW_HWND (window);
#elif defined (GDK_WINDOWING_QUARTZ)
  window_handle = gdk_quartz_window_get_nsview (window);
#elif defined (GDK_WINDOWING_X11)
  window_handle = GDK_WINDOW_XID (window);
#endif
  /* Pass it to playbin2, which implements XOverlay and will forward it to the video sink */
  gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->pipeline), window_handle);
}

/* This function is called when the PLAY button is clicked */
static void play_cb (GtkButton *button, CustomData *data) {
  gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
}

/* This function is called when the PAUSE button is clicked */
static void pause_cb (GtkButton *button, CustomData *data) {
  gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
}

/* This function is called when the STOP button is clicked */
static void stop_cb (GtkButton *button, CustomData *data) {
  gst_element_set_state (data->pipeline, GST_STATE_READY);
}

/* This function is called when the main window is closed */
static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *data) {
  stop_cb (NULL, data);
  gtk_main_quit ();
}

/* This function is called every time the video window needs to be redrawn (due to damage/exposure,
 * rescaling, etc). GStreamer takes care of this in the PAUSED and PLAYING states, otherwise,
 * we simply draw a black rectangle to avoid garbage showing up. */
static gboolean expose_cb (GtkWidget *widget, GdkEventExpose *event, CustomData *data) {
  if (data->state < GST_STATE_PAUSED) {
    GtkAllocation allocation;
    GdkWindow *window = gtk_widget_get_window (widget);
    cairo_t *cr;

    /* Cairo is a 2D graphics library which we use here to clean the video window.
     * It is used by GStreamer for other reasons, so it will always be available to us. */
    gtk_widget_get_allocation (widget, &allocation);
    cr = gdk_cairo_create (window);
    cairo_set_source_rgb (cr, 0, 0, 0);
    cairo_rectangle (cr, 0, 0, allocation.width, allocation.height);
    cairo_fill (cr);
    cairo_destroy (cr);
  }

  return FALSE;
}

/* This function is called when the slider changes its position. We perform a seek to the
 * new position here. */
static void slider_cb (GtkRange *range, CustomData *data) {
  gdouble value = gtk_range_get_value (GTK_RANGE (data->slider));
  gst_element_seek_simple (data->pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
      (gint64)(value * GST_SECOND));
}

/* This creates all the GTK+ widgets that compose our application, and registers the callbacks */
static void create_ui (CustomData *data) {
  GtkWidget *main_window;  /* The uppermost window, containing all other windows */
  GtkWidget *video_window; /* The drawing area where the video will be shown */
  GtkWidget *main_box;     /* VBox to hold main_hbox and the controls */
  GtkWidget *main_hbox;    /* HBox to hold the video_window and the stream info text widget */
  GtkWidget *controls;     /* HBox to hold the buttons and the slider */
  GtkWidget *play_button, *pause_button, *stop_button; /* Buttons */

  main_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  g_signal_connect (G_OBJECT (main_window), "delete-event", G_CALLBACK (delete_event_cb), data);

  video_window = gtk_drawing_area_new ();
  gtk_widget_set_double_buffered (video_window, FALSE);
  g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), data);
  g_signal_connect (video_window, "expose_event", G_CALLBACK (expose_cb), data);

  play_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PLAY);
  g_signal_connect (G_OBJECT (play_button), "clicked", G_CALLBACK (play_cb), data);

  pause_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PAUSE);
  g_signal_connect (G_OBJECT (pause_button), "clicked", G_CALLBACK (pause_cb), data);

  stop_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_STOP);
  g_signal_connect (G_OBJECT (stop_button), "clicked", G_CALLBACK (stop_cb), data);

  data->slider = gtk_hscale_new_with_range (0, 100, 1);
  gtk_scale_set_draw_value (GTK_SCALE (data->slider), 0);
  data->slider_update_signal_id = g_signal_connect (G_OBJECT (data->slider), "value-changed", G_CALLBACK (slider_cb), data);

  data->streams_list = gtk_text_view_new ();
  gtk_text_view_set_editable (GTK_TEXT_VIEW (data->streams_list), FALSE);

  controls = gtk_hbox_new (FALSE, 0);
  gtk_box_pack_start (GTK_BOX (controls), play_button, FALSE, FALSE, 2);
  gtk_box_pack_start (GTK_BOX (controls), pause_button, FALSE, FALSE, 2);
  gtk_box_pack_start (GTK_BOX (controls), stop_button, FALSE, FALSE, 2);
  gtk_box_pack_start (GTK_BOX (controls), data->slider, TRUE, TRUE, 2);

  main_hbox = gtk_hbox_new (FALSE, 0);
  gtk_box_pack_start (GTK_BOX (main_hbox), video_window, TRUE, TRUE, 0);
  gtk_box_pack_start (GTK_BOX (main_hbox), data->streams_list, FALSE, FALSE, 2);

  main_box = gtk_vbox_new (FALSE, 0);
  gtk_box_pack_start (GTK_BOX (main_box), main_hbox, TRUE, TRUE, 0);
  gtk_box_pack_start (GTK_BOX (main_box), controls, FALSE, FALSE, 0);
  gtk_container_add (GTK_CONTAINER (main_window), main_box);
  gtk_window_set_default_size (GTK_WINDOW (main_window), 640, 480);

  gtk_widget_show_all (main_window);
}

/* This function is called periodically to refresh the GUI */
static gboolean refresh_ui (CustomData *data) {
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 current = -1;

  /* We do not want to update anything unless we are in the PAUSED or PLAYING states */
  if (data->state < GST_STATE_PAUSED)
    return TRUE;

  /* If we didn't know it yet, query the stream duration */
  if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
    if (!gst_element_query_duration (data->pipeline, &fmt, &data->duration)) {
      g_printerr ("Could not query current duration.\n");
    } else {
      /* Set the range of the slider to the clip duration, in SECONDS */
      gtk_range_set_range (GTK_RANGE (data->slider), 0, (gdouble)data->duration / GST_SECOND);
    }
  }

  if (gst_element_query_position (data->pipeline, &fmt, &current)) {
    /* Block the "value-changed" signal, so the slider_cb function is not called
     * (which would trigger a seek the user has not requested) */
    g_signal_handler_block (data->slider, data->slider_update_signal_id);
    /* Set the position of the slider to the current pipeline position, in SECONDS */
    gtk_range_set_value (GTK_RANGE (data->slider), (gdouble)current / GST_SECOND);
    /* Re-enable the signal */
    g_signal_handler_unblock (data->slider, data->slider_update_signal_id);
  }
  return TRUE;
}

/* This function is called when new metadata is discovered in the stream */
static void tags_cb (GstElement *pipeline, gint stream, CustomData *data) {
  /* We are possibly in a GStreamer working thread, so we notify the main
   * thread of this event through a message in the bus */
  gst_element_post_message (pipeline,
    gst_message_new_application (GST_OBJECT (pipeline),
      gst_structure_new ("tags-changed", NULL)));
}

/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;

  /* Print error details on the screen */
  gst_message_parse_error (msg, &err, &debug_info);
  g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
  g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
  g_clear_error (&err);
  g_free (debug_info);

  /* Set the pipeline to READY (which stops playback) */
  gst_element_set_state (data->pipeline, GST_STATE_READY);
}

/* This function is called when an End-Of-Stream message is posted on the bus.
 * We just set the pipeline to READY (which stops playback) */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  g_print ("End-Of-Stream reached.\n");
  gst_element_set_state (data->pipeline, GST_STATE_READY);
}

/* This function is called when the pipeline changes states. We use it to
 * keep track of the current state. */
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GstState old_state, new_state, pending_state;
  gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
  if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->pipeline)) {
    data->state = new_state;
    g_print ("State set to %s\n", gst_element_state_get_name (new_state));
    if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
      /* For extra responsiveness, we refresh the GUI as soon as we reach the PAUSED state */
      refresh_ui (data);
    }
  }
}

/* Extract metadata from all the streams and write it to the text widget in the GUI */
static void analyze_streams (CustomData *data) {
  gint i;
  GstTagList *tags;
  gchar *str, *total_str;
  guint rate;
  gint n_video, n_audio, n_text;
  GtkTextBuffer *text;

  /* Clean current contents of the widget */
  text = gtk_text_view_get_buffer (GTK_TEXT_VIEW (data->streams_list));
  gtk_text_buffer_set_text (text, "", -1);

  /* Read some properties */
  g_object_get (data->pipeline, "n-video", &n_video, NULL);
  g_object_get (data->pipeline, "n-audio", &n_audio, NULL);
  g_object_get (data->pipeline, "n-text", &n_text, NULL);

  for (i = 0; i < n_video; i++) {
    tags = NULL;
    /* Retrieve the stream's video tags */
    g_signal_emit_by_name (data->pipeline, "get-video-tags", i, &tags);
    if (tags) {
      total_str = g_strdup_printf ("video stream %d:\n", i);
      gtk_text_buffer_insert_at_cursor (text, total_str, -1);
      g_free (total_str);
      gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
      total_str = g_strdup_printf ("  codec: %s\n", str ? str : "unknown");
      gtk_text_buffer_insert_at_cursor (text, total_str, -1);
      g_free (total_str);
      g_free (str);
      gst_tag_list_free (tags);
    }
  }

  for (i = 0; i < n_audio; i++) {
    tags = NULL;
    /* Retrieve the stream's audio tags */
    g_signal_emit_by_name (data->pipeline, "get-audio-tags", i, &tags);
    if (tags) {
      total_str = g_strdup_printf ("\naudio stream %d:\n", i);
      gtk_text_buffer_insert_at_cursor (text, total_str, -1);
      g_free (total_str);
      if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
        total_str = g_strdup_printf ("  codec: %s\n", str);
        gtk_text_buffer_insert_at_cursor (text, total_str, -1);
        g_free (total_str);
        g_free (str);
      }
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        total_str = g_strdup_printf ("  language: %s\n", str);
        gtk_text_buffer_insert_at_cursor (text, total_str, -1);
        g_free (total_str);
        g_free (str);
      }
      if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
        total_str = g_strdup_printf ("  bitrate: %d\n", rate);
        gtk_text_buffer_insert_at_cursor (text, total_str, -1);
        g_free (total_str);
      }
      gst_tag_list_free (tags);
    }
  }

  for (i = 0; i < n_text; i++) {
    tags = NULL;
    /* Retrieve the stream's subtitle tags */
    g_signal_emit_by_name (data->pipeline, "get-text-tags", i, &tags);
    if (tags) {
      total_str = g_strdup_printf ("\nsubtitle stream %d:\n", i);
      gtk_text_buffer_insert_at_cursor (text, total_str, -1);
      g_free (total_str);
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        total_str = g_strdup_printf ("  language: %s\n", str);
        gtk_text_buffer_insert_at_cursor (text, total_str, -1);
        g_free (total_str);
        g_free (str);
      }
      gst_tag_list_free (tags);
    }
  }
}

/* This function is called when an "application" message is posted on the bus.
 * Here we retrieve the message posted by the tags_cb callback */
static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  if (g_strcmp0 (gst_structure_get_name (msg->structure), "tags-changed") == 0) {
    /* If the message is the "tags-changed" (only one we are currently issuing), update
     * the stream info GUI */
    analyze_streams (data);
  }
}

static gboolean bus_call (GstBus     *bus,
                          GstMessage *msg,
                          gpointer    data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {

   case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar  *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static void on_pad_added (GstElement *element,
              GstPad     *pad,
              gpointer    data)
{
  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  /* We can now link this pad with the vorbis-decoder sink pad */
  g_print ("Dynamic pad created, linking demuxer/decoder\n");

  sinkpad = gst_element_get_static_pad (decoder, "sink");

  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

int main (int argc, char *argv[]) {
    GMainLoop *loop;
    CustomData data;
    GstStateChangeReturn ret;
    GstElement *pipeline, *source, *subOverlay, *subParse, *subSource, *demuxer, *audioDecoder, *videoDecoder, *audioConv, *videoConv, *videosink,
            *audiosink, *audioQueue, *videoQueue;
    GstBus *bus;

    gtk_init (&argc, &argv);
    gst_init (&argc, &argv);

    memset (&data, 0, sizeof (data));
    data.duration = GST_CLOCK_TIME_NONE;

    loop = g_main_loop_new (NULL, FALSE);

    if (argc < 2 && argc > 3) {
        g_printerr ("Usage: %s <Ogg/Vorbis filename> [Srt filename]\n", argv[0]);
        return -1;
    }

    pipeline     = gst_pipeline_new ("audiovideo-player");
    source       = gst_element_factory_make ("filesrc",          "file-source");
    demuxer      = gst_element_factory_make ("oggdemux",         "ogg-demuxer");
    audioQueue   = gst_element_factory_make ("queue",            "audio-queue");
    videoQueue   = gst_element_factory_make ("queue",            "video-queue");
    audioDecoder = gst_element_factory_make ("vorbisdec",        "vorbis-decoder");
    videoDecoder = gst_element_factory_make ("theoradec",        "theora-decoder");
    audioConv    = gst_element_factory_make ("audioconvert",     "audio-converter");
    videoConv    = gst_element_factory_make ("ffmpegcolorspace", "video-converter");
    videosink    = gst_element_factory_make ("autovideosink",    "video-output");
    audiosink    = gst_element_factory_make ("autoaudiosink",    "audio-output");
    subOverlay   = gst_element_factory_make ("subtitleoverlay",    "subtitleoverlay");
    subParse     = gst_element_factory_make ("subparse",    "subparse");
    subSource    = gst_element_factory_make ("filesrc",    "sub-source");

    /* Create the GUI */
    create_ui (&data);

    if (!pipeline || !source || !demuxer || !audioDecoder || !audioConv || !videoDecoder || !videoConv || !audioQueue
            || !videoQueue || !audiosink || !videosink) {
        g_printerr ("One element could not be created. Exiting.\n");
        exit(-1);
    }

    /* Start playing */
    ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the playing state.\n");
        gst_object_unref (data.pipeline);
        return -1;
    }

    /* Register a function that GLib will call every second */
    g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);

    /* Start the GTK main loop. We will not regain control until gtk_main_quit is called. */
    gtk_main ();

    if(argc == 2){
        g_object_set (G_OBJECT (source), "location", argv[1], NULL);
        gst_bin_add_many (GST_BIN (pipeline),
                            source, demuxer, 
                            audioQueue, videoQueue, audioDecoder, videoDecoder,
                            videoConv, audioConv, videosink, audiosink, NULL);

        gst_element_link (source, demuxer);

        gst_element_link_many (videoQueue, videoDecoder, videoConv, videosink, NULL);
        g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), videoQueue);

        gst_element_link_many (audioQueue, audioDecoder, audioConv, audiosink, NULL);
        g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), audioQueue);

    } else if (argc == 3){
        g_object_set (G_OBJECT (source), "location", argv[1], NULL);
        g_object_set (G_OBJECT (subSource), "location", argv[2], NULL);

        gst_bin_add_many (GST_BIN (pipeline),
                            source, demuxer, 
                            audioQueue, videoQueue, audioDecoder, videoDecoder,
                            videoConv, audioConv, videosink, audiosink, subSource, subOverlay, subParse, NULL);

        gst_element_link (source, demuxer);

        gst_element_link_many(subSource, subParse, subOverlay, NULL);

        gst_element_link_many (videoQueue, videoDecoder, videoConv, subOverlay, videosink, NULL);

        /* file-source -> ogg-demuxer ~> vorbis-decoder -> converter -> alsa-output */
        gst_element_link_many (audioQueue, audioDecoder, audioConv, audiosink, NULL);

        /* Linking subtitles and video pads together */

        gst_element_link (subSource, subParse);

        if(gst_element_link_pads(subParse, NULL, subOverlay, NULL)) {
            g_printerr("Pads couldn't be linked\n");
        }
        g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added), subOverlay);

        g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), videoQueue);

        g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), audioQueue);

    } else {
        g_printerr ("Error: incorrect arguments.\n");
    }

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

    g_print ("The video's name.. : %s\n", argv[1]);
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    g_print ("Playing...\n");
    g_main_loop_run (loop);

    g_print ("Stop...\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);

    g_print ("Pipeline erased...\n");
    gst_object_unref (GST_OBJECT (pipeline));
    return 0;
} 

Of course it does not work, and I get these errors:

Gtk-Message: Failed to load module "overlay-scrollbar"

** (lecteuraudiovideo:2895): CRITICAL **: gst_x_overlay_set_window_handle: assertion `overlay != NULL' failed

(lecteuraudiovideo:2895): GStreamer-CRITICAL **: gst_element_set_state: assertion `GST_IS_ELEMENT (element)' failed
Unable to set the pipeline to the playing state.

(lecteuraudiovideo:2895): GStreamer-CRITICAL **: gst_object_unref: assertion `object != NULL' failed

1 Answer


Here is a minimal example that uses uridecodebin instead of constructing the pipeline by hand, and works on all platforms.

Since gstreamer-0.10 and gtk2 should not be used when writing new programs, this uses gstreamer-1.0 and gtk3.

I think the code speaks for itself, but don't hesitate to ask if anything is unclear.

Compile with:

libtool --mode=link gcc player.c `pkg-config gstreamer-1.0 --cflags --libs` `pkg-config gtk+-3.0 --cflags --libs` `pkg-config gstreamer-video-1.0 --cflags --libs` -o player

Code:

#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gtk/gtk.h>
#include <gdk/gdk.h>

#if defined (GDK_WINDOWING_X11)
#include <gdk/gdkx.h>
#elif defined (GDK_WINDOWING_WIN32)
#include <gdk/gdkwin32.h>
#elif defined (GDK_WINDOWING_QUARTZ)
#include <gdk/gdkquartz.h>
#endif

static void
realize_cb (GtkWidget *widget, GstElement *sink)
{
  GdkWindow *window = gtk_widget_get_window (widget);
  guintptr window_handle;

  if (!gdk_window_ensure_native (window))
    g_error ("Couldn't create native window needed for GstXOverlay!");

#if defined (GDK_WINDOWING_WIN32)
  window_handle = (guintptr)GDK_WINDOW_HWND (window);
#elif defined (GDK_WINDOWING_QUARTZ)
  window_handle = gdk_quartz_window_get_nsview (window);
#elif defined (GDK_WINDOWING_X11)
  window_handle = GDK_WINDOW_XID (window);
#endif

  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), window_handle);
}

int main ()
{
  gst_init (NULL, NULL);
  gtk_init (NULL, NULL);
  GError *error = NULL;
  GstElement *sink;
  GtkWidget *window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  GtkWidget *da = gtk_drawing_area_new ();

  /* uridecodebin picks the demuxer and decoders for us; the video sink is
   * named so we can fetch it below and embed it in the GTK window */
  GstElement *pipeline = gst_parse_launch ("uridecodebin uri=file:///home/meh/Videos/raw_h264.0.mp4 name=d ! xvimagesink name=sink d. ! pulsesink", &error);

  /* Hand the drawing area's native window handle to the sink once it is realized */
  sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
  g_signal_connect (da, "realize", G_CALLBACK (realize_cb), sink);
  gtk_container_add (GTK_CONTAINER (window), da);

  gtk_widget_show_all (window);
  gtk_window_set_title (GTK_WINDOW (window), "my player");

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  gtk_main ();

  return 0;
}
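
If you don't want to hardcode the URI, you can build the launch description from argv and add a bus watch so errors don't fail silently. This is only a rough sketch of that variation, not part of the original answer; it reuses realize_cb from above, and gst_filename_to_uri and gst_bus_add_watch are standard GStreamer 1.0 calls:

static gboolean bus_cb (GstBus *bus, GstMessage *msg, gpointer user_data)
{
  /* Quit the GTK main loop on error or end-of-stream */
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
    GError *err = NULL;
    gchar *dbg = NULL;
    gst_message_parse_error (msg, &err, &dbg);
    g_printerr ("Error: %s\n", err->message);
    g_clear_error (&err);
    g_free (dbg);
    gtk_main_quit ();
  } else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS) {
    gtk_main_quit ();
  }
  return TRUE; /* keep the watch installed */
}

int main (int argc, char *argv[])
{
  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  if (argc < 2) {
    g_printerr ("Usage: %s <media file>\n", argv[0]);
    return -1;
  }

  /* Turn the local path into a file:// URI for uridecodebin */
  gchar *uri = gst_filename_to_uri (argv[1], NULL);
  gchar *desc = g_strdup_printf ("uridecodebin uri=%s name=d ! "
      "xvimagesink name=sink d. ! pulsesink", uri);
  GstElement *pipeline = gst_parse_launch (desc, NULL);
  g_free (desc);
  g_free (uri);

  GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
  GtkWidget *window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  GtkWidget *da = gtk_drawing_area_new ();
  g_signal_connect (da, "realize", G_CALLBACK (realize_cb), sink);
  gtk_container_add (GTK_CONTAINER (window), da);
  gtk_widget_show_all (window);

  /* Watch the pipeline bus for errors and EOS */
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_cb, NULL);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  gtk_main ();

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}

Note that xvimagesink requires X11 with the Xv extension, so on other platforms you would have to substitute a platform-appropriate video sink.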
answered 2014-11-01T18:33:09