
I use gstreamer to play an RTSP stream from an IP camera (an Axis, for example). I use a command line like this:

gst-launch-0.10 rtspsrc location=rtsp://192.168.0.127/axis-media/media.amp latency=0 ! decodebin ! autovideosink

and it works fine.

Since I want to control it from a pygtk GUI, I use the gstreamer Python bindings. I wrote this code:

[...]
self.player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
source.set_property("location", "rtsp://192.168.0.127/axis-media/media.amp")
decoder = gst.element_factory_make("decodebin", "decoder")
sink = gst.element_factory_make("autovideosink", "sink")

self.player.add(source, decoder, sink)
gst.element_link_many(source, decoder, sink)

bus = self.player.get_bus()
bus.add_signal_watch()
bus.enable_sync_message_emission()
bus.connect("message", self.on_message)
bus.connect("sync-message::element", self.on_sync_message)
[...]

But it doesn't work, and it dies with this message:

gst.element_link_many(source, decoder,sink)
gst.LinkError: failed to link source with decoder

Since I only use h264 anyway, I also tried to refine the CLI with this:

gst-launch-0.10 -v rtspsrc location=rtsp://192.168.0.127/axis-media/media.amp ! rtph264depay !  ffdec_h264 ! xvimagesink

and implemented it in my Python code like this:

[...]
self.player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
depay = gst.element_factory_make("rtph264depay", "depay")
decoder = gst.element_factory_make("ffdec_h264", "decoder")
sink = gst.element_factory_make("xvimagesink", "output")

self.player.add(source, depay, decoder, sink)
gst.element_link_many(source, depay, decoder, sink)
[...]

But I got the same error :(

gst.LinkError: failed to link source with depay

The problem must be with my source (rtspsrc), because the same code works with a filesrc and decodebin (though of course not with rtph264depay).

I don't understand why it works from the CLI but not here. Is there a gstreamer expert who can help me?

Thanks in advance.

Regards,


2 Answers


Here is a 'C' implementation of the code you are looking for. I think it is fairly straightforward to convert to 'Python'.

 //Display RTSP streaming of video
 //(c) 2011 enthusiasticgeek
 // This code is distributed in the hope that it will be useful,
 // but WITHOUT ANY WARRANTY; without even the implied warranty of
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  

#include <string.h>
#include <math.h>
#include <gst/gst.h>
#include <glib.h>
#include <glib/gprintf.h>  /* needed for g_printf() */

static gboolean bus_call (GstBus *bus,GstMessage *msg, gpointer data){
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {

    case GST_MESSAGE_EOS:
      g_print ("Stream Ends\n");
      g_main_loop_quit (loop);
      break;

    case GST_MESSAGE_ERROR: {
      gchar  *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }

  return TRUE;
}

static void on_pad_added (GstElement *element, GstPad *pad, gpointer data){

  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  /* We can now link this pad with the rtsp-decoder sink pad */
  g_print ("Dynamic pad created, linking source/demuxer\n");

  sinkpad = gst_element_get_static_pad (decoder, "sink");

  gst_pad_link (pad, sinkpad);

  gst_object_unref (sinkpad);
}

int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstBus *bus;
  GstElement *source;
  GstElement *decoder;
  GstElement *sink;
  GstElement *pipeline;
  GstElement *demux;
  GstElement *colorspace;

  /* Initializing GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Expect the RTSP URL on the command line; it is used as argv[1] below */
  if (argc != 2) {
    g_printerr ("Usage: %s <rtsp url>\n", argv[0]);
    return -1;
  }

 //gst-launch-0.10 rtspsrc location=rtsp://<ip> ! decodebin ! ffmpegcolorspace ! autovideosink
 //gst-launch -v rtspsrc location="rtsp://<ip>" ! rtpmp4vdepay ! mpeg4videoparse ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink
 //gst-launch -v rtspsrc location="rtsp://<ip>" ! rtpmp4vdepay ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink
  /* Create Pipe's Elements */
  pipeline = gst_pipeline_new ("video player");
  g_assert (pipeline);
  source   = gst_element_factory_make ("rtspsrc", "Source");
  g_assert (source);
  demux = gst_element_factory_make ("rtpmp4vdepay", "Depay");
  g_assert (demux);
  decoder = gst_element_factory_make ("ffdec_mpeg4", "Decoder");
  g_assert (decoder);
  colorspace     = gst_element_factory_make ("ffmpegcolorspace",  "Colorspace");
  g_assert(colorspace);
  sink     = gst_element_factory_make ("autovideosink", "Output");
  g_assert (sink);

  /*Make sure: Every elements was created ok*/
  if (!pipeline || !source || !demux || !decoder || !colorspace || !sink) {
    g_printerr ("One of the elements wasn't create... Exiting\n");
    return -1;
  }

  g_printf(" \nPipeline is Part(A) ->(dynamic/runtime link)  Part(B)[ Part(B-1) -> Part(B-2) -> Part(B-3) ]\n\n");
  g_printf(" [source](dynamic)->(dynamic)[demux]->[decoder]->[colorspace]->[videosink] \n\n");

  /* Set video Source */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);
  //g_object_set (G_OBJECT (source), "do-rtcp", TRUE, NULL);
  g_object_set (G_OBJECT (source), "latency", 0, NULL);

  /* Putting a Message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Add Elements to the Bin */
  gst_bin_add_many (GST_BIN (pipeline), source, demux, decoder, colorspace, sink, NULL);

  /* Link confirmation */
  if (!gst_element_link_many (demux, decoder, colorspace, sink, NULL)){
     g_warning ("Linking part (B) Fail...");
  }

  g_printf("\nNote that the source will be linked to the demuxer(depayload) dynamically.\n\
     The reason is that rtspsrc may contain various elements (for example\n\
     audio and video). The source pad(s) will be created at run time,\n\
     by the rtspsrc when it detects the amount and nature of elements.\n\
     Therefore we connect a callback function which will be executed\n\
     when the \"pad-added\" is emitted.\n");

  /* Dynamic Pad Creation */
  if(! g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added),demux))
  {
    g_warning ("Linking part (A) with part (B) Fail...");
  }
  /* Run the pipeline */
  g_print ("Playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* Ending Playback */
  g_print ("End of the Streaming... ending the playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  /* Eliminating Pipeline */
  g_print ("Eliminating Pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}

Makefile

test = test12
ext = c
CC = gcc
CPP = g++
gstreamer:
    $(CC) -g $(test).$(ext) -o $(test) `pkg-config gstreamer-0.10 --libs --cflags` `pkg-config gtk+-2.0 --libs --cflags`
clean:
    rm -rf $(test)
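
To make the conversion concrete, here is a minimal Python sketch of the same dynamic-pad technique, using the gst-python 0.10 bindings and the h264 elements from the question. The callback and main-loop details follow the spirit of the C code above and are assumptions, not something tested against the asker's camera:

import gobject
gobject.threads_init()
import pygst
pygst.require("0.10")
import gst

def on_pad_added(source, pad, depay):
    # rtspsrc creates its source pads at runtime, one per negotiated
    # stream, so the link to the depayloader must be made here.
    sinkpad = depay.get_pad("sink")
    if not sinkpad.is_linked():
        pad.link(sinkpad)

player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
source.set_property("location", "rtsp://192.168.0.127/axis-media/media.amp")
source.set_property("latency", 0)
depay = gst.element_factory_make("rtph264depay", "depay")
decoder = gst.element_factory_make("ffdec_h264", "decoder")
sink = gst.element_factory_make("autovideosink", "sink")

player.add(source, depay, decoder, sink)
# Only the static part of the pipeline can be linked up front;
# rtspsrc is linked later, when its pads appear.
gst.element_link_many(depay, decoder, sink)
source.connect("pad-added", on_pad_added, depay)

player.set_state(gst.STATE_PLAYING)
gobject.MainLoop().run()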

Update

Equivalent Java code:

 // Display RTSP streaming of video
 // (c) 2011 enthusiasticgeek
 // This code is distributed in the hope that it will be useful,
 // but WITHOUT ANY WARRANTY; without even the implied warranty of
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE
 // Leave Credits intact

package video2; //replace this with your package
import java.awt.BorderLayout;
import java.awt.Dimension;

import javax.swing.JFrame;
import javax.swing.SwingUtilities;

//import org.gstreamer.Caps;
import org.gstreamer.Bus;
import org.gstreamer.Element;
import org.gstreamer.ElementFactory;
import org.gstreamer.Gst;
import org.gstreamer.GstObject;
import org.gstreamer.Pad;
import org.gstreamer.PadDirection;
import org.gstreamer.Pipeline;
import org.gstreamer.State;
import org.gstreamer.TagList;
import org.gstreamer.swing.VideoComponent;

/**
 * A Simple videotest example.
 */
public class Main {
    public Main() {
    }
    private static Pipeline pipe;
    public static void main(String[] args) {
    // Quartz is abysmally slow at scaling video for some reason, so turn it off.
    System.setProperty("apple.awt.graphics.UseQuartz", "false");

    args = Gst.init("SwingVideoTest", args);

    pipe = new Pipeline("pipeline");
    /*
    final Element videosrc = ElementFactory.make("videotestsrc", "source");
    final Element videofilter = ElementFactory.make("capsfilter", "flt");
    videofilter.setCaps(Caps.fromString("video/x-raw-yuv, width=720, height=576"
            + ", bpp=32, depth=32, framerate=25/1"));
    */

     pipe.getBus().connect(new Bus.ERROR() {
        public void errorMessage(GstObject source, int code, String message) {
            System.out.println("Error occurred: " + message);
            Gst.quit();
        }
    });
    pipe.getBus().connect(new Bus.STATE_CHANGED() {
        public void stateChanged(GstObject source, State old, State current, State pending) {
            if (source == pipe) {
                System.out.println("Pipeline state changed from " + old + " to " + current);
            }
        }
    });
    pipe.getBus().connect(new Bus.EOS() {
        public void endOfStream(GstObject source) {
            System.out.println("Finished playing file");
            Gst.quit();
        }
    });        

     pipe.getBus().connect(new Bus.TAG() {
        public void tagsFound(GstObject source, TagList tagList) {
            for (String tag : tagList.getTagNames()) {
                System.out.println("Found tag " + tag + " = "
                        + tagList.getValue(tag, 0));
            }
        }
    });

    final Element source = ElementFactory.make("rtspsrc", "Source");
    final Element demux = ElementFactory.make("rtpmp4vdepay", "Depay");
    final Element decoder=ElementFactory.make("ffdec_mpeg4", "Decoder");
    final Element colorspace = ElementFactory.make("ffmpegcolorspace",  "Colorspace");
    //final Element sink = ElementFactory.make ("autovideosink", "Output");

    SwingUtilities.invokeLater(new Runnable() {

        public void run() {
            // Create the video component and link it in
            VideoComponent videoComponent = new VideoComponent();
            Element videosink = videoComponent.getElement();

           source.connect(new Element.PAD_ADDED() {
           public void padAdded(Element element, Pad pad) {
            pad.link(demux.getStaticPad("sink"));
           }
            });

           Pad p = new Pad(null, PadDirection.SRC);
           source.addPad(p);

            source.set("location","rtsp://<user>:<pass>@<ip>/mpeg4/1/media.amp");  //replace this with your source

            pipe.addMany(source, demux, decoder, colorspace, videosink);
            Element.linkMany(demux, decoder, colorspace, videosink);

            // Now create a JFrame to display the video output
            JFrame frame = new JFrame("Swing Video Test");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.add(videoComponent, BorderLayout.CENTER);
            videoComponent.setPreferredSize(new Dimension(720, 576));
            frame.pack();
            frame.setVisible(true);

            // Start the pipeline processing
            pipe.play();
        }
    });
    }
}
answered 2011-07-25T16:32:09

This answer explains why the gst.LinkError occurs: rtspsrc has dynamic ("sometimes") source pads, which are only created once the stream has been negotiated, so linking it to the rest of the pipeline at construction time fails.

With gst.parse_launch you can build the whole pipeline from a launch string (it takes care of the delayed linking for you), give elements a name, retrieve them, and set their properties:

pipeline = gst.parse_launch('rtspsrc name=source latency=0 ! decodebin ! autovideosink')
source = pipeline.get_by_name('source')
source.props.location = 'rtsp://192.168.0.127/axis-media/media.amp'
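
The same idea should carry over to the h264-only pipeline from the question; the following is a sketch, assuming the elements from the asker's working CLI:

pipeline = gst.parse_launch('rtspsrc name=source latency=0 ! rtph264depay ! ffdec_h264 ! xvimagesink')
source = pipeline.get_by_name('source')
source.props.location = 'rtsp://192.168.0.127/axis-media/media.amp'
pipeline.set_state(gst.STATE_PLAYING)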
answered 2010-11-18T19:52:31