
I am using the GStreamer library for iOS for RTSP streaming from an IP camera on the local network. I am using this code, provided by the tutorial itself -

#import "GStreamerBackend.h"

#import <gst/gst.h>
#include <gst/interfaces/xoverlay.h>
#include <gst/video/video.h>


@interface GStreamerBackend()
// These are plain C pointers to GStreamer/GLib structs, so ARC does not manage them and there is no need to expose them as properties.
{
    GstElement *pipeline;  /* The running pipeline */
    GstElement *video_sink;/* The video sink element which receives XOverlay commands */
    GMainContext *context; /* GLib context used to run the main loop */
    GMainLoop *main_loop;  /* GLib main loop */
    gboolean initialized;  /* To avoid informing the UI multiple times about the initialization */
    GstState target_state;       /* Desired pipeline state, to be set once buffering is complete */
}

@property(nonatomic, weak)id<GstreamerDelegate> delegate;
@property(nonatomic, weak)UIView *ui_video_view;
@property(nonatomic)BOOL is_live;

@end

/* Forward declarations of the GstBus callbacks defined further below,
 * so they can be referenced from app_function */
static void error_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self);
static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self);
static void buffering_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self);
static void clock_lost_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self);

@implementation GStreamerBackend


-(id)initWithDelegate:(id)delegate videoView:(UIView *)videoView
{
    if(self = [super init])
    {
        self.delegate = delegate;
        self.ui_video_view = videoView;

        /* Start the bus monitoring task */
        @autoreleasepool
        {
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
                [self app_function];
            });
        }

    }
    return self;
}


#pragma mark Pipeline Setup and Running
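/* Set the URI that playbin2 will play */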
-(void) setUri:(NSString*)uri
{
    const char *char_uri = [uri UTF8String];
    g_object_set(pipeline, "uri", char_uri, NULL);
}


/* Main method for the bus monitoring code */
-(void) app_function
{
    GstBus *bus;
    //GSource *timeout_source;
    GSource *bus_source;
    GError *error = NULL;

    /* Create our own GLib Main Context and make it the default one */
    context = g_main_context_new ();
    g_main_context_push_thread_default(context);

    /* Build pipeline */
    pipeline = gst_parse_launch("playbin2", &error);
    if (error) {
        gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
        g_clear_error (&error);
        [self setUIMessage:message];
        g_free (message);
        return;
    }

    /* Set the pipeline to READY, so it can already accept a window handle */
    gst_element_set_state(pipeline, GST_STATE_READY);

    video_sink = gst_bin_get_by_interface(GST_BIN(pipeline), GST_TYPE_X_OVERLAY);
    if (!video_sink) {
        NSLog(@"Could not retrieve video sink");
        return;
    }
    gst_x_overlay_set_window_handle(GST_X_OVERLAY(video_sink), (guintptr) (id) self.ui_video_view);

    /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
    bus = gst_element_get_bus (pipeline);
    bus_source = gst_bus_create_watch (bus);
    g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
    g_source_attach (bus_source, context);
    g_source_unref (bus_source);
    g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, (__bridge void *)self);
    //g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, (__bridge void *)self);
    g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, (__bridge void *)self);
    g_signal_connect (G_OBJECT (bus), "message::buffering", (GCallback)buffering_cb, (__bridge void *)self);
    g_signal_connect (G_OBJECT (bus), "message::clock-lost", (GCallback)clock_lost_cb, (__bridge void *)self);
    gst_object_unref (bus);

    /* Register a function that GLib will call 4 times per second */
   /* timeout_source = g_timeout_source_new (250);
    g_source_set_callback (timeout_source, (GSourceFunc)refresh_ui, (__bridge void *)self, NULL);
    g_source_attach (timeout_source, context);
    g_source_unref (timeout_source);
    */

    /* Create a GLib Main Loop and set it to run */
    NSLog(@"Entering main loop...");

    @autoreleasepool {
        main_loop = g_main_loop_new (context, FALSE);
        [self check_initialization_complete];
        g_main_loop_run (main_loop);
        NSLog(@"Exited main loop");
        g_main_loop_unref (main_loop);
        main_loop = NULL;
    }

    /* Free resources */
    g_main_context_pop_thread_default(context);
    g_main_context_unref (context);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    pipeline = NULL;

    self.ui_video_view = NULL;
    self.delegate = NULL;

    return;

}


/* Check if all conditions are met to report GStreamer as initialized.
 * These conditions will change depending on the application */
-(void) check_initialization_complete
{
    if (!initialized && main_loop)
    {
        [self.delegate gstreamerInitialized];
        initialized = TRUE;
    }
}



/* Notify UI about pipeline state changes */
static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self)
{
    GstState old_state, new_state, pending_state;
    gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
    /* Only pay attention to messages coming from the pipeline, not its children */
    if (GST_MESSAGE_SRC (msg) == GST_OBJECT (self->pipeline))
    {
        gchar *message = g_strdup_printf("State changed to %s", gst_element_state_get_name(new_state));
        [self setUIMessage:message];
        g_free (message);
    }
}


/* Retrieve errors from the bus and show them on the UI */
static void error_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self)
{
    GError *err;
    gchar *debug_info;
    gchar *message_string;

    gst_message_parse_error (msg, &err, &debug_info);
    message_string = g_strdup_printf ("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message);
    g_clear_error (&err);
    g_free (debug_info);
    [self setUIMessage:message_string];
    g_free (message_string);
    gst_element_set_state (self->pipeline, GST_STATE_NULL);
}


#pragma mark Network Resilience
/* Called when buffering messages are received. We inform the UI about the current buffering level and
 * keep the pipeline paused until 100% buffering is reached. At that point, set the desired state. */
static void buffering_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self) {
    gint percent;

    if (self.is_live)
        return;

    gst_message_parse_buffering (msg, &percent);
    if (percent < 100 && self->target_state >= GST_STATE_PAUSED)
    {
        gchar * message_string = g_strdup_printf ("Buffering %d%%", percent);
        gst_element_set_state (self->pipeline, GST_STATE_PAUSED);
        [self setUIMessage:message_string];
        g_free (message_string);
    }
    else if (self->target_state >= GST_STATE_PLAYING)
    {
        gst_element_set_state (self->pipeline, GST_STATE_PLAYING);
    }
    else if (self->target_state >= GST_STATE_PAUSED)
    {
        [self setUIMessage:"Buffering complete"];
    }
}


/* Called when the clock is lost */
static void clock_lost_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self) {
    if (self->target_state >= GST_STATE_PLAYING) {
        gst_element_set_state (self->pipeline, GST_STATE_PAUSED);
        gst_element_set_state (self->pipeline, GST_STATE_PLAYING);
    }
}


#pragma mark Functionality related
/* Retrieve the video sink's Caps and tell the application about the media size */
static void check_media_size (GStreamerBackend *self)
{
    GstElement *video_sink;
    GstPad *video_sink_pad;
    GstCaps *caps;
    GstVideoFormat fmt;
    int width;
    int height;

    /* Retrieve the Caps at the entrance of the video sink */
    g_object_get (self->pipeline, "video-sink", &video_sink, NULL);

    /* Do nothing if there is no video sink (this might be an audio-only clip) */
    if (!video_sink) return;

    video_sink_pad = gst_element_get_static_pad (video_sink, "sink");
    caps = gst_pad_get_negotiated_caps (video_sink_pad);

    if (gst_video_format_parse_caps(caps, &fmt, &width, &height)) {
        int par_n, par_d;
        if (gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d)) {
            width = width * par_n / par_d;
        }
        NSLog(@"Media size is %dx%d, notifying application", width, height);

        [self.delegate mediaSizeChanged:width height:height];
    }

    gst_caps_unref(caps);
    gst_object_unref (video_sink_pad);
    gst_object_unref(video_sink);
}


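/* Try to set the pipeline to PLAYING and report a failure to the UI */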
-(void) play
{
    if(gst_element_set_state(pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        [self setUIMessage:"Failed to set pipeline to playing"];
    }
}


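/* Try to set the pipeline to PAUSED and report a failure to the UI */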
-(void) pause
{
    if(gst_element_set_state(pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
        [self setUIMessage:"Failed to set pipeline to paused"];
    }
}


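/* Forward a message string from the GStreamer code to the UI via the delegate */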
-(void)setUIMessage:(gchar*)message
{
    NSString *string = [NSString stringWithUTF8String:message];
    [self.delegate gstreamerSetUIMessage:string];
}


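/* Quit the main loop; app_function then stops the pipeline and frees its resources */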
-(void) deinit
{
    if (main_loop) {
        g_main_loop_quit(main_loop);
    }
}


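/* Last-resort cleanup in case the pipeline was not already torn down in app_function */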
-(void) dealloc
{
    if (pipeline)
    {
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        pipeline = NULL;
    }
}

@end

On iOS 5 I get the following warnings, but not on iOS 6 and later -

objc[1512]: Object 0x39bfff0 of class __NSCFDictionary autoreleased with no pool in place - just leaking - break on objc_autoreleaseNoPool() to debug
objc[1512]: Object 0x39bfd50 of class __NSCFDictionary autoreleased with no pool in place - just leaking - break on objc_autoreleaseNoPool() to debug
objc[1512]: Object 0x2d57c00 of class __NSCFString autoreleased with no pool in place - just leaking - break on objc_autoreleaseNoPool() to debug

After searching for this kind of error, I added an @autoreleasepool block around [self app_function];. I still get the same errors, and after that I get memory warnings and a crash.
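To be precise about the placement, this is roughly what I tried (a minimal sketch): the pool is created inside the dispatched block, so it lives on the GCD worker thread that runs app_function rather than on the thread that calls dispatch_async.

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    @autoreleasepool {
        /* pool exists on the worker thread for the whole lifetime of app_function */
        [self app_function];
    }
});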

I also placed a breakpoint on objc_autoreleaseNoPool by going to the Breakpoint Navigator (⌘ + 6) and clicking the + symbol at the bottom -> Add Exception Breakpoint, like this -

[screenshot of the breakpoint setup]

For the first warning, about "Object 0x39bfff0 of class __NSCFDictionary", this is what the Debug Navigator shows -

[screenshot of the Debug Navigator output]

Apparently the problem occurs at VideoStreaming gst_egl_choose_config at gstegladaptation_eagl.m:233, but I can't find this file anywhere. I also searched the GStreamer SDK installed at ~/Library/Developer/GStreamer, but the file isn't there.

On iOS 5 I get memory warnings and then a crash.

On iOS 6 -- no warnings show up in the log, and there are no memory warnings or crashes, although the breakpoint on objc_autoreleaseNoPool is still being hit.

Can anyone give a solution or an explanation for this?

