#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <string.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <glib.h>
#include <gst/gst.h>

#include "gmencoder.h"

#define G_MENCODER_GET_PRIVATE(obj) \
    (G_TYPE_INSTANCE_GET_PRIVATE ((obj), G_TYPE_MENCODER, GMencoderPrivate))

//#define SUPPORT_MULT_INPUT 0

typedef struct _GMencoderPrivate GMencoderPrivate;
typedef struct _SetupInfo SetupInfo;

struct _SetupInfo {
    gchar   *video_encode;
    gchar   *mux_name;
    gchar  **video_encode_prop;
    gdouble  video_fps;
    gdouble  video_rate;
    guint    video_width;
    guint    video_height;
    gchar   *audio_encode;
    gchar  **audio_encode_prop;
    guint    audio_rate;
};

struct _GMencoderPrivate {
    GstElement   *pipe;
    GstElement   *abin;
    GstElement   *vbin;
    GstElement   *sink;
    GstElement   *src;
    gboolean      ready;
    SetupInfo    *info;
    GstClockTime  videot;
    GstClockTime  audiot;
    gint          fd;
    gint          sources;
    gint          tick_id;
    gint64        duration;
};

enum {
    PAUSED,
    PLAYING,
    STOPED,
    EOS,
    ERROR,
    LAST_SIGNAL
};

static void        g_mencoder_class_init      (GMencoderClass *klass);
static void        g_mencoder_init            (GMencoder *object);
static void        g_mencoder_dispose         (GObject *object);
static void        g_mencoder_finalize        (GObject *object);
static GstElement *_create_audio_bin          (const gchar *encode,
                                               gchar **encode_prop,
                                               gint rate);
static GstElement *_create_video_bin          (const gchar *encode,
                                               gchar **encode_prop,
                                               gdouble fps,
                                               gint rate,
                                               guint width,
                                               guint height);
static gboolean    _pipeline_bus_cb           (GstBus *bus,
                                               GstMessage *msg,
                                               gpointer user_data);
static void        _decodebin_new_pad_cb      (GstElement *object,
                                               GstPad *pad,
                                               gboolean flag,
                                               gpointer user_data);
static void        _decodebin_unknown_type_cb (GstElement *object,
                                               GstPad *pad,
                                               GstCaps *caps,
                                               gpointer user_data);
static void        _close_output              (GMencoder *self);
static void        _open_output               (GMencoder *self,
                                               const gchar *uri);
static GstElement *_create_source             (const gchar *uri);
static GstElement *_create_pipeline           (GMencoder *self,
                                               const gchar *video_encode,
                                               const gchar *mux_name,
                                               gchar **video_encode_prop,
                                               gdouble video_fps,
                                               gdouble video_rate,
                                               guint video_width,
                                               guint video_height,
                                               const gchar *audio_encode,
                                               gchar **audio_encode_prop,
                                               guint audio_rate);
static gboolean    _tick_cb                   (gpointer data);

static guint g_mencoder_signals[LAST_SIGNAL] = { 0 };

G_DEFINE_TYPE (GMencoder, g_mencoder, G_TYPE_OBJECT)

static void
g_mencoder_class_init (GMencoderClass *klass)
{
    GObjectClass *object_class;

    object_class = (GObjectClass *) klass;
    g_type_class_add_private (klass, sizeof (GMencoderPrivate));

    object_class->dispose  = g_mencoder_dispose;
    object_class->finalize = g_mencoder_finalize;

    g_mencoder_signals[PAUSED] =
        g_signal_new ("paused",
                      G_OBJECT_CLASS_TYPE (object_class),
                      G_SIGNAL_RUN_FIRST,
                      0, NULL, NULL,
                      g_cclosure_marshal_VOID__VOID,
                      G_TYPE_NONE, 0);

    g_mencoder_signals[PLAYING] =
        g_signal_new ("playing",
                      G_OBJECT_CLASS_TYPE (object_class),
                      G_SIGNAL_RUN_FIRST,
                      0, NULL, NULL,
                      g_cclosure_marshal_VOID__VOID,
                      G_TYPE_NONE, 0);

    g_mencoder_signals[STOPED] =
        g_signal_new ("stoped",
                      G_OBJECT_CLASS_TYPE (object_class),
                      G_SIGNAL_RUN_FIRST,
                      0, NULL, NULL,
                      g_cclosure_marshal_VOID__VOID,
                      G_TYPE_NONE, 0);

    g_mencoder_signals[EOS] =
        g_signal_new ("eos",
                      G_OBJECT_CLASS_TYPE (object_class),
                      G_SIGNAL_RUN_FIRST,
                      0, NULL, NULL,
                      g_cclosure_marshal_VOID__VOID,
                      G_TYPE_NONE, 0);

    g_mencoder_signals[ERROR] =
        g_signal_new ("error",
                      G_OBJECT_CLASS_TYPE (object_class),
                      G_SIGNAL_RUN_LAST,
                      0, NULL, NULL,
                      g_cclosure_marshal_VOID__STRING,
                      G_TYPE_NONE, 1, G_TYPE_STRING);
}

static void
g_mencoder_init (GMencoder *self)
{
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (self);
    priv->info = g_new0 (SetupInfo, 1);
}
static void
g_mencoder_dispose (GObject *object)
{
}

static void
g_mencoder_finalize (GObject *object)
{
    //TODO: clear vars
    g_mencoder_close_stream (G_MENCODER (object));
}

GMencoder *
g_mencoder_new (void)
{
    return g_object_new (G_TYPE_MENCODER, NULL);
}

static void
_obj_set_prop (GObject *obj,
               const gchar *prop_name,
               const gchar *prop_val)
{
    GValue p = {0};
    GParamSpec *s = NULL;
    GObjectClass *k = G_OBJECT_GET_CLASS (obj);

    s = g_object_class_find_property (k, prop_name);
    if (s == NULL) {
        g_print ("Invalid property name: %s\n", prop_name);
        return;
    }

    g_value_init (&p, s->value_type);
    switch (s->value_type) {
        case G_TYPE_INT:
            g_value_set_int (&p, atoi (prop_val));
            break;
        case G_TYPE_STRING:
            g_value_set_string (&p, prop_val);
            break;
        default:
            return;
    }

    g_object_set_property (obj, prop_name, &p);
    g_value_unset (&p);
}

static GstElement *
_create_element_with_prop (const gchar *factory_name,
                           const gchar *element_name,
                           gchar **prop)
{
    GstElement *ret;
    int i;

    ret = gst_element_factory_make (factory_name, element_name);
    if (ret == NULL)
        return NULL;

    if (prop != NULL) {
        for (i = 0; i < g_strv_length (prop); i++) {
            if (prop[i] != NULL) {
                char **v = g_strsplit (prop[i], "=", 2);
                if (g_strv_length (v) == 2) {
                    _obj_set_prop (G_OBJECT (ret), v[0], v[1]);
                }
                g_strfreev (v);
            }
        }
    }

    return ret;
}
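/*
 * Property strings handed to _create_element_with_prop are "name=value"
 * pairs applied one by one through _obj_set_prop; only int and string
 * properties are handled.  A minimal sketch (the property name below is
 * illustrative and depends on the encoder element actually chosen):
 *
 *   gchar *aprop[] = { "bitrate=64", NULL };
 *   GstElement *enc = _create_element_with_prop ("lame", "aencode", aprop);
 */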
static GstElement *
_create_audio_bin (const gchar *encode,
                   gchar **encode_prop,
                   gint rate)
{
    GstElement *abin = NULL;
    GstElement *aqueue = NULL;
    GstElement *aconvert = NULL;
    GstElement *aencode = NULL;
    GstElement *aqueue_src = NULL;
    GstPad *apad = NULL;

    //audio/x-raw-int ! queue ! audioconvert ! faac ! rtpmp4gpay ! udpsink name=upd_audio host=224.0.0.1 port=5002
    abin       = gst_bin_new ("abin");
    aqueue     = gst_element_factory_make ("queue", "aqueue");
    aconvert   = gst_element_factory_make ("audioconvert", "aconvert");
    aencode    = _create_element_with_prop ((encode ? encode : "lame"),
                                            "aencode", encode_prop);
    aqueue_src = gst_element_factory_make ("queue", "aqueue_src");

    if ((abin == NULL) || (aqueue == NULL) || (aconvert == NULL) ||
        (aencode == NULL) || (aqueue_src == NULL)) {
        g_warning ("Audio elements not found");
        goto error;
    }

    g_object_set (G_OBJECT (aencode), "bitrate", 32, NULL);
    /*
    if (rate > 0) {
        g_object_set (G_OBJECT (aencode), "bitrate", 32, NULL);
    }
    */

    gst_bin_add_many (GST_BIN (abin),
                      aqueue, aconvert, aencode, aqueue_src, NULL);
    if (gst_element_link_many (aqueue, aconvert, aencode, aqueue_src,
                               NULL) == FALSE) {
        g_warning ("Failed to link audio elements");
    }

    //TODO: apply audio rate

    // ghost pad the audio bin
    apad = gst_element_get_pad (aqueue, "sink");
    gst_element_add_pad (abin, gst_ghost_pad_new ("sink", apad));
    gst_object_unref (apad);

    apad = gst_element_get_pad (aqueue_src, "src");
    gst_element_add_pad (abin, gst_ghost_pad_new ("src", apad));
    gst_object_unref (apad);

    return abin;

error:
    if (abin != NULL)
        gst_object_unref (abin);

    if (aqueue != NULL)
        gst_object_unref (aqueue);

    if (aconvert != NULL)
        gst_object_unref (aconvert);

    if (aencode != NULL)
        gst_object_unref (aencode);

    if (aqueue_src != NULL)
        gst_object_unref (aqueue_src);

    if (apad != NULL)
        gst_object_unref (apad);

    return NULL;
}

//queue ! videoscale ! video/x-raw-yuv,width=240,height=144 ! colorspace ! rate ! encode ! queue
static GstElement *
_create_video_bin (const gchar *encode,
                   gchar **encode_prop,
                   gdouble fps,
                   gint rate,
                   guint width,
                   guint height)
{
    GstElement *vbin = NULL;
    GstElement *vqueue = NULL;
    GstElement *vqueue_src = NULL;
    GstElement *vcolorspace = NULL;
    GstElement *vencode = NULL;
    GstElement *vrate = NULL;
    GstPad *vpad = NULL;

    vbin        = gst_bin_new ("vbin");
    vqueue      = gst_element_factory_make ("queue", "vqueue");
    vcolorspace = gst_element_factory_make ("ffmpegcolorspace", "colorspace");
    vencode     = _create_element_with_prop (
                      (encode != NULL ? encode : "ffenc_mpeg1video"),
                      "vencode", encode_prop);
    vqueue_src  = gst_element_factory_make ("queue", "queue_src");

    if ((vbin == NULL) || (vqueue == NULL) || (vcolorspace == NULL) ||
        (vencode == NULL) || (vqueue_src == NULL)) {
        g_warning ("Video elements not found");
        goto error;
    }

    gst_bin_add_many (GST_BIN (vbin),
                      vqueue, vcolorspace, vencode, vqueue_src, NULL);

    if ((width > 0) && (height > 0)) {
        // Scaling video
        GstCaps *vcaps;
        GstElement *vscale = gst_element_factory_make ("videoscale", "vscale");

        gst_bin_add (GST_BIN (vbin), vscale);

        vcaps = gst_caps_new_simple ("video/x-raw-yuv",
                                     "width", G_TYPE_INT, width,
                                     "height", G_TYPE_INT, height,
                                     NULL);

        gst_element_link (vqueue, vscale);

        if (gst_element_link_filtered (vscale, vcolorspace, vcaps) == FALSE) {
            g_warning ("Failed to resize video");
            gst_caps_unref (vcaps);
            goto error;
        }
        gst_caps_unref (vcaps);
    } else {
        gst_element_link (vqueue, vcolorspace);
    }

    if (fps > 0) {
        // Changing the video fps
        GstCaps *vcaps;
        vrate = gst_element_factory_make ("videorate", "vrate");

        gst_bin_add (GST_BIN (vbin), vrate);

        if (gst_element_link (vcolorspace, vrate) == FALSE) {
            g_warning ("Failed to link video elements");
            goto error;
        }

        vcaps = gst_caps_new_simple ("video/x-raw-yuv",
                                     "framerate", GST_TYPE_FRACTION,
                                     (int) (fps * 1000), 1000,
                                     NULL);

        if (gst_element_link_filtered (vrate, vencode, vcaps) == FALSE) {
            g_warning ("Failed to link vrate with vencode.");
            gst_caps_unref (vcaps);
            goto error;
        }
        gst_caps_unref (vcaps);
    } else {
        if (gst_element_link (vcolorspace, vencode) == FALSE) {
            g_warning ("Failed to link colorspace and video encode element.");
            goto error;
        }
    }

    gst_element_link (vencode, vqueue_src);

    // ghost pad the video bin
    vpad = gst_element_get_pad (vqueue, "sink");
    gst_element_add_pad (vbin, gst_ghost_pad_new ("sink", vpad));
    gst_object_unref (vpad);

    vpad = gst_element_get_pad (vqueue_src, "src");
    gst_element_add_pad (vbin, gst_ghost_pad_new ("src", vpad));
    gst_object_unref (vpad);

    return vbin;

error:
    if (vpad != NULL)
        gst_object_unref (vpad);

    if (vbin != NULL)
        gst_object_unref (vbin);

    if (vqueue != NULL)
        gst_object_unref (vqueue);

    if (vencode != NULL)
        gst_object_unref (vencode);

    if (vqueue_src != NULL)
        gst_object_unref (vqueue_src);

    if (vcolorspace != NULL)
        gst_object_unref (vcolorspace);

    return NULL;
}
void
g_mencoder_setup_stream (GMencoder *self,
                         const gchar *mux_name,
                         const gchar *video_encode,
                         gchar **video_encode_prop,
                         gdouble video_fps,
                         gdouble video_rate,
                         guint video_width,
                         guint video_height,
                         const gchar *audio_encode,
                         gchar **audio_encode_prop,
                         guint audio_rate,
                         const gchar *out_uri)
{
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (self);

    if (priv->ready == TRUE) {
        g_warning ("Stream already configured. You need to close the stream first.");
        return;
    }

    _close_output (self);
    _open_output (self, out_uri);

    priv->sources = 0;
    priv->pipe = _create_pipeline (self,
                                   video_encode, mux_name, video_encode_prop,
                                   video_fps, video_rate,
                                   video_width, video_height,
                                   audio_encode, audio_encode_prop,
                                   audio_rate);
}
gboolean
g_mencoder_append_uri (GMencoder *self,
                       const gchar *uri)
{
    GstPad *pad_src = NULL;
    GstPad *pad_sink = NULL;
    GstElement *src;
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (self);
    gboolean ret = FALSE;
    GstElement *ap = NULL;
    GstElement *vp = NULL;

    g_return_val_if_fail (priv->pipe != NULL, FALSE);
    g_return_val_if_fail (priv->ready == FALSE, FALSE);

#ifndef SUPPORT_MULT_INPUT
    g_return_val_if_fail (priv->sources < 1, FALSE);
#endif

    src = _create_source (uri);
    if (src == NULL)
        return FALSE;

    priv->src = gst_bin_get_by_name (GST_BIN (src), "src");
    gst_bin_add (GST_BIN (priv->pipe), src);

#ifdef SUPPORT_MULT_INPUT
    ap = gst_bin_get_by_name (GST_BIN (priv->pipe), "ap");
    vp = gst_bin_get_by_name (GST_BIN (priv->pipe), "vp");
#else
    ap = gst_bin_get_by_name (GST_BIN (priv->pipe), "abin");
    vp = gst_bin_get_by_name (GST_BIN (priv->pipe), "vbin");
#endif

    if ((vp == NULL) || (ap == NULL)) {
        g_warning ("Failed to get output bin");
        goto error;
    }

    pad_src = gst_element_get_pad (src, "src_audio");
    pad_sink = gst_element_get_compatible_pad (ap,
                                               pad_src,
                                               gst_pad_get_caps (pad_src));

    if ((pad_sink == NULL) || (pad_src == NULL))
        goto error;

    GstPadLinkReturn lret = gst_pad_link (pad_src, pad_sink);
    if (lret != GST_PAD_LINK_OK)
        goto error;

    gst_object_unref (pad_src);
    gst_object_unref (pad_sink);

    pad_src = gst_element_get_pad (src, "src_video");
    pad_sink = gst_element_get_compatible_pad (vp,
                                               pad_src,
                                               gst_pad_get_caps (pad_src));

    if ((pad_src == NULL) || (pad_sink == NULL))
        goto error;

    if (gst_pad_link (pad_src, pad_sink) != GST_PAD_LINK_OK) {
        g_warning ("Invalid source: failed to link video pad");
        goto error;
    }

    priv->sources++;
    ret = TRUE;

error:
    if ((src != NULL) && (ret == FALSE)) {
        gst_bin_remove (GST_BIN (priv->pipe), src);
        gst_object_unref (src);
    }

    if (ap != NULL)
        gst_object_unref (ap);

    if (vp != NULL)
        gst_object_unref (vp);

    if (pad_src != NULL)
        gst_object_unref (pad_src);

    if (pad_sink != NULL)
        gst_object_unref (pad_sink);

    return ret;
}

void
g_mencoder_remove_uri (GMencoder *self,
                       const gchar *uri)
{
//    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (self);
    //TODO: remove src
}

void
g_mencoder_play_stream (GMencoder *self)
{
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (self);

    g_return_if_fail (priv->ready == FALSE);

    priv->ready = TRUE;
    gst_element_set_state (priv->pipe, GST_STATE_PLAYING);
    priv->tick_id = g_timeout_add (500, _tick_cb, self);
}

void
g_mencoder_pause_stream (GMencoder *self)
{
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (self);

    g_return_if_fail (priv->ready == TRUE);

    gst_element_set_state (priv->pipe, GST_STATE_PAUSED);
}

void
g_mencoder_close_stream (GMencoder *self)
{
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (self);

    if (priv->tick_id != 0) {
        g_source_remove (priv->tick_id);
        priv->tick_id = 0;
    }

    if (priv->pipe != NULL) {
        //TODO: fix pipeline dispose
        g_debug ("SETTING STATE TO NULL");
        //gst_element_set_state (priv->pipe, GST_STATE_NULL);
        g_debug ("SETTING STATE TO NULL: OK");
        //gst_object_unref (priv->pipe);
        gst_object_unref (priv->src);
        priv->src = NULL;
        priv->pipe = NULL;
        priv->abin = NULL;
        priv->vbin = NULL;
        priv->sink = NULL;
    }
    priv->ready = FALSE;
}
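/*
 * Typical use of the public API (a minimal sketch; the encoder names,
 * property values and URIs below are illustrative assumptions, not values
 * required by this code):
 *
 *   GMencoder *enc = g_mencoder_new ();
 *   gchar *vprop[] = { "bitrate=300000", NULL };
 *   gchar *aprop[] = { "bitrate=64", NULL };
 *
 *   g_mencoder_setup_stream (enc, "ffmux_mpeg",
 *                            "ffenc_mpeg1video", vprop, 25.0, 0, 320, 240,
 *                            "lame", aprop, 0,
 *                            "file:///tmp/out.mpg");   // or e.g. "fd://1"
 *   g_mencoder_append_uri (enc, "file:///tmp/input.avi");
 *   g_mencoder_play_stream (enc);
 *   // ... run a GMainLoop and watch the "eos" / "error" signals ...
 *   g_mencoder_close_stream (enc);
 */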
static GstElement *
_create_pipeline (GMencoder *self,
                  const gchar *video_encode,
                  const gchar *mux_name,
                  gchar **video_encode_prop,
                  gdouble video_fps,
                  gdouble video_rate,
                  guint video_width,
                  guint video_height,
                  const gchar *audio_encode,
                  gchar **audio_encode_prop,
                  guint audio_rate)
{
    GstBus *bus = NULL;
    GstElement *pipe = NULL;
    GstElement *sink = NULL;
    GstElement *mux = NULL;
    GstElement *abin = NULL;
    GstElement *vbin = NULL;
    GstElement *queue = NULL;
    GstPad *aux_pad = NULL;
    GstPad *mux_pad = NULL;
#ifdef SUPPORT_MULT_INPUT
    GstElement *ap = NULL;
    GstElement *vp = NULL;
#endif
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (self);

    pipe = gst_pipeline_new ("pipe");

#ifdef SUPPORT_MULT_INPUT
    ap = gst_element_factory_make ("concatmux", "ap");
    vp = gst_element_factory_make ("concatmux", "vp");
    gst_bin_add_many (GST_BIN (pipe), ap, vp, NULL);
#endif

    mux = gst_element_factory_make ((mux_name ? mux_name : "ffmux_mpeg"),
                                    "mux");
    if (mux == NULL)
        goto error;

    queue = gst_element_factory_make ("queue", "queueu_sink");

    sink = gst_element_factory_make ("fdsink", "sink");
    if (sink == NULL)
        goto error;

    g_object_set (G_OBJECT (sink),
                  "fd", priv->fd,
                  "sync", FALSE,
                  NULL);

    abin = _create_audio_bin (audio_encode, audio_encode_prop, audio_rate);
    if (abin == NULL)
        goto error;

    vbin = _create_video_bin (video_encode, video_encode_prop,
                              video_fps, video_rate,
                              video_width, video_height);
    if (vbin == NULL)
        goto error;

    // Finish Pipe
    gst_bin_add_many (GST_BIN (pipe), abin, vbin, mux, queue, sink, NULL);

#ifdef SUPPORT_MULT_INPUT
    if (gst_element_link (ap, abin) == FALSE) {
        g_warning ("Failed to link concat and abin");
        goto error;
    }

    if (gst_element_link (vp, vbin) == FALSE) {
        g_warning ("Failed to link concat and vbin");
    }
#endif

    // Link bins with mux
    aux_pad = gst_element_get_pad (abin, "src");
    mux_pad = gst_element_get_compatible_pad (mux, aux_pad,
                                              GST_PAD_CAPS (aux_pad));
    if (mux_pad == NULL) {
        g_warning ("Mux element has no audio pad");
        goto error;
    }

    GstPadLinkReturn ret = gst_pad_link (aux_pad, mux_pad);
    if (ret != GST_PAD_LINK_OK) {
        g_warning ("Failed to link audio and mux: %d", ret);
        goto error;
    }
    gst_object_unref (aux_pad);
    gst_object_unref (mux_pad);

    aux_pad = gst_element_get_pad (vbin, "src");
    mux_pad = gst_element_get_compatible_pad (mux, aux_pad,
                                              GST_PAD_CAPS (aux_pad));
    if (mux_pad == NULL) {
        g_warning ("Mux element has no video pad");
        goto error;
    }

    ret = gst_pad_link (aux_pad, mux_pad);
    if (ret != GST_PAD_LINK_OK) {
        g_warning ("Failed to link video and mux: %d", ret);
        goto error;
    }
    gst_object_unref (aux_pad);
    gst_object_unref (mux_pad);
    aux_pad = NULL;
    mux_pad = NULL;

    // Link mux with sink
    gst_element_link_many (mux, queue, sink, NULL);

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipe));
    gst_bus_add_watch (bus, _pipeline_bus_cb, self);
    gst_object_unref (bus);

    return pipe;

error:
    g_warning ("Failed to create the encoding pipeline");

    if (pipe != NULL)
        gst_object_unref (pipe);

    if (mux != NULL)
        gst_object_unref (mux);

    if (mux_pad != NULL)
        gst_object_unref (mux_pad);

    if (aux_pad != NULL)
        gst_object_unref (aux_pad);

    if (sink != NULL)
        gst_object_unref (sink);

    if (abin != NULL)
        gst_object_unref (abin);

    if (vbin != NULL)
        gst_object_unref (vbin);

    return NULL;
}

static void
_close_output (GMencoder *self)
{
}
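// Source bin: <uri source> ! queue ! decodebin, with the decoded audio and
// video streams routed to "aqueue" / "vqueue" and exposed through the ghost
// pads "src_audio" and "src_video".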
static GstElement *
_create_source (const gchar *uri)
{
    GstElement *bsrc = NULL;
    GstElement *src = NULL;
    GstElement *queue = NULL;
    GstElement *aqueue = NULL;
    GstElement *vqueue = NULL;
    GstElement *decode = NULL;
    GstPad *src_pad = NULL;

    bsrc = gst_bin_new (NULL);

    //src = gst_element_factory_make ("gnomevfssrc", "src");
    //g_object_set (G_OBJECT (src), "location", uri, NULL);
    src = gst_element_make_from_uri (GST_URI_SRC, uri, "src");
    if (src == NULL)
        goto error;

    decode = gst_element_factory_make ("decodebin", "decode");
    if (decode == NULL)
        goto error;

    queue = gst_element_factory_make ("queue", "queue_src");

    aqueue = gst_element_factory_make ("queue", "aqueue");
    if (aqueue == NULL)
        goto error;

    vqueue = gst_element_factory_make ("queue", "vqueue");
    if (vqueue == NULL)
        goto error;

    gst_bin_add_many (GST_BIN (bsrc), src, queue, decode, aqueue, vqueue, NULL);
    gst_element_link_many (src, queue, decode, NULL);

    g_signal_connect (G_OBJECT (decode),
                      "new-decoded-pad",
                      G_CALLBACK (_decodebin_new_pad_cb),
                      bsrc);

    g_signal_connect (G_OBJECT (decode),
                      "unknown-type",
                      G_CALLBACK (_decodebin_unknown_type_cb),
                      bsrc);

    src_pad = gst_element_get_pad (aqueue, "src");
    gst_element_add_pad (bsrc, gst_ghost_pad_new ("src_audio", src_pad));
    gst_object_unref (src_pad);

    src_pad = gst_element_get_pad (vqueue, "src");
    gst_element_add_pad (bsrc, gst_ghost_pad_new ("src_video", src_pad));
    gst_object_unref (src_pad);

    return bsrc;

error:
    if (src != NULL)
        gst_object_unref (src);

    if (decode != NULL)
        gst_object_unref (decode);

    if (aqueue != NULL)
        gst_object_unref (aqueue);

    if (vqueue != NULL)
        gst_object_unref (vqueue);

    return NULL;
}

static void
_open_output (GMencoder *self,
              const gchar *uri)
{
    gchar **i;
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (self);

    i = g_strsplit (uri, "://", 0);
    if (strcmp (i[0], "fd") == 0) {
        priv->fd = atoi (i[1]);
    } else if (strcmp (i[0], "file") == 0) {
        priv->fd = open (i[1], O_WRONLY | O_CREAT | O_TRUNC, 0644);
    } else {
        g_warning ("Output uri not supported");
    }

    g_strfreev (i);
}

static gboolean
_pipeline_bus_cb (GstBus *bus,
                  GstMessage *msg,
                  gpointer user_data)
{
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (user_data);

    switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_STATE_CHANGED:
        {
            GstState oldstate;
            GstState newstate;
            GstState pendingstate;

            gst_message_parse_state_changed (msg, &oldstate,
                                             &newstate, &pendingstate);

            if (pendingstate != GST_STATE_VOID_PENDING)
                break;

            if ((oldstate == GST_STATE_READY) &&
                (newstate == GST_STATE_PAUSED)) {
                if (priv->ready)
                    g_signal_emit (user_data, g_mencoder_signals[PAUSED], 0);
            } else if ((oldstate == GST_STATE_PAUSED) &&
                       (newstate == GST_STATE_PLAYING)) {
                g_signal_emit (user_data, g_mencoder_signals[PLAYING], 0);
            } else if ((oldstate == GST_STATE_READY) &&
                       (newstate == GST_STATE_NULL)) {
                g_signal_emit (user_data, g_mencoder_signals[STOPED], 0);
            }
            break;
        }

        case GST_MESSAGE_ERROR:
        {
            GError *error;
            gchar *debug;
            gchar *err_str;

            if (priv->tick_id != 0) {
                g_source_remove (priv->tick_id);
                priv->tick_id = 0;
            }

            gst_message_parse_error (msg, &error, &debug);
            err_str = g_strdup_printf ("Error [%d] %s (%s)",
                                       error->code,
                                       error->message,
                                       debug);
            priv->ready = FALSE;
            g_signal_emit (user_data, g_mencoder_signals[ERROR], 0, err_str);
            g_free (err_str);
            g_clear_error (&error);
            g_free (debug);
            break;
        }

        case GST_MESSAGE_EOS:
            priv->ready = FALSE;
            g_signal_emit (user_data, g_mencoder_signals[EOS], 0);
            break;

        case GST_MESSAGE_DURATION:
        {
            GstFormat format;
            gint64 duration;

            gst_message_parse_duration (msg, &format, &duration);
            if (format == GST_FORMAT_BYTES)
                priv->duration = duration;
            break;
        }

        default:
            break;
    }
    return TRUE;
}

static void
_decodebin_new_pad_cb (GstElement *object,
                       GstPad *pad,
                       gboolean flag,
                       gpointer user_data)
{
    GstCaps *caps;
    gchar *str_caps = NULL;
    GstElement *sink_element;
    GstPad *sink_pad;

    caps = gst_pad_get_caps (pad);
    str_caps = gst_caps_to_string (caps);

    if (strstr (str_caps, "audio") != NULL) {
        sink_element = gst_bin_get_by_name (GST_BIN (user_data), "aqueue");
    } else if (strstr (str_caps, "video") != NULL) {
        sink_element = gst_bin_get_by_name (GST_BIN (user_data), "vqueue");
    } else {
        g_warning ("invalid caps %s", str_caps);
        g_free (str_caps);
        gst_caps_unref (caps);
        return;
    }

    sink_pad = gst_element_get_pad (sink_element, "sink");
    gst_pad_link (pad, sink_pad);

    gst_object_unref (sink_element);
    gst_object_unref (sink_pad);
    g_free (str_caps);
    gst_caps_unref (caps);
}

static void
_decodebin_unknown_type_cb (GstElement *object,
                            GstPad *pad,
                            GstCaps *caps,
                            gpointer user_data)
{
    g_warning ("Unknown Type");
    //priv->ready = FALSE;
}
static gboolean
_tick_cb (gpointer user_data)
{
    GstFormat format = GST_FORMAT_BYTES;
    gint64 cur = 0;
    GMencoderPrivate *priv = G_MENCODER_GET_PRIVATE (user_data);

    if (priv->duration == 0) {
        gint64 d = 0;
        if (gst_element_query_duration (priv->src, &format, &d))
            priv->duration = d;
    }

    if (priv->duration != 0) {
        gst_element_query_position (priv->src, &format, &cur);
        g_print ("PROGRESS:%lli\n", (99 * cur) / priv->duration);
    }

    return TRUE;
}
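/*
 * Rough shape of the assembled pipeline, gst-launch style (an approximation
 * for reference only; the actual elements depend on the setup parameters):
 *
 *   <uri source> ! queue ! decodebin name=d
 *       d. ! queue ! audioconvert ! <audio encoder> ! queue ! mux.
 *       d. ! queue ! [videoscale !] ffmpegcolorspace ! [videorate !]
 *            <video encoder> ! queue ! mux.
 *   <muxer> name=mux ! queue ! fdsink
 */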