/*
 * GStreamer
 * Copyright (C) <2006> Renato Araujo Oliveira Filho <renato.filho@indt.org.br>
 *                      Rosfran Borges <rosfran.borges@indt.org.br>
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Library General Public License as published
 * by the Free Software Foundation; either version 2 of the License, or (at
 * your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library
 * General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; if not, write to the Free Software Foundation,
 * Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */
/*
* Element-Checklist-Version: 5
*/
/**
* SECTION:element-nuvdemux
*
* <refsect2>
* <para>
* Demuxes a .nuv file into raw or compressed audio and/or video streams.
* </para>
* <para>
* This element supports both push- and pull-based scheduling. Seeking is only
* available in pull mode, since the seek index is built by scanning the file.
* </para>
* <title>Example launch line</title>
* <para>
* <programlisting>
* gst-launch filesrc location=test.nuv ! nuvdemux name=demux demux.audio_00 ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_00 ! queue ! decodebin ! ffmpegcolorspace ! videoscale ! autovideosink
* </programlisting>
* Play (parse and decode) an .nuv file and try to output it to
* an automatically detected soundcard and videosink. If the NUV file contains
* compressed audio or video data, this will only work if you have the
* right decoder elements/plugins installed.
* </para>
* </refsect2>
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/gsterror.h>
#include <gst/gstplugin.h>
#include <string.h>
#include <math.h>
#include <locale.h>             /* setlocale() in plugin_init() when ENABLE_NLS is set */
#include <glib/gi18n.h>
#include "gstnuvdemux.h"
#define GST_NUV_DEMUX_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_NUV_DEMUX, GstNuvDemuxPrivate))
GST_DEBUG_CATEGORY_STATIC(nuvdemux_debug);
#define GST_CAT_DEFAULT nuvdemux_debug
#define GST_FLOW_ERROR_NO_DATA -101
#define GST_FLOW_ERROR_EOS -102
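/*
 * Internal flow returns used by this element: GST_FLOW_ERROR_NO_DATA is
 * returned in push mode when the adapter does not yet hold enough bytes,
 * GST_FLOW_ERROR_EOS when a pull_range hits the end of the stream.
 */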
enum {
NUV_PUSH_MODE = 0,
NUV_PULL_MODE
};
GST_DEBUG_CATEGORY_EXTERN(GST_CAT_EVENT);
static const GstElementDetails gst_nuv_demux_details =
GST_ELEMENT_DETAILS("Nuv demuxer",
"Codec/Demuxer",
"Demultiplex a .nuv file into audio and video",
"Renato Araujo Oliveira Filho <renato.filho@indt.org.br>,"
"Rosfran Borges <rosfran.borges@indt.org.br>");
/*
* file header
*/
typedef struct {
gchar id[12]; /* "NuppelVideo\0" or "MythTVVideo\0" */
gchar version[5]; /* "x.xx\0" */
gint i_width;
gint i_height;
gint i_width_desired;
gint i_height_desired;
gchar i_mode; /* P progressive, I interlaced */
gdouble d_aspect; /* pixel aspect ratio; 1.0 means square pixels */
gdouble d_fps;
// fps num/denom
gint i_fpsn;
gint i_fpsd;
gint i_video_blocks; /* 0 no video, -1 unknown */
gint i_audio_blocks;
gint i_text_blocks;
gint i_keyframe_distance;
} nuv_header;
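/*
 * On-disk layout of the 72-byte file header parsed by
 * gst_nuv_demux_header_load() (all integers little-endian):
 *    0-11  id ("NuppelVideo\0" or "MythTVVideo\0")
 *   12-16  version string ("x.xx\0")
 *   20-23  width              24-27  height
 *   28-31  desired width      32-35  desired height
 *   36     mode ('P'/'I')
 *   40-47  aspect ratio (little-endian double)
 *   48-55  fps (little-endian double)
 *   56-59  video blocks       60-63  audio blocks
 *   64-67  text blocks        68-71  keyframe distance
 */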
/*
* frame header
*/
typedef struct {
gchar i_type;        /* 'A' audio, 'V' video, 'S' sync, 'T' test,
                      * 'R' seekpoint (string "RTjjjjjjjj"),
                      * 'D' extra data for the codec */
gchar i_compression; /* V: 0 uncompressed, 1 RTJpeg, 2 RTJpeg+lzo,
                      *    'N' black frame, 'L' copy last
                      * A: 0 uncompressed (44100 Hz, 16-bit, 2 ch),
                      *    1 lzo, 2 layer 2, 3 layer 3, 'F' flac,
                      *    'S' shorten, 'N' null (silent) frame,
                      *    'L' copy last
                      * S: 'B' audio and video sync point,
                      *    'A' audio sync info (timecode == effective
                      *        dsp frequency * 100),
                      *    'V' next video sync (timecode == next video
                      *        frame number),
                      *    'S' audio/video/text correlation */
gchar i_keyframe;    /* 0 means keyframe, anything else is not a keyframe */
guint8 i_filters;    /* 0x01: gauss 5 pixel (8,2,2,2,2)/16
                      * 0x02: gauss 5 pixel (8,1,1,1,1)/12
                      * 0x04: cartoon filter */
gint32 i_timecode;   /* in ms */
gint i_length;       /* V,A,T: length of the following data
                      * S: length of the packet correlation data */
} nuv_frame_header;
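/*
 * On-disk layout of the 12-byte frame header parsed by
 * gst_nuv_demux_frame_header_load() (integers little-endian):
 *   0  type          1  compression
 *   2  keyframe      3  filters
 *   4-7   timecode (ms)
 *   8-11  length of the data that follows
 */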
/*
* FIXME Not sure of this one
*/
typedef struct {
gint i_version;
guint32 i_video_fcc;
guint32 i_audio_fcc;
gint i_audio_sample_rate;
gint i_audio_bits_per_sample;
gint i_audio_channels;
gint i_audio_compression_ratio;
gint i_audio_quality;
gint i_rtjpeg_quality;
gint i_rtjpeg_luma_filter;
gint i_rtjpeg_chroma_filter;
gint i_lavc_bitrate;
gint i_lavc_qmin;
gint i_lavc_qmax;
gint i_lavc_maxqdiff;
gint64 i_seekable_offset;
gint64 i_keyframe_adjust_offset;
} nuv_extended_header;
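/*
 * The extended header is carried in a 512-byte 'X' frame; only its first
 * 76 bytes are parsed by gst_nuv_demux_extended_header_load()
 * (integers little-endian):
 *    0-3   version             4-7   video fourcc        8-11  audio fourcc
 *   12-15  audio sample rate  16-19  audio bits/sample   20-23  audio channels
 *   24-27  audio compr. ratio 28-31  audio quality
 *   32-35  rtjpeg quality     36-39  rtjpeg luma filter  40-43  rtjpeg chroma filter
 *   44-47  lavc bitrate       48-51  lavc qmin           52-55  lavc qmax
 *   56-59  lavc maxqdiff      60-67  seekable offset     68-75  keyframe adjust offset
 */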
typedef struct {
gint64 timecode;
gint64 offset;
} frame_index_data;
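/*
 * Demuxing state machine, driven by gst_nuv_demux_play():
 *   START -> HEADER_DATA -> EXTRA_DATA -> [MPEG_DATA] -> EXTEND_HEADER
 *   -> EXTEND_HEADER_DATA -> INDEX_CREATE -> FRAME_HEADER <-> MOVI
 * MPEG_DATA is only entered when the file carries ffmpeg extra data,
 * and the seek index is only built in pull mode.
 */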
typedef enum {
GST_NUV_DEMUX_START,
GST_NUV_DEMUX_HEADER_DATA,
GST_NUV_DEMUX_EXTRA_DATA,
GST_NUV_DEMUX_MPEG_DATA,
GST_NUV_DEMUX_EXTEND_HEADER,
GST_NUV_DEMUX_EXTEND_HEADER_DATA,
GST_NUV_DEMUX_INDEX_CREATE,
GST_NUV_DEMUX_FRAME_HEADER,
GST_NUV_DEMUX_MOVI,
GST_NUV_DEMUX_INVALID_DATA
} GstNuvDemuxState;
struct _GstNuvDemuxPrivate {
/*
 * current scheduling mode (push or pull)
 */
guint mode;
/*
 * adapter used in push mode
 */
GstAdapter *adapter;
/*
* pads
*/
GstPad *sinkpad;
GstPad *src_video_pad;
GstPad *src_audio_pad;
/*
* Flow control
*/
GstFlowReturn last_video_return;
GstFlowReturn last_audio_return;
gboolean more_data;
gboolean eos;
gboolean new_file;
guint segment;
/*
* NUV decoding state
*/
GstNuvDemuxState state;
guint64 offset;
/*
* duration information
*/
guint64 duration_bytes;
guint64 duration_time;
guint64 segment_stop;
guint64 segment_start;
/*
* segment control info
*/
gboolean new_audio_segment;
gboolean new_video_segment;
/*
* Mpeg ExtraData
*/
guint64 mpeg_data_size;
GstBuffer *mpeg_buffer;
/*
* Headers
*/
nuv_header h;
nuv_extended_header eh;
nuv_frame_header fh;
/*
 * other info
 */
guint64 header_lengh;
gint64 time_start;
gint64 time_diff;
gint64 time_qos;
guint64 last_frame_time;
GSList *index;
};
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS
("video/x-nuv"));
static GstStaticPadTemplate audio_src_template =
GST_STATIC_PAD_TEMPLATE("audio_src",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate video_src_template =
GST_STATIC_PAD_TEMPLATE("video_src",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY);
static void gst_nuv_demux_dispose(GObject * object);
static void gst_nuv_demux_finalize(GObject * object);
static GstStateChangeReturn gst_nuv_demux_change_state(GstElement *
element,
GstStateChange
transition);
static void gst_nuv_demux_loop(GstPad * pad);
static GstFlowReturn gst_nuv_demux_chain(GstPad * pad, GstBuffer * buf);
static GstFlowReturn gst_nuv_demux_play(GstPad * pad);
static gboolean gst_nuv_demux_sink_activate_pull(GstPad * sinkpad,
gboolean active);
static gboolean gst_nuv_demux_sink_activate_push(GstPad * pad,
gboolean active);
static gboolean gst_nuv_demux_sink_activate(GstPad * sinkpad);
static gboolean gst_nuv_demux_sink_event(GstPad * pad, GstEvent * event);
static gboolean gst_nuv_demux_srcpad_event(GstPad * pad, GstEvent * event);
static frame_index_data *gst_nuv_demux_do_seek_index(GstNuvDemux * nuv,
gint64 seek_pos,
gint64 segment_stop,
GstFormat format);
static GstFlowReturn gst_nuv_demux_move_bytes(GstNuvDemux * nuv,
guint64 size);
static GstFlowReturn gst_nuv_demux_read_bytes(GstNuvDemux * nuv,
guint64 size, gboolean move,
GstBuffer ** buffer);
static void gst_nuv_demux_reset(GstNuvDemux * nuv);
static void gst_nuv_demux_destoy_src_pad(GstNuvDemux * nuv);
static void gst_nuv_demux_send_eos(GstNuvDemux * nuv);
static void gst_nuv_demux_create_seek_index(GstNuvDemux * nuv);
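/*
 * gst_adapter_take_buffer() is only available in GStreamer core >= 0.10.6;
 * for older versions a local fallback is declared here and defined at the
 * bottom of this file.
 */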
#if (GST_VERSION_MINOR == 10) && (GST_VERSION_MICRO < 6)
GstBuffer *gst_adapter_take_buffer(GstAdapter * adapter,
guint nbytes);
#endif
GST_BOILERPLATE(GstNuvDemux, gst_nuv_demux, GstElement, GST_TYPE_ELEMENT);
/******************************************************************************
 * Utility functions
 ******************************************************************************/
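/*
 * NUV stores the aspect ratio and fps as little-endian doubles;
 * READ_DOUBLE_FROM_LE byte-swaps them on big-endian hosts.
 */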
#if G_BYTE_ORDER == G_BIG_ENDIAN
static inline gdouble
_gdouble_swap_le_be(gdouble * d)
{
union {
guint64 i;
gdouble d;
} u;
u.d = *d;
u.i = GUINT64_SWAP_LE_BE(u.i);
return u.d;
}
#define READ_DOUBLE_FROM_LE(d) (_gdouble_swap_le_be((gdouble* ) d))
#else /* G_BYTE_ORDER != G_BIG_ENDIAN */
#define READ_DOUBLE_FROM_LE(d) *((gdouble* ) (d))
#endif /* G_BYTE_ORDER != G_BIG_ENDIAN */
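/*
 * double2fraction() maps a floating-point frame rate to an integer
 * fraction, e.g. 29.97 -> 30000/1001, 23.976 -> 24000/1001,
 * 25.0 -> 25/1, 12.5 -> 125/10.
 */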
static void
double2fraction(double in, int *num, int *denom)
{
if (in == 29.97) {
*num = 30000;
*denom = 1001;
} else if (in == 23.976) {
*num = 24000;
*denom = 1001;
} else {
*denom = 1;
while (in - floor(in) >= 0.1) {
*denom *= 10;
in *= 10.0;
}
*num = (int) floor(in);
}
}
/*
* GObject Functions
*/
static void
gst_nuv_demux_base_init(gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS(klass);
gst_element_class_add_pad_template(element_class,
gst_static_pad_template_get
(&audio_src_template));
gst_element_class_add_pad_template(element_class,
gst_static_pad_template_get
(&video_src_template));
gst_element_class_add_pad_template(element_class,
gst_static_pad_template_get
(&sink_template));
gst_element_class_set_details(element_class, &gst_nuv_demux_details);
}
static void
gst_nuv_demux_class_init(GstNuvDemuxClass * klass)
{
GstElementClass *gstelement_class = GST_ELEMENT_CLASS(klass);
GObjectClass *gobject_class = (GObjectClass *) klass;
GST_DEBUG_CATEGORY_INIT(nuvdemux_debug, "nuvdemux",
0, "Demuxer for NUV streams");
parent_class = g_type_class_peek_parent(klass);
gobject_class->dispose = gst_nuv_demux_dispose;
gobject_class->finalize = gst_nuv_demux_finalize;
gstelement_class->change_state = gst_nuv_demux_change_state;
g_type_class_add_private(gobject_class, sizeof(GstNuvDemuxPrivate));
}
static void
gst_nuv_demux_init(GstNuvDemux * nuv, GstNuvDemuxClass * nuv_class)
{
nuv->priv = GST_NUV_DEMUX_GET_PRIVATE(nuv);
nuv->priv->sinkpad =
gst_pad_new_from_static_template(&sink_template, "sink");
/*
* creating adapter
*/
nuv->priv->mode = NUV_PUSH_MODE;
nuv->priv->adapter = gst_adapter_new();
nuv->priv->new_audio_segment = TRUE;
nuv->priv->new_video_segment = TRUE;
gst_pad_set_activate_function(nuv->priv->sinkpad,
gst_nuv_demux_sink_activate);
gst_pad_set_activatepull_function(nuv->priv->sinkpad,
gst_nuv_demux_sink_activate_pull);
gst_pad_set_activatepush_function(nuv->priv->sinkpad,
gst_nuv_demux_sink_activate_push);
gst_pad_set_chain_function(nuv->priv->sinkpad,
GST_DEBUG_FUNCPTR(gst_nuv_demux_chain));
gst_pad_set_event_function(nuv->priv->sinkpad,
GST_DEBUG_FUNCPTR
(gst_nuv_demux_sink_event));
gst_element_add_pad(GST_ELEMENT(nuv), nuv->priv->sinkpad);
}
static void
gst_nuv_demux_dispose(GObject * object)
{
GstNuvDemux *nuv = GST_NUV_DEMUX(object);
if (nuv->priv->mpeg_buffer != NULL) {
gst_buffer_unref(nuv->priv->mpeg_buffer);
}
gst_nuv_demux_reset(GST_NUV_DEMUX(object));
gst_nuv_demux_destoy_src_pad(GST_NUV_DEMUX(object));
if (nuv->priv->adapter != NULL) {
gst_object_unref(nuv->priv->adapter);
}
}
static void
gst_nuv_demux_finalize(GObject * object)
{
G_OBJECT_CLASS(parent_class)->finalize(object);
}
/*
* HeaderLoad:
*/
static GstFlowReturn
gst_nuv_demux_header_load(GstNuvDemux * nuv, nuv_header * h)
{
GstBuffer *buffer = NULL;
GstFlowReturn res = gst_nuv_demux_read_bytes(nuv, 72, TRUE, &buffer);
if ((res != GST_FLOW_OK) || (buffer == NULL)) {
goto done;
}
if (h != NULL) {
memcpy(h->id, buffer->data, 12);
memcpy(h->version, buffer->data + 12, 5);
h->i_width = GST_READ_UINT32_LE(&buffer->data[20]);
h->i_height = GST_READ_UINT32_LE(&buffer->data[24]);
h->i_width_desired = GST_READ_UINT32_LE(&buffer->data[28]);
h->i_height_desired = GST_READ_UINT32_LE(&buffer->data[32]);
h->i_mode = GPOINTER_TO_INT(buffer->data[36]);
h->d_aspect = READ_DOUBLE_FROM_LE(&buffer->data[40]);
h->d_fps = READ_DOUBLE_FROM_LE(&buffer->data[48]);
/*
* get the num and denom values from fps
*/
double2fraction(h->d_fps, &h->i_fpsn, &h->i_fpsd);
h->i_video_blocks = GST_READ_UINT32_LE(&buffer->data[56]);
h->i_audio_blocks = GST_READ_UINT32_LE(&buffer->data[60]);
h->i_text_blocks = GST_READ_UINT32_LE(&buffer->data[64]);
h->i_keyframe_distance = GST_READ_UINT32_LE(&buffer->data[68]);
GST_DEBUG_OBJECT(nuv,
"nuv: h=%s v=%s %dx%d a=%f fps=%f v=%d a=%d t=%d kfd=%d",
h->id, h->version, h->i_width, h->i_height,
h->d_aspect, h->d_fps, h->i_video_blocks,
h->i_audio_blocks, h->i_text_blocks,
h->i_keyframe_distance);
}
done:
if (buffer != NULL) {
gst_buffer_unref(buffer);
buffer = NULL;
}
return res;
}
static GstFlowReturn
gst_nuv_demux_stream_header_data(GstNuvDemux * nuv)
{
GstFlowReturn res;
if (nuv->priv->new_file)
res = gst_nuv_demux_header_load(nuv, NULL);
else
res = gst_nuv_demux_header_load(nuv, &nuv->priv->h);
if (res == GST_FLOW_OK)
nuv->priv->state = GST_NUV_DEMUX_EXTRA_DATA;
return res;
}
/*
* Read NUV file tag
*/
static GstFlowReturn
gst_nuv_demux_stream_file_header(GstNuvDemux * nuv)
{
GstFlowReturn res = GST_FLOW_OK;
GstBuffer *file_header = NULL;
res = gst_nuv_demux_read_bytes(nuv, 12, FALSE, &file_header);
if (res == GST_FLOW_OK) {
if ((strncmp((gchar *) file_header->data, "MythTVVideo", 11) == 0) ||
    (strncmp((gchar *) file_header->data, "NuppelVideo", 11) == 0)) {
  nuv->priv->state = GST_NUV_DEMUX_HEADER_DATA;
} else {
GST_DEBUG_OBJECT(nuv, "error parsing file header");
nuv->priv->state = GST_NUV_DEMUX_INVALID_DATA;
res = GST_FLOW_ERROR;
}
}
if (file_header != NULL) {
gst_buffer_unref(file_header);
file_header = NULL;
}
return res;
}
/*
* FrameHeaderLoad:
*/
static GstFlowReturn
gst_nuv_demux_frame_header_load(GstNuvDemux * nuv, nuv_frame_header * h)
{
unsigned char *data;
GstBuffer *buf = NULL;
GstFlowReturn res = gst_nuv_demux_read_bytes(nuv, 12, TRUE, &buf);
if ((res != GST_FLOW_OK) || (buf == NULL)) {
goto done;
}
if (h == NULL)
goto done;
data = buf->data;
h->i_type = GPOINTER_TO_INT(data[0]);
h->i_compression = GPOINTER_TO_INT(data[1]);
h->i_keyframe = GPOINTER_TO_INT(data[2]);
h->i_filters = GPOINTER_TO_INT(data[3]);
h->i_timecode = GST_READ_UINT32_LE(&data[4]);
h->i_length = GST_READ_UINT32_LE(&data[8]);
GST_DEBUG_OBJECT(nuv,
"frame hdr: t=%c c=%c k=%d f=0x%x timecode=%d l=%d",
h->i_type, h->i_compression ? h->i_compression : ' ',
h->i_keyframe ? h->i_keyframe : ' ', h->i_filters,
h->i_timecode, h->i_length);
done:
if (buf != NULL) {
gst_buffer_unref(buf);
buf = NULL;
}
return res;
}
static GstFlowReturn
gst_nuv_demux_extended_header_load(GstNuvDemux * nuv,
nuv_extended_header * h)
{
unsigned char *data;
GstBuffer *buff = NULL;
GstFlowReturn res = gst_nuv_demux_read_bytes(nuv, 512, TRUE, &buff);
if ((res != GST_FLOW_OK) || (buff == NULL)) {
goto done;
}
if (h == NULL)
goto done;
data = buff->data;
h->i_version = GST_READ_UINT32_LE(&data[0]);
h->i_video_fcc = GST_MAKE_FOURCC(data[4], data[5], data[6], data[7]);
h->i_audio_fcc = GST_MAKE_FOURCC(data[8], data[9], data[10], data[11]);
h->i_audio_sample_rate = GST_READ_UINT32_LE(&data[12]);
h->i_audio_bits_per_sample = GST_READ_UINT32_LE(&data[16]);
h->i_audio_channels = GST_READ_UINT32_LE(&data[20]);
h->i_audio_compression_ratio = GST_READ_UINT32_LE(&data[24]);
h->i_audio_quality = GST_READ_UINT32_LE(&data[28]);
h->i_rtjpeg_quality = GST_READ_UINT32_LE(&data[32]);
h->i_rtjpeg_luma_filter = GST_READ_UINT32_LE(&data[36]);
h->i_rtjpeg_chroma_filter = GST_READ_UINT32_LE(&data[40]);
h->i_lavc_bitrate = GST_READ_UINT32_LE(&data[44]);
h->i_lavc_qmin = GST_READ_UINT32_LE(&data[48]);
h->i_lavc_qmax = GST_READ_UINT32_LE(&data[52]);
h->i_lavc_maxqdiff = GST_READ_UINT32_LE(&data[56]);
h->i_seekable_offset = GST_READ_UINT64_LE(&data[60]);
h->i_keyframe_adjust_offset = GST_READ_UINT64_LE(&data[68]);
GST_DEBUG_OBJECT(nuv,
"ex hdr: v=%d vffc=%4.4s afcc=%4.4s %dHz %dbits ach=%d acr=%d aq=%d"
"rtjpeg q=%d lf=%d lc=%d lavc br=%d qmin=%d qmax=%d maxqdiff=%d seekableoff=%lld keyfao=%lld",
h->i_version, (gchar *) & h->i_video_fcc,
(gchar *) & h->i_audio_fcc, h->i_audio_sample_rate,
h->i_audio_bits_per_sample, h->i_audio_channels,
h->i_audio_compression_ratio, h->i_audio_quality,
h->i_rtjpeg_quality, h->i_rtjpeg_luma_filter,
h->i_rtjpeg_chroma_filter, h->i_lavc_bitrate,
h->i_lavc_qmin, h->i_lavc_qmax, h->i_lavc_maxqdiff,
h->i_seekable_offset, h->i_keyframe_adjust_offset);
done:
if (buff != NULL) {
gst_buffer_unref(buff);
buff = NULL;
}
return res;
}
/*
* Query Functions
*/
static const GstQueryType *
gst_nuv_demux_get_src_query_types(GstPad * pad)
{
static const GstQueryType src_types[] = {
GST_QUERY_POSITION,
GST_QUERY_DURATION,
0
};
return src_types;
}
static gboolean
gst_nuv_demux_handle_src_query(GstPad * pad, GstQuery * query)
{
gboolean res = FALSE;
GstNuvDemux *nuv = GST_NUV_DEMUX(gst_pad_get_parent(pad));
switch (GST_QUERY_TYPE(query)) {
case GST_QUERY_POSITION:
{
GstFormat format;
gst_query_parse_position(query, &format, NULL);
switch (format) {
case GST_FORMAT_TIME:
if (GST_CLOCK_TIME_IS_VALID(nuv->priv->last_frame_time)) {
gst_query_set_position(query, GST_FORMAT_TIME,
nuv->priv->last_frame_time);
res = TRUE;
}
break;
default:
break;
}
break;
}
case GST_QUERY_DURATION:
{
GstFormat format;
gst_query_parse_duration(query, &format, NULL);
switch (format) {
case GST_FORMAT_TIME:
if (nuv->priv->duration_time != GST_CLOCK_TIME_NONE) {
gst_query_set_duration(query, GST_FORMAT_TIME,
nuv->priv->duration_time);
res = TRUE;
}
break;
default:
break;
}
break;
}
default:
break;
}
if (res == FALSE) {
res = gst_pad_query_default(pad, query);
}
gst_object_unref(nuv);
return res;
}
static GstPad *
gst_nuv_demux_create_pad(GstNuvDemux * nuv, GstCaps * caps,
GstStaticPadTemplate * template,
const gchar * name)
{
GstPad *pad = NULL;
pad = gst_pad_new_from_static_template(template, name);
gst_pad_set_caps(pad, caps);
gst_pad_set_active(pad, TRUE);
gst_pad_use_fixed_caps(pad);
gst_element_add_pad(GST_ELEMENT(nuv), pad);
gst_pad_set_event_function(pad,
GST_DEBUG_FUNCPTR
(gst_nuv_demux_srcpad_event));
gst_pad_set_query_type_function(pad,
GST_DEBUG_FUNCPTR
(gst_nuv_demux_get_src_query_types));
gst_pad_set_query_function(pad,
GST_DEBUG_FUNCPTR
(gst_nuv_demux_handle_src_query));
return pad;
}
static void
gst_nuv_demux_create_pads(GstNuvDemux * nuv)
{
if ((nuv->priv->src_video_pad != NULL) ||
(nuv->priv->src_audio_pad != NULL)) {
return;
}
if (nuv->priv->h.i_video_blocks != 0){
GstCaps *video_caps = NULL;
video_caps = gst_caps_new_simple("video/x-divx",
"divxversion", G_TYPE_INT, 4,
"width", G_TYPE_INT,
nuv->priv->h.i_width, "height",
G_TYPE_INT, nuv->priv->h.i_height,
"framerate", GST_TYPE_FRACTION,
nuv->priv->h.i_fpsn,
nuv->priv->h.i_fpsd, "format",
GST_TYPE_FOURCC,
nuv->priv->eh.i_video_fcc,
"pixel-aspect-ratio",
GST_TYPE_FRACTION,
(gint) (nuv->priv->h.d_aspect *
1000.0f), 1000, NULL);
nuv->priv->src_video_pad =
gst_nuv_demux_create_pad(nuv, video_caps, &video_src_template,
"video_src");
gst_caps_unref(video_caps);
}
if (nuv->priv->h.i_audio_blocks != 0) {
GstCaps *audio_caps = NULL;
audio_caps = gst_caps_new_simple("audio/mpeg",
"rate", G_TYPE_INT, nuv->priv->eh.i_audio_sample_rate,
"format", GST_TYPE_FOURCC, nuv->priv->eh.i_audio_fcc,
"channels", G_TYPE_INT, nuv->priv->eh.i_audio_channels,
"layer", G_TYPE_INT, 3, // fixme: magic number
"mpegversion", G_TYPE_INT, nuv->priv->eh.i_version,
NULL);
nuv->priv->src_audio_pad =
gst_nuv_demux_create_pad(nuv, audio_caps, &audio_src_template,
"audio_src");
gst_caps_unref(audio_caps);
}
gst_element_no_more_pads(GST_ELEMENT(nuv));
}
static gboolean
gst_nuv_demux_validate_header(nuv_frame_header * h)
{
gboolean valid = FALSE;
// g_usleep (1 * G_USEC_PER_SEC );
switch (h->i_type) {
/*
* case 'V': if (h->i_compression == 0 || h->i_compression == 1 ||
* h->i_compression == 2 || h->i_compression == 'N' ||
* h->i_compression == 'L') { valid = TRUE; } break; case 'A': if
* (h->i_compression == 0 || h->i_compression == 1 ||
* h->i_compression == 2 || h->i_compression == 3 ||
* h->i_compression == 'F' || h->i_compression == 'S' ||
* h->i_compression == 'N' || h->i_compression == 'L') { valid =
* TRUE; } break; case 'S': if (h->i_compression == 'B' ||
* h->i_compression == 'A' || h->i_compression == 'V' ||
* h->i_compression == 'S') { valid = TRUE; } break;
*/
case 'A':
case 'V':
case 'S':
case 'R':
case 'D':
case 'Q':
valid = TRUE;
break;
default:
valid = FALSE;
}
return valid;
}
static GstFlowReturn
gst_nuv_demux_read_head_frame(GstNuvDemux * nuv)
{
GstFlowReturn ret = GST_FLOW_OK;
gboolean valid = FALSE;
do {
ret = gst_nuv_demux_frame_header_load(nuv, &nuv->priv->fh);
if (ret != GST_FLOW_OK) {
return ret;
}
if (gst_nuv_demux_validate_header(&nuv->priv->fh) == TRUE)
valid = TRUE;
}
while (valid == FALSE);
nuv->priv->state = GST_NUV_DEMUX_MOVI;
return ret;
}
static gboolean
gst_nuv_combine_flow(GstNuvDemux * nuv)
{
GstFlowReturn ret_video = nuv->priv->last_video_return;
GstFlowReturn ret_audio = nuv->priv->last_audio_return;
if ((ret_video != GST_FLOW_OK) && (ret_audio != GST_FLOW_OK))
return FALSE;
if (GST_FLOW_IS_FATAL(ret_video))
return FALSE;
if (GST_FLOW_IS_FATAL(ret_audio))
return FALSE;
return TRUE;
}
static GstFlowReturn
gst_nuv_demux_stream_data(GstNuvDemux * nuv)
{
GstFlowReturn ret = GST_FLOW_OK;
GstPad *pad = NULL;
guint64 timestamp;
GstBuffer *buf = NULL;
nuv_frame_header h;
h = nuv->priv->fh;
if (h.i_type == 'R') {
goto done;
}
if (h.i_length > 0) {
ret = gst_nuv_demux_read_bytes(nuv, h.i_length, TRUE, &buf);
if ((ret != GST_FLOW_OK) || (buf == NULL)) {
goto done;
}
if ((h.i_timecode < 0)) {
h.i_timecode = 0;
// goto done;
}
timestamp = h.i_timecode * GST_MSECOND;
GST_BUFFER_TIMESTAMP(buf) = timestamp;
} else {
goto done;
}
switch (h.i_type) {
case 'V':
{
pad = nuv->priv->src_video_pad;
if (nuv->priv->new_video_segment) {
/*
* send new segment event
*/
gst_pad_push_event(nuv->priv->src_video_pad,
gst_event_new_new_segment(TRUE, 1.0,
GST_FORMAT_TIME,
0,
GST_CLOCK_TIME_NONE,
0));
if (nuv->priv->time_start == GST_CLOCK_TIME_NONE) {
nuv->priv->time_start = timestamp;
}
nuv->priv->new_video_segment = FALSE;
}
break;
}
case 'A':
{
pad = nuv->priv->src_audio_pad;
if (nuv->priv->new_audio_segment) {
/*
* send new segment event
*/
gst_pad_push_event(nuv->priv->src_audio_pad,
gst_event_new_new_segment(TRUE, 1.0,
GST_FORMAT_TIME,
0,
GST_CLOCK_TIME_NONE,
0));
if (nuv->priv->time_start == GST_CLOCK_TIME_NONE) {
nuv->priv->time_start = timestamp;
}
nuv->priv->new_audio_segment = FALSE;
}
break;
}
case 'S':
{
switch (h.i_compression) {
case 'V':
GST_DEBUG_OBJECT(nuv, "sending new video segment: %d",
h.i_timecode);
gst_pad_push_event(nuv->priv->src_video_pad,
gst_event_new_new_segment(TRUE, 1.0,
GST_FORMAT_TIME,
h.i_timecode *
GST_MSECOND,
GST_CLOCK_TIME_NONE,
0));
break;
case 'A':
GST_DEBUG_OBJECT(nuv, "sending new audio segment: %d",
h.i_timecode);
gst_pad_push_event(nuv->priv->src_audio_pad,
gst_event_new_new_segment(TRUE, 1.0,
GST_FORMAT_TIME,
0,
GST_CLOCK_TIME_NONE,
0));
break;
default:
break;
}
goto done;
}
default:
break;
}
if ((buf != NULL) && (pad != NULL)) {
/*
* pushing the buffer
*/
gst_buffer_set_caps(buf, GST_PAD_CAPS(pad));
ret = gst_pad_push(pad, buf);
buf = NULL;
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT(nuv, "error: %d pushing on srcpad %s", ret,
gst_pad_get_name(pad));
if (pad == nuv->priv->src_video_pad) {
nuv->priv->last_video_return = ret;
} else if (pad == nuv->priv->src_audio_pad) {
nuv->priv->last_audio_return = ret;
}
/*
 * check the other stream's flow return; only stop the task when both
 * streams have failed
 */
if (gst_nuv_combine_flow(nuv) != FALSE) {
ret = GST_FLOW_OK;
} else {
GST_WARNING_OBJECT(nuv, "error: on push");
}
}
}
done:
if (buf != NULL) {
gst_buffer_unref(buf);
buf = NULL;
}
if (ret == GST_FLOW_OK) {
nuv->priv->state = GST_NUV_DEMUX_FRAME_HEADER;
memset(&nuv->priv->fh, 0, sizeof(nuv->priv->fh));
}
return ret;
}
static GstFlowReturn
gst_nuv_demux_stream_mpeg_data(GstNuvDemux * nuv)
{
GstFlowReturn ret = GST_FLOW_OK;
/*
* ffmpeg extra data
*/
if (nuv->priv->new_file) {
  GstBuffer *buf = NULL;
  ret =
      gst_nuv_demux_read_bytes(nuv, nuv->priv->mpeg_data_size, TRUE,
                               &buf);
  if (buf != NULL)
    gst_buffer_unref(buf);
} else {
ret =
gst_nuv_demux_read_bytes(nuv, nuv->priv->mpeg_data_size, TRUE,
&nuv->priv->mpeg_buffer);
}
if ((ret != GST_FLOW_OK) || (nuv->priv->mpeg_buffer == NULL)) {
return ret;
}
GST_BUFFER_SIZE(nuv->priv->mpeg_buffer) = nuv->priv->mpeg_data_size;
nuv->priv->state = GST_NUV_DEMUX_EXTEND_HEADER;
return ret;
}
static GstFlowReturn
gst_nuv_demux_stream_extra_data(GstNuvDemux * nuv)
{
GstFlowReturn ret = GST_FLOW_OK;
/*
* Load 'D'
*/
nuv_frame_header h;
/*
 * the header is only used locally, so parse it into the local struct even
 * when re-reading the headers of a new file
 */
ret = gst_nuv_demux_frame_header_load(nuv, &h);
if (ret != GST_FLOW_OK)
  return ret;
if (h.i_type != 'D') {
  GST_WARNING_OBJECT(nuv, "Unsupported rtjpeg");
  return GST_FLOW_NOT_SUPPORTED;
}
if (h.i_length > 0) {
  if (h.i_compression == 'F') {
    /* remember the size of the ffmpeg extra data that follows */
    nuv->priv->mpeg_data_size = h.i_length;
    nuv->priv->state = GST_NUV_DEMUX_MPEG_DATA;
  } else {
    GST_WARNING_OBJECT(nuv,
                       "only files with extended chunks are supported");
    return GST_FLOW_NOT_SUPPORTED;
  }
} else {
  nuv->priv->state = GST_NUV_DEMUX_EXTEND_HEADER;
}
return ret;
}
static GstFlowReturn
gst_nuv_demux_stream_extend_header_data(GstNuvDemux * nuv)
{
GstFlowReturn ret = GST_FLOW_OK;
if (nuv->priv->new_file)
ret = gst_nuv_demux_extended_header_load(nuv, NULL);
else {
ret = gst_nuv_demux_extended_header_load(nuv, &nuv->priv->eh);
if (ret != GST_FLOW_OK)
return ret;
gst_nuv_demux_create_pads(nuv);
}
nuv->priv->state = GST_NUV_DEMUX_INDEX_CREATE;
return ret;
}
static GstFlowReturn
gst_nuv_demux_stream_extend_header(GstNuvDemux * nuv)
{
GstBuffer *buf = NULL;
GstFlowReturn res = GST_FLOW_OK;
res = gst_nuv_demux_read_bytes(nuv, 1, FALSE, &buf);
if ((res != GST_FLOW_OK) || (buf == NULL)) {
if (buf != NULL) {
gst_buffer_unref(buf);
}
return res;
}
if (buf->data[0] == 'X') {
  nuv_frame_header h;
  gst_buffer_unref(buf);
  buf = NULL;
  res = gst_nuv_demux_frame_header_load(nuv, &h);
  if (res != GST_FLOW_OK)
    return res;
  if (h.i_length != 512) {
    return GST_FLOW_ERROR;
  }
  nuv->priv->state = GST_NUV_DEMUX_EXTEND_HEADER_DATA;
} else {
  nuv->priv->state = GST_NUV_DEMUX_INVALID_DATA;
  /* GstBuffer is a mini object, so gst_buffer_unref(), not g_object_unref() */
  gst_buffer_unref(buf);
  GST_ELEMENT_WARNING(nuv, STREAM, FAILED,
                      (_("incomplete NUV support")),
                      ("incomplete NUV support"));
  return GST_FLOW_ERROR;
}
return res;
}
static void
gst_nuv_demux_create_seek_index(GstNuvDemux * nuv)
{
GstMessage *msg;
nuv_frame_header h;
while (gst_nuv_demux_frame_header_load(nuv, &h) == GST_FLOW_OK) {
if ((h.i_type == 'V') && (h.i_keyframe == 0)) {
frame_index_data *f = g_new0(frame_index_data, 1);
f->offset = nuv->priv->offset - 12;
f->timecode = h.i_timecode * GST_MSECOND;
nuv->priv->index = g_slist_append(nuv->priv->index, f);
}
if (h.i_type != 'R') {
nuv->priv->offset += h.i_length;
if (h.i_type == 'A' || h.i_type == 'V')
nuv->priv->duration_time = h.i_timecode * GST_MSECOND;
}
}
GST_DEBUG_OBJECT(nuv,
                 "index created: scanned %" G_GUINT64_FORMAT
                 " bytes, duration %" G_GUINT64_FORMAT " ns",
                 nuv->priv->offset, nuv->priv->duration_time);
nuv->priv->duration_bytes = nuv->priv->offset;
nuv->priv->offset = nuv->priv->header_lengh;
msg =
gst_message_new_duration(GST_OBJECT(nuv), GST_FORMAT_TIME,
nuv->priv->duration_time);
gst_element_post_message(GST_ELEMENT(nuv), msg);
}
static GstFlowReturn
gst_nuv_demux_play(GstPad * pad)
{
GstFlowReturn res = GST_FLOW_OK;
GstNuvDemux *nuv = GST_NUV_DEMUX(GST_PAD_PARENT(pad));
switch (nuv->priv->state) {
case GST_NUV_DEMUX_START:
res = gst_nuv_demux_stream_file_header(nuv);
if ((res != GST_FLOW_OK) && (res != GST_FLOW_ERROR_NO_DATA)) {
goto pause;
}
break;
case GST_NUV_DEMUX_HEADER_DATA:
res = gst_nuv_demux_stream_header_data(nuv);
if ((res != GST_FLOW_OK) && (res != GST_FLOW_ERROR_NO_DATA)) {
goto pause;
}
break;
case GST_NUV_DEMUX_EXTRA_DATA:
res = gst_nuv_demux_stream_extra_data(nuv);
if ((res != GST_FLOW_OK) && (res != GST_FLOW_ERROR_NO_DATA)) {
goto pause;
}
break;
case GST_NUV_DEMUX_MPEG_DATA:
res = gst_nuv_demux_stream_mpeg_data(nuv);
if ((res != GST_FLOW_OK) && (res != GST_FLOW_ERROR_NO_DATA)) {
goto pause;
}
break;
case GST_NUV_DEMUX_EXTEND_HEADER:
res = gst_nuv_demux_stream_extend_header(nuv);
if ((res != GST_FLOW_OK) && (res != GST_FLOW_ERROR_NO_DATA)) {
goto pause;
}
break;
case GST_NUV_DEMUX_EXTEND_HEADER_DATA:
res = gst_nuv_demux_stream_extend_header_data(nuv);
if ((res != GST_FLOW_OK) && (res != GST_FLOW_ERROR_NO_DATA)) {
goto pause;
}
// store file header size
nuv->priv->header_lengh = nuv->priv->offset;
break;
case GST_NUV_DEMUX_INDEX_CREATE:
if ((nuv->priv->mode == NUV_PULL_MODE) && (!nuv->priv->new_file)) {
gst_nuv_demux_create_seek_index(nuv);
}
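/* fall through: start reading frame headers right away */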
case GST_NUV_DEMUX_FRAME_HEADER:
res = gst_nuv_demux_read_head_frame(nuv);
if ((res != GST_FLOW_OK) && (res != GST_FLOW_ERROR_NO_DATA)) {
goto pause;
}
break;
case GST_NUV_DEMUX_MOVI:
res = gst_nuv_demux_stream_data(nuv);
if ((res != GST_FLOW_OK) && (res != GST_FLOW_ERROR_NO_DATA)) {
goto pause;
}
break;
case GST_NUV_DEMUX_INVALID_DATA:
goto pause;
break;
default:
g_assert_not_reached();
}
return GST_FLOW_OK;
pause:
GST_LOG_OBJECT(nuv, "pausing task, reason %s", gst_flow_get_name(res));
gst_pad_pause_task(nuv->priv->sinkpad);
if (res == GST_FLOW_ERROR_EOS) {
gst_nuv_demux_send_eos(nuv);
nuv->priv->eos = TRUE;
res = GST_FLOW_OK;
}
if (GST_FLOW_IS_FATAL(res)) {
GST_ELEMENT_ERROR(nuv, STREAM, FAILED,
(_("Internal data stream error.")),
("streaming stopped, reason %s",
gst_flow_get_name(res)));
gst_nuv_demux_send_eos(nuv);
}
return res;
}
static void
gst_nuv_demux_send_eos(GstNuvDemux * nuv)
{
gst_element_post_message(GST_ELEMENT(nuv),
gst_message_new_segment_done(GST_OBJECT(nuv),
GST_FORMAT_TIME,
-1));
if (nuv->priv->src_video_pad)
gst_pad_push_event(nuv->priv->src_video_pad, gst_event_new_eos());
if (nuv->priv->src_audio_pad)
gst_pad_push_event(nuv->priv->src_audio_pad, gst_event_new_eos());
}
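/*
 * Reads 'size' bytes into *buffer. In pull mode data is pulled from the
 * current offset (which only advances when 'move' is TRUE); in push mode
 * the bytes are taken (move) or peeked (no move) from the adapter, and
 * GST_FLOW_ERROR_NO_DATA is returned when not enough data is buffered yet.
 */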
static GstFlowReturn
gst_nuv_demux_read_bytes(GstNuvDemux * nuv, guint64 size, gboolean move,
GstBuffer ** buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
if (size == 0) {
return ret;
}
if (nuv->priv->mode == NUV_PULL_MODE) {
ret =
gst_pad_pull_range(nuv->priv->sinkpad, nuv->priv->offset, size,
buffer);
if (ret == GST_FLOW_OK) {
if (move) {
nuv->priv->offset += size;
}
/*
* got eos
*/
} else if (ret == GST_FLOW_UNEXPECTED) {
return GST_FLOW_ERROR_EOS;
}
} else {
if (gst_adapter_available(nuv->priv->adapter) < size) {
nuv->priv->more_data = TRUE;
return GST_FLOW_ERROR_NO_DATA;
}
if (move) {
*buffer = gst_adapter_take_buffer(nuv->priv->adapter, size);
} else {
guint8 *data = NULL;
data = (guint8 *) gst_adapter_peek(nuv->priv->adapter, size);
*buffer = gst_buffer_new();
gst_buffer_set_data(*buffer, data, size);
}
}
return ret;
}
static GstFlowReturn
gst_nuv_demux_move_bytes(GstNuvDemux * nuv, guint64 size)
{
GstFlowReturn ret = GST_FLOW_OK;
if (size == 0) {
return ret;
}
if (nuv->priv->mode == NUV_PULL_MODE) {
nuv->priv->offset += size;
} else {
if (gst_adapter_available(nuv->priv->adapter) < size) {
nuv->priv->more_data = TRUE;
return GST_FLOW_ERROR_NO_DATA;
}
gst_adapter_flush(nuv->priv->adapter, size);
}
return ret;
}
static gboolean
gst_nuv_demux_sink_activate(GstPad * sinkpad)
{
gboolean res = TRUE;
GstNuvDemux *nuv = GST_NUV_DEMUX(gst_pad_get_parent(sinkpad));
if (gst_pad_check_pull_range(sinkpad)) {
gst_adapter_clear(nuv->priv->adapter);
res = gst_pad_activate_pull(sinkpad, TRUE);
} else {
gst_adapter_clear(nuv->priv->adapter);
res = gst_pad_activate_push(sinkpad, TRUE);
}
g_object_unref(nuv);
return res;
}
static gboolean
gst_nuv_demux_sink_activate_pull(GstPad * sinkpad, gboolean active)
{
GstNuvDemux *nuv = GST_NUV_DEMUX(gst_pad_get_parent(sinkpad));
if (active) {
GST_DEBUG_OBJECT(nuv, "activating pull function");
nuv->priv->mode = NUV_PULL_MODE;
gst_adapter_clear(nuv->priv->adapter);
gst_pad_start_task(sinkpad, (GstTaskFunction) gst_nuv_demux_loop,
sinkpad);
} else {
GST_DEBUG_OBJECT(nuv, "deactivating pull function");
gst_pad_stop_task(sinkpad);
}
gst_object_unref(nuv);
return TRUE;
}
static gboolean
gst_nuv_demux_sink_activate_push(GstPad * pad, gboolean active)
{
GstNuvDemux *nuv = GST_NUV_DEMUX(gst_pad_get_parent(pad));
if (active) {
nuv->priv->mode = NUV_PUSH_MODE;
gst_adapter_clear(nuv->priv->adapter);
GST_DEBUG_OBJECT(nuv, "activating push/chain function");
} else {
GST_DEBUG_OBJECT(nuv, "deactivating push/chain function");
}
gst_object_unref(nuv);
return TRUE;
}
static frame_index_data *
gst_nuv_demux_do_seek_index(GstNuvDemux * nuv, gint64 seek_pos,
gint64 segment_stop, GstFormat format)
{
GSList *l;
frame_index_data *ret = NULL;
if (nuv->priv->index == NULL) {
return NULL;
}
/*
* find keyframe closest to the requested position
*/
for (l = nuv->priv->index; l != NULL; l = l->next) {
frame_index_data *f = (frame_index_data *) l->data;
gint64 pos = 0;
if (format == GST_FORMAT_BYTES) {
pos = f->offset;
} else if (format == GST_FORMAT_TIME) {
pos = f->timecode;
} else {
return NULL;
}
if (pos >= seek_pos) {
ret = f;
break;
}
if ((segment_stop != -1) && (segment_stop != GST_CLOCK_TIME_NONE)
&& (pos > segment_stop)) {
break;
}
}
return ret;
}
static gboolean
gst_nuv_demux_do_seek(GstNuvDemux * nuv, GstEvent * event)
{
gdouble rate;
GstFormat format;
GstSeekFlags flags;
GstSeekType cur_type;
gint64 cur;
GstSeekType stop_type;
gint64 stop;
gboolean flush;
frame_index_data *entry;
gint64 segment_start;
gint64 segment_stop;
GstEvent *newsegment_event;
if (nuv->priv->eos) {
return FALSE;
}
if (nuv->priv->mode == NUV_PUSH_MODE) {
return FALSE;
}
gst_event_parse_seek(event, &rate, &format, &flags,
&cur_type, &cur, &stop_type, &stop);
/*
* if (format == GST_FORMAT_TIME) { GST_DEBUG_OBJECT (nuv, "Can only
* seek on BYTES"); return FALSE; }
*/
if (rate <= 0.0) {
GST_DEBUG_OBJECT(nuv, "Can only seek with positive rate");
return FALSE;
}
if (cur_type == GST_SEEK_TYPE_SET) {
GST_OBJECT_LOCK(nuv);
if (gst_nuv_demux_do_seek_index(nuv, cur, -1, format) == NULL) {
GST_DEBUG_OBJECT(nuv, "No matching seek entry in index");
GST_OBJECT_UNLOCK(nuv);
return FALSE;
}
GST_OBJECT_UNLOCK(nuv);
}
flush = !!(flags & GST_SEEK_FLAG_FLUSH);
if (flush) {
gst_pad_push_event(nuv->priv->sinkpad,
gst_event_new_flush_start());
if (nuv->priv->src_video_pad != NULL) {
gst_pad_push_event(nuv->priv->src_video_pad,
gst_event_new_flush_start());
}
if (nuv->priv->src_audio_pad != NULL) {
gst_pad_push_event(nuv->priv->src_audio_pad,
gst_event_new_flush_start());
}
} else {
gst_pad_pause_task(nuv->priv->sinkpad);
}
GST_PAD_STREAM_LOCK(nuv->priv->sinkpad);
GST_OBJECT_LOCK(nuv);
if (cur == GST_CLOCK_TIME_NONE)
cur = 0;
if (stop == GST_CLOCK_TIME_NONE)
stop = nuv->priv->duration_time;
if (cur_type == GST_SEEK_TYPE_SET)
segment_start = cur;
else if (cur_type == GST_SEEK_TYPE_CUR)
segment_start = nuv->priv->segment_start + cur;
else
segment_start = nuv->priv->segment_start;
if (stop_type == GST_SEEK_TYPE_SET)
segment_stop = stop;
else if (stop_type == GST_SEEK_TYPE_CUR)
segment_stop = nuv->priv->segment_stop + stop;
else
segment_stop = nuv->priv->segment_stop;
segment_start = CLAMP(segment_start, 0, nuv->priv->duration_time);
segment_stop = CLAMP(segment_stop, 0, nuv->priv->duration_time);
entry = gst_nuv_demux_do_seek_index(nuv, segment_start,
segment_stop, format);
if (entry == NULL) {
GST_DEBUG_OBJECT(nuv, "No matching seek entry in index");
goto seek_error;
}
segment_start = entry->timecode;
nuv->priv->segment_start = segment_start;
nuv->priv->segment_stop = segment_stop;
GST_OBJECT_UNLOCK(nuv);
if (!nuv->priv->eos) {
GstMessage *msg;
msg =
gst_message_new_segment_start(GST_OBJECT(nuv), GST_FORMAT_TIME,
nuv->priv->segment_start);
gst_element_post_message(GST_ELEMENT(nuv), msg);
}
GST_DEBUG_OBJECT(nuv,
"NEW SEGMENT START %" G_GUINT64_FORMAT ", STOP %"
G_GUINT64_FORMAT, segment_start, segment_stop);
newsegment_event =
gst_event_new_new_segment(FALSE, rate, GST_FORMAT_TIME,
segment_start, segment_stop,
segment_start);
if (flush) {
if (nuv->priv->src_video_pad != NULL) {
gst_pad_push_event(nuv->priv->src_video_pad,
gst_event_new_flush_stop());
}
if (nuv->priv->src_audio_pad != NULL) {
gst_pad_push_event(nuv->priv->src_audio_pad,
gst_event_new_flush_stop());
}
gst_pad_push_event(nuv->priv->sinkpad, gst_event_new_flush_stop());
}
if (nuv->priv->src_video_pad != NULL) {
gst_pad_push_event(nuv->priv->src_video_pad,
gst_event_ref(newsegment_event));
}
if (nuv->priv->src_audio_pad != NULL) {
gst_pad_push_event(nuv->priv->src_audio_pad,
gst_event_ref(newsegment_event));
}
gst_event_unref(newsegment_event);
nuv->priv->state = GST_NUV_DEMUX_FRAME_HEADER;
nuv->priv->offset = entry->offset;
gst_pad_start_task(nuv->priv->sinkpad,
(GstTaskFunction) gst_nuv_demux_loop,
nuv->priv->sinkpad);
GST_PAD_STREAM_UNLOCK(nuv->priv->sinkpad);
return TRUE;
seek_error:
GST_DEBUG_OBJECT(nuv, "Got a seek error");
GST_OBJECT_UNLOCK(nuv);
GST_PAD_STREAM_UNLOCK(nuv->priv->sinkpad);
return FALSE;
}
static gboolean
gst_nuv_demux_srcpad_event(GstPad * pad, GstEvent * event)
{
gboolean res = FALSE;
GstNuvDemux *nuv;
nuv = GST_NUV_DEMUX(gst_pad_get_parent(pad));
switch (GST_EVENT_TYPE(event)) {
case GST_EVENT_SEEK:
res = gst_nuv_demux_do_seek(nuv, event);
break;
default:
res = FALSE;
break;
}
gst_object_unref(nuv);
return res;
}
static gboolean
gst_nuv_demux_sink_event(GstPad * pad, GstEvent * event)
{
gboolean res = FALSE;
GstNuvDemux *nuv;
nuv = GST_NUV_DEMUX(gst_pad_get_parent(pad));
switch (GST_EVENT_TYPE(event)) {
case GST_EVENT_NEWSEGMENT:
  GST_PAD_STREAM_LOCK(pad);
  gst_nuv_demux_reset(nuv);
  GST_PAD_STREAM_UNLOCK(pad);
  /* the newsegment event is consumed here, so drop it */
  gst_event_unref(event);
  res = TRUE;
  break;
default:
  res = gst_pad_event_default(pad, event);
  break;
}
gst_object_unref(nuv);
return res;
}
static GstFlowReturn
gst_nuv_demux_chain(GstPad * pad, GstBuffer * buf)
{
GstFlowReturn ret = GST_FLOW_OK;
GstNuvDemux *nuv = GST_NUV_DEMUX(gst_pad_get_parent(pad));
if (nuv->priv->mode != NUV_PUSH_MODE) {
  gst_buffer_unref(buf);
  gst_object_unref(nuv);
  return ret;
}
gst_adapter_push(nuv->priv->adapter, buf);
while ((ret == GST_FLOW_OK) && (nuv->priv->more_data == FALSE)) {
ret = gst_nuv_demux_play(pad);
}
nuv->priv->more_data = FALSE;
gst_object_unref(nuv);
return ret;
}
static void
gst_nuv_demux_loop(GstPad * pad)
{
gst_nuv_demux_play(pad);
}
static void
gst_nuv_demux_index_free(gpointer data, gpointer user_data)
{
g_free(data);
}
static void
gst_nuv_demux_reset(GstNuvDemux * nuv)
{
nuv->priv->eos = FALSE;
nuv->priv->more_data = FALSE;
nuv->priv->state = GST_NUV_DEMUX_START;
nuv->priv->mode = NUV_PUSH_MODE;
nuv->priv->offset = 0;
nuv->priv->time_start = 0;
nuv->priv->time_qos = GST_CLOCK_TIME_NONE;
nuv->priv->duration_bytes = GST_CLOCK_TIME_NONE;
nuv->priv->duration_time = GST_CLOCK_TIME_NONE;
nuv->priv->last_video_return = GST_FLOW_OK;
nuv->priv->last_audio_return = GST_FLOW_OK;
nuv->priv->header_lengh = 0;
nuv->priv->segment_stop = GST_CLOCK_TIME_NONE;
nuv->priv->segment_start = GST_CLOCK_TIME_NONE;
nuv->priv->new_file = FALSE;
// clear index list
g_slist_foreach(nuv->priv->index, gst_nuv_demux_index_free, NULL);
g_slist_free(nuv->priv->index);
nuv->priv->index = NULL;
gst_adapter_clear(nuv->priv->adapter);
if (nuv->priv->mpeg_buffer != NULL) {
gst_buffer_unref(nuv->priv->mpeg_buffer);
nuv->priv->mpeg_buffer = NULL;
}
}
static void
gst_nuv_demux_destoy_src_pad(GstNuvDemux * nuv)
{
if (nuv->priv->src_video_pad) {
gst_element_remove_pad(GST_ELEMENT(nuv), nuv->priv->src_video_pad);
nuv->priv->src_video_pad = NULL;
}
if (nuv->priv->src_audio_pad) {
gst_element_remove_pad(GST_ELEMENT(nuv), nuv->priv->src_audio_pad);
nuv->priv->src_audio_pad = NULL;
}
}
static GstStateChangeReturn
gst_nuv_demux_change_state(GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
gst_nuv_demux_reset(GST_NUV_DEMUX(element));
gst_nuv_demux_destoy_src_pad(GST_NUV_DEMUX(element));
break;
default:
break;
}
ret =
GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
if (ret == GST_STATE_CHANGE_FAILURE) {
goto done;
}
switch (transition) {
case GST_STATE_CHANGE_READY_TO_NULL:
gst_nuv_demux_reset(GST_NUV_DEMUX(element));
gst_nuv_demux_destoy_src_pad(GST_NUV_DEMUX(element));
break;
default:
break;
}
done:
return ret;
}
#if (GST_VERSION_MINOR == 10) && (GST_VERSION_MICRO < 6)
GstBuffer *
gst_adapter_take_buffer(GstAdapter * adapter, guint nbytes)
{
GstBuffer *buffer;
GstBuffer *cur;
guint8 *data;
g_return_val_if_fail(GST_IS_ADAPTER(adapter), NULL);
g_return_val_if_fail(nbytes > 0, NULL);
GST_LOG_OBJECT(adapter, "taking buffer of %u bytes", nbytes);
/*
* we don't have enough data, return NULL. This is unlikely as one
* usually does an _available() first instead of peeking a random
* size.
*/
if (G_UNLIKELY(nbytes > adapter->size))
return NULL;
/*
* our head buffer has enough data left, return it
*/
cur = adapter->buflist->data;
if (GST_BUFFER_SIZE(cur) >= nbytes + adapter->skip) {
GST_LOG_OBJECT(adapter,
"providing buffer of %d bytes via sub-buffer",
nbytes);
buffer = gst_buffer_create_sub(cur, adapter->skip, nbytes);
gst_adapter_flush(adapter, nbytes);
return buffer;
}
data = gst_adapter_take(adapter, nbytes);
if (data == NULL)
return NULL;
buffer = gst_buffer_new();
GST_BUFFER_DATA(buffer) = data;
GST_BUFFER_MALLOCDATA(buffer) = data;
GST_BUFFER_SIZE(buffer) = nbytes;
return buffer;
}
#endif
static void
gst_nuv_typefind(GstTypeFind * tf, gpointer unused)
{
guint8 *data = gst_type_find_peek(tf, 0, 11);
if (data) {
if (memcmp(data, "MythTVVideo", 11) == 0
|| memcmp(data, "NuppelVideo", 11) == 0) {
gst_type_find_suggest(tf, GST_TYPE_FIND_MAXIMUM,
gst_caps_new_simple("video/x-nuv",
NULL));
}
}
}
static gboolean
plugin_init(GstPlugin * plugin)
{
static gchar *exts[] = { "nuv", NULL };
#ifdef ENABLE_NLS
setlocale(LC_ALL, "");
bindtextdomain(GETTEXT_PACKAGE, LOCALEDIR);
#endif /* ENABLE_NLS */
if (!gst_element_register(plugin, "nuvdemux", GST_RANK_SECONDARY,
GST_TYPE_NUV_DEMUX)) {
return FALSE;
}
if (!gst_type_find_register(plugin, "video/x-nuv", GST_RANK_SECONDARY,
gst_nuv_typefind,
exts,
gst_caps_new_simple("video/x-nuv", NULL),
NULL, NULL)) {
GST_WARNING("can't register typefind");
return FALSE;
}
return TRUE;
}
GST_PLUGIN_DEFINE(GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"nuvdemux",
"Demuxes and muxes audio and video",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME,
GST_PACKAGE_ORIGIN)