all: Fix for GST_DISABLE_GST_DEBUG

Where applicable, remove methods that do nothing different from the default implementation.
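That second point covers virtual-function overrides that only print a debug line and otherwise behave exactly like the base class (gst_faac_start, gst_wayland_sink_stop, the comb-detect/ivtc start/stop handlers, and several dispose/finalize overrides in the diffs below). A minimal sketch of the pattern, using a hypothetical GstFooFilter element rather than code taken from the patch:

```c
#include <gst/base/gstbasetransform.h>

/* Hypothetical element type; GST_FOO_FILTER and GstFooFilter are
 * placeholders, not identifiers from the patch. */

/* Before: an override that only logs and then does what the default does. */
static gboolean
gst_foo_filter_stop (GstBaseTransform * trans)
{
  GstFooFilter *foo = GST_FOO_FILTER (trans);

  GST_DEBUG_OBJECT (foo, "stop");

  return TRUE;
}

static void
gst_foo_filter_class_init (GstFooFilterClass * klass)
{
  GstBaseTransformClass *base_class = GST_BASE_TRANSFORM_CLASS (klass);

  /* After: delete the prototype, the function body and this assignment.
   * GstBaseTransform treats an unset stop vfunc as "nothing to do", so
   * behaviour is unchanged. */
  base_class->stop = GST_DEBUG_FUNCPTR (gst_foo_filter_stop);
}
```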
Edward Hervey 2013-07-29 09:35:08 +02:00
parent bc84cd775f
commit 97426a1caa
35 changed files with 164 additions and 578 deletions
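The bulk of the diff applies one pattern: when GST_DISABLE_GST_DEBUG is defined, GST_DEBUG(), GST_LOG() and friends compile to nothing, so variables that exist only to feed those statements become set-but-unused and break warnings-as-errors builds. The fix is to declare and compute such values only when debugging is compiled in. A minimal sketch of that pattern, with made-up names and parameters rather than lines from the patch:

```c
#include <gst/gst.h>

/* Illustrative helper, not from the patch; element, bytes and duration
 * are assumed parameters, and duration is assumed to be non-zero. */
static void
report_download_rate (GstElement * element, gsize bytes,
    GstClockTime duration)
{
#ifndef GST_DISABLE_GST_DEBUG
  guint64 bitrate;

  /* Only computed to feed the log line below; doing this unconditionally
   * would be flagged as a set-but-unused variable once logging is
   * compiled out. */
  bitrate = gst_util_uint64_scale (bytes * 8, GST_SECOND, duration);

  GST_DEBUG_OBJECT (element, "download bitrate: %" G_GUINT64_FORMAT " bps",
      bitrate);
#endif
}
```

Several hunks below instead leave the GST_DEBUG_OBJECT()/GST_LOG() call itself outside the #ifndef and only guard the variable declarations and assignments; that also builds, because the disabled logging macros discard their arguments without evaluating them.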

View file

@ -227,11 +227,13 @@ static void gst_dash_demux_remove_streams (GstDashDemux * demux,
GSList * streams);
static void gst_dash_demux_stream_free (GstDashDemuxStream * stream);
static void gst_dash_demux_reset (GstDashDemux * demux, gboolean dispose);
#ifndef GST_DISABLE_GST_DEBUG
static GstClockTime gst_dash_demux_get_buffering_time (GstDashDemux * demux);
static GstCaps *gst_dash_demux_get_input_caps (GstDashDemux * demux,
GstActiveStream * stream);
static GstClockTime gst_dash_demux_stream_get_buffering_time (GstDashDemuxStream
* stream);
#endif
static GstCaps *gst_dash_demux_get_input_caps (GstDashDemux * demux,
GstActiveStream * stream);
static GstPad *gst_dash_demux_create_pad (GstDashDemux * demux);
#define gst_dash_demux_parent_class parent_class
@ -1409,6 +1411,7 @@ gst_dash_demux_reset (GstDashDemux * demux, gboolean dispose)
demux->cancelled = FALSE;
}
#ifndef GST_DISABLE_GST_DEBUG
static GstClockTime
gst_dash_demux_get_buffering_time (GstDashDemux * demux)
{
@ -1436,6 +1439,7 @@ gst_dash_demux_stream_get_buffering_time (GstDashDemuxStream * stream)
return (GstClockTime) level.time;
}
#endif
static gboolean
gst_dash_demux_all_streams_have_data (GstDashDemux * demux)
@ -2186,12 +2190,16 @@ gst_dash_demux_get_next_fragment (GstDashDemux * demux,
/* Wake the download task up */
GST_TASK_SIGNAL (demux->download_task);
if (selected_stream) {
#ifndef GST_DISABLE_GST_DEBUG
guint64 brate;
#endif
diff = (GST_TIMEVAL_TO_TIME (now) - GST_TIMEVAL_TO_TIME (start));
gst_download_rate_add_rate (&selected_stream->dnl_rate, size_buffer, diff);
#ifndef GST_DISABLE_GST_DEBUG
brate = (size_buffer * 8) / ((double) diff / GST_SECOND);
#endif
GST_INFO_OBJECT (demux,
"Stream: %d Download rate = %" PRIu64 " Kbits/s (%" PRIu64
" Ko in %.2f s)", selected_stream->index,

View file

@ -699,15 +699,14 @@ gst_mpdparser_get_xml_prop_dateTime (xmlNode * a_node,
{
xmlChar *prop_string;
gchar *str;
gint ret, len, pos;
gint ret, pos;
gint year, month, day, hour, minute, second;
gboolean exists = FALSE;
prop_string = xmlGetProp (a_node, (const xmlChar *) property_name);
if (prop_string) {
len = xmlStrlen (prop_string);
str = (gchar *) prop_string;
GST_TRACE ("dateTime: %s, len %d", str, len);
GST_TRACE ("dateTime: %s, len %d", str, xmlStrlen (prop_string));
/* parse year */
ret = sscanf (str, "%d", &year);
if (ret != 1)
@ -2023,10 +2022,12 @@ gst_mpdparser_get_first_adapt_set_with_mimeType_and_lang (GList *
gchar *this_mimeType = NULL;
rep =
gst_mpdparser_get_lowest_representation (adapt_set->Representations);
#ifndef GST_DISABLE_GST_DEBUG
if (rep && rep->BaseURLs) {
GstBaseURL *url = rep->BaseURLs->data;
GST_DEBUG ("%s", url->baseURL);
}
#endif
if (rep->RepresentationBase)
this_mimeType = rep->RepresentationBase->mimeType;
if (!this_mimeType && adapt_set->RepresentationBase) {

View file

@ -106,6 +106,7 @@ got_egl_error (const char *wtf)
void
gst_egl_adaptation_init_egl_exts (GstEglAdaptationContext * ctx)
{
#ifndef GST_DISABLE_GST_DEBUG
const char *eglexts;
unsigned const char *glexts;
@ -116,7 +117,7 @@ gst_egl_adaptation_init_egl_exts (GstEglAdaptationContext * ctx)
GST_STR_NULL (eglexts));
GST_DEBUG_OBJECT (ctx->element, "Available GLES extensions: %s\n",
GST_STR_NULL ((const char *) glexts));
#endif
return;
}

View file

@ -1052,7 +1052,10 @@ static gboolean
gst_eglglessink_fill_texture (GstEglGlesSink * eglglessink, GstBuffer * buf)
{
GstVideoFrame vframe;
gint w, h;
#ifndef GST_DISABLE_GST_DEBUG
gint w;
#endif
gint h;
memset (&vframe, 0, sizeof (vframe));
@ -1061,8 +1064,9 @@ gst_eglglessink_fill_texture (GstEglGlesSink * eglglessink, GstBuffer * buf)
GST_ERROR_OBJECT (eglglessink, "Couldn't map frame");
goto HANDLE_ERROR;
}
#ifndef GST_DISABLE_GST_DEBUG
w = GST_VIDEO_FRAME_WIDTH (&vframe);
#endif
h = GST_VIDEO_FRAME_HEIGHT (&vframe);
GST_DEBUG_OBJECT (eglglessink,

View file

@ -126,7 +126,6 @@ static gboolean gst_faac_configure_source_pad (GstFaac * faac,
GstAudioInfo * info);
static GstCaps *gst_faac_getcaps (GstAudioEncoder * enc, GstCaps * filter);
static gboolean gst_faac_start (GstAudioEncoder * enc);
static gboolean gst_faac_stop (GstAudioEncoder * enc);
static gboolean gst_faac_set_format (GstAudioEncoder * enc,
GstAudioInfo * info);
@ -207,7 +206,6 @@ gst_faac_class_init (GstFaacClass * klass)
"Free MPEG-2/4 AAC encoder",
"Ronald Bultje <rbultje@ronald.bitfreak.net>");
base_class->start = GST_DEBUG_FUNCPTR (gst_faac_start);
base_class->stop = GST_DEBUG_FUNCPTR (gst_faac_stop);
base_class->set_format = GST_DEBUG_FUNCPTR (gst_faac_set_format);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_faac_handle_frame);
@ -259,15 +257,6 @@ gst_faac_close_encoder (GstFaac * faac)
faac->handle = NULL;
}
static gboolean
gst_faac_start (GstAudioEncoder * enc)
{
GstFaac *faac = GST_FAAC (enc);
GST_DEBUG_OBJECT (faac, "start");
return TRUE;
}
static gboolean
gst_faac_stop (GstAudioEncoder * enc)
{

View file

@ -460,7 +460,6 @@ gst_flups_demux_send_data (GstFluPSDemux * demux, GstFluPSStream * stream,
{
GstFlowReturn result;
GstClockTime pts = GST_CLOCK_TIME_NONE, dts = GST_CLOCK_TIME_NONE;
guint size;
if (stream == NULL)
goto no_stream;
@ -497,16 +496,15 @@ gst_flups_demux_send_data (GstFluPSDemux * demux, GstFluPSStream * stream,
stream->discont = FALSE;
}
size = gst_buffer_get_size (buf);
demux->next_pts = G_MAXUINT64;
demux->next_dts = G_MAXUINT64;
result = gst_pad_push (stream->pad, buf);
GST_DEBUG_OBJECT (demux, "pushed stream id 0x%02x type 0x%02x, pts time: %"
GST_TIME_FORMAT ", size %d. result: %s",
GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT ". result: %s",
stream->id, stream->type, GST_TIME_ARGS (pts),
size, gst_flow_get_name (result));
gst_buffer_get_size (buf), gst_flow_get_name (result));
return result;
@ -1525,7 +1523,9 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
{
guint16 length;
const guint8 *data;
#ifndef GST_DISABLE_GST_DEBUG
gboolean csps;
#endif
if (gst_adapter_available (demux->adapter) < 6)
goto need_more_data;
@ -1572,6 +1572,7 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
/* audio_bound:6==1 ! fixed:1 | constrained:1 */
{
#ifndef GST_DISABLE_GST_DEBUG
guint8 audio_bound;
gboolean fixed;
@ -1584,36 +1585,40 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
GST_DEBUG_OBJECT (demux, "audio_bound %d, fixed %d, constrained %d",
audio_bound, fixed, csps);
#endif
data += 1;
}
/* audio_lock:1 | video_lock:1 | marker:1==1 | video_bound:5 */
{
#ifndef GST_DISABLE_GST_DEBUG
gboolean audio_lock;
gboolean video_lock;
guint8 video_bound;
audio_lock = (data[0] & 0x80) == 0x80;
video_lock = (data[0] & 0x40) == 0x40;
#endif
if ((data[0] & 0x20) != 0x20)
goto marker_expected;
#ifndef GST_DISABLE_GST_DEBUG
/* max number of simultaneous video streams active */
video_bound = (data[0] & 0x1f);
GST_DEBUG_OBJECT (demux, "audio_lock %d, video_lock %d, video_bound %d",
audio_lock, video_lock, video_bound);
#endif
data += 1;
}
/* packet_rate_restriction:1 | reserved:7==0x7F */
{
#ifndef GST_DISABLE_GST_DEBUG
gboolean packet_rate_restriction;
#endif
if ((data[0] & 0x7f) != 0x7f)
goto marker_expected;
#ifndef GST_DISABLE_GST_DEBUG
/* only valid if csps is set */
if (csps) {
packet_rate_restriction = (data[0] & 0x80) == 0x80;
@ -1621,6 +1626,7 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
GST_DEBUG_OBJECT (demux, "packet_rate_restriction %d",
packet_rate_restriction);
}
#endif
}
data += 1;
@ -1632,10 +1638,11 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
for (i = 0; i < stream_count; i++) {
guint8 stream_id;
#ifndef GST_DISABLE_GST_DEBUG
gboolean STD_buffer_bound_scale;
guint16 STD_buffer_size_bound;
guint32 buf_byte_size_bound;
#endif
stream_id = *data++;
if (!(stream_id & 0x80))
goto sys_len_error;
@ -1643,7 +1650,7 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
/* check marker bits */
if ((*data & 0xC0) != 0xC0)
goto no_placeholder_bits;
#ifndef GST_DISABLE_GST_DEBUG
STD_buffer_bound_scale = *data & 0x20;
STD_buffer_size_bound = ((guint16) (*data++ & 0x1F)) << 8;
STD_buffer_size_bound |= *data++;
@ -1653,7 +1660,7 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
} else {
buf_byte_size_bound = STD_buffer_size_bound * 1024;
}
#endif
GST_DEBUG_OBJECT (demux, "STD_buffer_bound_scale %d",
STD_buffer_bound_scale);
GST_DEBUG_OBJECT (demux, "STD_buffer_size_bound %d or %d bytes",
@ -1700,7 +1707,9 @@ gst_flups_demux_parse_psm (GstFluPSDemux * demux)
guint16 length = 0, info_length = 0, es_map_length = 0;
guint8 psm_version = 0;
const guint8 *data, *es_map_base;
#ifndef GST_DISABLE_GST_DEBUG
gboolean applicable;
#endif
if (gst_adapter_available (demux->adapter) < 6)
goto need_more_data;
@ -1731,7 +1740,9 @@ gst_flups_demux_parse_psm (GstFluPSDemux * demux)
/* Read PSM applicable bit together with version */
psm_version = GST_READ_UINT8 (data);
#ifndef GST_DISABLE_GST_DEBUG
applicable = (psm_version & 0x80) >> 7;
#endif
psm_version &= 0x1F;
GST_DEBUG_OBJECT (demux, "PSM version %u (applicable now %u)", psm_version,
applicable);
@ -1835,8 +1846,6 @@ gst_flups_demux_data_cb (GstPESFilter * filter, gboolean first,
if (stream_type == -1) {
/* no stream type, if PS1, get the new id */
if (start_code == ID_PRIVATE_STREAM_1 && datalen >= 2) {
guint8 nframes;
/* VDR writes A52 streams without any header bytes
* (see ftp://ftp.mplayerhq.hu/MPlayer/samples/MPEG-VOB/vdr-AC3) */
if (datalen >= 4) {
@ -1861,8 +1870,13 @@ gst_flups_demux_data_cb (GstPESFilter * filter, gboolean first,
* take the first byte too, since it's the frame count in audio
* streams and our backwards compat convention is to strip it off */
if (stream_type != ST_PS_DVD_SUBPICTURE) {
#ifndef GST_DISABLE_GST_DEBUG
guint8 nframes;
/* Number of audio frames in this packet */
nframes = map.data[offset++];
nframes = map.data[offset];
#endif
offset++;
datalen--;
GST_DEBUG_OBJECT (demux, "private type 0x%02x, %d frames", id,
nframes);

View file

@ -36,14 +36,19 @@ gst_mpeg_descriptor_free (GstMPEGDescriptor * desc)
static guint
gst_mpeg_descriptor_parse_1 (guint8 * data, guint size)
{
#ifndef GST_DISABLE_GST_DEBUG
guint8 tag;
#endif
guint8 length;
/* need at least 2 bytes for tag and length */
if (size < 2)
return 0;
tag = *data++;
#ifndef GST_DISABLE_GST_DEBUG
tag = *data;
#endif
data += 1;
length = *data++;
size -= 2;

View file

@ -458,9 +458,11 @@ gst_pes_filter_parse (GstPESFilter * filter)
push_out:
{
GstBuffer *out;
#ifndef GST_DISABLE_GST_DEBUG
guint16 consumed;
consumed = avail - 6 - datalen;
#endif
if (filter->unbounded_packet == FALSE) {
filter->length -= avail - 6;

View file

@ -293,13 +293,12 @@ static GstFlowReturn
gst_rsvg_dec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
GstAdapter * adapter, gboolean at_eos)
{
GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
gboolean completed = FALSE;
const guint8 *data;
guint size;
guint i;
GST_LOG_OBJECT (rsvg, "parse start");
GST_LOG_OBJECT (decoder, "parse start");
size = gst_adapter_available (adapter);
/* "<svg></svg>" */
@ -308,7 +307,7 @@ gst_rsvg_dec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
data = gst_adapter_map (adapter, size);
if (data == NULL) {
GST_ERROR_OBJECT (rsvg, "Unable to map memory");
GST_ERROR_OBJECT (decoder, "Unable to map memory");
return GST_FLOW_ERROR;
}
for (i = 0; i < size - 4; i++) {
@ -320,7 +319,7 @@ gst_rsvg_dec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
return GST_VIDEO_DECODER_FLOW_NEED_DATA;
data = gst_adapter_map (adapter, size);
if (data == NULL) {
GST_ERROR_OBJECT (rsvg, "Unable to map memory");
GST_ERROR_OBJECT (decoder, "Unable to map memory");
return GST_FLOW_ERROR;
}
break;
@ -342,7 +341,7 @@ gst_rsvg_dec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
if (completed) {
GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);
GST_LOG_OBJECT (decoder, "have complete svg of %u bytes", size);
gst_video_decoder_add_to_frame (decoder, size);
return gst_video_decoder_have_frame (decoder);

View file

@ -1106,8 +1106,10 @@ gst_mss_demux_stream_download_fragment (GstMssDemuxStream * stream,
after_download = g_get_real_time ();
if (_buffer) {
#ifndef GST_DISABLE_GST_DEBUG
guint64 bitrate = (8 * gst_buffer_get_size (_buffer) * 1000000LLU) /
(after_download - before_download);
#endif
GST_DEBUG_OBJECT (mssdemux,
"Measured download bitrate: %s %" G_GUINT64_FORMAT " bps",

View file

@ -84,7 +84,6 @@ static GstCaps *gst_wayland_sink_get_caps (GstBaseSink * bsink,
GstCaps * filter);
static gboolean gst_wayland_sink_set_caps (GstBaseSink * bsink, GstCaps * caps);
static gboolean gst_wayland_sink_start (GstBaseSink * bsink);
static gboolean gst_wayland_sink_stop (GstBaseSink * bsink);
static gboolean gst_wayland_sink_preroll (GstBaseSink * bsink,
GstBuffer * buffer);
static gboolean
@ -130,6 +129,7 @@ gst_wayland_format_to_wl_format (GstVideoFormat format)
return -1;
}
#ifndef GST_DISABLE_GST_DEBUG
static const gchar *
gst_wayland_format_to_string (uint32_t wl_format)
{
@ -142,6 +142,7 @@ gst_wayland_format_to_string (uint32_t wl_format)
return gst_video_format_to_string (format);
}
#endif
static void
gst_wayland_sink_class_init (GstWaylandSinkClass * klass)
@ -169,7 +170,6 @@ gst_wayland_sink_class_init (GstWaylandSinkClass * klass)
gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_wayland_sink_get_caps);
gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_wayland_sink_set_caps);
gstbasesink_class->start = GST_DEBUG_FUNCPTR (gst_wayland_sink_start);
gstbasesink_class->stop = GST_DEBUG_FUNCPTR (gst_wayland_sink_stop);
gstbasesink_class->preroll = GST_DEBUG_FUNCPTR (gst_wayland_sink_preroll);
gstbasesink_class->propose_allocation =
GST_DEBUG_FUNCPTR (gst_wayland_sink_propose_allocation);
@ -527,16 +527,6 @@ gst_wayland_sink_start (GstBaseSink * bsink)
return result;
}
static gboolean
gst_wayland_sink_stop (GstBaseSink * bsink)
{
GstWaylandSink *sink = (GstWaylandSink *) bsink;
GST_DEBUG_OBJECT (sink, "stop");
return TRUE;
}
static gboolean
gst_wayland_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{

View file

@ -1968,7 +1968,9 @@ gst_h264_parser_parse_sei (GstH264NalParser * nalparser, GstH264NalUnit * nalu,
guint32 payloadSize;
guint8 payload_type_byte, payload_size_byte;
#ifndef GST_DISABLE_GST_DEBUG
guint remaining, payload_size;
#endif
GstH264ParserResult res;
GST_DEBUG ("parsing \"Sei message\"");
@ -1991,11 +1993,13 @@ gst_h264_parser_parse_sei (GstH264NalParser * nalparser, GstH264NalUnit * nalu,
}
while (payload_size_byte == 0xff);
#ifndef GST_DISABLE_GST_DEBUG
remaining = nal_reader_get_remaining (&nr) * 8;
payload_size = payloadSize < remaining ? payloadSize : remaining;
GST_DEBUG ("SEI message received: payloadType %u, payloadSize = %u bytes",
sei->payloadType, payload_size);
#endif
if (sei->payloadType == GST_H264_SEI_BUF_PERIOD) {
/* size not set; might depend on emulation_prevention_three_byte */

View file

@ -51,8 +51,6 @@ static void gst_audio_channel_mix_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_audio_channel_mix_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_audio_channel_mix_dispose (GObject * object);
static void gst_audio_channel_mix_finalize (GObject * object);
static gboolean gst_audio_channel_mix_setup (GstAudioFilter * filter,
const GstAudioInfo * info);
@ -115,8 +113,6 @@ gst_audio_channel_mix_class_init (GstAudioChannelMixClass * klass)
gobject_class->set_property = gst_audio_channel_mix_set_property;
gobject_class->get_property = gst_audio_channel_mix_get_property;
gobject_class->dispose = gst_audio_channel_mix_dispose;
gobject_class->finalize = gst_audio_channel_mix_finalize;
audio_filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_channel_mix_setup);
base_transform_class->transform_ip =
GST_DEBUG_FUNCPTR (gst_audio_channel_mix_transform_ip);
@ -209,36 +205,14 @@ gst_audio_channel_mix_get_property (GObject * object, guint property_id,
}
}
void
gst_audio_channel_mix_dispose (GObject * object)
{
GstAudioChannelMix *audiochannelmix = GST_AUDIO_CHANNEL_MIX (object);
GST_DEBUG_OBJECT (audiochannelmix, "dispose");
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (gst_audio_channel_mix_parent_class)->dispose (object);
}
void
gst_audio_channel_mix_finalize (GObject * object)
{
GstAudioChannelMix *audiochannelmix = GST_AUDIO_CHANNEL_MIX (object);
GST_DEBUG_OBJECT (audiochannelmix, "finalize");
/* clean up object here */
G_OBJECT_CLASS (gst_audio_channel_mix_parent_class)->finalize (object);
}
static gboolean
gst_audio_channel_mix_setup (GstAudioFilter * filter, const GstAudioInfo * info)
{
#ifndef GST_DISABLE_GST_DEBUG
GstAudioChannelMix *audiochannelmix = GST_AUDIO_CHANNEL_MIX (filter);
GST_DEBUG_OBJECT (audiochannelmix, "setup");
#endif
return TRUE;
}

View file

@ -59,11 +59,7 @@ static void gst_watchdog_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_watchdog_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_watchdog_dispose (GObject * object);
static void gst_watchdog_finalize (GObject * object);
static GstCaps *gst_watchdog_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static gboolean gst_watchdog_start (GstBaseTransform * trans);
static gboolean gst_watchdog_stop (GstBaseTransform * trans);
static gboolean gst_watchdog_sink_event (GstBaseTransform * trans,
@ -105,10 +101,6 @@ gst_watchdog_class_init (GstWatchdogClass * klass)
gobject_class->set_property = gst_watchdog_set_property;
gobject_class->get_property = gst_watchdog_get_property;
gobject_class->dispose = gst_watchdog_dispose;
gobject_class->finalize = gst_watchdog_finalize;
base_transform_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_watchdog_transform_caps);
base_transform_class->start = GST_DEBUG_FUNCPTR (gst_watchdog_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_watchdog_stop);
base_transform_class->sink_event =
@ -166,41 +158,6 @@ gst_watchdog_get_property (GObject * object, guint property_id,
}
}
void
gst_watchdog_dispose (GObject * object)
{
GstWatchdog *watchdog = GST_WATCHDOG (object);
GST_DEBUG_OBJECT (watchdog, "dispose");
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (gst_watchdog_parent_class)->dispose (object);
}
void
gst_watchdog_finalize (GObject * object)
{
GstWatchdog *watchdog = GST_WATCHDOG (object);
GST_DEBUG_OBJECT (watchdog, "finalize");
/* clean up object here */
G_OBJECT_CLASS (gst_watchdog_parent_class)->finalize (object);
}
static GstCaps *
gst_watchdog_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstWatchdog *watchdog = GST_WATCHDOG (trans);
GST_DEBUG_OBJECT (watchdog, "transform_caps");
return gst_caps_ref (caps);
}
static gpointer
gst_watchdog_thread (gpointer user_data)
{

View file

@ -788,22 +788,19 @@ gst_dvd_spu_advance_spu (GstDVDSpu * dvdspu, GstClockTime new_ts)
GST_TIME_ARGS (state->next_ts), GST_TIME_ARGS (new_ts));
if (!gstspu_execute_event (dvdspu)) {
GstClockTime vid_run_ts;
/* No current command buffer, try and get one */
SpuPacket *packet = (SpuPacket *) g_queue_pop_head (dvdspu->pending_spus);
if (packet == NULL)
return; /* No SPU packets available */
vid_run_ts =
gst_segment_to_running_time (&dvdspu->video_seg, GST_FORMAT_TIME,
dvdspu->video_seg.position);
GST_LOG_OBJECT (dvdspu,
"Popped new SPU packet with TS %" GST_TIME_FORMAT
". Video position=%" GST_TIME_FORMAT " (%" GST_TIME_FORMAT
") type %s",
GST_TIME_ARGS (packet->event_ts), GST_TIME_ARGS (vid_run_ts),
GST_TIME_ARGS (packet->event_ts),
GST_TIME_ARGS (gst_segment_to_running_time (&dvdspu->video_seg,
GST_FORMAT_TIME, dvdspu->video_seg.position)),
GST_TIME_ARGS (dvdspu->video_seg.position),
packet->buf ? "buffer" : "event");

View file

@ -234,9 +234,7 @@ static void
gst_inter_audio_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end)
{
GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
GST_DEBUG_OBJECT (interaudiosrc, "get_times");
GST_DEBUG_OBJECT (src, "get_times");
/* for live sources, sync on the timestamp of the buffer */
if (gst_base_src_is_live (src)) {
@ -325,10 +323,9 @@ gst_inter_audio_src_create (GstBaseSrc * src, guint64 offset, guint size,
static gboolean
gst_inter_audio_src_query (GstBaseSrc * src, GstQuery * query)
{
GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
gboolean ret;
GST_DEBUG_OBJECT (interaudiosrc, "query");
GST_DEBUG_OBJECT (src, "query");
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_LATENCY:{
@ -360,15 +357,14 @@ gst_inter_audio_src_query (GstBaseSrc * src, GstQuery * query)
static GstCaps *
gst_inter_audio_src_fixate (GstBaseSrc * src, GstCaps * caps)
{
GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
GstStructure *structure;
GST_DEBUG_OBJECT (src, "fixate");
caps = gst_caps_make_writable (caps);
structure = gst_caps_get_structure (caps, 0);
GST_DEBUG_OBJECT (interaudiosrc, "fixate");
gst_structure_fixate_field_nearest_int (structure, "channels", 2);
gst_structure_fixate_field_nearest_int (structure, "rate", 48000);

View file

@ -192,9 +192,7 @@ static void
gst_inter_sub_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end)
{
GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
GST_DEBUG_OBJECT (intersubsrc, "get_times");
GST_DEBUG_OBJECT (src, "get_times");
/* for live sources, sync on the timestamp of the buffer */
if (gst_base_src_is_live (src)) {

View file

@ -221,9 +221,7 @@ static void
gst_inter_video_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end)
{
GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src);
GST_DEBUG_OBJECT (intervideosrc, "get_times");
GST_DEBUG_OBJECT (src, "get_times");
/* for live sources, sync on the timestamp of the buffer */
if (gst_base_src_is_live (src)) {
@ -313,10 +311,9 @@ gst_inter_video_src_create (GstBaseSrc * src, guint64 offset, guint size,
static GstCaps *
gst_inter_video_src_fixate (GstBaseSrc * src, GstCaps * caps)
{
GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src);
GstStructure *structure;
GST_DEBUG_OBJECT (intervideosrc, "fixate");
GST_DEBUG_OBJECT (src, "fixate");
caps = gst_caps_make_writable (caps);

View file

@ -49,17 +49,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_comb_detect_debug_category);
/* prototypes */
static void gst_comb_detect_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_comb_detect_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_comb_detect_dispose (GObject * object);
static void gst_comb_detect_finalize (GObject * object);
static GstCaps *gst_comb_detect_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static gboolean gst_comb_detect_start (GstBaseTransform * trans);
static gboolean gst_comb_detect_stop (GstBaseTransform * trans);
static gboolean gst_comb_detect_set_info (GstVideoFilter * filter,
GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
GstVideoInfo * out_info);
@ -106,7 +97,6 @@ G_DEFINE_TYPE_WITH_CODE (GstCombDetect, gst_comb_detect, GST_TYPE_VIDEO_FILTER,
static void
gst_comb_detect_class_init (GstCombDetectClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseTransformClass *base_transform_class =
GST_BASE_TRANSFORM_CLASS (klass);
GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
@ -122,14 +112,8 @@ gst_comb_detect_class_init (GstCombDetectClass * klass)
"Comb Detect", "Video/Filter", "Detect combing artifacts in video stream",
"David Schleef <ds@schleef.org>");
gobject_class->set_property = gst_comb_detect_set_property;
gobject_class->get_property = gst_comb_detect_get_property;
gobject_class->dispose = gst_comb_detect_dispose;
gobject_class->finalize = gst_comb_detect_finalize;
base_transform_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_comb_detect_transform_caps);
base_transform_class->start = GST_DEBUG_FUNCPTR (gst_comb_detect_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_comb_detect_stop);
video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_comb_detect_set_info);
video_filter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_comb_detect_transform_frame);
@ -141,59 +125,6 @@ gst_comb_detect_init (GstCombDetect * combdetect)
{
}
void
gst_comb_detect_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
GstCombDetect *combdetect = GST_COMB_DETECT (object);
GST_DEBUG_OBJECT (combdetect, "set_property");
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_comb_detect_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
GstCombDetect *combdetect = GST_COMB_DETECT (object);
GST_DEBUG_OBJECT (combdetect, "get_property");
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_comb_detect_dispose (GObject * object)
{
GstCombDetect *combdetect = GST_COMB_DETECT (object);
GST_DEBUG_OBJECT (combdetect, "dispose");
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (gst_comb_detect_parent_class)->dispose (object);
}
void
gst_comb_detect_finalize (GObject * object)
{
GstCombDetect *combdetect = GST_COMB_DETECT (object);
GST_DEBUG_OBJECT (combdetect, "finalize");
/* clean up object here */
G_OBJECT_CLASS (gst_comb_detect_parent_class)->finalize (object);
}
static GstCaps *
gst_comb_detect_transform_caps (GstBaseTransform * trans,
@ -243,27 +174,6 @@ gst_comb_detect_transform_caps (GstBaseTransform * trans,
return othercaps;
}
static gboolean
gst_comb_detect_start (GstBaseTransform * trans)
{
GstCombDetect *combdetect = GST_COMB_DETECT (trans);
GST_DEBUG_OBJECT (combdetect, "start");
/* initialize processing */
return TRUE;
}
static gboolean
gst_comb_detect_stop (GstBaseTransform * trans)
{
GstCombDetect *combdetect = GST_COMB_DETECT (trans);
GST_DEBUG_OBJECT (combdetect, "stop");
/* finalize processing */
return TRUE;
}
static gboolean
gst_comb_detect_set_info (GstVideoFilter * filter,

View file

@ -59,21 +59,12 @@ GST_DEBUG_CATEGORY_STATIC (gst_ivtc_debug_category);
/* prototypes */
static void gst_ivtc_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_ivtc_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_ivtc_dispose (GObject * object);
static void gst_ivtc_finalize (GObject * object);
static GstCaps *gst_ivtc_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static GstCaps *gst_ivtc_fixate_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
static gboolean gst_ivtc_set_caps (GstBaseTransform * trans, GstCaps * incaps,
GstCaps * outcaps);
static gboolean gst_ivtc_start (GstBaseTransform * trans);
static gboolean gst_ivtc_stop (GstBaseTransform * trans);
static gboolean gst_ivtc_sink_event (GstBaseTransform * trans,
GstEvent * event);
static GstFlowReturn gst_ivtc_transform (GstBaseTransform * trans,
@ -123,7 +114,6 @@ G_DEFINE_TYPE_WITH_CODE (GstIvtc, gst_ivtc, GST_TYPE_BASE_TRANSFORM,
static void
gst_ivtc_class_init (GstIvtcClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseTransformClass *base_transform_class =
GST_BASE_TRANSFORM_CLASS (klass);
@ -138,16 +128,10 @@ gst_ivtc_class_init (GstIvtcClass * klass)
"Inverse Telecine", "Video/Filter", "Inverse Telecine Filter",
"David Schleef <ds@schleef.org>");
gobject_class->set_property = gst_ivtc_set_property;
gobject_class->get_property = gst_ivtc_get_property;
gobject_class->dispose = gst_ivtc_dispose;
gobject_class->finalize = gst_ivtc_finalize;
base_transform_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_ivtc_transform_caps);
base_transform_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_ivtc_fixate_caps);
base_transform_class->set_caps = GST_DEBUG_FUNCPTR (gst_ivtc_set_caps);
base_transform_class->start = GST_DEBUG_FUNCPTR (gst_ivtc_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_ivtc_stop);
base_transform_class->sink_event = GST_DEBUG_FUNCPTR (gst_ivtc_sink_event);
base_transform_class->transform = GST_DEBUG_FUNCPTR (gst_ivtc_transform);
}
@ -157,60 +141,6 @@ gst_ivtc_init (GstIvtc * ivtc)
{
}
void
gst_ivtc_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
GstIvtc *ivtc = GST_IVTC (object);
GST_DEBUG_OBJECT (ivtc, "set_property");
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_ivtc_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
GstIvtc *ivtc = GST_IVTC (object);
GST_DEBUG_OBJECT (ivtc, "get_property");
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_ivtc_dispose (GObject * object)
{
GstIvtc *ivtc = GST_IVTC (object);
GST_DEBUG_OBJECT (ivtc, "dispose");
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (gst_ivtc_parent_class)->dispose (object);
}
void
gst_ivtc_finalize (GObject * object)
{
GstIvtc *ivtc = GST_IVTC (object);
GST_DEBUG_OBJECT (ivtc, "finalize");
/* clean up object here */
G_OBJECT_CLASS (gst_ivtc_parent_class)->finalize (object);
}
static GstCaps *
gst_ivtc_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter)
@ -296,28 +226,6 @@ gst_ivtc_set_caps (GstBaseTransform * trans, GstCaps * incaps,
return TRUE;
}
/* states */
static gboolean
gst_ivtc_start (GstBaseTransform * trans)
{
GstIvtc *ivtc = GST_IVTC (trans);
GST_DEBUG_OBJECT (ivtc, "start");
return TRUE;
}
static gboolean
gst_ivtc_stop (GstBaseTransform * trans)
{
GstIvtc *ivtc = GST_IVTC (trans);
GST_DEBUG_OBJECT (ivtc, "stop");
gst_ivtc_flush (ivtc);
return TRUE;
}
/* sink and src pad event handlers */
static gboolean
gst_ivtc_sink_event (GstBaseTransform * trans, GstEvent * event)

View file

@ -599,7 +599,6 @@ gst_flups_demux_send_data (GstFluPSDemux * demux, GstFluPSStream * stream,
{
GstFlowReturn result;
GstClockTime pts = GST_CLOCK_TIME_NONE, dts = GST_CLOCK_TIME_NONE;
guint size;
if (stream == NULL)
goto no_stream;
@ -650,16 +649,14 @@ gst_flups_demux_send_data (GstFluPSDemux * demux, GstFluPSStream * stream,
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
}
size = gst_buffer_get_size (buf);
demux->next_pts = G_MAXUINT64;
demux->next_dts = G_MAXUINT64;
stream->last_flow = result = gst_pad_push (stream->pad, buf);
GST_LOG_OBJECT (demux, "pushed stream id 0x%02x type 0x%02x, pts time: %"
GST_TIME_FORMAT ", size %d. result: %s",
GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT ". result: %s",
stream->id, stream->type, GST_TIME_ARGS (pts),
size, gst_flow_get_name (result));
gst_buffer_get_size (buf), gst_flow_get_name (result));
return result;
@ -1806,7 +1803,9 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
{
guint16 length;
const guint8 *data;
#ifndef GST_DISABLE_GST_DEBUG
gboolean csps;
#endif
if (gst_adapter_available (demux->adapter) < 6)
goto need_more_data;
@ -1853,6 +1852,7 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
/* audio_bound:6==1 ! fixed:1 | constrained:1 */
{
#ifndef GST_DISABLE_GST_DEBUG
guint8 audio_bound;
gboolean fixed;
@ -1865,36 +1865,42 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
GST_DEBUG_OBJECT (demux, "audio_bound %d, fixed %d, constrained %d",
audio_bound, fixed, csps);
#endif
data += 1;
}
/* audio_lock:1 | video_lock:1 | marker:1==1 | video_bound:5 */
{
#ifndef GST_DISABLE_GST_DEBUG
gboolean audio_lock;
gboolean video_lock;
guint8 video_bound;
audio_lock = (data[0] & 0x80) == 0x80;
video_lock = (data[0] & 0x40) == 0x40;
#endif
if ((data[0] & 0x20) != 0x20)
goto marker_expected;
#ifndef GST_DISABLE_GST_DEBUG
/* max number of simultaneous video streams active */
video_bound = (data[0] & 0x1f);
GST_DEBUG_OBJECT (demux, "audio_lock %d, video_lock %d, video_bound %d",
audio_lock, video_lock, video_bound);
#endif
data += 1;
}
/* packet_rate_restriction:1 | reserved:7==0x7F */
{
#ifndef GST_DISABLE_GST_DEBUG
gboolean packet_rate_restriction;
#endif
if ((data[0] & 0x7f) != 0x7f)
goto marker_expected;
#ifndef GST_DISABLE_GST_DEBUG
/* only valid if csps is set */
if (csps) {
packet_rate_restriction = (data[0] & 0x80) == 0x80;
@ -1902,6 +1908,7 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
GST_DEBUG_OBJECT (demux, "packet_rate_restriction %d",
packet_rate_restriction);
}
#endif
}
data += 1;
@ -1913,10 +1920,11 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
for (i = 0; i < stream_count; i++) {
guint8 stream_id;
#ifndef GST_DISABLE_GST_DEBUG
gboolean STD_buffer_bound_scale;
guint16 STD_buffer_size_bound;
guint32 buf_byte_size_bound;
#endif
stream_id = *data++;
if (!(stream_id & 0x80))
goto sys_len_error;
@ -1924,7 +1932,7 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
/* check marker bits */
if ((*data & 0xC0) != 0xC0)
goto no_placeholder_bits;
#ifndef GST_DISABLE_GST_DEBUG
STD_buffer_bound_scale = *data & 0x20;
STD_buffer_size_bound = ((guint16) (*data++ & 0x1F)) << 8;
STD_buffer_size_bound |= *data++;
@ -1939,6 +1947,7 @@ gst_flups_demux_parse_sys_head (GstFluPSDemux * demux)
STD_buffer_bound_scale);
GST_DEBUG_OBJECT (demux, "STD_buffer_size_bound %d or %d bytes",
STD_buffer_size_bound, buf_byte_size_bound);
#endif
}
}
@ -1981,7 +1990,9 @@ gst_flups_demux_parse_psm (GstFluPSDemux * demux)
guint16 length = 0, info_length = 0, es_map_length = 0;
guint8 psm_version = 0;
const guint8 *data, *es_map_base;
#ifndef GST_DISABLE_GST_DEBUG
gboolean applicable;
#endif
if (gst_adapter_available (demux->adapter) < 6)
goto need_more_data;
@ -2012,7 +2023,9 @@ gst_flups_demux_parse_psm (GstFluPSDemux * demux)
/* Read PSM applicable bit together with version */
psm_version = GST_READ_UINT8 (data);
#ifndef GST_DISABLE_GST_DEBUG
applicable = (psm_version & 0x80) >> 7;
#endif
psm_version &= 0x1F;
GST_DEBUG_OBJECT (demux, "PSM version %u (applicable now %u)", psm_version,
applicable);
@ -2116,8 +2129,6 @@ gst_flups_demux_data_cb (GstPESFilter * filter, gboolean first,
if (stream_type == -1) {
/* no stream type, if PS1, get the new id */
if (start_code == ID_PRIVATE_STREAM_1 && datalen >= 2) {
guint8 nframes;
/* VDR writes A52 streams without any header bytes
* (see ftp://ftp.mplayerhq.hu/MPlayer/samples/MPEG-VOB/vdr-AC3) */
if (datalen >= 4) {
@ -2143,10 +2154,15 @@ gst_flups_demux_data_cb (GstPESFilter * filter, gboolean first,
* streams and our backwards compat convention is to strip it off */
if (stream_type != ST_PS_DVD_SUBPICTURE) {
/* Number of audio frames in this packet */
nframes = map.data[offset++];
datalen--;
#ifndef GST_DISABLE_GST_DEBUG
guint8 nframes;
nframes = map.data[offset];
GST_LOG_OBJECT (demux, "private type 0x%02x, %d frames", id,
nframes);
#endif
offset++;
datalen--;
} else {
GST_LOG_OBJECT (demux, "private type 0x%02x, stream type %d", id,
stream_type);

View file

@ -459,9 +459,11 @@ gst_pes_filter_parse (GstPESFilter * filter)
push_out:
{
GstBuffer *out;
#ifndef GST_DISABLE_GST_DEBUG
guint16 consumed;
consumed = avail - 6 - datalen;
#endif
if (filter->unbounded_packet == FALSE) {
filter->length -= avail - 6;

View file

@ -779,9 +779,11 @@ mpegtsmux_sink_event (GstCollectPads * pads, GstCollectData * data,
MpegTsMux *mux = GST_MPEG_TSMUX (user_data);
gboolean res = FALSE;
gboolean forward = TRUE;
#ifndef GST_DISABLE_GST_DEBUG
GstPad *pad;
pad = data->pad;
#endif
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_DOWNSTREAM:

View file

@ -164,7 +164,10 @@ gst_siren_dec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buf)
GstBuffer *out_buf;
guint8 *in_data, *out_data;
guint i, size, num_frames;
gint out_size, in_size;
gint out_size;
#ifndef GST_DISABLE_GST_DEBUG
gint in_size;
#endif
gint decode_ret;
GstMapInfo inmap, outmap;
@ -181,7 +184,9 @@ gst_siren_dec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buf)
num_frames = size / 40;
/* this is the input/output size */
#ifndef GST_DISABLE_GST_DEBUG
in_size = num_frames * 40;
#endif
out_size = num_frames * 640;
GST_LOG_OBJECT (dec, "we have %u frames, %u in, %u out", num_frames, in_size,

View file

@ -146,7 +146,10 @@ gst_siren_enc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
GstBuffer *out_buf;
guint8 *in_data, *out_data;
guint i, size, num_frames;
gint out_size, in_size;
gint out_size;
#ifndef GST_DISABLE_GST_DEBUG
gint in_size;
#endif
gint encode_ret;
GstMapInfo inmap, outmap;
@ -166,7 +169,9 @@ gst_siren_enc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
num_frames = size / 640;
/* this is the input/output size */
#ifndef GST_DISABLE_GST_DEBUG
in_size = num_frames * 640;
#endif
out_size = num_frames * 40;
GST_LOG_OBJECT (enc, "we have %u frames, %u in, %u out", num_frames, in_size,

View file

@ -89,16 +89,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_scene_change_debug_category);
/* prototypes */
static void gst_scene_change_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_scene_change_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_scene_change_dispose (GObject * object);
static void gst_scene_change_finalize (GObject * object);
static gboolean gst_scene_change_set_info (GstVideoFilter * filter,
GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
GstVideoInfo * out_info);
static GstFlowReturn gst_scene_change_transform_frame_ip (GstVideoFilter *
filter, GstVideoFrame * frame);
@ -125,7 +115,6 @@ G_DEFINE_TYPE_WITH_CODE (GstSceneChange, gst_scene_change,
static void
gst_scene_change_class_init (GstSceneChangeClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
@ -140,11 +129,6 @@ gst_scene_change_class_init (GstSceneChangeClass * klass)
"Video/Filter", "Detects scene changes in video",
"David Schleef <ds@entropywave.com>");
gobject_class->set_property = gst_scene_change_set_property;
gobject_class->get_property = gst_scene_change_get_property;
gobject_class->dispose = gst_scene_change_dispose;
gobject_class->finalize = gst_scene_change_finalize;
video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_scene_change_set_info);
video_filter_class->transform_frame_ip =
GST_DEBUG_FUNCPTR (gst_scene_change_transform_frame_ip);
@ -155,71 +139,6 @@ gst_scene_change_init (GstSceneChange * scenechange)
{
}
void
gst_scene_change_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
GstSceneChange *scenechange = GST_SCENE_CHANGE (object);
GST_DEBUG_OBJECT (scenechange, "set_property");
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_scene_change_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
GstSceneChange *scenechange = GST_SCENE_CHANGE (object);
GST_DEBUG_OBJECT (scenechange, "get_property");
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_scene_change_dispose (GObject * object)
{
GstSceneChange *scenechange = GST_SCENE_CHANGE (object);
GST_DEBUG_OBJECT (scenechange, "dispose");
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (gst_scene_change_parent_class)->dispose (object);
}
void
gst_scene_change_finalize (GObject * object)
{
GstSceneChange *scenechange = GST_SCENE_CHANGE (object);
GST_DEBUG_OBJECT (scenechange, "finalize");
/* clean up object here */
G_OBJECT_CLASS (gst_scene_change_parent_class)->finalize (object);
}
static gboolean
gst_scene_change_set_info (GstVideoFilter * filter, GstCaps * incaps,
GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
GstSceneChange *scenechange = GST_SCENE_CHANGE (filter);
GST_DEBUG_OBJECT (scenechange, "set_info");
return TRUE;
}
static double
get_frame_score (GstVideoFrame * f1, GstVideoFrame * f2)

View file

@ -44,19 +44,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_video_diff_debug_category);
/* prototypes */
static void gst_video_diff_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_video_diff_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_video_diff_dispose (GObject * object);
static void gst_video_diff_finalize (GObject * object);
static gboolean gst_video_diff_start (GstBaseTransform * trans);
static gboolean gst_video_diff_stop (GstBaseTransform * trans);
static gboolean gst_video_diff_set_info (GstVideoFilter * filter,
GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
GstVideoInfo * out_info);
static GstFlowReturn gst_video_diff_transform_frame (GstVideoFilter * filter,
GstVideoFrame * inframe, GstVideoFrame * outframe);
@ -85,9 +72,6 @@ G_DEFINE_TYPE_WITH_CODE (GstVideoDiff, gst_video_diff, GST_TYPE_VIDEO_FILTER,
static void
gst_video_diff_class_init (GstVideoDiffClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstBaseTransformClass *base_transform_class =
GST_BASE_TRANSFORM_CLASS (klass);
GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);
/* Setting up pads and setting metadata should be moved to
@ -103,13 +87,6 @@ gst_video_diff_class_init (GstVideoDiffClass * klass)
"FIXME Long name", "Generic", "FIXME Description",
"FIXME <fixme@example.com>");
gobject_class->set_property = gst_video_diff_set_property;
gobject_class->get_property = gst_video_diff_get_property;
gobject_class->dispose = gst_video_diff_dispose;
gobject_class->finalize = gst_video_diff_finalize;
base_transform_class->start = GST_DEBUG_FUNCPTR (gst_video_diff_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_video_diff_stop);
video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_diff_set_info);
video_filter_class->transform_frame =
GST_DEBUG_FUNCPTR (gst_video_diff_transform_frame);
@ -121,91 +98,6 @@ gst_video_diff_init (GstVideoDiff * videodiff)
videodiff->threshold = 10;
}
void
gst_video_diff_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
GstVideoDiff *videodiff = GST_VIDEO_DIFF (object);
GST_DEBUG_OBJECT (videodiff, "set_property");
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_video_diff_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
GstVideoDiff *videodiff = GST_VIDEO_DIFF (object);
GST_DEBUG_OBJECT (videodiff, "get_property");
switch (property_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
}
}
void
gst_video_diff_dispose (GObject * object)
{
GstVideoDiff *videodiff = GST_VIDEO_DIFF (object);
GST_DEBUG_OBJECT (videodiff, "dispose");
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (gst_video_diff_parent_class)->dispose (object);
}
void
gst_video_diff_finalize (GObject * object)
{
GstVideoDiff *videodiff = GST_VIDEO_DIFF (object);
GST_DEBUG_OBJECT (videodiff, "finalize");
/* clean up object here */
G_OBJECT_CLASS (gst_video_diff_parent_class)->finalize (object);
}
static gboolean
gst_video_diff_start (GstBaseTransform * trans)
{
GstVideoDiff *videodiff = GST_VIDEO_DIFF (trans);
GST_DEBUG_OBJECT (videodiff, "start");
return TRUE;
}
static gboolean
gst_video_diff_stop (GstBaseTransform * trans)
{
GstVideoDiff *videodiff = GST_VIDEO_DIFF (trans);
GST_DEBUG_OBJECT (videodiff, "stop");
return TRUE;
}
static gboolean
gst_video_diff_set_info (GstVideoFilter * filter, GstCaps * incaps,
GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
GstVideoDiff *videodiff = GST_VIDEO_DIFF (filter);
GST_DEBUG_OBJECT (videodiff, "set_info");
return TRUE;
}
static GstFlowReturn
gst_video_diff_transform_frame_ip_planarY (GstVideoDiff * videodiff,
GstVideoFrame * outframe, GstVideoFrame * inframe, GstVideoFrame * oldframe)

View file

@ -62,14 +62,9 @@ static void gst_zebra_stripe_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_zebra_stripe_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_zebra_stripe_dispose (GObject * object);
static void gst_zebra_stripe_finalize (GObject * object);
static gboolean gst_zebra_stripe_start (GstBaseTransform * trans);
static gboolean gst_zebra_stripe_stop (GstBaseTransform * trans);
static gboolean gst_zebra_stripe_set_info (GstVideoFilter * filter,
GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
GstVideoInfo * out_info);
static GstFlowReturn gst_zebra_stripe_transform_frame_ip (GstVideoFilter *
filter, GstVideoFrame * frame);
@ -119,11 +114,8 @@ gst_zebra_stripe_class_init (GstZebraStripeClass * klass)
gobject_class->set_property = gst_zebra_stripe_set_property;
gobject_class->get_property = gst_zebra_stripe_get_property;
gobject_class->dispose = gst_zebra_stripe_dispose;
gobject_class->finalize = gst_zebra_stripe_finalize;
base_transform_class->start = GST_DEBUG_FUNCPTR (gst_zebra_stripe_start);
base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_zebra_stripe_stop);
video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_zebra_stripe_set_info);
video_filter_class->transform_frame_ip =
GST_DEBUG_FUNCPTR (gst_zebra_stripe_transform_frame_ip);
@ -177,36 +169,14 @@ gst_zebra_stripe_get_property (GObject * object, guint property_id,
}
}
void
gst_zebra_stripe_dispose (GObject * object)
{
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (object);
GST_DEBUG_OBJECT (zebrastripe, "dispose");
/* clean up as possible. may be called multiple times */
G_OBJECT_CLASS (gst_zebra_stripe_parent_class)->dispose (object);
}
void
gst_zebra_stripe_finalize (GObject * object)
{
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (object);
GST_DEBUG_OBJECT (zebrastripe, "finalize");
/* clean up object here */
G_OBJECT_CLASS (gst_zebra_stripe_parent_class)->finalize (object);
}
static gboolean
gst_zebra_stripe_start (GstBaseTransform * trans)
{
#ifndef GST_DISABLE_GST_DEBUG
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (trans);
GST_DEBUG_OBJECT (zebrastripe, "start");
#endif
if (GST_BASE_TRANSFORM_CLASS (gst_zebra_stripe_parent_class)->start)
return
@ -217,9 +187,11 @@ gst_zebra_stripe_start (GstBaseTransform * trans)
static gboolean
gst_zebra_stripe_stop (GstBaseTransform * trans)
{
#ifndef GST_DISABLE_GST_DEBUG
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (trans);
GST_DEBUG_OBJECT (zebrastripe, "stop");
#endif
if (GST_BASE_TRANSFORM_CLASS (gst_zebra_stripe_parent_class)->stop)
return
@ -227,17 +199,6 @@ gst_zebra_stripe_stop (GstBaseTransform * trans)
return TRUE;
}
static gboolean
gst_zebra_stripe_set_info (GstVideoFilter * filter, GstCaps * incaps,
GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
GstZebraStripe *zebrastripe = GST_ZEBRA_STRIPE (filter);
GST_DEBUG_OBJECT (zebrastripe, "set_info");
return TRUE;
}
static GstFlowReturn
gst_zebra_stripe_transform_frame_ip_planarY (GstZebraStripe * zebrastripe,
GstVideoFrame * frame)

View file

@ -427,7 +427,7 @@ gst_h264_parser_store_nal (GstH264Parse * h264parse, guint id,
store[id] = buf;
}
#ifndef GST_DISABLE_GST_DEBUG
static const gchar *nal_names[] = {
"Unknown",
"Slice",
@ -451,6 +451,7 @@ _nal_name (GstH264NalUnitType nal_type)
return nal_names[nal_type];
return "Invalid";
}
#endif
/* SPS/PPS/IDR considered key, all others DELTA;
* so downstream waiting for keyframe can pick up at SPS/PPS/IDR */
@ -1566,8 +1567,10 @@ gst_h264_parse_prepare_key_unit (GstH264Parse * parse, GstEvent * event)
{
GstClockTime running_time;
guint count;
#ifndef GST_DISABLE_GST_DEBUG
gboolean have_sps, have_pps;
gint i;
#endif
parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
gst_event_replace (&parse->force_key_unit_event, NULL);
@ -1580,6 +1583,7 @@ gst_h264_parse_prepare_key_unit (GstH264Parse * parse, GstEvent * event)
GST_TIME_ARGS (running_time), count);
gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
#ifndef GST_DISABLE_GST_DEBUG
have_sps = have_pps = FALSE;
for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
if (parse->sps_nals[i] != NULL) {
@ -1596,6 +1600,7 @@ gst_h264_parse_prepare_key_unit (GstH264Parse * parse, GstEvent * event)
GST_INFO_OBJECT (parse, "preparing key unit, have sps %d have pps %d",
have_sps, have_pps);
#endif
/* set push_codec to TRUE so that pre_push_frame sends SPS/PPS again */
parse->push_codec = TRUE;
@ -1781,7 +1786,10 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
(value = gst_structure_get_value (str, "codec_data"))) {
GstMapInfo map;
guint8 *data;
guint num_sps, num_pps, profile;
guint num_sps, num_pps;
#ifndef GST_DISABLE_GST_DEBUG
guint profile;
#endif
gint i;
GST_DEBUG_OBJECT (h264parse, "have packetized h264");
@ -1805,12 +1813,13 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
gst_buffer_unmap (codec_data, &map);
goto wrong_version;
}
#ifndef GST_DISABLE_GST_DEBUG
/* AVCProfileIndication */
/* profile_compat */
/* AVCLevelIndication */
profile = (data[1] << 16) | (data[2] << 8) | data[3];
GST_DEBUG_OBJECT (h264parse, "profile %06x", profile);
#endif
/* 6 bits reserved | 2 bits lengthSizeMinusOne */
/* this is the number of bytes in front of the NAL units to mark their

View file

@ -61,6 +61,7 @@ gst_h263_parse_get_params (H263Params * params, GstBuffer * buffer,
{1408, 1152}
};
#ifndef GST_DISABLE_GST_DEBUG
static const gchar *source_format_name[] = {
"Forbidden",
"sub-QCIF",
@ -71,6 +72,7 @@ gst_h263_parse_get_params (H263Params * params, GstBuffer * buffer,
"Reserved",
"Extended PType"
};
#endif
GstBitReader br;
GstMapInfo map;

View file

@ -119,6 +119,7 @@ static CamReturn
handle_application_info_reply (CamApplicationInfo * info,
CamSLSession * session, guint8 * buffer, guint length)
{
#ifndef GST_DISABLE_GST_DEBUG
guint8 type;
guint8 menu_length;
gchar menu[256];
@ -130,7 +131,7 @@ handle_application_info_reply (CamApplicationInfo * info,
menu[menu_length] = 0;
GST_INFO ("application info reply, type: %d, menu: %s", type, menu);
#endif
return CAM_RETURN_OK;
}

View file

@ -172,6 +172,7 @@ static CamReturn
handle_conditional_access_info_reply (CamConditionalAccess * cas,
CamSLSession * session, guint8 * buffer, guint length)
{
#ifndef GST_DISABLE_GST_DEBUG
int i;
guint16 cas_id;
@ -186,6 +187,7 @@ handle_conditional_access_info_reply (CamConditionalAccess * cas,
}
cas->ready = TRUE;
#endif
return CAM_RETURN_OK;
}
@ -194,6 +196,7 @@ static CamReturn
handle_conditional_access_pmt_reply (CamConditionalAccess * cas,
CamSLSession * session, guint8 * buffer, guint length)
{
#ifndef GST_DISABLE_GST_DEBUG
guint16 program_num;
guint8 version_num, current_next_indicator;
@ -226,6 +229,7 @@ handle_conditional_access_pmt_reply (CamConditionalAccess * cas,
length -= 3;
}
#endif
return CAM_RETURN_OK;
}

View file

@ -65,7 +65,9 @@ gst_vdp_decoder_render (GstVdpDecoder * vdp_decoder, VdpPictureInfo * info,
VdpStatus status;
GstVdpVideoMemory *vmem;
#ifndef GST_DISABLE_GST_DEBUG
GstClockTime before, after;
#endif
GST_DEBUG_OBJECT (vdp_decoder, "n_bufs:%d, frame:%d", n_bufs,
frame->system_frame_number);
@ -83,11 +85,15 @@ gst_vdp_decoder_render (GstVdpDecoder * vdp_decoder, VdpPictureInfo * info,
goto no_mem;
GST_DEBUG_OBJECT (vdp_decoder, "Calling VdpDecoderRender()");
#ifndef GST_DISABLE_GST_DEBUG
before = gst_util_get_timestamp ();
#endif
status =
vdp_decoder->device->vdp_decoder_render (vdp_decoder->decoder,
vmem->surface, info, n_bufs, bufs);
#ifndef GST_DISABLE_GST_DEBUG
after = gst_util_get_timestamp ();
#endif
if (status != VDP_STATUS_OK)
goto decode_error;

View file

@ -99,7 +99,9 @@ ensure_data (GstVdpVideoMemory * vmem)
{
VdpStatus vdp_stat;
GstVideoInfo *info = vmem->info;
#ifndef GST_DISABLE_GST_DEBUG
GstClockTime before, after;
#endif
if (g_atomic_int_add (&vmem->refcount, 1) > 1)
return TRUE;
@ -119,11 +121,15 @@ ensure_data (GstVdpVideoMemory * vmem)
vmem->destination_pitches[0],
vmem->destination_pitches[1], vmem->destination_pitches[2]);
#ifndef GST_DISABLE_GST_DEBUG
before = gst_util_get_timestamp ();
#endif
vdp_stat =
vmem->device->vdp_video_surface_get_bits_ycbcr (vmem->surface,
vmem->ycbcr_format, vmem->cached_data, vmem->destination_pitches);
#ifndef GST_DISABLE_GST_DEBUG
after = gst_util_get_timestamp ();
#endif
GST_CAT_WARNING (GST_CAT_PERFORMANCE, "Downloading took %" GST_TIME_FORMAT,
GST_TIME_ARGS (after - before));