ext/ffmpeg/gstffmpegdec.c: Use caps to clip output images. Fixes #341736

Original commit message from CVS:

	* ext/ffmpeg/gstffmpegdec.c: (gst_ffmpegdec_open),
	(gst_ffmpegdec_setcaps), (gst_ffmpegdec_negotiate),
	(get_output_buffer), (gst_ffmpegdec_video_frame),
	(clip_audio_buffer), (gst_ffmpegdec_audio_frame),
	(gst_ffmpegdec_frame), (gst_ffmpegdec_chain),
	(gst_ffmpegdec_register):
	Use caps to clip output images. Fixes #341736

commit 421a02db40
parent 672f4e6f54
2 changed files with 108 additions and 62 deletions
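In short: gst_ffmpegdec_setcaps() now records the width/height advertised in the sink caps as a clip region (clip_width/clip_height, -1 when absent), and gst_ffmpegdec_negotiate()/get_output_buffer() use that region instead of the codec context's coded size when announcing and allocating output buffers. The following is only a minimal sketch of that fallback logic: the DecState struct and both helper names are invented for illustration, while gst_structure_get_int() is the real GStreamer call the patch relies on.

/* Sketch of the caps-based clipping idea from this commit (not the
 * plugin's actual code). Only gst_structure_get_int() is real API here. */
#include <gst/gst.h>

typedef struct {
  gint coded_width, coded_height;   /* what the decoder produces */
  gint clip_width, clip_height;     /* what the caps asked for, -1 = unset */
} DecState;

/* setcaps side: remember the display size advertised in the sink caps.
 * gst_structure_get_int() leaves the value untouched when the field is
 * missing, hence the -1 pre-initialisation. */
static void
remember_clip_region (DecState * st, GstStructure * s)
{
  st->clip_width = -1;
  st->clip_height = -1;
  gst_structure_get_int (s, "width", &st->clip_width);
  gst_structure_get_int (s, "height", &st->clip_height);
}

/* output side: clip to the caps size when known, else use the coded size */
static void
output_size (const DecState * st, gint * width, gint * height)
{
  *width = (st->clip_width != -1) ? st->clip_width : st->coded_width;
  *height = (st->clip_height != -1) ? st->clip_height : st->coded_height;
}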
ChangeLog (10 changes)

@@ -1,3 +1,13 @@
+2007-01-09 Wim Taymans <wim@fluendo.com>
+
+	* ext/ffmpeg/gstffmpegdec.c: (gst_ffmpegdec_open),
+	(gst_ffmpegdec_setcaps), (gst_ffmpegdec_negotiate),
+	(get_output_buffer), (gst_ffmpegdec_video_frame),
+	(clip_audio_buffer), (gst_ffmpegdec_audio_frame),
+	(gst_ffmpegdec_frame), (gst_ffmpegdec_chain),
+	(gst_ffmpegdec_register):
+	Use caps to clip output images. Fixes #341736
+
 2007-01-08 Edward Hervey <edward@fluendo.com>
 
 	* ext/ffmpeg/gstffmpegenc.c: (gst_ffmpegenc_me_method_get_type):
ext/ffmpeg/gstffmpegdec.c

@@ -56,6 +56,7 @@ struct _GstFFMpegDec
     struct
     {
       gint width, height;
+      gint clip_width, clip_height;
       gint fps_n, fps_d;
       gint old_fps_n, old_fps_d;
 
@@ -496,9 +497,8 @@ gst_ffmpegdec_open (GstFFMpegDec * ffmpegdec)
       if (ffmpegdec->context->extradata_size == 0) {
         GST_LOG_OBJECT (ffmpegdec, "H264 with no extradata, creating parser");
         ffmpegdec->pctx = av_parser_init (oclass->in_plugin->id);
-      }
-      else {
+      } else {
         GST_LOG_OBJECT (ffmpegdec,
             "H264 with extradata implies framed data - not using parser");
         ffmpegdec->pctx = NULL;
       }
@@ -513,6 +513,8 @@ gst_ffmpegdec_open (GstFFMpegDec * ffmpegdec)
     case CODEC_TYPE_VIDEO:
       ffmpegdec->format.video.width = 0;
       ffmpegdec->format.video.height = 0;
+      ffmpegdec->format.video.clip_width = -1;
+      ffmpegdec->format.video.clip_height = -1;
       ffmpegdec->format.video.pix_fmt = PIX_FMT_NB;
       break;
     case CODEC_TYPE_AUDIO:
@@ -621,6 +623,12 @@ gst_ffmpegdec_setcaps (GstPad * pad, GstCaps * caps)
   if (!gst_ffmpegdec_open (ffmpegdec))
     goto open_failed;
 
+  /* clipping region */
+  gst_structure_get_int (structure, "width",
+      &ffmpegdec->format.video.clip_width);
+  gst_structure_get_int (structure, "height",
+      &ffmpegdec->format.video.clip_height);
+
 done:
   GST_OBJECT_UNLOCK (ffmpegdec);
 
@@ -861,13 +869,23 @@ gst_ffmpegdec_negotiate (GstFFMpegDec * ffmpegdec)
   }
 
   caps = gst_ffmpeg_codectype_to_caps (oclass->in_plugin->type,
-      ffmpegdec->context);
+      ffmpegdec->context, oclass->in_plugin->id);
 
   if (caps == NULL)
     goto no_caps;
 
   switch (oclass->in_plugin->type) {
     case CODEC_TYPE_VIDEO:
+    {
+      gint width, height;
+
+      width = ffmpegdec->format.video.clip_width;
+      height = ffmpegdec->format.video.clip_height;
+
+      if (width != -1 && height != -1) {
+        gst_caps_set_simple (caps,
+            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
+      }
       /* If a demuxer provided a framerate then use it (#313970) */
       if (ffmpegdec->format.video.fps_n != -1) {
         gst_caps_set_simple (caps, "framerate",
@@ -877,6 +895,7 @@ gst_ffmpegdec_negotiate (GstFFMpegDec * ffmpegdec)
       gst_ffmpegdec_add_pixel_aspect_ratio (ffmpegdec,
           gst_caps_get_structure (caps, 0));
       break;
+    }
     case CODEC_TYPE_AUDIO:
     {
       break;
@@ -1113,13 +1132,20 @@ get_output_buffer (GstFFMpegDec * ffmpegdec, GstBuffer ** outbuf)
   } else {
     AVPicture pic;
     gint fsize;
+    gint width, height;
+
     /* see if we need renegotiation */
     if (G_UNLIKELY (!gst_ffmpegdec_negotiate (ffmpegdec)))
       goto negotiate_failed;
 
+    /* figure out size of output buffer */
+    if ((width = ffmpegdec->format.video.clip_width) == -1)
+      width = ffmpegdec->context->width;
+    if ((height = ffmpegdec->format.video.clip_height) == -1)
+      height = ffmpegdec->context->height;
+
     fsize = gst_ffmpeg_avpicture_get_size (ffmpegdec->context->pix_fmt,
-        ffmpegdec->context->width, ffmpegdec->context->height);
+        width, height);
 
     if (!ffmpegdec->context->palctrl) {
       ret = gst_pad_alloc_buffer_and_set_caps (ffmpegdec->srcpad,
@@ -1141,16 +1167,14 @@ get_output_buffer (GstFFMpegDec * ffmpegdec, GstBuffer ** outbuf)
       /* original ffmpeg code does not handle odd sizes correctly.
        * This patched up version does */
       gst_ffmpeg_avpicture_fill (&pic, GST_BUFFER_DATA (*outbuf),
-          ffmpegdec->context->pix_fmt,
-          ffmpegdec->context->width, ffmpegdec->context->height);
+          ffmpegdec->context->pix_fmt, width, height);
 
       /* the original convert function did not do the right thing, this
        * is a patched up version that adjust widht/height so that the
        * ffmpeg one works correctly. */
       gst_ffmpeg_img_convert (&pic, ffmpegdec->context->pix_fmt,
           (AVPicture *) ffmpegdec->picture,
-          ffmpegdec->context->pix_fmt,
-          ffmpegdec->context->width, ffmpegdec->context->height);
+          ffmpegdec->context->pix_fmt, width, height);
     }
     return ret;
 
@@ -1183,7 +1207,7 @@ alloc_failed:
 static gint
 gst_ffmpegdec_video_frame (GstFFMpegDec * ffmpegdec,
     guint8 * data, guint size,
     GstClockTime in_timestamp, GstClockTime in_duration,
     GstBuffer ** outbuf, GstFlowReturn * ret)
 {
   gint len = -1;
@@ -1198,7 +1222,8 @@ gst_ffmpegdec_video_frame (GstFFMpegDec * ffmpegdec,
 
   /* run QoS code, returns FALSE if we can skip decoding this
    * frame entirely. */
-  if (G_UNLIKELY (!gst_ffmpegdec_do_qos (ffmpegdec, in_timestamp, &mode_switch)))
+  if (G_UNLIKELY (!gst_ffmpegdec_do_qos (ffmpegdec, in_timestamp,
+              &mode_switch)))
     goto drop_qos;
 
   /* in case we skip frames */
@@ -1220,9 +1245,12 @@ gst_ffmpegdec_video_frame (GstFFMpegDec * ffmpegdec,
   if (len < 0 || have_data <= 0)
     goto beach;
 
-  GST_DEBUG_OBJECT (ffmpegdec, "picture: pts %"G_GUINT64_FORMAT, ffmpegdec->picture->pts);
-  GST_DEBUG_OBJECT (ffmpegdec, "picture: num %d", ffmpegdec->picture->coded_picture_number);
-  GST_DEBUG_OBJECT (ffmpegdec, "picture: display %d", ffmpegdec->picture->display_picture_number);
+  GST_DEBUG_OBJECT (ffmpegdec, "picture: pts %" G_GUINT64_FORMAT,
+      ffmpegdec->picture->pts);
+  GST_DEBUG_OBJECT (ffmpegdec, "picture: num %d",
+      ffmpegdec->picture->coded_picture_number);
+  GST_DEBUG_OBJECT (ffmpegdec, "picture: display %d",
+      ffmpegdec->picture->display_picture_number);
 
   /* check if we are dealing with a keyframe here */
   iskeyframe = check_keyframe (ffmpegdec);
@@ -1254,10 +1282,11 @@ gst_ffmpegdec_video_frame (GstFFMpegDec * ffmpegdec,
    * files it returns the same timestamp twice. Leaving the code here for when
    * the parsers are improved in ffmpeg. */
   if (ffmpegdec->pctx) {
-    GST_DEBUG_OBJECT (ffmpegdec, "picture: ffpts %"G_GUINT64_FORMAT, ffmpegdec->pctx->pts);
+    GST_DEBUG_OBJECT (ffmpegdec, "picture: ffpts %" G_GUINT64_FORMAT,
+        ffmpegdec->pctx->pts);
     if (ffmpegdec->pctx->pts != AV_NOPTS_VALUE) {
       in_timestamp = gst_ffmpeg_time_ff_to_gst (ffmpegdec->pctx->pts,
           ffmpegdec->context->time_base);
     }
   }
 #endif
|
@ -1310,7 +1339,8 @@ gst_ffmpegdec_video_frame (GstFFMpegDec * ffmpegdec,
|
||||||
GST_BUFFER_SIZE (*outbuf) -= AVPALETTE_SIZE;
|
GST_BUFFER_SIZE (*outbuf) -= AVPALETTE_SIZE;
|
||||||
|
|
||||||
/* now see if we need to clip the buffer against the segment boundaries. */
|
/* now see if we need to clip the buffer against the segment boundaries. */
|
||||||
if (G_UNLIKELY (!clip_video_buffer (ffmpegdec, *outbuf, in_timestamp, in_duration)))
|
if (G_UNLIKELY (!clip_video_buffer (ffmpegdec, *outbuf, in_timestamp,
|
||||||
|
in_duration)))
|
||||||
goto clipped;
|
goto clipped;
|
||||||
|
|
||||||
/* mark as keyframe or delta unit */
|
/* mark as keyframe or delta unit */
|
||||||
|
@@ -1375,7 +1405,7 @@ clip_audio_buffer (GstFFMpegDec * dec, GstBuffer * buf, GstClockTime in_ts,
   stop = in_ts + in_dur;
 
   res = gst_segment_clip (&dec->segment, GST_FORMAT_TIME, in_ts, stop, &ctime,
       &cstop);
   if (G_UNLIKELY (!res))
     goto out_of_segment;
 
@@ -1421,15 +1451,16 @@ out_of_segment:
 static gint
 gst_ffmpegdec_audio_frame (GstFFMpegDec * ffmpegdec,
     guint8 * data, guint size,
     GstClockTime in_timestamp, GstClockTime in_duration,
     GstBuffer ** outbuf, GstFlowReturn * ret)
 {
   gint len = -1;
   gint have_data;
 
   GST_DEBUG_OBJECT (ffmpegdec,
-      "size:%d, ts:%" GST_TIME_FORMAT ", dur:%"GST_TIME_FORMAT", ffmpegdec->next_ts:%"
-      GST_TIME_FORMAT, size, GST_TIME_ARGS (in_timestamp), GST_TIME_ARGS (in_duration),
+      "size:%d, ts:%" GST_TIME_FORMAT ", dur:%" GST_TIME_FORMAT
+      ", ffmpegdec->next_ts:%" GST_TIME_FORMAT, size,
+      GST_TIME_ARGS (in_timestamp), GST_TIME_ARGS (in_duration),
       GST_TIME_ARGS (ffmpegdec->next_ts));
 
   /* outgoing buffer */
@@ -1488,7 +1519,8 @@ gst_ffmpegdec_audio_frame (GstFFMpegDec * ffmpegdec,
     ffmpegdec->next_ts = in_timestamp + in_duration;
 
     /* now see if we need to clip the buffer against the segment boundaries. */
-    if (G_UNLIKELY (!clip_audio_buffer (ffmpegdec, *outbuf, in_timestamp, in_duration)))
+    if (G_UNLIKELY (!clip_audio_buffer (ffmpegdec, *outbuf, in_timestamp,
+                in_duration)))
       goto clipped;
 
   } else if (len > 0 && have_data == 0) {
@@ -1533,8 +1565,7 @@ clipped:
 static gint
 gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec,
     guint8 * data, guint size, gint * got_data,
-    GstClockTime in_timestamp, GstClockTime in_duration,
-    GstFlowReturn * ret)
+    GstClockTime in_timestamp, GstClockTime in_duration, GstFlowReturn * ret)
 {
   GstFFMpegDecClass *oclass;
   GstBuffer *outbuf = NULL;
@@ -1544,7 +1575,7 @@ gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec,
     goto no_codec;
 
   GST_LOG_OBJECT (ffmpegdec,
-      "data:%p, size:%d, ts:%" GST_TIME_FORMAT", dur:%"GST_TIME_FORMAT,
+      "data:%p, size:%d, ts:%" GST_TIME_FORMAT ", dur:%" GST_TIME_FORMAT,
       data, size, GST_TIME_ARGS (in_timestamp), GST_TIME_ARGS (in_duration));
 
   *ret = GST_FLOW_OK;
@@ -1555,13 +1586,13 @@ gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec,
   switch (oclass->in_plugin->type) {
     case CODEC_TYPE_VIDEO:
       len =
-          gst_ffmpegdec_video_frame (ffmpegdec, data, size, in_timestamp, in_duration, &outbuf,
-          ret);
+          gst_ffmpegdec_video_frame (ffmpegdec, data, size, in_timestamp,
+          in_duration, &outbuf, ret);
       break;
     case CODEC_TYPE_AUDIO:
       len =
-          gst_ffmpegdec_audio_frame (ffmpegdec, data, size, in_timestamp, in_duration, &outbuf,
-          ret);
+          gst_ffmpegdec_audio_frame (ffmpegdec, data, size, in_timestamp,
+          in_duration, &outbuf, ret);
       break;
     default:
       g_assert_not_reached ();
@@ -1582,7 +1613,7 @@ gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec,
     goto beach;
   } else {
     /* this is where I lost my last clue on ffmpeg... */
     *got_data = 1;
   }
 
   if (outbuf) {
@@ -1809,7 +1840,7 @@ gst_ffmpegdec_chain (GstPad * pad, GstBuffer * inbuf)
    * source for this as it might not even be parsed (UDP/file/..). */
   if (G_UNLIKELY (ffmpegdec->waiting_for_key)) {
     if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DELTA_UNIT) &&
         oclass->in_plugin->type != CODEC_TYPE_AUDIO)
       goto skip_keyframe;
 
     GST_DEBUG_OBJECT (ffmpegdec, "got keyframe");
@@ -1817,11 +1848,12 @@ gst_ffmpegdec_chain (GstPad * pad, GstBuffer * inbuf)
   }
 
   pending_timestamp = GST_BUFFER_TIMESTAMP (inbuf);
   pending_duration = GST_BUFFER_DURATION (inbuf);
 
   GST_LOG_OBJECT (ffmpegdec,
-      "Received new data of size %d, ts:%" GST_TIME_FORMAT ", dur:%"GST_TIME_FORMAT,
-      GST_BUFFER_SIZE (inbuf), GST_TIME_ARGS (pending_timestamp), GST_TIME_ARGS (pending_duration));
+      "Received new data of size %d, ts:%" GST_TIME_FORMAT ", dur:%"
+      GST_TIME_FORMAT, GST_BUFFER_SIZE (inbuf),
+      GST_TIME_ARGS (pending_timestamp), GST_TIME_ARGS (pending_duration));
 
   /* parse cache joining. If there is cached data, its timestamp will be what we
    * send to the parse. */
@@ -1832,7 +1864,7 @@ gst_ffmpegdec_chain (GstPad * pad, GstBuffer * inbuf)
 
     /* use timestamp and duration of what is in the cache */
    in_timestamp = GST_BUFFER_TIMESTAMP (ffmpegdec->pcache);
    in_duration = GST_BUFFER_DURATION (ffmpegdec->pcache);
 
    /* join with previous data */
    inbuf = gst_buffer_join (ffmpegdec->pcache, inbuf);
@@ -1843,12 +1875,11 @@ gst_ffmpegdec_chain (GstPad * pad, GstBuffer * inbuf)
 
     /* no more cached data, we assume we can consume the complete cache */
     ffmpegdec->pcache = NULL;
-  }
-  else {
+  } else {
     /* no cache, input timestamp matches the buffer we try to decode */
     left = 0;
     in_timestamp = pending_timestamp;
     in_duration = pending_duration;
   }
 
   /* workarounds, functions write to buffers:
@@ -1870,18 +1901,20 @@ gst_ffmpegdec_chain (GstPad * pad, GstBuffer * inbuf)
       gint64 ffpts;
 
       /* convert timestamp to ffmpeg timestamp */
-      ffpts = gst_ffmpeg_time_gst_to_ff (in_timestamp, ffmpegdec->context->time_base);
+      ffpts =
+          gst_ffmpeg_time_gst_to_ff (in_timestamp,
+          ffmpegdec->context->time_base);
 
       GST_LOG_OBJECT (ffmpegdec,
-          "Calling av_parser_parse with ts:%" GST_TIME_FORMAT", ffpts:%"G_GINT64_FORMAT,
-          GST_TIME_ARGS (in_timestamp), ffpts);
+          "Calling av_parser_parse with ts:%" GST_TIME_FORMAT ", ffpts:%"
+          G_GINT64_FORMAT, GST_TIME_ARGS (in_timestamp), ffpts);
 
       /* feed the parser */
       res = av_parser_parse (ffmpegdec->pctx, ffmpegdec->context,
           &data, &size, bdata, bsize, ffpts, ffpts);
 
       GST_LOG_OBJECT (ffmpegdec,
           "parser returned res %d and size %d", res, size);
 
       GST_LOG_OBJECT (ffmpegdec, "consuming %d bytes. Next ts at %d, ffpts:%"
           G_GINT64_FORMAT, size, left, ffmpegdec->pctx->pts);
@@ -1893,36 +1926,36 @@ gst_ffmpegdec_chain (GstPad * pad, GstBuffer * inbuf)
       /* if there is no output, we must break and wait for more data. also the
        * timestamp in the context is not updated. */
       if (size == 0) {
-        if(bsize>0)
+        if (bsize > 0)
           continue;
         else
          break;
       }
 
       if (left <= size) {
         left = 0;
         /* activate the pending timestamp/duration and mark it invalid */
         next_timestamp = pending_timestamp;
         next_duration = pending_duration;
 
         pending_timestamp = GST_CLOCK_TIME_NONE;
         pending_duration = GST_CLOCK_TIME_NONE;
 
-        GST_LOG_OBJECT (ffmpegdec, "activated ts:%" GST_TIME_FORMAT", dur:%"GST_TIME_FORMAT,
-            GST_TIME_ARGS (next_timestamp), GST_TIME_ARGS (next_duration));
-      }
-      else {
+        GST_LOG_OBJECT (ffmpegdec,
+            "activated ts:%" GST_TIME_FORMAT ", dur:%" GST_TIME_FORMAT,
+            GST_TIME_ARGS (next_timestamp), GST_TIME_ARGS (next_duration));
+      } else {
         left -= size;
         /* get new timestamp from the parser, this could be interpolated by the
          * parser. We lost track of duration here. */
         next_timestamp = gst_ffmpeg_time_ff_to_gst (ffmpegdec->pctx->pts,
             ffmpegdec->context->time_base);
         next_duration = GST_CLOCK_TIME_NONE;
-        GST_LOG_OBJECT (ffmpegdec, "parse context next ts:%" GST_TIME_FORMAT", ffpts:%"G_GINT64_FORMAT,
-            GST_TIME_ARGS (next_timestamp), ffpts);
+        GST_LOG_OBJECT (ffmpegdec,
+            "parse context next ts:%" GST_TIME_FORMAT ", ffpts:%"
+            G_GINT64_FORMAT, GST_TIME_ARGS (next_timestamp), ffpts);
       }
-    }
-    else {
+    } else {
       data = bdata;
       size = bsize;
       /* after decoding this input buffer, we don't know the timestamp anymore
@@ -1933,7 +1966,9 @@ gst_ffmpegdec_chain (GstPad * pad, GstBuffer * inbuf)
     }
 
     /* decode a frame of audio/video now */
-    len = gst_ffmpegdec_frame (ffmpegdec, data, size, &have_data, in_timestamp, in_duration, &ret);
+    len =
+        gst_ffmpegdec_frame (ffmpegdec, data, size, &have_data, in_timestamp,
+        in_duration, &ret);
     if (len < 0 || ret != GST_FLOW_OK)
       break;
 
@@ -2104,7 +2139,8 @@ gst_ffmpegdec_register (GstPlugin * plugin)
     if (in_plugin->type == CODEC_TYPE_VIDEO) {
       srccaps = gst_caps_from_string ("video/x-raw-rgb; video/x-raw-yuv");
     } else {
-      srccaps = gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL);
+      srccaps =
+          gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL, in_plugin->id);
     }
     if (!sinkcaps || !srccaps)
       goto next;
 
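For quick reference, the negotiation-side change amounts to overriding the width/height of the freshly generated source caps whenever a clip region is known. A minimal sketch follows, assuming the caps are writable and that width/height carry the decoder's clip_width/clip_height state (-1 meaning unset); the helper name is illustrative, while gst_caps_set_simple() is the real call used in the patch.

/* Sketch: apply a known clip region to generated source caps. */
#include <gst/gst.h>

static void
apply_clip_to_src_caps (GstCaps * caps, gint width, gint height)
{
  /* only touch the caps when the sink caps actually advertised a size */
  if (width != -1 && height != -1) {
    gst_caps_set_simple (caps,
        "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
  }
}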