Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git (synced 2024-12-23 00:36:51 +00:00)
ext/ffmpeg/gstffmpegcodecmap.c: Make type explicit.
Original commit message from CVS:

* ext/ffmpeg/gstffmpegcodecmap.c:
  Make type explicit.
* ext/ffmpeg/gstffmpegdec.c: (gst_ffmpegdec_release_buffer),
  (gst_ffmpegdec_frame), (gst_ffmpegdec_chain), (gst_ffmpegdec_change_state):
  When we provide a buffer and get a valid return value (data was read),
  but no output (have-data==0), then we need to reuse this same output
  buffer, because it may be used for caching output data. Fixes #307353.
* ext/ffmpeg/gstffmpegdemux.c: (gst_ffmpegdemux_loop):
  Timestamp fix.
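The fix boils down to a one-slot cache: when a decode call consumes input (len > 0) but produces no output (have_data == 0), the output buffer is parked in last_buffer and handed back to the next call instead of being freed, because the decoder may have written partial data into it. Below is a stand-alone sketch of that pattern with plain malloc'd buffers; decode_step, handle_input and OUT_SIZE are illustrative stand-ins, not gst-ffmpeg code, and the real change is in the gstffmpegdec.c hunks further down.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define OUT_SIZE 4096          /* stand-in for AVCODEC_MAX_AUDIO_FRAME_SIZE */

static unsigned char *last_buffer = NULL;   /* one-slot cache, like ffmpegdec->last_buffer */

/* Fake decode step: consumes all input, but only produces output every
 * second call (mimicking len > 0 with have_data == 0 on the first call). */
static int
decode_step (const unsigned char *in, int in_len, unsigned char *out, int *have_data)
{
  static int calls = 0;

  *have_data = (++calls % 2 == 0) ? in_len : 0;
  if (*have_data)
    memcpy (out, in, in_len);
  return in_len;               /* bytes of input consumed */
}

static void
handle_input (const unsigned char *in, int in_len)
{
  unsigned char *outbuf;
  int have_data = 0, len;

  /* Reuse the cached buffer if the previous call left data in it. */
  if (!last_buffer)
    outbuf = malloc (OUT_SIZE);
  else {
    outbuf = last_buffer;
    last_buffer = NULL;
  }

  len = decode_step (in, in_len, outbuf, &have_data);

  if (len >= 0 && have_data > 0) {
    printf ("pushing %d bytes\n", have_data);
    free (outbuf);             /* a real element would push the buffer downstream */
  } else if (len > 0 && have_data == 0) {
    last_buffer = outbuf;      /* keep it: the decoder may cache into it */
  } else {
    free (outbuf);
  }
}

int
main (void)
{
  const unsigned char data[] = "sample";

  handle_input (data, sizeof (data));  /* input consumed, nothing out: buffer cached */
  handle_input (data, sizeof (data));  /* same buffer reused, output produced */
  free (last_buffer);
  return 0;
}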
parent ada045509e
commit e24d6984e4

5 changed files with 38 additions and 9 deletions
ChangeLog (14 lines changed):

@@ -1,3 +1,17 @@
+2005-07-22  Ronald S. Bultje  <rbultje@ronald.bitfreak.net>
+
+	* ext/ffmpeg/gstffmpegcodecmap.c:
+	  Make type explicit.
+	* ext/ffmpeg/gstffmpegdec.c: (gst_ffmpegdec_release_buffer),
+	(gst_ffmpegdec_frame), (gst_ffmpegdec_chain),
+	(gst_ffmpegdec_change_state):
+	  When we provide a buffer and get a valid return value (data was
+	  read), but no output (have-data==0), then we need to reuse this
+	  same output buffer, because it may be used for caching output
+	  data. Fixes #307353.
+	* ext/ffmpeg/gstffmpegdemux.c: (gst_ffmpegdemux_loop):
+	  Timestamp fix.
+
 2005-07-20  Ronald S. Bultje  <rbultje@ronald.bitfreak.net>
 
 	* ext/ffmpeg/gstffmpeg.h:
common (2 lines changed):

@@ -1 +1 @@
-Subproject commit 6f9b691adc2a0300598311671dd7c4d9d2035afa
+Subproject commit 694de4dbf4827f372321f0634643a254d7edd986
ext/ffmpeg/gstffmpegcodecmap.c:

@@ -86,7 +86,7 @@ gst_ffmpeg_set_palette (GstCaps *caps, AVCodecContext *context)
     gst_caps_new_simple (mimetype, \
         "width", G_TYPE_INT, context->width, \
         "height", G_TYPE_INT, context->height, \
-        "framerate", G_TYPE_DOUBLE, 1. * \
+        "framerate", G_TYPE_DOUBLE, (double) 1. * \
            context->time_base.den / \
            context->time_base.num, \
         __VA_ARGS__, NULL) \
@@ -94,7 +94,8 @@ gst_ffmpeg_set_palette (GstCaps *caps, AVCodecContext *context)
     gst_caps_new_simple (mimetype, \
         "width", GST_TYPE_INT_RANGE, 16, 4096, \
         "height", GST_TYPE_INT_RANGE, 16, 4096, \
-        "framerate", GST_TYPE_DOUBLE_RANGE, 0., G_MAXDOUBLE, \
+        "framerate", GST_TYPE_DOUBLE_RANGE, (double) 0., \
+            G_MAXDOUBLE, \
         __VA_ARGS__, NULL)
 
 /* same for audio - now with channels/sample rate
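The casts above matter because gst_caps_new_simple() collects each field's value from the varargs according to the GType that precedes it, so a field declared as G_TYPE_DOUBLE (or GST_TYPE_DOUBLE_RANGE) must be passed genuine double values; the explicit (double) casts make that unmistakable. A stand-alone illustration of the underlying varargs hazard (collect() is an illustrative stand-in, not a GStreamer function):

#include <stdarg.h>
#include <stdio.h>

/* Reads one value per format character: 'i' -> int, 'd' -> double.
 * This mirrors how gst_caps_new_simple() collects each field's value
 * according to the GType that precedes it in the argument list. */
static void
collect (const char *fmt, ...)
{
  va_list ap;

  va_start (ap, fmt);
  for (; *fmt; fmt++) {
    if (*fmt == 'i')
      printf ("int:    %d\n", va_arg (ap, int));
    else if (*fmt == 'd')
      printf ("double: %f\n", va_arg (ap, double));
  }
  va_end (ap);
}

int
main (void)
{
  collect ("id", 320, 25.0);     /* OK: a real double is passed */
  /* collect ("id", 320, 25); */ /* undefined: an int would be read back as a
                                    double - the reason the caps macros make
                                    the type explicit */
  return 0;
}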
ext/ffmpeg/gstffmpegdec.c:

@@ -562,6 +562,7 @@ gst_ffmpegdec_release_buffer (AVCodecContext * context, AVFrame * picture)
   g_return_if_fail (buf != NULL);
   g_return_if_fail (picture->type == FF_BUFFER_TYPE_USER);
 
-  ffmpegdec->last_buffer = NULL;
+  if (buf == ffmpegdec->last_buffer)
+    ffmpegdec->last_buffer = NULL;
   gst_buffer_unref (buf);
 
@@ -665,7 +666,7 @@ gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec,
   GstFFMpegDecClass *oclass =
       (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
   GstBuffer *outbuf = NULL;
-  gint have_data, len = 0;
+  gint have_data = 0, len = 0;
 
   ffmpegdec->context->frame_number++;
 
@@ -690,6 +691,8 @@ gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec,
 
       if (ffmpegdec->picture->opaque != NULL) {
         outbuf = (GstBuffer *) ffmpegdec->picture->opaque;
+        if (outbuf == ffmpegdec->last_buffer)
+          ffmpegdec->last_buffer = NULL;
       } else {
         AVPicture pic;
         gint fsize = gst_ffmpeg_avpicture_get_size (ffmpegdec->context->pix_fmt,
@@ -778,14 +781,18 @@ gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec,
       break;
 
     case CODEC_TYPE_AUDIO:
-      outbuf = gst_buffer_new_and_alloc (AVCODEC_MAX_AUDIO_FRAME_SIZE);
+      if (!ffmpegdec->last_buffer)
+        outbuf = gst_buffer_new_and_alloc (AVCODEC_MAX_AUDIO_FRAME_SIZE);
+      else {
+        outbuf = ffmpegdec->last_buffer;
+        ffmpegdec->last_buffer = NULL;
+      }
       len = avcodec_decode_audio (ffmpegdec->context,
           (int16_t *) GST_BUFFER_DATA (outbuf), &have_data, data, size);
       GST_DEBUG_OBJECT (ffmpegdec,
          "Decode audio: len=%d, have_data=%d", len, have_data);
 
       if (len >= 0 && have_data > 0) {
 
         if (!gst_ffmpegdec_negotiate (ffmpegdec)) {
           gst_buffer_unref (outbuf);
           return -1;
@@ -801,6 +808,9 @@ gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec,
             ffmpegdec->context->sample_rate);
         ffmpegdec->next_ts += GST_BUFFER_DURATION (outbuf);
         *in_ts += GST_BUFFER_DURATION (outbuf);
+      } else if (len > 0 && have_data == 0) {
+        /* cache output, because it may be used for caching (in-place) */
+        ffmpegdec->last_buffer = outbuf;
       } else {
         gst_buffer_unref (outbuf);
       }
@@ -1006,6 +1016,8 @@ gst_ffmpegdec_chain (GstPad * pad, GstData * _data)
 
       ffmpegdec->pcache = gst_buffer_create_sub (inbuf,
          GST_BUFFER_SIZE (inbuf) - bsize, bsize);
+    } else if (bsize > 0) {
+      GST_DEBUG_OBJECT (ffmpegdec, "Dropping %d bytes of data", bsize);
     }
     gst_buffer_unref (inbuf);
   }
@@ -1021,6 +1033,7 @@ gst_ffmpegdec_change_state (GstElement * element)
       gst_ffmpegdec_close (ffmpegdec);
       if (ffmpegdec->last_buffer != NULL) {
         gst_buffer_unref (ffmpegdec->last_buffer);
+        ffmpegdec->last_buffer = NULL;
       }
       break;
   }
ext/ffmpeg/gstffmpegdemux.c:

@@ -656,8 +656,9 @@ gst_ffmpegdemux_loop (GstElement * element)
     GST_BUFFER_SIZE (outbuf) = pkt.size;
 
     if (pkt.pts != AV_NOPTS_VALUE) {
-      GST_BUFFER_TIMESTAMP (outbuf) = (GstClockTime) (pkt.pts +
-          stream->start_time) * GST_SECOND / AV_TIME_BASE;
+      AVRational bq = { 1, GST_SECOND };
+      GST_BUFFER_TIMESTAMP (outbuf) = av_rescale_q (pkt.pts,
+          demux->context->streams[pkt.stream_index]->time_base, bq);
       demux->last_ts[stream->index] = GST_BUFFER_TIMESTAMP (outbuf);
     }
 
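The timestamp fix stops treating pkt.pts as if it were expressed in AV_TIME_BASE units and instead rescales it from the stream's own time_base into nanoseconds (GST_SECOND units) with av_rescale_q(), which performs the multiply/divide with 64-bit intermediates. A stand-alone illustration of that conversion (the 90 kHz time base and NSEC_PER_SEC constant are made-up example values; only AVRational and av_rescale_q() come from libavutil):

#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>
#include <libavutil/mathematics.h>   /* av_rescale_q(), AVRational */

#define NSEC_PER_SEC 1000000000      /* what GST_SECOND expands to */

int
main (void)
{
  /* An MPEG-TS style stream clock: pts counted in 1/90000 s units. */
  AVRational stream_tb = { 1, 90000 };
  AVRational gst_tb = { 1, NSEC_PER_SEC };
  int64_t pts = 90000;               /* i.e. exactly one second */

  /* pts * stream_tb / gst_tb, computed without overflowing 64 bits */
  int64_t ns = av_rescale_q (pts, stream_tb, gst_tb);

  printf ("%" PRId64 " ticks -> %" PRId64 " ns\n", pts, ns);
  return 0;
}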