Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git
Better error recovery in the MPEG1 decoder.
Original commit message from CVS: Better error recovery in the MPEG1 decoder. Removed the MPEG2 stuff from the MPEG1 decoder and made it ignore MPEG2 streams. Added some extra framerates in the MPEG2 decoder (from libmpeg2). Rounded the image size up to the nearest multiple of 16 (MPEG2 decoder).
This commit is contained in:
parent 9920b88c3f
commit 2ab64206bb
6 changed files with 47 additions and 29 deletions
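Note that the decoder changes named in the commit message (error recovery, the extra frame rates, the size rounding) are not among the hunks shown below. As a rough illustration of the last two points, here is a minimal C sketch: the table follows the standard MPEG-2 frame_rate_code values that libmpeg2 also uses, the rounding is the usual macroblock-alignment trick, and the names are illustrative rather than the decoder's actual symbols.

/* MPEG-2 frame_rate_code -> frames per second; code 0 is forbidden,
   codes 9-15 are reserved.  (Illustrative table, not the decoder's own.) */
static const double frame_rates[9] = {
  0.0,                 /* forbidden */
  24000.0 / 1001.0,    /* 23.976 */
  24.0, 25.0,
  30000.0 / 1001.0,    /* 29.97 */
  30.0, 50.0,
  60000.0 / 1001.0,    /* 59.94 */
  60.0
};

/* Round a picture dimension up to the next multiple of 16 (one macroblock),
   so e.g. a coded width of 350 becomes a 352-pixel-wide buffer. */
static int round_up_16(int size)
{
  return (size + 15) & ~15;
}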
@@ -224,15 +224,16 @@ void gst_audiosink_chain(GstPad *pad,GstBuffer *buf) {
   //g_print("audiosink: writing to soundcard\n");
   if (audiosink->fd > 2) {
     if (!audiosink->mute) {
-      if (gst_clock_current_diff(audiosink->clock, GST_BUFFER_TIMESTAMP(buf)) > 500000) {
-      }
-      else {
+      //if (gst_clock_current_diff(audiosink->clock, GST_BUFFER_TIMESTAMP(buf)) > 500000) {
+      //}
+      //else {
       gst_clock_wait(audiosink->clock, GST_BUFFER_TIMESTAMP(buf), GST_OBJECT(audiosink));
       ioctl(audiosink->fd,SNDCTL_DSP_GETOSPACE,&ospace);
       DEBUG("audiosink: (%d bytes buffer)\n", ospace.bytes);
       write(audiosink->fd,GST_BUFFER_DATA(buf),GST_BUFFER_SIZE(buf));
+      //}
       //gst_clock_set(audiosink->clock, GST_BUFFER_TIMESTAMP(buf));
-      }
+      //}
     }
   }
 }
@@ -126,7 +126,8 @@ void gst_clock_wait(GstClock *clock, GstClockTime time, GstObject *obj) {
   if (!tfnow.tv_sec) {
     select(0, NULL, NULL, NULL, &tfnow);
   }
-  else fprintf(stderr, "gst_clock: waiting %u %llu %llu %llu seconds\n", (int)tfnow.tv_sec, now, diff, time);
+  else printf("gst_clock: %s waiting %u %llu %llu %llu seconds\n", gst_element_get_name(GST_ELEMENT(obj)),
+              (int)tfnow.tv_sec, now, diff, time);
   //DEBUG("gst_clock: 0x%p waiting for time %llu %llu %lld %llu\n", obj, time, target, diff, now);
   //DEBUG("waiting %d.%08d\n",tfnow.tv_sec, tfnow.tv_usec);
   //DEBUG("gst_clock: 0x%p waiting done time %llu \n", obj, time);
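For context on the hunk above: when the remaining time to the target timestamp is under a second, the code sleeps by calling select() with no file descriptors and the remaining time as the timeout; otherwise it only prints a diagnostic. A self-contained sketch of that sleep pattern (the function name and surrounding logic are illustrative, not the actual gstclock code):

#include <stdio.h>
#include <sys/select.h>
#include <sys/time.h>

/* Sleep until 'target' (an absolute gettimeofday-style time), assuming it is
   less than one second away; otherwise just report how far off it is. */
static void wait_until(struct timeval target)
{
  struct timeval now, diff;

  gettimeofday(&now, NULL);
  timersub(&target, &now, &diff);        /* diff = target - now */
  if (diff.tv_sec < 0)
    return;                              /* target has already passed */
  if (diff.tv_sec == 0)
    select(0, NULL, NULL, NULL, &diff);  /* no fds: behaves as a sub-second sleep */
  else
    fprintf(stderr, "target is %ld seconds away\n", (long)diff.tv_sec);
}

Using select() this way avoids busy-waiting while keeping microsecond granularity, which is why the code above only warns when the wait would be a full second or more.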
@@ -253,9 +253,10 @@ static gboolean gst_thread_change_state(GstElement *element,
       GST_FLAG_UNSET(thread,GST_THREAD_STATE_SPINNING);
       GST_FLAG_SET(thread,GST_THREAD_STATE_REAPING);
       gst_thread_signal_thread(thread);
-      pthread_join(thread->thread_id,0);
+      //pthread_join(thread->thread_id,0);
       /* tear down the internal state */
-      gst_info("gstthread: tearing down thread's iteration state\n");
+      gst_info("gstthread: tearing down thread's \"%s\" iteration state\n",
+               gst_element_get_name(GST_ELEMENT(element)));
       /* FIXME do stuff */
       break;
     case GST_STATE_PLAYING:
@@ -303,6 +304,7 @@ void *gst_thread_main_loop(void *arg) {
   }

   GST_FLAG_UNSET(thread,GST_THREAD_STATE_REAPING);
+  //pthread_join(thread->thread_id,0);

   gst_info("gstthread: thread \"%s\" is stopped\n",
            gst_element_get_name(GST_ELEMENT(thread)));
@@ -95,6 +95,23 @@ void mute_audio(gboolean mute) {
   gtk_object_set(GTK_OBJECT(audio_play),"mute",mute,NULL);
 }

+gint delete_event(GtkWidget *widget, GdkEvent *event, gpointer data)
+{
+  gdk_threads_leave();
+  g_print("setting to ~PLAYING state\n");
+  gst_element_set_state(GST_ELEMENT(pipeline),~GST_STATE_PLAYING);
+  g_print("setting to ~RUNNING state\n");
+  gst_element_set_state(GST_ELEMENT(pipeline),~GST_STATE_RUNNING);
+  gdk_threads_enter();
+  return FALSE;
+}
+
+void destroy(GtkWidget *widget, gpointer data)
+{
+
+  gtk_main_quit();
+}
+
 void gstplay_parse_state_changed(GstElement *element, gint state, gpointer data)
 {
   printf("gstplay: element \"%s\" state changed %d\n", gst_element_get_name(element), state);
@@ -221,9 +238,14 @@ main (int argc, char *argv[])
   g_return_val_if_fail(video_render_thread != NULL, -1);
   show = gst_elementfactory_make("videosink","show");
   g_return_val_if_fail(show != NULL, -1);
-  gtk_object_set(GTK_OBJECT(show),"xv_enabled",FALSE,NULL);
+  //gtk_object_set(GTK_OBJECT(show),"xv_enabled",FALSE,NULL);
   window1 = create_window1 (gst_util_get_widget_arg(GTK_OBJECT(show),"widget"));
   gtk_widget_show (window1);
+  gtk_signal_connect(GTK_OBJECT(window1),"delete_event",
+                     GTK_SIGNAL_FUNC(delete_event),NULL);
+  gtk_signal_connect(GTK_OBJECT(window1),"destroy",
+                     GTK_SIGNAL_FUNC(destroy),pipeline);
+
   gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
                      GTK_SIGNAL_FUNC(frame_displayed),NULL);
   gst_bin_add(GST_BIN(video_render_thread),GST_ELEMENT(show));
@@ -1,5 +1,5 @@

-#define BUFFER 15
+#define BUFFER 20
 #define VIDEO_DECODER "mpeg2play"

 #ifdef HAVE_CONFIG_H
@@ -13,7 +13,7 @@


 extern gboolean _gst_plugin_spew;
-extern GstElement *show, *audio_play;
+extern GstElement *video_render_queue, *audio_render_queue;

 void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
 {
@@ -25,7 +25,7 @@ void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)

   // connect to audio pad
   //if (0) {
-  if (strncmp(gst_pad_get_name(pad), "private_stream_1.0", 18) == 0 && audio_play) {
+  if (strncmp(gst_pad_get_name(pad), "private_stream_1.0", 18) == 0) {
     gst_plugin_load("ac3parse");
     gst_plugin_load("ac3dec");
     // construct internal pipeline elements
@@ -39,7 +39,6 @@ void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
     g_return_if_fail(audio_thread != NULL);
     gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(parse_audio));
     gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(decode));
-    gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(audio_play));

     // set up pad connections
     gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
@@ -47,7 +46,7 @@ void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
     gst_pad_connect(gst_element_get_pad(parse_audio,"src"),
                     gst_element_get_pad(decode,"sink"));
     gst_pad_connect(gst_element_get_pad(decode,"src"),
-                    gst_element_get_pad(audio_play,"sink"));
+                    gst_element_get_pad(audio_render_queue,"sink"));

     // construct queue and connect everything in the main pipelie
     audio_queue = gst_elementfactory_make("queue","audio_queue");
@@ -63,12 +62,10 @@ void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
     gtk_object_set(GTK_OBJECT(audio_thread),"create_thread",TRUE,NULL);
     g_print("setting to RUNNING state\n");
     gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_RUNNING);
-    g_print("setting to PLAYING state\n");
-    gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_PLAYING);
   }
   // connect to audio pad
   //if (0) {
-  if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0 && audio_play) {
+  if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0) {
     gst_plugin_load("mp3parse");
     gst_plugin_load("mpg123");
     // construct internal pipeline elements
@@ -82,7 +79,6 @@ void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
     g_return_if_fail(audio_thread != NULL);
     gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(parse_audio));
     gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(decode));
-    gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(audio_play));

     // set up pad connections
     gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
@@ -90,7 +86,7 @@ void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
     gst_pad_connect(gst_element_get_pad(parse_audio,"src"),
                     gst_element_get_pad(decode,"sink"));
     gst_pad_connect(gst_element_get_pad(decode,"src"),
-                    gst_element_get_pad(audio_play,"sink"));
+                    gst_element_get_pad(audio_render_queue,"sink"));

     // construct queue and connect everything in the main pipelie
     audio_queue = gst_elementfactory_make("queue","audio_queue");
@@ -106,11 +102,9 @@ void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
     gtk_object_set(GTK_OBJECT(audio_thread),"create_thread",TRUE,NULL);
     g_print("setting to RUNNING state\n");
     gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_RUNNING);
-    g_print("setting to PLAYING state\n");
-    gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_PLAYING);
   } else if (strncmp(gst_pad_get_name(pad), "video_", 6) == 0) {
   //} else if (0) {
-    mpeg2_setup_video_thread(pad, show, pipeline);
+    mpeg2_setup_video_thread(pad, video_render_queue, pipeline);
   }
 }

@@ -133,7 +127,6 @@ void mpeg2_setup_video_thread(GstPad *pad, GstElement *show, GstElement *pipelin
   g_return_if_fail(video_thread != NULL);
   gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(parse_video));
   gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(decode_video));
-  gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(show));

   // set up pad connections
   gst_element_add_ghost_pad(GST_ELEMENT(video_thread),
@@ -141,7 +134,7 @@ void mpeg2_setup_video_thread(GstPad *pad, GstElement *show, GstElement *pipelin
   gst_pad_connect(gst_element_get_pad(parse_video,"src"),
                   gst_element_get_pad(decode_video,"sink"));
   gst_pad_connect(gst_element_get_pad(decode_video,"src"),
-                  gst_element_get_pad(show,"sink"));
+                  gst_element_get_pad(video_render_queue,"sink"));

   // construct queue and connect everything in the main pipeline
   video_queue = gst_elementfactory_make("queue","video_queue");
@@ -157,8 +150,6 @@ void mpeg2_setup_video_thread(GstPad *pad, GstElement *show, GstElement *pipelin
   gtk_object_set(GTK_OBJECT(video_thread),"create_thread",TRUE,NULL);
   g_print("setting to RUNNING state\n");
   gst_element_set_state(GST_ELEMENT(video_thread),GST_STATE_RUNNING);
-  g_print("setting to PLAYING state\n");
-  gst_element_set_state(GST_ELEMENT(video_thread),GST_STATE_PLAYING);

   g_print("\n");
 }
@@ -224,15 +224,16 @@ void gst_audiosink_chain(GstPad *pad,GstBuffer *buf) {
   //g_print("audiosink: writing to soundcard\n");
   if (audiosink->fd > 2) {
     if (!audiosink->mute) {
-      if (gst_clock_current_diff(audiosink->clock, GST_BUFFER_TIMESTAMP(buf)) > 500000) {
-      }
-      else {
+      //if (gst_clock_current_diff(audiosink->clock, GST_BUFFER_TIMESTAMP(buf)) > 500000) {
+      //}
+      //else {
       gst_clock_wait(audiosink->clock, GST_BUFFER_TIMESTAMP(buf), GST_OBJECT(audiosink));
       ioctl(audiosink->fd,SNDCTL_DSP_GETOSPACE,&ospace);
       DEBUG("audiosink: (%d bytes buffer)\n", ospace.bytes);
       write(audiosink->fd,GST_BUFFER_DATA(buf),GST_BUFFER_SIZE(buf));
+      //}
       //gst_clock_set(audiosink->clock, GST_BUFFER_TIMESTAMP(buf));
-      }
+      //}
     }
   }
 }