gstreamer/gstplay/mpeg2.c
Wim Taymans 2ab64206bb Better error recovery in the MPEG1 decoder.
Original commit message from CVS:
Better error recovery in the MPEG1 decoder.
Removed MPEG2 stuff from the MPEG1 decoder and made it ignore MPEG2
streams.
Some extra framerates in the MPEG2 decoder (from libmpeg2)
Round image size up to the nearest multiple of 16 (MPEG2 decoder)
2000-07-13 19:49:50 +00:00
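
Note: as a rough illustration of the last two items above (a sketch only -- this is
not code from this file or from libmpeg2; the frame-rate values are the standard
MPEG frame_rate_code table and the name ROUND_UP_16 is made up for the example):

#include <stdio.h>

/* frame_rate_code -> frames per second; "extra framerates" means covering
 * all eight codes of the standard table rather than only the common ones */
static const double frame_rates[9] = {
  0.0,              /* 0: forbidden          */
  24000.0 / 1001.0, /* 1: 23.976 (NTSC film) */
  24.0,             /* 2 */
  25.0,             /* 3: PAL                */
  30000.0 / 1001.0, /* 4: 29.97 (NTSC)       */
  30.0,             /* 5 */
  50.0,             /* 6 */
  60000.0 / 1001.0, /* 7: 59.94              */
  60.0              /* 8 */
};

/* round a picture dimension up to the next multiple of 16,
 * since MPEG video is coded in 16x16 macroblocks */
#define ROUND_UP_16(x) (((x) + 15) & ~15)

int main(void)
{
  printf("frame_rate_code 3 -> %.3f fps\n", frame_rates[3]); /* 25.000 */
  printf("486 rounded up    -> %d\n", ROUND_UP_16(486));     /* 496    */
  return 0;
}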


#define BUFFER 20
#define VIDEO_DECODER "mpeg2play"
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include <gnome.h>
#include <gst/gst.h>
#include "codecs.h"
extern gboolean _gst_plugin_spew;
extern GstElement *video_render_queue, *audio_render_queue;

/* forward declaration, so the pad callback below can hand video pads off
 * before the function is defined (codecs.h may already provide one) */
void mpeg2_setup_video_thread(GstPad *pad, GstElement *show, GstElement *pipeline);
void mpeg2_new_pad_created(GstElement *parse, GstPad *pad, GstElement *pipeline)
{
  GstElement *parse_audio, *decode;
  GstElement *audio_queue;
  GstElement *audio_thread;

  g_print("***** a new pad %s was created\n", gst_pad_get_name(pad));

  // connect to an AC3 (private stream) audio pad
  //if (0) {
  if (strncmp(gst_pad_get_name(pad), "private_stream_1.0", 18) == 0) {
    gst_plugin_load("ac3parse");
    gst_plugin_load("ac3dec");

    // construct internal pipeline elements
    parse_audio = gst_elementfactory_make("ac3parse", "parse_audio");
    g_return_if_fail(parse_audio != NULL);
    decode = gst_elementfactory_make("ac3dec", "decode_audio");
    g_return_if_fail(decode != NULL);

    // create the thread and pack stuff into it
    audio_thread = gst_thread_new("audio_thread");
    g_return_if_fail(audio_thread != NULL);
    gst_bin_add(GST_BIN(audio_thread), GST_ELEMENT(parse_audio));
    gst_bin_add(GST_BIN(audio_thread), GST_ELEMENT(decode));

    // set up pad connections
    gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
                              gst_element_get_pad(parse_audio, "sink"));
    gst_pad_connect(gst_element_get_pad(parse_audio, "src"),
                    gst_element_get_pad(decode, "sink"));
    gst_pad_connect(gst_element_get_pad(decode, "src"),
                    gst_element_get_pad(audio_render_queue, "sink"));

    // construct queue and connect everything in the main pipeline
    audio_queue = gst_elementfactory_make("queue", "audio_queue");
    gtk_object_set(GTK_OBJECT(audio_queue), "max_level", 30, NULL);
    gst_bin_add(GST_BIN(pipeline), GST_ELEMENT(audio_queue));
    gst_bin_add(GST_BIN(pipeline), GST_ELEMENT(audio_thread));
    gst_pad_connect(pad,
                    gst_element_get_pad(audio_queue, "sink"));
    gst_pad_connect(gst_element_get_pad(audio_queue, "src"),
                    gst_element_get_pad(audio_thread, "sink"));

    // set up thread state and kick things off
    gtk_object_set(GTK_OBJECT(audio_thread), "create_thread", TRUE, NULL);
    g_print("setting to RUNNING state\n");
    gst_element_set_state(GST_ELEMENT(audio_thread), GST_STATE_RUNNING);
  }
  // connect to an MPEG audio pad
  //if (0) {
  if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0) {
    gst_plugin_load("mp3parse");
    gst_plugin_load("mpg123");

    // construct internal pipeline elements
    parse_audio = gst_elementfactory_make("mp3parse", "parse_audio");
    g_return_if_fail(parse_audio != NULL);
    decode = gst_elementfactory_make("mpg123", "decode_audio");
    g_return_if_fail(decode != NULL);

    // create the thread and pack stuff into it
    audio_thread = gst_thread_new("audio_thread");
    g_return_if_fail(audio_thread != NULL);
    gst_bin_add(GST_BIN(audio_thread), GST_ELEMENT(parse_audio));
    gst_bin_add(GST_BIN(audio_thread), GST_ELEMENT(decode));

    // set up pad connections
    gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
                              gst_element_get_pad(parse_audio, "sink"));
    gst_pad_connect(gst_element_get_pad(parse_audio, "src"),
                    gst_element_get_pad(decode, "sink"));
    gst_pad_connect(gst_element_get_pad(decode, "src"),
                    gst_element_get_pad(audio_render_queue, "sink"));

    // construct queue and connect everything in the main pipeline
    audio_queue = gst_elementfactory_make("queue", "audio_queue");
    gtk_object_set(GTK_OBJECT(audio_queue), "max_level", BUFFER, NULL);
    gst_bin_add(GST_BIN(pipeline), GST_ELEMENT(audio_queue));
    gst_bin_add(GST_BIN(pipeline), GST_ELEMENT(audio_thread));
    gst_pad_connect(pad,
                    gst_element_get_pad(audio_queue, "sink"));
    gst_pad_connect(gst_element_get_pad(audio_queue, "src"),
                    gst_element_get_pad(audio_thread, "sink"));

    // set up thread state and kick things off
    gtk_object_set(GTK_OBJECT(audio_thread), "create_thread", TRUE, NULL);
    g_print("setting to RUNNING state\n");
    gst_element_set_state(GST_ELEMENT(audio_thread), GST_STATE_RUNNING);
  } else if (strncmp(gst_pad_get_name(pad), "video_", 6) == 0) {
  //} else if (0) {
    mpeg2_setup_video_thread(pad, video_render_queue, pipeline);
  }
}
void mpeg2_setup_video_thread(GstPad *pad, GstElement *show, GstElement *pipeline)
{
  GstElement *parse_video, *decode_video;
  GstElement *video_queue;
  GstElement *video_thread;

  gst_plugin_load("mp1videoparse");
  gst_plugin_load(VIDEO_DECODER);

  // construct internal pipeline elements
  parse_video = gst_elementfactory_make("mp1videoparse", "parse_video");
  g_return_if_fail(parse_video != NULL);
  decode_video = gst_elementfactory_make(VIDEO_DECODER, "decode_video");
  g_return_if_fail(decode_video != NULL);

  // create the thread and pack stuff into it
  video_thread = gst_thread_new("video_thread");
  g_return_if_fail(video_thread != NULL);
  gst_bin_add(GST_BIN(video_thread), GST_ELEMENT(parse_video));
  gst_bin_add(GST_BIN(video_thread), GST_ELEMENT(decode_video));

  // set up pad connections
  gst_element_add_ghost_pad(GST_ELEMENT(video_thread),
                            gst_element_get_pad(parse_video, "sink"));
  gst_pad_connect(gst_element_get_pad(parse_video, "src"),
                  gst_element_get_pad(decode_video, "sink"));
  gst_pad_connect(gst_element_get_pad(decode_video, "src"),
                  gst_element_get_pad(video_render_queue, "sink"));

  // construct queue and connect everything in the main pipeline
  video_queue = gst_elementfactory_make("queue", "video_queue");
  gtk_object_set(GTK_OBJECT(video_queue), "max_level", BUFFER, NULL);
  gst_bin_add(GST_BIN(pipeline), GST_ELEMENT(video_queue));
  gst_bin_add(GST_BIN(pipeline), GST_ELEMENT(video_thread));
  gst_pad_connect(pad,
                  gst_element_get_pad(video_queue, "sink"));
  gst_pad_connect(gst_element_get_pad(video_queue, "src"),
                  gst_element_get_pad(video_thread, "sink"));

  // set up thread state and kick things off
  gtk_object_set(GTK_OBJECT(video_thread), "create_thread", TRUE, NULL);
  g_print("setting to RUNNING state\n");
  gst_element_set_state(GST_ELEMENT(video_thread), GST_STATE_RUNNING);
  g_print("\n");
}
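
/*
 * Usage sketch (not part of the original file): mpeg2_new_pad_created is a
 * callback for newly exposed parser pads, so the caller is expected to hook
 * it up to the MPEG system-stream parser.  The element name "mpeg2parse" and
 * the "new_pad" signal below are assumptions inferred from the callback's
 * (element, pad, user_data) signature, not confirmed by this file.
 */
static void mpeg2_wire_demuxer_example(GstElement *pipeline)
{
  GstElement *parse;

  gst_plugin_load("mpeg2parse");                       /* assumed plugin/element name */
  parse = gst_elementfactory_make("mpeg2parse", "parse");
  g_return_if_fail(parse != NULL);

  gst_bin_add(GST_BIN(pipeline), GST_ELEMENT(parse));
  // run mpeg2_new_pad_created() every time the parser exposes a new pad
  gtk_signal_connect(GTK_OBJECT(parse), "new_pad",
                     GTK_SIGNAL_FUNC(mpeg2_new_pad_created), pipeline);
}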