Merge remote-tracking branch 'origin/master' into 0.11

Conflicts:
	ext/alsa/gstalsasrc.c
	ext/alsa/gstalsasrc.h
	gst/adder/gstadder.c
	gst/playback/gstplaybin2.c
	gst/playback/gstplaysinkconvertbin.c
	win32/common/libgstvideo.def
Tim-Philipp Müller 2011-12-02 00:07:39 +00:00
commit ec0d3566bf
10 changed files with 303 additions and 155 deletions


@@ -2107,6 +2107,11 @@ gst_video_convert_sample
gst_video_convert_sample_async
gst_video_event_new_still_frame
gst_video_event_parse_still_frame
gst_video_event_new_upstream_force_key_unit
gst_video_event_new_downstream_force_key_unit
gst_video_event_is_force_key_unit
gst_video_event_parse_upstream_force_key_unit
gst_video_event_parse_downstream_force_key_unit
<SUBSECTION Standard>
gst_video_format_get_type
GST_TYPE_VIDEO_FORMAT


@@ -87,12 +87,6 @@ static gboolean gst_alsasrc_close (GstAudioSrc * asrc);
static guint gst_alsasrc_read (GstAudioSrc * asrc, gpointer data, guint length);
static guint gst_alsasrc_delay (GstAudioSrc * asrc);
static void gst_alsasrc_reset (GstAudioSrc * asrc);
static GstStateChangeReturn gst_alsasrc_change_state (GstElement * element,
GstStateChange transition);
static GstFlowReturn gst_alsasrc_create (GstBaseSrc * bsrc, guint64 offset,
guint length, GstBuffer ** outbuf);
static GstClockTime gst_alsasrc_get_timestamp (GstAlsaSrc * src);
/* AlsaSrc signals and args */
enum
@@ -164,10 +158,7 @@ gst_alsasrc_class_init (GstAlsaSrcClass * klass)
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&alsasrc_src_factory));
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_alsasrc_change_state);
gstbasesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_alsasrc_getcaps);
gstbasesrc_class->create = GST_DEBUG_FUNCPTR (gst_alsasrc_create);
gstaudiosrc_class->open = GST_DEBUG_FUNCPTR (gst_alsasrc_open);
gstaudiosrc_class->prepare = GST_DEBUG_FUNCPTR (gst_alsasrc_prepare);
@@ -193,87 +184,6 @@ gst_alsasrc_class_init (GstAlsaSrcClass * klass)
DEFAULT_PROP_CARD_NAME, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
}
static GstClockTime
gst_alsasrc_get_timestamp (GstAlsaSrc * src)
{
snd_pcm_status_t *status;
snd_htimestamp_t htstamp;
snd_timestamp_t tstamp;
GstClockTime timestamp;
snd_pcm_uframes_t availmax;
gint64 offset;
GST_DEBUG_OBJECT (src, "Getting alsa timestamp!");
if (src->handle == NULL) {
GST_ERROR_OBJECT (src, "No alsa handle created yet!");
return GST_CLOCK_TIME_NONE;
}
if (snd_pcm_status_malloc (&status) != 0) {
GST_ERROR_OBJECT (src, "snd_pcm_status_malloc failed");
return GST_CLOCK_TIME_NONE;
}
if (snd_pcm_status (src->handle, status) != 0) {
GST_ERROR_OBJECT (src, "snd_pcm_status failed");
snd_pcm_status_free (status);
return GST_CLOCK_TIME_NONE;
}
/* get high resolution time stamp from driver */
snd_pcm_status_get_htstamp (status, &htstamp);
timestamp = GST_TIMESPEC_TO_TIME (htstamp);
if (timestamp == 0) {
GST_INFO_OBJECT (src,
"This alsa source does support high resolution timestamps");
snd_pcm_status_get_tstamp (status, &tstamp);
timestamp = GST_TIMEVAL_TO_TIME (tstamp);
if (timestamp == 0) {
GST_INFO_OBJECT (src,
"This alsa source does support low resolution timestamps");
timestamp = gst_util_get_timestamp ();
}
}
GST_DEBUG_OBJECT (src, "Base ts: %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
if (timestamp == 0) {
/* This timestamp is supposed to represent the last sample, so 0 (which
can be returned on some ALSA setups (such as mine)) must mean that it
is invalid, unless there's just one sample, but we'll ignore that. */
GST_WARNING_OBJECT (src,
"No timestamp returned from snd_pcm_status_get_htstamp");
return GST_CLOCK_TIME_NONE;
}
/* Max available frames sets the depth of the buffer */
availmax = snd_pcm_status_get_avail_max (status);
/* Compensate the fact that the timestamp references the last sample */
offset = -gst_util_uint64_scale_int (availmax * 2, GST_SECOND, src->rate);
/* Compensate for the delay until the package is available */
offset += gst_util_uint64_scale_int (snd_pcm_status_get_delay (status),
GST_SECOND, src->rate);
snd_pcm_status_free (status);
/* just in case, should not happen */
if (-offset > timestamp)
timestamp = 0;
else
timestamp -= offset;
/* Take first ts into account */
if (src->first_alsa_ts == GST_CLOCK_TIME_NONE) {
src->first_alsa_ts = timestamp;
}
timestamp -= src->first_alsa_ts;
GST_DEBUG_OBJECT (src, "ALSA timestamp : %" GST_TIME_FORMAT,
GST_TIME_ARGS (timestamp));
return timestamp;
}
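/* Worked example (illustration only, not part of this patch), assuming a
 * hypothetical 44100 Hz stream with avail_max = 1024 frames and
 * snd_pcm_status_get_delay() = 512 frames:
 *   offset  = -(1024 * 2) * GST_SECOND / 44100  ~ -46.4 ms
 *   offset +=         512 * GST_SECOND / 44100  ~ +11.6 ms
 * so offset ~ -34.8 ms and "timestamp -= offset" moves the driver timestamp
 * forward by about 34.8 ms before first_alsa_ts is subtracted. */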
static void
gst_alsasrc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
@@ -324,59 +234,6 @@ gst_alsasrc_get_property (GObject * object, guint prop_id,
}
}
static GstStateChangeReturn
gst_alsasrc_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstAudioBaseSrc *src = GST_AUDIO_BASE_SRC (element);
GstAlsaSrc *asrc = GST_ALSA_SRC (element);
GstClock *clk;
switch (transition) {
/* Show the compiler that we care */
case GST_STATE_CHANGE_NULL_TO_READY:
case GST_STATE_CHANGE_READY_TO_PAUSED:
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
case GST_STATE_CHANGE_PAUSED_TO_READY:
case GST_STATE_CHANGE_READY_TO_NULL:
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
clk = src->clock;
asrc->driver_timestamps = FALSE;
if (GST_IS_SYSTEM_CLOCK (clk)) {
gint clocktype;
g_object_get (clk, "clock-type", &clocktype, NULL);
if (clocktype == GST_CLOCK_TYPE_MONOTONIC) {
asrc->driver_timestamps = TRUE;
}
}
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
return ret;
}
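/* Illustration only, not part of this patch: the PAUSED_TO_PLAYING branch
 * above only enables driver timestamps when the pipeline clock is a
 * monotonic GstSystemClock. An application could arrange for that roughly
 * like this ("pipeline" is a hypothetical GstPipeline): */
static void
example_use_monotonic_clock (GstPipeline * pipeline)
{
  GstClock *clock = gst_system_clock_obtain ();

  g_object_set (clock, "clock-type", GST_CLOCK_TYPE_MONOTONIC, NULL);
  gst_pipeline_use_clock (pipeline, clock);
  gst_object_unref (clock);
}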
static GstFlowReturn
gst_alsasrc_create (GstBaseSrc * bsrc, guint64 offset, guint length,
GstBuffer ** outbuf)
{
GstFlowReturn ret = GST_FLOW_OK;
GstAlsaSrc *asrc = GST_ALSA_SRC (bsrc);
ret =
GST_BASE_SRC_CLASS (parent_class)->create (bsrc, offset, length, outbuf);
if (asrc->driver_timestamps == TRUE && *outbuf) {
GstClockTime ts = gst_alsasrc_get_timestamp (asrc);
if (GST_CLOCK_TIME_IS_VALID (ts)) {
GST_BUFFER_TIMESTAMP (*outbuf) = ts;
}
}
return ret;
}
static void
gst_alsasrc_init (GstAlsaSrc * alsasrc)
{
@@ -384,8 +241,6 @@ gst_alsasrc_init (GstAlsaSrc * alsasrc)
alsasrc->device = g_strdup (DEFAULT_PROP_DEVICE);
alsasrc->cached_caps = NULL;
alsasrc->driver_timestamps = FALSE;
alsasrc->first_alsa_ts = GST_CLOCK_TIME_NONE;
alsasrc->alsa_lock = g_mutex_new ();
}
@@ -1017,7 +872,6 @@ gst_alsasrc_reset (GstAudioSrc * asrc)
GST_DEBUG_OBJECT (alsa, "prepare");
CHECK (snd_pcm_prepare (alsa->handle), prepare_error);
GST_DEBUG_OBJECT (alsa, "reset done");
alsa->first_alsa_ts = GST_CLOCK_TIME_NONE;
GST_ALSA_SRC_UNLOCK (asrc);
return;


@@ -898,8 +898,8 @@ gst_ogg_pad_submit_packet (GstOggPad * pad, ogg_packet * packet)
"from granpos %" G_GINT64_FORMAT " (granule %" G_GINT64_FORMAT ", "
"accumulated granule %" G_GINT64_FORMAT,
GST_TIME_ARGS (pad->start_time), GST_TIME_ARGS (pad->start_time),
gst_ogg_stream_get_media_type (&pad->map), packet->granulepos,
granule, pad->map.accumulated_granule);
gst_ogg_stream_get_media_type (&pad->map),
(gint64) packet->granulepos, granule, pad->map.accumulated_granule);
} else {
packet->granulepos = gst_ogg_stream_granule_to_granulepos (&pad->map,
pad->map.accumulated_granule, pad->keyframe_granule);


@@ -1562,3 +1562,227 @@ gst_video_parse_caps_palette (GstCaps * caps)
return p;
}
#define GST_VIDEO_EVENT_FORCE_KEY_UNIT_NAME "GstForceKeyUnit"
/**
* gst_video_event_new_downstream_force_key_unit:
* @timestamp: the timestamp of the buffer that starts a new key unit
* @stream_time: the stream_time of the buffer that starts a new key unit
* @running_time: the running_time of the buffer that starts a new key unit
* @all_headers: %TRUE to produce headers when starting a new key unit
* @count: integer that can be used to number key units
*
* Creates a new downstream force key unit event. A downstream force key unit
* event can be sent down the pipeline to request downstream elements to produce
* a key unit. A downstream force key unit event must also be sent when handling
* an upstream force key unit event to notify downstream that the latter has been
* handled.
*
* To parse an event created by gst_video_event_new_downstream_force_key_unit() use
* gst_video_event_parse_downstream_force_key_unit().
*
* Returns: The new GstEvent
* Since: 0.10.36
*/
GstEvent *
gst_video_event_new_downstream_force_key_unit (GstClockTime timestamp,
GstClockTime stream_time, GstClockTime running_time, gboolean all_headers,
guint count)
{
GstEvent *force_key_unit_event;
GstStructure *s;
s = gst_structure_new (GST_VIDEO_EVENT_FORCE_KEY_UNIT_NAME,
"timestamp", G_TYPE_UINT64, timestamp,
"stream-time", G_TYPE_UINT64, stream_time,
"running-time", G_TYPE_UINT64, running_time,
"all-headers", G_TYPE_BOOLEAN, all_headers,
"count", G_TYPE_UINT, count, NULL);
force_key_unit_event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
return force_key_unit_event;
}
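/* Usage sketch (illustration only, not part of this patch): an encoder-like
 * element could request a key unit downstream from its source pad; "srcpad"
 * and the time values here are hypothetical. */
static void
example_send_downstream_force_key_unit (GstPad * srcpad,
    GstClockTime timestamp, GstClockTime stream_time,
    GstClockTime running_time)
{
  GstEvent *event;

  event = gst_video_event_new_downstream_force_key_unit (timestamp,
      stream_time, running_time, TRUE, 0);
  gst_pad_push_event (srcpad, event);
}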
/**
* gst_video_event_new_upstream_force_key_unit:
* @running_time: the running_time at which a new key unit should be produced
* @all_headers: %TRUE to produce headers when starting a new key unit
* @count: integer that can be used to number key units
*
* Creates a new upstream force key unit event. An upstream force key unit event
* can be sent to request upstream elements to produce a key unit.
*
* @running_time can be set to request a new key unit at a specific
* running_time. If set to GST_CLOCK_TIME_NONE, upstream elements will produce a
* new key unit as soon as possible.
*
* To parse an event created by gst_video_event_new_upstream_force_key_unit() use
* gst_video_event_parse_upstream_force_key_unit().
*
* Returns: The new GstEvent
* Since: 0.10.36
*/
GstEvent *
gst_video_event_new_upstream_force_key_unit (GstClockTime running_time,
gboolean all_headers, guint count)
{
GstEvent *force_key_unit_event;
GstStructure *s;
s = gst_structure_new (GST_VIDEO_EVENT_FORCE_KEY_UNIT_NAME,
"running-time", GST_TYPE_CLOCK_TIME, running_time,
"all-headers", G_TYPE_BOOLEAN, all_headers,
"count", G_TYPE_UINT, count, NULL);
force_key_unit_event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, s);
return force_key_unit_event;
}
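/* Usage sketch (illustration only, not part of this patch): an application
 * can ask encoders upstream for a new key unit as soon as possible by
 * sending the event to the pipeline; "pipeline" is a hypothetical element. */
static void
example_request_key_unit_asap (GstElement * pipeline)
{
  GstEvent *event;

  event = gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
      TRUE, 0);
  gst_element_send_event (pipeline, event);
}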
/**
* gst_video_event_is_force_key_unit:
* @event: A #GstEvent to check
*
* Checks if an event is a force key unit event. Returns true for both upstream
* and downstream force key unit events.
*
* Returns: %TRUE if the event is a valid force key unit event
* Since: 0.10.36
*/
gboolean
gst_video_event_is_force_key_unit (GstEvent * event)
{
const GstStructure *s;
g_return_val_if_fail (event != NULL, FALSE);
if (GST_EVENT_TYPE (event) != GST_EVENT_CUSTOM_DOWNSTREAM &&
GST_EVENT_TYPE (event) != GST_EVENT_CUSTOM_UPSTREAM)
return FALSE; /* Not a force key unit event */
s = gst_event_get_structure (event);
if (s == NULL
|| !gst_structure_has_name (s, GST_VIDEO_EVENT_FORCE_KEY_UNIT_NAME))
return FALSE;
return TRUE;
}
/**
* gst_video_event_parse_downstream_force_key_unit:
* @event: A #GstEvent to parse
* @timestamp: (out): A pointer to the timestamp in the event
* @stream_time: (out): A pointer to the stream-time in the event
* @running_time: (out): A pointer to the running-time in the event
* @all_headers: (out): A pointer to the all_headers flag in the event
* @count: (out): A pointer to the count field of the event
*
* Get timestamp, stream-time, running-time, all-headers and count in the force
* key unit event. See gst_video_event_new_downstream_force_key_unit() for a
* full description of the downstream force key unit event.
*
* Returns: %TRUE if the event is a valid downstream force key unit event.
* Since: 0.10.36
*/
gboolean
gst_video_event_parse_downstream_force_key_unit (GstEvent * event,
GstClockTime * timestamp, GstClockTime * stream_time,
GstClockTime * running_time, gboolean * all_headers, guint * count)
{
const GstStructure *s;
GstClockTime ev_timestamp, ev_stream_time, ev_running_time;
gboolean ev_all_headers;
guint ev_count;
g_return_val_if_fail (event != NULL, FALSE);
if (GST_EVENT_TYPE (event) != GST_EVENT_CUSTOM_DOWNSTREAM)
return FALSE; /* Not a force key unit event */
s = gst_event_get_structure (event);
if (s == NULL
|| !gst_structure_has_name (s, GST_VIDEO_EVENT_FORCE_KEY_UNIT_NAME))
return FALSE;
if (!gst_structure_get_clock_time (s, "timestamp", &ev_timestamp))
return FALSE; /* Not a force key unit event */
if (!gst_structure_get_clock_time (s, "stream-time", &ev_stream_time))
return FALSE; /* Not a force key unit event */
if (!gst_structure_get_clock_time (s, "running-time", &ev_running_time))
return FALSE; /* Not a force key unit event */
if (!gst_structure_get_boolean (s, "all-headers", &ev_all_headers))
return FALSE; /* Not a force key unit event */
if (!gst_structure_get_uint (s, "count", &ev_count))
return FALSE; /* Not a force key unit event */
if (timestamp)
*timestamp = ev_timestamp;
if (stream_time)
*stream_time = ev_stream_time;
if (running_time)
*running_time = ev_running_time;
if (all_headers)
*all_headers = ev_all_headers;
if (count)
*count = ev_count;
return TRUE;
}
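/* Handling sketch (illustration only, not part of this patch): a downstream
 * element (e.g. a payloader) could react to the event roughly like this in
 * its sink pad event handler. */
static void
example_handle_downstream_force_key_unit (GstEvent * event)
{
  GstClockTime timestamp, stream_time, running_time;
  gboolean all_headers;
  guint count;

  if (!gst_video_event_is_force_key_unit (event))
    return;

  if (gst_video_event_parse_downstream_force_key_unit (event, &timestamp,
          &stream_time, &running_time, &all_headers, &count)) {
    if (all_headers) {
      /* ... re-send stream headers before the forced key unit ... */
    }
    /* ... mark the next output as the start of key unit number "count" ... */
  }
}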
/**
* gst_video_event_parse_upstream_force_key_unit:
* @event: A #GstEvent to parse
* @running_time: (out): A pointer to the running_time in the event
* @all_headers: (out): A pointer to the all_headers flag in the event
* @count: (out): A pointer to the count field in the event
*
* Get running-time, all-headers and count in the force key unit event. See
* gst_video_event_new_upstream_force_key_unit() for a full description of the
* upstream force key unit event.
*
* Create an upstream force key unit event using gst_video_event_new_upstream_force_key_unit()
*
* Returns: %TRUE if the event is a valid upstream force-key-unit event. %FALSE if not
* Since: 0.10.36
*/
gboolean
gst_video_event_parse_upstream_force_key_unit (GstEvent * event,
GstClockTime * running_time, gboolean * all_headers, guint * count)
{
const GstStructure *s;
GstClockTime ev_running_time;
gboolean ev_all_headers;
guint ev_count;
g_return_val_if_fail (event != NULL, FALSE);
if (GST_EVENT_TYPE (event) != GST_EVENT_CUSTOM_UPSTREAM)
return FALSE; /* Not a force key unit event */
s = gst_event_get_structure (event);
if (s == NULL
|| !gst_structure_has_name (s, GST_VIDEO_EVENT_FORCE_KEY_UNIT_NAME))
return FALSE;
if (!gst_structure_get_clock_time (s, "running-time", &ev_running_time))
return FALSE; /* Not a force key unit event */
if (!gst_structure_get_boolean (s, "all-headers", &ev_all_headers))
return FALSE; /* Not a force key unit event */
if (!gst_structure_get_uint (s, "count", &ev_count))
return FALSE; /* Not a force key unit event */
if (running_time)
*running_time = ev_running_time;
if (all_headers)
*all_headers = ev_all_headers;
if (count)
*count = ev_count;
return TRUE;
}
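/* Handling sketch (illustration only, not part of this patch): an encoder
 * receiving the upstream event would typically force a key unit and then
 * notify downstream as described above; "srcpad" and the timestamp values
 * passed to the downstream event are hypothetical. */
static void
example_handle_upstream_force_key_unit (GstEvent * event, GstPad * srcpad,
    GstClockTime timestamp, GstClockTime stream_time)
{
  GstClockTime running_time;
  gboolean all_headers;
  guint count;

  if (!gst_video_event_parse_upstream_force_key_unit (event, &running_time,
          &all_headers, &count))
    return;

  /* ... configure the encoder to emit a key unit at running_time, or as soon
   * as possible when it is GST_CLOCK_TIME_NONE, then tell downstream: ... */
  gst_pad_push_event (srcpad,
      gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
          running_time, all_headers, count));
}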


@@ -704,6 +704,31 @@ GstEvent * gst_video_event_new_still_frame (gboolean in_still);
gboolean gst_video_event_parse_still_frame (GstEvent * event, gboolean * in_still);
/* video force key unit event creation and parsing */
GstEvent * gst_video_event_new_downstream_force_key_unit (GstClockTime timestamp,
GstClockTime streamtime,
GstClockTime runningtime,
gboolean all_headers,
guint count);
gboolean gst_video_event_parse_downstream_force_key_unit (GstEvent * event,
GstClockTime * timestamp,
GstClockTime * streamtime,
GstClockTime * runningtime,
gboolean * all_headers,
guint * count);
GstEvent * gst_video_event_new_upstream_force_key_unit (GstClockTime running_time,
gboolean all_headers,
guint count);
gboolean gst_video_event_parse_upstream_force_key_unit (GstEvent * event,
GstClockTime * running_time,
gboolean * all_headers,
guint * count);
gboolean gst_video_event_is_force_key_unit(GstEvent *event);
/* convert/encode video sample from one format to another */


@@ -1345,12 +1345,12 @@ gst_play_bin_set_uri (GstPlayBin * playbin, const gchar * uri)
if (!gst_playbin_uri_is_valid (playbin, uri)) {
if (g_str_has_prefix (uri, "file:")) {
GST_ERROR_OBJECT (playbin, "malformed file URI '%s' - make sure to "
"escape spaces and non-ASCII characters properly and specify an "
"absolute path. Use gst_filename_to_uri() to convert filenames "
GST_WARNING_OBJECT (playbin, "not entirely correct file URI '%s' - make "
"sure to escape spaces and non-ASCII characters properly and specify "
"an absolute path. Use gst_filename_to_uri() to convert filenames "
"to URIs", uri);
} else {
GST_ERROR_OBJECT (playbin, "malformed URI '%s'", uri);
/* GST_ERROR_OBJECT (playbin, "malformed URI '%s'", uri); */
}
}
@@ -3247,6 +3247,11 @@ sink_accepts_caps (GstElement * sink, GstCaps * caps)
return TRUE;
}
static GstStaticCaps raw_audio_caps = GST_STATIC_CAPS ("audio/x-raw-int; "
"audio/x-raw-float");
static GstStaticCaps raw_video_caps = GST_STATIC_CAPS ("video/x-raw-rgb; "
"video/x-raw-yuv; " "video/x-raw-gray");
/* We are asked to select an element. See if the next element to check
* is a sink. If this is the case, we see if the sink works by setting it to
* READY. If the sink works, we return SELECT_EXPOSE to make decodebin
@@ -3294,9 +3299,34 @@ autoplug_select_cb (GstElement * decodebin, GstPad * pad,
sink = group->video_sink;
if ((sinkpad = gst_element_get_static_pad (sink, "sink"))) {
GstPlayFlags flags = gst_play_bin_get_flags (playbin);
GstCaps *raw_caps =
(isaudiodec) ? gst_static_caps_get (&raw_audio_caps) :
gst_static_caps_get (&raw_video_caps);
caps = gst_pad_query_caps (sinkpad, NULL);
compatible = gst_element_factory_can_src_any_caps (factory, caps);
/* If the sink supports raw audio/video, we first check
* if the decoder could output any raw audio/video format
* and assume it is compatible with the sink then. We don't
* do a complete compatibility check here if converters
* are plugged between the decoder and the sink because
* the converters will convert between raw formats and
* even if the decoder format is not supported by the sink
* a converter will convert it.
*
* We assume here that the converters can convert between
* any raw format.
*/
if ((isaudiodec && !(flags & GST_PLAY_FLAG_NATIVE_AUDIO)
&& gst_caps_can_intersect (caps, raw_caps)) || (!isaudiodec
&& !(flags & GST_PLAY_FLAG_NATIVE_VIDEO)
&& gst_caps_can_intersect (caps, raw_caps))) {
compatible = gst_element_factory_can_src_any_caps (factory, raw_caps)
|| gst_element_factory_can_src_any_caps (factory, caps);
} else {
compatible = gst_element_factory_can_src_any_caps (factory, caps);
}
gst_object_unref (sinkpad);
gst_caps_unref (caps);
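/* Illustration only, not part of this patch: with the flag handling stripped
 * away, the shortcut described in the comment above reduces to asking whether
 * the decoder factory can produce any raw format at all: */
static gboolean
example_factory_outputs_raw (GstElementFactory * factory, gboolean isaudiodec)
{
  GstCaps *raw = isaudiodec ?
      gst_static_caps_get (&raw_audio_caps) :
      gst_static_caps_get (&raw_video_caps);
  gboolean res = gst_element_factory_can_src_any_caps (factory, raw);

  gst_caps_unref (raw);
  return res;
}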


@@ -87,8 +87,10 @@ distribute_running_time (GstElement * element, const GstSegment * segment)
gst_pad_send_event (pad, gst_event_new_flush_start ());
gst_pad_send_event (pad, gst_event_new_flush_stop (FALSE));
event = gst_event_new_segment (segment);
gst_pad_send_event (pad, event);
if (segment->format != GST_FORMAT_UNDEFINED) {
event = gst_event_new_segment (segment);
gst_pad_send_event (pad, event);
}
gst_object_unref (pad);
}


@@ -1059,6 +1059,7 @@ run_fft_pipeline (int inrate, int outrate, int quality, int width, gboolean fp,
/* cleanup */
gst_buffer_unref (inbuffer);
gst_caps_unref (caps);
cleanup_audioresample (audioresample);
}


@@ -115,6 +115,8 @@ GST_START_TEST (test_basetime_calculation)
gst_object_unref (pad);
gst_element_set_state (p1, GST_STATE_NULL);
gst_object_unref (p1);
g_main_loop_unref (loop);
}
GST_END_TEST;


@@ -24,8 +24,13 @@ EXPORTS
gst_video_convert_frame
gst_video_convert_frame_async
gst_video_crop_meta_get_info
gst_video_event_is_force_key_unit
gst_video_event_new_downstream_force_key_unit
gst_video_event_new_still_frame
gst_video_event_new_upstream_force_key_unit
gst_video_event_parse_downstream_force_key_unit
gst_video_event_parse_still_frame
gst_video_event_parse_upstream_force_key_unit
gst_video_filter_get_type
gst_video_flags_get_type
gst_video_format_flags_get_type