documentation: fix a number of typos

Aaron Boxer 2019-09-02 08:27:35 -04:00 committed by Tim-Philipp Müller
parent 8173596ed2
commit 46989dca96
147 changed files with 282 additions and 281 deletions

NEWS

@ -353,7 +353,7 @@ New element features and additions
- rtpjitterbuffer has improved end-of-stream handling
- rtpmp4vpay will be prefered over rtpmp4gpay for MPEG-4 video in
- rtpmp4vpay will be preferred over rtpmp4gpay for MPEG-4 video in
autoplugging scenarios now
- rtspsrc now allows applications to send RTSP SET_PARAMETER and
@ -1208,7 +1208,7 @@ Cerbero has seen a number of improvements:
used in order to re-produce a specific build. To set a manifest, you
can set manifest = 'my_manifest.xml' in your configuration file, or
use the --manifest command line option. The command line option will
take precendence over anything specific in the configuration file.
take precedence over anything specific in the configuration file.
- The new build-deps command can be used to build only the
dependencies of a recipe, without the recipe itself.


@ -1700,7 +1700,7 @@
},
"properties": {
"drain-on-changes": {
"blurb": "Drains the filter when its coeficients change",
"blurb": "Drains the filter when its coefficients change",
"construct": false,
"construct-only": false,
"default": "true",
@ -2120,7 +2120,7 @@
},
"properties": {
"drain-on-changes": {
"blurb": "Drains the filter when its coeficients change",
"blurb": "Drains the filter when its coefficients change",
"construct": false,
"construct-only": false,
"default": "true",
@ -2289,7 +2289,7 @@
"writable": true
},
"drain-on-changes": {
"blurb": "Drains the filter when its coeficients change",
"blurb": "Drains the filter when its coefficients change",
"construct": false,
"construct-only": false,
"default": "true",
@ -4563,7 +4563,7 @@
"writable": true
},
"min": {
"blurb": "mininum buffer size",
"blurb": "minimum buffer size",
"construct": true,
"construct-only": false,
"default": "1",
@ -29808,7 +29808,7 @@
"writable": true
},
"tls-interaction": {
"blurb": "A GTlsInteraction object to promt the user for password or certificate",
"blurb": "A GTlsInteraction object to prompt the user for password or certificate",
"construct": false,
"construct-only": false,
"type-name": "GTlsInteraction",
@ -33823,7 +33823,7 @@
"writable": true
},
"multicast-iface": {
"blurb": "The network interface on which to join the multicast group.This allows multiple interfaces seperated by comma. (\"eth0,eth1\")",
"blurb": "The network interface on which to join the multicast group.This allows multiple interfaces separated by comma. (\"eth0,eth1\")",
"construct": false,
"construct-only": false,
"default": "NULL",


@ -1121,7 +1121,7 @@ gst_dvdemux_handle_pull_seek (GstDVDemux * demux, GstPad * pad,
gst_dvdemux_push_event (demux, new_event);
}
/* if successfull seek, we update our real segment and push
/* if successful seek, we update our real segment and push
* out the new segment. */
if (res) {
memcpy (&demux->time_segment, &seeksegment, sizeof (GstSegment));
@ -1149,7 +1149,7 @@ gst_dvdemux_handle_pull_seek (GstDVDemux * demux, GstPad * pad,
demux->need_segment = FALSE;
}
/* and restart the task in case it got paused explicitely or by
/* and restart the task in case it got paused explicitly or by
* the FLUSH_START event we pushed out. */
gst_pad_start_task (demux->sinkpad, (GstTaskFunction) gst_dvdemux_loop,
demux->sinkpad, NULL);
@ -1800,7 +1800,7 @@ gst_dvdemux_loop (GstPad * pad)
if (!gst_dvdemux_handle_pull_seek (dvdemux, dvdemux->videosrcpad,
event)) {
GST_ELEMENT_WARNING (dvdemux, STREAM, DECODE, (NULL),
("Error perfoming initial seek"));
("Error performing initial seek"));
}
gst_event_unref (event);


@ -31,7 +31,7 @@
* of the FLAC stream.
*
* Applications can set the tags to write using the #GstTagSetter interface.
* Tags contained withing the FLAC bitstream will be picked up
* Tags contained within the FLAC bitstream will be picked up
* automatically (and merged according to the merge mode set via the tag
* setter interface).
*
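The gtk-doc comment above notes that applications hand tags to flacenc through the GstTagSetter interface. As a rough illustration only (the helper below is hypothetical and not part of this patch; tag values are made up), such usage in C could look like:

#include <gst/gst.h>

/* Hypothetical helper: pass example tags to a flacenc instance via the
 * GstTagSetter interface described in the comment above. */
static void
set_example_tags (GstElement * flacenc)
{
  GstTagSetter *setter = GST_TAG_SETTER (flacenc);

  gst_tag_setter_set_tag_merge_mode (setter, GST_TAG_MERGE_REPLACE);
  gst_tag_setter_add_tags (setter, GST_TAG_MERGE_REPLACE,
      GST_TAG_TITLE, "Example title",
      GST_TAG_ARTIST, "Example artist", NULL);
}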


@ -370,7 +370,8 @@ gst_gdk_pixbuf_dec_flush (GstGdkPixbufDec * filter)
/* ERRORS */
no_pixbuf:
{
GST_ELEMENT_ERROR (filter, STREAM, DECODE, (NULL), ("error geting pixbuf"));
GST_ELEMENT_ERROR (filter, STREAM, DECODE, (NULL),
("error getting pixbuf"));
return GST_FLOW_ERROR;
}
channels_not_supported:


@ -200,7 +200,7 @@ gst_gtk_base_sink_get_widget (GstGtkBaseSink * gtk_sink)
"ignore-alpha", G_BINDING_BIDIRECTIONAL | G_BINDING_SYNC_CREATE);
/* Take the floating ref, other wise the destruction of the container will
* make this widget disapear possibly before we are done. */
* make this widget disappear possibly before we are done. */
gst_object_ref_sink (gtk_sink->widget);
gtk_sink->widget_destroy_id = g_signal_connect (gtk_sink->widget, "destroy",
G_CALLBACK (widget_destroy_cb), gtk_sink);


@ -591,7 +591,7 @@ gst_jack_audio_client_get_client (GstJackAudioClient * client)
* @client: a #GstJackAudioClient
* @active: new mode for the client
*
* Activate or deactive @client. When a client is activated it will receive
* Activate or deactivate @client. When a client is activated it will receive
* callbacks when data should be processed.
*
* Returns: 0 if all ok.


@ -4,7 +4,7 @@ libgstjpeg_la_SOURCES = \
gstjpeg.c \
gstjpegenc.c \
gstjpegdec.c
# deprected gstsmokeenc.c smokecodec.c gstsmokedec.c
# deprecated gstsmokeenc.c smokecodec.c gstsmokedec.c
libgstjpeg_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstjpeg_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) -lgstvideo-$(GST_API_VERSION) \


@ -154,7 +154,7 @@ static guint mainloop_ref_ct = 0;
static GMutex pa_shared_resource_mutex;
/* We keep a custom ringbuffer that is backed up by data allocated by
* pulseaudio. We must also overide the commit function to write into
* pulseaudio. We must also override the commit function to write into
* pulseaudio memory instead. */
struct _GstPulseRingBuffer
{
@ -545,7 +545,7 @@ gst_pulseringbuffer_open_device (GstAudioRingBuffer * buf)
gst_pulsering_context_subscribe_cb, pctx);
/* try to connect to the server and wait for completion, we don't want to
* autospawn a deamon */
* autospawn a daemon */
GST_LOG_OBJECT (psink, "connect to server %s",
GST_STR_NULL (psink->server));
if (pa_context_connect (pctx->context, psink->server,
@ -685,7 +685,7 @@ gst_pulsering_stream_request_cb (pa_stream * s, size_t length, void *userdata)
if (pbuf->in_commit && (length >= rbuf->spec.segsize)) {
/* only signal when we are waiting in the commit thread
* and got request for atleast a segment */
* and got request for at least a segment */
pa_threaded_mainloop_signal (mainloop, 0);
}
}
@ -2431,7 +2431,7 @@ gst_pulsesink_set_volume (GstPulseSink * psink, gdouble volume)
if (pbuf->is_pcm)
gst_pulse_cvolume_from_linear (&v, pbuf->channels, volume);
else
/* FIXME: this will eventually be superceded by checks to see if the volume
/* FIXME: this will eventually be superseded by checks to see if the volume
* is readable/writable */
goto unlock;
@ -3003,7 +3003,7 @@ gst_pulsesink_change_props (GstPulseSink * psink, GstTagList * l)
static const gchar *const map[] = {
GST_TAG_TITLE, PA_PROP_MEDIA_TITLE,
/* might get overriden in the next iteration by GST_TAG_ARTIST */
/* might get overridden in the next iteration by GST_TAG_ARTIST */
GST_TAG_PERFORMER, PA_PROP_MEDIA_ARTIST,
GST_TAG_ARTIST, PA_PROP_MEDIA_ARTIST,


@ -36,7 +36,7 @@
#include <QtQuick/QQuickWindow>
#include <QOpenGLFramebufferObject>
/* compatability definitions... */
/* compatibility definitions... */
#ifndef GL_READ_FRAMEBUFFER
#define GL_READ_FRAMEBUFFER 0x8CA8
#endif


@ -50,7 +50,7 @@ typedef struct _GstDV1394SrcClass GstDV1394SrcClass;
struct _GstDV1394Src {
GstPushSrc element;
// consecutive=2, skip=4 will skip 4 frames, then let 2 consecutive ones thru
// consecutive=2, skip=4 will skip 4 frames, then let 2 consecutive ones through
gint consecutive;
gint skip;
gboolean drop_incomplete;


@ -741,7 +741,7 @@ foreach_add_tag (const GstTagList * list, const gchar * tag, gpointer userdata)
GST_LOG ("Processing tag %s (num=%u)", tag, num_tags);
if (num_tags > 1 && gst_tag_is_fixed (tag)) {
GST_WARNING ("Multiple occurences of fixed tag '%s', ignoring some", tag);
GST_WARNING ("Multiple occurrences of fixed tag '%s', ignoring some", tag);
num_tags = 1;
}


@ -436,7 +436,7 @@ static void
gst_wavpack_enc_set_wp_config (GstWavpackEnc * enc)
{
enc->wp_config = g_new0 (WavpackConfig, 1);
/* set general stream informations in the WavpackConfig */
/* set general stream information in the WavpackConfig */
enc->wp_config->bytes_per_sample = GST_ROUND_UP_8 (enc->depth) / 8;
enc->wp_config->bits_per_sample = enc->depth;
enc->wp_config->num_channels = enc->channels;
@ -708,7 +708,7 @@ gst_wavpack_enc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
WavpackCloseFile (enc->wp_context);
goto config_failed;
}
GST_DEBUG_OBJECT (enc, "setup of encoding context successfull");
GST_DEBUG_OBJECT (enc, "setup of encoding context successful");
}
if (enc->need_channel_remap) {


@ -457,7 +457,7 @@ gst_audio_dynamic_transform_soft_knee_compressor_float (GstAudioDynamic *
* f(x) = ax^2 + bx + c
*/
/* FIXME: If treshold is the same as the maximum
/* FIXME: If threshold is the same as the maximum
* we need to raise it a bit to prevent
* division by zero. */
if (threshold == 1.0)


@ -267,7 +267,7 @@ process_fft_##channels##_##width (GstAudioFXBaseFIRFilter * self, const g##ctype
* plus some more space for the inverse FFT below. \
* \
* The samples are put at offset kernel_length, the inverse FFT \
* overwrites everthing from offset 0 to length-kernel_length+1, keeping \
* overwrites everything from offset 0 to length-kernel_length+1, keeping \
* the last kernel_length-1 samples for copying to the next processing \
* step. \
*/ \
@ -558,14 +558,14 @@ gst_audio_fx_base_fir_filter_class_init (GstAudioFXBaseFIRFilterClass * klass)
/**
* GstAudioFXBaseFIRFilter:drain-on-changes:
*
* Whether the filter should be drained when its coeficients change
* Whether the filter should be drained when its coefficients change
*
* Note: Currently this only works if the kernel size is not changed!
* Support for drainless kernel size changes will be added in the future.
*/
g_object_class_install_property (gobject_class, PROP_DRAIN_ON_CHANGES,
g_param_spec_boolean ("drain-on-changes", "Drain on changes",
"Drains the filter when its coeficients change",
"Drains the filter when its coefficients change",
DEFAULT_DRAIN_ON_CHANGES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));


@ -63,7 +63,7 @@ struct _GstAudioFXBaseFIRFilter {
gboolean low_latency; /* work in slower low latency mode */
gboolean drain_on_changes; /* If the filter should be drained when
* coeficients change */
* coefficients change */
/* < private > */
GstAudioFXBaseFIRFilterProcessFunc process;


@ -53,7 +53,7 @@
* for the best overlap position. Scaletempo uses a statistical cross
* correlation (roughly a dot-product). Scaletempo consumes most of its CPU
* cycles here. One can use the #GstScaletempo:search propery to tune how far
* the algoritm looks.
* the algorithm looks.
*
*/


@ -53,7 +53,7 @@
* register the element factories and pad templates
* register the features
*
* exchange the string 'plugin' with your elemnt name
* exchange the string 'plugin' with your element name
*/
static gboolean


@ -154,7 +154,7 @@ gst_auto_detect_attach_ghost_pad (GstAutoDetect * self)
return res;
}
/* Hack to make initial linking work; ideally, this'd work even when
/* Hack to make initial linking work; ideally, this would work even when
* no target has been assigned to the ghostpad yet. */
static void
gst_auto_detect_reset (GstAutoDetect * self)


@ -2864,7 +2864,7 @@ gst_avi_demux_stream_index (GstAviDemux * avi)
if (map.size < 8)
goto too_small;
/* check tag first before blindy trying to read 'size' bytes */
/* check tag first before blindly trying to read 'size' bytes */
tag = GST_READ_UINT32_LE (map.data);
size = GST_READ_UINT32_LE (map.data + 4);
if (tag == GST_RIFF_TAG_LIST) {
@ -3377,7 +3377,7 @@ gst_avi_demux_stream_header_push (GstAviDemux * avi)
if (!gst_avi_demux_parse_avih (avi, sub, &avi->avih))
goto header_wrong_avih;
GST_DEBUG_OBJECT (avi, "AVI header ok, reading elemnts from header");
GST_DEBUG_OBJECT (avi, "AVI header ok, reading elements from header");
/* now, read the elements from the header until the end */
while (gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
@ -5267,7 +5267,7 @@ gst_avi_demux_loop_data (GstAviDemux * avi)
}
if (avi->segment.rate > 0.0) {
/* only check this for fowards playback for now */
/* only check this for forwards playback for now */
if (keyframe && GST_CLOCK_TIME_IS_VALID (avi->segment.stop)
&& (timestamp > avi->segment.stop)) {
goto eos_stop;


@ -1382,7 +1382,7 @@ gst_avi_mux_riff_get_avi_header (GstAviMux * avimux)
gst_tag_list_foreach (tags, gst_avi_mux_write_tag, &bw);
if (info + 8 == gst_byte_writer_get_pos (&bw)) {
/* no tags writen, remove the empty INFO LIST as it is useless
/* no tags written, remove the empty INFO LIST as it is useless
* and prevents playback in vlc */
gst_byte_writer_set_pos (&bw, info - 4);
} else {


@ -26,7 +26,7 @@
*
* The progressreport element can be put into a pipeline to report progress,
* which is done by doing upstream duration and position queries in regular
* (real-time) intervals. Both the interval and the prefered query format
* (real-time) intervals. Both the interval and the preferred query format
* can be specified via the #GstProgressReport:update-freq and the
* #GstProgressReport:format property.
*


@ -137,7 +137,7 @@ gst_rnd_buffer_size_class_init (GstRndBufferSizeClass * klass)
0, G_MAXUINT32, DEFAULT_SEED,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_MINIMUM,
g_param_spec_int ("min", "mininum", "mininum buffer size",
g_param_spec_int ("min", "minimum", "minimum buffer size",
0, G_MAXINT32, DEFAULT_MIN,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_MAXIMUM,


@ -524,7 +524,7 @@ gst_deinterlace_class_init (GstDeinterlaceClass * klass)
*
* Some methods provide parameters which can be set by getting
* the "method" child via the #GstChildProxy interface and
* setting the appropiate properties on it.
* setting the appropriate properties on it.
*
* * tomsmocomp Motion Adaptive: Motion Search
* * greedyh Motion Adaptive: Advanced Detection
@ -1444,7 +1444,7 @@ gst_deinterlace_get_pattern_lock (GstDeinterlace * self, gboolean * flush_one)
break;
}
/* make complete matches more signficant */
/* make complete matches more significant */
if (k == length)
k += GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY;


@ -1,6 +1,6 @@
/* sse.h
Streaming SIMD Extenstions (a.k.a. Katmai New Instructions)
Streaming SIMD Extensions (a.k.a. Katmai New Instructions)
GCC interface library for IA32.
To use this library, simply include this header file
@ -954,7 +954,7 @@ sse_ok(void)
/* Store FENCE - enforce ordering of stores before fence vs. stores
occuring after fence in source code.
occurring after fence in source code.
*/
#ifdef SSE_TRACE
#define sfence() \


@ -5,7 +5,7 @@
#ifndef IS_C
#ifdef SKIP_SEARCH
"movq %%mm6, %%mm0\n\t" // just use the results of our wierd bob
"movq %%mm6, %%mm0\n\t" // just use the results of our weird bob
#else
@ -114,7 +114,7 @@
return 0;
#else
#ifdef SKIP_SEARCH
out[0] = best[0]; // just use the results of our wierd bob
out[0] = best[0]; // just use the results of our weird bob
out[1] = best[1];
#else
diff[0] = diff[0] - MIN (diff[0], 10) - 4;


@ -1,7 +1,7 @@
// -*- c++ -*-
// First, get and save our possible Bob values
// Assume our pixels are layed out as follows with x the calc'd bob value
// Assume our pixels are laid out as follows with x the calc'd bob value
// and the other pixels are from the current field
//
// j a b c k current field


@ -1,7 +1,7 @@
// -*- c++ -*-
// First, get and save our possible Bob values
// Assume our pixels are layed out as follows with x the calc'd bob value
// Assume our pixels are laid out as follows with x the calc'd bob value
// and the other pixels are from the current field
//
// j a b c k current field


@ -57,7 +57,7 @@ typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodVFIRClass;
/*
* The MPEG2 spec uses a slightly harsher filter, they specify
* [-1 8 2 8 -1]. ffmpeg uses a similar filter but with more of
* a tendancy to blur than to use the local information. The
* a tendency to blur than to use the local information. The
* filter taps here are: [-1 4 2 4 -1].
*/


@ -39,7 +39,7 @@
*
* * `type` (G_TYPE_INT, 0-1): The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are specied
* named events. Tones are specified by their frequencies and events are specified
* by their number. This element can only take events as input. Do not confuse
* with "method" which specified the output.
*


@ -31,7 +31,7 @@
*
* * `type` (G_TYPE_INT, 0-1): Which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are specied
* named events. Tones are specified by their frequencies and events are specified
* by their number. This element currently only recognizes events.
* Do not confuse with "method" which specified the output.
*


@ -37,7 +37,7 @@
*
* * `type` (G_TYPE_INT, 0-1): The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are specied
* named events. Tones are specified by their frequencies and events are specified
* by their number. This element can only take events as input. Do not confuse
* with "method" which specified the output.
*


@ -132,7 +132,7 @@ gst_edgetv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
g *= g;
b *= b;
r = r >> 5; /* To lack the lower bit for saturated addition, */
g = g >> 5; /* devide the value with 32, instead of 16. It is */
g = g >> 5; /* divide the value with 32, instead of 16. It is */
b = b >> 4; /* same as `v2 &= 0xfefeff' */
if (r > 127)
r = 127;


@ -48,7 +48,7 @@
#include "gstquark.h"
#include "gsteffectv.h"
/* number of frames of time-buffer. It should be as a configurable paramater */
/* number of frames of time-buffer. It should be as a configurable parameter */
/* This number also must be 2^n just for the speed. */
#define PLANES 16


@ -1003,7 +1003,7 @@ gst_flv_demux_update_resync (GstFlvDemux * demux, guint32 dts, gboolean discont,
gboolean ret = FALSE;
gint32 ddts = dts - *last;
if (!discont && ddts <= -RESYNC_THRESHOLD) {
/* Theoretically, we should use substract the duration of the last buffer,
/* Theoretically, we should use subtract the duration of the last buffer,
but this demuxer sends no durations on buffers, not sure if it cannot
know, or just does not care to calculate. */
*offset -= ddts * GST_MSECOND;
@ -3047,7 +3047,7 @@ gst_flv_demux_handle_seek_pull (GstFlvDemux * demux, GstEvent * event,
demux->seek_event = gst_event_ref (event);
demux->seek_time = seeksegment.position;
demux->state = FLV_STATE_SEEK;
/* do not know about succes yet, but we did care and handled it */
/* do not know about success yet, but we did care and handled it */
ret = TRUE;
goto exit;
}
@ -3123,7 +3123,7 @@ wrong_format:
}
}
/* If we can pull that's prefered */
/* If we can pull that's preferred */
static gboolean
gst_flv_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
{


@ -39,7 +39,7 @@
* The application that wants to index the stream will create a new index object
* using gst_index_new() or gst_index_factory_make(). The index is assigned to a
* specific element, a bin or the whole pipeline. This will cause indexable
* elements to add entires to the index while playing.
* elements to add entries to the index while playing.
*/
/* FIXME: complete gobject annotations */
@ -326,7 +326,7 @@ gst_index_new (void)
/**
* gst_index_commit:
* @index: the index to commit
* @id: the writer that commited the index
* @id: the writer that committed the index
*
* Tell the index that the writer with the given id is done
* with this index and is not going to write any more entries
@ -787,7 +787,7 @@ gst_index_add_entry (GstIndex * index, GstIndexEntry * entry)
* gst_index_add_associationv:
* @index: the index to add the entry to
* @id: the id of the index writer
* @flags: optinal flags for this entry
* @flags: optional flags for this entry
* @n: number of associations
* @list: list of associations
*
@ -826,7 +826,7 @@ gst_index_add_associationv (GstIndex * index, gint id,
* gst_index_add_association:
* @index: the index to add the entry to
* @id: the id of the index writer
* @flags: optinal flags for this entry
* @flags: optional flags for this entry
* @format: the format of the value
* @value: the value
* @...: other format/value pairs or 0 to end the list


@ -493,7 +493,7 @@ set_caps_failed:
}
info_from_caps_failed:
{
GST_ERROR_OBJECT (self, "coud not get info from caps");
GST_ERROR_OBJECT (self, "could not get info from caps");
return FALSE;
}
}
@ -552,7 +552,7 @@ gst_deinterleave_sink_acceptcaps (GstPad * pad, GstObject * parent,
info_from_caps_failed:
{
GST_ERROR_OBJECT (self, "coud not get info from caps");
GST_ERROR_OBJECT (self, "could not get info from caps");
return FALSE;
}
}
@ -583,7 +583,7 @@ gst_deinterleave_getcaps (GstPad * pad, GstObject * parent, GstCaps * filter)
* to get all formats that are possible up- and downstream.
*
* For the pad for which the caps are requested we don't remove the channel
* informations as they must be in the returned caps and incompatibilities
* information as they must be in the returned caps and incompatibilities
* will be detected here already
*/
ret = gst_caps_new_any ();


@ -3209,7 +3209,7 @@ gst_qt_mux_start_file (GstQTMux * qtmux)
atom_moov_get_trak_count (qtmux->moov));
GST_OBJECT_UNLOCK (qtmux);
/* Now that we know how much reserved space is targetted,
/* Now that we know how much reserved space is targeted,
* output a free atom to fill the extra reserved */
ret = gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size,
qtmux->reserved_moov_size - qtmux->base_moov_size, FALSE);


@ -133,7 +133,7 @@ struct _QtDemuxSample
#define QTSAMPLE_DTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp))
/* timestamp + offset + cslg_shift is the outgoing PTS */
#define QTSAMPLE_PTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (stream)->cslg_shift + (sample)->pts_offset))
/* timestamp + offset is the PTS used for internal seek calcuations */
/* timestamp + offset is the PTS used for internal seek calculations */
#define QTSAMPLE_PTS_NO_CSLG(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (sample)->pts_offset))
/* timestamp + duration - dts is the duration */
#define QTSAMPLE_DUR_DTS(stream, sample, dts) (QTSTREAMTIME_TO_GSTTIME ((stream), (sample)->timestamp + (sample)->duration) - (dts))
@ -2231,7 +2231,7 @@ gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard)
/* Maps the @segment to the qt edts internal segments and pushes
* the correspnding segment event.
* the corresponding segment event.
*
* If it ends up being at a empty segment, a gap will be pushed and the next
* edts segment will be activated in sequence.
@ -4270,7 +4270,7 @@ qtdemux_parse_moof (GstQTDemux * qtdemux, const guint8 * buffer, guint length,
if (!qtdemux->upstream_format_is_time && !qtdemux->first_moof_already_parsed
&& !qtdemux->received_seek && GST_CLOCK_TIME_IS_VALID (min_dts)
&& min_dts != 0) {
/* Unless the user has explictly requested another seek, perform an
/* Unless the user has explicitly requested another seek, perform an
* internal seek to the time specified in the tfdt.
*
* This way if the user opens a file where the first tfdt is 1 hour
@ -5690,7 +5690,7 @@ extract_cc_from_data (QtDemuxStream * stream, const guint8 * data, gsize size,
GST_DEBUG_OBJECT (stream->pad, "here");
/* Check if we have somethig compatible */
/* Check if we have something compatible */
stsd_entry = CUR_STREAM (stream);
switch (stsd_entry->fourcc) {
case FOURCC_c608:{
@ -7229,7 +7229,7 @@ gst_qtdemux_process_adapter (GstQTDemux * demux, gboolean force)
*
* To keep track of the current buffer timestamp and starting point
* we use gst_adapter_prev_pts that gives us the PTS and the distance
* from the beggining of the buffer, with the distance and demux->offset
* from the beginning of the buffer, with the distance and demux->offset
* we know if it is still the same buffer or not.
*/
prev_pts = gst_adapter_prev_pts (demux->adapter, &dist);
@ -8062,7 +8062,7 @@ qtdemux_parse_node (GstQTDemux * qtdemux, GNode * node, const guint8 * buffer,
* the same format. */
/* video sample description size is 86 bytes without extension.
* node_length have to be bigger than 86 bytes because video sample
* description can include extenstions such as esds, fiel, glbl, etc. */
* description can include extensions such as esds, fiel, glbl, etc. */
if (node_length < 86) {
GST_WARNING_OBJECT (qtdemux, "%" GST_FOURCC_FORMAT
" sample description length too short (%u < 86)",
@ -9336,7 +9336,7 @@ qtdemux_stbl_init (GstQTDemux * qtdemux, QtDemuxStream * stream, GNode * stbl)
}
} else {
/* Ensure the cslg_shift value is consistent so we can use it
* unconditionnally to produce TS and Segment */
* unconditionally to produce TS and Segment */
stream->cslg_shift = 0;
}
@ -13813,7 +13813,7 @@ qtdemux_tag_add_blob (GNode * node, GstQtDemuxTagList * qtdemuxtaglist)
else
style = "iso";
/* santize the name for the caps. */
/* sanitize the name for the caps. */
for (i = 0; i < 4; i++) {
guint8 d = data[4 + i];
if (g_ascii_isalnum (d))


@ -63,7 +63,7 @@ static const gchar qt_lang_map[][4] = {
* 026 Hungarian
* 027 Estonian
* 028 Latvian / Lettish
* 029 Lappish / Saamish (used code for Nothern Sami)
* 029 Lappish / Saamish (used code for Northern Sami)
*/
"urd", "hin", "tha", "kor", "lit", "pol", "hun", "est", "lav", "sme",


@ -308,7 +308,7 @@ gst_level_get_property (GObject * object, guint prop_id,
* input sample data enters in *in_data and is not modified
* this filter only accepts signed audio data, so mid level is always 0
*
* for integers, this code considers the non-existant positive max value to be
* for integers, this code considers the non-existent positive max value to be
* full-scale; so max-1 will not map to 1.0
*/


@ -863,7 +863,7 @@ gst_ebml_write_buffer_header (GstEbmlWrite * ebml, guint32 id, guint64 length)
/**
* gst_ebml_write_buffer:
* @ebml: #GstEbmlWrite
* @buf: #GstBuffer cointaining the data.
* @buf: #GstBuffer containing the data.
*
* Write binary element (see gst_ebml_write_buffer_header).
*/


@ -4529,7 +4529,7 @@ gst_matroska_demux_parse_blockgroup_or_simpleblock (GstMatroskaDemux * demux,
/* QoS for video track with an index. the assumption is that
index entries point to keyframes, but if that is not true we
will instad skip until the next keyframe. */
will instead skip until the next keyframe. */
if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO &&
stream->index_table && demux->common.segment.rate > 0.0) {
@ -5091,7 +5091,7 @@ gst_matroska_demux_check_read_size (GstMatroskaDemux * demux, guint64 bytes)
}
}
/* returns TRUE if we truely are in error state, and should give up */
/* returns TRUE if we truly are in error state, and should give up */
static inline GstFlowReturn
gst_matroska_demux_check_parse_error (GstMatroskaDemux * demux)
{


@ -644,7 +644,7 @@ gst_matroska_pad_free (GstPad * collect_pad)
/**
* gst_matroska_mux_reset:
* @element: #GstMatroskaMux that should be reseted.
* @element: #GstMatroskaMux that should be reset.
*
* Reset matroska muxer back to initial state.
*/
@ -2494,7 +2494,7 @@ gst_matroska_mux_release_pad (GstElement * element, GstPad * pad)
if (cdata->pad == pad) {
/*
* observed duration, this will remain GST_CLOCK_TIME_NONE
* only if the pad is resetted
* only if the pad is reset
*/
GstClockTime collected_duration = GST_CLOCK_TIME_NONE;
@ -3581,7 +3581,7 @@ gst_matroska_mux_finish (GstMatroskaMux * mux)
GstMatroskaPad *collect_pad;
/*
* observed duration, this will never remain GST_CLOCK_TIME_NONE
* since this means buffer without timestamps that is not possibile
* since this means buffer without timestamps that is not possible
*/
GstClockTime collected_duration = GST_CLOCK_TIME_NONE;


@ -27,7 +27,7 @@
* chained oggs. Fixes #334082
* TODO: Test samples: http://www.matroska.org/samples/matrix/index.html
* http://samples.mplayerhq.hu/Matroska/
* TODO: check if parseing is done correct for all codecs according to spec
* TODO: check if parsing is done correct for all codecs according to spec
* TODO: seeking with incomplete or without CUE
*/
@ -1870,7 +1870,7 @@ gst_matroska_parse_parse_blockgroup_or_simpleblock (GstMatroskaParse * parse,
/* QoS for video track with an index. the assumption is that
index entries point to keyframes, but if that is not true we
will instad skip until the next keyframe. */
will instead skip until the next keyframe. */
if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO &&
stream->index_table && parse->common.segment.rate > 0.0) {
@ -2301,7 +2301,7 @@ gst_matroska_parse_check_read_size (GstMatroskaParse * parse, guint64 bytes)
}
#if 0
/* returns TRUE if we truely are in error state, and should give up */
/* returns TRUE if we truly are in error state, and should give up */
static inline gboolean
gst_matroska_parse_check_parse_error (GstMatroskaParse * parse)
{


@ -1441,7 +1441,7 @@ gst_matroska_read_common_parse_header (GstMatroskaReadCommon * common,
goto exit_error;
switch (id) {
/* is our read version uptodate? */
/* is our read version up-to-date? */
case GST_EBML_ID_EBMLREADVERSION:{
guint64 num;
@ -2125,18 +2125,18 @@ gst_matroska_read_common_parse_metadata_id_simple_tag (GstMatroskaReadCommon *
/* ICRA The ICRA content rating for parental control. (Previously RSACi) */
/* Temporal Information */
GST_MATROSKA_TAG_ID_DATE_RELEASED, GST_TAG_DATE}, { /* The time that the item was originaly released. This is akin to the TDRL tag in ID3. */
GST_MATROSKA_TAG_ID_DATE_RELEASED, GST_TAG_DATE}, { /* The time that the item was originally released. This is akin to the TDRL tag in ID3. */
GST_MATROSKA_TAG_ID_DATE_RECORDED, GST_TAG_DATE}, { /* The time that the recording began. This is akin to the TDRC tag in ID3. */
GST_MATROSKA_TAG_ID_DATE_ENCODED, GST_TAG_DATE}, { /* The time that the encoding of this item was completed began. This is akin to the TDEN tag in ID3. */
GST_MATROSKA_TAG_ID_DATE_TAGGED, GST_TAG_DATE}, { /* The time that the tags were done for this item. This is akin to the TDTG tag in ID3. */
GST_MATROSKA_TAG_ID_DATE_DIGITIZED, GST_TAG_DATE}, { /* The time that the item was tranfered to a digital medium. This is akin to the IDIT tag in RIFF. */
GST_MATROSKA_TAG_ID_DATE_DIGITIZED, GST_TAG_DATE}, { /* The time that the item was transferred to a digital medium. This is akin to the IDIT tag in RIFF. */
GST_MATROSKA_TAG_ID_DATE_WRITTEN, GST_TAG_DATE}, { /* The time that the writing of the music/script began. */
GST_MATROSKA_TAG_ID_DATE_PURCHASED, GST_TAG_DATE}, { /* Information on when the file was purchased (see also purchase tags). */
GST_MATROSKA_TAG_ID_DATE, GST_TAG_DATE}, { /* Matroska spec does NOT have this tag! Dunno what it was doing here, probably for compatibility. */
/* Spacial Information */
GST_MATROSKA_TAG_ID_RECORDING_LOCATION, GST_TAG_GEO_LOCATION_NAME}, { /* The location where the item was recorded. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. This code is followed by a comma, then more detailed information such as state/province, another comma, and then city. For example, "US, Texas, Austin". This will allow for easy sorting. It is okay to only store the country, or the country and the state/province. More detailed information can be added after the city through the use of additional commas. In cases where the province/state is unknown, but you want to store the city, simply leave a space between the two commas. For example, "US, , Austin". */
/* COMPOSITION_LOCATION Location that the item was originaly designed/written. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. This code is followed by a comma, then more detailed information such as state/province, another comma, and then city. For example, "US, Texas, Austin". This will allow for easy sorting. It is okay to only store the country, or the country and the state/province. More detailed information can be added after the city through the use of additional commas. In cases where the province/state is unknown, but you want to store the city, simply leave a space between the two commas. For example, "US, , Austin". */
/* COMPOSITION_LOCATION Location that the item was originally designed/written. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. This code is followed by a comma, then more detailed information such as state/province, another comma, and then city. For example, "US, Texas, Austin". This will allow for easy sorting. It is okay to only store the country, or the country and the state/province. More detailed information can be added after the city through the use of additional commas. In cases where the province/state is unknown, but you want to store the city, simply leave a space between the two commas. For example, "US, , Austin". */
/* COMPOSER_NATIONALITY Nationality of the main composer of the item, mostly for classical music. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. */
/* Personal */
@ -2150,7 +2150,7 @@ gst_matroska_read_common_parse_metadata_id_simple_tag (GstMatroskaReadCommon *
/* ENCODER_SETTINGS A list of the settings used for encoding this item. No specific format. */
GST_MATROSKA_TAG_ID_BPS, GST_TAG_BITRATE}, {
GST_MATROSKA_TAG_ID_BITSPS, GST_TAG_BITRATE}, { /* Matroska spec does NOT have this tag! Dunno what it was doing here, probably for compatibility. */
/* WONTFIX (already handled in another way): FPS The average frames per second of the specified item. This is typically the average number of Blocks per second. In the event that lacing is used, each laced chunk is to be counted as a seperate frame. */
/* WONTFIX (already handled in another way): FPS The average frames per second of the specified item. This is typically the average number of Blocks per second. In the event that lacing is used, each laced chunk is to be counted as a separate frame. */
GST_MATROSKA_TAG_ID_BPM, GST_TAG_BEATS_PER_MINUTE}, {
/* MEASURE In music, a measure is a unit of time in Western music like "4/4". It represents a regular grouping of beats, a meter, as indicated in musical notation by the time signature.. The majority of the contemporary rock and pop music you hear on the radio these days is written in the 4/4 time signature. */
/* TUNING It is saved as a frequency in hertz to allow near-perfect tuning of instruments to the same tone as the musical piece (e.g. "441.34" in Hertz). The default value is 440.0 Hz. */


@ -3,7 +3,7 @@
* Copyright (C) 1998-2001 Andy Lo A Foe <andy@alsaplayer.org>
* Original code by Tinic Uro
*
* This code is copied from Alsaplayer. The orginal code was by Tinic Uro and under
* This code is copied from Alsaplayer. The original code was by Tinic Uro and under
* the BSD license without a advertisig clause. Andy Lo A Foe then relicensed the
* code when he used it for Alsaplayer to GPL with Tinic's permission. Richard Boulton
* then took this code and made a GPL plugin out of it.


@ -336,7 +336,7 @@ gst_multi_file_src_set_property (GObject * object, guint prop_id,
(st = gst_caps_get_structure (new_caps, 0))
&& gst_structure_get_fraction (st, "framerate", &src->fps_n,
&src->fps_d)) {
GST_INFO_OBJECT (src, "Seting framerate to %d/%d", src->fps_n,
GST_INFO_OBJECT (src, "Setting framerate to %d/%d", src->fps_n,
src->fps_d);
} else {
src->fps_n = -1;


@ -136,7 +136,7 @@ handle_buffer_measuring (GstSplitMuxPartReader * reader,
if (reader->prep_state == PART_STATE_PREPARING_COLLECT_STREAMS &&
!part_pad->seen_buffer) {
/* If this is the first buffer on the pad in the collect_streams state,
* then calculate inital offset based on running time of this segment */
* then calculate initial offset based on running time of this segment */
part_pad->initial_ts_offset =
part_pad->orig_segment.start + part_pad->orig_segment.base -
part_pad->orig_segment.time;


@ -2181,7 +2181,7 @@ check_completed_gop (GstSplitMuxSink * splitmux, MqStreamCtx * ctx)
/* On ENDING_FILE, the reference stream sends a command to start a new
* fragment, then releases the GOP for output in the new fragment.
* If somes streams received no buffer during the last GOP that overran,
* If some streams received no buffer during the last GOP that overran,
* because its next buffer has a timestamp bigger than
* ctx->max_in_running_time, its queue is empty. In that case the only
* way to wakeup the output thread is by injecting an event in the


@ -1408,7 +1408,7 @@ splitmux_src_pad_event (GstPad * pad, GstObject * parent, GstEvent * event)
SPLITMUX_SRC_UNLOCK (splitmux);
}
case GST_EVENT_RECONFIGURE:{
GST_DEBUG_OBJECT (splitmux, "reconfigure evnet on pad %" GST_PTR_FORMAT,
GST_DEBUG_OBJECT (splitmux, "reconfigure event on pad %" GST_PTR_FORMAT,
pad);
SPLITMUX_SRC_PADS_RLOCK (splitmux);


@ -107,7 +107,7 @@ pattern_ph_match (const gchar * match_pattern, MatchMode match_mode,
return TRUE;
if (next_wildcard_reached)
/* the forthcoming pattern substring up to the next wildcard has
* been matched, but a mismatch occoured for the rest of the
* been matched, but a mismatch occurred for the rest of the
* pattern, following the next wildcard.
* there's no need to advance the current match position any
* further if the rest pattern will not match.


@ -26,7 +26,7 @@
G_BEGIN_DECLS
/* Reference level (in dBSPL). The 2001 proposal specifies 83. This was
* changed later in all implementations to 89, which is the new, offical value:
* changed later in all implementations to 89, which is the new, official value:
* David Robinson acknowledged the change but didn't update the website yet. */
#define RG_REFERENCE_LEVEL 89.


@ -17,7 +17,7 @@ The following fields can or must (*) be specified in the structure:
* payload: (int) [0, 127]
For audio and video, these will normally be a media payload type as
defined in the RTP Audio/Video Profile. For dynamicaly allocated
defined in the RTP Audio/Video Profile. For dynamically allocated
payload types, this value will be >= 96 and the encoding-name must be
set.
@ -296,7 +296,7 @@ Some gst-launch-1.0 lines:
The receiver now displays an h263 image. Since there is no jitterbuffer in the
pipeline, frames will be displayed at the time when they are received. This can
result in jerky playback in the case of high network jitter or currupted video
result in jerky playback in the case of high network jitter or corrupted video
when packets are dropped or reordered.
Stream a quicktime file with mpeg4 video and AAC audio on port 5000 and port
@ -342,7 +342,7 @@ Some gst-launch-1.0 lines:
recommended to use a gstrtpjitterbuffer after the udpsrc elements.
Even when sync is enabled, the two different streams will not play synchronised
against eachother because the receiver does not have enough information to
against each other because the receiver does not have enough information to
perform this task. For this you need to add the rtpbin element in both the
sender and receiver pipeline and use additional sources and sinks to transmit
RTCP packets used for inter-stream synchronisation.


@ -288,7 +288,7 @@ foreach_metadata_drop (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
/* Process one RTP packet. Accumulate RTP payload in the proper place in a DV
* frame, and return that frame if we detect a new frame, or NULL otherwise.
* We assume a DV frame is 144000 bytes. That should accomodate PAL as well as
* We assume a DV frame is 144000 bytes. That should accommodate PAL as well as
* NTSC.
*/
static GstBuffer *


@ -220,7 +220,7 @@ gst_rtp_g726_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
encoding_name =
g_strdup (gst_structure_get_string (structure, "encoding-name"));
/* if we managed to negotiate to AAL2, we definatly are going to do AAL2
/* if we managed to negotiate to AAL2, we definitely are going to do AAL2
* encoding. Else we only encode AAL2 when explicitly set by the
* property. */
if (g_str_has_prefix (encoding_name, "AAL2-"))


@ -289,7 +289,7 @@ gst_rtp_gst_pay_create_from_adapter (GstRtpGSTPay * rtpgstpay,
GstBuffer *paybuf;
/* this will be the total lenght of the packet */
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (8 + avail, 0, 0);
/* fill one MTU or all available bytes */


@ -773,7 +773,7 @@ find_gob (GstRtpH261Pay * pay, const guint8 * data, guint size, guint pos)
return ret;
}
/* Scans after all GOB start codes and initalizes the GOB structure with start
/* Scans after all GOB start codes and initializes the GOB structure with start
* and end positions. */
static ParseReturn
gst_rtp_h261_pay_init_gobs (GstRtpH261Pay * pay, Gob * gobs, gint num_gobs,
@ -947,7 +947,7 @@ gst_rtp_h261_pay_shift_buffer (GstRtpH261Pay * pay, const guint8 * data,
gsize size, gint offset, gsize * newsize)
{
/* In order to read variable length codes at the very end of the buffer
* wihout peeking into possibly unallocated data, we pad with extra 0's
* without peeking into possibly unallocated data, we pad with extra 0's
* which will generate an invalid code at the end of the buffer. */
guint pad = 4;
gsize allocsize = size + pad;


@ -1708,7 +1708,7 @@ gst_rtp_h263_pay_flush (GstRtpH263Pay * rtph263pay)
GST_DEBUG_OBJECT (rtph263pay, "Frame too large for MTU");
/*
* Let's go trough all the data and fragment it untill end is reached
* Let's go trough all the data and fragment it until end is reached
*/
gst_rtp_h263_pay_boundry_init (&bound, NULL, rtph263pay->data - 1, 0, 0);


@ -680,7 +680,7 @@ gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
* This algorithm separates large frames at synchronisation points (Segments)
* (See RFC 4629 section 6). It would be interesting to have a property such as network
* quality to select between both packetization methods */
/* TODO Add VRC supprt (See RFC 4629 section 5.2) */
/* TODO Add VRC support (See RFC 4629 section 5.2) */
while (avail > 0) {
guint towrite;


@ -1114,7 +1114,7 @@ gst_rtp_h264_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
*/
nalu_size = (payload[0] << 8) | payload[1];
/* dont include nalu_size */
/* don't include nalu_size */
if (nalu_size > (payload_len - 2))
nalu_size = payload_len - 2;


@ -1361,7 +1361,7 @@ gst_rtp_h264_pay_handle_buffer (GstRTPBasePayload * basepayload,
avc = rtph264pay->stream_format == GST_H264_STREAM_FORMAT_AVC;
if (avc) {
/* In AVC mode, there is no adapter, so nothign to drain */
/* In AVC mode, there is no adapter, so nothing to drain */
if (draining)
return GST_FLOW_OK;
gst_buffer_map (buffer, &map, GST_MAP_READ);


@ -1367,7 +1367,7 @@ gst_rtp_h265_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
nalu_size = (payload[0] << 8) | payload[1];
/* dont include nalu_size */
/* don't include nalu_size */
if (nalu_size > (payload_len - 2))
nalu_size = payload_len - 2;


@ -418,7 +418,7 @@ MakeHeaders (guint8 * p, int type, int width, int height, guint8 * qt,
*p++ = 0x11; /* huffman table 1 */
*p++ = 0; /* first DCT coeff */
*p++ = 63; /* last DCT coeff */
*p++ = 0; /* sucessive approx. */
*p++ = 0; /* successive approx. */
return (p - start);
};


@ -97,7 +97,7 @@ typedef enum _RtpJpegMarker RtpJpegMarker;
* @JPEG_MARKER_DRI: Define Restart Interval marker
* @JPEG_MARKER_H264: H264 marker
*
* Identifers for markers in JPEG header
* Identifiers for markers in JPEG header
*/
enum _RtpJpegMarker
{


@ -389,7 +389,7 @@ gst_rtp_mp4a_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
skip += data_len;
pos += data_len;
/* update our pointers whith what we consumed */
/* update our pointers with what we consumed */
data += skip;
avail -= skip;


@ -598,7 +598,7 @@ gst_rtp_mp4g_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
rtpmp4gdepay->last_AU_index = AU_index;
}
/* keep track of the higest AU_index */
/* keep track of the highest AU_index */
if (rtpmp4gdepay->max_AU_index != -1
&& rtpmp4gdepay->max_AU_index <= AU_index) {
GST_DEBUG_OBJECT (rtpmp4gdepay, "new interleave group, flushing");


@ -472,7 +472,7 @@ gst_rtp_mp4g_pay_flush (GstRtpMP4GPay * rtpmp4gpay)
GstRTPBuffer rtp = { NULL };
GstBuffer *paybuf;
/* this will be the total lenght of the packet */
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
/* fill one MTU or all available bytes, we need 4 spare bytes for


@ -267,7 +267,7 @@ gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay)
guint packet_len;
GstRTPBuffer rtp = { NULL };
/* this will be the total lenght of the packet */
/* this will be the total length of the packet */
packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
/* fill one MTU or all available bytes */


@ -144,7 +144,7 @@ _alloc_red_packet_and_fill_headers (GstRtpRedEnc * self,
guint red_header_size = rtp_red_block_header_get_length (FALSE) +
(redundant_block ? rtp_red_block_header_get_length (TRUE) : 0);
guint32 timestmap = gst_rtp_buffer_get_timestamp (inp_rtp);
guint32 timestamp = gst_rtp_buffer_get_timestamp (inp_rtp);
guint csrc_count = gst_rtp_buffer_get_csrc_count (inp_rtp);
GstBuffer *red = gst_rtp_buffer_new_allocate (red_header_size, 0, csrc_count);
guint8 *red_block_header;
@ -161,7 +161,7 @@ _alloc_red_packet_and_fill_headers (GstRtpRedEnc * self,
gst_rtp_buffer_set_marker (&red_rtp, gst_rtp_buffer_get_marker (inp_rtp));
gst_rtp_buffer_set_payload_type (&red_rtp, self->pt);
gst_rtp_buffer_set_seq (&red_rtp, gst_rtp_buffer_get_seq (inp_rtp));
gst_rtp_buffer_set_timestamp (&red_rtp, timestmap);
gst_rtp_buffer_set_timestamp (&red_rtp, timestamp);
gst_rtp_buffer_set_ssrc (&red_rtp, gst_rtp_buffer_get_ssrc (inp_rtp));
for (i = 0; i != csrc_count; ++i)
gst_rtp_buffer_set_csrc (&red_rtp, i,
@ -173,7 +173,7 @@ _alloc_red_packet_and_fill_headers (GstRtpRedEnc * self,
rtp_red_block_set_is_redundant (red_block_header, TRUE);
rtp_red_block_set_payload_type (red_block_header, redundant_block->pt);
rtp_red_block_set_timestamp_offset (red_block_header,
timestmap - redundant_block->timestamp);
timestamp - redundant_block->timestamp);
rtp_red_block_set_payload_length (red_block_header,
gst_buffer_get_size (redundant_block->payload));


@ -599,7 +599,7 @@ no_output:
return NULL;
/* ERORRS */
/* ERRORS */
switch_failed:
{
GST_ELEMENT_WARNING (rtptheoradepay, STREAM, DECODE,


@ -557,7 +557,7 @@ gst_rtp_ulpfec_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
ret = gst_rtp_ulpfec_enc_stream_ctx_process (ctx, buffer);
/* FIXME: does not work for mulitple ssrcs */
/* FIXME: does not work for multiple ssrcs */
fec->num_packets_protected = ctx->num_packets_protected;
return ret;


@ -630,7 +630,7 @@ no_output:
}
return NULL;
}
/* ERORRS */
/* ERRORS */
switch_failed:
{
GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,


@ -456,7 +456,7 @@ gst_rtp_vraw_depay_process_packet (GstRTPBaseDepayload * depayload,
goto next;
}
/* calculate the maximim amount of bytes we can use per line */
/* calculate the maximum amount of bytes we can use per line */
if (offs + ((length / pgroup) * xinc) > width) {
plen = ((width - offs) * pgroup) / xinc;
GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",


@ -92,7 +92,7 @@ rtp_storage_get_packets_for_recovery (RtpStorage * self, gint fec_pt,
STORAGE_UNLOCK (self);
if (NULL == stream) {
GST_ERROR_OBJECT (self, "Cant find ssrc = 0x08%x", ssrc);
GST_ERROR_OBJECT (self, "Can't find ssrc = 0x08%x", ssrc);
} else {
STREAM_LOCK (stream);
if (stream->queue.length > 0) {
@ -128,7 +128,7 @@ rtp_storage_get_redundant_packet (RtpStorage * self, guint32 ssrc,
STORAGE_UNLOCK (self);
if (NULL == stream) {
GST_ERROR_OBJECT (self, "Cant find ssrc = 0x%x", ssrc);
GST_ERROR_OBJECT (self, "Can't find ssrc = 0x%x", ssrc);
} else {
STREAM_LOCK (stream);
if (stream->queue.length > 0) {


@ -358,7 +358,7 @@ rtp_ulpfec_map_info_map (GstBuffer * buffer, RtpUlpFecMapInfo * info)
* @info: #RtpUlpFecMapInfo
*
* Unmap @info previously mapped with rtp_ulpfec_map_info_map() and unrefs the
* buffer. For convinience can even be called even if rtp_ulpfec_map_info_map
* buffer. For convenience can even be called even if rtp_ulpfec_map_info_map
* returned FALSE
**/
void


@ -38,12 +38,12 @@ G_BEGIN_DECLS
#define RTP_ULPFEC_SEQ_BASE_OFFSET_MAX(L) (RTP_ULPFEC_PROTECTED_PACKETS_MAX(L) - 1)
/**
* RtpUlpFecMapInfo: Helper wraper around GstRTPBuffer
* RtpUlpFecMapInfo: Helper wrapper around GstRTPBuffer
*
* @rtp: mapped RTP buffer
**/
typedef struct {
// FIXME: it used to contain more fileds now we are left with only GstRTPBuffer.
// FIXME: it used to contain more fields now we are left with only GstRTPBuffer.
// it will be nice to use it directly
GstRTPBuffer rtp;
} RtpUlpFecMapInfo;


@ -51,7 +51,7 @@
* To use #GstRtpBin as a sender, request a send_rtp_sink_\%u pad, which will
* automatically create a send_rtp_src_\%u pad. If the session number is not provided,
* the pad from the lowest available session will be returned. The session manager will modify the
* SSRC in the RTP packets to its own SSRC and wil forward the packets on the
* SSRC in the RTP packets to its own SSRC and will forward the packets on the
* send_rtp_src_\%u pad after updating its internal state.
*
* The session manager needs the clock-rate of the payload types it is handling
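The GstRtpBin documentation in the hunk above describes how requesting a send_rtp_sink_%u pad makes the bin expose a matching send_rtp_src_%u pad. A minimal sketch of that pad-request step, assuming session 0 and a generic RTP payloader (the function below is hypothetical and not part of this patch):

#include <gst/gst.h>

/* Hypothetical sketch: request the send RTP sink pad for session 0 and link a
 * payloader to it; rtpbin creates send_rtp_src_0 as a side effect. */
static GstPad *
link_payloader_to_rtpbin (GstElement * rtpbin, GstElement * payloader)
{
  GstPad *rtp_sink = gst_element_get_request_pad (rtpbin, "send_rtp_sink_0");
  GstPad *payload_src = gst_element_get_static_pad (payloader, "src");

  if (gst_pad_link (payload_src, rtp_sink) != GST_PAD_LINK_OK)
    GST_WARNING ("could not link payloader to rtpbin");

  gst_object_unref (payload_src);
  /* The request pad is kept; release it later with
   * gst_element_release_request_pad() and unref it. */
  return rtp_sink;
}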
@ -394,7 +394,7 @@ static GstElement *session_request_element (GstRtpBinSession * session,
/* Manages the RTP stream for one SSRC.
*
* We pipe the stream (comming from the SSRC demuxer) into a jitterbuffer.
* We pipe the stream (coming from the SSRC demuxer) into a jitterbuffer.
* If we see an SDES RTCP packet that links multiple SSRCs together based on a
* common CNAME, we create a GstRtpBinClient structure to group the SSRCs
* together (see below).
@ -890,7 +890,7 @@ get_pt_map (GstRtpBinSession * session, guint pt)
bin = session->bin;
GST_DEBUG ("emiting signal for pt %u in session %u", pt, session->id);
GST_DEBUG ("emitting signal for pt %u in session %u", pt, session->id);
/* not in cache, send signal to request caps */
g_value_init (&args[0], GST_TYPE_ELEMENT);
@ -1413,7 +1413,7 @@ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len,
/* For NTP sync we need to first get a snapshot of running_time and NTP
* time. We know at what running_time we play a certain RTP time, we also
* calculated when we would play the RTP time in the SR packet. Now we need
* to know how the running_time and the NTP time relate to eachother. */
* to know how the running_time and the NTP time relate to each other. */
get_current_times (bin, &local_running_time, &local_ntpnstime);
/* see how far away the NTP time is. This is the difference between the
@ -1454,9 +1454,9 @@ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len,
/* calculate the min of all deltas, ignoring streams that did not yet have a
* valid rt_delta because we did not yet receive an SR packet for those
* streams.
* We calculate the mininum because we would like to only apply positive
* We calculate the minimum because we would like to only apply positive
* offsets to streams, delaying their playback instead of trying to speed up
* other streams (which might be imposible when we have to create negative
* other streams (which might be impossible when we have to create negative
* latencies).
* The stream that has the smallest diff is selected as the reference stream,
* all other streams will have a positive offset to this difference. */
@ -1469,7 +1469,7 @@ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len,
guint64 ext_base;
use_rtp = TRUE;
/* signed version for convienience */
/* signed version for convenience */
clock_base = base_rtptime;
/* deal with possible wrap-around */
ext_base = base_rtptime;
@ -1837,7 +1837,7 @@ create_stream (GstRtpBinSession * session, guint32 ssrc)
/* ERRORS */
max_streams:
{
GST_WARNING_OBJECT (rtpbin, "stream exeeds maximum (%d)",
GST_WARNING_OBJECT (rtpbin, "stream exceeds maximum (%d)",
rtpbin->max_streams);
return NULL;
}
@ -3452,7 +3452,7 @@ fec_decoder_link_failed:
}
}
/* a new pad (SSRC) was created in @session. This signal is emited from the
/* a new pad (SSRC) was created in @session. This signal is emitted from the
* payload demuxer. */
static void
new_payload_found (GstElement * element, guint pt, GstPad * pad,
@ -3547,7 +3547,7 @@ static void
payload_type_change (GstElement * element, guint pt, GstRtpBinSession * session)
{
GST_DEBUG_OBJECT (session->bin,
"emiting signal for pt type changed to %u in session %u", pt,
"emitting signal for pt type changed to %u in session %u", pt,
session->id);
g_signal_emit (session->bin, gst_rtp_bin_signals[SIGNAL_PAYLOAD_TYPE_CHANGE],
@ -4617,7 +4617,7 @@ remove_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session)
}
/* If the requested name is NULL we should create a name with
* the session number assuming we want the lowest posible session
* the session number assuming we want the lowest possible session
* with a free pad like the template */
static gchar *
gst_rtp_bin_get_free_pad_name (GstElement * element, GstPadTemplate * templ)


@ -1039,7 +1039,7 @@ gst_rtp_jitter_buffer_init (GstRtpJitterBuffer * jitterbuffer)
g_queue_init (&priv->gap_packets);
gst_segment_init (&priv->segment, GST_FORMAT_TIME);
/* reset skew detection initialy */
/* reset skew detection initially */
rtp_jitter_buffer_reset_skew (priv->jbuf);
rtp_jitter_buffer_set_delay (priv->jbuf, priv->latency_ns);
rtp_jitter_buffer_set_buffering (priv->jbuf, FALSE);
@ -1931,7 +1931,7 @@ gst_rtp_jitter_buffer_sink_rtcp_event (GstPad * pad, GstObject * parent,
}
/*
* Must be called with JBUF_LOCK held, will release the LOCK when emiting the
* Must be called with JBUF_LOCK held, will release the LOCK when emitting the
* signal. The function returns GST_FLOW_ERROR when a parsing error happened and
* GST_FLOW_FLUSHING when the element is shutting down. On success
* GST_FLOW_OK is returned.
@ -2198,7 +2198,7 @@ get_rtx_delay (GstRtpJitterBufferPrivate * priv)
if (priv->rtx_delay == -1) {
/* the maximum delay for any RTX-packet is given by the latency, since
anything after that is considered lost. For various calulcations,
(given large avg_jitter and/or packet_spacing), the resuling delay
(given large avg_jitter and/or packet_spacing), the resulting delay
could exceed the configured latency, ending up issuing an RTX-request
that would never arrive in time. To help this we cap the delay
for any RTX with the last possible time it could still arrive in time. */
@ -3889,7 +3889,7 @@ do_lost_timeout (GstRtpJitterBuffer * jitterbuffer, RtpTimer * timer,
* lost items (so that we can set discont flags and such) */
if (priv->do_lost) {
GstClockTime duration, timestamp;
/* create paket lost event */
/* create packet lost event */
timestamp = apply_offset (jitterbuffer, get_pts_timeout (timer));
duration = timer->duration;
if (duration == GST_CLOCK_TIME_NONE && priv->packet_spacing > 0)
@ -4130,7 +4130,7 @@ stopping:
}
/*
* This funcion implements the main pushing loop on the source pad.
* This function implements the main pushing loop on the source pad.
*
* It first tries to push as many buffers as possible. If there is a seqnum
* mismatch, we wait for the next timeouts.
@ -4184,7 +4184,7 @@ pause:
}
}
/* collect the info from the lastest RTCP packet and the jitterbuffer sync, do
/* collect the info from the latest RTCP packet and the jitterbuffer sync, do
* some sanity checks and then emit the handle-sync signal with the parameters.
* This function must be called with the LOCK */
static void

View file

@ -203,7 +203,7 @@ gst_rtp_pt_demux_class_init (GstRtpPtDemuxClass * klass)
* @pt: the payload type
* @pad: the pad with the new payload
*
* Emited when a new payload type pad has been created in @demux.
* Emitted when a new payload type pad has been created in @demux.
*/
gst_rtp_pt_demux_signals[SIGNAL_NEW_PAYLOAD_TYPE] =
g_signal_new ("new-payload-type", G_TYPE_FROM_CLASS (klass),
@ -216,7 +216,7 @@ gst_rtp_pt_demux_class_init (GstRtpPtDemuxClass * klass)
* @demux: the object which received the signal
* @pt: the new payload type
*
* Emited when the payload type changed.
* Emitted when the payload type changed.
*/
gst_rtp_pt_demux_signals[SIGNAL_PAYLOAD_TYPE_CHANGE] =
g_signal_new ("payload-type-change", G_TYPE_FROM_CLASS (klass),

View file

@ -49,7 +49,7 @@ struct _GstRtpPtDemuxClass
/* get the caps for pt */
GstCaps* (*request_pt_map) (GstRtpPtDemux *demux, guint pt);
/* signal emmited when a new PT is found from the incoming stream */
/* signal emitted when a new PT is found from the incoming stream */
void (*new_payload_type) (GstRtpPtDemux *demux, guint pt, GstPad * pad);
/* signal emitted when the payload type changes */

View file

@ -137,7 +137,7 @@
* It is an error, according to RFC4588 to have two retransmission requests for
* packets belonging to two different streams but with the same sequence number.
* Note that the default seqnum-offset value (-1, which means random) would
* work just fine, but it is overriden here for illustration purposes.
* work just fine, but it is overridden here for illustration purposes.
*/
#ifdef HAVE_CONFIG_H
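To make the note about seqnum-offset concrete: giving each stream's payloader a fixed, well separated offset could look like the sketch below (the function and element variable names are invented for illustration; seqnum-offset itself is the payloader property referred to above):

#include <gst/gst.h>

/* Sketch only: pin distinct sequence-number ranges per stream so two
 * retransmission requests can never refer to the same seqnum. */
static void
pin_seqnum_ranges (GstElement * rtppay_video, GstElement * rtppay_audio)
{
  /* distinct, well separated offsets so the two streams cannot collide */
  g_object_set (rtppay_video, "seqnum-offset", 1, NULL);
  g_object_set (rtppay_audio, "seqnum-offset", 10000, NULL);
}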
@ -361,7 +361,7 @@ gst_rtp_rtx_receive_src_event (GstPad * pad, GstObject * parent,
if (g_hash_table_lookup_extended (rtx->ssrc2_ssrc1_map,
GUINT_TO_POINTER (ssrc), NULL, &ssrc2)
&& GPOINTER_TO_UINT (ssrc2) != GPOINTER_TO_UINT (ssrc)) {
GST_TRACE_OBJECT (rtx, "Retransmited stream %X already associated "
GST_TRACE_OBJECT (rtx, "Retransmitted stream %X already associated "
"to its master, %X", GPOINTER_TO_UINT (ssrc2), ssrc);
} else {
SsrcAssoc *assoc;
@ -378,7 +378,7 @@ gst_rtp_rtx_receive_src_event (GstPad * pad, GstObject * parent,
* The jitter may be too impatient or the rtx packet has been
* lost too.
* It does not mean we reject the event, we still want to forward
* the request to the gstrtpsession to be translater into a FB NACK
* the request to the gstrtpsession to be translated into a FB NACK
*/
GST_LOG_OBJECT (rtx, "Duplicate request: seqnum: %u, ssrc: %X",
seqnum, ssrc);
@ -430,7 +430,7 @@ gst_rtp_rtx_receive_src_event (GstPad * pad, GstObject * parent,
GST_OBJECT_UNLOCK (rtx);
}
/* Transfer event upstream so that the request can acutally by translated
* Transfer event upstream so that the request can actually be translated
* through gstrtpsession through the network */
res = gst_pad_event_default (pad, parent, event);
break;

View file

@ -535,7 +535,7 @@ gst_rtp_rtx_send_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
GST_OBJECT_LOCK (rtx);
/* choose another ssrc for our retransmited stream */
/* choose another ssrc for our retransmitted stream */
if (g_hash_table_contains (rtx->rtx_ssrcs, GUINT_TO_POINTER (ssrc))) {
guint master_ssrc;
SSRCRtxData *data;

View file

@ -31,7 +31,7 @@
*
* * RTP packet validation based on consecutive sequence numbers.
*
* * Maintainance of the SSRC participant database.
* * Maintenance of the SSRC participant database.
*
* * Keeping per participant statistics based on received RTCP packets.
*

View file

@ -367,7 +367,7 @@ gst_rtp_ssrc_demux_class_init (GstRtpSsrcDemuxClass * klass)
* @ssrc: the SSRC of the pad
* @pad: the new pad.
*
* Emited when a new SSRC pad has been created.
* Emitted when a new SSRC pad has been created.
*/
gst_rtp_ssrc_demux_signals[SIGNAL_NEW_SSRC_PAD] =
g_signal_new ("new-ssrc-pad",
@ -382,7 +382,7 @@ gst_rtp_ssrc_demux_class_init (GstRtpSsrcDemuxClass * klass)
* @ssrc: the SSRC of the pad
* @pad: the removed pad.
*
* Emited when a SSRC pad has been removed.
* Emitted when a SSRC pad has been removed.
*/
gst_rtp_ssrc_demux_signals[SIGNAL_REMOVED_SSRC_PAD] =
g_signal_new ("removed-ssrc-pad",

View file

@ -504,7 +504,7 @@ update_buffer_level (RTPJitterBuffer * jbuf, gint * percent)
* Cri : The time of the clock at the receiver for packet i
* D + ni : The jitter when receiving packet i
*
* We see that the network delay is irrelevant here as we can elliminate D:
* We see that the network delay is irrelevant here as we can eliminate D:
*
* recv_diff(i) = (Cri + ni) - (Cr0 + n0)
*
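Written out with the same symbols (using R_i as shorthand for the receive time of packet i, a notation assumed here for clarity and not explicit in the excerpt), the cancellation is:

R_i = C_{r_i} + D + n_i
  \quad\Longrightarrow\quad
\mathrm{recv\_diff}(i) = R_i - R_0 = (C_{r_i} + n_i) - (C_{r_0} + n_0)

so the constant network delay D drops out, leaving only the receiver clock progression and the jitter terms.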

View file

@ -4337,7 +4337,7 @@ rtp_session_on_timeout (RTPSession * sess, GstClockTime current_time,
if (!is_rtcp_time (sess, current_time, &data))
goto done;
/* check if all the buffers are empty afer generation */
/* check if all the buffers are empty after generation */
all_empty = TRUE;
GST_DEBUG

View file

@ -404,7 +404,7 @@ GstClockTime rtp_session_next_timeout (RTPSession *sess, GstClockTi
GstFlowReturn rtp_session_on_timeout (RTPSession *sess, GstClockTime current_time,
guint64 ntpnstime, GstClockTime running_time);
/* request the transmittion of an early RTCP packet */
/* request the transmission of an early RTCP packet */
gboolean rtp_session_request_early_rtcp (RTPSession * sess, GstClockTime current_time,
GstClockTime max_delay);

View file

@ -1288,7 +1288,7 @@ rtp_source_process_rtp (RTPSource * src, RTPPacketInfo * pinfo)
* @reason: the reason for leaving
*
* Mark @src in the BYE state. This can happen when the source wants to
* leave the sesssion or when a BYE packets has been received.
* leave the session or when a BYE packet has been received.
*
* This will make the source inactive.
*/

View file

@ -83,8 +83,8 @@ An RTSP session is created as follows:
Transport header field. The server also includes its ports where RTP and RTCP
messages can be sent to.
In the above example UDP was choosen as a transport. At this point the RTSPSrc element
will furter configure its elements to process this stream.
In the above example UDP was chosen as a transport. At this point the RTSPSrc element
will further configure its elements to process this stream.
The RTSPSrc will create and connect an RTP session manager element and will
connect it to the src pads of the udp element. The data pad from the RTP session
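The wiring sketched in this paragraph corresponds, very roughly, to the following application-level equivalent (illustrative only, with made-up names; rtspsrc performs all of this internally, including requesting the session manager's pads and handling errors):

#include <gst/gst.h>

/* Rough sketch of the described wiring: UDP sources feeding the RTP
 * session manager (rtpbin). Pad names follow the rtpbin request-pad
 * templates; error handling is omitted. */
static void
build_udp_rtp_branch (GstBin * pipeline)
{
  GstElement *udpsrc_rtp = gst_element_factory_make ("udpsrc", NULL);
  GstElement *udpsrc_rtcp = gst_element_factory_make ("udpsrc", NULL);
  GstElement *rtpbin = gst_element_factory_make ("rtpbin", NULL);

  gst_bin_add_many (pipeline, udpsrc_rtp, udpsrc_rtcp, rtpbin, NULL);

  /* RTP data into the session manager ... */
  gst_element_link_pads (udpsrc_rtp, "src", rtpbin, "recv_rtp_sink_0");
  /* ... and RTCP into the matching RTCP sink pad */
  gst_element_link_pads (udpsrc_rtcp, "src", rtpbin, "recv_rtcp_sink_0");
}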

View file

@ -812,7 +812,7 @@ gst_rtspsrc_class_init (GstRTSPSrcClass * klass)
*/
g_object_class_install_property (gobject_class, PROP_TLS_INTERACTION,
g_param_spec_object ("tls-interaction", "TLS interaction",
"A GTlsInteraction object to promt the user for password or certificate",
"A GTlsInteraction object to prompt the user for password or certificate",
G_TYPE_TLS_INTERACTION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
@ -5701,7 +5701,7 @@ gst_rtspsrc_loop_udp (GstRTSPSrc * src)
}
continue;
case GST_RTSP_ENET:
GST_DEBUG_OBJECT (src, "An ethernet problem occured.");
GST_DEBUG_OBJECT (src, "An ethernet problem occurred.");
default:
GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
("Unhandled return value %d.", res));
@ -6554,7 +6554,7 @@ gst_rtspsrc_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
version_retry++;
break;
}
/* falltrough */
/* fallthrough */
default:
break;
}
@ -8805,7 +8805,7 @@ gst_rtspsrc_pause (GstRTSPSrc * src, gboolean async)
gst_rtsp_message_unset (&request);
gst_rtsp_message_unset (&response);
/* exit early when we did agregate control */
/* exit early when we did aggregate control */
if (control)
break;
}

View file

@ -62,7 +62,7 @@ struct _GstSpectrum
guint64 frames_per_interval; /* how many frames per interval */
guint64 frames_todo;
guint bands; /* number of spectrum bands */
gint threshold; /* energy level treshold */
gint threshold; /* energy level threshold */
gboolean multi_channel; /* send separate channel results */
guint64 num_frames; /* frame count (1 sample per channel)

View file

@ -326,7 +326,7 @@ gst_multiudpsink_class_init (GstMultiUDPSinkClass * klass)
/**
* GstMultiUDPSink::send-duplicates:
*
* When a host/port pair is added mutliple times, send the packet to the host
* When a host/port pair is added multiple times, send the packet to the host
* multiple times as well.
*/
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SEND_DUPLICATES,
@ -795,7 +795,7 @@ gst_multiudpsink_render_buffers (GstMultiUDPSink * sink, GstBuffer ** buffers,
sink->bytes_to_serve += size;
/* now copy the pre-filled num_buffer messages over to the next num_buffer
* messages for the next client, where we also change the target adddress */
* messages for the next client, where we also change the target address */
for (i = 1; i < num_addr; ++i) {
for (j = 0; j < num_buffers; ++j) {
msgs[i * num_buffers + j] = msgs[j];

View file

@ -43,7 +43,7 @@
*
* The #GstUDPSrc:caps property is mainly used to give a type to the UDP packet
* so that they can be autoplugged in GStreamer pipelines. This is very useful
* for RTP implementations where the contents of the UDP packets is transfered
* for RTP implementations where the contents of the UDP packets is transferred
* out-of-band using SDP or other means.
*
* The #GstUDPSrc:buffer-size property is used to change the default kernel
@ -78,7 +78,7 @@
*
* A custom file descriptor can be configured with the
* #GstUDPSrc:socket property. The socket will be closed when setting
* the element to READY by default. This behaviour can be overriden
* the element to READY by default. This behaviour can be overridden
* with the #GstUDPSrc:close-socket property, in which case the
* application is responsible for closing the file descriptor.
*
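A typical application-side use of the caps and buffer-size properties described above might look like this (a sketch; the caps string and the helper name are only examples):

#include <gst/gst.h>

/* Illustrative fragment: giving udpsrc a caps type so downstream
 * autoplugging works, and enlarging the kernel receive buffer. */
static GstElement *
make_rtp_udpsrc (void)
{
  GstElement *src = gst_element_factory_make ("udpsrc", NULL);
  GstCaps *caps = gst_caps_from_string (
      "application/x-rtp, media=audio, clock-rate=8000, encoding-name=PCMU");

  g_object_set (src, "caps", caps, "buffer-size", 524288, NULL);
  gst_caps_unref (caps);

  return src;
}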
@ -555,7 +555,7 @@ gst_udpsrc_class_init (GstUDPSrcClass * klass)
g_object_class_install_property (gobject_class, PROP_MULTICAST_IFACE,
g_param_spec_string ("multicast-iface", "Multicast Interface",
"The network interface on which to join the multicast group."
"This allows multiple interfaces seperated by comma. (\"eth0,eth1\")",
"This allows multiple interfaces separated by comma. (\"eth0,eth1\")",
UDP_DEFAULT_MULTICAST_IFACE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_URI,

View file

@ -45,7 +45,7 @@
* involves either cropping or padding.
*
* If you use autocrop there is little point in setting the other
* properties manually because they will be overriden if the caps change,
* properties manually because they will be overridden if the caps change,
* but nothing stops you from doing so.
*
* Sample pipeline:
@ -2829,7 +2829,7 @@ gst_video_box_transform_caps (GstBaseTransform * trans,
v = gst_structure_get_value (structure, "width");
if (!gst_video_box_transform_dimension_value (v, dw, &w_val)) {
GST_WARNING_OBJECT (video_box,
"could not tranform width value with dw=%d" ", caps structure=%"
"could not transform width value with dw=%d" ", caps structure=%"
GST_PTR_FORMAT, dw, structure);
goto bail;
}
@ -2839,7 +2839,7 @@ gst_video_box_transform_caps (GstBaseTransform * trans,
if (!gst_video_box_transform_dimension_value (v, dh, &h_val)) {
g_value_unset (&w_val);
GST_WARNING_OBJECT (video_box,
"could not tranform height value with dh=%d" ", caps structure=%"
"could not transform height value with dh=%d" ", caps structure=%"
GST_PTR_FORMAT, dh, structure);
goto bail;
}

View file

@ -677,7 +677,7 @@ gst_video_crop_transform_caps (GstBaseTransform * trans,
v = gst_structure_get_value (structure, "width");
if (!gst_video_crop_transform_dimension_value (v, dx, &w_val, direction,
w_dynamic)) {
GST_WARNING_OBJECT (vcrop, "could not tranform width value with dx=%d"
GST_WARNING_OBJECT (vcrop, "could not transform width value with dx=%d"
", caps structure=%" GST_PTR_FORMAT, dx, structure);
continue;
}
@ -686,7 +686,7 @@ gst_video_crop_transform_caps (GstBaseTransform * trans,
if (!gst_video_crop_transform_dimension_value (v, dy, &h_val, direction,
h_dynamic)) {
g_value_unset (&w_val);
GST_WARNING_OBJECT (vcrop, "could not tranform height value with dy=%d"
GST_WARNING_OBJECT (vcrop, "could not transform height value with dy=%d"
", caps structure=%" GST_PTR_FORMAT, dy, structure);
continue;
}

View file

@ -169,7 +169,7 @@ gst_video_flip_transform_caps (GstBaseTransform * trans,
"height", G_TYPE_INT, height, NULL);
break;
case GST_VIDEO_ORIENTATION_CUSTOM:
GST_WARNING_OBJECT (videoflip, "unsuported custom orientation");
GST_WARNING_OBJECT (videoflip, "unsupported custom orientation");
break;
default:
g_assert_not_reached ();

View file

@ -6,7 +6,7 @@ a new buffer. The new buffer has by default a checkerboard pattern but
its color can be changed with a property.
The mixer can mix streams with different framerates and video sizes. It
uses the duration value of the buffer to schedule the rendering of the
buffers. For streams with a different resoltion than the final output
buffers. For streams with a different resolution than the final output
resolution one can specify the position of the top left corner where this
image should be placed with the pad properties xpos and ypos.
The overall alpha value of a stream can also be specified with a pad
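property. Setting those pad properties from application code looks roughly like this (an illustrative sketch; the sink_%u pad template name and the chosen values are assumptions):

#include <gst/gst.h>

/* Sketch: position one input stream on the output canvas and make it
 * half transparent via the mixer sink-pad properties described above. */
static void
place_stream (GstElement * mixer)
{
  GstPad *sinkpad = gst_element_get_request_pad (mixer, "sink_%u");

  g_object_set (sinkpad, "xpos", 320, "ypos", 0, "alpha", 0.5, NULL);
  gst_object_unref (sinkpad);
}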

View file

@ -55,7 +55,7 @@
* videomixer name=mix ! videoconvert ! ximagesink \
* videotestsrc ! \
* video/x-raw, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
* ]| A pipeline to demostrate bgra mixing. (This does not demonstrate alpha blending).
* ]| A pipeline to demonstrate bgra mixing. (This does not demonstrate alpha blending).
* |[
* gst-launch-1.0 videotestsrc pattern=1 ! \
* video/x-raw,format =I420, framerate=\(fraction\)10/1, width=100, height=100 ! \
@ -1354,7 +1354,7 @@ done_unlocked:
*
* We don't do synchronized mixing so this really depends on where the
* streams were punched in and what their relative offsets are against
* eachother which we can get from the first timestamps we see.
* each other which we can get from the first timestamps we see.
*
* When we add a new stream (or remove a stream) the duration might
* also become invalid again and we need to post a new DURATION
@ -2178,7 +2178,7 @@ gst_videomixer2_child_proxy_init (gpointer g_iface, gpointer iface_data)
{
GstChildProxyInterface *iface = g_iface;
GST_INFO ("intializing child proxy interface");
GST_INFO ("initializing child proxy interface");
iface->get_child_by_index = gst_videomixer2_child_proxy_get_child_by_index;
iface->get_children_count = gst_videomixer2_child_proxy_get_children_count;
}

View file

@ -1089,7 +1089,7 @@ gst_wavenc_change_state (GstElement * element, GstStateChange transition)
wavenc->audio_length = 0x7FFF0000;
wavenc->meta_length = 0;
wavenc->sent_header = FALSE;
/* its true because we haven't writen anything */
/* it's true because we haven't written anything */
wavenc->finished_properly = TRUE;
break;
default:

Some files were not shown because too many files have changed in this diff