documentation: fixed a heap o' typos

This commit is contained in:
Aaron Boxer 2019-09-02 15:08:44 -04:00
parent 2386858a91
commit 6d3429af34
214 changed files with 428 additions and 426 deletions

4
NEWS
View file

@ -353,7 +353,7 @@ New element features and additions
- rtpjitterbuffer has improved end-of-stream handling
- rtpmp4vpay will be prefered over rtpmp4gpay for MPEG-4 video in
- rtpmp4vpay will be preferred over rtpmp4gpay for MPEG-4 video in
autoplugging scenarios now
- rtspsrc now allows applications to send RTSP SET_PARAMETER and
@ -1208,7 +1208,7 @@ Cerbero has seen a number of improvements:
used in order to re-produce a specific build. To set a manifest, you
can set manifest = 'my_manifest.xml' in your configuration file, or
use the --manifest command line option. The command line option will
take precendence over anything specific in the configuration file.
take precedence over anything specific in the configuration file.
- The new build-deps command can be used to build only the
dependencies of a recipe, without the recipe itself.

View file

@ -554,7 +554,7 @@
"writable": true
},
"min-quantizer": {
"blurb": "Mininum (best quality) quantizer",
"blurb": "Minimum (best quality) quantizer",
"construct": false,
"construct-only": false,
"default": "0",
@ -2166,7 +2166,7 @@
"url": "Unknown package origin"
},
"autoconvert": {
"description": "Selects convertor element based on caps",
"description": "Selects converter element based on caps",
"elements": {
"autoconvert": {
"author": "Olivier Crete <olivier.crete@collabora.com>",
@ -2180,7 +2180,7 @@
"GObject"
],
"klass": "Generic/Bin",
"long-name": "Select convertor based on caps",
"long-name": "Select converter based on caps",
"name": "autoconvert",
"pad-templates": {
"sink": {
@ -2240,7 +2240,7 @@
},
"autovideoconvert": {
"author": "Benjamin Gaignard <benjamin.gaignard@stericsson.com>",
"description": "Selects the right color space convertor based on the caps",
"description": "Selects the right color space converter based on the caps",
"hierarchy": [
"GstAutoVideoConvert",
"GstBin",
@ -2250,7 +2250,7 @@
"GObject"
],
"klass": "Generic/Bin",
"long-name": "Select color space convertor based on caps",
"long-name": "Select color space converter based on caps",
"name": "autovideoconvert",
"pad-templates": {
"sink": {
@ -4994,7 +4994,7 @@
"value": "1"
},
{
"desc": "Calculate and cache color replacement values on first occurence",
"desc": "Calculate and cache color replacement values on first occurrence",
"name": "cached",
"value": "2"
}
@ -8406,7 +8406,7 @@
"writable": true
},
"signal-fps-measurements": {
"blurb": "If the fps-measurements signal should be emited.",
"blurb": "If the fps-measurements signal should be emitted.",
"construct": false,
"construct-only": false,
"default": "false",
@ -11592,7 +11592,7 @@
"writable": true
},
"frequency": {
"blurb": "Center frequency to tune into. Measured in kHz for the satellite distribution standars and Hz for all the rest",
"blurb": "Center frequency to tune into. Measured in kHz for the satellite distribution standards and Hz for all the rest",
"construct": false,
"construct-only": false,
"default": "0",
@ -13072,7 +13072,7 @@
"writable": true
},
"frequency": {
"blurb": "Center frequency to tune into. Measured in kHz for the satellite distribution standars and Hz for all the rest",
"blurb": "Center frequency to tune into. Measured in kHz for the satellite distribution standards and Hz for all the rest",
"construct": false,
"construct-only": false,
"default": "0",
@ -22730,7 +22730,7 @@
},
"properties": {
"brightness": {
"blurb": "Brightnesss of image",
"blurb": "Brightness of image",
"construct": false,
"construct-only": false,
"default": "0.75",
@ -32846,7 +32846,7 @@
"writable": true
},
"connector-properties": {
"blurb": "Additionnal properties for the connector",
"blurb": "Additional properties for the connector",
"construct": false,
"construct-only": false,
"type-name": "GstStructure",
@ -32949,7 +32949,7 @@
"writable": true
},
"plane-properties": {
"blurb": "Additionnal properties for the plane",
"blurb": "Additional properties for the plane",
"construct": false,
"construct-only": false,
"type-name": "GstStructure",
@ -35570,7 +35570,7 @@
"writable": true
},
"unit-coeff-elim": {
"blurb": "How agressively small-unit picture blocks should be skipped",
"blurb": "How aggressively small-unit picture blocks should be skipped",
"construct": false,
"construct-only": false,
"default": "0",
@ -39102,12 +39102,12 @@
"value": "0"
},
{
"desc": "Dewarped image is splitted in two images displayed one below the other",
"desc": "Dewarped image is split into two images displayed one below the other",
"name": "double-panorama",
"value": "1"
},
{
"desc": "Dewarped image is splitted in four images dysplayed as a quad view",
"desc": "Dewarped image is split into four images displayed as a quad view",
"name": "quad-view",
"value": "2"
}
@ -40262,7 +40262,7 @@
"GObject"
],
"klass": "Filter/Effect/Video",
"long-name": "Retinex image colour enhacement",
"long-name": "Retinex image colour enhancement",
"name": "retinex",
"pad-templates": {
"sink": {
@ -42041,7 +42041,7 @@
"writable": true
},
"remove": {
"blurb": "Set to true to remove silence from the stream, false otherwhise",
"blurb": "Set to true to remove silence from the stream, false otherwise",
"construct": false,
"construct-only": false,
"default": "false",
@ -42616,7 +42616,7 @@
"writable": true
},
"bonding-addresses": {
"blurb": "Comma (,) seperated list of <address>:<port> to send to. ",
"blurb": "Comma (,) separated list of <address>:<port> to send to. ",
"construct": false,
"construct-only": false,
"default": "localhost:5004",
@ -45623,7 +45623,7 @@
"writable": true
},
"repeat": {
"blurb": "Whether to repeat specified tone indefinitly",
"blurb": "Whether to repeat specified tone indefinitely",
"construct": false,
"construct-only": false,
"default": "false",
@ -45936,7 +45936,7 @@
"writable": true
},
"poll-timeout": {
"blurb": "Return poll wait after timeout miliseconds (-1 = infinite)",
"blurb": "Return poll wait after timeout milliseconds (-1 = infinite)",
"construct": false,
"construct-only": false,
"default": "-1",
@ -46200,7 +46200,7 @@
"writable": true
},
"poll-timeout": {
"blurb": "Return poll wait after timeout miliseconds (-1 = infinite)",
"blurb": "Return poll wait after timeout milliseconds (-1 = infinite)",
"construct": false,
"construct-only": false,
"default": "-1",
@ -49134,7 +49134,7 @@
"writable": true
},
"drop": {
"blurb": "Drop data untill valid configuration data is received either in the stream or through caps",
"blurb": "Drop data until valid configuration data is received either in the stream or through caps",
"construct": true,
"construct-only": false,
"default": "true",
@ -51905,7 +51905,7 @@
"GObject"
],
"klass": "Generic/Audio",
"long-name": "Accoustic Echo Canceller probe",
"long-name": "Acoustic Echo Canceller probe",
"name": "webrtcechoprobe",
"pad-templates": {
"sink": {

View file

@ -331,7 +331,7 @@ gst_av1_enc_class_init (GstAV1EncClass * klass)
g_object_class_install_property (gobject_class, PROP_MIN_QUANTIZER,
g_param_spec_uint ("min-quantizer", "Minimum (best quality) quantizer",
"Mininum (best quality) quantizer",
"Minimum (best quality) quantizer",
0, G_MAXUINT, DEFAULT_MIN_QUANTIZER,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

View file

@ -832,7 +832,7 @@ gst_ass_render_negotiate (GstAssRender * render, GstCaps * caps)
}
if (upstream_has_meta || caps_has_meta) {
/* Send caps immediatly, it's needed by GstBaseTransform to get a reply
/* Send caps immediately, it's needed by GstBaseTransform to get a reply
* from allocation query */
ret = gst_pad_set_caps (render->srcpad, overlay_caps);
@ -875,7 +875,7 @@ gst_ass_render_negotiate (GstAssRender * render, GstCaps * caps)
render->window_height = height;
gst_ass_render_update_render_size (render);
/* For backward compatbility, we will prefer bliting if downstream
/* For backward compatibility, we will prefer blitting if downstream
* allocation does not support the meta. In other case we will prefer
* attaching, and will fail the negotiation in the unlikely case we are
* force to blit, but format isn't supported. */

View file

@ -1021,7 +1021,7 @@ gst_cea708dec_show_pango_window (Cea708Dec * decoder, guint window_id)
}
if (!display) {
GST_DEBUG ("No visible text, skiping rendering");
GST_DEBUG ("No visible text, skipping rendering");
return;
}

View file

@ -1468,7 +1468,7 @@ gst_cea_cc_overlay_process_packet (GstCeaCcOverlay * overlay, guint8 cc_type)
* @overlay: The #GstCeaCcOverlay
* @user_data: The #GstMpegVideoCCData to decode
*
* decode closed caption data and render when neccesary
* decode closed caption data and render when necessary
* in struct GstMpegVideoCCData type's user_data's data field, 3 byte's data construct 1 cc_data_pkt
*
* A cc_data_pkt is 3 bytes as follows:

View file

@ -1130,7 +1130,7 @@ vbi3_raw_decoder_services (vbi3_raw_decoder * rd)
* $return
* Set of data services $rd will be decode after the change.
* Can be zero if the sampling parameters are invalid or some
* other error occured.
* other error occurred.
*/
/* Attn: strict must be int for compatibility with libzvbi 0.2 (-1 == 0) */
vbi_service_set

View file

@ -320,7 +320,7 @@ typedef unsigned int vbi_service_set;
*/
typedef struct {
/**
* A @ref VBI_SLICED_ symbol identifying the data service. Under cirumstances
* A @ref VBI_SLICED_ symbol identifying the data service. Under circumstances
* (see VBI_SLICED_TELETEXT_B) this can be a set of VBI_SLICED_ symbols.
*/
uint32_t id;

View file

@ -99,7 +99,7 @@ gst_lcms_lookup_method_get_type (void)
"Precalculate lookup table (takes a long time getting READY)",
"precalculated"},
{GST_LCMS_LOOKUP_METHOD_CACHED,
"Calculate and cache color replacement values on first occurence",
"Calculate and cache color replacement values on first occurrence",
"cached"},
{0, NULL, NULL},
};

View file

@ -98,7 +98,7 @@ typedef enum
#ifdef CURL_VERSION_HTTP2
GSTCURL_HTTP_VERSION_2_0,
#endif
GSTCURL_HTTP_NOT, /* For future use, incase not HTTP protocol! */
GSTCURL_HTTP_NOT, /* For future use if HTTP protocol not used! */
GSTCURL_HTTP_VERSION_MAX
} GstCurlHttpVersion;

View file

@ -145,7 +145,7 @@
* When requested (with GST_SEEK_FLAG_TRICKMODE_KEY_UNIT) and if the format
* is supported (ISOBMFF profiles), dashdemux can download only keyframes
* in order to provide fast forward/reverse playback without exceeding the
* available bandwith/cpu/memory usage.
* available bandwidth/cpu/memory usage.
*
* This is done in two parts:
* 1) Parsing ISOBMFF atoms to detect the location of keyframes and only
@ -178,7 +178,7 @@
*
* The main reason for doing keyframe-only downloads is for trick-modes
* (i.e. being able to do fast reverse/forward playback with limited
* bandwith/cpu/memory).
* bandwidth/cpu/memory).
*
* Downloading all keyframes might not be the optimal solution, especially
* at high playback rates, since the time taken to download the keyframe
@ -252,13 +252,13 @@
* buffering_level.
*
* The smaller the buffering level is (i.e. the closer we are between
* current and downstream), the more aggresively we skip forward (and
* current and downstream), the more aggressively we skip forward (and
* guarantee the keyframe will be downloaded, decoded and displayed in
* time). And the higher the buffering level, the least aggresivelly
* we need to skip forward (and therefore display more frames per
* second).
*
* Right now the threshold for agressive switching is set to 3
* Right now the threshold for aggressive switching is set to 3
* average_download_time. Below that buffering level we set the target time
* to at least 3 average_download_time distance beyond the
* qos_earliest_time.
@ -1135,7 +1135,7 @@ gst_dash_demux_get_video_input_caps (GstDashDemux * demux,
if (stream == NULL)
return NULL;
/* if bitstreamSwitching is true we dont need to swich pads on resolution change */
/* if bitstreamSwitching is true we don't need to switch pads on resolution change */
if (!gst_mpd_client_get_bitstream_switching_flag (stream)) {
width = gst_mpd_client_get_video_stream_width (stream);
height = gst_mpd_client_get_video_stream_height (stream);
@ -1169,7 +1169,7 @@ gst_dash_demux_get_audio_input_caps (GstDashDemux * demux,
if (stream == NULL)
return NULL;
/* if bitstreamSwitching is true we dont need to swich pads on rate/channels change */
/* if bitstreamSwitching is true we don't need to switch pads on rate/channels change */
if (!gst_mpd_client_get_bitstream_switching_flag (stream)) {
channels = gst_mpd_client_get_audio_stream_num_channels (stream);
rate = gst_mpd_client_get_audio_stream_rate (stream);
@ -1923,7 +1923,7 @@ gst_dash_demux_stream_get_target_time (GstDashDemux * dashdemux,
"MUST SKIP to at least %" GST_TIME_FORMAT " (was %" GST_TIME_FORMAT ")",
GST_TIME_ARGS (ret), GST_TIME_ARGS (min_position));
} else if (diff < 4 * dashstream->average_download_time) {
/* Go forward a bit less aggresively (and at most 1s forward) */
/* Go forward a bit less aggressively (and at most 1s forward) */
ret = gst_segment_position_from_running_time (&stream->segment,
GST_FORMAT_TIME, min_running + MIN (GST_SECOND,
2 * dashstream->average_download_time));

View file

@ -4706,7 +4706,7 @@ gst_mpd_client_get_adaptation_sets_for_period (GstMpdClient * client,
* to decide which one to use, so we have to resolve them all here
*/
for (list = period->period->AdaptationSets; list;
/* advanced explicitely below */ ) {
/* advanced explicitly below */ ) {
GstAdaptationSetNode *adapt_set = (GstAdaptationSetNode *) list->data;
GList *new_adapt_sets = NULL, *prev, *next;

View file

@ -52,7 +52,7 @@
* application, that means it won't handle navigation events and won't resize
* the #GstDfbVideoSink:surface to fit video
* frames geometry. Application has to implement the necessary code to grab
* informations about the negotiated geometry and resize there
* information about the negotiated geometry and resize there
* #GstDfbVideoSink:surface accordingly.
*
* For both modes the element implements a buffer pool allocation system to

View file

@ -107,7 +107,7 @@ void gst_dtls_connection_close(GstDtlsConnection *);
void gst_dtls_connection_set_send_callback(GstDtlsConnection *, GClosure *);
/*
* Processes data that has been recevied, the transformation is done in-place.
* Processes data that has been received, the transformation is done in-place.
* Returns the length of the plaintext data that was decoded, if no data is available, 0<= will be returned.
*/
gint gst_dtls_connection_process(GstDtlsConnection *, gpointer ptr, gint len);

View file

@ -567,7 +567,7 @@ gst_hls_demux_set_current_variant (GstHLSDemux * hlsdemux,
if (hlsdemux->current_variant != NULL) {
gint i;
//#warning FIXME: Synching fragments across variants
//#warning FIXME: Syncing fragments across variants
// should be done based on media timestamps, and
// discont-sequence-numbers not sequence numbers.
variant->m3u8->sequence_position =
@ -647,7 +647,7 @@ gst_hls_demux_process_manifest (GstAdaptiveDemux * demux, GstBuffer * buf)
gst_hls_demux_set_current_variant (hlsdemux, variant); // FIXME: inline?
}
/* get the selected media playlist (unless the inital list was one already) */
/* get the selected media playlist (unless the initial list was one already) */
if (!hlsdemux->master->is_simple) {
GError *err = NULL;

View file

@ -58,7 +58,7 @@
*
* ## Example pipeline
*
* This explicitely decodes a Kate stream:
* This explicitly decodes a Kate stream:
* |[
* gst-launch-1.0 filesrc location=test.ogg ! oggdemux ! katedec ! fakesink silent=TRUE
* ]|

View file

@ -198,7 +198,7 @@ gst_kate_enc_class_init (GstKateEncClass * klass)
0, G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, ARG_KEEPALIVE_MIN_TIME,
g_param_spec_float ("keepalive-min-time", "Keepalive mimimum time",
g_param_spec_float ("keepalive-min-time", "Keepalive minimum time",
"Minimum time to emit keepalive packets (0 disables keepalive packets)",
0.0f, FLT_MAX, DEFAULT_KEEPALIVE_MIN_TIME,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

View file

@ -103,7 +103,7 @@
* ## Example Sink/Audio/LADSPA line with this plugins
* |[
* gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! audioconvert ! audioresample ! queue ! ladspasink-cmt-so-null-ai myT. ! audioconvert ! audioresample ! queue ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4
* ]| Get audio input, filter it trhough Mono Amplifier, CAPS Plate LADSPA and TAP Stereo Echo, explicitily anulate audio with Null (Audio Output), and play a visualization (recommended hearphones).
* ]| Get audio input, filter it through Mono Amplifier, CAPS Plate LADSPA and TAP Stereo Echo, explicitly annul audio with Null (Audio Output), and play a visualization (recommended headphones).
*
*/

View file

@ -267,8 +267,9 @@ gst_mms_do_seek (GstBaseSrc * src, GstSegment * segment)
return FALSE;
}
start = mmsx_get_current_pos (mmssrc->connection);
GST_INFO_OBJECT (mmssrc, "sought to %" GST_TIME_FORMAT ", offset after "
"seek: %" G_GINT64_FORMAT, GST_TIME_ARGS (segment->start), start);
GST_INFO_OBJECT (mmssrc,
"performed seek to %" GST_TIME_FORMAT ", offset after " "seek: %"
G_GINT64_FORMAT, GST_TIME_ARGS (segment->start), start);
} else if (segment->format == GST_FORMAT_BYTES) {
start = mmsx_seek (NULL, mmssrc->connection, segment->start, SEEK_SET);
/* mmsx_seek will close and reopen the connection when seeking with the
@ -277,7 +278,7 @@ gst_mms_do_seek (GstBaseSrc * src, GstSegment * segment)
GST_DEBUG_OBJECT (mmssrc, "connection broken during seek");
return FALSE;
}
GST_INFO_OBJECT (mmssrc, "sought to: %" G_GINT64_FORMAT " bytes, "
GST_INFO_OBJECT (mmssrc, "performed seek to: %" G_GINT64_FORMAT " bytes, "
"result: %" G_GINT64_FORMAT, segment->start, start);
} else {
GST_DEBUG_OBJECT (mmssrc, "unsupported seek segment format: %s",

View file

@ -171,7 +171,7 @@ gst_lv2_filter_finalize (GObject * object)
}
#if 0
/* Convert an LV2 port role to a Gst channel positon
/* Convert an LV2 port role to a Gst channel position
* WARNING: If the group has only a single port,
* GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER will be returned for pg:centerRole
* (which is used by LV2 for mono groups), but this is not correct. In this
@ -291,7 +291,7 @@ gst_lv2_filter_setup (GstAudioFilter * gsp, const GstAudioInfo * info)
if (!gst_lv2_setup (&self->lv2, GST_AUDIO_INFO_RATE (info)))
goto no_instance;
/* FIXME Handle audio channel positionning while negotiating CAPS */
/* FIXME Handle audio channel positioning while negotiating CAPS */
#if 0
gint i;
/* set input group pad audio channel position */

View file

@ -847,7 +847,7 @@ gst_lv2_class_init (GstLV2Class * lv2_class, GType type)
optional_pred);
GstLV2Port desc = { j, GST_LV2_PORT_AUDIO, -1, };
LilvNodes *lv2group = lilv_port_get (lv2plugin, port, group_pred);
/* FIXME Handle channels positionning
/* FIXME Handle channels positioning
* GstAudioChannelPosition position = GST_AUDIO_CHANNEL_POSITION_INVALID; */
if (lv2group) {
@ -862,7 +862,7 @@ gst_lv2_class_init (GstLV2Class * lv2_class, GType type)
group->ports = g_array_new (FALSE, TRUE, sizeof (GstLV2Port));
}
/* FIXME Handle channels positionning
/* FIXME Handle channels positioning
position = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
sub_values = lilv_port_get_value (lv2plugin, port, designation_pred);
if (lilv_nodes_size (sub_values) > 0) {

View file

@ -44,7 +44,7 @@
* In particular, default property values are dependent on the format,
* and can even be forcibly restrained to certain pre-sets (and thereby ignored).
* Note that the (S)VCD profiles also restrict the image size, so some scaling
* may be needed to accomodate this. The so-called generic profiles (as used
* may be needed to accommodate this. The so-called generic profiles (as used
* in the example above) allow most parameters to be adjusted.
*
* |[

View file

@ -375,7 +375,7 @@ GstMpeg2EncOptions::initProperties (GObjectClass * klass)
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (klass, ARG_UNIT_COEFF_ELIM,
g_param_spec_int ("unit-coeff-elim", "Unit coefficience elimination",
"How agressively small-unit picture blocks should be skipped",
"How aggressively small-unit picture blocks should be skipped",
-40, 40, 0,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

View file

@ -298,7 +298,7 @@ gst_neonhttp_src_set_property (GObject * object, guint prop_id,
goto done;
}
if (!gst_neonhttp_src_set_proxy (src, proxy)) {
GST_WARNING ("badly formated proxy");
GST_WARNING ("badly formatted proxy");
goto done;
}
break;
@ -314,7 +314,7 @@ gst_neonhttp_src_set_property (GObject * object, guint prop_id,
goto done;
}
if (!gst_neonhttp_src_set_location (src, location, NULL)) {
GST_WARNING ("badly formated location");
GST_WARNING ("badly formatted location");
goto done;
}
break;
@ -429,7 +429,7 @@ gst_neonhttp_src_get_property (GObject * object, guint prop_id,
static void
oom_callback (void)
{
GST_ERROR ("memory exeception in neon");
GST_ERROR ("memory exception in neon");
}
static GstFlowReturn

View file

@ -408,7 +408,7 @@ MotionCells::calculateMotionPercentInCell (int p_row, int p_col,
cntpixelsnum++;
if ((((uchar *) (m_pbwImage.data + m_pbwImage.step[0] * i))[j]) > 0) {
cntmotionpixelnum++;
if (cntmotionpixelnum >= thresholdmotionpixelnum) { //we dont needs calculate anymore
if (cntmotionpixelnum >= thresholdmotionpixelnum) { //we don't need to calculate anymore
*p_motionarea = cntmotionpixelnum;
return (cntmotionpixelnum / cntpixelsnum);
}

View file

@ -88,7 +88,7 @@ gst_cv_dilate_class_init (GstCvDilateClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -149,7 +149,7 @@ gst_cv_dilate_erode_class_init (GstCvDilateErodeClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -88,7 +88,7 @@ gst_cv_erode_class_init (GstCvErodeClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -237,7 +237,7 @@ gst_cv_smooth_class_init (GstCvSmoothClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -90,9 +90,9 @@ dewarp_display_mode_get_type (void)
static GType dewarp_display_mode_type = 0;
static const GEnumValue dewarp_display_mode[] = {
{GST_DEWARP_DISPLAY_PANORAMA, "Single panorama image", "single-panorama"},
{GST_DEWARP_DISPLAY_DOUBLE_PANORAMA, "Dewarped image is splitted in two "
{GST_DEWARP_DISPLAY_DOUBLE_PANORAMA, "Dewarped image is split in two "
"images displayed one below the other", "double-panorama"},
{GST_DEWARP_DISPLAY_QUAD_VIEW, "Dewarped image is splitted in four images "
{GST_DEWARP_DISPLAY_QUAD_VIEW, "Dewarped image is split in four images "
"displayed as a quad view",
"quad-view"},
{0, NULL, NULL},
@ -301,7 +301,7 @@ gst_dewarp_set_property (GObject * object, guint prop_id,
filter->x_center = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
GST_LOG_OBJECT (filter, "x center setted to %f", filter->x_center);
GST_LOG_OBJECT (filter, "x center set to %f", filter->x_center);
}
break;
case PROP_Y_CENTER:
@ -310,7 +310,7 @@ gst_dewarp_set_property (GObject * object, guint prop_id,
filter->y_center = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
GST_LOG_OBJECT (filter, "y center setted to %f", filter->y_center);
GST_LOG_OBJECT (filter, "y center set to %f", filter->y_center);
}
break;
case PROP_INNER_RADIUS:
@ -319,7 +319,7 @@ gst_dewarp_set_property (GObject * object, guint prop_id,
filter->inner_radius = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
GST_LOG_OBJECT (filter, "inner radius setted to %f",
GST_LOG_OBJECT (filter, "inner radius set to %f",
filter->inner_radius);
}
break;
@ -329,7 +329,7 @@ gst_dewarp_set_property (GObject * object, guint prop_id,
filter->outer_radius = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
GST_LOG_OBJECT (filter, "outer radius setted to %f",
GST_LOG_OBJECT (filter, "outer radius set to %f",
filter->outer_radius);
}
break;
@ -339,7 +339,7 @@ gst_dewarp_set_property (GObject * object, guint prop_id,
filter->remap_correction_x = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
GST_LOG_OBJECT (filter, "x remap correction setted to %f",
GST_LOG_OBJECT (filter, "x remap correction set to %f",
filter->remap_correction_x);
}
break;
@ -349,13 +349,13 @@ gst_dewarp_set_property (GObject * object, guint prop_id,
filter->remap_correction_y = v;
filter->need_map_update = TRUE;
need_reconfigure = TRUE;
GST_LOG_OBJECT (filter, "y remap correction setted to %f",
GST_LOG_OBJECT (filter, "y remap correction set to %f",
filter->remap_correction_y);
}
break;
case PROP_INTERPOLATION_MODE:
filter->interpolation_mode = g_value_get_enum (value);
GST_LOG_OBJECT (filter, "interpolation mode setted to %" G_GINT32_FORMAT,
GST_LOG_OBJECT (filter, "interpolation mode set to %" G_GINT32_FORMAT,
filter->interpolation_mode);
break;
case PROP_DISPLAY_MODE:
@ -363,7 +363,7 @@ gst_dewarp_set_property (GObject * object, guint prop_id,
if (disp_mode != filter->display_mode) {
filter->display_mode = disp_mode;
need_reconfigure = TRUE;
GST_LOG_OBJECT (filter, "display mode setted to %" G_GINT32_FORMAT,
GST_LOG_OBJECT (filter, "display mode set to %" G_GINT32_FORMAT,
filter->display_mode);
}
break;
@ -510,7 +510,7 @@ gst_dewarp_calculate_dimensions (GstDewarp * filter, GstPadDirection direction,
}
/* if outer_radius and inner radius are very close then width and height
could be 0, we assume passtrough in this case
could be 0, we assume passthrough in this case
*/
if (G_UNLIKELY (*out_width == 0) || G_UNLIKELY (*out_height == 0)) {
GST_WARNING_OBJECT (filter,

View file

@ -370,7 +370,7 @@ gst_disparity_handle_sink_event (GstPad * pad,
GST_INFO_OBJECT (pad, " Negotiating caps via event %" GST_PTR_FORMAT,
caps);
if (!gst_pad_has_current_caps (fs->srcpad)) {
/* Init image info (widht, height, etc) and all OpenCV matrices */
/* Init image info (width, height, etc) and all OpenCV matrices */
initialise_disparity (fs, info.width, info.height,
info.finfo->n_components);

View file

@ -169,7 +169,7 @@ gst_edge_detect_class_init (GstEdgeDetectClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -210,7 +210,7 @@ gst_face_blur_class_init (GstFaceBlurClass * klass)
DEFAULT_SCALE_FACTOR,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_MIN_NEIGHBORS,
g_param_spec_int ("min-neighbors", "Mininum neighbors",
g_param_spec_int ("min-neighbors", "Minimum neighbors",
"Minimum number (minus 1) of neighbor rectangles that makes up "
"an object", 0, G_MAXINT, DEFAULT_MIN_NEIGHBORS,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
@ -235,7 +235,7 @@ gst_face_blur_class_init (GstFaceBlurClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -320,7 +320,7 @@ gst_face_detect_class_init (GstFaceDetectClass * klass)
1.1, 10.0, DEFAULT_SCALE_FACTOR,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_MIN_NEIGHBORS,
g_param_spec_int ("min-neighbors", "Mininum neighbors",
g_param_spec_int ("min-neighbors", "Minimum neighbors",
"Minimum number (minus 1) of neighbor rectangles that makes up "
"an object", 0, G_MAXINT, DEFAULT_MIN_NEIGHBORS,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

View file

@ -203,7 +203,7 @@ IF nothing is present, then nothing is done.", "Miguel Casas-Sanchez <miguelecas
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -161,7 +161,7 @@ gst_retinex_class_init (GstRetinexClass * klass)
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
gst_element_class_set_static_metadata (element_class,
"Retinex image colour enhacement", "Filter/Effect/Video",
"Retinex image colour enhancement", "Filter/Effect/Video",
"Multiscale retinex for colour image enhancement",
"Miguel Casas-Sanchez <miguelecasassanchez@gmail.com>");
@ -171,7 +171,7 @@ gst_retinex_class_init (GstRetinexClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void
@ -277,7 +277,7 @@ gst_retinex_transform_ip (GstOpencvVideoFilter * filter, GstBuffer * buf,
img.convertTo (retinex->cvA, retinex->cvA.type ());
log (retinex->cvA, retinex->cvB);
/* Compute log of blured image */
/* Compute log of blurred image */
filter_size = (int) floor (sigma * 6) / 2;
filter_size = filter_size * 2 + 1;
@ -294,7 +294,7 @@ gst_retinex_transform_ip (GstOpencvVideoFilter * filter, GstBuffer * buf,
}
/* Multiscale retinex restoration. The image and a set of filtered images are
converted to the log domain and subtracted from the original with some set
of weights. Typicaly called with three equally weighted scales of fine,
of weights. Typically called with three equally weighted scales of fine,
medium and wide standard deviations.
O = Log(I) - sum_i [ wi * Log(H(I)) ]
where O is the output, H is a gaussian 2d filter and I is the input image

View file

@ -7,7 +7,7 @@
* and adapted. Its license reads:
* "Oct. 3, 2008
* Right to use this code in any way you want without warrenty, support or
* any guarentee of it working. "
* any guarantee of it working. "
*
*
* Permission is hereby granted, free of charge, to any person obtaining a
@ -72,7 +72,7 @@
* mixture model for real-time tracking with shadow detection", Proc. 2nd
* European Workshop on Advanced Video-Based Surveillance Systems, 2001
* [5] http://opencv.itseez.com/modules/video/doc/motion_analysis_and_object_tracking.html#backgroundsubtractormog2
* [6] Z.Zivkovic, "Improved adaptive Gausian mixture model for background
* [6] Z.Zivkovic, "Improved adaptive Gaussian mixture model for background
* subtraction", International Conference Pattern Recognition, UK, August, 2004.
* [7] Z.Zivkovic, F. van der Heijden, "Efficient Adaptive Density Estimation
* per Image Pixel for the Task of Background Subtraction", Pattern Recognition
@ -230,7 +230,7 @@ gst_segmentation_class_init (GstSegmentationClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void
@ -421,7 +421,7 @@ gst_segmentation_transform_ip (GstOpencvVideoFilter * cvfilter,
* OpenCV MOG2 implements the algorithm described in [2] and [3].
*
* [1] http://opencv.itseez.com/modules/video/doc/motion_analysis_and_object_tracking.html#backgroundsubtractormog2
* [2] Z.Zivkovic, "Improved adaptive Gausian mixture model for background
* [2] Z.Zivkovic, "Improved adaptive Gaussian mixture model for background
* subtraction", International Conference Pattern Recognition, UK, Aug 2004.
* [3] Z.Zivkovic, F. van der Heijden, "Efficient Adaptive Density Estimation
* per Image Pixel for the Task of Background Subtraction", Pattern
@ -630,7 +630,7 @@ clear_stale_entries (codeBook * c)
maxMod Add this (possibly negative) number onto
max level when determining if new pixel is foreground
minMod Subract this (possibly negative) number from
minMod Subtract this (possibly negative) number from
min level when determining if new pixel is foreground
NOTES:
@ -770,7 +770,7 @@ run_mog2_iteration (GstSegmentation * filter)
[2] and [3].
[1] http://opencv.itseez.com/modules/video/doc/motion_analysis_and_object_tracking.html#backgroundsubtractormog2
[2] Z.Zivkovic, "Improved adaptive Gausian mixture model for background
[2] Z.Zivkovic, "Improved adaptive Gaussian mixture model for background
subtraction", International Conference Pattern Recognition, UK, August, 2004.
[3] Z.Zivkovic, F. van der Heijden, "Efficient Adaptive Density Estimation per
Image Pixel for the Task of Background Subtraction", Pattern Recognition

View file

@ -168,7 +168,7 @@ gst_skin_detect_class_init (GstSkinDetectClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -217,7 +217,7 @@ gst_opencv_text_overlay_class_init (GstOpencvTextOverlayClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -2087,7 +2087,7 @@ gst_flups_demux_is_pes_sync (guint32 sync)
((sync & 0xe0) == 0xc0) || ((sync & 0xf0) == 0xe0);
}
/* If we can pull that's prefered */
/* If we can pull that's preferred */
static gboolean
gst_flups_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
{

View file

@ -2827,7 +2827,7 @@ rsn_dvdsrc_do_seek (GstBaseSrc * bsrc, GstSegment * segment)
gboolean ret = FALSE;
if (segment->format == rsndvd_format || src->first_seek) {
/* The internal format has alread served its purpose of waking
/* The internal format has already served its purpose of waking
* everything up and flushing, we just need to step to the next
* data block (below) so we know our new position */
ret = TRUE;

View file

@ -1201,7 +1201,7 @@ gst_input_selector_class_init (RsnInputSelectorClass * klass)
*
* The active pad may push more buffers than what is currently displayed/consumed
* and when changing pads those buffers will be discarded and the only way to
* reactivate that pad without loosing the already consumed buffers is to enable cache.
* reactivate that pad without losing the already consumed buffers is to enable cache.
*/
g_object_class_install_property (gobject_class, PROP_CACHE_BUFFERS,
g_param_spec_boolean ("cache-buffers", "Cache Buffers",

View file

@ -579,7 +579,7 @@ success:
/* This is set here so that the call to create() above doesn't clear it */
src->discont = TRUE;
GST_DEBUG_OBJECT (src, "Seek to %" GST_TIME_FORMAT " successfull",
GST_DEBUG_OBJECT (src, "Seek to %" GST_TIME_FORMAT " successful",
GST_TIME_ARGS (segment->start));
return TRUE;

View file

@ -669,7 +669,7 @@ gst_pitch_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
/* this function returns FALSE if not enough data is known to transform the
* segment into proper downstream values. If the function does return false
* the segment should be stalled until enough information is available.
* If the funtion returns TRUE, event will be replaced by the new downstream
* If the function returns TRUE, event will be replaced by the new downstream
* compatible event.
*/
static gboolean

View file

@ -34,7 +34,7 @@
* * gint `type` (0-1): The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are
* specfied by their number. This element can only take events as input.
* specified by their number. This element can only take events as input.
* Do not confuse with "method" which specified the output.
* * gint `number` (0-16): The event number.
* * gint `method` (2): This field will always been 2 (ie sound) from this element.

View file

@ -139,7 +139,7 @@ gst_tone_generate_src_class_init (GstToneGenerateSrcClass * klass)
g_object_class_install_property (gobject_class, PROP_REPEAT,
g_param_spec_boolean ("repeat", "Repeat the specified tone period ",
"Whether to repeat specified tone indefinitly", DEFAULT_REPEAT,
"Whether to repeat specified tone indefinitely", DEFAULT_REPEAT,
G_PARAM_READWRITE));
gst_element_class_add_static_pad_template (gstelement_class,

View file

@ -429,7 +429,7 @@ gst_srt_object_install_properties_helper (GObjectClass * gobject_class)
*/
g_object_class_install_property (gobject_class, PROP_POLL_TIMEOUT,
g_param_spec_int ("poll-timeout", "Poll timeout",
"Return poll wait after timeout miliseconds (-1 = infinite)", -1,
"Return poll wait after timeout milliseconds (-1 = infinite)", -1,
G_MAXINT32, GST_SRT_DEFAULT_POLL_TIMEOUT,
G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
G_PARAM_STATIC_STRINGS));

View file

@ -292,7 +292,7 @@ gst_srtp_dec_class_init (GstSrtpDecClass * klass)
* @gstsrtpdec: the element on which the signal is emitted
* @ssrc: The unique SSRC of the stream
*
* Signal emited to get the parameters relevant to stream
* Signal emitted to get the parameters relevant to stream
* with @ssrc. User should provide the key and the RTP and
* RTCP encryption ciphers and authentication, and return
* them wrapped in a GstCaps.
@ -318,7 +318,7 @@ gst_srtp_dec_class_init (GstSrtpDecClass * klass)
* @gstsrtpdec: the element on which the signal is emitted
* @ssrc: The unique SSRC of the stream
*
* Signal emited when the stream with @ssrc has reached the
* Signal emitted when the stream with @ssrc has reached the
* soft limit of utilisation of it's master encryption key.
* User should provide a new key and new RTP and RTCP encryption
* ciphers and authentication, and return them wrapped in a
@ -333,7 +333,7 @@ gst_srtp_dec_class_init (GstSrtpDecClass * klass)
* @gstsrtpdec: the element on which the signal is emitted
* @ssrc: The unique SSRC of the stream
*
* Signal emited when the stream with @ssrc has reached the
* Signal emitted when the stream with @ssrc has reached the
* hard limit of utilisation of it's master encryption key.
* User should provide a new key and new RTP and RTCP encryption
* ciphers and authentication, and return them wrapped in a
@ -361,7 +361,7 @@ gst_srtp_dec_class_init (GstSrtpDecClass * klass)
/* initialize the new element
* instantiate pads and add them to element
* set pad calback functions
* set pad callback functions
* initialize instance structure
*/
static void

View file

@ -56,7 +56,7 @@
* An application can request multiple RTP and RTCP pads to protect,
* but every sink pad requested must receive packets from the same
* source (identical SSRC). If a packet received contains a different
* SSRC, a warning is emited and the valid SSRC is forced on the packet.
* SSRC, a warning is emitted and the valid SSRC is forced on the packet.
*
* This element uses libsrtp library. When receiving the first packet,
* the library is initialized with a new stream (based on the SSRC). It
@ -335,7 +335,7 @@ gst_srtp_enc_class_init (GstSrtpEncClass * klass)
* GstSrtpEnc::soft-limit:
* @gstsrtpenc: the element on which the signal is emitted
*
* Signal emited when the stream with @ssrc has reached the soft
* Signal emitted when the stream with @ssrc has reached the soft
* limit of utilisation of it's master encryption key. User should
* provide a new key by setting the #GstSrtpEnc:key property.
*/
@ -484,7 +484,7 @@ done:
return ret;
}
/* Release ressources and set default values
/* Release resources and set default values
*/
static void
gst_srtp_enc_reset_no_lock (GstSrtpEnc * filter)

View file

@ -127,7 +127,7 @@ struct _GstSubtitleColor {
* @GST_SUBTITLE_TEXT_DIRECTION_RTL: Text direction is right-to-left.
*
* Defines the progression direction of unicode text that is being treated by
* the unicode bidirectional algorithm as embedded or overidden (see
* the unicode bidirectional algorithm as embedded or overridden (see
* http://unicode.org/reports/tr9/ for more details of the unicode
* bidirectional algorithm).
*/
@ -269,7 +269,7 @@ typedef enum {
/**
* GstSubtitleStyleSet:
* @text_direction: Defines the direction of text that has been declared by the
* #GstSubtitleStyleSet:unicode_bidi attribute to be embbedded or overridden.
* #GstSubtitleStyleSet:unicode_bidi attribute to be embedded or overridden.
* Applies to both #GstSubtitleBlocks and #GstSubtitleElements.
* @font_family: The name of the font family that should be used to render the
* text of an inline element. Applies only to #GstSubtitleElements.

View file

@ -1297,7 +1297,7 @@ ttml_handle_element_whitespace (GNode * node, gpointer data)
gunichar u = g_utf8_get_char (c);
gint nbytes = g_unichar_to_utf8 (u, buf);
/* Repace each newline or tab with a space. */
/* Replace each newline or tab with a space. */
if (nbytes == 1 && (buf[0] == TTML_CHAR_LF || buf[0] == TTML_CHAR_TAB)) {
*c = ' ';
buf[0] = TTML_CHAR_SPACE;

View file

@ -634,7 +634,7 @@ gst_vulkan_download_change_state (GstElement * element,
if (!gst_vulkan_ensure_element_data (element, NULL,
&vk_download->instance)) {
GST_ELEMENT_ERROR (vk_download, RESOURCE, NOT_FOUND,
("Failed to retreive vulkan instance"), (NULL));
("Failed to retrieve vulkan instance"), (NULL));
return GST_STATE_CHANGE_FAILURE;
}
if (!gst_vulkan_device_run_context_query (GST_ELEMENT (vk_download),

View file

@ -729,7 +729,7 @@ gst_vulkan_full_screen_render_start (GstBaseTransform * bt)
if (!gst_vulkan_ensure_element_data (GST_ELEMENT (bt), NULL,
&render->instance)) {
GST_ELEMENT_ERROR (render, RESOURCE, NOT_FOUND,
("Failed to retreive vulkan instance"), (NULL));
("Failed to retrieve vulkan instance"), (NULL));
return FALSE;
}
if (!gst_vulkan_device_run_context_query (GST_ELEMENT (render),

View file

@ -53,11 +53,11 @@ _vk_create_shader (GstVulkanDevice * device, gchar * code, gsize size,
g_return_val_if_fail (first_word == SPIRV_MAGIC_NUMBER_NE
|| first_word == SPIRV_MAGIC_NUMBER_OE, NULL);
if (first_word == SPIRV_MAGIC_NUMBER_OE) {
/* endianess swap... */
/* endianness swap... */
guint32 *old_code = (guint32 *) code;
gsize i;
GST_DEBUG ("performaing endianess conversion on spirv shader of size %"
GST_DEBUG ("performing endianness conversion on spirv shader of size %"
G_GSIZE_FORMAT, size);
new_code = g_new0 (guint32, size / 4);

View file

@ -1352,7 +1352,7 @@ gst_vulkan_upload_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_READY_TO_PAUSED:
if (!gst_vulkan_ensure_element_data (element, NULL, &vk_upload->instance)) {
GST_ELEMENT_ERROR (vk_upload, RESOURCE, NOT_FOUND,
("Failed to retreive vulkan instance"), (NULL));
("Failed to retrieve vulkan instance"), (NULL));
return GST_STATE_CHANGE_FAILURE;
}
if (!gst_vulkan_device_run_context_query (GST_ELEMENT (vk_upload),

View file

@ -1604,7 +1604,7 @@ fixate_size (GstVulkanViewConvert * viewconvert,
goto done;
}
/* If all this failed, keep the height that was nearest to the orignal
/* If all this failed, keep the height that was nearest to the original
* height and the nearest possible width. This changes the DAR but
* there's not much else to do here.
*/

View file

@ -795,7 +795,7 @@ gst_wayland_sink_show_frame (GstVideoSink * vsink, GstBuffer * buffer)
gst_buffer_pool_config_set_params (config, caps, sink->video_info.size,
2, 0);
/* This is a video pool, it should not fail with basic setings */
/* This is a video pool, it should not fail with basic settings */
if (!gst_buffer_pool_set_config (sink->pool, config) ||
!gst_buffer_pool_set_active (sink->pool, TRUE))
goto activate_failed;

View file

@ -57,7 +57,7 @@
* holds a reference to the GstWlBuffer, but without having an actual reference.
* When we kill the display, there is no way for the GstWlBuffer, the associated
* GstBuffer and the GstBufferPool to get destroyed, so we are going to leak a
* fair ammount of memory.
* fair amount of memory.
*
* Normally, this rarely happens, because the compositor releases buffers
* almost immediately and when waylandsink stops, they are already released.

View file

@ -380,7 +380,7 @@ gst_webp_dec_update_src_caps (GstWebPDec * dec, GstMapInfo * map_info)
if (features.width < MIN_WIDTH || features.width > MAX_WIDTH
|| features.height < MIN_HEIGHT || features.height > MAX_HEIGHT) {
GST_ERROR_OBJECT (dec, "Dimensions of the frame is unspported by libwebp");
GST_ERROR_OBJECT (dec, "Dimensions of the frame is unsupported by libwebp");
return GST_FLOW_ERROR;
}

View file

@ -87,7 +87,7 @@
* balanced bundle policy
* setting custom DTLS certificates
*
* seperate session id's from mlineindex properly
* separate session id's from mlineindex properly
* how to deal with replacing a input/output track/stream
*/
@ -1411,7 +1411,7 @@ _find_codec_preferences (GstWebRTCBin * webrtc,
WebRTCTransceiver *trans = (WebRTCTransceiver *) rtp_trans;
GstCaps *ret = NULL;
GST_LOG_OBJECT (webrtc, "retreiving codec preferences from %" GST_PTR_FORMAT,
GST_LOG_OBJECT (webrtc, "retrieving codec preferences from %" GST_PTR_FORMAT,
trans);
if (rtp_trans && rtp_trans->codec_preferences) {
@ -1481,7 +1481,7 @@ _add_supported_attributes_to_caps (GstWebRTCBin * webrtc,
/*if (!gst_structure_has_field (s, "rtcp-fb-transport-cc"))
gst_structure_set (s, "rtcp-fb-nack-pli", G_TYPE_BOOLEAN, TRUE, NULL); */
/* FIXME: codec-specific paramters? */
/* FIXME: codec-specific parameters? */
}
return ret;
@ -5715,7 +5715,7 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass)
* and is constantly changing these statistics may be changed to fit with
* the latest spec.
*
* Each field key is a unique identifer for each RTCStats
* Each field key is a unique identifier for each RTCStats
* (https://www.w3.org/TR/webrtc/#rtcstats-dictionary) value (another
* GstStructure) in the RTCStatsReport
* (https://www.w3.org/TR/webrtc/#rtcstatsreport-object). Each supported
@ -5884,11 +5884,11 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass)
* members outlined https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit and
* and reproduced below
*
* ordered G_TYPE_BOOLEAN Whether the channal will send data with guarenteed ordering
* ordered               G_TYPE_BOOLEAN        Whether the channel will send data with guaranteed ordering
* max-packet-lifetime G_TYPE_INT The time in milliseconds to attempt transmitting unacknowledged data. -1 for unset
* max-retransmits G_TYPE_INT The number of times data will be attempted to be transmitted without acknowledgement before dropping
* protocol G_TYPE_STRING The subprotocol used by this channel
* negotiated G_TYPE_BOOLEAN Whether the created data channel should not perform in-band chnanel announcment. If %TRUE, then application must negotiate the channel itself and create the corresponding channel on the peer with the same id.
* negotiated            G_TYPE_BOOLEAN        Whether the created data channel should not perform in-band channel announcement. If %TRUE, then application must negotiate the channel itself and create the corresponding channel on the peer with the same id.
* id G_TYPE_INT Override the default identifier selection of this channel
* priority GST_TYPE_WEBRTC_PRIORITY_TYPE The priority to use for this channel
*

View file

@ -440,7 +440,7 @@ transport_receive_bin_class_init (TransportReceiveBinClass * klass)
g_object_class_install_property (gobject_class,
PROP_STREAM,
g_param_spec_object ("stream", "Stream",
"The TransportStream for this receiveing bin",
"The TransportStream for this receiving bin",
transport_stream_get_type (),
G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
}

View file

@ -319,7 +319,7 @@ validate_sdp (GstWebRTCSignalingState state, SDPSource source,
}
if (!_media_has_setup (media, i, error))
goto fail;
/* check paramaters in bundle are the same */
/* check parameters in bundle are the same */
if (media_in_bundle) {
const gchar *ice_ufrag =
gst_sdp_media_get_attribute_val (media, "ice-ufrag");

View file

@ -40,13 +40,13 @@
* a single probe and DSP.
*
* The probe can only be used within the same top level GstPipeline.
* Additonally, to simplify the code, the probe element must be created
* Additionally, to simplify the code, the probe element must be created
* before the DSP sink pad is activated. It does not need to be in any
* particular state and does not even need to be added to the pipeline yet.
*
* # Example launch line
*
* As a conveniance, the echo canceller can be tested using an echo loop. In
* As a convenience, the echo canceller can be tested using an echo loop. In
* this configuration, one would expect a single echo to be heard.
*
* |[
@ -279,7 +279,7 @@ G_DEFINE_TYPE (GstWebrtcDsp, gst_webrtc_dsp, GST_TYPE_AUDIO_FILTER);
static const gchar *
webrtc_error_to_string (gint err)
{
const gchar *str = "unkown error";
const gchar *str = "unknown error";
switch (err) {
case webrtc::AudioProcessing::kNoError:

View file

@ -259,7 +259,7 @@ gst_webrtc_echo_probe_class_init (GstWebrtcEchoProbeClass * klass)
&gst_webrtc_echo_probe_sink_template);
gst_element_class_set_static_metadata (element_class,
"Accoustic Echo Canceller probe",
"Acoustic Echo Canceller probe",
"Generic/Audio",
"Gathers playback buffers for webrtcdsp",
"Nicolas Dufresne <nicolas.dufrsesne@collabora.com>");

View file

@ -61,7 +61,7 @@ struct _GstWebrtcEchoProbe
* object lock and also lock the probe. The natural order for the DSP is
* to lock the DSP and then the echo probe. If we where using the probe
* object lock, we'd be racing with GstBin which will lock sink to src,
* and may accidently reverse the order. */
* and may accidentally reverse the order. */
GMutex lock;
/* Protected by the lock */

View file

@ -628,7 +628,7 @@ gst_wildmidi_dec_decode (GstNonstreamAudioDecoder * dec, GstBuffer ** buffer,
return FALSE;
/* Allocate output buffer
* Multiply by 2 to accomodate for the sample size (16 bit = 2 byte) */
* Multiply by 2 to accommodate for the sample size (16 bit = 2 byte) */
outbuf_size = wildmidi_dec->output_buffer_size * 2 * WILDMIDI_NUM_CHANNELS;
outbuf =
gst_nonstream_audio_decoder_allocate_output_buffer (dec, outbuf_size);

View file

@ -338,7 +338,7 @@ gst_wpe_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
case GST_NAVIGATION_EVENT_KEY_RELEASE:
if (gst_navigation_event_parse_key_event (event, &key)) {
/* FIXME: This is wrong... The GstNavigation API should pass
hardware-level informations, not high-level keysym strings */
hardware-level information, not high-level keysym strings */
uint32_t keysym =
(uint32_t) xkb_keysym_from_name (key, XKB_KEYSYM_NO_FLAGS);
struct wpe_input_keyboard_event wpe_event;

View file

@ -923,7 +923,7 @@ gst_x265_enc_init_encoder (GstX265Enc * encoder)
}
if (i == encoder->peer_profiles->len) {
GST_ERROR_OBJECT (encoder, "Could't apply peer profile");
GST_ERROR_OBJECT (encoder, "Couldn't apply peer profile");
GST_OBJECT_UNLOCK (encoder);
return FALSE;
@ -1191,7 +1191,7 @@ gst_x265_enc_get_header_buffer (GstX265Enc * encoder)
GST_DEBUG_OBJECT (encoder, "%d nal units in header", i_nal);
/* x265 returns also non header nal units with the call x265_encoder_headers.
* The usefull headers are sequential (VPS, SPS and PPS), so we look for this
* The useful headers are sequential (VPS, SPS and PPS), so we look for this
* nal units and only copy these tree nal units as the header */
vps_idx = sps_idx = pps_idx = -1;

View file

@ -26,11 +26,11 @@
* If the .#GstZBar:attach-frame property is %TRUE, the posted barcode message
* includes a sample of the frame where the barcode was detected (Since 1.6).
*
* The element generate messages named`barcode`. The structure containes these fields:
* The element generates messages named `barcode`. The structure contains these fields:
*
* * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
* * gchar * `type`: the symbol type.
* * gchar * `symbol`: the deteted bar code data.
* * gchar * `symbol`: the detected bar code data.
* * gint `quality`: an unscaled, relative quantity: larger values are better than smaller
* values.
* * GstSample `frame`: the frame in which the barcode message was detected, if

View file

@ -63,7 +63,7 @@
* Subclasses:
* While GstAdaptiveDemux is responsible for the workflow, it knows nothing
* about the intrinsics of the subclass formats, so the subclasses are
* resposible for maintaining the manifest data structures and stream
* responsible for maintaining the manifest data structures and stream
* information.
*/
@ -1745,7 +1745,7 @@ gst_adaptive_demux_handle_seek_event (GstAdaptiveDemux * demux, GstPad * pad,
demux_class->stream_seek (stream, rate >= 0, stream_seek_flags, ts, &ts);
}
/* replace event with a new one without snaping to seek on all streams */
/* replace event with a new one without snapping to seek on all streams */
gst_event_unref (event);
if (rate >= 0) {
start = ts;

View file

@ -1805,7 +1805,7 @@ gst_nonstream_audio_decoder_output_new_segment (GstNonstreamAudioDecoder * dec,
/* stop/duration members are not set, on purpose - in case of loops,
* new segments will be generated, which automatically put an implicit
* end on the current segment (the segment implicitely "ends" when the
* end on the current segment (the segment implicitly "ends" when the
* new one starts), and having a stop value might cause very slight
* gaps occasionally due to slight jitter in the calculation of
* base times etc. */
@ -2478,7 +2478,7 @@ gst_nonstream_audio_decoder_allocate_output_buffer (GstNonstreamAudioDecoder *
* params are used */
if (!gst_nonstream_audio_decoder_negotiate (dec)) {
GST_ERROR_OBJECT (dec,
"could not allocate output buffer because negotation failed");
"could not allocate output buffer because negotiation failed");
return NULL;
}
}

View file

@ -915,7 +915,7 @@ gst_h264_parse_clock_timestamp (GstH264ClockTimestamp * tim,
GST_DEBUG ("parsing \"Clock timestamp\"");
/* defalt values */
/* default values */
tim->time_offset = 0;
READ_UINT8 (nr, tim->ct_type, 2);
@ -1089,7 +1089,7 @@ gst_h264_parser_parse_recovery_point (GstH264NalParser * nalparser,
GST_DEBUG ("parsing \"Recovery point\"");
if (!sps || !sps->valid) {
GST_WARNING ("didn't get the associated sequence paramater set for the "
GST_WARNING ("didn't get the associated sequence parameter set for the "
"current access unit");
goto error;
}

View file

@ -755,7 +755,7 @@ gst_h265_parser_parse_short_term_ref_pic_sets (GstH265ShortTermRefPicSet *
READ_UINT8 (nr, use_delta_flag[j], 1);
}
/* 7-47: calcuate NumNegativePics, DeltaPocS0 and UsedByCurrPicS0 */
/* 7-47: calculate NumNegativePics, DeltaPocS0 and UsedByCurrPicS0 */
i = 0;
for (j = (RefRPS->NumPositivePics - 1); j >= 0; j--) {
dPoc = RefRPS->DeltaPocS1[j] + deltaRps;
@ -778,7 +778,7 @@ gst_h265_parser_parse_short_term_ref_pic_sets (GstH265ShortTermRefPicSet *
}
stRPS->NumNegativePics = i;
/* 7-48: calcuate NumPositivePics, DeltaPocS1 and UsedByCurrPicS1 */
/* 7-48: calculate NumPositivePics, DeltaPocS1 and UsedByCurrPicS1 */
i = 0;
for (j = (RefRPS->NumNegativePics - 1); j >= 0; j--) {
dPoc = RefRPS->DeltaPocS0[j] + deltaRps;
@ -1032,7 +1032,7 @@ gst_h265_parser_parse_pic_timing (GstH265Parser * parser,
GST_DEBUG ("parsing \"Picture timing\"");
if (!parser->last_sps || !parser->last_sps->valid) {
GST_WARNING ("didn't get the associated sequence paramater set for the "
GST_WARNING ("didn't get the associated sequence parameter set for the "
"current access unit");
goto error;
}
@ -1114,7 +1114,7 @@ gst_h265_parser_parse_recovery_point (GstH265Parser * parser,
GST_DEBUG ("parsing \"Recovery point\"");
if (!sps || !sps->valid) {
GST_WARNING ("didn't get the associated sequence paramater set for the "
GST_WARNING ("didn't get the associated sequence parameter set for the "
"current access unit");
goto error;
}
@ -1621,7 +1621,7 @@ gst_h265_parse_vps (GstH265NalUnit * nalu, GstH265VPS * vps)
CHECK_ALLOWED_MAX (vps->max_layer_id, 63);
READ_UE_MAX (&nr, vps->num_layer_sets_minus1, 1023);
/* allowd range is 0 to 1023 */
/* allowed range is 0 to 1023 */
CHECK_ALLOWED_MAX (vps->num_layer_sets_minus1, 1023);
for (i = 1; i <= vps->num_layer_sets_minus1; i++) {
@ -1643,13 +1643,13 @@ gst_h265_parse_vps (GstH265NalUnit * nalu, GstH265VPS * vps)
READ_UE_MAX (&nr, vps->num_ticks_poc_diff_one_minus1, G_MAXUINT32 - 1);
READ_UE_MAX (&nr, vps->num_hrd_parameters, 1024);
/* allowd range is
/* allowed range is
* 0 to vps_num_layer_sets_minus1 + 1 */
CHECK_ALLOWED_MAX (vps->num_hrd_parameters, vps->num_layer_sets_minus1 + 1);
if (vps->num_hrd_parameters) {
READ_UE_MAX (&nr, vps->hrd_layer_set_idx, 1023);
/* allowd range is
/* allowed range is
* ( vps_base_layer_internal_flag ? 0 : 1 ) to vps_num_layer_sets_minus1
*/
CHECK_ALLOWED_MAX (vps->hrd_layer_set_idx, vps->num_layer_sets_minus1);
@ -3118,8 +3118,8 @@ get_format_range_extension_profile (GstH265ProfileTierLevel * ptl)
guint extra_constraints = 0;
FormatRangeExtensionProfileMatch *m;
/* Filter out all the profiles having constraints not satisified by @ptl.
* Then pick the one having the least extra contraints. This allow us
/* Filter out all the profiles having constraints not satisfied by @ptl.
* Then pick the one having the least extra constraints. This allows us
* to match the closest profile if bitstream contains non-standard
* constraints. */
if (p.max_14bit_constraint_flag != ptl->max_14bit_constraint_flag) {

View file

@ -248,7 +248,7 @@ typedef enum
/**
* GstH265ParserResult:
* @GST_H265_PARSER_OK: The parsing succeded
* @GST_H265_PARSER_OK: The parsing succeeded
* @GST_H265_PARSER_BROKEN_DATA: The data to parse is broken
* @GST_H265_PARSER_BROKEN_LINK: The link to structure needed for the parsing couldn't be found
* @GST_H265_PARSER_ERROR: An error occurred when parsing
@ -423,7 +423,7 @@ struct _GstH265NalUnit
* @progressive_source_flag: flag to indicate the type of stream
* @interlaced_source_flag: flag to indicate the type of stream
* @non_packed_constraint_flag: indicate the presence of frame packing
* arragement sei message
* arrangement sei message
* @frame_only_constraint_flag: recognize the field_seq_flag
* @max_12bit_constraint_flag: used to define profile extensions, see Annex A
* @max_10bit_constraint_flag: used to define profile extensions, see Annex A
@ -592,7 +592,7 @@ struct _GstH265HRDParams
* @temporal_id_nesting_flag: specifies whether inter prediction is
* additionally restricted
* @profile_tier_level: ProfileTierLevel info
* @sub_layer_ordering_info_present_flag: indicates the presense of
* @sub_layer_ordering_info_present_flag: indicates the presence of
* vps_max_dec_pic_buffering_minus1, vps_max_num_reorder_pics and
* vps_max_latency_increase_plus1
* @max_dec_pic_buffering_minus1: specifies the maximum required size

View file

@ -450,7 +450,7 @@ gst_mpeg4_parse (GstMpeg4Packet * packet, gboolean skip_user_data,
first_resync_marker);
first_resync_marker = FALSE;
/* We found a complet slice */
/* We found a complete slice */
if (resync_res == GST_MPEG4_PARSER_OK)
return resync_res;
else if (resync_res == GST_MPEG4_PARSER_NO_PACKET_END) {
@ -1323,7 +1323,7 @@ gst_mpeg4_parse_video_object_plane (GstMpeg4VideoObjectPlane * vop,
vop->fcode_forward = 1;
vop->fcode_backward = 1;
/* Compute macroblock informations */
/* Compute macroblock information */
if (vol->interlaced)
vop->mb_height = (2 * (vol->height + 31) / 32);
else
@ -1398,7 +1398,7 @@ gst_mpeg4_parse_video_object_plane (GstMpeg4VideoObjectPlane * vop,
gst_bit_reader_get_bits_uint16_unchecked (&br, 13);
MARKER_UNCHECKED (&br);
/* Recompute the Macroblock informations
/* Recompute the Macroblock information
* accordingly to the new values */
if (vol->interlaced)
vop->mb_height = (2 * (vol->height + 31) / 32);
@ -1618,11 +1618,11 @@ failed:
* gst_mpeg4_parse_video_packet_header:
* @videopackethdr: The #GstMpeg4VideoPacketHdr structure to fill
* @vol: The last parsed #GstMpeg4VideoObjectLayer, will be updated
* with the informations found during the parsing
* with the information found during the parsing
* @vop: The last parsed #GstMpeg4VideoObjectPlane, will be updated
* with the informations found during the parsing
* with the information found during the parsing
* @sprite_trajectory: A #GstMpeg4SpriteTrajectory to fill or %NULL
* with the informations found during the parsing
* with the information found during the parsing
* @data: The data to parse, should be set after the resync marker.
* @size: The size of the data to parse
*

View file

@ -489,7 +489,7 @@ struct _GstMpeg4VideoObjectPlane {
guint8 load_backward_shape;
guint8 ref_select_code;
/* Computed macroblock informations */
/* Computed macroblock information */
guint16 mb_height;
guint16 mb_width;
guint mb_num;

View file

@ -305,7 +305,7 @@ struct _GstVC1SeqStructC
/* Wmvp specific */
guint8 wmvp; /* Specify if the stream is wmp or not */
/* In the wmvp case, the framerate is not computed but in the bistream */
/* In the wmvp case, the framerate is not computed but in the bitstream */
guint8 slice_code;
};

View file

@ -407,7 +407,7 @@ seg_get_base_qindex (const GstVp9Parser * parser,
int seg_base = frame_hdr->quant_indices.y_ac_qi;
GstVp9ParserPrivate *priv = GST_VP9_PARSER_GET_PRIVATE (parser);
const GstVp9SegmentationInfoData *seg = priv->segmentation + segid;
/* DEBUG("id = %d, seg_base = %d, seg enable = %d, alt eanble = %d, abs = %d, alt= %d\n",segid,
/* DEBUG("id = %d, seg_base = %d, seg enable = %d, alt enable = %d, abs = %d, alt= %d\n",segid,
seg_base, frame_hdr->segmentation.enabled, seg->alternate_quantizer_enabled, priv->segmentation_abs_delta, seg->alternate_quantizer);
*/
if (frame_hdr->segmentation.enabled && seg->alternate_quantizer_enabled) {

View file

@ -73,7 +73,7 @@ typedef struct _GstVp9SegmentationInfoData GstVp9SegmentationInfoData;
* GstVp9ParseResult:
* @GST_VP9_PARSER_OK: The parsing went well
* @GST_VP9_PARSER_BROKEN_DATA: The data to parse is broken
* @GST_VP9_PARSER_NO_PACKET_ERROR: An error occured during the parsing
* @GST_VP9_PARSER_NO_PACKET_ERROR: An error occurred during the parsing
*
* Result type of any parsing function.
*
@ -367,7 +367,7 @@ struct _GstVp9SegmentationInfo {
* @refresh_frame_context: refresh frame context indicator
* @frame_parallel_decoding_mode: enable or disable parallel decoding support.
* @loopfilter: loopfilter values
* @quant_indices: quantization indeces
* @quant_indices: quantization indices
* @segmentation: segmentation info
* @log2_tile_rows: tile row indicator
* @log2_tile_columns: tile column indicator
@ -433,7 +433,7 @@ struct _GstVp9FrameHdr
* @reference_skip: a block skip mode that implies both the use of a (0,0)
* motion vector and that no residual will be coded
*
* Segmentation info kept across multipe frames
* Segmentation info kept across multiple frames
*
* Since: 1.8
*/
@ -469,7 +469,7 @@ struct _GstVp9Segmentation
*/
struct _GstVp9Parser
{
/* private stuct for tracking state variables across frames */
/* private struct for tracking state variables across frames */
void *priv;
gint subsampling_x;

View file

@ -448,7 +448,7 @@ gst_photography_set_autofocus (GstPhotography * photo, gboolean on)
/**
* gst_photography_set_config:
* @photo: #GstPhotography interface of a #GstElement
* @config: #GstPhotographySettings containg the configuration
* @config: #GstPhotographySettings containing the configuration
*
* Set all configuration settings at once.
*
@ -474,7 +474,7 @@ gst_photography_set_config (GstPhotography * photo,
/**
* gst_photography_get_config:
* @photo: #GstPhotography interface of a #GstElement
* @config: #GstPhotographySettings containg the configuration
* @config: #GstPhotographySettings containing the configuration
*
* Get all configuration settings at once.
*
@ -670,7 +670,7 @@ gst_photography_iface_class_init (gpointer g_class)
* GstPhotography:lens-focus:
*
* Manual changing of lens focus in diopter units.
* Inteded use with GST_PHOTOGRAPHY_FOCUS_MODE_MANUAL focus mode, otherwise
* Intended use with GST_PHOTOGRAPHY_FOCUS_MODE_MANUAL focus mode, otherwise
* to be ignored.
*
*/

View file

@ -1289,7 +1289,7 @@ gst_mpegts_descriptor_parse_dvb_component (const GstMpegtsDescriptor
*
* Extracts the component tag from @descriptor.
*
* Returns: %TRUE if the parsing happended correctly, else %FALSE.
* Returns: %TRUE if the parsing happened correctly, else %FALSE.
*/
gboolean
gst_mpegts_descriptor_parse_dvb_stream_identifier (const GstMpegtsDescriptor

View file

@ -763,7 +763,7 @@ typedef enum {
* @priority: %TRUE High Priority %FALSE Low Priority
* @time_slicing: %TRUE no time slicing %FALSE time slicing
 * @mpe_fec: %TRUE no mpe-fec is used %FALSE mpe-fec is used
* @constellation: the constallation
* @constellation: the constellation
* @hierarchy: the hierarchy
* @code_rate_hp:
* @code_rate_lp:

View file

@ -91,7 +91,7 @@ static void _##name##_free (type * source) \
* These are the base descriptor types and methods.
*
* For more details, refer to the ITU H.222.0 or ISO/IEC 13818-1 specifications
* and other specifications mentionned in the documentation.
* and other specifications mentioned in the documentation.
*/
/* FIXME : Move this to proper file once we have a C file for ATSC/ISDB descriptors */
@ -970,7 +970,7 @@ gst_mpegts_parse_descriptors (guint8 * buffer, gsize buf_len)
* Note: To look for descriptors that can be present more than once in an
* array of descriptors, iterate the #GArray manually.
*
* Returns: (transfer none): the first descriptor matchin @tag, else %NULL.
* Returns: (transfer none): the first descriptor matching @tag, else %NULL.
*/
const GstMpegtsDescriptor *
gst_mpegts_find_descriptor (GPtrArray * descriptors, guint8 tag)

View file

@ -40,7 +40,7 @@ G_BEGIN_DECLS
/**
* GstPlayerStreamInfo:
*
* Base structure for information concering a media stream. Depending on
* Base structure for information concerning a media stream. Depending on
* the stream type, one can find more media-specific information in
* #GstPlayerVideoInfo, #GstPlayerAudioInfo, #GstPlayerSubtitleInfo.
*/

View file

@ -3933,7 +3933,7 @@ gst_player_set_video_track (GstPlayer * self, gint stream_index)
*
* Returns: %TRUE or %FALSE
*
* Sets the subtitle strack @stream_index.
 * Sets the subtitle track @stream_index.
*/
gboolean
gst_player_set_subtitle_track (GstPlayer * self, gint stream_index)

View file

@ -131,7 +131,7 @@ gst_fragment_class_init (GstFragmentClass * klass)
"Name of the fragment (eg:fragment-12.ts)", NULL, G_PARAM_READABLE));
g_object_class_install_property (gobject_class, PROP_DISCONTINOUS,
g_param_spec_boolean ("discontinuous", "Discontinous",
g_param_spec_boolean ("discontinuous", "Discontinuous",
"Whether this fragment has a discontinuity or not",
FALSE, G_PARAM_READABLE));

View file

@ -680,7 +680,7 @@ quit:
} else {
GstQuery *query;
/* Download successfull, let's query the URI */
/* Download successful, let's query the URI */
query = gst_query_new_uri ();
if (gst_element_query (urisrc, query)) {
gst_query_parse_uri (query, &download->uri);

View file

@ -183,7 +183,7 @@ _gst_vk_debug_callback (VkDebugReportFlagsEXT msgFlags,
* @instance: a #GstVulkanInstance
* @error: #GError
*
* Returns: whether the instance vould be created
* Returns: whether the instance could be created
*
* Since: 1.18
*/
@ -361,7 +361,7 @@ gst_vulkan_instance_open (GstVulkanInstance * instance, GError ** error)
"vkCreateDebugReportCallbackEXT");
if (!instance->dbgCreateDebugReportCallback) {
g_set_error (error, GST_VULKAN_ERROR, VK_ERROR_INITIALIZATION_FAILED,
"Failed to retreive vkCreateDebugReportCallback");
"Failed to retrieve vkCreateDebugReportCallback");
goto error;
}
instance->dbgDestroyDebugReportCallback =
@ -370,7 +370,7 @@ gst_vulkan_instance_open (GstVulkanInstance * instance, GError ** error)
"vkDestroyDebugReportCallbackEXT");
if (!instance->dbgDestroyDebugReportCallback) {
g_set_error (error, GST_VULKAN_ERROR, VK_ERROR_INITIALIZATION_FAILED,
"Failed to retreive vkDestroyDebugReportCallback");
"Failed to retrieve vkDestroyDebugReportCallback");
goto error;
}
instance->dbgReportMessage = (PFN_vkDebugReportMessageEXT)
@ -378,7 +378,7 @@ gst_vulkan_instance_open (GstVulkanInstance * instance, GError ** error)
"vkDebugReportMessageEXT");
if (!instance->dbgReportMessage) {
g_set_error (error, GST_VULKAN_ERROR, VK_ERROR_INITIALIZATION_FAILED,
"Failed to retreive vkDebugReportMessage");
"Failed to retrieve vkDebugReportMessage");
goto error;
}

View file

@ -156,7 +156,7 @@ static GstVideoFormat
_vk_format_to_video_format (VkFormat format)
{
switch (format) {
/* double check endianess */
/* double check endianness */
case VK_FORMAT_R8G8B8A8_UNORM:
case VK_FORMAT_R8G8B8A8_SRGB:
return GST_VIDEO_FORMAT_RGBA;

View file

@ -340,7 +340,7 @@ gst_vulkan_window_get_presentation_support (GstVulkanWindow * window,
* @window: a #GstVulkanWindow
* @error: a #GError
*
* Returns: whether @window could be sucessfully opened
* Returns: whether @window could be successfully opened
*
* Since: 1.18
*/

View file

@ -535,7 +535,7 @@ window_proc (HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
}
default:
{
/* transmit messages to the parrent (ex: mouse/keyboard input) */
/* transmit messages to the parent (ex: mouse/keyboard input) */
HWND parent_id = window_win32->parent_win_id;
if (parent_id)
PostMessage (parent_id, uMsg, wParam, lParam);

View file

@ -84,7 +84,7 @@ gst_wayland_video_default_init (GstWaylandVideoInterface * klass)
*
* Please note that any calls to this method MUST be matched by
* calls to end_geometry_change() and AFTER the parent surface has
* commited its geometry changes.
* committed its geometry changes.
*/
void
gst_wayland_video_begin_geometry_change (GstWaylandVideo * video)
@ -106,11 +106,11 @@ gst_wayland_video_begin_geometry_change (GstWaylandVideo * video)
*
* Notifies the video sink that we just finished changing the
* geometry of both itself and its parent surface. This should
* have been earlier preceeded by a call to begin_geometry_change()
* have been earlier preceded by a call to begin_geometry_change()
* which notified the sink before any of these changes had happened.
*
* It is important to call this method only AFTER the parent surface
* has commited its geometry changes, otherwise no synchronization
* has committed its geometry changes, otherwise no synchronization
* is actually achieved.
*/
void

View file

@ -458,7 +458,7 @@ content_description_calc_size_for_tag (const GstTagList * taglist,
* and get the same tags in the writing function */
/**
* gst_asf_mux_get_content_description_tags:
* @asfmux: #GstAsfMux to have its tags proccessed
* @asfmux: #GstAsfMux to have its tags processed
* @asftags: #GstAsfTags to hold the results
*
* Inspects the tags received by the GstTagSetter interface
@ -466,7 +466,7 @@ content_description_calc_size_for_tag (const GstTagList * taglist,
* size needed for the default and extended content description objects.
* This results and a copy of the #GstTagList
* are stored in the #GstAsfTags. We store a copy so that
* the sizes estimated here mantain the same until they are
 * the sizes estimated here remain the same until they are
* written to the asf file.
*/
static void
@ -784,7 +784,7 @@ gst_asf_mux_write_stream_properties (GstAsfMux * asfmux, guint8 ** buf,
*
* Writes the header of the header extension object. The buffer pointer
* is incremented to the next writing position (the header extension object
* childs should be writen from that point)
 * children should be written from that point)
*/
static void
gst_asf_mux_write_header_extension (GstAsfMux * asfmux, guint8 ** buf,
@ -802,7 +802,7 @@ gst_asf_mux_write_header_extension (GstAsfMux * asfmux, guint8 ** buf,
* gst_asf_mux_write_extended_stream_properties:
* @asfmux:
* @buf: pointer to the buffer pointer
* @asfpad: Pad that handles the stream of the properties to be writen
* @asfpad: Pad that handles the stream of the properties to be written
*
* Writes the extended stream properties object (that is part of the
* header extension objects) for the stream handled by asfpad
@ -849,13 +849,13 @@ gst_asf_mux_write_extended_stream_properties (GstAsfMux * asfmux, guint8 ** buf,
* @asfmux:
* @size_buf: pointer to the memory position to write the size of the string
* @str_buf: pointer to the memory position to write the string
* @str: the string to be writen (in UTF-8)
* @use32: if the string size should be writen with 32 bits (if true)
* @str: the string to be written (in UTF-8)
* @use32: if the string size should be written with 32 bits (if true)
* or with 16 (if false)
*
* Writes a string with its size as it is needed in many asf objects.
* The size is writen to size_buf as a WORD field if use32 is false, and
* as a DWORD if use32 is true. The string is writen to str_buf in UTF16-LE.
* The size is written to size_buf as a WORD field if use32 is false, and
* as a DWORD if use32 is true. The string is written to str_buf in UTF16-LE.
* The string should be passed in UTF-8.
*
* The string size in UTF16-LE is returned.
@ -909,8 +909,8 @@ gst_asf_mux_write_string_with_size (GstAsfMux * asfmux,
* @data_buf:
*
* Checks if a string tag with tagname exists in the taglist. If it
* exists it is writen as an UTF-16LE to data_buf and its size in bytes
* is writen to size_buf. It is used for writing content description
* exists it is written as an UTF-16LE to data_buf and its size in bytes
* is written to size_buf. It is used for writing content description
* object fields.
*
* Returns: the size of the string
@ -1404,7 +1404,7 @@ gst_asf_mux_start_file (GstAsfMux * asfmux)
* @videopad:
*
* Adds a new entry to the simple index of the stream handler by videopad.
* This functions doesn't check if the time ellapsed
 * This function doesn't check if the time elapsed
* is larger than the established time interval between entries. The caller
* is responsible for verifying this.
*/

View file

@ -220,7 +220,7 @@ gst_asf_payload_get_size (AsfPayload * payload)
* gst_asf_payload_free:
* @payload: the #AsfPayload to be freed
*
* Releases teh memory associated with this payload
* Releases the memory associated with this payload
*/
void
gst_asf_payload_free (AsfPayload * payload)
@ -278,7 +278,7 @@ gst_asf_match_guid (const guint8 * data, const Guid * guid)
/**
* gst_asf_put_i32:
* @buf: the memory to write data to
* @data: the value to be writen
* @data: the value to be written
*
* Writes a 32 bit signed integer to memory
*/
@ -291,7 +291,7 @@ gst_asf_put_i32 (guint8 * buf, gint32 data)
/**
* gst_asf_put_time:
* @buf: pointer to the buffer to write the value to
* @time: value to be writen
* @time: value to be written
*
* Writes an asf time value to the buffer
*/
@ -304,7 +304,7 @@ gst_asf_put_time (guint8 * buf, guint64 time)
/**
* gst_asf_put_guid:
* @buf: the buffer to write the guid to
* @guid: the guid to be writen
* @guid: the guid to be written
*
* Writes a GUID to the buffer
*/
@ -324,7 +324,7 @@ gst_asf_put_guid (guint8 * buf, Guid guid)
/**
* gst_asf_put_payload:
* @buf: memory to write the payload to
* @payload: #AsfPayload to be writen
* @payload: #AsfPayload to be written
*
* Writes the asf payload to the buffer. The #AsfPayload
* packet count is incremented.
@ -348,19 +348,19 @@ gst_asf_put_payload (guint8 * buf, AsfPayload * payload)
/**
* gst_asf_put_subpayload:
* @buf: buffer to write the payload to
* @payload: the payload to be writen
* @payload: the payload to be written
* @size: maximum size in bytes to write
*
* Serializes part of a payload to a buffer.
* The maximum size is checked against the payload length,
* the minimum of this size and the payload length is writen
* to the buffer and the writen size is returned.
* the minimum of this size and the payload length is written
* to the buffer and the written size is returned.
*
* It also updates the values of the payload to match the remaining
* data.
* In case there is not enough space to write the headers, nothing is done.
*
* Returns: The writen size in bytes.
* Returns: The written size in bytes.
*/
guint16
gst_asf_put_subpayload (guint8 * buf, AsfPayload * payload, guint16 size)
@ -461,7 +461,7 @@ gst_asf_match_and_peek_obj_size_buf (GstBuffer * buf, const Guid * guid)
* is the last one in an asf packet and the remaining data
* is probably uninteresting to the application.
*
* Returns: true on success, false if some error occurrs
* Returns: true on success, false if some error occurs
*/
static gboolean
gst_asf_parse_mult_payload (GstByteReader * reader, gboolean * has_keyframe)
@ -486,7 +486,7 @@ gst_asf_parse_mult_payload (GstByteReader * reader, gboolean * has_keyframe)
if (!gst_byte_reader_get_uint8 (reader, &stream_num))
goto error;
if ((stream_num & 0x80) != 0) {
GST_LOG ("Keyframe found, stoping parse of payloads");
GST_LOG ("Keyframe found, stopping parse of payloads");
*has_keyframe = TRUE;
return TRUE;
}
@ -525,7 +525,7 @@ error:
* is the last one in an asf packet and the remaining data
* is probably uninteresting to the application.
*
* Returns: true on success, false if some error occurrs
* Returns: true on success, false if some error occurs
*/
static gboolean
gst_asf_parse_single_payload (GstByteReader * reader, gboolean * has_keyframe)
@ -753,7 +753,7 @@ gst_asf_parse_file_properties_obj (GstByteReader * reader,
return GST_FLOW_ERROR;
if (min_ps != max_ps) {
GST_WARNING ("Mininum and maximum packet size differ "
GST_WARNING ("Minimum and maximum packet size differ "
"%" G_GUINT32_FORMAT " and %" G_GUINT32_FORMAT ", "
"ASF spec states they should be the same", min_ps, max_ps);
return FALSE;

View file

@ -268,7 +268,7 @@ gst_synae_scope_render (GstAudioVisualizer * bscope, GstBuffer * audio,
fc = r + l;
x = (guint) (r * w / fc);
/* the brighness scaling factor was picked by experimenting */
/* the brightness scaling factor was picked by experimenting */
br = b * fc * 0.01;
br1 = br * (clarity + 128) >> 8;

View file

@ -164,7 +164,7 @@ gst_auto_convert_class_init (GstAutoConvertClass * klass)
gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
gst_element_class_set_static_metadata (gstelement_class,
"Select convertor based on caps", "Generic/Bin",
"Select converter based on caps", "Generic/Bin",
"Selects the right transform element based on the caps",
"Olivier Crete <olivier.crete@collabora.com>");

View file

@ -144,8 +144,8 @@ gst_auto_video_convert_class_init (GstAutoVideoConvertClass * klass)
gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
gst_element_class_set_static_metadata (gstelement_class,
"Select color space convertor based on caps", "Generic/Bin",
"Selects the right color space convertor based on the caps",
"Select color space converter based on caps", "Generic/Bin",
"Selects the right color space converter based on the caps",
"Benjamin Gaignard <benjamin.gaignard@stericsson.com>");
gstelement_class->change_state =

View file

@ -42,5 +42,5 @@ plugin_init (GstPlugin * plugin)
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
autoconvert,
"Selects convertor element based on caps",
"Selects converter element based on caps",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

View file

@ -64,7 +64,7 @@
 * "nearest neighbor" principle, with some additional complexity for the
* calculation of the "green" element, where an "adaptive" pairing is used.
*
* For purposes of documentation and indentification, each element of the
* For purposes of documentation and identification, each element of the
* original array can be put into one of four classes:
* R A red element
* B A blue element

View file

@ -48,7 +48,7 @@ recommended that the viewfinder pad keeps pushing buffers so the user
has a smooth experience.
Note that basecamerasrc already has the mode property and start/stop-capture
signals. It has functions that should be overriden by its child classes to
signals. It has functions that should be overridden by its child classes to
implement the handling of these actions.

Some files were not shown because too many files have changed in this diff Show more