vaapi: fix 'ISO C90 forbids mixed declarations and code' compiler warnings
Declare variables at the beginning of a code block, which is how it's done in GStreamer.

https://bugzilla.gnome.org/show_bug.cgi?id=759192
commit 24168a2093
parent eb2daed2a7
10 changed files with 54 additions and 32 deletions
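The pattern behind every hunk below is the same: GCC emits "ISO C90 forbids mixed declarations and code" (with -Wdeclaration-after-statement, or -std=c89 -pedantic) whenever a variable is declared after the first statement in a block, so each offending declaration is hoisted to the top of its block and, where needed, split into a declaration plus a later assignment. A minimal standalone sketch of the warning and of the fix pattern follows; the function names and values are illustrative only and are not taken from the gstreamer-vaapi sources.

/* Illustrative sketch only, not part of this commit: the helpers and values
 * below are made up to show the warning and the fix pattern. Build with
 * "gcc -Wdeclaration-after-statement" (or -std=c89 -pedantic) to see the
 * diagnostic on the first function. */
#include <stdio.h>

static int scaled_before_fix(int x)
{
    printf("input = %d\n", x);   /* a statement comes first...              */
    int scaled = x * 2;          /* ...then a declaration: warning here     */
    return scaled;
}

static int scaled_after_fix(int x)
{
    int scaled;                  /* all declarations at the top of the block */

    printf("input = %d\n", x);
    scaled = x * 2;
    return scaled;
}

int main(void)
{
    /* Both variants compute the same result; only the first one warns. */
    return scaled_before_fix(21) == scaled_after_fix(21) ? 0 : 1;
}

The same split (declare at the top of the block, assign later) is what the hunks below apply to locals such as op_data, crop_meta, latency and fps_n/fps_d.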
@@ -3626,8 +3626,6 @@ exec_ref_pic_marking_adaptive(
 {
     guint i;
 
-    GST_DEBUG("reference picture marking process (adaptive memory control)");
-
     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
         GstVaapiDecoderH264 *decoder,
         GstVaapiPictureH264 *picture,
@@ -3644,6 +3642,8 @@ exec_ref_pic_marking_adaptive(
         exec_ref_pic_marking_adaptive_mmco_6,
     };
 
+    GST_DEBUG("reference picture marking process (adaptive memory control)");
+
     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
         GstH264RefPicMarking * const ref_pic_marking =
             &dec_ref_pic_marking->ref_pic_marking[i];

@@ -610,10 +610,12 @@ decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
 
     if (priv->is_svh) {
         guint temp_ref = priv->svh_hdr.temporal_reference;
+        guint delta_ref;
+
         if (temp_ref < priv->prev_t_ref) {
             temp_ref += 256;
         }
-        guint delta_ref = temp_ref - priv->prev_t_ref;
+        delta_ref = temp_ref - priv->prev_t_ref;
 
         pts = priv->sync_time;
         // see temporal_reference definition in spec, 30000/1001Hz
@@ -707,6 +709,8 @@ fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
         pic_param->num_macroblocks_in_gob = priv->svh_hdr.num_macroblocks_in_gob;
     }
     else {
+        int i;
+
         // VOL parameters
         pic_param->vol_fields.bits.short_video_header = 0;
         pic_param->vol_fields.bits.chroma_format = priv->vol_hdr.chroma_format;
@@ -720,7 +724,7 @@ fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
         pic_param->vol_fields.bits.reversible_vlc = priv->vol_hdr.reversible_vlc;
         pic_param->vol_fields.bits.resync_marker_disable = priv->vol_hdr.resync_marker_disable;
         pic_param->no_of_sprite_warping_points = priv->vol_hdr.no_of_sprite_warping_points;
-        int i =0;
+
         for (i=0; i<3 && i<priv->vol_hdr.no_of_sprite_warping_points ; i++) {
             pic_param->sprite_trajectory_du[i] = priv->sprite_trajectory.vop_ref_points[i];
             pic_param->sprite_trajectory_dv[i] = priv->sprite_trajectory.sprite_ref_points[i];
@@ -839,6 +843,10 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
         status = decode_gop(decoder, packet.data + packet.offset, packet.size);
     }
     else if (tos->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
+        GstMpeg4Packet video_packet;
+        const guint8 *_data;
+        gint _data_size;
+
         status = decode_picture(decoder, packet.data + packet.offset, packet.size);
         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
             return status;
@@ -852,9 +860,8 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
          * while MB doesn't start from byte boundary -- it is what 'macroblock_offset'
          * in slice refer to
          */
-        const guint8 *_data = packet.data + packet.offset + priv->vop_hdr.size/8;
-        gint _data_size = packet.size - (priv->vop_hdr.size/8);
-        GstMpeg4Packet video_packet;
+        _data = packet.data + packet.offset + priv->vop_hdr.size/8;
+        _data_size = packet.size - (priv->vop_hdr.size/8);
 
         if (priv->vol_hdr.resync_marker_disable) {
             status = decode_slice(decoder, _data, _data_size, FALSE);
@@ -862,11 +869,12 @@ decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
                 return status;
         }
         else {
+            GstMpeg4ParseResult ret = GST_MPEG4_PARSER_OK;
+            gboolean first_slice = TRUE;
+
             // next start_code is required to determine the end of last slice
             _data_size += 4;
-            GstMpeg4ParseResult ret = GST_MPEG4_PARSER_OK;
 
-            gboolean first_slice = TRUE;
             while (_data_size > 0) {
                 // we can skip user data here
                 ret = gst_mpeg4_parse(&video_packet, TRUE, &priv->vop_hdr, _data, 0, _data_size);
@@ -955,6 +963,8 @@ gst_vaapi_decoder_mpeg4_decode_codec_data(GstVaapiDecoder *base_decoder,
     GstVaapiDecoderMpeg4 * const decoder =
         GST_VAAPI_DECODER_MPEG4_CAST(base_decoder);
     GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+    GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;
+    GstMpeg4Packet packet;
     guchar *buf;
     guint pos, buf_size;
 
@@ -968,8 +978,6 @@ gst_vaapi_decoder_mpeg4_decode_codec_data(GstVaapiDecoder *base_decoder,
     buf[buf_size-1] = 0xb2;
 
     pos = 0;
-    GstMpeg4Packet packet;
-    GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;
 
     while (result == GST_MPEG4_PARSER_OK && pos < buf_size) {
         result = gst_mpeg4_parse(&packet, FALSE, NULL, buf, pos, buf_size);

@@ -591,11 +591,12 @@ bs_write_subset_sps (GstBitWriter * bs,
 
   for (i = 1; i <= num_views_minus1; i++) {
     guint32 num_anchor_refs_l0 = 0;
+    guint32 num_anchor_refs_l1 = 0;
+
     WRITE_UE (bs, num_anchor_refs_l0);
     for (j = 0; j < num_anchor_refs_l0; j++)
       WRITE_UE (bs, 0);
 
-    guint32 num_anchor_refs_l1 = 0;
     WRITE_UE (bs, num_anchor_refs_l1);
     for (j = 0; j < num_anchor_refs_l1; j++)
       WRITE_UE (bs, 0);
@@ -603,11 +604,12 @@ bs_write_subset_sps (GstBitWriter * bs,
 
   for (i = 1; i <= num_views_minus1; i++) {
     guint32 num_non_anchor_refs_l0 = 0;
+    guint32 num_non_anchor_refs_l1 = 0;
+
     WRITE_UE (bs, num_non_anchor_refs_l0);
     for (j = 0; j < num_non_anchor_refs_l0; j++)
       WRITE_UE (bs, 0);
 
-    guint32 num_non_anchor_refs_l1 = 0;
     WRITE_UE (bs, num_non_anchor_refs_l1);
     for (j = 0; j < num_non_anchor_refs_l1; j++)
       WRITE_UE (bs, 0);
@@ -2452,13 +2454,14 @@ reset_properties (GstVaapiEncoderH264 * encoder)
 
   for (i = 0; i < encoder->num_views; i++) {
     GstVaapiH264ViewRefPool *const ref_pool = &encoder->ref_pools[i];
+    GstVaapiH264ViewReorderPool *const reorder_pool =
+        &encoder->reorder_pools[i];
+
     ref_pool->max_reflist0_count = 1;
     ref_pool->max_reflist1_count = encoder->num_bframes > 0;
     ref_pool->max_ref_frames = ref_pool->max_reflist0_count
         + ref_pool->max_reflist1_count;
 
-    GstVaapiH264ViewReorderPool *const reorder_pool =
-        &encoder->reorder_pools[i];
     reorder_pool->frame_index = 0;
   }
 }

@@ -596,11 +596,12 @@ get_operations_default (void)
   ensure_properties ();
 
   for (i = 0; i < N_PROPERTIES; i++) {
+    GstVaapiFilterOpData *op_data;
     GParamSpec *const pspec = g_properties[i];
     if (!pspec)
       continue;
 
-    GstVaapiFilterOpData *const op_data = op_data_new (i, pspec);
+    op_data = op_data_new (i, pspec);
     if (!op_data)
       goto error;
     g_ptr_array_add (ops, op_data);

@@ -198,17 +198,18 @@ gst_vaapidecode_update_src_caps (GstVaapiDecode * decode)
 {
   GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);
   GstVideoCodecState *state, *ref_state;
+  GstVaapiCapsFeature feature;
+  GstCapsFeatures *features = NULL;
   GstVideoInfo *vi;
   GstVideoFormat format = GST_VIDEO_FORMAT_I420;
+  GstClockTime latency;
+  gint fps_d, fps_n;
 
   if (!decode->input_state)
     return FALSE;
 
   ref_state = decode->input_state;
 
-  GstCapsFeatures *features = NULL;
-  GstVaapiCapsFeature feature;
-
   feature =
       gst_vaapi_find_preferred_caps_feature (GST_VIDEO_DECODER_SRC_PAD (vdec),
       GST_VIDEO_INFO_FORMAT (&ref_state->info), &format);
@@ -246,8 +247,8 @@ gst_vaapidecode_update_src_caps (GstVaapiDecode * decode)
   gst_caps_replace (&decode->srcpad_caps, state->caps);
   gst_video_codec_state_unref (state);
 
-  gint fps_n = GST_VIDEO_INFO_FPS_N (vi);
-  gint fps_d = GST_VIDEO_INFO_FPS_D (vi);
+  fps_n = GST_VIDEO_INFO_FPS_N (vi);
+  fps_d = GST_VIDEO_INFO_FPS_D (vi);
   if (fps_n <= 0 || fps_d <= 0) {
     GST_DEBUG_OBJECT (decode, "forcing 25/1 framerate for latency calculation");
     fps_n = 25;
@@ -258,7 +259,7 @@ gst_vaapidecode_update_src_caps (GstVaapiDecode * decode)
    * latency in general, with perfectly known unit boundaries (NALU,
    * AU), and up to 2 frames when we need to wait for the second frame
    * start to determine the first frame is complete */
-  GstClockTime latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
+  latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
   gst_video_decoder_set_latency (vdec, latency, latency);
 
   return TRUE;
@@ -464,6 +465,7 @@ gst_vaapidecode_handle_frame (GstVideoDecoder * vdec,
 {
   GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
   GstVaapiDecoderStatus status;
+  GstVaapiPluginBase *plugin;
   GstFlowReturn ret;
 
   if (!decode->input_state)
@@ -478,7 +480,7 @@ gst_vaapidecode_handle_frame (GstVideoDecoder * vdec,
     if (!gst_video_decoder_negotiate (vdec))
       goto not_negotiated;
 
-    GstVaapiPluginBase *const plugin = GST_VAAPI_PLUGIN_BASE (vdec);
+    plugin = GST_VAAPI_PLUGIN_BASE (vdec);
     if (!gst_vaapi_plugin_base_set_caps (plugin, NULL, decode->srcpad_caps))
       goto not_negotiated;
 

@@ -180,12 +180,13 @@ error_create_proxy:
 void
 gst_vaapi_plugin_base_class_init (GstVaapiPluginBaseClass * klass)
 {
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
   klass->has_interface = default_has_interface;
   klass->display_changed = default_display_changed;
 
   plugin_parent_class = g_type_class_peek_parent (klass);
 
-  GstElementClass *const element_class = GST_ELEMENT_CLASS (klass);
   element_class->set_context = GST_DEBUG_FUNCPTR (plugin_set_context);
 }
 

@@ -412,14 +412,14 @@ GstCaps *
 gst_vaapi_video_format_new_template_caps_with_features (GstVideoFormat format,
     const gchar * features_string)
 {
+  GstCapsFeatures *features;
   GstCaps *caps;
 
   caps = gst_vaapi_video_format_new_template_caps (format);
   if (!caps)
     return NULL;
 
-  GstCapsFeatures *const features =
-      gst_caps_features_new (features_string, NULL);
+  features = gst_caps_features_new (features_string, NULL);
   if (!features) {
     gst_caps_unref (caps);
     return NULL;

@@ -479,6 +479,7 @@ gst_vaapipostproc_process_vpp (GstBaseTransform * trans, GstBuffer * inbuf,
   GstVaapiDeinterlaceMethod deint_method;
   guint flags, deint_flags;
   gboolean tff, deint, deint_refs, deint_changed;
+  const GstVideoCropMeta *crop_meta;
   GstVaapiRectangle *crop_rect = NULL;
   GstVaapiRectangle tmp_rect;
 
@@ -527,7 +528,7 @@ gst_vaapipostproc_process_vpp (GstBaseTransform * trans, GstBuffer * inbuf,
     goto error_invalid_buffer;
   inbuf_surface = gst_vaapi_video_meta_get_surface (inbuf_meta);
 
-  GstVideoCropMeta *const crop_meta = gst_buffer_get_video_crop_meta (inbuf);
+  crop_meta = gst_buffer_get_video_crop_meta (inbuf);
   if (crop_meta) {
     crop_rect = &tmp_rect;
     crop_rect->x = crop_meta->x;
@@ -974,11 +975,13 @@ expand_allowed_srcpad_caps (GstVaapiPostproc * postproc, GstCaps * caps)
   num_structures = gst_caps_get_size (caps);
   for (i = 0; i < num_structures; i++) {
     GstCapsFeatures *const features = gst_caps_get_features (caps, i);
+    GstStructure *structure;
+
     if (gst_caps_features_contains (features,
             GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META))
       continue;
 
-    GstStructure *const structure = gst_caps_get_structure (caps, i);
+    structure = gst_caps_get_structure (caps, i);
     if (!structure)
       continue;
     gst_structure_set_value (structure, "format", &value);

@@ -409,19 +409,22 @@ app_run (App * app)
   buffer_thread = g_thread_new ("get buffer thread", get_buffer_thread, app);
 
   while (1) {
+    GstVaapiSurfaceProxy *proxy;
+    GstVaapiSurface *surface;
+
     if (!load_frame (app, image))
       break;
 
     if (!gst_vaapi_image_unmap (image))
       break;
 
-    GstVaapiSurfaceProxy *proxy =
+    proxy =
         gst_vaapi_surface_proxy_new_from_pool (GST_VAAPI_SURFACE_POOL (pool));
     if (!proxy) {
       g_warning ("Could not get surface proxy from pool.");
       break;
     }
-    GstVaapiSurface *surface = gst_vaapi_surface_proxy_get_surface (proxy);
+    surface = gst_vaapi_surface_proxy_get_surface (proxy);
     if (!surface) {
       g_warning ("Could not get surface from proxy.");
       break;

@@ -246,12 +246,13 @@ parse_enum(const gchar *str, GType type, gint default_value,
     g_return_val_if_fail(out_value_ptr != NULL, FALSE);
 
     if (str) {
+        const GEnumValue *enum_value;
         GEnumClass * const enum_class = g_type_class_ref(type);
+
         if (!enum_class)
            return FALSE;
 
-        const GEnumValue * const enum_value =
-            g_enum_get_value_by_nick(enum_class, str);
+        enum_value = g_enum_get_value_by_nick(enum_class, str);
         if (enum_value)
             out_value = enum_value->value;
         g_type_class_unref(enum_class);