diff --git a/gst-libs/gst/video/video-converter.c b/gst-libs/gst/video/video-converter.c
index 80c880def9..676b87617f 100644
--- a/gst-libs/gst/video/video-converter.c
+++ b/gst-libs/gst/video/video-converter.c
@@ -3085,8 +3085,8 @@ convert_scale_planes (GstVideoConverter * convert,
   }
 }
 
-static void
-setup_scale (GstVideoConverter * convert)
+static gboolean
+setup_scale (GstVideoConverter * convert, GstVideoFormat fformat)
 {
   int i, n_planes;
   gint method, stride = 0;
@@ -3101,6 +3101,16 @@ setup_scale (GstVideoConverter * convert)
   method = GET_OPT_RESAMPLER_METHOD (convert);
   taps = GET_OPT_RESAMPLER_TAPS (convert);
 
+  switch (GST_VIDEO_INFO_FORMAT (in_info)) {
+    case GST_VIDEO_FORMAT_RGB15:
+    case GST_VIDEO_FORMAT_RGB16:
+      if (method != GST_VIDEO_RESAMPLER_METHOD_NEAREST)
+        return FALSE;
+      break;
+    default:
+      break;
+  }
+
   if (n_planes == 1) {
     if (GST_VIDEO_INFO_IS_YUV (in_info)) {
       GstVideoScaler *y_scaler, *uv_scaler;
@@ -3136,7 +3146,7 @@ setup_scale (GstVideoConverter * convert)
         convert->config);
     gst_video_scaler_get_coeff (convert->fv_scaler[0], 0, NULL, &max_taps);
 
-    convert->fformat = GST_VIDEO_INFO_FORMAT (in_info);
+    convert->fformat = fformat;
   } else {
     for (i = 0; i < n_planes; i++) {
       guint n_taps;
@@ -3156,10 +3166,12 @@ setup_scale (GstVideoConverter * convert)
       gst_video_scaler_get_coeff (convert->fv_scaler[i], 0, NULL, &n_taps);
       max_taps = MAX (max_taps, n_taps);
     }
-    convert->fformat = GST_VIDEO_FORMAT_GRAY8;
+    convert->fformat = fformat;
   }
   convert->flines =
       converter_alloc_new (stride, max_taps + BACKLOG, NULL, NULL);
+
+  return TRUE;
 }
 
 /* Fast paths */
@@ -3174,6 +3186,7 @@ typedef struct
   gint width_align, height_align;
   void (*convert) (GstVideoConverter * convert, const GstVideoFrame * src,
       GstVideoFrame * dest);
+  GstVideoFormat fformat;
 } VideoTransform;
 
 static const VideoTransform transforms[] = {
@@ -3293,26 +3306,36 @@ static const VideoTransform transforms[] = {
 #endif
 
   {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_I420, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_GRAY8},
   {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_YV12, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_GRAY8},
   {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_Y41B, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_GRAY8},
   {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_Y42B, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_GRAY8},
   {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_A420, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_GRAY8},
   {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_YUV9, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_GRAY8},
   {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_YVU9, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_GRAY8},
 
   {GST_VIDEO_FORMAT_YUY2, GST_VIDEO_FORMAT_YUY2, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_YUY2},
   {GST_VIDEO_FORMAT_UYVY, GST_VIDEO_FORMAT_UYVY, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_UYVY},
   {GST_VIDEO_FORMAT_YVYU, GST_VIDEO_FORMAT_YVYU, TRUE, FALSE, FALSE, 0, 0,
-      convert_scale_planes},
+      convert_scale_planes, GST_VIDEO_FORMAT_YVYU},
+
+  {GST_VIDEO_FORMAT_RGB15, GST_VIDEO_FORMAT_RGB15, TRUE, FALSE, FALSE, 0, 0,
+      convert_scale_planes, GST_VIDEO_FORMAT_NV12},
+  {GST_VIDEO_FORMAT_RGB16, GST_VIDEO_FORMAT_RGB16, TRUE, FALSE, FALSE, 0, 0,
+      convert_scale_planes, GST_VIDEO_FORMAT_NV12},
+
+  {GST_VIDEO_FORMAT_RGB, GST_VIDEO_FORMAT_RGB, TRUE, FALSE, FALSE, 0, 0,
+      convert_scale_planes, GST_VIDEO_FORMAT_RGB},
+  {GST_VIDEO_FORMAT_BGR, GST_VIDEO_FORMAT_BGR, TRUE, FALSE, FALSE, 0, 0,
+      convert_scale_planes, GST_VIDEO_FORMAT_BGR},
 };
 
 static gboolean
@@ -3378,7 +3401,8 @@ video_converter_lookup_fastpath (GstVideoConverter * convert)
       convert->convert = transforms[i].convert;
       convert->tmpline = g_malloc0 (sizeof (guint16) * (width + 8) * 4);
       if (!transforms[i].keeps_size)
-        setup_scale (convert);
+        if (!setup_scale (convert, transforms[i].fformat))
+          return FALSE;
       return TRUE;
     }
   }
diff --git a/gst-libs/gst/video/video-scaler.c b/gst-libs/gst/video/video-scaler.c
index 9d0a763624..7b86235c25 100644
--- a/gst-libs/gst/video/video-scaler.c
+++ b/gst-libs/gst/video/video-scaler.c
@@ -404,6 +404,53 @@ video_scale_h_near_u8 (GstVideoScaler * scale, gpointer src, gpointer dest, guint dest_offset, guint width, guint n_elems)
     d[i] = s[offset[i]];
 }
 
+static void
+video_scale_h_near_3u8 (GstVideoScaler * scale,
+    gpointer src, gpointer dest, guint dest_offset, guint width, guint n_elems)
+{
+  guint8 *s, *d;
+  guint32 *offset;
+  gint i;
+
+  d = (guint8 *) dest + dest_offset;
+  s = (guint8 *) src;
+  offset = scale->resampler.offset + dest_offset;
+
+  for (i = 0; i < width; i++) {
+    d[i * 3 + 0] = s[offset[i] * 3 + 0];
+    d[i * 3 + 1] = s[offset[i] * 3 + 1];
+    d[i * 3 + 2] = s[offset[i] * 3 + 2];
+  }
+}
+
+static void
+video_scale_h_near_u16 (GstVideoScaler * scale,
+    gpointer src, gpointer dest, guint dest_offset, guint width, guint n_elems)
+{
+  guint16 *s, *d;
+  gint i;
+
+  d = (guint16 *) dest + dest_offset;
+  s = (guint16 *) src;
+
+  {
+#if 1
+    guint32 *offset = scale->resampler.offset + dest_offset;
+
+    for (i = 0; i < width; i++)
+      d[i] = s[offset[i]];
+#else
+    gint acc = 0;
+
+    for (i = 0; i < width; i++) {
+      gint j = (acc + 0x8000) >> 16;
+      d[i] = s[j];
+      acc += scale->inc;
+    }
+#endif
+  }
+}
+
 static void
 video_scale_h_near_u32 (GstVideoScaler * scale,
     gpointer src, gpointer dest, guint dest_offset, guint width, guint n_elems)
@@ -504,6 +551,29 @@ video_scale_h_ntap_u8 (GstVideoScaler * scale,
       d = (guint8 *) dest + dest_offset;
       break;
     }
+    case 2:
+    {
+      guint16 *p16 = (guint16 *) pixels;
+      guint16 *s = (guint16 *) src;
+
+      for (i = 0; i < count; i++)
+        p16[i] = s[offset_n[i]];
+
+      d = (guint16 *) dest + dest_offset;
+      break;
+    }
+    case 3:
+    {
+      guint8 *s = (guint8 *) src;
+
+      for (i = 0; i < count; i++) {
+        pixels[i * 3 + 0] = s[offset_n[i] * 3 + 0];
+        pixels[i * 3 + 1] = s[offset_n[i] * 3 + 1];
+        pixels[i * 3 + 2] = s[offset_n[i] * 3 + 2];
+      }
+      d = (guint8 *) dest + dest_offset * 3;
+      break;
+    }
     case 4:
     {
       guint32 *p32 = (guint32 *) pixels;
@@ -617,6 +687,18 @@ video_scale_h_ntap_u16 (GstVideoScaler * scale,
       d = (guint16 *) dest + dest_offset;
       break;
     }
+    case 3:
+    {
+      guint8 *s = (guint8 *) src;
+
+      for (i = 0; i < count; i++) {
+        pixels[i * 3 + 0] = s[offset_n[i] * 3 + 0];
+        pixels[i * 3 + 1] = s[offset_n[i] * 3 + 1];
+        pixels[i * 3 + 2] = s[offset_n[i] * 3 + 2];
+      }
+      d = (guint8 *) dest + dest_offset * 3;
+      break;
+    }
     case 4:
     {
       guint64 *p64 = (guint64 *) pixels;
@@ -1031,16 +1113,12 @@ gst_video_scaler_horizontal (GstVideoScaler * scale, GstVideoFormat format,
 {
   gint n_elems;
   GstVideoScalerHFunc func;
-  const GstVideoFormatInfo *finfo;
 
   g_return_if_fail (scale != NULL);
   g_return_if_fail (src != NULL);
   g_return_if_fail (dest != NULL);
   g_return_if_fail (dest_offset + width <= scale->resampler.out_size);
 
-  finfo = gst_video_format_get_info (format);
-  g_return_if_fail (finfo->n_planes == 1);
-
   switch (format) {
     case GST_VIDEO_FORMAT_GRAY8:
       switch (scale->resampler.max_taps) {
@@ -1070,6 +1148,18 @@ gst_video_scaler_horizontal (GstVideoScaler * scale, GstVideoFormat format,
       n_elems = 1;
       width *= 2;
       break;
+    case GST_VIDEO_FORMAT_RGB:
+    case GST_VIDEO_FORMAT_BGR:
+      switch (scale->resampler.max_taps) {
+        case 1:
+          func = video_scale_h_near_3u8;
+          break;
+        default:
+          func = video_scale_h_ntap_u8;
+          break;
+      }
+      n_elems = 3;
+      break;
     case GST_VIDEO_FORMAT_AYUV:
     case GST_VIDEO_FORMAT_RGBx:
     case GST_VIDEO_FORMAT_BGRx:
@@ -1104,6 +1194,21 @@ gst_video_scaler_horizontal (GstVideoScaler * scale, GstVideoFormat format,
       }
       n_elems = 4;
       break;
+    case GST_VIDEO_FORMAT_NV12:
+    case GST_VIDEO_FORMAT_NV16:
+    case GST_VIDEO_FORMAT_NV21:
+    case GST_VIDEO_FORMAT_NV24:
+      switch (scale->resampler.max_taps) {
+        case 1:
+          func = video_scale_h_near_u16;
+          n_elems = 1;
+          break;
+        default:
+          func = video_scale_h_ntap_u8;
+          n_elems = 2;
+          break;
+      }
+      break;
     default:
       goto no_func;
   }
@@ -1140,16 +1245,12 @@ gst_video_scaler_vertical (GstVideoScaler * scale, GstVideoFormat format,
 {
   gint n_elems, bits = 0;
   GstVideoScalerVFunc func;
-  const GstVideoFormatInfo *finfo;
 
   g_return_if_fail (scale != NULL);
   g_return_if_fail (src_lines != NULL);
   g_return_if_fail (dest != NULL);
   g_return_if_fail (dest_offset < scale->resampler.out_size);
 
-  finfo = gst_video_format_get_info (format);
-  g_return_if_fail (finfo->n_planes == 1);
-
   switch (format) {
     case GST_VIDEO_FORMAT_GRAY8:
       bits = 8;
@@ -1161,6 +1262,11 @@ gst_video_scaler_vertical (GstVideoScaler * scale, GstVideoFormat format,
       bits = 8;
       n_elems = 2;
       break;
+    case GST_VIDEO_FORMAT_RGB:
+    case GST_VIDEO_FORMAT_BGR:
+      bits = 8;
+      n_elems = 3;
+      break;
     case GST_VIDEO_FORMAT_AYUV:
     case GST_VIDEO_FORMAT_RGBx:
     case GST_VIDEO_FORMAT_BGRx:
@@ -1178,6 +1284,13 @@ gst_video_scaler_vertical (GstVideoScaler * scale, GstVideoFormat format,
       bits = 16;
       n_elems = 4;
       break;
+    case GST_VIDEO_FORMAT_NV12:
+    case GST_VIDEO_FORMAT_NV16:
+    case GST_VIDEO_FORMAT_NV21:
+    case GST_VIDEO_FORMAT_NV24:
+      bits = 8;
+      n_elems = 2;
+      break;
     default:
       goto no_func;
   }
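
Note (not part of the patch): a minimal usage sketch of how the new scale-only fast paths are reached from the public API. With the same GstVideoFormat on input and output but different dimensions, gst_video_converter_frame() should go through video_converter_lookup_fastpath() and, for RGB, select the convert_scale_planes entry added above (assuming no cropping/border options that disable the fast path). The helper name scale_rgb, the fixed RGB format and the missing error handling are illustrative assumptions; gst_init() is assumed to have been called.

#include <gst/video/video.h>
#include <gst/video/video-converter.h>

/* Illustrative helper (not from the patch): scale an RGB buffer from
 * in_w x in_h to out_w x out_h with GstVideoConverter. */
static GstBuffer *
scale_rgb (GstBuffer * inbuf, gint in_w, gint in_h, gint out_w, gint out_h)
{
  GstVideoInfo in_info, out_info;
  GstVideoFrame in_frame, out_frame;
  GstVideoConverter *convert;
  GstBuffer *outbuf;

  gst_video_info_set_format (&in_info, GST_VIDEO_FORMAT_RGB, in_w, in_h);
  gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGB, out_w, out_h);

  outbuf = gst_buffer_new_allocate (NULL, GST_VIDEO_INFO_SIZE (&out_info), NULL);

  /* Same format in and out, different size: scale-only conversion. */
  convert = gst_video_converter_new (&in_info, &out_info, NULL);

  gst_video_frame_map (&in_frame, &in_info, inbuf, GST_MAP_READ);
  gst_video_frame_map (&out_frame, &out_info, outbuf, GST_MAP_WRITE);

  gst_video_converter_frame (convert, &in_frame, &out_frame);

  gst_video_frame_unmap (&out_frame);
  gst_video_frame_unmap (&in_frame);
  gst_video_converter_free (convert);

  return outbuf;
}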