/* GStreamer
* Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_VIDEO_FORMAT_H__
#define __GST_VIDEO_FORMAT_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#include <gst/video/video-enumtypes.h>
/**
* GstVideoFormat:
* @GST_VIDEO_FORMAT_UNKNOWN: Unknown or unset video format id
* @GST_VIDEO_FORMAT_ENCODED: Encoded video format
* @GST_VIDEO_FORMAT_I420: planar 4:2:0 YUV
* @GST_VIDEO_FORMAT_YV12: planar 4:2:0 YVU (like I420 but UV planes swapped)
* @GST_VIDEO_FORMAT_YUY2: packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
* @GST_VIDEO_FORMAT_UYVY: packed 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
* @GST_VIDEO_FORMAT_AYUV: packed 4:4:4 YUV with alpha channel (A0-Y0-U0-V0 ...)
* @GST_VIDEO_FORMAT_RGBx: sparse rgb packed into 32 bit, space last
* @GST_VIDEO_FORMAT_BGRx: sparse reverse rgb packed into 32 bit, space last
* @GST_VIDEO_FORMAT_xRGB: sparse rgb packed into 32 bit, space first
* @GST_VIDEO_FORMAT_xBGR: sparse reverse rgb packed into 32 bit, space first
* @GST_VIDEO_FORMAT_RGBA: rgb with alpha channel last
* @GST_VIDEO_FORMAT_BGRA: reverse rgb with alpha channel last
* @GST_VIDEO_FORMAT_ARGB: rgb with alpha channel first
* @GST_VIDEO_FORMAT_ABGR: reverse rgb with alpha channel first
* @GST_VIDEO_FORMAT_RGB: rgb
* @GST_VIDEO_FORMAT_BGR: reverse rgb
* @GST_VIDEO_FORMAT_Y41B: planar 4:1:1 YUV
* @GST_VIDEO_FORMAT_Y42B: planar 4:2:2 YUV
* @GST_VIDEO_FORMAT_YVYU: packed 4:2:2 YUV (Y0-V0-Y1-U0 Y2-V2-Y3-U2 Y4 ...)
* @GST_VIDEO_FORMAT_Y444: planar 4:4:4 YUV
* @GST_VIDEO_FORMAT_v210: packed 4:2:2 10-bit YUV, complex format
* @GST_VIDEO_FORMAT_v216: packed 4:2:2 16-bit YUV, Y0-U0-Y1-V1 order
* @GST_VIDEO_FORMAT_NV12: planar 4:2:0 YUV with interleaved UV plane
* @GST_VIDEO_FORMAT_NV21: planar 4:2:0 YUV with interleaved VU plane
* @GST_VIDEO_FORMAT_GRAY8: 8-bit grayscale
* @GST_VIDEO_FORMAT_GRAY16_BE: 16-bit grayscale, most significant byte first
* @GST_VIDEO_FORMAT_GRAY16_LE: 16-bit grayscale, least significant byte first
* @GST_VIDEO_FORMAT_v308: packed 4:4:4 YUV
* @GST_VIDEO_FORMAT_RGB16: rgb 5-6-5 bits per component
* @GST_VIDEO_FORMAT_BGR16: reverse rgb 5-6-5 bits per component
* @GST_VIDEO_FORMAT_RGB15: rgb 5-5-5 bits per component
* @GST_VIDEO_FORMAT_BGR15: reverse rgb 5-5-5 bits per component
* @GST_VIDEO_FORMAT_UYVP: packed 10-bit 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
* @GST_VIDEO_FORMAT_A420: planar 4:4:2:0 AYUV
* @GST_VIDEO_FORMAT_RGB8P: 8-bit paletted RGB
* @GST_VIDEO_FORMAT_YUV9: planar 4:1:0 YUV
* @GST_VIDEO_FORMAT_YVU9: planar 4:1:0 YUV (like YUV9 but UV planes swapped)
* @GST_VIDEO_FORMAT_IYU1: packed 4:1:1 YUV (Cb-Y0-Y1-Cr-Y2-Y3 ...)
* @GST_VIDEO_FORMAT_ARGB64: rgb with alpha channel first, 16 bits per channel
* @GST_VIDEO_FORMAT_AYUV64: packed 4:4:4 YUV with alpha channel, 16 bits per channel (A0-Y0-U0-V0 ...)
* @GST_VIDEO_FORMAT_r210: packed 4:4:4 RGB, 10 bits per channel
* @GST_VIDEO_FORMAT_I420_10BE: planar 4:2:0 YUV, 10 bits per channel
* @GST_VIDEO_FORMAT_I420_10LE: planar 4:2:0 YUV, 10 bits per channel
* @GST_VIDEO_FORMAT_I422_10BE: planar 4:2:2 YUV, 10 bits per channel
* @GST_VIDEO_FORMAT_I422_10LE: planar 4:2:2 YUV, 10 bits per channel
* @GST_VIDEO_FORMAT_Y444_10BE: planar 4:4:4 YUV, 10 bits per channel
* @GST_VIDEO_FORMAT_Y444_10LE: planar 4:4:4 YUV, 10 bits per channel
* @GST_VIDEO_FORMAT_GBR: planar 4:4:4 RGB, 8 bits per channel
* @GST_VIDEO_FORMAT_GBR_10BE: planar 4:4:4 RGB, 10 bits per channel
* @GST_VIDEO_FORMAT_GBR_10LE: planar 4:4:4 RGB, 10 bits per channel
*
* Enum value describing the most common video formats.
*/
typedef enum {
GST_VIDEO_FORMAT_UNKNOWN,
GST_VIDEO_FORMAT_ENCODED,
GST_VIDEO_FORMAT_I420,
GST_VIDEO_FORMAT_YV12,
GST_VIDEO_FORMAT_YUY2,
GST_VIDEO_FORMAT_UYVY,
GST_VIDEO_FORMAT_AYUV,
GST_VIDEO_FORMAT_RGBx,
GST_VIDEO_FORMAT_BGRx,
GST_VIDEO_FORMAT_xRGB,
GST_VIDEO_FORMAT_xBGR,
GST_VIDEO_FORMAT_RGBA,
GST_VIDEO_FORMAT_BGRA,
GST_VIDEO_FORMAT_ARGB,
GST_VIDEO_FORMAT_ABGR,
GST_VIDEO_FORMAT_RGB,
GST_VIDEO_FORMAT_BGR,
GST_VIDEO_FORMAT_Y41B,
GST_VIDEO_FORMAT_Y42B,
GST_VIDEO_FORMAT_YVYU,
GST_VIDEO_FORMAT_Y444,
GST_VIDEO_FORMAT_v210,
GST_VIDEO_FORMAT_v216,
GST_VIDEO_FORMAT_NV12,
GST_VIDEO_FORMAT_NV21,
GST_VIDEO_FORMAT_GRAY8,
GST_VIDEO_FORMAT_GRAY16_BE,
GST_VIDEO_FORMAT_GRAY16_LE,
GST_VIDEO_FORMAT_v308,
GST_VIDEO_FORMAT_RGB16,
GST_VIDEO_FORMAT_BGR16,
GST_VIDEO_FORMAT_RGB15,
GST_VIDEO_FORMAT_BGR15,
GST_VIDEO_FORMAT_UYVP,
GST_VIDEO_FORMAT_A420,
GST_VIDEO_FORMAT_RGB8P,
GST_VIDEO_FORMAT_YUV9,
GST_VIDEO_FORMAT_YVU9,
GST_VIDEO_FORMAT_IYU1,
GST_VIDEO_FORMAT_ARGB64,
GST_VIDEO_FORMAT_AYUV64,
GST_VIDEO_FORMAT_r210,
GST_VIDEO_FORMAT_I420_10BE,
GST_VIDEO_FORMAT_I420_10LE,
GST_VIDEO_FORMAT_I422_10BE,
GST_VIDEO_FORMAT_I422_10LE,
GST_VIDEO_FORMAT_Y444_10BE,
GST_VIDEO_FORMAT_Y444_10LE,
GST_VIDEO_FORMAT_GBR,
GST_VIDEO_FORMAT_GBR_10BE,
GST_VIDEO_FORMAT_GBR_10LE,
} GstVideoFormat;
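/* Example (informative sketch): format values map to and from canonical
* string names with the conversion functions declared further below.
*
*   GstVideoFormat fmt;
*
*   fmt = gst_video_format_from_string ("I420");
*   // fmt == GST_VIDEO_FORMAT_I420; unknown names yield GST_VIDEO_FORMAT_UNKNOWN
*   g_print ("%s\n", gst_video_format_to_string (fmt));   // prints "I420"
*/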
#define GST_VIDEO_MAX_PLANES 4
#define GST_VIDEO_MAX_COMPONENTS 4
typedef struct _GstVideoFormatInfo GstVideoFormatInfo;
/**
* GstVideoChromaSite:
* @GST_VIDEO_CHROMA_SITE_UNKNOWN: unknown cositing
* @GST_VIDEO_CHROMA_SITE_NONE: no cositing
* @GST_VIDEO_CHROMA_SITE_H_COSITED: chroma is horizontally cosited
* @GST_VIDEO_CHROMA_SITE_V_COSITED: chroma is vertically cosited
* @GST_VIDEO_CHROMA_SITE_ALT_LINE: chroma samples are sited on alternate lines
* @GST_VIDEO_CHROMA_SITE_COSITED: chroma samples cosited with luma samples
* @GST_VIDEO_CHROMA_SITE_JPEG: jpeg style cositing, also for mpeg1 and mjpeg
* @GST_VIDEO_CHROMA_SITE_MPEG2: mpeg2 style cositing
* @GST_VIDEO_CHROMA_SITE_DV: DV style cositing
*
* Various chroma sitings.
*/
typedef enum {
GST_VIDEO_CHROMA_SITE_UNKNOWN = 0,
GST_VIDEO_CHROMA_SITE_NONE = (1 << 0),
GST_VIDEO_CHROMA_SITE_H_COSITED = (1 << 1),
GST_VIDEO_CHROMA_SITE_V_COSITED = (1 << 2),
GST_VIDEO_CHROMA_SITE_ALT_LINE = (1 << 3),
/* some common chroma cositing */
GST_VIDEO_CHROMA_SITE_COSITED = (GST_VIDEO_CHROMA_SITE_H_COSITED | GST_VIDEO_CHROMA_SITE_V_COSITED),
GST_VIDEO_CHROMA_SITE_JPEG = (GST_VIDEO_CHROMA_SITE_NONE),
GST_VIDEO_CHROMA_SITE_MPEG2 = (GST_VIDEO_CHROMA_SITE_H_COSITED),
GST_VIDEO_CHROMA_SITE_DV = (GST_VIDEO_CHROMA_SITE_COSITED | GST_VIDEO_CHROMA_SITE_ALT_LINE),
} GstVideoChromaSite;
GstVideoChromaSite gst_video_chroma_from_string (const gchar * s);
const gchar * gst_video_chroma_to_string (GstVideoChromaSite site);
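/* Example (informative sketch): converting a chroma siting to and from its
* string form, e.g. when handling a "chroma-site" caps field. The exact set
* of accepted names ("mpeg2", "cosited", ...) is defined by the
* implementation.
*
*   GstVideoChromaSite site;
*
*   site = gst_video_chroma_from_string ("mpeg2");
*   // equivalent to GST_VIDEO_CHROMA_SITE_H_COSITED
*   g_print ("%s\n", gst_video_chroma_to_string (site));
*/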
/**
* GstVideoFormatFlags:
* @GST_VIDEO_FORMAT_FLAG_YUV: The video format is YUV, components are numbered
* 0=Y, 1=U, 2=V.
* @GST_VIDEO_FORMAT_FLAG_RGB: The video format is RGB, components are numbered
* 0=R, 1=G, 2=B.
* @GST_VIDEO_FORMAT_FLAG_GRAY: The video is gray, there is one gray component
* with index 0.
* @GST_VIDEO_FORMAT_FLAG_ALPHA: The video format has an alpha component, with
* component index 3.
* @GST_VIDEO_FORMAT_FLAG_LE: The video format has data stored in little-endian
* byte order.
* @GST_VIDEO_FORMAT_FLAG_PALETTE: The video format has a palette. The palette
* is stored in the second plane and indexes are stored in the first plane.
* @GST_VIDEO_FORMAT_FLAG_COMPLEX: The video format has a complex layout that
* can't be described with the usual information in the #GstVideoFormatInfo.
* @GST_VIDEO_FORMAT_FLAG_UNPACK: This format can be used in a
* #GstVideoFormatUnpack and #GstVideoFormatPack function.
*
* The different video flags that a format info can have.
*/
typedef enum
{
GST_VIDEO_FORMAT_FLAG_YUV = (1 << 0),
GST_VIDEO_FORMAT_FLAG_RGB = (1 << 1),
GST_VIDEO_FORMAT_FLAG_GRAY = (1 << 2),
GST_VIDEO_FORMAT_FLAG_ALPHA = (1 << 3),
GST_VIDEO_FORMAT_FLAG_LE = (1 << 4),
GST_VIDEO_FORMAT_FLAG_PALETTE = (1 << 5),
GST_VIDEO_FORMAT_FLAG_COMPLEX = (1 << 6),
GST_VIDEO_FORMAT_FLAG_UNPACK = (1 << 7)
} GstVideoFormatFlags;
/* YUV components */
#define GST_VIDEO_COMP_Y 0
#define GST_VIDEO_COMP_U 1
#define GST_VIDEO_COMP_V 2
/* RGB components */
#define GST_VIDEO_COMP_R 0
#define GST_VIDEO_COMP_G 1
#define GST_VIDEO_COMP_B 2
/* alpha component */
#define GST_VIDEO_COMP_A 3
/* palette components */
#define GST_VIDEO_COMP_INDEX 0
#define GST_VIDEO_COMP_PALETTE 1
/**
* GstVideoPackFlags:
* @GST_VIDEO_PACK_FLAG_NONE: No flag
* @GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE: When the source has a smaller depth
* than the target format, set the least significant bits of the target
* to 0. This is likely slightly faster but less accurate. When this flag
* is not specified, the most significant bits of the source are duplicated
* in the least significant bits of the destination.
* @GST_VIDEO_PACK_FLAG_INTERLACED: The source is interlaced. The unpacked
* format will be interlaced as well with each line containing
* information from alternating fields.
*
* The different flags that can be used when packing and unpacking.
*/
typedef enum
{
GST_VIDEO_PACK_FLAG_NONE = 0,
GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE = (1 << 0),
GST_VIDEO_PACK_FLAG_INTERLACED = (1 << 1)
} GstVideoPackFlags;
/**
* GstVideoFormatUnpack:
* @info: a #GstVideoFormatInfo
* @flags: flags to control the unpacking
* @dest: a destination array
* @data: pointers to the data planes
* @stride: strides of the planes
* @x: the x position in the image to start from
* @y: the y position in the image to start from
* @width: the number of pixels to unpack.
*
* Unpacks @width pixels from the given planes and strides containing data of
* format @info. The pixels will be unpacked into @dest with each component
* interleaved. @dest should be at least big enough to hold @width *
* n_components * size(unpack_format) bytes.
*/
typedef void (*GstVideoFormatUnpack) (const GstVideoFormatInfo *info,
GstVideoPackFlags flags, gpointer dest,
const gpointer data[GST_VIDEO_MAX_PLANES],
const gint stride[GST_VIDEO_MAX_PLANES],
gint x, gint y, gint width);
/**
* GstVideoFormatPack:
* @info: a #GstVideoFormatInfo
* @flags: flags to control the packing
* @src: a source array
* @sstride: the source array stride
* @data: pointers to the destination data planes
* @stride: strides of the destination planes
* @chroma_site: the chroma siting of the target when subsampled
* @y: the y position in the image to pack to
* @width: the number of pixels to pack.
*
* Packs @width pixels from @src to the given planes and strides in the
* format @info. The pixels from source have each component interleaved
* and will be packed into the planes in @data.
*
* This function operates on pack_lines lines, meaning that @src should
* contain at least pack_lines lines with a stride of @sstride and @y
* should be a multiple of pack_lines.
*/
typedef void (*GstVideoFormatPack) (const GstVideoFormatInfo *info,
GstVideoPackFlags flags,
const gpointer src, gint sstride,
gpointer data[GST_VIDEO_MAX_PLANES],
const gint stride[GST_VIDEO_MAX_PLANES],
GstVideoChromaSite chroma_site,
gint y, gint width);
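/* Example (informative sketch): unpacking one line of an I420 frame. The
* unpack format of I420 is AYUV (4 interleaved 8-bit components), so the
* destination needs width * 4 bytes. The `planes`, `strides`, `line` and
* `width` variables are assumed to describe an already mapped frame.
*
*   const GstVideoFormatInfo *finfo;
*   guint8 *dest;
*
*   finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_I420);
*   dest = g_malloc (width * 4);
*   finfo->unpack_func (finfo, GST_VIDEO_PACK_FLAG_NONE, dest,
*       planes, strides, 0, line, width);
*   // ... process the interleaved AYUV pixels in dest ...
*   g_free (dest);
*/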
/**
* GstVideoFormatInfo:
* @format: #GstVideoFormat
* @name: string representation of the format
* @description: user readable description of the format
* @flags: #GstVideoFormatFlags
* @bits: The number of bits used to pack data items. This can be less than 8
* when multiple pixels are stored in a byte. For values > 8, multiple bytes
* should be read according to the endianness flag before applying the shift
* and mask.
* @n_components: the number of components in the video format.
* @shift: the number of bits to shift away to get the component data
* @depth: the depth in bits for each component
* @pixel_stride: the pixel stride of each component. This is the number of
* bytes to the pixel immediately to the right. When bits < 8, the stride is
* expressed in bits. For 24-bit RGB, this would be 3 bytes, for example,
* while it would be 4 bytes for RGBx or ARGB.
* @n_planes: the number of planes for this format. The number of planes can be
* less than the number of components when multiple components are packed into
* one plane.
* @plane: the plane number where a component can be found
* @poffset: the offset in the plane where the first pixel of the component
* can be found.
* @w_sub: subsampling factor of the width for the component. Use
* GST_VIDEO_SUB_SCALE to scale a width.
* @h_sub: subsampling factor of the height for the component. Use
* GST_VIDEO_SUB_SCALE to scale a height.
* @unpack_format: the format of the unpacked pixels. This format must have the
* #GST_VIDEO_FORMAT_FLAG_UNPACK flag set.
* @unpack_func: an unpack function for this format
* @pack_lines: the number of lines that will be packed
* @pack_func: a pack function for this format
*
* Information for a video format.
*/
struct _GstVideoFormatInfo {
GstVideoFormat format;
const gchar *name;
const gchar *description;
GstVideoFormatFlags flags;
guint bits;
guint n_components;
guint shift[GST_VIDEO_MAX_COMPONENTS];
guint depth[GST_VIDEO_MAX_COMPONENTS];
gint pixel_stride[GST_VIDEO_MAX_COMPONENTS];
guint n_planes;
guint plane[GST_VIDEO_MAX_COMPONENTS];
guint poffset[GST_VIDEO_MAX_COMPONENTS];
guint w_sub[GST_VIDEO_MAX_COMPONENTS];
guint h_sub[GST_VIDEO_MAX_COMPONENTS];
GstVideoFormat unpack_format;
GstVideoFormatUnpack unpack_func;
gint pack_lines;
GstVideoFormatPack pack_func;
gpointer _gst_reserved[GST_PADDING];
};
#define GST_VIDEO_FORMAT_INFO_FORMAT(info) ((info)->format)
#define GST_VIDEO_FORMAT_INFO_NAME(info) ((info)->name)
#define GST_VIDEO_FORMAT_INFO_FLAGS(info) ((info)->flags)
#define GST_VIDEO_FORMAT_INFO_IS_YUV(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_YUV)
#define GST_VIDEO_FORMAT_INFO_IS_RGB(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_RGB)
#define GST_VIDEO_FORMAT_INFO_IS_GRAY(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_GRAY)
#define GST_VIDEO_FORMAT_INFO_HAS_ALPHA(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
#define GST_VIDEO_FORMAT_INFO_IS_LE(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_LE)
#define GST_VIDEO_FORMAT_INFO_HAS_PALETTE(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_PALETTE)
#define GST_VIDEO_FORMAT_INFO_IS_COMPLEX(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_COMPLEX)
#define GST_VIDEO_FORMAT_INFO_BITS(info) ((info)->bits)
#define GST_VIDEO_FORMAT_INFO_N_COMPONENTS(info) ((info)->n_components)
#define GST_VIDEO_FORMAT_INFO_SHIFT(info,c) ((info)->shift[c])
#define GST_VIDEO_FORMAT_INFO_DEPTH(info,c) ((info)->depth[c])
/**
* GST_VIDEO_FORMAT_INFO_PSTRIDE:
*
* pixel stride for the given component. This is the number of bytes to the
* pixel immediately to the right, so basically bytes from one pixel to the
* next. When bits < 8, the stride is expressed in bits.
*
* Examples: for 24-bit RGB, the pixel stride would be 3 bytes, while it
* would be 4 bytes for RGBx or ARGB, and 8 bytes for ARGB64 or AYUV64.
* For planar formats such as I420 the pixel stride is usually 1. For
* YUY2 it would be 2 bytes.
*/
#define GST_VIDEO_FORMAT_INFO_PSTRIDE(info,c) ((info)->pixel_stride[c])
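/* Example (informative sketch): querying per-component properties through
* the accessor macros.
*
*   const GstVideoFormatInfo *finfo;
*
*   finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_ARGB);
*   if (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (finfo))
*     g_print ("alpha: depth %u, pixel stride %d\n",
*         GST_VIDEO_FORMAT_INFO_DEPTH (finfo, GST_VIDEO_COMP_A),
*         GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, GST_VIDEO_COMP_A));
*   // for ARGB this prints a depth of 8 and a pixel stride of 4
*/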
/**
* GST_VIDEO_FORMAT_INFO_N_PLANES:
*
* Number of planes. This is the number of planes into which the pixel layout
* is organized in memory. The number of planes can be less than the
* number of components (e.g. Y, U, V, A or R, G, B, A) when multiple
* components are packed into one plane.
*
* Examples: RGB/RGBx/RGBA: 1 plane, 3/3/4 components;
* I420: 3 planes, 3 components; NV21/NV12: 2 planes, 3 components.
*/
#define GST_VIDEO_FORMAT_INFO_N_PLANES(info) ((info)->n_planes)
/**
* GST_VIDEO_FORMAT_INFO_PLANE:
*
* Plane number where the given component can be found. A plane may
* contain data for multiple components.
*/
#define GST_VIDEO_FORMAT_INFO_PLANE(info,c) ((info)->plane[c])
#define GST_VIDEO_FORMAT_INFO_POFFSET(info,c) ((info)->poffset[c])
#define GST_VIDEO_FORMAT_INFO_W_SUB(info,c) ((info)->w_sub[c])
#define GST_VIDEO_FORMAT_INFO_H_SUB(info,c) ((info)->h_sub[c])
/* rounds up */
#define GST_VIDEO_SUB_SCALE(scale,val) (-((-((gint)(val)))>>(scale)))
#define GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info,c,w) GST_VIDEO_SUB_SCALE ((info)->w_sub[c],(w))
#define GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info,c,h) GST_VIDEO_SUB_SCALE ((info)->h_sub[c],(h))
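/* Example (informative sketch): computing the dimensions of a subsampled
* component. I420 has w_sub = h_sub = 1 for the chroma components, so a
* 1920x1080 frame has 960x540 chroma planes (the scale macros round up for
* odd sizes).
*
*   const GstVideoFormatInfo *finfo;
*   gint cw, ch;
*
*   finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_I420);
*   cw = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, GST_VIDEO_COMP_U, 1920);
*   ch = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, GST_VIDEO_COMP_U, 1080);
*   // cw == 960, ch == 540
*/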
#define GST_VIDEO_FORMAT_INFO_DATA(info,planes,comp) \
(((guint8*)(planes)[(info)->plane[comp]]) + (info)->poffset[comp])
/**
* GST_VIDEO_FORMAT_INFO_STRIDE:
*
* Row stride in bytes, that is, the number of bytes from the first pixel component
* of a row to the first pixel component in the next row. This might include
* some row padding (memory not actually used for anything, to make sure the
* beginning of the next row is aligned in a particular way).
*/
#define GST_VIDEO_FORMAT_INFO_STRIDE(info,strides,comp) ((strides)[(info)->plane[comp]])
#define GST_VIDEO_FORMAT_INFO_OFFSET(info,offsets,comp) \
(((offsets)[(info)->plane[comp]]) + (info)->poffset[comp])
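/* Example (informative sketch): locating the data of one component given the
* per-plane data pointers and strides of a mapped frame (here assumed to be
* available as `data` and `stride` arrays).
*
*   const GstVideoFormatInfo *finfo;
*   guint8 *u_pixels;
*   gint u_stride;
*
*   finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_NV12);
*   // for NV12 the U component lives in plane 1 at byte offset 0
*   u_pixels = GST_VIDEO_FORMAT_INFO_DATA (finfo, data, GST_VIDEO_COMP_U);
*   u_stride = GST_VIDEO_FORMAT_INFO_STRIDE (finfo, stride, GST_VIDEO_COMP_U);
*/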
/* format properties */
GstVideoFormat gst_video_format_from_masks (gint depth, gint bpp, gint endianness,
gint red_mask, gint green_mask,
gint blue_mask, gint alpha_mask) G_GNUC_CONST;
GstVideoFormat gst_video_format_from_fourcc (guint32 fourcc) G_GNUC_CONST;
GstVideoFormat gst_video_format_from_string (const gchar *format) G_GNUC_CONST;
guint32 gst_video_format_to_fourcc (GstVideoFormat format) G_GNUC_CONST;
const gchar * gst_video_format_to_string (GstVideoFormat format) G_GNUC_CONST;
const GstVideoFormatInfo *
gst_video_format_get_info (GstVideoFormat format) G_GNUC_CONST;
gconstpointer gst_video_format_get_palette (GstVideoFormat format, gsize *size);
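/* Example (informative sketch): retrieving the default palette of a paletted
* format such as RGB8P; @size is set to the size of the palette in bytes and
* the entries are assumed to be 32-bit ARGB values.
*
*   const guint32 *palette;
*   gsize size;
*
*   palette = gst_video_format_get_palette (GST_VIDEO_FORMAT_RGB8P, &size);
*   if (palette != NULL)
*     g_print ("default palette: %" G_GSIZE_FORMAT " bytes\n", size);
*/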
#define GST_VIDEO_SIZE_RANGE "(int) [ 1, max ]"
#define GST_VIDEO_FPS_RANGE "(fraction) [ 0, max ]"
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
# define GST_VIDEO_NE(s) G_STRINGIFY(s)"_LE"
# define GST_VIDEO_OE(s) G_STRINGIFY(s)"_BE"
#else
# define GST_VIDEO_NE(s) G_STRINGIFY(s)"_BE"
# define GST_VIDEO_OE(s) G_STRINGIFY(s)"_LE"
#endif
#define GST_VIDEO_FORMATS_ALL "{ I420, YV12, YUY2, UYVY, AYUV, RGBx, " \
"BGRx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, Y41B, Y42B, " \
"YVYU, Y444, v210, v216, NV12, NV21, GRAY8, GRAY16_BE, GRAY16_LE, " \
"v308, RGB16, BGR16, RGB15, BGR15, UYVP, A420, RGB8P, YUV9, YVU9, " \
"IYU1, ARGB64, AYUV64, r210, I420_10LE, I420_10BE, I422_10LE, I422_10BE, " \
" Y444_10LE, Y444_10BE, GBR, GBR_10LE, GBR_10BE }"
/**
* GST_VIDEO_CAPS_MAKE:
* @format: string format that describes the pixel layout, as a string
* (e.g. "I420", "RGB", "YV12", "YUY2", "AYUV", etc.)
*
* Generic caps string for video, for use in pad templates.
*/
#define GST_VIDEO_CAPS_MAKE(format) \
"video/x-raw, " \
"format = (string) " format ", " \
"width = " GST_VIDEO_SIZE_RANGE ", " \
"height = " GST_VIDEO_SIZE_RANGE ", " \
"framerate = " GST_VIDEO_FPS_RANGE
G_END_DECLS
#endif /* __GST_VIDEO_FORMAT_H__ */