video: Add support for VANC and Closed Caption

This commit adds common elements for Ancillary Data and Closed
Caption support in GStreamer:

* A VBI (Vertical Blanking Interval) parser that supports detection
  and extraction of Ancillary Data according to the SMPTE S291M
  specification. It currently supports the v210 and UYVY video
  formats.

* A new GstMeta for Closed Captions: GstVideoCaptionMeta. It
  supports the two types of CC, CEA-608 and CEA-708, along with the
  four different ways they can be transported (other systems are
  supersets of those). A rough usage sketch of the new API follows.
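
Roughly, the two pieces are meant to be used together as follows. This is
only a sketch: the helper name and the way the VANC line, its width and the
video buffer are obtained are placeholders.

#include <gst/video/video-anc.h>

static void
extract_captions_from_vanc_line (GstBuffer * buffer, const guint8 * line,
    guint32 width)
{
  GstVideoVBIParser *parser;
  GstVideoAncillary anc;

  /* One parser per format/width; here the VANC line is assumed to be v210 */
  parser = gst_video_vbi_parser_new (GST_VIDEO_FORMAT_v210, width);
  if (parser == NULL)
    return;

  gst_video_vbi_parser_add_line (parser, line);

  /* Iterate over every Ancillary Data packet found on that line */
  while (gst_video_vbi_parser_get_ancillary (parser, &anc) ==
      GST_VIDEO_VBI_PARSER_RESULT_OK) {
    if (GST_VIDEO_ANCILLARY_DID16 (&anc) ==
        GST_VIDEO_ANCILLARY_DID16_S334_EIA_708) {
      /* The payload of a SMPTE 334 EIA-708 packet is a CDP */
      gst_buffer_add_video_caption_meta (buffer,
          GST_VIDEO_CAPTION_TYPE_CEA708_CDP, anc.data, anc.data_count);
    }
  }

  gst_video_vbi_parser_free (parser);
}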

https://bugzilla.gnome.org/show_bug.cgi?id=794901
Edward Hervey 2018-02-15 13:59:56 +01:00 committed by Edward Hervey
parent 43254a2196
commit 9dceb6ca52
7 changed files with 815 additions and 1 deletion

@@ -221,6 +221,7 @@
<xi:include href="xml/gstvideopool.xml" />
<xi:include href="xml/gstvideoutils.xml" />
<xi:include href="xml/gstnavigation.xml" />
<xi:include href="xml/gstvideoanc.xml" />
</chapter>
<chapter id="gl">
@@ -281,6 +282,10 @@
<title>Index of deprecated API</title>
<xi:include href="xml/api-index-deprecated.xml"><xi:fallback /></xi:include>
</index>
<index>
<title>Index of new API in 1.16</title>
<xi:include href="xml/api-index-1.16.xml"><xi:fallback /></xi:include>
</index>
<index>
<title>Index of new API in 1.14</title>
<xi:include href="xml/api-index-1.14.xml"><xi:fallback /></xi:include>

@@ -3477,6 +3477,43 @@ gst_video_codec_frame_get_type
gst_video_codec_state_get_type
</SECTION>
<SECTION>
<FILE>gstvideoanc</FILE>
<INCLUDE>gst/video/video.h</INCLUDE>
<SUBSECTION ancillary>
GstVideoAncillary
GstVideoAncillaryDID
GST_VIDEO_ANCILLARY_DID16
GstVideoAncillaryDID16
<SUBSECTION vbi>
GstVideoVBIParser
GstVideoVBIParserResult
gst_video_vbi_parser_new
gst_video_vbi_parser_free
gst_video_vbi_parser_add_line
gst_video_vbi_parser_get_ancillary
gst_video_vbi_parser_copy
<SUBSECTION closedcaption>
GstVideoCaptionType
GstVideoCaptionMeta
gst_buffer_get_video_caption_meta
gst_buffer_add_video_caption_meta
<SUBSECTION Standard>
GST_TYPE_VIDEO_ANCILLARY_DID
GST_TYPE_VIDEO_ANCILLARY_DI_D16
GST_TYPE_VIDEO_CAPTION_TYPE
GST_TYPE_VIDEO_VBI_PARSER_RESULT
GST_VIDEO_CAPTION_META_API_TYPE
gst_video_caption_meta_get_info
GST_VIDEO_CAPTION_META_INFO
gst_video_caption_meta_api_get_type
gst_video_vbi_parser_get_type
gst_video_ancillary_di_d16_get_type
gst_video_ancillary_did_get_type
gst_video_caption_type_get_type
gst_video_vbi_parser_result_get_type
</SECTION>
<SECTION>
<FILE>gstdiscoverer</FILE>
<INCLUDE>gst/pbutils/pbutils.h</INCLUDE>

@@ -4,7 +4,7 @@ include $(top_srcdir)/common/orc.mak
glib_enum_headers = video.h video-format.h video-color.h video-info.h video-dither.h \
colorbalance.h navigation.h video-chroma.h video-tile.h video-converter.h \
video-resampler.h video-frame.h video-scaler.h
video-resampler.h video-frame.h video-scaler.h video-anc.h
glib_enum_define = GST_VIDEO
glib_gen_prefix = gst_video
glib_gen_basename = video
@@ -24,6 +24,7 @@ libgstvideo_@GST_API_VERSION@_la_SOURCES = \
colorbalancechannel.c \
navigation.c \
video.c \
video-anc.c \
video-event.c \
video-format.c \
video-chroma.c \
@@ -62,6 +63,7 @@ libgstvideo_@GST_API_VERSION@include_HEADERS = \
navigation.h \
video.h \
video-prelude.h \
video-anc.h \
video-event.h \
video-format.h \
video-chroma.h \

@@ -14,6 +14,7 @@ video_sources = [
'gstvideoutilsprivate.c',
'navigation.c',
'video.c',
'video-anc.c',
'video-blend.c',
'video-chroma.c',
'video-color.c',
@@ -47,6 +48,7 @@ video_headers = [
'gstvideoutils.h',
'navigation.h',
'video.h',
'video-anc.h',
'video-event.h',
'video-format.h',
'video-chroma.h',
@@ -70,6 +72,7 @@ install_headers(video_headers, subdir : 'gstreamer-1.0/gst/video/')
video_mkenum_headers = [
'video.h',
'video-anc.h',
'video-format.h',
'video-frame.h',
'video-chroma.h',

@@ -0,0 +1,537 @@
/* GStreamer
* Copyright (C) 2018 Edward Hervey <edward@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <string.h>
#include <gst/base/gstbytereader.h>
#include "video-anc.h"
/**
* SECTION:gstvideoanc
* @title: GstVideo Ancillary
* @short_description: Utilities for Ancillary data, VBI and Closed Caption
*
* A collection of objects and methods to assist with handling Ancillary Data
* present in the Vertical Blanking Interval, as well as Closed Captions.
*/
#ifndef GST_DISABLE_GST_DEBUG
#define GST_CAT_DEFAULT ensure_debug_category()
static GstDebugCategory *
ensure_debug_category (void)
{
static gsize cat_gonce = 0;
if (g_once_init_enter (&cat_gonce)) {
gsize cat_done;
cat_done = (gsize) _gst_debug_category_new ("video-anc", 0,
"Ancillary data, VBI and CC utilities");
g_once_init_leave (&cat_gonce, cat_done);
}
return (GstDebugCategory *) cat_gonce;
}
#else
#define ensure_debug_category() /* NOOP */
#endif /* GST_DISABLE_GST_DEBUG */
struct _GstVideoVBIParser
{
GstVideoInfo info; /* format of the lines provided */
guint8 *work_data; /* Converted line in planar 16bit format */
guint32 work_data_size; /* Size of work_data in samples (8 or 16 bit each) */
guint offset; /* Current offset (in samples) in work_data */
gboolean bit16; /* Data is stored as 16bit if TRUE. Else 8bit(without parity) */
};
G_DEFINE_BOXED_TYPE (GstVideoVBIParser, gst_video_vbi_parser,
(GBoxedCopyFunc) gst_video_vbi_parser_copy,
(GBoxedFreeFunc) gst_video_vbi_parser_free);
GstVideoVBIParser *
gst_video_vbi_parser_copy (const GstVideoVBIParser * parser)
{
GstVideoVBIParser *res;
res = gst_video_vbi_parser_new (GST_VIDEO_INFO_FORMAT (&parser->info),
parser->info.width);
if (res) {
/* work_data_size is in samples; 16bit parsers store 2 bytes per sample */
memcpy (res->work_data, parser->work_data,
parser->bit16 ? parser->work_data_size * 2 : parser->work_data_size);
}
return res;
}
/* Smallest ANC packet size (which would have a Data Count of 0 though) */
#define SMALLEST_ANC_SIZE 7
static GstVideoVBIParserResult
get_ancillary_16 (GstVideoVBIParser * parser, GstVideoAncillary * anc)
{
gboolean found = FALSE;
guint16 *data;
g_return_val_if_fail (parser != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);
g_return_val_if_fail (anc != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);
data = (guint16 *) parser->work_data;
/* Stop as soon as there is no room left for the smallest possible packet */
while (parser->offset + SMALLEST_ANC_SIZE <= parser->work_data_size) {
guint8 DID, SDID, DC;
guint i;
/* Look for ADF
* FIXME : This assumes 10bit data with parity ! */
if (data[parser->offset] != 0x000 ||
data[parser->offset + 1] != 0x3ff ||
data[parser->offset + 2] != 0x3ff) {
parser->offset += 1;
continue;
}
/* FIXME : Add parity and checksum checks at some point if using
* 10bit data */
/* We have a valid ADF */
DID = data[parser->offset + 3] & 0xff;
SDID = data[parser->offset + 4] & 0xff;
DC = data[parser->offset + 5] & 0xff;
/* Check if we have enough room to get the User Data */
if (parser->offset + SMALLEST_ANC_SIZE + DC > parser->work_data_size)
goto not_enough_data;
/* We found a valid ANC \o/ */
anc->DID = DID;
anc->SDID_block_number = SDID;
anc->data_count = DC;
memset (anc->data, 0, 256);
for (i = 0; i < anc->data_count; i++)
anc->data[i] = data[parser->offset + 6 + i] & 0xff;
found = TRUE;
parser->offset += SMALLEST_ANC_SIZE + DC;
break;
}
if (found)
return GST_VIDEO_VBI_PARSER_RESULT_OK;
return GST_VIDEO_VBI_PARSER_RESULT_DONE;
/* ERRORS */
not_enough_data:
{
GST_WARNING ("ANC requires more User Data that available line size");
/* Avoid further calls to go in the same error */
parser->offset = parser->work_data_size;
return GST_VIDEO_VBI_PARSER_RESULT_ERROR;
}
}
static GstVideoVBIParserResult
get_ancillary_8 (GstVideoVBIParser * parser, GstVideoAncillary * anc)
{
gboolean found = FALSE;
guint8 *data;
g_return_val_if_fail (parser != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);
g_return_val_if_fail (anc != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);
data = parser->work_data;
/* Stop as soon as there is no room left for the smallest possible packet */
while (parser->offset + SMALLEST_ANC_SIZE <= parser->work_data_size) {
guint8 DID, SDID, DC;
guint i;
/* Look for 8bit ADF (0x00 0xff 0xff) */
if (data[parser->offset] != 0x00 ||
data[parser->offset + 1] != 0xff || data[parser->offset + 2] != 0xff) {
parser->offset += 1;
continue;
}
/* We have a valid ADF */
DID = data[parser->offset + 3];
SDID = data[parser->offset + 4];
DC = data[parser->offset + 5];
/* Check if we have enough room to get the User Data */
if (parser->offset + SMALLEST_ANC_SIZE + DC > parser->work_data_size)
goto not_enough_data;
/* We found a valid ANC \o/ */
anc->DID = DID;
anc->SDID_block_number = SDID;
anc->data_count = DC;
memset (anc->data, 0, 256);
for (i = 0; i < anc->data_count; i++)
anc->data[i] = data[parser->offset + 6 + i];
found = TRUE;
parser->offset += SMALLEST_ANC_SIZE + DC;
break;
}
if (found)
return GST_VIDEO_VBI_PARSER_RESULT_OK;
return GST_VIDEO_VBI_PARSER_RESULT_DONE;
/* ERRORS */
not_enough_data:
{
GST_WARNING ("ANC requires more User Data that available line size");
/* Avoid further calls to go in the same error */
parser->offset = parser->work_data_size;
return GST_VIDEO_VBI_PARSER_RESULT_ERROR;
}
}
/**
* gst_video_vbi_parser_get_ancillary:
* @parser: a #GstVideoVBIParser
* @anc: (out caller-allocates): a #GstVideoAncillary to store the eventual ancillary data
*
* Parse the line provided previously by gst_video_vbi_parser_add_line().
*
* Since: 1.16
*
* Returns: %GST_VIDEO_VBI_PARSER_RESULT_OK if ancillary data was found and
* @anc was filled. %GST_VIDEO_VBI_PARSER_RESULT_DONE if there wasn't any
* data.
*/
GstVideoVBIParserResult
gst_video_vbi_parser_get_ancillary (GstVideoVBIParser * parser,
GstVideoAncillary * anc)
{
if (parser->bit16)
return get_ancillary_16 (parser, anc);
return get_ancillary_8 (parser, anc);
}
/**
* gst_video_vbi_parser_new:
* @format: a #GstVideoFormat
* @pixel_width: The width in pixels to use
*
* Create a new #GstVideoVBIParser for the specified @format and @pixel_width.
*
* Since: 1.16
*
* Returns: The new #GstVideoVBIParser or %NULL if the @format and/or @pixel_width
* is not supported.
*/
GstVideoVBIParser *
gst_video_vbi_parser_new (GstVideoFormat format, guint32 pixel_width)
{
GstVideoVBIParser *parser;
switch (format) {
case GST_VIDEO_FORMAT_v210:
parser = g_new0 (GstVideoVBIParser, 1);
parser->bit16 = TRUE;
break;
case GST_VIDEO_FORMAT_UYVY:
parser = g_new0 (GstVideoVBIParser, 1);
parser->bit16 = FALSE;
break;
default:
GST_WARNING ("Format not supported by GstVideoVBIParser");
return NULL;
}
gst_video_info_init (&parser->info);
if (!gst_video_info_set_format (&parser->info, format, pixel_width, 1)) {
GST_ERROR ("Could not create GstVideoInfo");
g_free (parser);
return NULL;
}
/* Allocate the workspace, which holds 2 * pixel_width samples
* 2 : number of samples per pixel (we only deal with 4:2:2)
* Each sample is stored on 1 or 2 bytes depending on whether we are
* internally working in 8 or 16bit */
parser->work_data_size = 2 * pixel_width;
if (parser->bit16)
parser->work_data = g_malloc0 (parser->work_data_size * 2);
else
parser->work_data = g_malloc0 (parser->work_data_size);
parser->offset = 0;
return parser;
}
/**
* gst_video_vbi_parser_free:
* @parser: a #GstVideoVBIParser
*
* Frees the @parser.
*
* Since: 1.16
*/
void
gst_video_vbi_parser_free (GstVideoVBIParser * parser)
{
g_free (parser->work_data);
g_free (parser);
}
static void
convert_line_uyvy (GstVideoVBIParser * parser, const guint8 * data)
{
guint i;
guint8 *y = parser->work_data;
guint8 *uv = y + parser->info.width;
for (i = 0; i < parser->info.width - 3; i += 4) {
*uv++ = data[(i / 4) * 4 + 0];
*y++ = data[(i / 4) * 4 + 1];
*uv++ = data[(i / 4) * 4 + 2];
*y++ = data[(i / 4) * 4 + 3];
}
GST_MEMDUMP ("Converted line", parser->work_data, 128);
}
static void
gst_info_dump_mem16_line (gchar * linebuf, gsize linebuf_size,
const guint16 * mem, gsize mem_offset, gsize mem_size)
{
gchar hexstr[50], digitstr[6];
if (mem_size > 8)
mem_size = 8;
hexstr[0] = '\0';
if (mem != NULL) {
guint i = 0;
mem += mem_offset;
while (i < mem_size) {
g_snprintf (digitstr, sizeof (digitstr), "%04x ", mem[i]);
g_strlcat (hexstr, digitstr, sizeof (hexstr));
++i;
}
}
g_snprintf (linebuf, linebuf_size, "%08x: %-48.48s",
(guint) mem_offset, hexstr);
}
static void
convert_line_v210 (GstVideoVBIParser * parser, const guint8 * data)
{
guint i;
guint16 *y = (guint16 *) parser->work_data;
guint16 *uv = y + parser->info.width;
guint32 a, b, c, d;
/* Convert the line */
for (i = 0; i < parser->info.width - 5; i += 6) {
a = GST_READ_UINT32_LE (data + (i / 6) * 16 + 0);
b = GST_READ_UINT32_LE (data + (i / 6) * 16 + 4);
c = GST_READ_UINT32_LE (data + (i / 6) * 16 + 8);
d = GST_READ_UINT32_LE (data + (i / 6) * 16 + 12);
*uv++ = (a >> 0) & 0x3ff;
*y++ = (a >> 10) & 0x3ff;
*uv++ = (a >> 20) & 0x3ff;
*y++ = (b >> 0) & 0x3ff;
*uv++ = (b >> 10) & 0x3ff;
*y++ = (b >> 20) & 0x3ff;
*uv++ = (c >> 0) & 0x3ff;
*y++ = (c >> 10) & 0x3ff;
*uv++ = (c >> 20) & 0x3ff;
*y++ = (d >> 0) & 0x3ff;
*uv++ = (d >> 10) & 0x3ff;
*y++ = (d >> 20) & 0x3ff;
}
if (0) {
guint off = 0;
gsize length = parser->info.width * 2;
GST_TRACE ("--------"
"-------------------------------------------------------------------");
while (off < length) {
gchar buf[128];
/* gst_info_dump_mem16_line will process 16 bytes (8 16bit words) at most */
gst_info_dump_mem16_line (buf, sizeof (buf),
(guint16 *) parser->work_data, off, length - off);
GST_TRACE ("%s", buf);
off += 8;
}
GST_TRACE ("--------"
"-------------------------------------------------------------------");
}
}
/**
* gst_video_vbi_parser_add_line:
* @parser: a #GstVideoVBIParser
* @data: (transfer none): The line of data to parse
*
* Provide a new line of data to the @parser. Call gst_video_vbi_parser_get_ancillary()
* to get the Ancillary data that might be present on that line.
*
* Since: 1.16
*/
void
gst_video_vbi_parser_add_line (GstVideoVBIParser * parser, const guint8 * data)
{
/* Reset offset */
parser->offset = 0;
switch (GST_VIDEO_INFO_FORMAT (&parser->info)) {
case GST_VIDEO_FORMAT_v210:
convert_line_v210 (parser, data);
break;
case GST_VIDEO_FORMAT_UYVY:
convert_line_uyvy (parser, data);
break;
default:
GST_ERROR ("UNSUPPORTED FORMAT !");
g_assert_not_reached ();
break;
}
}
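/* Typical consumption pattern, as a sketch ("line_data" and "anc" are
 * caller-provided):
 *
 *   gst_video_vbi_parser_add_line (parser, line_data);
 *   while (gst_video_vbi_parser_get_ancillary (parser, &anc) ==
 *       GST_VIDEO_VBI_PARSER_RESULT_OK) {
 *     ... handle anc.DID, anc.SDID_block_number, anc.data ...
 *   }
 *
 * GST_VIDEO_VBI_PARSER_RESULT_DONE is returned once no more Ancillary Data
 * is present on the line that was last added. */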
/* Closed Caption Meta implementation *******************************************/
GType
gst_video_caption_meta_api_get_type (void)
{
static volatile GType type;
if (g_once_init_enter (&type)) {
static const gchar *tags[] = { NULL };
GType _type = gst_meta_api_type_register ("GstVideoCaptionMetaAPI", tags);
GST_INFO ("registering");
g_once_init_leave (&type, _type);
}
return type;
}
static gboolean
gst_video_caption_meta_transform (GstBuffer * dest, GstMeta * meta,
GstBuffer * buffer, GQuark type, gpointer data)
{
GstVideoCaptionMeta *dmeta, *smeta;
/* We always copy over the caption meta */
smeta = (GstVideoCaptionMeta *) meta;
GST_DEBUG ("copy caption metadata");
dmeta =
gst_buffer_add_video_caption_meta (dest, smeta->caption_type,
smeta->data, smeta->size);
if (!dmeta)
return FALSE;
return TRUE;
}
static gboolean
gst_video_caption_meta_init (GstMeta * meta, gpointer params,
GstBuffer * buffer)
{
GstVideoCaptionMeta *emeta = (GstVideoCaptionMeta *) meta;
memset (emeta, 0, sizeof (GstVideoCaptionMeta));
emeta->caption_type = GST_VIDEO_CAPTION_TYPE_UNKNOWN;
return TRUE;
}
static void
gst_video_caption_meta_free (GstMeta * meta, GstBuffer * buffer)
{
GstVideoCaptionMeta *emeta = (GstVideoCaptionMeta *) meta;
g_free (emeta->data);
}
const GstMetaInfo *
gst_video_caption_meta_get_info (void)
{
static const GstMetaInfo *meta_info = NULL;
if (g_once_init_enter ((GstMetaInfo **) & meta_info)) {
const GstMetaInfo *mi = gst_meta_register (GST_VIDEO_CAPTION_META_API_TYPE,
"GstVideoCaptionMeta",
sizeof (GstVideoCaptionMeta),
gst_video_caption_meta_init,
gst_video_caption_meta_free,
gst_video_caption_meta_transform);
g_once_init_leave ((GstMetaInfo **) & meta_info, (GstMetaInfo *) mi);
}
return meta_info;
}
/**
* gst_buffer_add_video_caption_meta:
* @buffer: a #GstBuffer
* @caption_type: The type of Closed Caption to add
* @data: (array length=size) (transfer none): The Closed Caption data
* @size: The size of @data in bytes
*
* Attaches #GstVideoCaptionMeta metadata to @buffer with the given
* parameters.
*
* Returns: (transfer none): the #GstVideoCaptionMeta on @buffer.
*
* Since: 1.16
*/
GstVideoCaptionMeta *
gst_buffer_add_video_caption_meta (GstBuffer * buffer,
GstVideoCaptionType caption_type, const guint8 * data, gsize size)
{
GstVideoCaptionMeta *meta;
g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);
g_return_val_if_fail (data != NULL, NULL);
g_return_val_if_fail (size > 0, NULL);
switch (caption_type) {
case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:
case GST_VIDEO_CAPTION_TYPE_CEA608_IN_CEA708_RAW:
case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:
case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:
break;
default:
GST_ERROR ("Unknown caption type !");
return NULL;
}
/* FIXME : Add checks for content ? */
meta = (GstVideoCaptionMeta *) gst_buffer_add_meta (buffer,
GST_VIDEO_CAPTION_META_INFO, NULL);
g_return_val_if_fail (meta != NULL, NULL);
meta->caption_type = caption_type;
meta->data = g_memdup (data, size);
meta->size = size;
return meta;
}
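/* Sketch of the expected round-trip: a producer attaches the meta, a
 * downstream consumer retrieves it (cdp_data/cdp_size and handle_captions()
 * are placeholders):
 *
 *   gst_buffer_add_video_caption_meta (buf,
 *       GST_VIDEO_CAPTION_TYPE_CEA708_CDP, cdp_data, cdp_size);
 *
 *   GstVideoCaptionMeta *meta = gst_buffer_get_video_caption_meta (buf);
 *   if (meta != NULL)
 *     handle_captions (meta->caption_type, meta->data, meta->size);
 */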

@@ -0,0 +1,229 @@
/* GStreamer
* Copyright (C) <2018> Edward Hervey <edward@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_VIDEO_ANC_H__
#define __GST_VIDEO_ANC_H__
#include <gst/gst.h>
#include <gst/video/video-format.h>
#include <gst/video/video-info.h>
G_BEGIN_DECLS
typedef struct _GstVideoAncillary GstVideoAncillary;
/**
* GstVideoAncillary:
* @DID: The Data Identifier
* @SDID_block_number: The Secondary Data Identifier (if type 2) or the Data
* Block Number (if type 1)
* @data_count: The amount of data (in bytes) in @data (max 255 bytes)
* @data: (array length=data_count): The user data content of the Ancillary packet.
* Does not contain the ADF, DID, SDID nor CS.
*
* Video Ancillary data, according to SMPTE-291M specification.
*
* Note that the contents of the data are always stored as 8bit data (i.e. do not contain
* the parity check bits).
*
* Since: 1.16
*/
struct _GstVideoAncillary {
guint8 DID;
guint8 SDID_block_number;
guint8 data_count;
guint8 data[256];
/*< private >*/
/* Padding for future extension */
gpointer _gst_reserved[GST_PADDING];
};
typedef enum {
GST_VIDEO_ANCILLARY_DID_UNDEFINED = 0x00,
GST_VIDEO_ANCILLARY_DID_DELETION = 0x80,
GST_VIDEO_ANCILLARY_DID_HANC_3G_AUDIO_DATA_FIRST = 0xa0,
GST_VIDEO_ANCILLARY_DID_HANC_3G_AUDIO_DATA_LAST = 0xa7,
GST_VIDEO_ANCILLARY_DID_HANC_HDTV_AUDIO_DATA_FIRST = 0xe0,
GST_VIDEO_ANCILLARY_DID_HANC_HDTV_AUDIO_DATA_LAST = 0xe7,
GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_1_FIRST = 0xec,
GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_1_LAST = 0xef,
GST_VIDEO_ANCILLARY_DID_CAMERA_POSITION = 0xf0,
GST_VIDEO_ANCILLARY_DID_HANC_ERROR_DETECTION = 0xf4,
GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_2_FIRST = 0xf8,
GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_2_LAST = 0xff,
} GstVideoAncillaryDID;
/**
* GST_VIDEO_ANCILLARY_DID16:
* @anc: a #GstVideoAncillary
*
* Returns the #GstVideoAncillaryDID16 of the ancillary data.
*
* Since: 1.16
*
* Returns: a #GstVideoAncillaryDID16 identifier
*/
#define GST_VIDEO_ANCILLARY_DID16(anc) ((guint16)((anc)->DID) << 8 | (guint16)((anc)->SDID_block_number))
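/* For example, a SMPTE 334 packet carrying CEA-708 captions has DID 0x61 and
 * SDID 0x01, so GST_VIDEO_ANCILLARY_DID16() yields 0x6101, which is
 * GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 below. */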
/**
* GstVideoAncillaryDID16:
* @GST_VIDEO_ANCILLARY_DID16_S334_EIA_708: CEA 708 Ancillary data according to SMPTE 334
* @GST_VIDEO_ANCILLARY_DID16_S334_EIA_608: CEA 608 Ancillary data according to SMPTE 334
*
* Some known types of Ancillary Data identifiers.
*
* Since: 1.16
*/
typedef enum {
GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 = 0x6101,
GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 = 0x6102,
} GstVideoAncillaryDID16;
/* Closed Caption support */
/**
* GstVideoCaptionType:
* @GST_VIDEO_CAPTION_TYPE_UNKNOWN: Unknown type of CC
* @GST_VIDEO_CAPTION_TYPE_CEA608_RAW: CEA-608 as byte pairs. Note that
* this format is not recommended since it does not specify which
* field the captions come from, and therefore assumes they come
* from the first field (and that there is no information on the
* second field). Use @GST_VIDEO_CAPTION_TYPE_CEA608_IN_CEA708_RAW
* if you wish to store CEA-608 from two fields.
* @GST_VIDEO_CAPTION_TYPE_CEA608_IN_CEA708_RAW: CEA-608 as cc_data byte triplets.
* The first byte of each triplet shall specify the field as in CEA-708
* (i.e. 0xFC for the first field or 0xFD for the second field). The 2nd
* and 3rd bytes of each triplet are the cc1 and cc2 bytes. Use this if
* there is *only* CEA-608 caption. If there is also CEA-708 caption,
* use @GST_VIDEO_CAPTION_TYPE_CEA708_RAW.
* @GST_VIDEO_CAPTION_TYPE_CEA708_RAW: CEA-708 as cc_data byte triplets. They
* can also contain 608-in-708.
* @GST_VIDEO_CAPTION_TYPE_CEA708_CDP: CEA-708 (and optionally CEA-608) in
* a CDP (Caption Distribution Packet) defined by SMPTE S-334-2.
* Contains the whole CDP (starting with 0x9669).
*
* The various known types of Closed Caption (CC).
*
* Since: 1.16
*/
typedef enum {
GST_VIDEO_CAPTION_TYPE_UNKNOWN = 0,
GST_VIDEO_CAPTION_TYPE_CEA608_RAW = 1,
GST_VIDEO_CAPTION_TYPE_CEA608_IN_CEA708_RAW = 2,
GST_VIDEO_CAPTION_TYPE_CEA708_RAW = 3,
GST_VIDEO_CAPTION_TYPE_CEA708_CDP = 4
} GstVideoCaptionType;
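/* As a rough illustration of the RAW variants: a single CEA-608 byte pair
 * such as 0x94 0x2c would be stored as those two bytes for
 * GST_VIDEO_CAPTION_TYPE_CEA608_RAW, but as the cc_data triplet
 * 0xfc 0x94 0x2c (field 1 marker followed by the byte pair) for
 * GST_VIDEO_CAPTION_TYPE_CEA608_IN_CEA708_RAW. */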
/**
* GstVideoCaptionMeta:
* @meta: parent #GstMeta
* @caption_type: The type of Closed Caption contained in the meta.
* @data: (array length=size): The Closed Caption data.
* @size: The size in bytes of @data
*
* Extra buffer metadata providing Closed Caption.
*
* Since: 1.16
*/
typedef struct {
GstMeta meta;
GstVideoCaptionType caption_type;
guint8 *data;
gsize size;
} GstVideoCaptionMeta;
GST_VIDEO_API
GType gst_video_caption_meta_api_get_type (void);
#define GST_VIDEO_CAPTION_META_API_TYPE (gst_video_caption_meta_api_get_type())
GST_VIDEO_API
const GstMetaInfo *gst_video_caption_meta_get_info (void);
#define GST_VIDEO_CAPTION_META_INFO (gst_video_caption_meta_get_info())
/**
* gst_buffer_get_video_caption_meta:
* @b: A #GstBuffer
*
* Gets the #GstVideoCaptionMeta that might be present on @b.
*
* Since: 1.16
*
* Returns: The first #GstVideoCaptionMeta present on @b, or %NULL if
* no #GstVideoCaptionMeta are present
*/
#define gst_buffer_get_video_caption_meta(b) \
((GstVideoCaptionMeta*)gst_buffer_get_meta((b),GST_VIDEO_CAPTION_META_API_TYPE))
GST_VIDEO_API
GstVideoCaptionMeta *gst_buffer_add_video_caption_meta (GstBuffer * buffer,
GstVideoCaptionType caption_type,
const guint8 *data,
gsize size);
/**
* GstVideoVBIParser:
*
* A parser for detecting and extracting #GstVideoAncillary data from
* Vertical Blanking Interval lines of component signals.
*
* Since: 1.16
*/
typedef struct _GstVideoVBIParser GstVideoVBIParser;
GST_VIDEO_API
GType gst_video_vbi_parser_get_type (void);
/**
* GstVideoVBIParserResult:
* @GST_VIDEO_VBI_PARSER_RESULT_DONE: No line was provided, or no more Ancillary data was found.
* @GST_VIDEO_VBI_PARSER_RESULT_OK: A #GstVideoAncillary was found.
* @GST_VIDEO_VBI_PARSER_RESULT_ERROR: An error occurred
*
* Return values for #GstVideoVBIParser
*
* Since: 1.16
*/
typedef enum {
GST_VIDEO_VBI_PARSER_RESULT_DONE = 0,
GST_VIDEO_VBI_PARSER_RESULT_OK = 1,
GST_VIDEO_VBI_PARSER_RESULT_ERROR = 2
} GstVideoVBIParserResult;
GST_VIDEO_API
GstVideoVBIParserResult gst_video_vbi_parser_get_ancillary(GstVideoVBIParser *parser,
GstVideoAncillary *anc);
GST_VIDEO_API
GstVideoVBIParser *gst_video_vbi_parser_new (GstVideoFormat format, guint32 pixel_width);
GST_VIDEO_API
GstVideoVBIParser *gst_video_vbi_parser_copy (const GstVideoVBIParser *parser);
GST_VIDEO_API
void gst_video_vbi_parser_free (GstVideoVBIParser *parser);
GST_VIDEO_API
void gst_video_vbi_parser_add_line (GstVideoVBIParser *parser, const guint8 *data);
G_END_DECLS
#endif /* __GST_VIDEO_ANC_H__ */

@@ -181,5 +181,6 @@ G_END_DECLS
#include <gst/video/videooverlay.h>
#include <gst/video/gstvideotimecode.h>
#include <gst/video/gstvideoaffinetransformationmeta.h>
#include <gst/video/video-anc.h>
#endif /* __GST_VIDEO_H__ */