/*
 * GStreamer
 * Copyright (C) 2018 Edward Hervey <edward@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-line21decoder
 * @title: line21decoder
 *
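 * Extract line 21 closed captions (CEA-608) from standard-definition
 * video frames and attach them to the outgoing buffers as
 * #GstVideoCaptionMeta.
 *
 * ## Example launch line
 * An illustrative pipeline (the file name is a placeholder); it assumes
 * the stream decodes to a supported 720-pixel-wide SD format that still
 * carries line 21 data:
 * |[
 * gst-launch-1.0 -v filesrc location=captioned.mov ! decodebin ! line21decoder ! fakesink
 * ]|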
 */

#ifdef HAVE_CONFIG_H
# include <config.h>
#endif

#include <gst/gst.h>
#include <gst/video/video.h>
#include <string.h>

#include "gstline21dec.h"

GST_DEBUG_CATEGORY_STATIC (gst_line_21_decoder_debug);
#define GST_CAT_DEFAULT gst_line_21_decoder_debug

enum
{
  PROP_0,
};

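/* Formats zvbi can consume more or less directly; v210 is also accepted,
 * but its luma is first converted to 8-bit I420-style lines. */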
#define SUPPORTED_FORMATS "{ I420, YUY2, YVYU, UYVY, VYUY, v210 }"

static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (SUPPORTED_FORMATS)));

static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (SUPPORTED_FORMATS)));

G_DEFINE_TYPE (GstLine21Decoder, gst_line_21_decoder, GST_TYPE_VIDEO_FILTER);
#define parent_class gst_line_21_decoder_parent_class

static void gst_line_21_decoder_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_line_21_decoder_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static GstStateChangeReturn gst_line_21_decoder_change_state (GstElement *
    element, GstStateChange transition);
static void gst_line_21_decoder_finalize (GObject * self);
static gboolean gst_line_21_decoder_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info,
    GstCaps * outcaps, GstVideoInfo * out_info);
static GstFlowReturn gst_line_21_decoder_transform_ip (GstVideoFilter * filter,
    GstVideoFrame * frame);
static GstFlowReturn gst_line_21_decoder_prepare_output_buffer (GstBaseTransform
    * trans, GstBuffer * in, GstBuffer ** out);

static void
gst_line_21_decoder_class_init (GstLine21DecoderClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstBaseTransformClass *transform_class;
  GstVideoFilterClass *filter_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  transform_class = (GstBaseTransformClass *) klass;
  filter_class = (GstVideoFilterClass *) klass;

  gobject_class->set_property = gst_line_21_decoder_set_property;
  gobject_class->get_property = gst_line_21_decoder_get_property;
  gobject_class->finalize = gst_line_21_decoder_finalize;

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_line_21_decoder_change_state);

  gst_element_class_set_static_metadata (gstelement_class,
      "Line 21 CC Decoder",
      "Filter",
      "Extract line21 CC from SD video streams",
      "Edward Hervey <edward@centricular.com>");

  gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
  gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);

  transform_class->prepare_output_buffer =
      gst_line_21_decoder_prepare_output_buffer;

  filter_class->set_info = gst_line_21_decoder_set_info;
  filter_class->transform_frame_ip = gst_line_21_decoder_transform_ip;

  GST_DEBUG_CATEGORY_INIT (gst_line_21_decoder_debug, "line21decoder",
      0, "Line 21 CC Decoder");
  vbi_initialize_gst_debug ();
}

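/* Drop any cached stream information. Called from init and when the
 * element goes back to READY. */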
static void
gst_line_21_decoder_reset (GstLine21Decoder * self)
{
  self->line21_offset = -1;
  self->max_line_probes = 40;
  if (self->info) {
    gst_video_info_free (self->info);
    self->info = NULL;
  }
  g_free (self->converted_lines);
  self->converted_lines = NULL;
}

static void
gst_line_21_decoder_init (GstLine21Decoder * filter)
{
  gst_line_21_decoder_reset (filter);
}

static void
gst_line_21_decoder_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  /* GstLine21Decoder *filter = GST_LINE21DECODER (object); */

  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_line_21_decoder_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  /* GstLine21Decoder *filter = GST_LINE21DECODER (object); */

  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static vbi_pixfmt
vbi_pixfmt_from_gst_video_format (GstVideoFormat format,
    gboolean * convert_v210)
{
  *convert_v210 = FALSE;

  switch (format) {
    case GST_VIDEO_FORMAT_I420:
      return VBI_PIXFMT_YUV420;
    case GST_VIDEO_FORMAT_YUY2:
      return VBI_PIXFMT_YUYV;
    case GST_VIDEO_FORMAT_YVYU:
      return VBI_PIXFMT_YVYU;
    case GST_VIDEO_FORMAT_UYVY:
      return VBI_PIXFMT_UYVY;
    case GST_VIDEO_FORMAT_VYUY:
      return VBI_PIXFMT_VYUY;
      /* for v210 we'll convert it to I420 luma */
    case GST_VIDEO_FORMAT_v210:
      *convert_v210 = TRUE;
      return VBI_PIXFMT_YUV420;
      /* All the other formats are not really bullet-proof. Force conversion */
    default:
      return 0;
  }
}

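/* Negotiation callback: configure the zvbi raw decoder for the incoming
 * caps. Streams that are not 720 pixels wide, or not in a format we can
 * feed to zvbi (directly or via v210 luma conversion), are simply passed
 * through untouched. */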
static gboolean
gst_line_21_decoder_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info,
    GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstLine21Decoder *self = (GstLine21Decoder *) filter;
  vbi_pixfmt fmt =
      vbi_pixfmt_from_gst_video_format (GST_VIDEO_INFO_FORMAT (in_info),
      &self->convert_v210);

  GST_DEBUG_OBJECT (filter, "caps %" GST_PTR_FORMAT, incaps);
  GST_DEBUG_OBJECT (filter, "plane_stride:%u , comp_stride:%u , pstride:%u",
      GST_VIDEO_INFO_PLANE_STRIDE (in_info, 0),
      GST_VIDEO_INFO_COMP_STRIDE (in_info, 0),
      GST_VIDEO_INFO_COMP_PSTRIDE (in_info, 0));
  GST_DEBUG_OBJECT (filter, "#planes : %d #components : %d",
      GST_VIDEO_INFO_N_PLANES (in_info), GST_VIDEO_INFO_N_COMPONENTS (in_info));

  if (GST_VIDEO_INFO_WIDTH (in_info) != 720) {
    GST_DEBUG_OBJECT (filter, "Only 720 pixel wide formats are supported");
    self->compatible_format = FALSE;
    return TRUE;
  }

  if (fmt == 0) {
    if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_FORMAT_v210) {
      GST_DEBUG_OBJECT (filter,
          "Format not supported natively, adding conversion to I420 luma");
      self->compatible_format = TRUE;
      self->convert_v210 = TRUE;
    } else {
      GST_DEBUG_OBJECT (filter, "Unsupported format");
      self->compatible_format = FALSE;
    }
    return TRUE;
  }

  if (GST_VIDEO_INFO_WIDTH (in_info) == 720
      && GST_VIDEO_INFO_HEIGHT (in_info) >= 200) {
    GST_DEBUG_OBJECT (filter, "Compatible size!");
    GST_DEBUG_OBJECT (filter,
        "Compatible format plane_stride:%u , comp_stride:%u , pstride:%u",
        GST_VIDEO_INFO_PLANE_STRIDE (in_info, 0),
        GST_VIDEO_INFO_COMP_STRIDE (in_info, 0),
        GST_VIDEO_INFO_COMP_PSTRIDE (in_info, 0));
    self->compatible_format = TRUE;
    if (self->convert_v210) {
      self->info = gst_video_info_new ();
      gst_video_info_set_format (self->info, GST_VIDEO_FORMAT_I420,
          GST_VIDEO_INFO_WIDTH (in_info), GST_VIDEO_INFO_HEIGHT (in_info));
      /* Allocate space for two *I420* Y lines (with stride) */
      self->converted_lines =
          g_malloc0 (2 * GST_VIDEO_INFO_COMP_STRIDE (self->info, 0));
    } else
      self->info = gst_video_info_copy (in_info);

    /* Initialize the decoder */
    vbi_raw_decoder_init (&self->zvbi_decoder);
    /* We either deal with PAL (625 lines) or NTSC (525 lines) */
    self->zvbi_decoder.scanning = 525;
    /* The pixel format. Quite a few formats are handled by zvbi, but
     * some are not and require conversion (or cheating) */
    self->zvbi_decoder.sampling_format = fmt;
    /* Sampling rate. For BT.601 it's 13.5MHz */
    self->zvbi_decoder.sampling_rate = 13.5e6;  /* Hz (i.e. BT.601) */
    /* Stride */
    self->zvbi_decoder.bytes_per_line =
        GST_VIDEO_INFO_COMP_STRIDE (self->info, 0);
    /* Sampling starts 9.7 µs from the front edge of the
     * hor. sync pulse. You may have to adjust this.
     * NOTE : This is actually ignored in the code ... */
    self->zvbi_decoder.offset = 9.7e-6 * 13.5e6;

    /* The following values indicate what we are feeding to zvbi.
     * By setting start[0] = 21, we are telling zvbi that the very
     * beginning of the data we are feeding to it corresponds to
     * line 21 (which is where CC1/CC3 is located).
     *
     * Then by specifying count[0] = 1, we are telling it to only
     * scan 1 line from the beginning of the data.
     *
     * It is more efficient and flexible to do it this way, since
     * we can then control what we are feeding it (i.e. *we* will
     * figure out the offset to line 21, which might or might not
     * be the beginning of the buffer data, and feed data from
     * there). This would also allow us to have a "scanning" mode
     * where we repeatedly provide it with pairs of lines until it
     * finds something. */
    self->zvbi_decoder.start[0] = 21;
    self->zvbi_decoder.count[0] = 1;

    /* Second field. */
    self->zvbi_decoder.start[1] = 284;
    self->zvbi_decoder.count[1] = 1;

    /* FIXME : Adjust according to the info.interlace_mode ! */
    self->zvbi_decoder.interlaced = TRUE;

    /* synchronous is essentially top-field-first.
     * WARNING : zvbi doesn't support bottom-field-first. */
    self->zvbi_decoder.synchronous = TRUE;

    /* Specify the services we want. Adjust based on whether we
     * have PAL or NTSC */
    vbi_raw_decoder_add_services (&self->zvbi_decoder,
        VBI_SLICED_CAPTION_525, /* strict */ 0);

  } else
    self->compatible_format = FALSE;

  return TRUE;
}

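/* We attach GstVideoCaptionMeta to the buffer in transform_frame_ip, so
 * for compatible streams make sure the buffer we pass along is writable;
 * otherwise fall back to the default base-transform behaviour. */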
static GstFlowReturn
gst_line_21_decoder_prepare_output_buffer (GstBaseTransform * trans,
    GstBuffer * in, GstBuffer ** out)
{
  GstLine21Decoder *self = (GstLine21Decoder *) trans;

  GST_DEBUG_OBJECT (trans, "compatible_format:%d", self->compatible_format);
  if (self->compatible_format) {
    /* Make the output buffer writable */
    *out = gst_buffer_make_writable (in);
    return GST_FLOW_OK;
  }

  return
      GST_BASE_TRANSFORM_CLASS
      (gst_line_21_decoder_parent_class)->prepare_output_buffer (trans, in,
      out);
}

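/* v210 packs 6 pixels into 16 bytes: four 32-bit little-endian words, each
 * holding three 10-bit components at bit offsets 0, 10 and 20. The luma
 * samples of a group sit at bits 10 (word 0), 0 and 20 (word 1), 10
 * (word 2), and 0 and 20 (word 3); keeping only their 8 most significant
 * bits yields the 8-bit Y line zvbi expects. */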
static void
convert_line_v210_luma (const guint8 * orig, guint8 * dest, guint width)
{
  guint i;
  guint32 a, b, c, d;
  guint8 *y = dest;

  for (i = 0; i < width - 5; i += 6) {
    a = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 0);
    b = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 4);
    c = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 8);
    d = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 12);

    *y++ = (a >> 12) & 0xff;
    *y++ = (b >> 2) & 0xff;

    *y++ = (b >> 22) & 0xff;
    *y++ = (c >> 12) & 0xff;

    *y++ = (d >> 2) & 0xff;
    *y++ = (d >> 22) & 0xff;
  }
}

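/* Returns a pointer to the raw data of two consecutive video lines starting
 * at the given line (a field pair, matching the interlaced zvbi setup). For
 * natively supported formats this points straight into the frame; for v210
 * the luma of both lines is first converted to 8-bit into
 * self->converted_lines. */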
static guint8 *
get_video_data (GstLine21Decoder * self, GstVideoFrame * frame, gint line)
{
  guint8 *data = self->converted_lines;
  guint8 *v210;

  if (!self->convert_v210)
    return (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (frame,
        0) + line * GST_VIDEO_INFO_COMP_STRIDE (self->info, 0);

  v210 = (guint8 *)
      GST_VIDEO_FRAME_PLANE_DATA (frame,
      0) + line * GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* Convert the v210 luma of this line and the next one to 8-bit */
  convert_line_v210_luma (v210, data, GST_VIDEO_FRAME_WIDTH (frame));
  v210 += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  convert_line_v210_luma (v210, data + GST_VIDEO_INFO_COMP_STRIDE (self->info,
          0), GST_VIDEO_FRAME_WIDTH (frame));
  GST_MEMDUMP ("converted", self->converted_lines, 64);
  return self->converted_lines;
}

/* Call this to scan for CC.
 * Returns TRUE if it was found and set, else FALSE */
static gboolean
gst_line_21_decoder_scan (GstLine21Decoder * self, GstVideoFrame * frame)
{
  gint i;
  vbi_sliced sliced[52];
  gboolean found = FALSE;
  guint8 *data;

  GST_DEBUG_OBJECT (self, "Starting probing. max_line_probes:%d",
      self->max_line_probes);

  i = self->line21_offset;
  if (i == -1) {
    GST_DEBUG_OBJECT (self, "Scanning from the beginning");
    i = 0;
  }

  for (; i < self->max_line_probes && i < GST_VIDEO_FRAME_HEIGHT (frame); i++) {
    gint n_lines;
    data = get_video_data (self, frame, i);
    /* Scan until we get n_lines == 2 */
    n_lines = vbi_raw_decode (&self->zvbi_decoder, data, sliced);
    GST_DEBUG_OBJECT (self, "i:%d n_lines:%d", i, n_lines);
    if (n_lines == 2) {
      GST_DEBUG_OBJECT (self, "Found 2 CC lines at offset %d", i);
      self->line21_offset = i;
      found = TRUE;
      break;
    } else if (i == self->line21_offset) {
      /* Otherwise if this was the previously probed line offset,
       * reset and start searching again from the beginning */
      i = -1;
      self->line21_offset = -1;
    }
  }

  if (!found) {
    GST_DEBUG_OBJECT (self, "No CC found");
    self->line21_offset = -1;
  } else {
    guint base_line1 = 0, base_line2 = 0;
    /* CEA-608 S334-1A layout: for each field, one byte carrying the field
     * flag (0x80 for the first field) and the 5-bit line offset, followed
     * by the two CC data bytes */
    guint8 ccdata[6] = { 0x80, 0x80, 0x80, 0x00, 0x80, 0x80 };

    if (GST_VIDEO_FRAME_HEIGHT (frame) == 525) {
      base_line1 = 9;
      base_line2 = 272;
    } else if (GST_VIDEO_FRAME_HEIGHT (frame) == 625) {
      base_line1 = 5;
      base_line2 = 318;
    }

    ccdata[0] |= (base_line1 < i ? i - base_line1 : 0) & 0x1f;
    ccdata[1] = sliced[0].data[0];
    ccdata[2] = sliced[0].data[1];
    ccdata[3] |= (base_line2 < i ? i - base_line2 : 0) & 0x1f;
    ccdata[4] = sliced[1].data[0];
    ccdata[5] = sliced[1].data[1];
    gst_buffer_add_video_caption_meta (frame->buffer,
        GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A, ccdata, 6);
    GST_TRACE_OBJECT (self,
        "Got CC 0x%02x 0x%02x / 0x%02x 0x%02x '%c%c / %c%c'", ccdata[1],
        ccdata[2], ccdata[4], ccdata[5],
        g_ascii_isprint (ccdata[1] & 0x7f) ? ccdata[1] & 0x7f : '.',
        g_ascii_isprint (ccdata[2] & 0x7f) ? ccdata[2] & 0x7f : '.',
        g_ascii_isprint (ccdata[4] & 0x7f) ? ccdata[4] & 0x7f : '.',
        g_ascii_isprint (ccdata[5] & 0x7f) ? ccdata[5] & 0x7f : '.');
  }

  return found;
}

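/* In-place transform: probe the frame for line 21 data and, if found,
 * attach the decoded caption pair to the buffer as meta. */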
static GstFlowReturn
gst_line_21_decoder_transform_ip (GstVideoFilter * filter,
    GstVideoFrame * frame)
{
  GstLine21Decoder *self = (GstLine21Decoder *) filter;

  if (!self->compatible_format)
    return GST_FLOW_OK;

  gst_line_21_decoder_scan (self, frame);
  return GST_FLOW_OK;
}

static GstStateChangeReturn
gst_line_21_decoder_change_state (GstElement * element,
    GstStateChange transition)
{
  GstStateChangeReturn ret;
  GstLine21Decoder *filter = GST_LINE21DECODER (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret != GST_STATE_CHANGE_SUCCESS)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_line_21_decoder_reset (filter);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
    default:
      break;
  }

  return ret;
}

static void
gst_line_21_decoder_finalize (GObject * object)
{
  G_OBJECT_CLASS (parent_class)->finalize (object);
}