Merge branch 'master' into 0.11

Wim Taymans 2011-08-04 09:40:46 +02:00
commit 2ba07782cd
20 changed files with 815 additions and 398 deletions


@ -46,15 +46,6 @@ GST_DEBUG_CATEGORY_EXTERN (dirac_debug);
typedef struct _GstDiracEnc GstDiracEnc;
typedef struct _GstDiracEncClass GstDiracEncClass;
typedef enum
{
GST_DIRAC_ENC_OUTPUT_OGG,
GST_DIRAC_ENC_OUTPUT_QUICKTIME,
GST_DIRAC_ENC_OUTPUT_AVI,
GST_DIRAC_ENC_OUTPUT_MPEG_TS,
GST_DIRAC_ENC_OUTPUT_MP4
} GstDiracEncOutputType;
struct _GstDiracEnc
{
GstBaseVideoEncoder base_encoder;
@ -91,7 +82,6 @@ struct _GstDiracEnc
dirac_encoder_t *encoder;
dirac_sourceparams_t *src_params;
GstBuffer *seq_header_buffer;
GstDiracEncOutputType output_format;
guint64 last_granulepos;
guint64 granule_offset;
@ -362,18 +352,6 @@ gst_dirac_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_has_name (structure, "video/x-dirac")) {
dirac_enc->output_format = GST_DIRAC_ENC_OUTPUT_OGG;
} else if (gst_structure_has_name (structure, "video/x-qt-part")) {
dirac_enc->output_format = GST_DIRAC_ENC_OUTPUT_QUICKTIME;
} else if (gst_structure_has_name (structure, "video/x-avi-part")) {
dirac_enc->output_format = GST_DIRAC_ENC_OUTPUT_AVI;
} else if (gst_structure_has_name (structure, "video/x-mp4-part")) {
dirac_enc->output_format = GST_DIRAC_ENC_OUTPUT_MP4;
} else {
return FALSE;
}
gst_caps_unref (caps);
gst_base_video_encoder_set_latency_fields (base_video_encoder, 2 * 2);
@ -1263,75 +1241,6 @@ gst_dirac_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
}
static GstFlowReturn
gst_dirac_enc_shape_output_quicktime (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstBuffer *buf = frame->src_buffer;
const GstVideoState *state;
state = gst_base_video_encoder_get_state (base_video_encoder);
GST_BUFFER_TIMESTAMP (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC(base_video_encoder)->segment, frame->presentation_frame_number);
GST_BUFFER_DURATION (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC(base_video_encoder)->segment,
frame->presentation_frame_number + 1) - GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC(base_video_encoder)->segment,
frame->system_frame_number);
GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
if (frame->is_sync_point &&
frame->presentation_frame_number == frame->system_frame_number) {
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
} else {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
}
gst_buffer_set_caps (buf,
GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)));
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
}
static GstFlowReturn
gst_dirac_enc_shape_output_mp4 (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstBuffer *buf = frame->src_buffer;
const GstVideoState *state;
state = gst_base_video_encoder_get_state (base_video_encoder);
GST_BUFFER_TIMESTAMP (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC(base_video_encoder)->segment,
frame->presentation_frame_number);
GST_BUFFER_DURATION (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC(base_video_encoder)->segment,
frame->presentation_frame_number + 1) - GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC(base_video_encoder)->segment,
frame->decode_frame_number);
GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC(base_video_encoder)->segment,
frame->system_frame_number);
if (frame->is_sync_point &&
frame->presentation_frame_number == frame->system_frame_number) {
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
} else {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
}
gst_buffer_set_caps (buf,
GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)));
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
}
static GstFlowReturn
gst_dirac_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
@ -1340,17 +1249,7 @@ gst_dirac_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
dirac_enc = GST_DIRAC_ENC (base_video_encoder);
switch (dirac_enc->output_format) {
case GST_DIRAC_ENC_OUTPUT_OGG:
return gst_dirac_enc_shape_output_ogg (base_video_encoder, frame);
case GST_DIRAC_ENC_OUTPUT_QUICKTIME:
return gst_dirac_enc_shape_output_quicktime (base_video_encoder, frame);
case GST_DIRAC_ENC_OUTPUT_MP4:
return gst_dirac_enc_shape_output_mp4 (base_video_encoder, frame);
default:
g_assert_not_reached ();
break;
}
gst_dirac_enc_shape_output_ogg (base_video_encoder, frame);
return GST_FLOW_ERROR;
}
@ -1398,54 +1297,14 @@ gst_dirac_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
state = gst_base_video_encoder_get_state (base_video_encoder);
if (dirac_enc->output_format == GST_DIRAC_ENC_OUTPUT_OGG) {
caps = gst_caps_new_simple ("video/x-dirac",
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d,
"streamheader", GST_TYPE_BUFFER, dirac_enc->codec_data, NULL);
} else if (dirac_enc->output_format == GST_DIRAC_ENC_OUTPUT_QUICKTIME) {
caps = gst_caps_new_simple ("video/x-qt-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (dirac_enc->output_format == GST_DIRAC_ENC_OUTPUT_AVI) {
caps = gst_caps_new_simple ("video/x-avi-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (dirac_enc->output_format == GST_DIRAC_ENC_OUTPUT_MPEG_TS) {
caps = gst_caps_new_simple ("video/x-mpegts-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (dirac_enc->output_format == GST_DIRAC_ENC_OUTPUT_MP4) {
caps = gst_caps_new_simple ("video/x-mp4-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else {
g_assert_not_reached ();
}
caps = gst_caps_new_simple ("video/x-dirac",
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d,
"streamheader", GST_TYPE_BUFFER, dirac_enc->codec_data, NULL);
return caps;
}


@ -91,6 +91,12 @@
"rate = (int) {" SAMPLE_RATES "}, " \
"stream-format = (string) { adts, raw }, " \
"profile = (string) { main, lc }"
enum
{
VBR = 1,
ABR
};
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
@ -104,7 +110,9 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
enum
{
PROP_0,
PROP_QUALITY,
PROP_BITRATE,
PROP_RATE_CONTROL,
PROP_PROFILE,
PROP_TNS,
PROP_MIDSIDE,
@ -137,7 +145,9 @@ GST_DEBUG_CATEGORY_STATIC (faac_debug);
#define GST_CAT_DEFAULT faac_debug
#define FAAC_DEFAULT_OUTPUTFORMAT 0 /* RAW */
#define FAAC_DEFAULT_QUALITY 100
#define FAAC_DEFAULT_BITRATE 128 * 1000
#define FAAC_DEFAULT_RATE_CONTROL VBR
#define FAAC_DEFAULT_TNS FALSE
#define FAAC_DEFAULT_MIDSIDE TRUE
#define FAAC_DEFAULT_SHORTCTL SHORTCTL_NORMAL
@ -193,6 +203,26 @@ gst_faac_base_init (GstFaacClass * klass)
GST_DEBUG_CATEGORY_INIT (faac_debug, "faac", 0, "AAC encoding");
}
#define GST_TYPE_FAAC_RATE_CONTROL (gst_faac_brtype_get_type ())
static GType
gst_faac_brtype_get_type (void)
{
static GType gst_faac_brtype_type = 0;
if (!gst_faac_brtype_type) {
static GEnumValue gst_faac_brtype[] = {
{VBR, "VBR", "VBR encoding"},
{ABR, "ABR", "ABR encoding"},
{0, NULL, NULL},
};
gst_faac_brtype_type = g_enum_register_static ("GstFaacBrtype",
gst_faac_brtype);
}
return gst_faac_brtype_type;
}
#define GST_TYPE_FAAC_SHORTCTL (gst_faac_shortctl_get_type ())
static GType
gst_faac_shortctl_get_type (void)
@ -227,10 +257,18 @@ gst_faac_class_init (GstFaacClass * klass)
gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_faac_finalize);
/* properties */
g_object_class_install_property (gobject_class, PROP_QUALITY,
g_param_spec_int ("quality", "Quality (%)",
"Variable bitrate (VBR) quantizer quality in %", 1, 1000,
FAAC_DEFAULT_QUALITY, G_PARAM_READWRITE));
g_object_class_install_property (gobject_class, PROP_BITRATE,
g_param_spec_int ("bitrate", "Bitrate (bps)", "Bitrate in bits/sec",
8 * 1000, 320 * 1000, FAAC_DEFAULT_BITRATE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_param_spec_int ("bitrate", "Bitrate (bps)",
"Average bitrate (ABR) in bits/sec", 8 * 1000, 320 * 1000,
FAAC_DEFAULT_BITRATE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_RATE_CONTROL,
g_param_spec_enum ("rate-control", "Rate Control (ABR/VBR)",
"Encoding bitrate type (VBR/ABR)", GST_TYPE_FAAC_RATE_CONTROL,
FAAC_DEFAULT_RATE_CONTROL, G_PARAM_READWRITE));
g_object_class_install_property (gobject_class, PROP_TNS,
g_param_spec_boolean ("tns", "TNS", "Use temporal noise shaping",
FAAC_DEFAULT_TNS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
@ -271,7 +309,9 @@ gst_faac_init (GstFaac * faac)
faac->mpegversion = 4;
/* default properties */
faac->quality = FAAC_DEFAULT_QUALITY;
faac->bitrate = FAAC_DEFAULT_BITRATE;
faac->brtype = FAAC_DEFAULT_RATE_CONTROL;
faac->shortctl = FAAC_DEFAULT_SHORTCTL;
faac->outputformat = FAAC_DEFAULT_OUTPUTFORMAT;
faac->tns = FAAC_DEFAULT_TNS;
@ -557,7 +597,13 @@ gst_faac_configure_source_pad (GstFaac * faac)
conf->allowMidside = faac->midside;
conf->useLfe = 0;
conf->useTns = faac->tns;
conf->bitRate = faac->bitrate / faac->channels;
if (faac->brtype == VBR) {
conf->quantqual = faac->quality;
} else if (faac->brtype == ABR) {
conf->bitRate = faac->bitrate / faac->channels;
}
conf->inputFormat = faac->format;
conf->outputFormat = faac->outputformat;
conf->shortctl = faac->shortctl;
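With the rate-control switch in place, only one of the two knobs is consulted when the source pad is configured: quality in VBR mode, bitrate in ABR mode. A minimal application-side sketch, assuming a GStreamer 0.10 application; the element instance and values are illustrative, not part of this commit:
/* Sketch: selecting faac's rate-control mode from application code.
 * 1 and 2 are the numeric values of the VBR/ABR enum added above. */
GstElement *enc = gst_element_factory_make ("faac", "aac-encoder");
/* VBR: only the quantizer quality is used */
g_object_set (enc, "rate-control", 1 /* VBR */, "quality", 300, NULL);
/* ABR: only the average bitrate is used */
g_object_set (enc, "rate-control", 2 /* ABR */, "bitrate", 128000, NULL);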
@ -879,9 +925,15 @@ gst_faac_set_property (GObject * object,
GST_OBJECT_LOCK (faac);
switch (prop_id) {
case PROP_QUALITY:
faac->quality = g_value_get_int (value);
break;
case PROP_BITRATE:
faac->bitrate = g_value_get_int (value);
break;
case PROP_RATE_CONTROL:
faac->brtype = g_value_get_enum (value);
break;
case PROP_TNS:
faac->tns = g_value_get_boolean (value);
break;
@ -908,9 +960,15 @@ gst_faac_get_property (GObject * object,
GST_OBJECT_LOCK (faac);
switch (prop_id) {
case PROP_QUALITY:
g_value_set_int (value, faac->quality);
break;
case PROP_BITRATE:
g_value_set_int (value, faac->bitrate);
break;
case PROP_RATE_CONTROL:
g_value_set_enum (value, faac->brtype);
break;
case PROP_TNS:
g_value_set_boolean (value, faac->tns);
break;


@ -52,7 +52,9 @@ struct _GstFaac {
channels,
format,
bps,
quality,
bitrate,
brtype,
profile,
mpegversion,
shortctl,


@ -407,6 +407,9 @@ gst_kate_enc_create_buffer (GstKateEnc * ke, kate_packet * kp,
{
GstBuffer *buffer;
g_return_val_if_fail (kp != NULL, NULL);
g_return_val_if_fail (kp->data != NULL, NULL);
buffer = gst_buffer_try_new_and_alloc (kp->nbytes);
if (G_UNLIKELY (!buffer)) {
GST_WARNING_OBJECT (ke, "Failed to allocate buffer for %u bytes",
@ -933,7 +936,7 @@ static GstFlowReturn
gst_kate_enc_chain_text (GstKateEnc * ke, GstBuffer * buf,
const char *mime_type)
{
kate_packet kp;
kate_packet kp = { 0 };
int ret = 0;
GstFlowReturn rflow;
GstClockTime start = GST_BUFFER_TIMESTAMP (buf);


@ -50,15 +50,6 @@ GST_DEBUG_CATEGORY_EXTERN (schro_debug);
typedef struct _GstSchroEnc GstSchroEnc;
typedef struct _GstSchroEncClass GstSchroEncClass;
typedef enum
{
GST_SCHRO_ENC_OUTPUT_OGG,
GST_SCHRO_ENC_OUTPUT_QUICKTIME,
GST_SCHRO_ENC_OUTPUT_AVI,
GST_SCHRO_ENC_OUTPUT_MPEG_TS,
GST_SCHRO_ENC_OUTPUT_MP4
} GstSchroEncOutputType;
struct _GstSchroEnc
{
GstBaseVideoEncoder base_encoder;
@ -66,9 +57,6 @@ struct _GstSchroEnc
GstPad *sinkpad;
GstPad *srcpad;
/* video properties */
GstSchroEncOutputType output_format;
/* state */
SchroEncoder *encoder;
SchroVideoFormat *video_format;
@ -274,19 +262,6 @@ gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_has_name (structure, "video/x-dirac")) {
schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_OGG;
} else if (gst_structure_has_name (structure, "video/x-qt-part")) {
schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_QUICKTIME;
} else if (gst_structure_has_name (structure, "video/x-avi-part")) {
schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_AVI;
} else if (gst_structure_has_name (structure, "video/x-mp4-part")) {
schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_MP4;
} else {
gst_caps_unref (caps);
return FALSE;
}
gst_caps_unref (caps);
gst_base_video_encoder_set_latency_fields (base_video_encoder,
@ -519,88 +494,47 @@ gst_schro_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
state = gst_base_video_encoder_get_state (base_video_encoder);
if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_OGG) {
caps = gst_caps_new_simple ("video/x-dirac",
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
caps = gst_caps_new_simple ("video/x-dirac",
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
GST_BUFFER_FLAG_SET (schro_enc->seq_header_buffer, GST_BUFFER_FLAG_IN_CAPS);
GST_BUFFER_FLAG_SET (schro_enc->seq_header_buffer, GST_BUFFER_FLAG_IN_CAPS);
{
GValue array = { 0 };
GValue value = { 0 };
GstBuffer *buf;
int size;
{
GValue array = { 0 };
GValue value = { 0 };
GstBuffer *buf;
int size;
g_value_init (&array, GST_TYPE_ARRAY);
g_value_init (&value, GST_TYPE_BUFFER);
size = GST_BUFFER_SIZE (schro_enc->seq_header_buffer);
buf = gst_buffer_new_and_alloc (size + SCHRO_PARSE_HEADER_SIZE);
g_value_init (&array, GST_TYPE_ARRAY);
g_value_init (&value, GST_TYPE_BUFFER);
size = GST_BUFFER_SIZE (schro_enc->seq_header_buffer);
buf = gst_buffer_new_and_alloc (size + SCHRO_PARSE_HEADER_SIZE);
/* ogg(mux) expects the header buffers to have 0 timestamps -
set OFFSET and OFFSET_END accordingly */
GST_BUFFER_OFFSET (buf) = 0;
GST_BUFFER_OFFSET_END (buf) = 0;
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
/* ogg(mux) expects the header buffers to have 0 timestamps -
set OFFSET and OFFSET_END accordingly */
GST_BUFFER_OFFSET (buf) = 0;
GST_BUFFER_OFFSET_END (buf) = 0;
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
memcpy (GST_BUFFER_DATA (buf),
GST_BUFFER_DATA (schro_enc->seq_header_buffer), size);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 0, 0x42424344);
GST_WRITE_UINT8 (GST_BUFFER_DATA (buf) + size + 4,
SCHRO_PARSE_CODE_END_OF_SEQUENCE);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 5, 0);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 9, size);
gst_value_set_buffer (&value, buf);
gst_buffer_unref (buf);
gst_value_array_append_value (&array, &value);
gst_structure_set_value (gst_caps_get_structure (caps, 0),
"streamheader", &array);
g_value_unset (&value);
g_value_unset (&array);
}
} else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_QUICKTIME) {
caps = gst_caps_new_simple ("video/x-qt-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_AVI) {
caps = gst_caps_new_simple ("video/x-avi-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_MPEG_TS) {
caps = gst_caps_new_simple ("video/x-mpegts-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_MP4) {
caps = gst_caps_new_simple ("video/x-mp4-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else {
g_assert_not_reached ();
caps = NULL;
memcpy (GST_BUFFER_DATA (buf),
GST_BUFFER_DATA (schro_enc->seq_header_buffer), size);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 0, 0x42424344);
GST_WRITE_UINT8 (GST_BUFFER_DATA (buf) + size + 4,
SCHRO_PARSE_CODE_END_OF_SEQUENCE);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 5, 0);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 9, size);
gst_value_set_buffer (&value, buf);
gst_buffer_unref (buf);
gst_value_array_append_value (&array, &value);
gst_structure_set_value (gst_caps_get_structure (caps, 0),
"streamheader", &array);
g_value_unset (&value);
g_value_unset (&array);
}
return caps;
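For reference, the SCHRO_PARSE_HEADER_SIZE (13) extra bytes appended to the streamheader above form a Dirac parse-info header describing an end-of-sequence unit; the four GST_WRITE_* calls lay it out as follows:
/* offset from end of sequence header   field                    value
 *  +0  (4 bytes, BE)   parse-info prefix       0x42424344 ("BBCD")
 *  +4  (1 byte)        parse code              SCHRO_PARSE_CODE_END_OF_SEQUENCE
 *  +5  (4 bytes, BE)   next parse offset       0 (nothing follows)
 *  +9  (4 bytes, BE)   previous parse offset   size of the sequence header */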
@ -610,7 +544,7 @@ gst_schro_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
static GstFlowReturn
gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
gst_schro_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstSchroEnc *schro_enc;
@ -651,99 +585,6 @@ gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
}
static GstFlowReturn
gst_schro_enc_shape_output_quicktime (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstBuffer *buf = frame->src_buffer;
const GstVideoState *state;
state = gst_base_video_encoder_get_state (base_video_encoder);
GST_BUFFER_TIMESTAMP (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
frame->presentation_frame_number);
GST_BUFFER_DURATION (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
frame->presentation_frame_number + 1) - GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
frame->system_frame_number);
GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
if (frame->is_sync_point &&
frame->presentation_frame_number == frame->system_frame_number) {
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
} else {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
}
gst_buffer_set_caps (buf,
GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)));
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
}
static GstFlowReturn
gst_schro_enc_shape_output_mp4 (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstBuffer *buf = frame->src_buffer;
const GstVideoState *state;
state = gst_base_video_encoder_get_state (base_video_encoder);
GST_BUFFER_TIMESTAMP (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
frame->presentation_frame_number);
GST_BUFFER_DURATION (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
frame->presentation_frame_number + 1) - GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
frame->decode_frame_number);
GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
frame->system_frame_number);
if (frame->is_sync_point &&
frame->presentation_frame_number == frame->system_frame_number) {
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
} else {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
}
gst_buffer_set_caps (buf,
GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)));
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
}
static GstFlowReturn
gst_schro_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstSchroEnc *schro_enc;
schro_enc = GST_SCHRO_ENC (base_video_encoder);
switch (schro_enc->output_format) {
case GST_SCHRO_ENC_OUTPUT_OGG:
return gst_schro_enc_shape_output_ogg (base_video_encoder, frame);
case GST_SCHRO_ENC_OUTPUT_QUICKTIME:
return gst_schro_enc_shape_output_quicktime (base_video_encoder, frame);
case GST_SCHRO_ENC_OUTPUT_MP4:
return gst_schro_enc_shape_output_mp4 (base_video_encoder, frame);
default:
g_assert_not_reached ();
break;
}
return GST_FLOW_ERROR;
}
static GstFlowReturn
gst_schro_enc_process (GstSchroEnc * schro_enc)
{


@ -85,6 +85,8 @@ gst_vp8_enc_coder_hook_free (GstVP8EncCoderHook * hook)
#define DEFAULT_BITRATE 0
#define DEFAULT_MODE VPX_VBR
#define DEFAULT_MINSECTION_PCT 5
#define DEFAULT_MAXSECTION_PCT 800
#define DEFAULT_MIN_QUANTIZER 0
#define DEFAULT_MAX_QUANTIZER 63
#define DEFAULT_QUALITY 5
@ -94,14 +96,25 @@ gst_vp8_enc_coder_hook_free (GstVP8EncCoderHook * hook)
#define DEFAULT_SPEED 0
#define DEFAULT_THREADS 1
#define DEFAULT_MULTIPASS_MODE VPX_RC_ONE_PASS
#define DEFAULT_MULTIPASS_CACHE_FILE NULL
#define DEFAULT_MULTIPASS_CACHE_FILE "multipass.cache"
#define DEFAULT_AUTO_ALT_REF_FRAMES FALSE
#define DEFAULT_LAG_IN_FRAMES 0
#define DEFAULT_SHARPNESS 0
#define DEFAULT_NOISE_SENSITIVITY 0
#define DEFAULT_TUNE VP8_TUNE_PSNR
#define DEFAULT_STATIC_THRESHOLD 0
#define DEFAULT_DROP_FRAME 0
#define DEFAULT_RESIZE_ALLOWED TRUE
#define DEFAULT_TOKEN_PARTS 0
enum
{
PROP_0,
PROP_BITRATE,
PROP_MODE,
PROP_MINSECTION_PCT,
PROP_MAXSECTION_PCT,
PROP_MIN_QUANTIZER,
PROP_MAX_QUANTIZER,
PROP_QUALITY,
@ -112,7 +125,15 @@ enum
PROP_THREADS,
PROP_MULTIPASS_MODE,
PROP_MULTIPASS_CACHE_FILE,
PROP_AUTO_ALT_REF_FRAMES
PROP_AUTO_ALT_REF_FRAMES,
PROP_LAG_IN_FRAMES,
PROP_SHARPNESS,
PROP_NOISE_SENSITIVITY,
PROP_TUNE,
PROP_STATIC_THRESHOLD,
PROP_DROP_FRAME,
PROP_RESIZE_ALLOWED,
PROP_TOKEN_PARTS
};
#define GST_VP8_ENC_MODE_TYPE (gst_vp8_enc_mode_get_type())
@ -160,6 +181,28 @@ gst_vp8_enc_multipass_mode_get_type (void)
return id;
}
#define GST_VP8_ENC_TUNE_TYPE (gst_vp8_enc_tune_get_type())
static GType
gst_vp8_enc_tune_get_type (void)
{
static const GEnumValue values[] = {
{VP8_TUNE_PSNR, "Tune for PSNR", "psnr"},
{VP8_TUNE_SSIM, "Tune for SSIM", "ssim"},
{0, NULL, NULL}
};
static volatile GType id = 0;
if (g_once_init_enter ((gsize *) & id)) {
GType _id;
_id = g_enum_register_static ("GstVP8EncTune", values);
g_once_init_leave ((gsize *) & id, _id);
}
return id;
}
static void gst_vp8_enc_finalize (GObject * object);
static void gst_vp8_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
@ -262,6 +305,20 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
GST_VP8_ENC_MODE_TYPE, DEFAULT_MODE,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_MINSECTION_PCT,
g_param_spec_uint ("minsection-pct",
"minimum percentage allocation per section",
"The numbers represent a percentage of the average allocation per section (frame)",
0, 20, DEFAULT_MINSECTION_PCT,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_MAXSECTION_PCT,
g_param_spec_uint ("maxsection-pct",
"maximum percentage allocation per section",
"The numbers represent a percentage of the average allocation per section (frame)",
200, 800, DEFAULT_MAXSECTION_PCT,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_MIN_QUANTIZER,
g_param_spec_int ("min-quantizer", "Minimum quantizer",
"Minimum (best) quantizer",
@ -328,6 +385,54 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
DEFAULT_AUTO_ALT_REF_FRAMES,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_LAG_IN_FRAMES,
g_param_spec_uint ("lag-in-frames", "Max number of frames to lag",
"If set, this value allows the encoder to consume a number of input "
"frames before producing output frames.",
0, 64, DEFAULT_LAG_IN_FRAMES,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_SHARPNESS,
g_param_spec_int ("sharpness", "Sharpness",
"Sharpness",
0, 7, DEFAULT_SHARPNESS,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_NOISE_SENSITIVITY,
g_param_spec_int ("noise-sensitivity", "Noise Sensitivity",
"Noise Sensitivity",
0, 6, DEFAULT_NOISE_SENSITIVITY,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_TUNE,
g_param_spec_enum ("tune", "Tune",
"Tune",
GST_VP8_ENC_TUNE_TYPE, DEFAULT_TUNE,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_STATIC_THRESHOLD,
g_param_spec_int ("static-threshold", "Static Threshold",
"Static Threshold",
0, 1000, DEFAULT_STATIC_THRESHOLD,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_DROP_FRAME,
g_param_spec_int ("drop-frame", "Drop Frame",
"Drop Frame",
0, 100, DEFAULT_DROP_FRAME,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_RESIZE_ALLOWED,
g_param_spec_boolean ("resize-allowed", "Resize Allowed",
"Resize Allowed",
DEFAULT_RESIZE_ALLOWED,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_TOKEN_PARTS,
g_param_spec_int ("token-parts", "Token Parts",
"Token Parts",
0, 3, DEFAULT_TOKEN_PARTS,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
GST_DEBUG_CATEGORY_INIT (gst_vp8enc_debug, "vp8enc", 0, "VP8 Encoder");
}
@ -339,6 +444,8 @@ gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc, GstVP8EncClass * klass)
GST_DEBUG_OBJECT (gst_vp8_enc, "init");
gst_vp8_enc->bitrate = DEFAULT_BITRATE;
gst_vp8_enc->minsection_pct = DEFAULT_MINSECTION_PCT;
gst_vp8_enc->maxsection_pct = DEFAULT_MAXSECTION_PCT;
gst_vp8_enc->min_quantizer = DEFAULT_MIN_QUANTIZER;
gst_vp8_enc->max_quantizer = DEFAULT_MAX_QUANTIZER;
gst_vp8_enc->mode = DEFAULT_MODE;
@ -347,8 +454,9 @@ gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc, GstVP8EncClass * klass)
gst_vp8_enc->max_latency = DEFAULT_MAX_LATENCY;
gst_vp8_enc->max_keyframe_distance = DEFAULT_MAX_KEYFRAME_DISTANCE;
gst_vp8_enc->multipass_mode = DEFAULT_MULTIPASS_MODE;
gst_vp8_enc->multipass_cache_file = DEFAULT_MULTIPASS_CACHE_FILE;
gst_vp8_enc->multipass_cache_file = g_strdup (DEFAULT_MULTIPASS_CACHE_FILE);
gst_vp8_enc->auto_alt_ref_frames = DEFAULT_AUTO_ALT_REF_FRAMES;
gst_vp8_enc->lag_in_frames = DEFAULT_LAG_IN_FRAMES;
}
static void
@ -385,6 +493,12 @@ gst_vp8_enc_set_property (GObject * object, guint prop_id,
case PROP_MODE:
gst_vp8_enc->mode = g_value_get_enum (value);
break;
case PROP_MINSECTION_PCT:
gst_vp8_enc->minsection_pct = g_value_get_uint (value);
break;
case PROP_MAXSECTION_PCT:
gst_vp8_enc->maxsection_pct = g_value_get_uint (value);
break;
case PROP_MIN_QUANTIZER:
gst_vp8_enc->min_quantizer = g_value_get_int (value);
break;
@ -420,6 +534,30 @@ gst_vp8_enc_set_property (GObject * object, guint prop_id,
case PROP_AUTO_ALT_REF_FRAMES:
gst_vp8_enc->auto_alt_ref_frames = g_value_get_boolean (value);
break;
case PROP_LAG_IN_FRAMES:
gst_vp8_enc->lag_in_frames = g_value_get_uint (value);
break;
case PROP_SHARPNESS:
gst_vp8_enc->sharpness = g_value_get_int (value);
break;
case PROP_NOISE_SENSITIVITY:
gst_vp8_enc->noise_sensitivity = g_value_get_int (value);
break;
case PROP_TUNE:
gst_vp8_enc->tuning = g_value_get_enum (value);
break;
case PROP_STATIC_THRESHOLD:
gst_vp8_enc->static_threshold = g_value_get_int (value);
break;
case PROP_DROP_FRAME:
gst_vp8_enc->drop_frame = g_value_get_int (value);
break;
case PROP_RESIZE_ALLOWED:
gst_vp8_enc->resize_allowed = g_value_get_boolean (value);
break;
case PROP_TOKEN_PARTS:
gst_vp8_enc->partitions = g_value_get_int (value);
break;
default:
break;
}
@ -441,6 +579,12 @@ gst_vp8_enc_get_property (GObject * object, guint prop_id, GValue * value,
case PROP_MODE:
g_value_set_enum (value, gst_vp8_enc->mode);
break;
case PROP_MINSECTION_PCT:
g_value_set_uint (value, gst_vp8_enc->minsection_pct);
break;
case PROP_MAXSECTION_PCT:
g_value_set_uint (value, gst_vp8_enc->maxsection_pct);
break;
case PROP_MIN_QUANTIZER:
g_value_set_int (value, gst_vp8_enc->min_quantizer);
break;
@ -474,6 +618,30 @@ gst_vp8_enc_get_property (GObject * object, guint prop_id, GValue * value,
case PROP_AUTO_ALT_REF_FRAMES:
g_value_set_boolean (value, gst_vp8_enc->auto_alt_ref_frames);
break;
case PROP_LAG_IN_FRAMES:
g_value_set_uint (value, gst_vp8_enc->lag_in_frames);
break;
case PROP_SHARPNESS:
g_value_set_int (value, gst_vp8_enc->sharpness);
break;
case PROP_NOISE_SENSITIVITY:
g_value_set_int (value, gst_vp8_enc->noise_sensitivity);
break;
case PROP_TUNE:
g_value_set_enum (value, gst_vp8_enc->tuning);
break;
case PROP_STATIC_THRESHOLD:
g_value_set_int (value, gst_vp8_enc->static_threshold);
break;
case PROP_DROP_FRAME:
g_value_set_int (value, gst_vp8_enc->drop_frame);
break;
case PROP_RESIZE_ALLOWED:
g_value_set_boolean (value, gst_vp8_enc->resize_allowed);
break;
case PROP_TOKEN_PARTS:
g_value_set_int (value, gst_vp8_enc->partitions);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@ -553,6 +721,8 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
cfg.g_lag_in_frames = encoder->max_latency;
cfg.g_threads = encoder->threads;
cfg.rc_end_usage = encoder->mode;
cfg.rc_2pass_vbr_minsection_pct = encoder->minsection_pct;
cfg.rc_2pass_vbr_maxsection_pct = encoder->maxsection_pct;
/* A standalone qp-min does not make any sense: with bitrate=0 and qp-min=1
* the encoder will use only the default qp-max=63, which also gives the
* worst possible quality.
@ -567,6 +737,8 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
cfg.rc_max_quantizer = (gint) (63 - encoder->quality * 6.2);
cfg.rc_target_bitrate = encoder->bitrate;
}
cfg.rc_dropframe_thresh = encoder->drop_frame;
cfg.rc_resize_allowed = encoder->resize_allowed;
cfg.kf_mode = VPX_KF_AUTO;
cfg.kf_min_dist = 0;
@ -612,6 +784,25 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
gst_vpx_error_name (status));
}
status = vpx_codec_control (&encoder->encoder, VP8E_SET_NOISE_SENSITIVITY,
encoder->noise_sensitivity);
status = vpx_codec_control (&encoder->encoder, VP8E_SET_SHARPNESS,
encoder->sharpness);
status = vpx_codec_control (&encoder->encoder, VP8E_SET_STATIC_THRESHOLD,
encoder->static_threshold);
status = vpx_codec_control (&encoder->encoder, VP8E_SET_TOKEN_PARTITIONS,
encoder->partitions);
#if 0
status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_MAXFRAMES,
encoder->arnr_maxframes);
status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_STRENGTH,
encoder->arnr_strength);
status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_TYPE,
encoder->arnr_type);
#endif
status = vpx_codec_control (&encoder->encoder, VP8E_SET_TUNING,
encoder->tuning);
status =
vpx_codec_control (&encoder->encoder, VP8E_SET_ENABLEAUTOALTREF,
(encoder->auto_alt_ref_frames ? 1 : 0));
@ -621,6 +812,8 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
(encoder->auto_alt_ref_frames ? 1 : 0), gst_vpx_error_name (status));
}
cfg.g_lag_in_frames = encoder->lag_in_frames;
gst_base_video_encoder_set_latency (base_video_encoder, 0,
gst_util_uint64_scale (encoder->max_latency,
state->fps_d * GST_SECOND, state->fps_n));
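The new vp8enc properties map onto the vpx_codec_enc_cfg fields and VP8E_* controls set above. A hedged usage sketch from application code; the element instance and the values are illustrative only:
/* Sketch: exercising a few of the properties added in this commit. */
GstElement *enc = gst_element_factory_make ("vp8enc", "vp8-encoder");
g_object_set (enc,
    "lag-in-frames", 25,       /* allow the encoder to buffer input frames */
    "sharpness", 3,            /* VP8E_SET_SHARPNESS */
    "static-threshold", 100,   /* VP8E_SET_STATIC_THRESHOLD */
    "token-parts", 2,          /* VP8E_SET_TOKEN_PARTITIONS */
    "drop-frame", 25,          /* rc_dropframe_thresh */
    NULL);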


@ -60,6 +60,8 @@ struct _GstVP8Enc
/* properties */
int bitrate;
enum vpx_rc_mode mode;
unsigned int minsection_pct;
unsigned int maxsection_pct;
int min_quantizer;
int max_quantizer;
double quality;
@ -73,6 +75,14 @@ struct _GstVP8Enc
GByteArray *first_pass_cache_content;
vpx_fixed_buf_t last_pass_cache_content;
gboolean auto_alt_ref_frames;
unsigned int lag_in_frames;
int sharpness;
int noise_sensitivity;
vp8e_tuning tuning;
int static_threshold;
gboolean drop_frame;
gboolean resize_allowed;
gboolean partitions;
/* state */
gboolean inited;


@ -175,7 +175,9 @@ typedef enum
GST_PHOTOGRAPHY_CAPS_APERTURE = (1 << 8),
GST_PHOTOGRAPHY_CAPS_EXPOSURE = (1 << 9),
GST_PHOTOGRAPHY_CAPS_SHAKE = (1 << 10),
GST_PHOTOGRAPHY_CAPS_NOISE_REDUCTION = (1 << 11)
GST_PHOTOGRAPHY_CAPS_NOISE_REDUCTION = (1 << 11),
GST_PHOTOGRAPHY_CAPS_FLICKER_REDUCTION = (1 << 12),
GST_PHOTOGRAPHY_CAPS_ALL = (~0)
} GstPhotoCaps;
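The new FLICKER_REDUCTION flag extends the capability bitmask, and GST_PHOTOGRAPHY_CAPS_ALL is a convenience value covering every flag. A hedged sketch of testing the new bit, assuming an element that implements the GstPhotography interface and the interface's gst_photography_get_capabilities() call:
/* Sketch: "camera_src" is a hypothetical element implementing GstPhotography. */
GstPhotoCaps caps = gst_photography_get_capabilities (GST_PHOTOGRAPHY (camera_src));
if (caps & GST_PHOTOGRAPHY_CAPS_FLICKER_REDUCTION)
  g_print ("flicker reduction is supported\n");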
typedef enum


@ -440,8 +440,6 @@ gst_camera_bin_dispose (GObject * object)
gst_object_unref (camerabin->imagesink);
if (camerabin->image_encodebin)
gst_object_unref (camerabin->image_encodebin);
if (camerabin->imagebin_queue)
gst_object_unref (camerabin->imagebin_queue);
if (camerabin->imagebin_capsfilter)
gst_object_unref (camerabin->imagebin_capsfilter);
@ -1118,7 +1116,12 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
gint encbin_flags = 0;
if (!camera->elements_created) {
/* TODO check that elements created in _init were really created */
/* Check that elements created in _init were really created */
if (!(camera->audio_capsfilter && camera->videobin_capsfilter &&
camera->imagebin_capsfilter && camera->viewfinderbin_capsfilter)) {
missing_element_name = "capsfilter";
goto missing_element;
}
camera->video_encodebin =
gst_element_factory_make ("encodebin", "video-encodebin");
@ -1140,6 +1143,10 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
camera->videosink =
gst_element_factory_make ("filesink", "videobin-filesink");
if (!camera->videosink) {
missing_element_name = "filesink";
goto missing_element;
}
g_object_set (camera->videosink, "async", FALSE, NULL);
/* audio elements */
@ -1183,6 +1190,9 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
missing_element_name = "encodebin";
goto missing_element;
}
/* durations have no meaning for image captures */
g_object_set (camera->image_encodebin, "queue-time-max", (guint64) 0, NULL);
camera->image_encodebin_signal_id =
g_signal_connect (camera->image_encodebin, "element-added",
(GCallback) encodebin_element_added, camera);
@ -1215,29 +1225,25 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
camera->image_profile_switch = TRUE;
}
camera->imagebin_queue =
gst_element_factory_make ("queue", "imagebin-queue");
camera->viewfinderbin_queue =
gst_element_factory_make ("queue", "viewfinderbin-queue");
if (!camera->viewfinderbin_queue) {
missing_element_name = "queue";
goto missing_element;
}
g_object_set (camera->viewfinderbin_queue, "leaky", 2, "silent", TRUE,
NULL);
g_object_set (camera->imagebin_queue, "max-size-time", (guint64) 0,
"silent", TRUE, NULL);
gst_bin_add_many (GST_BIN_CAST (camera),
gst_object_ref (camera->video_encodebin),
gst_object_ref (camera->videosink),
gst_object_ref (camera->image_encodebin),
gst_object_ref (camera->imagesink),
gst_object_ref (camera->imagebin_queue),
gst_object_ref (camera->viewfinderbin_queue), NULL);
/* Linking can be optimized TODO */
gst_element_link (camera->video_encodebin, camera->videosink);
gst_element_link_many (camera->imagebin_queue, camera->imagebin_capsfilter,
NULL);
gst_element_link (camera->image_encodebin, camera->imagesink);
gst_element_link_many (camera->viewfinderbin_queue,
camera->viewfinderbin_capsfilter, camera->viewfinderbin, NULL);
@ -1344,8 +1350,13 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
G_CALLBACK (gst_camera_bin_src_notify_readyforcapture), camera);
gst_element_link_pads (camera->src, "vfsrc", camera->viewfinderbin_queue,
"sink");
gst_element_link_pads (camera->src, "imgsrc", camera->imagebin_queue,
"sink");
if (!gst_element_link_pads (camera->src, "imgsrc",
camera->imagebin_capsfilter, "sink")) {
GST_ERROR_OBJECT (camera,
"Failed to link camera source's imgsrc pad to image bin capsfilter");
goto fail;
}
if (!gst_element_link_pads (camera->src, "vidsrc",
camera->videobin_capsfilter, "sink")) {
GST_ERROR_OBJECT (camera,
@ -1359,8 +1370,8 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
}
gst_camera_bin_check_and_replace_filter (camera, &camera->image_filter,
camera->user_image_filter, camera->imagebin_queue,
camera->imagebin_capsfilter, NULL);
camera->user_image_filter, camera->src, camera->imagebin_capsfilter,
"imgsrc");
gst_camera_bin_check_and_replace_filter (camera, &camera->video_filter,
camera->user_video_filter, camera->src, camera->videobin_capsfilter,
"vidsrc");
@ -1387,6 +1398,10 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
} else {
camera->audio_src =
gst_element_factory_make (DEFAULT_AUDIO_SRC, "audiosrc");
if (!camera->audio_src) {
missing_element_name = DEFAULT_AUDIO_SRC;
goto missing_element;
}
}
gst_element_set_locked_state (camera->audio_src, TRUE);


@ -63,7 +63,6 @@ struct _GstCameraBin2
GstElement *image_encodebin;
gulong image_encodebin_signal_id;
GstElement *imagesink;
GstElement *imagebin_queue;
GstElement *imagebin_capsfilter;
GstElement *video_filter;


@ -19,10 +19,12 @@ noinst_HEADERS = \
libgstinter_la_CFLAGS = \
$(GST_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS)
libgstinter_la_LIBADD = \
$(GST_LIBS) \
$(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_MAJORMINOR@ -lgstaudio-@GST_MAJORMINOR@ \
$(LIBM)


@ -1,14 +1,17 @@
plugin_LTLIBRARIES = libgstsubenc.la
libgstsubenc_la_SOURCES = \
gstsrtenc.c
gstsrtenc.c \
gstsubenc.c \
gstwebvttenc.c
libgstsubenc_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(GST_CONTROLLER_CFLAGS)
libgstsubenc_la_LIBADD = $(GST_BASE_LIBS) $(GST_CONTROLLER_LIBS)
libgstsubenc_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstsubenc_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = \
gstsrtenc.h
gstsrtenc.h \
gstwebvttenc.h
Android.mk: Makefile.am $(BUILT_SOURCES)
androgenizer \


@ -46,7 +46,6 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_STATIC_CAPS ("text/plain; text/x-pango-markup"));
static GstFlowReturn gst_srt_enc_chain (GstPad * pad, GstBuffer * buf);
static gboolean plugin_init (GstPlugin * plugin);
static gchar *gst_srt_enc_timeconvertion (GstSrtEnc * srtenc, GstBuffer * buf);
static gchar *gst_srt_enc_timestamp_to_string (GstClockTime timestamp);
static void gst_srt_enc_get_property (GObject * object, guint prop_id,
@ -241,16 +240,3 @@ gst_srt_enc_init (GstSrtEnc * srtenc, GstSrtEncClass * klass)
gst_element_add_pad (GST_ELEMENT (srtenc), srtenc->sinkpad);
gst_pad_set_chain_function (srtenc->sinkpad, gst_srt_enc_chain);
}
static gboolean
plugin_init (GstPlugin * plugin)
{
return gst_element_register (plugin, "srtenc", GST_RANK_NONE,
GST_TYPE_SRT_ENC);
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"subenc",
"subtitle encoders",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

gst/subenc/gstsubenc.c (new file, 41 lines)

@ -0,0 +1,41 @@
/* GStreamer
* Copyright (C) <2008> Thijs Vermeir <thijsvermeir@gmail.com>
* Copyright (C) 2011 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstsrtenc.h"
#include "gstwebvttenc.h"
static gboolean
plugin_init (GstPlugin * plugin)
{
gst_element_register (plugin, "srtenc", GST_RANK_NONE, GST_TYPE_SRT_ENC);
gst_element_register (plugin, "webvttenc", GST_RANK_NONE,
GST_TYPE_WEBVTT_ENC);
return TRUE;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"subenc",
"subtitle encoders",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

gst/subenc/gstwebvttenc.c (new file, 277 lines)

@ -0,0 +1,277 @@
/* GStreamer
* Copyright (C) <2008> Thijs Vermeir <thijsvermeir@gmail.com>
* Copyright (C) 2011 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "string.h"
#include "gstwebvttenc.h"
#include <gst/controller/gstcontroller.h>
GST_DEBUG_CATEGORY_STATIC (webvttenc_debug);
#define GST_CAT_DEFAULT webvttenc_debug
enum
{
ARG_0,
ARG_TIMESTAMP,
ARG_DURATION
};
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("text/webvtt"));
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("text/plain; text/x-pango-markup"));
static GstFlowReturn gst_webvtt_enc_chain (GstPad * pad, GstBuffer * buf);
static gchar *gst_webvtt_enc_timeconvertion (GstWebvttEnc * webvttenc,
GstBuffer * buf);
static gchar *gst_webvtt_enc_timestamp_to_string (GstClockTime timestamp);
static void gst_webvtt_enc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_webvtt_enc_reset (GstWebvttEnc * webvttenc);
static void gst_webvtt_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
GST_BOILERPLATE (GstWebvttEnc, gst_webvtt_enc, GstElement, GST_TYPE_ELEMENT);
static gchar *
gst_webvtt_enc_timestamp_to_string (GstClockTime timestamp)
{
guint h, m, s, ms;
h = timestamp / (3600 * GST_SECOND);
timestamp -= h * 3600 * GST_SECOND;
m = timestamp / (60 * GST_SECOND);
timestamp -= m * 60 * GST_SECOND;
s = timestamp / GST_SECOND;
timestamp -= s * GST_SECOND;
ms = timestamp / GST_MSECOND;
return g_strdup_printf ("%02d:%02d:%02d.%03d", h, m, s, ms);
}
static gchar *
gst_webvtt_enc_timeconvertion (GstWebvttEnc * webvttenc, GstBuffer * buf)
{
gchar *start_time;
gchar *stop_time;
gchar *string;
start_time = gst_webvtt_enc_timestamp_to_string (GST_BUFFER_TIMESTAMP (buf) +
webvttenc->timestamp);
if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf))) {
stop_time = gst_webvtt_enc_timestamp_to_string (GST_BUFFER_TIMESTAMP (buf) +
webvttenc->timestamp + GST_BUFFER_DURATION (buf) + webvttenc->duration);
} else {
stop_time = gst_webvtt_enc_timestamp_to_string (GST_BUFFER_TIMESTAMP (buf) +
webvttenc->timestamp + webvttenc->duration);
}
string = g_strdup_printf ("%s --> %s\n", start_time, stop_time);
g_free (start_time);
g_free (stop_time);
return string;
}
static GstFlowReturn
gst_webvtt_enc_chain (GstPad * pad, GstBuffer * buf)
{
GstWebvttEnc *webvttenc;
GstBuffer *new_buffer;
gchar *timing;
GstFlowReturn ret;
webvttenc = GST_WEBVTT_ENC (gst_pad_get_parent_element (pad));
if (!webvttenc->pushed_header) {
const char *header = "WEBVTT\n\n";
new_buffer = gst_buffer_new_and_alloc (strlen (header));
memcpy (GST_BUFFER_DATA (new_buffer), header, strlen (header));
GST_BUFFER_TIMESTAMP (new_buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (new_buffer) = GST_CLOCK_TIME_NONE;
ret = gst_pad_push (webvttenc->srcpad, new_buffer);
if (ret != GST_FLOW_OK) {
goto out;
}
webvttenc->pushed_header = TRUE;
}
gst_object_sync_values (G_OBJECT (webvttenc), GST_BUFFER_TIMESTAMP (buf));
timing = gst_webvtt_enc_timeconvertion (webvttenc, buf);
new_buffer =
gst_buffer_new_and_alloc (strlen (timing) + GST_BUFFER_SIZE (buf) + 1);
memcpy (GST_BUFFER_DATA (new_buffer), timing, strlen (timing));
memcpy (GST_BUFFER_DATA (new_buffer) + strlen (timing), GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
memcpy (GST_BUFFER_DATA (new_buffer) + GST_BUFFER_SIZE (new_buffer) - 1,
"\n", 1);
g_free (timing);
GST_BUFFER_TIMESTAMP (new_buffer) = GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_DURATION (new_buffer) = GST_BUFFER_DURATION (buf);
ret = gst_pad_push (webvttenc->srcpad, new_buffer);
out:
gst_buffer_unref (buf);
gst_object_unref (webvttenc);
return ret;
}
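Each incoming text buffer thus becomes one cue: the "WEBVTT" header is pushed once, and every buffer is then prefixed with its timing line and terminated with a newline. As a rough illustration, with zero timestamp/duration offsets, a buffer carrying "Hello" at 1 s with a 2 s duration comes out as:
WEBVTT

00:00:01.000 --> 00:00:03.000
Hello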
static void
gst_webvtt_enc_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template));
gst_element_class_set_details_simple (element_class,
"WebVTT encoder", "Codec/Encoder/Subtitle",
"WebVTT subtitle encoder", "David Schleef <ds@schleef.org>");
}
static void
gst_webvtt_enc_reset (GstWebvttEnc * webvttenc)
{
webvttenc->counter = 1;
}
static GstStateChangeReturn
gst_webvtt_enc_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret;
GstWebvttEnc *webvttenc = GST_WEBVTT_ENC (element);
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
if (ret == GST_STATE_CHANGE_FAILURE)
return ret;
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_webvtt_enc_reset (webvttenc);
break;
default:
break;
}
return ret;
}
static void
gst_webvtt_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstWebvttEnc *webvttenc;
webvttenc = GST_WEBVTT_ENC (object);
switch (prop_id) {
case ARG_TIMESTAMP:
g_value_set_int64 (value, webvttenc->timestamp);
break;
case ARG_DURATION:
g_value_set_int64 (value, webvttenc->duration);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_webvtt_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstWebvttEnc *webvttenc;
webvttenc = GST_WEBVTT_ENC (object);
switch (prop_id) {
case ARG_TIMESTAMP:
webvttenc->timestamp = g_value_get_int64 (value);
break;
case ARG_DURATION:
webvttenc->duration = g_value_get_int64 (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_webvtt_enc_class_init (GstWebvttEncClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_webvtt_enc_set_property);
gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_webvtt_enc_get_property);
element_class->change_state = GST_DEBUG_FUNCPTR (gst_webvtt_enc_change_state);
g_object_class_install_property (gobject_class, ARG_TIMESTAMP,
g_param_spec_int64 ("timestamp", "Offset for the starttime",
"Offset for the starttime for the subtitles", G_MININT64, G_MAXINT64,
0,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, ARG_DURATION,
g_param_spec_int64 ("duration", "Offset for the duration",
"Offset for the duration of the subtitles", G_MININT64, G_MAXINT64,
0,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
GST_DEBUG_CATEGORY_INIT (webvttenc_debug, "webvttenc", 0,
"SubRip subtitle encoder");
}
static void
gst_webvtt_enc_init (GstWebvttEnc * webvttenc, GstWebvttEncClass * klass)
{
gst_webvtt_enc_reset (webvttenc);
webvttenc->srcpad = gst_pad_new_from_static_template (&src_template, "src");
gst_element_add_pad (GST_ELEMENT (webvttenc), webvttenc->srcpad);
webvttenc->sinkpad =
gst_pad_new_from_static_template (&sink_template, "sink");
gst_element_add_pad (GST_ELEMENT (webvttenc), webvttenc->sinkpad);
gst_pad_set_chain_function (webvttenc->sinkpad, gst_webvtt_enc_chain);
}

gst/subenc/gstwebvttenc.h (new file, 65 lines)

@ -0,0 +1,65 @@
/* GStreamer
* Copyright (C) <2008> Thijs Vermeir <thijsvermeir@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_WEBVTT_ENC_H__
#define __GST_WEBVTT_ENC_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_TYPE_WEBVTT_ENC \
(gst_webvtt_enc_get_type())
#define GST_WEBVTT_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBVTT_ENC,GstWebvttEnc))
#define GST_WEBVTT_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_WEBVTT_ENC,GstWebvttEnc))
#define GST_IS_WEBVTT_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBVTT_ENC))
#define GST_IS_WEBVTT_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_WEBVTT_ENC))
typedef struct _GstWebvttEnc GstWebvttEnc;
typedef struct _GstWebvttEncClass GstWebvttEncClass;
struct _GstWebvttEncClass
{
GstElementClass parent_class;
};
struct _GstWebvttEnc
{
GstElement element;
GstPad *sinkpad;
GstPad *srcpad;
gboolean pushed_header;
/* properties */
gint64 timestamp;
gint64 duration;
/* counter for subtitle entry */
guint counter;
};
GType gst_webvtt_enc_get_type (void);
G_END_DECLS
#endif


@ -86,6 +86,25 @@ gst_decklink_connection_get_type (void)
return type;
}
GType
gst_decklink_audio_connection_get_type (void)
{
static GType type;
if (!type) {
static const GEnumValue connections[] = {
{GST_DECKLINK_AUDIO_CONNECTION_AUTO, "auto", "Automatic"},
{GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED, "embedded", "SDI/HDMI embedded audio"},
{GST_DECKLINK_AUDIO_CONNECTION_AES_EBU, "aes", "AES/EBU input"},
{GST_DECKLINK_AUDIO_CONNECTION_ANALOG, "analog", "Analog input"},
{0, NULL, NULL}
};
type = g_enum_register_static ("GstDecklinkAudioConnection", connections);
}
return type;
}
#define NTSC 10, 11, false, false
#define PAL 12, 11, true, false
#define HD 1, 1, false, true


@ -63,6 +63,15 @@ typedef enum {
#define GST_TYPE_DECKLINK_CONNECTION (gst_decklink_connection_get_type ())
GType gst_decklink_connection_get_type (void);
typedef enum {
GST_DECKLINK_AUDIO_CONNECTION_AUTO,
GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED,
GST_DECKLINK_AUDIO_CONNECTION_AES_EBU,
GST_DECKLINK_AUDIO_CONNECTION_ANALOG
} GstDecklinkAudioConnectionEnum;
#define GST_TYPE_DECKLINK_AUDIO_CONNECTION (gst_decklink_audio_connection_get_type ())
GType gst_decklink_audio_connection_get_type (void);
typedef struct _GstDecklinkMode GstDecklinkMode;
struct _GstDecklinkMode {
BMDDisplayMode mode;


@ -119,7 +119,8 @@ enum
{
PROP_0,
PROP_MODE,
PROP_CONNECTION
PROP_CONNECTION,
PROP_AUDIO_INPUT
};
/* pad templates */
@ -191,6 +192,12 @@ gst_decklink_src_class_init (GstDecklinkSrcClass * klass)
GST_TYPE_DECKLINK_CONNECTION, GST_DECKLINK_CONNECTION_SDI,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
G_PARAM_CONSTRUCT)));
g_object_class_install_property (gobject_class, PROP_AUDIO_INPUT,
g_param_spec_enum ("audio-input", "Audio Input", "Audio Input Connection",
GST_TYPE_DECKLINK_AUDIO_CONNECTION, GST_DECKLINK_AUDIO_CONNECTION_AUTO,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
G_PARAM_CONSTRUCT)));
}
static void
@ -267,6 +274,8 @@ gst_decklink_src_init (GstDecklinkSrc * decklinksrc,
decklinksrc->copy_data = TRUE;
decklinksrc->mode = GST_DECKLINK_MODE_NTSC;
decklinksrc->connection = GST_DECKLINK_CONNECTION_SDI;
decklinksrc->audio_connection = GST_DECKLINK_AUDIO_CONNECTION_AUTO;
}
@ -287,6 +296,10 @@ gst_decklink_src_set_property (GObject * object, guint property_id,
decklinksrc->connection =
(GstDecklinkConnectionEnum) g_value_get_enum (value);
break;
case PROP_AUDIO_INPUT:
decklinksrc->audio_connection =
(GstDecklinkAudioConnectionEnum) g_value_get_enum (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
@ -309,6 +322,9 @@ gst_decklink_src_get_property (GObject * object, guint property_id,
case PROP_CONNECTION:
g_value_set_enum (value, decklinksrc->connection);
break;
case PROP_AUDIO_INPUT:
g_value_set_enum (value, decklinksrc->audio_connection);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
break;
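The audio-input property lets an application override the automatic audio connection that start() otherwise derives from the video connection. A hedged sketch, assuming the element is registered as "decklinksrc" and using the enum added in gstdecklink.h above:
/* Sketch: force AES/EBU audio capture even though video arrives over SDI. */
GstElement *src = gst_element_factory_make ("decklinksrc", "capture");
g_object_set (src,
    "connection", GST_DECKLINK_CONNECTION_SDI,
    "audio-input", GST_DECKLINK_AUDIO_CONNECTION_AES_EBU,
    NULL);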
@ -379,6 +395,7 @@ gst_decklink_src_start (GstElement * element)
const GstDecklinkMode *mode;
IDeckLinkConfiguration *config;
BMDVideoConnection conn;
BMDAudioConnection aconn;
GST_DEBUG_OBJECT (decklinksrc, "start");
@ -416,21 +433,27 @@ gst_decklink_src_start (GstElement * element)
default:
case GST_DECKLINK_CONNECTION_SDI:
conn = bmdVideoConnectionSDI;
aconn = bmdAudioConnectionEmbedded;
break;
case GST_DECKLINK_CONNECTION_HDMI:
conn = bmdVideoConnectionHDMI;
aconn = bmdAudioConnectionEmbedded;
break;
case GST_DECKLINK_CONNECTION_OPTICAL_SDI:
conn = bmdVideoConnectionOpticalSDI;
aconn = bmdAudioConnectionEmbedded;
break;
case GST_DECKLINK_CONNECTION_COMPONENT:
conn = bmdVideoConnectionComponent;
aconn = bmdAudioConnectionAnalog;
break;
case GST_DECKLINK_CONNECTION_COMPOSITE:
conn = bmdVideoConnectionComposite;
aconn = bmdAudioConnectionAnalog;
break;
case GST_DECKLINK_CONNECTION_SVIDEO:
conn = bmdVideoConnectionSVideo;
aconn = bmdAudioConnectionAnalog;
break;
}
@ -449,15 +472,24 @@ gst_decklink_src_start (GstElement * element)
}
}
if (decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPOSITE ||
decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPONENT ||
decklinksrc->connection == GST_DECKLINK_CONNECTION_SVIDEO) {
ret = config->SetInt (bmdDeckLinkConfigAudioInputConnection,
bmdAudioConnectionAnalog);
if (ret != S_OK) {
GST_ERROR ("set configuration (audio input connection)");
return FALSE;
}
switch (decklinksrc->audio_connection) {
default:
case GST_DECKLINK_AUDIO_CONNECTION_AUTO:
break;
case GST_DECKLINK_AUDIO_CONNECTION_EMBEDDED:
aconn = bmdAudioConnectionEmbedded;
break;
case GST_DECKLINK_AUDIO_CONNECTION_AES_EBU:
aconn = bmdAudioConnectionAESEBU;
break;
case GST_DECKLINK_AUDIO_CONNECTION_ANALOG:
aconn = bmdAudioConnectionAnalog;
break;
}
ret = config->SetInt (bmdDeckLinkConfigAudioInputConnection, aconn);
if (ret != S_OK) {
GST_ERROR ("set configuration (audio input connection)");
return FALSE;
}
#if 0


@ -72,6 +72,7 @@ struct _GstDecklinkSrc
gboolean copy_data;
GstDecklinkModeEnum mode;
GstDecklinkConnectionEnum connection;
GstDecklinkAudioConnectionEnum audio_connection;
};
struct _GstDecklinkSrcClass