Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git (synced 2024-12-20 23:36:38 +00:00)
[MOVED FROM BAD 107/134] vp8: Port to -base video base classes
Conflicts:
	ext/vp8/Makefile.am
	ext/vp8/gstvp8dec.c
	ext/vp8/gstvp8enc.c

Back to 0.10 state for now, need to be ported again.
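For orientation: the port below boils down to switching the element from the GstBaseVideoEncoder base class to GstVideoEncoder, renaming the virtual methods it overrides (shape_output becomes pre_push, event becomes sink_event) and replacing GstVideoFrameState/GstVideoInfo with GstVideoCodecFrame/GstVideoCodecState. The sketch that follows shows how such a subclass wires those vfuncs; it assumes the gst-plugins-base GstVideoEncoder API, uses a made-up "GstMyEnc" element name, and is only an illustration, not the actual gstvp8enc.c code.

/* Illustrative sketch only (not the actual gstvp8enc.c): a minimal
 * GstVideoEncoder subclass wiring the virtual methods that the port below
 * renames.  Assumes the gst-plugins-base GstVideoEncoder API; "GstMyEnc"
 * is a hypothetical element used purely for illustration. */
#include <gst/video/gstvideoencoder.h>

typedef struct _GstMyEnc
{
  GstVideoEncoder parent;
} GstMyEnc;

typedef struct _GstMyEncClass
{
  GstVideoEncoderClass parent_class;
} GstMyEncClass;

G_DEFINE_TYPE (GstMyEnc, gst_my_enc, GST_TYPE_VIDEO_ENCODER);

static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK, GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, format=I420"));
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS ("video/x-vp8"));

static gboolean
gst_my_enc_start (GstVideoEncoder * encoder)
{
  /* Allocate per-stream encoder state here. */
  return TRUE;
}

static gboolean
gst_my_enc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
{
  /* Input caps now arrive as a GstVideoCodecState; state->info holds the
   * GstVideoInfo that the old code received directly. */
  return TRUE;
}

static GstFlowReturn
gst_my_enc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
{
  /* A real encoder would encode frame->input_buffer and attach the result as
   * frame->output_buffer; finishing a frame without one simply drops it. */
  return gst_video_encoder_finish_frame (encoder, frame);
}

static void
gst_my_enc_class_init (GstMyEncClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstVideoEncoderClass *encoder_class = GST_VIDEO_ENCODER_CLASS (klass);

  gst_element_class_add_static_pad_template (element_class, &sink_template);
  gst_element_class_add_static_pad_template (element_class, &src_template);

  /* The port is largely re-pointing these vfuncs at the renamed base class;
   * stop, finish, pre_push and sink_event hang off the same struct. */
  encoder_class->start = gst_my_enc_start;
  encoder_class->set_format = gst_my_enc_set_format;
  encoder_class->handle_frame = gst_my_enc_handle_frame;
}

static void
gst_my_enc_init (GstMyEnc * enc)
{
}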
parent dccf3dc0b6
commit 74aa2e7c6a

1 changed file with 137 additions and 138 deletions

ext/vp8/gstvp8enc.c
@@ -219,23 +219,23 @@ static void gst_vp8_enc_set_property (GObject * object, guint prop_id,
 static void gst_vp8_enc_get_property (GObject * object, guint prop_id,
     GValue * value, GParamSpec * pspec);

-static gboolean gst_vp8_enc_start (GstBaseVideoEncoder * encoder);
-static gboolean gst_vp8_enc_stop (GstBaseVideoEncoder * encoder);
-static gboolean gst_vp8_enc_set_format (GstBaseVideoEncoder *
-    base_video_encoder, GstVideoInfo * info);
-static gboolean gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder);
-static GstFlowReturn gst_vp8_enc_handle_frame (GstBaseVideoEncoder *
-    base_video_encoder, GstVideoFrameState * frame);
-static GstFlowReturn gst_vp8_enc_shape_output (GstBaseVideoEncoder * encoder,
-    GstVideoFrameState * frame);
-static gboolean gst_vp8_enc_sink_event (GstBaseVideoEncoder *
-    base_video_encoder, GstEvent * event);
+static gboolean gst_vp8_enc_start (GstVideoEncoder * encoder);
+static gboolean gst_vp8_enc_stop (GstVideoEncoder * encoder);
+static gboolean gst_vp8_enc_set_format (GstVideoEncoder *
+    video_encoder, GstVideoCodecState * state);
+static gboolean gst_vp8_enc_finish (GstVideoEncoder * video_encoder);
+static GstFlowReturn gst_vp8_enc_handle_frame (GstVideoEncoder *
+    video_encoder, GstVideoCodecFrame * frame);
+static GstFlowReturn gst_vp8_enc_pre_push (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame);
+static gboolean gst_vp8_enc_sink_event (GstVideoEncoder *
+    video_encoder, GstEvent * event);

 static GstStaticPadTemplate gst_vp8_enc_sink_template =
 GST_STATIC_PAD_TEMPLATE ("sink",
     GST_PAD_SINK,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
     );

 static GstStaticPadTemplate gst_vp8_enc_src_template =
@@ -245,23 +245,33 @@ GST_STATIC_PAD_TEMPLATE ("src",
     GST_STATIC_CAPS ("video/x-vp8")
     );

-#define gst_vp8_enc_parent_class parent_class
-G_DEFINE_TYPE_WITH_CODE (GstVP8Enc, gst_vp8_enc, GST_TYPE_BASE_VIDEO_ENCODER,
-    G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL);
-    G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL););
+static void
+do_init (GType vp8enc_type)
+{
+  static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
+  const GInterfaceInfo preset_interface_info = {
+    NULL, /* interface_init */
+    NULL, /* interface_finalize */
+    NULL /* interface_data */
+  };
+
+  g_type_add_interface_static (vp8enc_type, GST_TYPE_TAG_SETTER,
+      &tag_setter_info);
+  g_type_add_interface_static (vp8enc_type, GST_TYPE_PRESET,
+      &preset_interface_info);
+}
+
+GST_BOILERPLATE_FULL (GstVP8Enc, gst_vp8_enc, GstVideoEncoder,
+    GST_TYPE_VIDEO_ENCODER, do_init);

 static void
 gst_vp8_enc_class_init (GstVP8EncClass * klass)
 {
   GObjectClass *gobject_class;
-  GstElementClass *element_class;
-  GstBaseVideoEncoderClass *base_video_encoder_class;
+  GstVideoEncoderClass *video_encoder_class;

   gobject_class = G_OBJECT_CLASS (klass);
-  element_class = GST_ELEMENT_CLASS (klass);
-  base_video_encoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass);
+  video_encoder_class = GST_VIDEO_ENCODER_CLASS (klass);

   gobject_class->set_property = gst_vp8_enc_set_property;
   gobject_class->get_property = gst_vp8_enc_get_property;
@@ -277,13 +287,13 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
       "Codec/Encoder/Video",
       "Encode VP8 video streams", "David Schleef <ds@entropywave.com>");

-  base_video_encoder_class->start = gst_vp8_enc_start;
-  base_video_encoder_class->stop = gst_vp8_enc_stop;
-  base_video_encoder_class->handle_frame = gst_vp8_enc_handle_frame;
-  base_video_encoder_class->set_format = gst_vp8_enc_set_format;
-  base_video_encoder_class->finish = gst_vp8_enc_finish;
-  base_video_encoder_class->shape_output = gst_vp8_enc_shape_output;
-  base_video_encoder_class->event = gst_vp8_enc_sink_event;
+  video_encoder_class->start = gst_vp8_enc_start;
+  video_encoder_class->stop = gst_vp8_enc_stop;
+  video_encoder_class->handle_frame = gst_vp8_enc_handle_frame;
+  video_encoder_class->set_format = gst_vp8_enc_set_format;
+  video_encoder_class->finish = gst_vp8_enc_finish;
+  video_encoder_class->pre_push = gst_vp8_enc_pre_push;
+  video_encoder_class->sink_event = gst_vp8_enc_sink_event;

   g_object_class_install_property (gobject_class, PROP_BITRATE,
       g_param_spec_int ("bitrate", "Bit rate",
@@ -430,7 +440,7 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
 }

 static void
-gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc)
+gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc, GstVP8EncClass * klass)
 {

   GST_DEBUG_OBJECT (gst_vp8_enc, "init");
@@ -651,21 +661,21 @@ gst_vp8_enc_get_property (GObject * object, guint prop_id, GValue * value,
 }

 static gboolean
-gst_vp8_enc_start (GstBaseVideoEncoder * base_video_encoder)
+gst_vp8_enc_start (GstVideoEncoder * video_encoder)
 {
-  GST_DEBUG_OBJECT (base_video_encoder, "start");
+  GST_DEBUG_OBJECT (video_encoder, "start");

   return TRUE;
 }

 static gboolean
-gst_vp8_enc_stop (GstBaseVideoEncoder * base_video_encoder)
+gst_vp8_enc_stop (GstVideoEncoder * video_encoder)
 {
   GstVP8Enc *encoder;

-  GST_DEBUG_OBJECT (base_video_encoder, "stop");
+  GST_DEBUG_OBJECT (video_encoder, "stop");

-  encoder = GST_VP8_ENC (base_video_encoder);
+  encoder = GST_VP8_ENC (video_encoder);

   if (encoder->inited) {
     vpx_codec_destroy (&encoder->encoder);
@@ -689,8 +699,8 @@ gst_vp8_enc_stop (GstBaseVideoEncoder * base_video_encoder)
 }

 static gboolean
-gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
-    GstVideoInfo * info)
+gst_vp8_enc_set_format (GstVideoEncoder * video_encoder,
+    GstVideoCodecState * state)
 {
   GstVP8Enc *encoder;
   vpx_codec_enc_cfg_t cfg;
@@ -698,13 +708,15 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   vpx_image_t *image;
   guint8 *data = NULL;
   GstCaps *caps;
-  gboolean ret;
+  gboolean ret = TRUE;
+  GstVideoInfo *info = &state->info;
+  GstVideoCodecState *output_state;

-  encoder = GST_VP8_ENC (base_video_encoder);
-  GST_DEBUG_OBJECT (base_video_encoder, "set_format");
+  encoder = GST_VP8_ENC (video_encoder);
+  GST_DEBUG_OBJECT (video_encoder, "set_format");

   if (encoder->inited) {
-    GST_DEBUG_OBJECT (base_video_encoder, "refusing renegotiation");
+    GST_DEBUG_OBJECT (video_encoder, "refusing renegotiation");
     return FALSE;
   }

@@ -716,14 +728,10 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
     return FALSE;
   }

-  /* Scale default bitrate to our size */
-  cfg.rc_target_bitrate = gst_util_uint64_scale (cfg.rc_target_bitrate,
-      info->width * info->height, cfg.g_w * cfg.g_h);
-
-  cfg.g_w = info->width;
-  cfg.g_h = info->height;
-  cfg.g_timebase.num = info->fps_d;
-  cfg.g_timebase.den = info->fps_n;
+  cfg.g_w = GST_VIDEO_INFO_WIDTH (info);
+  cfg.g_h = GST_VIDEO_INFO_HEIGHT (info);
+  cfg.g_timebase.num = GST_VIDEO_INFO_FPS_D (info);
+  cfg.g_timebase.den = GST_VIDEO_INFO_FPS_N (info);

   cfg.g_error_resilient = encoder->error_resilient;
   cfg.g_lag_in_frames = encoder->max_latency;
@@ -743,6 +751,7 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   } else {
     cfg.rc_min_quantizer = (gint) (63 - encoder->quality * 6.2);
     cfg.rc_max_quantizer = (gint) (63 - encoder->quality * 6.2);
+    cfg.rc_target_bitrate = encoder->bitrate;
   }
   cfg.rc_dropframe_thresh = encoder->drop_frame;
   cfg.rc_resize_allowed = encoder->resize_allowed;
@@ -823,11 +832,17 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,

   cfg.g_lag_in_frames = encoder->lag_in_frames;

-  gst_base_video_encoder_set_latency (base_video_encoder, 0,
+  gst_video_encoder_set_latency (video_encoder, 0,
       gst_util_uint64_scale (encoder->max_latency,
-          info->fps_d * GST_SECOND, info->fps_n));
+          GST_VIDEO_INFO_FPS_D (info) * GST_SECOND,
+          GST_VIDEO_INFO_FPS_N (info)));
   encoder->inited = TRUE;

+  /* Store input state */
+  if (encoder->input_state)
+    gst_video_codec_state_unref (encoder->input_state);
+  encoder->input_state = gst_video_codec_state_ref (state);
+
   /* prepare cached image buffer setup */
   image = &encoder->image;
   memset (image, 0, sizeof (*image));
@@ -835,8 +850,8 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   image->fmt = VPX_IMG_FMT_I420;
   image->bps = 12;
   image->x_chroma_shift = image->y_chroma_shift = 1;
-  image->w = image->d_w = info->width;
-  image->h = image->d_h = info->height;
+  image->w = image->d_w = GST_VIDEO_INFO_WIDTH (info);
+  image->h = image->d_h = GST_VIDEO_INFO_HEIGHT (info);

   image->stride[VPX_PLANE_Y] = GST_VIDEO_INFO_COMP_STRIDE (info, 0);
   image->stride[VPX_PLANE_U] = GST_VIDEO_INFO_COMP_STRIDE (info, 1);
@@ -845,19 +860,13 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   image->planes[VPX_PLANE_U] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 1);
   image->planes[VPX_PLANE_V] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 2);

-  caps = gst_caps_new_simple ("video/x-vp8",
-      "width", G_TYPE_INT, info->width,
-      "height", G_TYPE_INT, info->height,
-      "framerate", GST_TYPE_FRACTION, info->fps_n,
-      info->fps_d,
-      "pixel-aspect-ratio", GST_TYPE_FRACTION, info->par_n, info->par_d, NULL);
+  caps = gst_caps_new_simple ("video/x-vp8", NULL);
   {
     GstStructure *s;
     GstBuffer *stream_hdr, *vorbiscomment;
     const GstTagList *iface_tags;
     GValue array = { 0, };
     GValue value = { 0, };
-    GstMapInfo map;
     s = gst_caps_get_structure (caps, 0);

     /* put buffers in a fixed list */
@@ -866,38 +875,34 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,

     /* Create Ogg stream-info */
     stream_hdr = gst_buffer_new_and_alloc (26);
-    gst_buffer_map (stream_hdr, &map, GST_MAP_WRITE);
-    data = map.data;
+    data = GST_BUFFER_DATA (stream_hdr);

     GST_WRITE_UINT8 (data, 0x4F);
     GST_WRITE_UINT32_BE (data + 1, 0x56503830); /* "VP80" */
     GST_WRITE_UINT8 (data + 5, 0x01); /* stream info header */
     GST_WRITE_UINT8 (data + 6, 1); /* Major version 1 */
     GST_WRITE_UINT8 (data + 7, 0); /* Minor version 0 */
-    GST_WRITE_UINT16_BE (data + 8, info->width);
-    GST_WRITE_UINT16_BE (data + 10, info->height);
-    GST_WRITE_UINT24_BE (data + 12, info->par_n);
-    GST_WRITE_UINT24_BE (data + 15, info->par_d);
-    GST_WRITE_UINT32_BE (data + 18, info->fps_n);
-    GST_WRITE_UINT32_BE (data + 22, info->fps_d);
+    GST_WRITE_UINT16_BE (data + 8, GST_VIDEO_INFO_WIDTH (info));
+    GST_WRITE_UINT16_BE (data + 10, GST_VIDEO_INFO_HEIGHT (info));
+    GST_WRITE_UINT24_BE (data + 12, GST_VIDEO_INFO_PAR_N (info));
+    GST_WRITE_UINT24_BE (data + 15, GST_VIDEO_INFO_PAR_D (info));
+    GST_WRITE_UINT32_BE (data + 18, GST_VIDEO_INFO_FPS_N (info));
+    GST_WRITE_UINT32_BE (data + 22, GST_VIDEO_INFO_FPS_D (info));

-    gst_buffer_unmap (stream_hdr, &map);
-
-    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_HEADER);
+    GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
     gst_value_set_buffer (&value, stream_hdr);
     gst_value_array_append_value (&array, &value);
     g_value_unset (&value);
     gst_buffer_unref (stream_hdr);

-    iface_tags =
-        gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
+    iface_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (video_encoder));
     if (iface_tags) {
       vorbiscomment =
           gst_tag_list_to_vorbiscomment_buffer (iface_tags,
           (const guint8 *) "OVP80\2 ", 7,
           "Encoded with GStreamer vp8enc " PACKAGE_VERSION);

-      GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_HEADER);
+      GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);

       g_value_init (&value, GST_TYPE_BUFFER);
       gst_value_set_buffer (&value, vorbiscomment);
@@ -909,9 +914,9 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
     gst_structure_set_value (s, "streamheader", &array);
     g_value_unset (&array);
   }
-
-  ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (encoder), caps);
-  gst_caps_unref (caps);
+  output_state =
+      gst_video_encoder_set_output_state (video_encoder, caps, state);
+  gst_video_codec_state_unref (output_state);

   return ret;
 }
@@ -921,12 +926,12 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
 {
   vpx_codec_iter_t iter = NULL;
   const vpx_codec_cx_pkt_t *pkt;
-  GstBaseVideoEncoder *base_video_encoder;
+  GstVideoEncoder *video_encoder;
   GstVP8EncCoderHook *hook;
-  GstVideoFrameState *frame;
+  GstVideoCodecFrame *frame;
   GstFlowReturn ret = GST_FLOW_OK;

-  base_video_encoder = GST_BASE_VIDEO_ENCODER (encoder);
+  video_encoder = GST_VIDEO_ENCODER (encoder);

   pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
   while (pkt != NULL) {
@@ -943,12 +948,12 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
       g_byte_array_append (encoder->first_pass_cache_content,
           pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);

-      frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
+      frame = gst_video_encoder_get_oldest_frame (video_encoder);
       if (frame != NULL) {
         buffer = gst_buffer_new ();
-        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE);
-        frame->src_buffer = buffer;
-        gst_base_video_encoder_finish_frame (base_video_encoder, frame);
+        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);
+        frame->output_buffer = buffer;
+        gst_video_encoder_finish_frame (video_encoder, frame);
       }

       pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
@@ -960,15 +965,17 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
     }

     invisible = (pkt->data.frame.flags & VPX_FRAME_IS_INVISIBLE) != 0;
-    frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
+    frame = gst_video_encoder_get_oldest_frame (video_encoder);
     g_assert (frame != NULL);
-    frame->is_sync_point = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
+    if ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0)
+      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
+    else
+      GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
     hook = frame->coder_hook;

-    /* FIXME : It would be nice to avoid the memory copy ... */
-    buffer =
-        gst_buffer_new_wrapped (g_memdup (pkt->data.frame.buf,
-            pkt->data.frame.sz), pkt->data.frame.sz);
+    buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);
+    memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf, pkt->data.frame.sz);

     if (hook->image)
       g_slice_free (vpx_image_t, hook->image);
@@ -977,8 +984,8 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
     if (invisible) {
       hook->invisible = g_list_append (hook->invisible, buffer);
     } else {
-      frame->src_buffer = buffer;
-      ret = gst_base_video_encoder_finish_frame (base_video_encoder, frame);
+      frame->output_buffer = buffer;
+      ret = gst_video_encoder_finish_frame (video_encoder, frame);
     }

     pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
@@ -988,15 +995,15 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
 }

 static GstFlowReturn
-gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
+gst_vp8_enc_finish (GstVideoEncoder * video_encoder)
 {
   GstVP8Enc *encoder;
   int flags = 0;
   vpx_codec_err_t status;

-  GST_DEBUG_OBJECT (base_video_encoder, "finish");
+  GST_DEBUG_OBJECT (video_encoder, "finish");

-  encoder = GST_VP8_ENC (base_video_encoder);
+  encoder = GST_VP8_ENC (video_encoder);

   status =
       vpx_codec_encode (&encoder->encoder, NULL, encoder->n_frames, 1, flags,
@@ -1030,48 +1037,40 @@ static vpx_image_t *
 gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer)
 {
   vpx_image_t *image = g_slice_new (vpx_image_t);
-  GstVideoFrame frame;
+  guint8 *data = GST_BUFFER_DATA (buffer);

   memcpy (image, &enc->image, sizeof (*image));

-  gst_video_frame_map (&frame, &GST_BASE_VIDEO_CODEC (enc)->info,
-      buffer, GST_MAP_READ);
-
-  image->img_data = frame.data[0];
-  image->planes[VPX_PLANE_Y] = frame.data[0];
-  image->planes[VPX_PLANE_U] = frame.data[1];
-  image->planes[VPX_PLANE_V] = frame.data[2];
-
-  /* FIXME : We should only unmap when we're done with it */
-
-  gst_video_frame_unmap (&frame);
+  image->img_data = data;
+  image->planes[VPX_PLANE_Y] += (data - (guint8 *) NULL);
+  image->planes[VPX_PLANE_U] += (data - (guint8 *) NULL);
+  image->planes[VPX_PLANE_V] += (data - (guint8 *) NULL);

   return image;
 }

 static GstFlowReturn
-gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
-    GstVideoFrameState * frame)
+gst_vp8_enc_handle_frame (GstVideoEncoder * video_encoder,
+    GstVideoCodecFrame * frame)
 {
   GstVP8Enc *encoder;
-  const GstVideoState *state;
   vpx_codec_err_t status;
   int flags = 0;
   vpx_image_t *image;
   GstVP8EncCoderHook *hook;
   int quality;

-  GST_DEBUG_OBJECT (base_video_encoder, "handle_frame");
+  GST_DEBUG_OBJECT (video_encoder, "handle_frame");

-  encoder = GST_VP8_ENC (base_video_encoder);
+  encoder = GST_VP8_ENC (video_encoder);

-  state = gst_base_video_encoder_get_state (base_video_encoder);
   encoder->n_frames++;

-  GST_DEBUG_OBJECT (base_video_encoder, "size %d %d", state->width,
-      state->height);
+  GST_DEBUG_OBJECT (video_encoder, "size %d %d",
+      GST_VIDEO_INFO_WIDTH (&encoder->input_state->info),
+      GST_VIDEO_INFO_HEIGHT (&encoder->input_state->info));

-  image = gst_vp8_enc_buffer_to_image (encoder, frame->sink_buffer);
+  image = gst_vp8_enc_buffer_to_image (encoder, frame->input_buffer);

   hook = g_slice_new0 (GstVP8EncCoderHook);
   hook->image = image;
@@ -1079,7 +1078,7 @@ gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
   frame->coder_hook_destroy_notify =
       (GDestroyNotify) gst_vp8_enc_coder_hook_free;

-  if (frame->force_keyframe) {
+  if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) {
     flags |= VPX_EFLAG_FORCE_KF;
   }

@@ -1112,22 +1111,22 @@ _to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
 }

 static GstFlowReturn
-gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
-    GstVideoFrameState * frame)
+gst_vp8_enc_pre_push (GstVideoEncoder * video_encoder,
+    GstVideoCodecFrame * frame)
 {
   GstVP8Enc *encoder;
   GstBuffer *buf;
-  const GstVideoState *state;
-  GstFlowReturn ret;
+  GstFlowReturn ret = GST_FLOW_OK;
   GstVP8EncCoderHook *hook = frame->coder_hook;
   GList *l;
   gint inv_count;
+  GstVideoInfo *info;

-  GST_DEBUG_OBJECT (base_video_encoder, "shape_output");
+  GST_DEBUG_OBJECT (video_encoder, "pre_push");

-  encoder = GST_VP8_ENC (base_video_encoder);
+  encoder = GST_VP8_ENC (video_encoder);

-  state = gst_base_video_encoder_get_state (base_video_encoder);
+  info = &encoder->input_state->info;

   g_assert (hook != NULL);

@@ -1135,7 +1134,8 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
     buf = l->data;
     l->data = NULL;

-    if (l == hook->invisible && frame->is_sync_point) {
+    /* FIXME : All of this should have already been handled by base classes, no ? */
+    if (l == hook->invisible && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
       GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
       encoder->keyframe_distance = 0;
     } else {
@@ -1143,16 +1143,18 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
       encoder->keyframe_distance++;
     }

-    GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->src_buffer);
+    GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->output_buffer);
     GST_BUFFER_DURATION (buf) = 0;
     GST_BUFFER_OFFSET_END (buf) =
         _to_granulepos (frame->presentation_frame_number + 1,
         inv_count, encoder->keyframe_distance);
     GST_BUFFER_OFFSET (buf) =
         gst_util_uint64_scale (frame->presentation_frame_number + 1,
-        GST_SECOND * state->fps_d, state->fps_n);
+        GST_SECOND * GST_VIDEO_INFO_FPS_D (info), GST_VIDEO_INFO_FPS_N (info));

-    ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
+    gst_buffer_set_caps (buf,
+        GST_PAD_CAPS (GST_VIDEO_ENCODER_SRC_PAD (video_encoder)));
+    ret = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (video_encoder), buf);

     if (ret != GST_FLOW_OK) {
       GST_WARNING_OBJECT (encoder, "flow error %d", ret);
@@ -1160,10 +1162,10 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
     }
   }

-  buf = frame->src_buffer;
-  frame->src_buffer = NULL;
+  buf = frame->output_buffer;

-  if (!hook->invisible && frame->is_sync_point) {
+  /* FIXME : All of this should have already been handled by base classes, no ? */
+  if (!hook->invisible && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
     GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
     encoder->keyframe_distance = 0;
   } else {
@@ -1176,25 +1178,22 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
         encoder->keyframe_distance);
     GST_BUFFER_OFFSET (buf) =
         gst_util_uint64_scale (frame->presentation_frame_number + 1,
-        GST_SECOND * state->fps_d, state->fps_n);
+        GST_SECOND * GST_VIDEO_INFO_FPS_D (info), GST_VIDEO_INFO_FPS_N (info));

-  GST_LOG_OBJECT (base_video_encoder, "src ts: %" GST_TIME_FORMAT,
+  GST_LOG_OBJECT (video_encoder, "src ts: %" GST_TIME_FORMAT,
       GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

-  ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
-  if (ret != GST_FLOW_OK) {
-    GST_WARNING_OBJECT (encoder, "flow error %d", ret);
-  }
-
 done:
   return ret;
 }

 static gboolean
-gst_vp8_enc_sink_event (GstBaseVideoEncoder * benc, GstEvent * event)
+gst_vp8_enc_sink_event (GstVideoEncoder * benc, GstEvent * event)
 {
   GstVP8Enc *enc = GST_VP8_ENC (benc);

+  /* FIXME : Move this to base encoder class */
   if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
     GstTagList *list;
     GstTagSetter *setter = GST_TAG_SETTER (enc);