Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git
openh264enc: Properly drain the encoder on ::finish()
commit 10475b6793
parent 8aedcc6bde
1 changed file with 80 additions and 53 deletions
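For context: GstVideoEncoderClass::finish() is the vfunc the video-encoder base class calls once upstream is done sending data (typically on EOS), giving the subclass a chance to push out whatever it still has buffered. This patch implements it for openh264enc by reusing gst_openh264enc_handle_frame() with a NULL frame as the drain path. The sketch below condenses that pattern; the identifiers mirror the diff, but error handling and the real encode path are omitted, so treat it as an outline rather than the full change.

/* Outline of the drain logic added by this commit (identifiers taken from
 * the diff below; not the complete implementation). */
static GstFlowReturn
gst_openh264enc_finish (GstVideoEncoder *encoder)
{
    GstOpenh264Enc *openh264enc = GST_OPENH264ENC (encoder);

    /* Nothing was ever submitted, so there is nothing left to drain */
    if (openh264enc->priv->frame_count == 0)
        return GST_FLOW_OK;

    /* Repeatedly call handle_frame() with no input frame; each call pops one
     * pending picture out of OpenH264 and pushes it downstream.  Per the new
     * NULL-frame path in handle_frame(), GST_FLOW_EOS is returned once the
     * encoder has no more output, which terminates the loop. */
    while (gst_openh264enc_handle_frame (encoder, NULL) == GST_FLOW_OK);

    return GST_FLOW_OK;
}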
@@ -101,6 +101,7 @@ static gboolean gst_openh264enc_stop(GstVideoEncoder *encoder);
 static gboolean gst_openh264enc_set_format(GstVideoEncoder *encoder, GstVideoCodecState *state);
 static GstFlowReturn gst_openh264enc_handle_frame(GstVideoEncoder *encoder,
     GstVideoCodecFrame *frame);
+static GstFlowReturn gst_openh264enc_finish (GstVideoEncoder *encoder);
 static gboolean gst_openh264enc_propose_allocation (GstVideoEncoder * encoder, GstQuery * query);
 static void gst_openh264enc_set_usage_type (GstOpenh264Enc *openh264enc, gint usage_type);
 static void gst_openh264enc_set_rate_control (GstOpenh264Enc *openh264enc, gint rc_mode);
@@ -191,6 +192,7 @@ static void gst_openh264enc_class_init(GstOpenh264EncClass *klass)
     video_encoder_class->set_format = GST_DEBUG_FUNCPTR(gst_openh264enc_set_format);
     video_encoder_class->handle_frame = GST_DEBUG_FUNCPTR(gst_openh264enc_handle_frame);
     video_encoder_class->propose_allocation = GST_DEBUG_FUNCPTR(gst_openh264enc_propose_allocation);
+    video_encoder_class->finish = GST_DEBUG_FUNCPTR(gst_openh264enc_finish);
 
     /* define properties */
     g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_USAGE_TYPE,
@@ -572,16 +574,21 @@ static GstFlowReturn gst_openh264enc_handle_frame(GstVideoEncoder *encoder, GstV
     gfloat fps;
     GstVideoEncoder *base_encoder = GST_VIDEO_ENCODER(openh264enc);
 
+    if (frame) {
     src_pic = new SSourcePicture;
 
     if (src_pic == NULL) {
+        if (frame)
         gst_video_codec_frame_unref(frame);
         return GST_FLOW_ERROR;
     }
     //fill default src_pic
     src_pic->iColorFormat = videoFormatI420;
     src_pic->uiTimeStamp = 0;
+    }
 
     openh264enc->priv->frame_count++;
+    if (frame) {
     if (G_UNLIKELY(openh264enc->priv->frame_count == 1)) {
         openh264enc->priv->time_per_frame = (GST_NSECOND / openh264enc->priv->framerate);
         openh264enc->priv->previous_timestamp = frame->pts;
@@ -594,14 +601,18 @@ static GstFlowReturn gst_openh264enc_handle_frame(GstVideoEncoder *encoder, GstV
             openh264enc->priv->encoder->SetOption(ENCODER_OPTION_FRAME_RATE, &fps);
         }
     }
+    }
 
     if (openh264enc->priv->bitrate <= openh264enc->priv->drop_bitrate) {
         GST_LOG_OBJECT(openh264enc, "Dropped frame due to too low bitrate");
+        if (frame) {
         gst_video_encoder_finish_frame(encoder, frame);
-        if (src_pic != NULL) delete src_pic;
+        delete src_pic;
+        }
         return GST_FLOW_OK;
     }
 
+    if (frame) {
     gst_video_frame_map(&video_frame, &openh264enc->priv->input_state->info, frame->input_buffer, GST_MAP_READ);
     src_pic->iPicWidth = GST_VIDEO_FRAME_WIDTH(&video_frame);
     src_pic->iPicHeight = GST_VIDEO_FRAME_HEIGHT(&video_frame);
@@ -617,32 +628,44 @@ static GstFlowReturn gst_openh264enc_handle_frame(GstVideoEncoder *encoder, GstV
         openh264enc->priv->encoder->ForceIntraFrame(true);
         GST_DEBUG_OBJECT(openh264enc,"Got force key unit event, next frame coded as intra picture");
     }
+    }
 
     memset (&frame_info, 0, sizeof (SFrameBSInfo));
     ret = openh264enc->priv->encoder->EncodeFrame(src_pic, &frame_info);
     if (ret != cmResultSuccess) {
+        if (frame) {
         gst_video_frame_unmap(&video_frame);
-        GST_ELEMENT_ERROR(openh264enc, STREAM, ENCODE, ("Could not encode frame"), ("Openh264 returned %d", ret));
         gst_video_codec_frame_unref(frame);
-        if (src_pic != NULL) delete src_pic;
+        delete src_pic;
+        GST_ELEMENT_ERROR(openh264enc, STREAM, ENCODE, ("Could not encode frame"), ("Openh264 returned %d", ret));
         return GST_FLOW_ERROR;
+        } else {
+            return GST_FLOW_EOS;
+        }
     }
 
     if (videoFrameTypeSkip == frame_info.eFrameType) {
+        if (frame) {
         gst_video_frame_unmap(&video_frame);
         gst_video_encoder_finish_frame(base_encoder, frame);
-        if (src_pic != NULL) delete src_pic;
+        delete src_pic;
+        }
 
         return GST_FLOW_OK;
     }
 
+    if (frame) {
+        gst_video_frame_unmap(&video_frame);
     gst_video_codec_frame_unref(frame);
+        delete src_pic;
+        src_pic = NULL;
+        frame = NULL;
+    }
 
     frame = gst_video_encoder_get_oldest_frame(base_encoder);
     if (!frame) {
-        gst_video_frame_unmap(&video_frame);
         GST_ELEMENT_ERROR(openh264enc, STREAM, ENCODE, ("Could not encode frame"), ("openh264enc returned %d", ret));
         gst_video_codec_frame_unref(frame);
-        if (src_pic != NULL) delete src_pic;
         return GST_FLOW_ERROR;
     }
 
@@ -697,14 +720,18 @@ static GstFlowReturn gst_openh264enc_handle_frame(GstVideoEncoder *encoder, GstV
 
     GST_LOG_OBJECT(openh264enc, "openh264 picture %scoded OK!", (ret != cmResultSuccess) ? "NOT " : "");
 
-    gst_video_frame_unmap(&video_frame);
-
-    if (ret == cmResultSuccess) {
-        gst_video_encoder_finish_frame(encoder, frame);
-    } else {
-        gst_video_codec_frame_unref(frame);
-        GST_ELEMENT_ERROR(openh264enc, STREAM, ENCODE, ("Could not encode frame"), ("openh264enc returned %d", ret));
-    }
-
-    return (ret == cmResultSuccess) ? GST_FLOW_OK : GST_FLOW_ERROR;
+    return gst_video_encoder_finish_frame(encoder, frame);
+}
+
+static GstFlowReturn gst_openh264enc_finish (GstVideoEncoder *encoder)
+{
+    GstOpenh264Enc *openh264enc = GST_OPENH264ENC(encoder);
+
+    if (openh264enc->priv->frame_count == 0)
+        return GST_FLOW_OK;
+
+    /* Drain encoder */
+    while ((gst_openh264enc_handle_frame (encoder, NULL)) == GST_FLOW_OK);
+
+    return GST_FLOW_OK;
 }
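Not part of the commit, but a quick way to exercise the new drain path: once upstream sends EOS, the GstVideoEncoder base class invokes ::finish(), so any frames still queued inside OpenH264 are flushed downstream before the EOS message reaches the application. A minimal, hypothetical test program (assuming the openh264 plugin is installed; the output file name drain-test.mkv is arbitrary) could look like this:

/* Hypothetical test snippet (not from the patch): videotestsrc sends EOS
 * after num-buffers frames, which makes openh264enc drain via ::finish()
 * before the EOS message shows up on the bus. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
    gst_init (&argc, &argv);

    GError *error = NULL;
    GstElement *pipeline = gst_parse_launch (
        "videotestsrc num-buffers=30 ! openh264enc ! h264parse ! "
        "matroskamux ! filesink location=drain-test.mkv", &error);
    if (pipeline == NULL) {
        g_printerr ("Failed to build pipeline: %s\n", error->message);
        g_clear_error (&error);
        return 1;
    }

    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Wait until EOS (or an error) has travelled through the whole pipeline,
     * i.e. after openh264enc has finished draining its pending frames. */
    GstBus *bus = gst_element_get_bus (pipeline);
    GstMessage *msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        (GstMessageType) (GST_MESSAGE_EOS | GST_MESSAGE_ERROR));

    if (msg != NULL)
        gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}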