diff --git a/sys/d3d11/gstd3d11_fwd.h b/sys/d3d11/gstd3d11_fwd.h index e8150ba23b..6d8c372c5c 100644 --- a/sys/d3d11/gstd3d11_fwd.h +++ b/sys/d3d11/gstd3d11_fwd.h @@ -82,6 +82,9 @@ typedef struct _GstD3D11H264DecClass GstD3D11H264DecClass; typedef struct _GstD3D11Vp9Dec GstD3D11Vp9Dec; typedef struct _GstD3D11Vp9DecClass GstD3D11Vp9DecClass; +typedef struct _GstD3D11H265Dec GstD3D11H265Dec; +typedef struct _GstD3D11H265DecClass GstD3D11H265DecClass; + G_END_DECLS #endif /* __GST_D3D11_FWD_H__ */ diff --git a/sys/d3d11/gstd3d11decoder.c b/sys/d3d11/gstd3d11decoder.c index e86417247e..536a011bd7 100644 --- a/sys/d3d11/gstd3d11decoder.c +++ b/sys/d3d11/gstd3d11decoder.c @@ -599,7 +599,8 @@ gst_d3d11_decoder_open (GstD3D11Decoder * decoder, GstD3D11Codec codec, break; } - if (codec == GST_D3D11_CODEC_VP9 && config_list[i].ConfigBitstreamRaw == 1) { + if ((codec == GST_D3D11_CODEC_VP9 || codec == GST_D3D11_CODEC_H265) + && config_list[i].ConfigBitstreamRaw == 1) { best_config = &config_list[i]; break; } diff --git a/sys/d3d11/gstd3d11decoder.h b/sys/d3d11/gstd3d11decoder.h index 5412dad7d1..93ce11bca5 100644 --- a/sys/d3d11/gstd3d11decoder.h +++ b/sys/d3d11/gstd3d11decoder.h @@ -58,6 +58,7 @@ typedef enum GST_D3D11_CODEC_NONE, GST_D3D11_CODEC_H264, GST_D3D11_CODEC_VP9, + GST_D3D11_CODEC_H265, /* the last of supported codec */ GST_D3D11_CODEC_LAST diff --git a/sys/d3d11/gstd3d11h265dec.c b/sys/d3d11/gstd3d11h265dec.c new file mode 100644 index 0000000000..cc2e233ab6 --- /dev/null +++ b/sys/d3d11/gstd3d11h265dec.c @@ -0,0 +1,1402 @@ +/* GStreamer + * Copyright (C) 2019 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include +#endif + +#include "gstd3d11h265dec.h" +#include "gstd3d11memory.h" +#include "gstd3d11bufferpool.h" +#include + +GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_h265_dec_debug); +#define GST_CAT_DEFAULT gst_d3d11_h265_dec_debug + +enum +{ + PROP_0, + PROP_ADAPTER +}; + +#define DEFAULT_ADAPTER -1 + +/* copied from d3d11.h since mingw header doesn't define them */ +DEFINE_GUID (GST_GUID_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN, + 0x5b11d51b, 0x2f4c, 0x4452, 0xbc, 0xc3, 0x09, 0xf2, 0xa1, 0x16, 0x0c, 0xc0); +DEFINE_GUID (GST_GUID_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10, + 0x107af0e0, 0xef1a, 0x4d19, 0xab, 0xa8, 0x67, 0xa1, 0x63, 0x07, 0x3d, 0x13); + +/* worst case 16 + 4 margin */ +#define NUM_OUTPUT_VIEW 20 + +static GstStaticPadTemplate sink_template = +GST_STATIC_PAD_TEMPLATE (GST_VIDEO_DECODER_SINK_NAME, + GST_PAD_SINK, GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/x-h265, " + "stream-format=(string) { hev1, hvc1, byte-stream }, " + "alignment=(string) au, " "profile = (string) { main-10, main }") + ); + +static GstStaticPadTemplate src_template = + GST_STATIC_PAD_TEMPLATE (GST_VIDEO_DECODER_SRC_NAME, + GST_PAD_SRC, GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, "{ NV12, P010_10LE }") "; " + GST_VIDEO_CAPS_MAKE ("{ NV12, P010_10LE }"))); + +#define parent_class gst_d3d11_h265_dec_parent_class +G_DEFINE_TYPE (GstD3D11H265Dec, gst_d3d11_h265_dec, GST_TYPE_H265_DECODER); + +static void gst_d3d11_h265_dec_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); +static void gst_d3d11_h265_dec_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec); +static void gst_d3d11_h265_dec_dispose (GObject * object); +static void gst_d3d11_h265_dec_set_context (GstElement * element, + GstContext * context); + +static gboolean gst_d3d11_h265_dec_open (GstVideoDecoder * decoder); +static gboolean gst_d3d11_h265_dec_close (GstVideoDecoder * decoder); +static gboolean gst_d3d11_h265_dec_start (GstVideoDecoder * decoder); +static gboolean gst_d3d11_h265_dec_stop (GstVideoDecoder * decoder); +static GstFlowReturn gst_d3d11_h265_dec_handle_frame (GstVideoDecoder * + decoder, GstVideoCodecFrame * frame); +static gboolean gst_d3d11_h265_dec_negotiate (GstVideoDecoder * decoder); +static gboolean gst_d3d11_h265_dec_decide_allocation (GstVideoDecoder * + decoder, GstQuery * query); +static gboolean gst_d3d11_h265_dec_src_query (GstVideoDecoder * decoder, + GstQuery * query); + +/* GstH265Decoder */ +static gboolean gst_d3d11_h265_dec_new_sequence (GstH265Decoder * decoder, + const GstH265SPS * sps); +static gboolean gst_d3d11_h265_dec_new_picture (GstH265Decoder * decoder, + GstH265Picture * picture); +static GstFlowReturn gst_d3d11_h265_dec_output_picture (GstH265Decoder * + decoder, GstH265Picture * picture); +static gboolean gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder, + GstH265Picture * picture, GstH265Slice * slice, GstH265Dpb * dpb); +static gboolean gst_d3d11_h265_dec_decode_slice (GstH265Decoder * decoder, + GstH265Picture * picture, GstH265Slice * slice); +static gboolean gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder, + GstH265Picture * picture); + +static void +gst_d3d11_h265_dec_class_init (GstD3D11H265DecClass * klass) +{ + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); + GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass); + GstH265DecoderClass 
*h265decoder_class = GST_H265_DECODER_CLASS (klass); + + gobject_class->set_property = gst_d3d11_h265_dec_set_property; + gobject_class->get_property = gst_d3d11_h265_dec_get_property; + gobject_class->dispose = gst_d3d11_h265_dec_dispose; + + g_object_class_install_property (gobject_class, PROP_ADAPTER, + g_param_spec_int ("adapter", "Adapter", + "Adapter index for creating device (-1 for default)", + -1, G_MAXINT32, DEFAULT_ADAPTER, + G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY | + G_PARAM_STATIC_STRINGS)); + + element_class->set_context = + GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_set_context); + + gst_element_class_set_static_metadata (element_class, + "Direct3D11 H.265 Video Decoder", + "Codec/Decoder/Video/Hardware", + "A Direct3D11 based H.265 video decoder", + "Seungha Yang "); + + gst_element_class_add_static_pad_template (element_class, &sink_template); + gst_element_class_add_static_pad_template (element_class, &src_template); + + decoder_class->open = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_open); + decoder_class->close = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_close); + decoder_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_start); + decoder_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_stop); + decoder_class->handle_frame = + GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_handle_frame); + decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_negotiate); + decoder_class->decide_allocation = + GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_decide_allocation); + decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_src_query); + + h265decoder_class->new_sequence = + GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_new_sequence); + h265decoder_class->new_picture = + GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_new_picture); + h265decoder_class->output_picture = + GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_output_picture); + h265decoder_class->start_picture = + GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_start_picture); + h265decoder_class->decode_slice = + GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_decode_slice); + h265decoder_class->end_picture = + GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_end_picture); +} + +static void +gst_d3d11_h265_dec_init (GstD3D11H265Dec * self) +{ + self->slice_list = g_array_new (FALSE, TRUE, sizeof (DXVA_Slice_HEVC_Short)); + self->adapter = DEFAULT_ADAPTER; +} + +static void +gst_d3d11_h265_dec_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (object); + + switch (prop_id) { + case PROP_ADAPTER: + self->adapter = g_value_get_int (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_d3d11_h265_dec_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (object); + + switch (prop_id) { + case PROP_ADAPTER: + g_value_set_int (value, self->adapter); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_d3d11_h265_dec_dispose (GObject * object) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (object); + + if (self->slice_list) { + g_array_unref (self->slice_list); + self->slice_list = NULL; + } + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +static void +gst_d3d11_h265_dec_set_context (GstElement * element, GstContext * context) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (element); + + gst_d3d11_handle_set_context (element, context, self->adapter, &self->device); + + 
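+  /* Chain up to the parent class implementation as well */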
GST_ELEMENT_CLASS (parent_class)->set_context (element, context); +} + +static gboolean +gst_d3d11_h265_dec_open (GstVideoDecoder * decoder) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + + if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), self->adapter, + &self->device)) { + GST_ERROR_OBJECT (self, "Cannot create d3d11device"); + return FALSE; + } + + self->d3d11_decoder = gst_d3d11_decoder_new (self->device); + + if (!self->d3d11_decoder) { + GST_ERROR_OBJECT (self, "Cannot create d3d11 decoder"); + gst_clear_object (&self->device); + return FALSE; + } + + return TRUE; +} + +static gboolean +gst_d3d11_h265_dec_close (GstVideoDecoder * decoder) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + + gst_clear_object (&self->d3d11_decoder); + gst_clear_object (&self->device); + + return TRUE; +} + +static gboolean +gst_d3d11_h265_dec_start (GstVideoDecoder * decoder) +{ + return GST_VIDEO_DECODER_CLASS (parent_class)->start (decoder); +} + +static gboolean +gst_d3d11_h265_dec_stop (GstVideoDecoder * decoder) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + + gst_h265_picture_replace (&self->current_picture, NULL); + + return GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder); +} + +static GstFlowReturn +gst_d3d11_h265_dec_handle_frame (GstVideoDecoder * decoder, + GstVideoCodecFrame * frame) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + GstBuffer *in_buf = frame->input_buffer; + + GST_LOG_OBJECT (self, + "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %" + GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)), + GST_TIME_ARGS (GST_BUFFER_DTS (in_buf))); + + if (!self->current_picture) { + GST_ERROR_OBJECT (self, "No current picture"); + gst_video_decoder_drop_frame (decoder, frame); + + return GST_FLOW_ERROR; + } + + gst_video_codec_frame_set_user_data (frame, + self->current_picture, (GDestroyNotify) gst_h265_picture_unref); + self->current_picture = NULL; + + gst_video_codec_frame_unref (frame); + + return GST_FLOW_OK; +} + +static gboolean +gst_d3d11_h265_dec_negotiate (GstVideoDecoder * decoder) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + GstH265Decoder *h265dec = GST_H265_DECODER (decoder); + GstCaps *peer_caps; + + GST_DEBUG_OBJECT (self, "negotiate"); + + if (self->output_state) + gst_video_codec_state_unref (self->output_state); + + self->output_state = + gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self), + self->out_format, self->width, self->height, h265dec->input_state); + + self->output_state->caps = gst_video_info_to_caps (&self->output_state->info); + + peer_caps = gst_pad_get_allowed_caps (GST_VIDEO_DECODER_SRC_PAD (self)); + GST_DEBUG_OBJECT (self, "Allowed caps %" GST_PTR_FORMAT, peer_caps); + + self->use_d3d11_output = FALSE; + + if (!peer_caps || gst_caps_is_any (peer_caps)) { + GST_DEBUG_OBJECT (self, + "cannot determine output format, use system memory"); + } else { + GstCapsFeatures *features; + guint size = gst_caps_get_size (peer_caps); + guint i; + + for (i = 0; i < size; i++) { + features = gst_caps_get_features (peer_caps, i); + if (features && gst_caps_features_contains (features, + GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY)) { + GST_DEBUG_OBJECT (self, "found D3D11 memory feature"); + gst_caps_set_features (self->output_state->caps, 0, + gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, NULL)); + + self->use_d3d11_output = TRUE; + break; + } + } + } + gst_clear_caps (&peer_caps); + + return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder); +} + +static 
gboolean +gst_d3d11_h265_dec_decide_allocation (GstVideoDecoder * decoder, + GstQuery * query) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + GstCaps *outcaps; + GstBufferPool *pool = NULL; + guint n, size, min, max; + GstVideoInfo vinfo = { 0, }; + GstStructure *config; + GstD3D11AllocationParams *d3d11_params; + + GST_DEBUG_OBJECT (self, "decide allocation"); + + gst_query_parse_allocation (query, &outcaps, NULL); + + if (!outcaps) { + GST_DEBUG_OBJECT (self, "No output caps"); + return FALSE; + } + + gst_video_info_from_caps (&vinfo, outcaps); + n = gst_query_get_n_allocation_pools (query); + if (n > 0) + gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max); + + /* create our own pool */ + if (pool && (self->use_d3d11_output && !GST_D3D11_BUFFER_POOL (pool))) { + gst_object_unref (pool); + pool = NULL; + } + + if (!pool) { + if (self->use_d3d11_output) + pool = gst_d3d11_buffer_pool_new (self->device); + else + pool = gst_video_buffer_pool_new (); + + min = max = 0; + size = (guint) vinfo.size; + } + + config = gst_buffer_pool_get_config (pool); + gst_buffer_pool_config_set_params (config, outcaps, size, min, max); + gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META); + + if (self->use_d3d11_output) { + d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config); + if (!d3d11_params) + d3d11_params = gst_d3d11_allocation_params_new (&vinfo, 0, + D3D11_USAGE_DEFAULT, 0); + + /* dxva2 decoder uses non-resource format + * (e.g., use NV12 instead of R8 + R8G8 */ + d3d11_params->desc[0].Width = GST_VIDEO_INFO_WIDTH (&vinfo); + d3d11_params->desc[0].Height = GST_VIDEO_INFO_HEIGHT (&vinfo); + d3d11_params->desc[0].Format = d3d11_params->d3d11_format->dxgi_format; + + d3d11_params->flags &= ~GST_D3D11_ALLOCATION_FLAG_USE_RESOURCE_FORMAT; + + gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params); + gst_d3d11_allocation_params_free (d3d11_params); + } + + gst_buffer_pool_set_config (pool, config); + if (self->use_d3d11_output) + size = GST_D3D11_BUFFER_POOL (pool)->buffer_size; + + if (n > 0) + gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max); + else + gst_query_add_allocation_pool (query, pool, size, min, max); + gst_object_unref (pool); + + return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation + (decoder, query); +} + +static gboolean +gst_d3d11_h265_dec_src_query (GstVideoDecoder * decoder, GstQuery * query) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONTEXT: + if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder), + query, self->device)) { + return TRUE; + } + break; + default: + break; + } + + return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query); +} + +static gboolean +gst_d3d11_h265_dec_new_sequence (GstH265Decoder * decoder, + const GstH265SPS * sps) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + gint crop_width, crop_height; + gboolean modified = FALSE; + static const GUID *main_10_guid = + &GST_GUID_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN10; + static const GUID *main_guid = &GST_GUID_D3D11_DECODER_PROFILE_HEVC_VLD_MAIN; + + GST_LOG_OBJECT (self, "new sequence"); + + if (sps->conformance_window_flag) { + crop_width = sps->crop_rect_width; + crop_height = sps->crop_rect_height; + } else { + crop_width = sps->width; + crop_height = sps->height; + } + + if (self->width != crop_width || self->height != crop_height || + self->coded_width != sps->width || 
self->coded_height != sps->height) { + GST_INFO_OBJECT (self, "resolution changed %dx%d", crop_width, crop_height); + self->width = crop_width; + self->height = crop_height; + self->coded_width = sps->width; + self->coded_height = sps->height; + modified = TRUE; + } + + if (self->bitdepth != sps->bit_depth_luma_minus8 + 8) { + GST_INFO_OBJECT (self, "bitdepth changed"); + self->bitdepth = sps->bit_depth_luma_minus8 + 8; + modified = TRUE; + } + + if (self->chroma_format_idc != sps->chroma_format_idc) { + GST_INFO_OBJECT (self, "chroma format changed"); + self->chroma_format_idc = sps->chroma_format_idc; + modified = TRUE; + } + + if (modified || !self->d3d11_decoder->opened) { + const GUID *profile_guid = NULL; + GstVideoInfo info; + + self->out_format = GST_VIDEO_FORMAT_UNKNOWN; + + if (self->bitdepth == 8) { + if (self->chroma_format_idc == 1) { + self->out_format = GST_VIDEO_FORMAT_NV12; + profile_guid = main_guid; + } else { + GST_FIXME_OBJECT (self, "Could not support 8bits non-4:2:0 format"); + } + } else if (self->bitdepth == 10) { + if (self->chroma_format_idc == 1) { + self->out_format = GST_VIDEO_FORMAT_P010_10LE; + profile_guid = main_10_guid; + } else { + GST_FIXME_OBJECT (self, "Could not support 10bits non-4:2:0 format"); + } + } + + if (self->out_format == GST_VIDEO_FORMAT_UNKNOWN) { + GST_ERROR_OBJECT (self, "Could not support bitdepth/chroma format"); + return FALSE; + } + + /* allocated internal pool with coded width/height */ + gst_video_info_set_format (&info, + self->out_format, self->coded_width, self->coded_height); + + gst_d3d11_decoder_reset (self->d3d11_decoder); + if (!gst_d3d11_decoder_open (self->d3d11_decoder, GST_D3D11_CODEC_H265, + &info, NUM_OUTPUT_VIEW, &profile_guid, 1)) { + GST_ERROR_OBJECT (self, "Failed to create decoder"); + return FALSE; + } + + if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) { + GST_ERROR_OBJECT (self, "Failed to negotiate with downstream"); + return FALSE; + } + } + + return TRUE; +} + +static gboolean +gst_d3d11_h265_dec_get_bitstream_buffer (GstD3D11H265Dec * self) +{ + GST_TRACE_OBJECT (self, "Getting bitstream buffer"); + if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder, + D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &self->bitstream_buffer_size, + (gpointer *) & self->bitstream_buffer_bytes)) { + GST_ERROR_OBJECT (self, "Faild to get bitstream buffer"); + return FALSE; + } + + GST_TRACE_OBJECT (self, "Got bitstream buffer %p with size %d", + self->bitstream_buffer_bytes, self->bitstream_buffer_size); + self->current_offset = 0; + + return TRUE; +} + +static GstD3D11DecoderOutputView * +gst_d3d11_h265_dec_get_output_view_from_picture (GstD3D11H265Dec * self, + GstH265Picture * picture) +{ + GstBuffer *view_buffer; + GstD3D11DecoderOutputView *view; + + view_buffer = (GstBuffer *) gst_h265_picture_get_user_data (picture); + if (!view_buffer) { + GST_DEBUG_OBJECT (self, "current picture does not have output view buffer"); + return NULL; + } + + view = gst_d3d11_decoder_get_output_view_from_buffer (self->d3d11_decoder, + view_buffer); + if (!view) { + GST_DEBUG_OBJECT (self, "current picture does not have output view handle"); + return NULL; + } + + return view; +} + +static gint +gst_d3d11_h265_dec_get_ref_index (GstD3D11H265Dec * self, gint view_id) +{ + gint i; + for (i = 0; i < G_N_ELEMENTS (self->ref_pic_list); i++) { + if (self->ref_pic_list[i].Index7Bits == view_id) + return i; + } + + return 0xff; +} + +static gboolean +gst_d3d11_h265_dec_start_picture (GstH265Decoder * decoder, + GstH265Picture * 
picture, GstH265Slice * slice, GstH265Dpb * dpb) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + GstD3D11DecoderOutputView *view; + gint i, j; + GArray *dpb_array; + + view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture); + if (!view) { + GST_ERROR_OBJECT (self, "current picture does not have output view handle"); + return FALSE; + } + + GST_TRACE_OBJECT (self, "Begin frame"); + + if (!gst_d3d11_decoder_begin_frame (self->d3d11_decoder, view, 0, NULL)) { + GST_ERROR_OBJECT (self, "Failed to begin frame"); + return FALSE; + } + + for (i = 0; i < 15; i++) { + self->ref_pic_list[i].bPicEntry = 0xff; + self->pic_order_cnt_val_list[i] = 0; + } + + for (i = 0; i < 8; i++) { + self->ref_pic_set_st_curr_before[i] = 0xff; + self->ref_pic_set_st_curr_after[i] = 0xff; + self->ref_pic_set_lt_curr[i] = 0xff; + } + + dpb_array = gst_h265_dpb_get_pictures_all (dpb); + + GST_LOG_OBJECT (self, "DPB size %d", dpb_array->len); + + for (i = 0; i < dpb_array->len && i < G_N_ELEMENTS (self->ref_pic_list); i++) { + GstH265Picture *other = g_array_index (dpb_array, GstH265Picture *, i); + GstD3D11DecoderOutputView *other_view; + gint id = 0xff; + + if (!other->ref) { + GST_LOG_OBJECT (self, "%dth picture in dpb is not reference, skip", i); + continue; + } + + other_view = gst_d3d11_h265_dec_get_output_view_from_picture (self, other); + + if (other_view) + id = other_view->view_id; + + self->ref_pic_list[i].Index7Bits = id; + self->ref_pic_list[i].AssociatedFlag = other->long_term; + self->pic_order_cnt_val_list[i] = other->pic_order_cnt; + } + + for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_st_curr_before); i++) { + GstH265Picture *other = NULL; + gint id = 0xff; + + while (other == NULL && j < decoder->NumPocStCurrBefore) + other = decoder->RefPicSetStCurrBefore[j++]; + + if (other) { + GstD3D11DecoderOutputView *other_view; + + other_view = + gst_d3d11_h265_dec_get_output_view_from_picture (self, other); + + if (other_view) + id = gst_d3d11_h265_dec_get_ref_index (self, other_view->view_id); + } + + self->ref_pic_set_st_curr_before[i] = id; + } + + for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_st_curr_after); i++) { + GstH265Picture *other = NULL; + gint id = 0xff; + + while (other == NULL && j < decoder->NumPocStCurrAfter) + other = decoder->RefPicSetStCurrAfter[j++]; + + if (other) { + GstD3D11DecoderOutputView *other_view; + + other_view = + gst_d3d11_h265_dec_get_output_view_from_picture (self, other); + + if (other_view) + id = gst_d3d11_h265_dec_get_ref_index (self, other_view->view_id); + } + + self->ref_pic_set_st_curr_after[i] = id; + } + + for (i = 0, j = 0; i < G_N_ELEMENTS (self->ref_pic_set_lt_curr); i++) { + GstH265Picture *other = NULL; + gint id = 0xff; + + while (other == NULL && j < decoder->NumPocLtCurr) + other = decoder->RefPicSetLtCurr[j++]; + + if (other) { + GstD3D11DecoderOutputView *other_view; + + other_view = + gst_d3d11_h265_dec_get_output_view_from_picture (self, other); + + if (other_view) + id = gst_d3d11_h265_dec_get_ref_index (self, other_view->view_id); + } + + self->ref_pic_set_lt_curr[i] = id; + } + + g_array_unref (dpb_array); + g_array_set_size (self->slice_list, 0); + + return gst_d3d11_h265_dec_get_bitstream_buffer (self); +} + +static gboolean +gst_d3d11_h265_dec_new_picture (GstH265Decoder * decoder, + GstH265Picture * picture) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + GstBuffer *view_buffer; + GstD3D11Memory *mem; + + view_buffer = gst_d3d11_decoder_get_output_view_buffer (self->d3d11_decoder); + if 
(!view_buffer) { + GST_ERROR_OBJECT (self, "No available output view buffer"); + return FALSE; + } + + mem = (GstD3D11Memory *) gst_buffer_peek_memory (view_buffer, 0); + + GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT " (index %d)", + view_buffer, mem->subresource_index); + + gst_h265_picture_set_user_data (picture, + view_buffer, (GDestroyNotify) gst_buffer_unref); + + GST_LOG_OBJECT (self, "New h265picture %p", picture); + + gst_h265_picture_replace (&self->current_picture, picture); + + return TRUE; +} + +static GstFlowReturn +gst_d3d11_h265_dec_output_picture (GstH265Decoder * decoder, + GstH265Picture * picture) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + GList *pending_frames, *iter; + GstVideoCodecFrame *frame = NULL; + GstBuffer *output_buffer = NULL; + GstFlowReturn ret; + GstBuffer *view_buffer; + + GST_LOG_OBJECT (self, + "Outputting picture %p, poc %d", picture, picture->pic_order_cnt); + + view_buffer = (GstBuffer *) gst_h265_picture_get_user_data (picture); + + if (!view_buffer) { + GST_ERROR_OBJECT (self, "Could not get output view"); + return FALSE; + } + + pending_frames = gst_video_decoder_get_frames (GST_VIDEO_DECODER (self)); + + for (iter = pending_frames; iter; iter = g_list_next (iter)) { + GstVideoCodecFrame *tmp; + GstH265Picture *other_pic; + + tmp = (GstVideoCodecFrame *) iter->data; + other_pic = gst_video_codec_frame_get_user_data (tmp); + if (!other_pic) { + /* FIXME: what should we do here? */ + GST_WARNING_OBJECT (self, + "Codec frame %p does not have corresponding picture object", tmp); + continue; + } + + if (other_pic == picture) { + frame = gst_video_codec_frame_ref (tmp); + break; + } + } + + g_list_free_full (pending_frames, + (GDestroyNotify) gst_video_codec_frame_unref); + + if (!frame) { + GST_WARNING_OBJECT (self, + "Failed to find codec frame for picture %p", picture); + + output_buffer = + gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self)); + + if (!output_buffer) { + GST_ERROR_OBJECT (self, "Couldn't allocate output buffer"); + return GST_FLOW_ERROR; + } + + GST_BUFFER_PTS (output_buffer) = picture->pts; + GST_BUFFER_DTS (output_buffer) = GST_CLOCK_TIME_NONE; + GST_BUFFER_DURATION (output_buffer) = GST_CLOCK_TIME_NONE; + } else { + ret = + gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (self), + frame); + + if (ret != GST_FLOW_OK) { + GST_ERROR_OBJECT (self, "failed to allocate output frame"); + return ret; + } + + output_buffer = frame->output_buffer; + GST_BUFFER_PTS (output_buffer) = GST_BUFFER_PTS (frame->input_buffer); + GST_BUFFER_DTS (output_buffer) = GST_CLOCK_TIME_NONE; + GST_BUFFER_DURATION (output_buffer) = + GST_BUFFER_DURATION (frame->input_buffer); + } + + if (!gst_d3d11_decoder_copy_decoder_buffer (self->d3d11_decoder, + &self->output_state->info, view_buffer, output_buffer)) { + GST_ERROR_OBJECT (self, "Failed to copy buffer"); + if (frame) + gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame); + else + gst_buffer_unref (output_buffer); + + return GST_FLOW_ERROR; + } + + GST_LOG_OBJECT (self, "Finish frame %" GST_TIME_FORMAT, + GST_TIME_ARGS (GST_BUFFER_PTS (output_buffer))); + + if (frame) { + ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame); + } else { + ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), output_buffer); + } + + return ret; +} + +static gboolean +gst_d3d11_h265_dec_submit_slice_data (GstD3D11H265Dec * self) +{ + guint buffer_size; + gpointer buffer; + guint8 *data; + gsize offset = 0; + gint i; + 
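+  /* At most four buffer descriptors per submission: picture parameters,
+   * an optional inverse quantization matrix, slice control and bitstream */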
D3D11_VIDEO_DECODER_BUFFER_DESC buffer_desc[4] = { 0, }; + gboolean ret; + guint buffer_count = 0; + + if (self->slice_list->len < 1) { + GST_WARNING_OBJECT (self, "Nothing to submit"); + return FALSE; + } + + GST_TRACE_OBJECT (self, "Getting slice control buffer"); + + if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder, + D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL, &buffer_size, &buffer)) { + GST_ERROR_OBJECT (self, "Couldn't get slice control buffer"); + return FALSE; + } + + data = buffer; + for (i = 0; i < self->slice_list->len; i++) { + DXVA_Slice_HEVC_Short *slice_data = + &g_array_index (self->slice_list, DXVA_Slice_HEVC_Short, i); + + memcpy (data + offset, slice_data, sizeof (DXVA_Slice_HEVC_Short)); + offset += sizeof (DXVA_Slice_HEVC_Short); + } + + GST_TRACE_OBJECT (self, "Release slice control buffer"); + if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder, + D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL)) { + GST_ERROR_OBJECT (self, "Failed to release slice control buffer"); + return FALSE; + } + + if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder, + D3D11_VIDEO_DECODER_BUFFER_BITSTREAM)) { + GST_ERROR_OBJECT (self, "Failed to release bitstream buffer"); + return FALSE; + } + + buffer_desc[buffer_count].BufferType = + D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS; + buffer_desc[buffer_count].DataOffset = 0; + buffer_desc[buffer_count].DataSize = sizeof (DXVA_PicParams_HEVC); + buffer_count++; + + if (self->submit_iq_data) { + buffer_desc[buffer_count].BufferType = + D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX; + buffer_desc[buffer_count].DataOffset = 0; + buffer_desc[buffer_count].DataSize = sizeof (DXVA_Qmatrix_HEVC); + buffer_count++; + } + + buffer_desc[buffer_count].BufferType = + D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL; + buffer_desc[buffer_count].DataOffset = 0; + buffer_desc[buffer_count].DataSize = + sizeof (DXVA_Slice_HEVC_Short) * self->slice_list->len; + buffer_count++; + + buffer_desc[buffer_count].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM; + buffer_desc[buffer_count].DataOffset = 0; + buffer_desc[buffer_count].DataSize = self->current_offset; + buffer_count++; + + ret = gst_d3d11_decoder_submit_decoder_buffers (self->d3d11_decoder, + buffer_count, buffer_desc); + + self->current_offset = 0; + self->bitstream_buffer_bytes = NULL; + self->bitstream_buffer_size = 0; + g_array_set_size (self->slice_list, 0); + + return ret; +} + +static gboolean +gst_d3d11_h265_dec_end_picture (GstH265Decoder * decoder, + GstH265Picture * picture) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + + GST_LOG_OBJECT (self, "end picture %p, (poc %d)", + picture, picture->pic_order_cnt); + + if (!gst_d3d11_h265_dec_submit_slice_data (self)) { + GST_ERROR_OBJECT (self, "Failed to submit slice data"); + return FALSE; + } + + if (!gst_d3d11_decoder_end_frame (self->d3d11_decoder)) { + GST_ERROR_OBJECT (self, "Failed to EndFrame"); + return FALSE; + } + + return TRUE; +} + +static void +gst_d3d11_h265_dec_picture_params_from_sps (GstD3D11H265Dec * self, + const GstH265SPS * sps, DXVA_PicParams_HEVC * params) +{ +#define COPY_FIELD(f) \ + (params)->f = (sps)->f +#define COPY_FIELD_WITH_PREFIX(f) \ + (params)->G_PASTE(sps_,f) = (sps)->f + + params->PicWidthInMinCbsY = + sps->width >> (sps->log2_min_luma_coding_block_size_minus3 + 3); + params->PicHeightInMinCbsY = + sps->height >> (sps->log2_min_luma_coding_block_size_minus3 + 3); + params->sps_max_dec_pic_buffering_minus1 = + 
sps->max_dec_pic_buffering_minus1[sps->max_sub_layers_minus1]; + + COPY_FIELD (chroma_format_idc); + COPY_FIELD (separate_colour_plane_flag); + COPY_FIELD (bit_depth_luma_minus8); + COPY_FIELD (bit_depth_chroma_minus8); + COPY_FIELD (log2_max_pic_order_cnt_lsb_minus4); + COPY_FIELD (log2_min_luma_coding_block_size_minus3); + COPY_FIELD (log2_diff_max_min_luma_coding_block_size); + COPY_FIELD (log2_min_transform_block_size_minus2); + COPY_FIELD (log2_diff_max_min_transform_block_size); + COPY_FIELD (max_transform_hierarchy_depth_inter); + COPY_FIELD (max_transform_hierarchy_depth_intra); + COPY_FIELD (num_short_term_ref_pic_sets); + COPY_FIELD (num_long_term_ref_pics_sps); + COPY_FIELD (scaling_list_enabled_flag); + COPY_FIELD (amp_enabled_flag); + COPY_FIELD (sample_adaptive_offset_enabled_flag); + COPY_FIELD (pcm_enabled_flag); + + if (sps->pcm_enabled_flag) { + COPY_FIELD (pcm_sample_bit_depth_luma_minus1); + COPY_FIELD (pcm_sample_bit_depth_chroma_minus1); + COPY_FIELD (log2_min_pcm_luma_coding_block_size_minus3); + COPY_FIELD (log2_diff_max_min_pcm_luma_coding_block_size); + } + + COPY_FIELD (pcm_loop_filter_disabled_flag); + COPY_FIELD (long_term_ref_pics_present_flag); + COPY_FIELD_WITH_PREFIX (temporal_mvp_enabled_flag); + COPY_FIELD (strong_intra_smoothing_enabled_flag); + +#undef COPY_FIELD +#undef COPY_FIELD_WITH_PREFIX +} + +static void +gst_d3d11_h265_dec_picture_params_from_pps (GstD3D11H265Dec * self, + const GstH265PPS * pps, DXVA_PicParams_HEVC * params) +{ + gint i; + +#define COPY_FIELD(f) \ + (params)->f = (pps)->f +#define COPY_FIELD_WITH_PREFIX(f) \ + (params)->G_PASTE(pps_,f) = (pps)->f + + COPY_FIELD (num_ref_idx_l0_default_active_minus1); + COPY_FIELD (num_ref_idx_l1_default_active_minus1); + COPY_FIELD (init_qp_minus26); + COPY_FIELD (dependent_slice_segments_enabled_flag); + COPY_FIELD (output_flag_present_flag); + COPY_FIELD (num_extra_slice_header_bits); + COPY_FIELD (sign_data_hiding_enabled_flag); + COPY_FIELD (cabac_init_present_flag); + COPY_FIELD (constrained_intra_pred_flag); + COPY_FIELD (transform_skip_enabled_flag); + COPY_FIELD (cu_qp_delta_enabled_flag); + COPY_FIELD_WITH_PREFIX (slice_chroma_qp_offsets_present_flag); + COPY_FIELD (weighted_pred_flag); + COPY_FIELD (weighted_bipred_flag); + COPY_FIELD (transquant_bypass_enabled_flag); + COPY_FIELD (tiles_enabled_flag); + COPY_FIELD (entropy_coding_sync_enabled_flag); + COPY_FIELD (uniform_spacing_flag); + + if (pps->tiles_enabled_flag) + COPY_FIELD (loop_filter_across_tiles_enabled_flag); + + COPY_FIELD_WITH_PREFIX (loop_filter_across_slices_enabled_flag); + COPY_FIELD (deblocking_filter_override_enabled_flag); + COPY_FIELD_WITH_PREFIX (deblocking_filter_disabled_flag); + COPY_FIELD (lists_modification_present_flag); + COPY_FIELD (slice_segment_header_extension_present_flag); + COPY_FIELD_WITH_PREFIX (cb_qp_offset); + COPY_FIELD_WITH_PREFIX (cr_qp_offset); + + if (pps->tiles_enabled_flag) { + COPY_FIELD (num_tile_columns_minus1); + COPY_FIELD (num_tile_rows_minus1); + if (!pps->uniform_spacing_flag) { + for (i = 0; i < pps->num_tile_columns_minus1 + 1; i++) + COPY_FIELD (column_width_minus1[i]); + + for (i = 0; i < pps->num_tile_rows_minus1 + 1; i++) + COPY_FIELD (row_height_minus1[i]); + } + } + + COPY_FIELD (diff_cu_qp_delta_depth); + COPY_FIELD_WITH_PREFIX (beta_offset_div2); + COPY_FIELD_WITH_PREFIX (tc_offset_div2); + COPY_FIELD (log2_parallel_merge_level_minus2); + +#undef COPY_FIELD +#undef COPY_FIELD_WITH_PREFIX +} + +static void +gst_d3d11_h265_dec_picture_params_from_slice_header 
(GstD3D11H265Dec * + self, const GstH265SliceHdr * slice_header, DXVA_PicParams_HEVC * params) +{ + if (slice_header->short_term_ref_pic_set_sps_flag == 0) { + params->ucNumDeltaPocsOfRefRpsIdx = + slice_header->short_term_ref_pic_sets.NumDeltaPocs; + params->wNumBitsForShortTermRPSInSlice = + slice_header->short_term_ref_pic_set_size; + } +} + +static gboolean +gst_d3d11_h265_dec_fill_picture_params (GstD3D11H265Dec * self, + const GstH265SliceHdr * slice_header, DXVA_PicParams_HEVC * params) +{ + const GstH265SPS *sps; + const GstH265PPS *pps; + + g_return_val_if_fail (slice_header->pps != NULL, FALSE); + g_return_val_if_fail (slice_header->pps->sps != NULL, FALSE); + + pps = slice_header->pps; + sps = pps->sps; + + memset (params, 0, sizeof (DXVA_PicParams_HEVC)); + + /* not related to hevc syntax */ + params->NoPicReorderingFlag = 0; + params->NoBiPredFlag = 0; + params->ReservedBits1 = 0; + params->StatusReportFeedbackNumber = 1; + + gst_d3d11_h265_dec_picture_params_from_sps (self, sps, params); + gst_d3d11_h265_dec_picture_params_from_pps (self, pps, params); + gst_d3d11_h265_dec_picture_params_from_slice_header (self, + slice_header, params); + + return TRUE; +} + +#ifndef GST_DISABLE_GST_DEBUG +static void +gst_d3d11_h265_dec_dump_pic_params (GstD3D11H265Dec * self, + DXVA_PicParams_HEVC * params) +{ + gint i; + + GST_TRACE_OBJECT (self, "Dump current DXVA_PicParams_HEVC"); + +#define DUMP_PIC_PARAMS(p) \ + GST_TRACE_OBJECT (self, "\t" G_STRINGIFY(p) ": %d", (gint)params->p) + + DUMP_PIC_PARAMS (PicWidthInMinCbsY); + DUMP_PIC_PARAMS (PicHeightInMinCbsY); + DUMP_PIC_PARAMS (chroma_format_idc); + DUMP_PIC_PARAMS (separate_colour_plane_flag); + DUMP_PIC_PARAMS (bit_depth_chroma_minus8); + DUMP_PIC_PARAMS (NoPicReorderingFlag); + DUMP_PIC_PARAMS (NoBiPredFlag); + DUMP_PIC_PARAMS (CurrPic.Index7Bits); + DUMP_PIC_PARAMS (sps_max_dec_pic_buffering_minus1); + DUMP_PIC_PARAMS (log2_min_luma_coding_block_size_minus3); + DUMP_PIC_PARAMS (log2_diff_max_min_luma_coding_block_size); + DUMP_PIC_PARAMS (log2_min_transform_block_size_minus2); + DUMP_PIC_PARAMS (log2_diff_max_min_transform_block_size); + DUMP_PIC_PARAMS (max_transform_hierarchy_depth_inter); + DUMP_PIC_PARAMS (max_transform_hierarchy_depth_intra); + DUMP_PIC_PARAMS (num_short_term_ref_pic_sets); + DUMP_PIC_PARAMS (num_long_term_ref_pics_sps); + DUMP_PIC_PARAMS (num_ref_idx_l0_default_active_minus1); + DUMP_PIC_PARAMS (num_ref_idx_l1_default_active_minus1); + DUMP_PIC_PARAMS (init_qp_minus26); + DUMP_PIC_PARAMS (ucNumDeltaPocsOfRefRpsIdx); + DUMP_PIC_PARAMS (wNumBitsForShortTermRPSInSlice); + DUMP_PIC_PARAMS (scaling_list_enabled_flag); + DUMP_PIC_PARAMS (amp_enabled_flag); + DUMP_PIC_PARAMS (sample_adaptive_offset_enabled_flag); + DUMP_PIC_PARAMS (pcm_enabled_flag); + DUMP_PIC_PARAMS (pcm_sample_bit_depth_luma_minus1); + DUMP_PIC_PARAMS (pcm_sample_bit_depth_chroma_minus1); + DUMP_PIC_PARAMS (log2_min_pcm_luma_coding_block_size_minus3); + DUMP_PIC_PARAMS (log2_diff_max_min_pcm_luma_coding_block_size); + DUMP_PIC_PARAMS (pcm_loop_filter_disabled_flag); + DUMP_PIC_PARAMS (long_term_ref_pics_present_flag); + DUMP_PIC_PARAMS (sps_temporal_mvp_enabled_flag); + DUMP_PIC_PARAMS (strong_intra_smoothing_enabled_flag); + DUMP_PIC_PARAMS (dependent_slice_segments_enabled_flag); + DUMP_PIC_PARAMS (output_flag_present_flag); + DUMP_PIC_PARAMS (num_extra_slice_header_bits); + DUMP_PIC_PARAMS (sign_data_hiding_enabled_flag); + DUMP_PIC_PARAMS (cabac_init_present_flag); + + DUMP_PIC_PARAMS (constrained_intra_pred_flag); + DUMP_PIC_PARAMS 
(transform_skip_enabled_flag); + DUMP_PIC_PARAMS (cu_qp_delta_enabled_flag); + DUMP_PIC_PARAMS (pps_slice_chroma_qp_offsets_present_flag); + DUMP_PIC_PARAMS (weighted_pred_flag); + DUMP_PIC_PARAMS (weighted_bipred_flag); + DUMP_PIC_PARAMS (transquant_bypass_enabled_flag); + DUMP_PIC_PARAMS (tiles_enabled_flag); + DUMP_PIC_PARAMS (entropy_coding_sync_enabled_flag); + DUMP_PIC_PARAMS (uniform_spacing_flag); + DUMP_PIC_PARAMS (loop_filter_across_tiles_enabled_flag); + DUMP_PIC_PARAMS (pps_loop_filter_across_slices_enabled_flag); + DUMP_PIC_PARAMS (deblocking_filter_override_enabled_flag); + DUMP_PIC_PARAMS (pps_deblocking_filter_disabled_flag); + DUMP_PIC_PARAMS (lists_modification_present_flag); + DUMP_PIC_PARAMS (IrapPicFlag); + DUMP_PIC_PARAMS (IdrPicFlag); + DUMP_PIC_PARAMS (IntraPicFlag); + DUMP_PIC_PARAMS (pps_cb_qp_offset); + DUMP_PIC_PARAMS (pps_cr_qp_offset); + DUMP_PIC_PARAMS (num_tile_columns_minus1); + DUMP_PIC_PARAMS (num_tile_rows_minus1); + for (i = 0; i < G_N_ELEMENTS (params->column_width_minus1); i++) + GST_TRACE_OBJECT (self, "\tcolumn_width_minus1[%d]: %d", i, + params->column_width_minus1[i]); + for (i = 0; i < G_N_ELEMENTS (params->row_height_minus1); i++) + GST_TRACE_OBJECT (self, "\trow_height_minus1[%d]: %d", i, + params->row_height_minus1[i]); + DUMP_PIC_PARAMS (diff_cu_qp_delta_depth); + DUMP_PIC_PARAMS (pps_beta_offset_div2); + DUMP_PIC_PARAMS (pps_tc_offset_div2); + DUMP_PIC_PARAMS (log2_parallel_merge_level_minus2); + DUMP_PIC_PARAMS (CurrPicOrderCntVal); + + for (i = 0; i < G_N_ELEMENTS (params->RefPicList); i++) { + GST_TRACE_OBJECT (self, "\tRefPicList[%d].Index7Bits: %d", i, + params->RefPicList[i].Index7Bits); + GST_TRACE_OBJECT (self, "\tRefPicList[%d].AssociatedFlag: %d", i, + params->RefPicList[i].AssociatedFlag); + GST_TRACE_OBJECT (self, "\tPicOrderCntValList[%d]: %d", i, + params->PicOrderCntValList[i]); + } + + for (i = 0; i < G_N_ELEMENTS (params->RefPicSetStCurrBefore); i++) { + GST_TRACE_OBJECT (self, "\tRefPicSetStCurrBefore[%d]: %d", i, + params->RefPicSetStCurrBefore[i]); + GST_TRACE_OBJECT (self, "\tRefPicSetStCurrAfter[%d]: %d", i, + params->RefPicSetStCurrAfter[i]); + GST_TRACE_OBJECT (self, "\tRefPicSetLtCurr[%d]: %d", i, + params->RefPicSetLtCurr[i]); + } + +#undef DUMP_PIC_PARAMS +} +#endif + +static gboolean +gst_d3d11_h265_dec_decode_slice (GstH265Decoder * decoder, + GstH265Picture * picture, GstH265Slice * slice) +{ + GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder); + GstH265PPS *pps; + DXVA_PicParams_HEVC pic_params = { 0, }; + DXVA_Qmatrix_HEVC iq_matrix = { 0, }; + guint d3d11_buffer_size = 0; + gpointer d3d11_buffer = NULL; + gint i, j; + GstD3D11DecoderOutputView *view; + + pps = slice->header.pps; + + view = gst_d3d11_h265_dec_get_output_view_from_picture (self, picture); + + if (!view) { + GST_ERROR_OBJECT (self, "current picture does not have output view"); + return FALSE; + } + + gst_d3d11_h265_dec_fill_picture_params (self, &slice->header, &pic_params); + + pic_params.CurrPic.Index7Bits = view->view_id; + pic_params.IrapPicFlag = IS_IRAP (slice->nalu.type); + pic_params.IdrPicFlag = IS_IDR (slice->nalu.type); + pic_params.IntraPicFlag = IS_IRAP (slice->nalu.type); + pic_params.CurrPicOrderCntVal = picture->pic_order_cnt; + + memcpy (pic_params.RefPicList, self->ref_pic_list, + sizeof (pic_params.RefPicList)); + memcpy (pic_params.PicOrderCntValList, self->pic_order_cnt_val_list, + sizeof (pic_params.PicOrderCntValList)); + memcpy (pic_params.RefPicSetStCurrBefore, self->ref_pic_set_st_curr_before, + sizeof 
(pic_params.RefPicSetStCurrBefore)); + memcpy (pic_params.RefPicSetStCurrAfter, self->ref_pic_set_st_curr_after, + sizeof (pic_params.RefPicSetStCurrAfter)); + memcpy (pic_params.RefPicSetLtCurr, self->ref_pic_set_lt_curr, + sizeof (pic_params.RefPicSetLtCurr)); + +#ifndef GST_DISABLE_GST_DEBUG + gst_d3d11_h265_dec_dump_pic_params (self, &pic_params); +#endif + + GST_TRACE_OBJECT (self, "Getting picture param decoder buffer"); + + if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder, + D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS, &d3d11_buffer_size, + &d3d11_buffer)) { + GST_ERROR_OBJECT (self, + "Failed to get decoder buffer for picture parameters"); + return FALSE; + } + + memcpy (d3d11_buffer, &pic_params, sizeof (pic_params)); + + GST_TRACE_OBJECT (self, "Release picture param decoder buffer"); + + if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder, + D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS)) { + GST_ERROR_OBJECT (self, "Failed to release decoder buffer"); + return FALSE; + } + + if (pps->scaling_list_data_present_flag) { + self->submit_iq_data = TRUE; + + for (i = 0; i < 6; i++) { + for (j = 0; j < 16; j++) { + iq_matrix.ucScalingLists0[i][j] = + pps->scaling_list.scaling_lists_4x4[i][j]; + } + } + + for (i = 0; i < 6; i++) { + for (j = 0; j < 64; j++) { + iq_matrix.ucScalingLists1[i][j] = + pps->scaling_list.scaling_lists_8x8[i][j]; + iq_matrix.ucScalingLists2[i][j] = + pps->scaling_list.scaling_lists_16x16[i][j]; + } + } + + for (i = 0; i < 2; i++) { + for (j = 0; j < 64; j++) { + iq_matrix.ucScalingLists3[i][j] = + pps->scaling_list.scaling_lists_32x32[i][j]; + } + } + + for (i = 0; i < 6; i++) + iq_matrix.ucScalingListDCCoefSizeID2[i] = + pps->scaling_list.scaling_list_dc_coef_minus8_16x16[i]; + + for (i = 0; i < 2; i++) + iq_matrix.ucScalingListDCCoefSizeID3[i] = + pps->scaling_list.scaling_list_dc_coef_minus8_32x32[i]; + + GST_TRACE_OBJECT (self, "Getting inverse quantization maxtirx buffer"); + + if (!gst_d3d11_decoder_get_decoder_buffer (self->d3d11_decoder, + D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX, + &d3d11_buffer_size, &d3d11_buffer)) { + GST_ERROR_OBJECT (self, + "Failed to get decoder buffer for inv. 
quantization matrix"); + return FALSE; + } + + memcpy (d3d11_buffer, &iq_matrix, sizeof (iq_matrix)); + + GST_TRACE_OBJECT (self, "Release inverse quantization maxtirx buffer"); + + if (!gst_d3d11_decoder_release_decoder_buffer (self->d3d11_decoder, + D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX)) { + GST_ERROR_OBJECT (self, "Failed to release decoder buffer"); + return FALSE; + } + } else { + self->submit_iq_data = FALSE; + } + + { + guint to_write = slice->nalu.size + 3; + gboolean is_first = TRUE; + + while (to_write > 0) { + guint bytes_to_copy; + gboolean is_last = TRUE; + DXVA_Slice_HEVC_Short slice_short = { 0, }; + + if (self->bitstream_buffer_size < to_write && self->slice_list->len > 0) { + if (!gst_d3d11_h265_dec_submit_slice_data (self)) { + GST_ERROR_OBJECT (self, "Failed to submit bitstream buffers"); + return FALSE; + } + + if (!gst_d3d11_h265_dec_get_bitstream_buffer (self)) { + GST_ERROR_OBJECT (self, "Failed to get bitstream buffer"); + return FALSE; + } + } + + bytes_to_copy = to_write; + + if (bytes_to_copy > self->bitstream_buffer_size) { + bytes_to_copy = self->bitstream_buffer_size; + is_last = FALSE; + } + + if (bytes_to_copy >= 3 && is_first) { + /* normal case */ + self->bitstream_buffer_bytes[0] = 0; + self->bitstream_buffer_bytes[1] = 0; + self->bitstream_buffer_bytes[2] = 1; + memcpy (self->bitstream_buffer_bytes + 3, + slice->nalu.data + slice->nalu.offset, bytes_to_copy - 3); + } else { + /* when this nal unit date is splitted into two buffer */ + memcpy (self->bitstream_buffer_bytes, + slice->nalu.data + slice->nalu.offset, bytes_to_copy); + } + + slice_short.BSNALunitDataLocation = self->current_offset; + slice_short.SliceBytesInBuffer = bytes_to_copy; + /* wBadSliceChopping: (dxva h265 spec.) + * 0: All bits for the slice are located within the corresponding + * bitstream data buffer + * 1: The bitstream data buffer contains the start of the slice, + * but not the entire slice, because the buffer is full + * 2: The bitstream data buffer contains the end of the slice. + * It does not contain the start of the slice, because the start of + * the slice was located in the previous bitstream data buffer. + * 3: The bitstream data buffer does not contain the start of the slice + * (because the start of the slice was located in the previous + * bitstream data buffer), and it does not contain the end of the slice + * (because the current bitstream data buffer is also full). + */ + if (is_last && is_first) { + slice_short.wBadSliceChopping = 0; + } else if (!is_last && is_first) { + slice_short.wBadSliceChopping = 1; + } else if (is_last && !is_first) { + slice_short.wBadSliceChopping = 2; + } else { + slice_short.wBadSliceChopping = 3; + } + + g_array_append_val (self->slice_list, slice_short); + self->bitstream_buffer_size -= bytes_to_copy; + self->current_offset += bytes_to_copy; + self->bitstream_buffer_bytes += bytes_to_copy; + is_first = FALSE; + to_write -= bytes_to_copy; + } + } + + return TRUE; +} diff --git a/sys/d3d11/gstd3d11h265dec.h b/sys/d3d11/gstd3d11h265dec.h new file mode 100644 index 0000000000..402ccf3355 --- /dev/null +++ b/sys/d3d11/gstd3d11h265dec.h @@ -0,0 +1,88 @@ +/* GStreamer + * Copyright (C) 2019 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifndef __GST_D3D11_H265_DEC_H__ +#define __GST_D3D11_H265_DEC_H__ + +#include "gsth265decoder.h" +#include "gsth265picture.h" +#include "gstd3d11decoder.h" + +G_BEGIN_DECLS + +#define GST_TYPE_D3D11_H265_DEC \ + (gst_d3d11_h265_dec_get_type()) +#define GST_D3D11_H265_DEC(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_D3D11_H265_DEC,GstD3D11H265Dec)) +#define GST_D3D11_H265_DEC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_D3D11_H265_DEC,GstD3D11H265DecClass)) +#define GST_D3D11_H265_DEC_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_D3D11_H265_DEC,GstD3D11H265DecClass)) +#define GST_IS_D3D11_H265_DEC(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_D3D11_H265_DEC)) +#define GST_IS_D3D11_H265_DEC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_D3D11_H265_DEC)) + +struct _GstD3D11H265Dec +{ + GstH265Decoder parent; + + GstVideoCodecState *output_state; + + GstD3D11Device *device; + gint adapter; + + guint width, height; + guint coded_width, coded_height; + guint bitdepth; + guint chroma_format_idc; + GstVideoFormat out_format; + + DXVA_PicEntry_HEVC ref_pic_list[15]; + INT pic_order_cnt_val_list[15]; + UCHAR ref_pic_set_st_curr_before[8]; + UCHAR ref_pic_set_st_curr_after[8]; + UCHAR ref_pic_set_lt_curr[8]; + + /* Array of DXVA_Slice_HEVC_Short */ + GArray *slice_list; + gboolean submit_iq_data; + + GstD3D11Decoder *d3d11_decoder; + + GstH265Picture *current_picture; + + /* Pointing current bitstream buffer */ + guint current_offset; + guint bitstream_buffer_size; + guint8 * bitstream_buffer_bytes; + + gboolean use_d3d11_output; +}; + +struct _GstD3D11H265DecClass +{ + GstH265DecoderClass parent_class; +}; + +GType gst_d3d11_h265_dec_get_type (void); + +G_END_DECLS + +#endif /* __GST_D3D11_H265_DEC_H__ */ diff --git a/sys/d3d11/gsth265decoder.c b/sys/d3d11/gsth265decoder.c new file mode 100644 index 0000000000..5577869566 --- /dev/null +++ b/sys/d3d11/gsth265decoder.c @@ -0,0 +1,1457 @@ +/* GStreamer + * Copyright (C) 2015 Intel Corporation + * Author: Sreerenj Balachandran + * Copyright (C) 2019 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include +#endif + +#include "gsth265decoder.h" + +GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_h265_dec_debug); +#define GST_CAT_DEFAULT gst_d3d11_h265_dec_debug + +typedef enum +{ + GST_H265_DECODER_FORMAT_NONE, + GST_H265_DECODER_FORMAT_HVC1, + GST_H265_DECODER_FORMAT_HEV1, + GST_H265_DECODER_FORMAT_BYTE +} GstH265DecoderFormat; + +typedef enum +{ + GST_H265_DECODER_ALIGN_NONE, + GST_H265_DECODER_ALIGN_NAL, + GST_H265_DECODER_ALIGN_AU +} GstH265DecoderAlign; + +struct _GstH265DecoderPrivate +{ + gint width, height; + + /* input codec_data, if any */ + GstBuffer *codec_data; + guint nal_length_size; + + /* state */ + GstH265DecoderFormat in_format; + GstH265DecoderAlign align; + GstH265Parser *parser; + GstH265Dpb *dpb; + GstFlowReturn last_ret; + + /* vps/sps/pps of the current slice */ + const GstH265VPS *active_vps; + const GstH265SPS *active_sps; + const GstH265PPS *active_pps; + + guint32 SpsMaxLatencyPictures; + gint32 WpOffsetHalfRangeC; + + /* Picture currently being processed/decoded */ + GstH265Picture *current_picture; + + /* Slice (slice header + nalu) currently being processed/decodec */ + GstH265Slice current_slice; + GstH265Slice prev_slice; + GstH265Slice prev_independent_slice; + + gint32 poc; // PicOrderCntVal + gint32 poc_msb; // PicOrderCntMsb + gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header()) + gint32 prev_poc_msb; // prevPicOrderCntMsb + gint32 prev_poc_lsb; // prevPicOrderCntLsb + gint32 prev_tid0pic_poc_lsb; + gint32 prev_tid0pic_poc_msb; + gint32 PocStCurrBefore[16]; + gint32 PocStCurrAfter[16]; + gint32 PocStFoll[16]; + gint32 PocLtCurr[16]; + gint32 PocLtFoll[16]; + + /* PicOrderCount of the previously outputted frame */ + gint last_output_poc; + + gboolean associated_irap_NoRaslOutputFlag; +}; + +#define parent_class gst_h265_decoder_parent_class +G_DEFINE_ABSTRACT_TYPE_WITH_PRIVATE (GstH265Decoder, gst_h265_decoder, + GST_TYPE_VIDEO_DECODER); + +static gboolean gst_h265_decoder_start (GstVideoDecoder * decoder); +static gboolean gst_h265_decoder_stop (GstVideoDecoder * decoder); +static GstFlowReturn gst_h265_decoder_parse (GstVideoDecoder * decoder, + GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos); +static gboolean gst_h265_decoder_set_format (GstVideoDecoder * decoder, + GstVideoCodecState * state); +static GstFlowReturn gst_h265_decoder_finish (GstVideoDecoder * decoder); +static gboolean gst_h265_decoder_flush (GstVideoDecoder * decoder); +static GstFlowReturn gst_h265_decoder_drain (GstVideoDecoder * decoder); + +static gboolean gst_h265_decoder_finish_current_picture (GstH265Decoder * self); +static void gst_h265_decoder_clear_dpb (GstH265Decoder * self); +static gboolean +gst_h265_decoder_output_all_remaining_pics (GstH265Decoder * self); +static gboolean gst_h265_decoder_start_current_picture (GstH265Decoder * self); + +static void +gst_h265_decoder_class_init (GstH265DecoderClass * klass) +{ + GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass); + + decoder_class->start = GST_DEBUG_FUNCPTR (gst_h265_decoder_start); + decoder_class->stop = GST_DEBUG_FUNCPTR (gst_h265_decoder_stop); + decoder_class->parse = GST_DEBUG_FUNCPTR (gst_h265_decoder_parse); + decoder_class->set_format = GST_DEBUG_FUNCPTR (gst_h265_decoder_set_format); + decoder_class->finish = GST_DEBUG_FUNCPTR (gst_h265_decoder_finish); + decoder_class->flush = GST_DEBUG_FUNCPTR (gst_h265_decoder_flush); + decoder_class->drain = GST_DEBUG_FUNCPTR (gst_h265_decoder_drain); +} + +static void +gst_h265_decoder_init 
(GstH265Decoder * self) +{ + gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), FALSE); + + self->priv = gst_h265_decoder_get_instance_private (self); +} + +static gboolean +gst_h265_decoder_start (GstVideoDecoder * decoder) +{ + GstH265Decoder *self = GST_H265_DECODER (decoder); + GstH265DecoderPrivate *priv = self->priv; + + priv->parser = gst_h265_parser_new (); + priv->dpb = gst_h265_dpb_new (); + + return TRUE; +} + +static gboolean +gst_h265_decoder_stop (GstVideoDecoder * decoder) +{ + GstH265Decoder *self = GST_H265_DECODER (decoder); + GstH265DecoderPrivate *priv = self->priv; + + if (self->input_state) { + gst_video_codec_state_unref (self->input_state); + self->input_state = NULL; + } + + gst_clear_buffer (&priv->codec_data); + + if (priv->parser) { + gst_h265_parser_free (priv->parser); + priv->parser = NULL; + } + + if (priv->dpb) { + gst_h265_dpb_free (priv->dpb); + priv->dpb = NULL; + } + + return TRUE; +} + +static gboolean +gst_h265_decoder_parse_vps (GstH265Decoder * self, GstH265NalUnit * nalu) +{ + GstH265DecoderPrivate *priv = self->priv; + GstH265VPS vps; + GstH265ParserResult pres; + gboolean ret = TRUE; + + gst_h265_decoder_finish_current_picture (self); + + pres = gst_h265_parser_parse_vps (priv->parser, nalu, &vps); + if (pres != GST_H265_PARSER_OK) { + GST_WARNING_OBJECT (self, "Failed to parse VPS, result %d", pres); + return FALSE; + } + + GST_LOG_OBJECT (self, "VPS parsed"); + + return ret; +} + +static gboolean +gst_h265_decoder_process_sps (GstH265Decoder * self, GstH265SPS * sps) +{ + GstH265DecoderPrivate *priv = self->priv; + gint max_dpb_size; + gint prev_max_dpb_size; + gint MaxLumaPS; + const gint MaxDpbPicBuf = 6; + gint PicSizeInSamplesY; + guint high_precision_offsets_enabled_flag = 0; + guint bitdepthC = 0; + + /* A.4.1 */ + MaxLumaPS = 35651584; + PicSizeInSamplesY = sps->width * sps->height; + if (PicSizeInSamplesY <= (MaxLumaPS >> 2)) + max_dpb_size = MaxDpbPicBuf * 4; + else if (PicSizeInSamplesY <= (MaxLumaPS >> 1)) + max_dpb_size = MaxDpbPicBuf * 2; + else if (PicSizeInSamplesY <= ((3 * MaxLumaPS) >> 2)) + max_dpb_size = (MaxDpbPicBuf * 4) / 3; + else + max_dpb_size = MaxDpbPicBuf; + + max_dpb_size = MIN (max_dpb_size, 16); + + prev_max_dpb_size = gst_h265_dpb_get_max_num_pics (priv->dpb); + if (priv->width != sps->width || priv->height != sps->height || + prev_max_dpb_size != max_dpb_size) { + GstH265DecoderClass *klass = GST_H265_DECODER_GET_CLASS (self); + + GST_DEBUG_OBJECT (self, + "SPS updated, resolution: %dx%d -> %dx%d, dpb size: %d -> %d", + priv->width, priv->height, sps->width, sps->height, + prev_max_dpb_size, max_dpb_size); + + g_assert (klass->new_sequence); + + if (!klass->new_sequence (self, sps)) { + GST_ERROR_OBJECT (self, "subclass does not want accept new sequence"); + return FALSE; + } + + priv->width = sps->width; + priv->height = sps->height; + + gst_h265_dpb_set_max_num_pics (priv->dpb, max_dpb_size); + } + + if (sps->max_latency_increase_plus1[sps->max_sub_layers_minus1]) { + priv->SpsMaxLatencyPictures = + sps->max_num_reorder_pics[sps->max_sub_layers_minus1] + + sps->max_latency_increase_plus1[sps->max_sub_layers_minus1] - 1; + } + + /* Calculate WpOffsetHalfRangeC: (7-34) + * FIXME: We don't have parser API for sps_range_extension, so + * assuming high_precision_offsets_enabled_flag as zero */ + bitdepthC = sps->bit_depth_chroma_minus8 + 8; + priv->WpOffsetHalfRangeC = + 1 << (high_precision_offsets_enabled_flag ? 
(bitdepthC - 1) : 7); + + GST_DEBUG_OBJECT (self, "Set DPB max size %d", max_dpb_size); + + return TRUE; +} + +static gboolean +gst_h265_decoder_parse_sps (GstH265Decoder * self, GstH265NalUnit * nalu) +{ + GstH265DecoderPrivate *priv = self->priv; + GstH265SPS sps; + GstH265ParserResult pres; + gboolean ret = TRUE; + + gst_h265_decoder_finish_current_picture (self); + + pres = gst_h265_parser_parse_sps (priv->parser, nalu, &sps, TRUE); + if (pres != GST_H265_PARSER_OK) { + GST_WARNING_OBJECT (self, "Failed to parse SPS, result %d", pres); + return FALSE; + } + + GST_LOG_OBJECT (self, "SPS parsed"); + + if (!gst_h265_decoder_process_sps (self, &sps)) + ret = FALSE; + + return ret; +} + +static gboolean +gst_h265_decoder_parse_pps (GstH265Decoder * self, GstH265NalUnit * nalu) +{ + GstH265DecoderPrivate *priv = self->priv; + GstH265PPS pps; + GstH265ParserResult pres; + + gst_h265_decoder_finish_current_picture (self); + + pres = gst_h265_parser_parse_pps (priv->parser, nalu, &pps); + if (pres != GST_H265_PARSER_OK) { + GST_WARNING_OBJECT (self, "Failed to parse PPS, result %d", pres); + return FALSE; + } + + GST_LOG_OBJECT (self, "PPS parsed"); + + return TRUE; +} + +static gboolean +gst_h265_decoder_decode_slice (GstH265Decoder * self) +{ + GstH265DecoderClass *klass = GST_H265_DECODER_GET_CLASS (self); + GstH265DecoderPrivate *priv = self->priv; + GstH265Slice *slice = &priv->current_slice; + GstH265Picture *picture = priv->current_picture; + + if (!picture) { + GST_ERROR_OBJECT (self, "No current picture"); + return FALSE; + } + + g_assert (klass->decode_slice); + + return klass->decode_slice (self, picture, slice); +} + +static gboolean +gst_h265_decoder_preprocess_slice (GstH265Decoder * self, GstH265Slice * slice) +{ + GstH265DecoderPrivate *priv = self->priv; + const GstH265SliceHdr *slice_hdr = &slice->header; + const GstH265NalUnit *nalu = &slice->nalu; + + if (priv->current_picture && slice_hdr->first_slice_segment_in_pic_flag) { + GST_WARNING_OBJECT (self, + "Current picture is not finished but slice header has " + "first_slice_segment_in_pic_flag"); + return FALSE; + } + + if (IS_IDR (nalu->type)) { + GST_DEBUG_OBJECT (self, "IDR nalu, clear dpb"); + gst_h265_decoder_flush (GST_VIDEO_DECODER (self)); + } + + return TRUE; +} + +static gboolean +gst_h265_decoder_parse_slice (GstH265Decoder * self, GstH265NalUnit * nalu, + GstClockTime pts) +{ + GstH265DecoderPrivate *priv = self->priv; + GstH265ParserResult pres = GST_H265_PARSER_OK; + + memset (&priv->current_slice, 0, sizeof (GstH265Slice)); + + pres = gst_h265_parser_parse_slice_hdr (priv->parser, nalu, + &priv->current_slice.header); + + if (pres != GST_H265_PARSER_OK) { + GST_ERROR_OBJECT (self, "Failed to parse slice header, ret %d", pres); + memset (&priv->current_slice, 0, sizeof (GstH265Slice)); + + return FALSE; + } + + priv->current_slice.nalu = *nalu; + + if (!gst_h265_decoder_preprocess_slice (self, &priv->current_slice)) + return FALSE; + + priv->active_pps = priv->current_slice.header.pps; + priv->active_sps = priv->active_pps->sps; + + if (!priv->current_picture) { + GstH265DecoderClass *klass = GST_H265_DECODER_GET_CLASS (self); + GstH265Picture *picture; + gboolean ret = TRUE; + + picture = gst_h265_picture_new (); + picture->pts = pts; + + if (klass->new_picture) + ret = klass->new_picture (self, picture); + + if (!ret) { + GST_ERROR_OBJECT (self, "subclass does not want accept new picture"); + gst_h265_picture_unref (picture); + return FALSE; + } + + priv->current_picture = picture; + + if 
(!gst_h265_decoder_start_current_picture (self)) { + GST_ERROR_OBJECT (self, "start picture failed"); + return FALSE; + } + + /* this picture was dropped */ + if (!priv->current_picture) + return TRUE; + } + + return gst_h265_decoder_decode_slice (self); +} + +static GstFlowReturn +gst_h265_decoder_parse_nal (GstH265Decoder * self, const guint8 * data, + gsize size, GstClockTime pts, gboolean at_eos, gsize * consumed_size) +{ + GstH265DecoderPrivate *priv = self->priv; + GstH265ParserResult pres; + GstH265NalUnit nalu; + gboolean ret = TRUE; + + *consumed_size = 0; + + if (priv->in_format == GST_H265_DECODER_FORMAT_HVC1 || + priv->in_format == GST_H265_DECODER_FORMAT_HEV1) { + if (priv->nal_length_size < 1 || priv->nal_length_size > 4) { + GST_ERROR_OBJECT (self, + "invalid nal length size %d", priv->nal_length_size); + return GST_FLOW_ERROR; + } + + pres = gst_h265_parser_identify_nalu_hevc (priv->parser, + data, 0, size, priv->nal_length_size, &nalu); + + if (pres != GST_H265_PARSER_OK) { + GST_WARNING_OBJECT (self, "parsing hevc nal ret %d", pres); + return GST_FLOW_ERROR; + } + } else { + if (size < 5) { + GST_DEBUG_OBJECT (self, "Too small data"); + return GST_VIDEO_DECODER_FLOW_NEED_DATA; + } + + pres = gst_h265_parser_identify_nalu (priv->parser, data, 0, size, &nalu); + + if (pres != GST_H265_PARSER_OK) { + if (pres == GST_H265_PARSER_NO_NAL_END) { + if (at_eos || priv->align == GST_H265_DECODER_ALIGN_AU) { + /* assume au boundary */ + } else { + return GST_VIDEO_DECODER_FLOW_NEED_DATA; + } + } else { + GST_WARNING_OBJECT (self, "parser ret %d", pres); + return GST_FLOW_ERROR; + } + } + } + + GST_LOG_OBJECT (self, "Parsed nal type: %d, offset %d, size %d", + nalu.type, nalu.offset, nalu.size); + + switch (nalu.type) { + case GST_H265_NAL_VPS: + ret = gst_h265_decoder_parse_vps (self, &nalu); + break; + case GST_H265_NAL_SPS: + ret = gst_h265_decoder_parse_sps (self, &nalu); + break; + case GST_H265_NAL_PPS: + ret = gst_h265_decoder_parse_pps (self, &nalu); + break; + case GST_H265_NAL_SLICE_TRAIL_N: + case GST_H265_NAL_SLICE_TRAIL_R: + case GST_H265_NAL_SLICE_TSA_N: + case GST_H265_NAL_SLICE_TSA_R: + case GST_H265_NAL_SLICE_STSA_N: + case GST_H265_NAL_SLICE_STSA_R: + case GST_H265_NAL_SLICE_RADL_N: + case GST_H265_NAL_SLICE_RADL_R: + case GST_H265_NAL_SLICE_RASL_N: + case GST_H265_NAL_SLICE_RASL_R: + case GST_H265_NAL_SLICE_BLA_W_LP: + case GST_H265_NAL_SLICE_BLA_W_RADL: + case GST_H265_NAL_SLICE_BLA_N_LP: + case GST_H265_NAL_SLICE_IDR_W_RADL: + case GST_H265_NAL_SLICE_IDR_N_LP: + case GST_H265_NAL_SLICE_CRA_NUT: + ret = gst_h265_decoder_parse_slice (self, &nalu, pts); + break; + default: + break; + } + + if (consumed_size) + *consumed_size = nalu.offset + nalu.size; + + if (!ret) + return GST_FLOW_ERROR; + + return GST_FLOW_OK; +} + +static GstFlowReturn +gst_h265_decoder_parse (GstVideoDecoder * decoder, + GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos) +{ + GstH265Decoder *self = GST_H265_DECODER (decoder); + GstH265DecoderPrivate *priv = self->priv; + GstFlowReturn ret = GST_FLOW_OK; + guint size; + const guint8 *data; + gsize consumed = 0; + + /* The return from have_frame() or output_picture() */ + priv->last_ret = GST_FLOW_OK; + + size = gst_adapter_available (adapter); + + data = (const guint8 *) gst_adapter_map (adapter, size); + ret = gst_h265_decoder_parse_nal (self, data, size, + gst_adapter_prev_pts (adapter, NULL), at_eos, &consumed); + gst_adapter_unmap (adapter); + + if (consumed) { + GST_TRACE_OBJECT (self, "consumed size %" G_GSIZE_FORMAT, 
consumed); + gst_video_decoder_add_to_frame (decoder, consumed); + } + + if (ret == GST_FLOW_ERROR) + goto error; + + /* When aligned to AU and no more input data is available, + * finish the current picture if any */ + if (priv->align == GST_H265_DECODER_ALIGN_AU && + !gst_adapter_available (adapter)) { + gst_h265_decoder_finish_current_picture (self); + } + + /* check last flow return again */ + if (ret == GST_FLOW_ERROR) + goto error; + + return priv->last_ret; + +error: + GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE, + ("Failed to decode data"), (NULL), ret); + + return ret; +} + +static void +gst_h265_decoder_format_from_caps (GstH265Decoder * self, GstCaps * caps, + GstH265DecoderFormat * format, GstH265DecoderAlign * align) +{ + if (format) + *format = GST_H265_DECODER_FORMAT_NONE; + + if (align) + *align = GST_H265_DECODER_ALIGN_NONE; + + if (!gst_caps_is_fixed (caps)) { + GST_WARNING_OBJECT (self, "Caps are not fixed"); + return; + } + + GST_DEBUG_OBJECT (self, "parsing caps: %" GST_PTR_FORMAT, caps); + + if (caps && gst_caps_get_size (caps) > 0) { + GstStructure *s = gst_caps_get_structure (caps, 0); + const gchar *str = NULL; + + if (format) { + if ((str = gst_structure_get_string (s, "stream-format"))) { + if (strcmp (str, "hvc1") == 0) + *format = GST_H265_DECODER_FORMAT_HVC1; + else if (strcmp (str, "hev1") == 0) + *format = GST_H265_DECODER_FORMAT_HEV1; + else if (strcmp (str, "byte-stream") == 0) + *format = GST_H265_DECODER_FORMAT_BYTE; + } + } + + if (align) { + if ((str = gst_structure_get_string (s, "alignment"))) { + if (strcmp (str, "au") == 0) + *align = GST_H265_DECODER_ALIGN_AU; + else if (strcmp (str, "nal") == 0) + *align = GST_H265_DECODER_ALIGN_NAL; + } + } + } +} + +static gboolean +gst_h265_decoder_parse_codec_data (GstH265Decoder * self, const guint8 * data, + gsize size) +{ + GstH265DecoderPrivate *priv = self->priv; + guint num_nal_arrays; + guint off; + guint num_nals, i, j; + GstH265ParserResult pres; + GstH265NalUnit nalu; + + /* parse the hvcC data */ + if (size < 23) { + GST_WARNING_OBJECT (self, "hvcC too small"); + return FALSE; + } + + /* wrong hvcC version */ + if (data[0] != 0 && data[0] != 1) { + return FALSE; + } + + priv->nal_length_size = (data[21] & 0x03) + 1; + GST_DEBUG_OBJECT (self, "nal length size %u", priv->nal_length_size); + + num_nal_arrays = data[22]; + off = 23; + + for (i = 0; i < num_nal_arrays; i++) { + if (off + 3 >= size) { + GST_WARNING_OBJECT (self, "hvcC too small"); + return FALSE; + } + + num_nals = GST_READ_UINT16_BE (data + off + 1); + off += 3; + for (j = 0; j < num_nals; j++) { + pres = gst_h265_parser_identify_nalu_hevc (priv->parser, + data, off, size, 2, &nalu); + + if (pres != GST_H265_PARSER_OK) { + GST_WARNING_OBJECT (self, "hvcC too small"); + return FALSE; + } + + switch (nalu.type) { + case GST_H265_NAL_VPS: + gst_h265_decoder_parse_vps (self, &nalu); + break; + case GST_H265_NAL_SPS: + gst_h265_decoder_parse_sps (self, &nalu); + break; + case GST_H265_NAL_PPS: + gst_h265_decoder_parse_pps (self, &nalu); + break; + default: + break; + } + + off = nalu.offset + nalu.size; + } + } + + return TRUE; +} + +static gboolean +gst_h265_decoder_set_format (GstVideoDecoder * decoder, + GstVideoCodecState * state) +{ + GstH265Decoder *self = GST_H265_DECODER (decoder); + GstH265DecoderPrivate *priv = self->priv; + + GST_DEBUG_OBJECT (decoder, "Set format"); + + if (self->input_state) + gst_video_codec_state_unref (self->input_state); + + self->input_state = gst_video_codec_state_ref (state); + + if (state->caps) { + 
GstStructure *str; + const GValue *codec_data_value; + GstH265DecoderFormat format; + GstH265DecoderAlign align; + + gst_h265_decoder_format_from_caps (self, state->caps, &format, &align); + + str = gst_caps_get_structure (state->caps, 0); + codec_data_value = gst_structure_get_value (str, "codec_data"); + + if (GST_VALUE_HOLDS_BUFFER (codec_data_value)) { + gst_buffer_replace (&priv->codec_data, + gst_value_get_buffer (codec_data_value)); + } else { + gst_buffer_replace (&priv->codec_data, NULL); + } + + if (format == GST_H265_DECODER_FORMAT_NONE) { + /* codec_data implies packetized */ + if (codec_data_value != NULL) { + GST_WARNING_OBJECT (self, + "video/x-h265 caps with codec_data but no stream-format=hev1 or hvc1"); + format = GST_H265_DECODER_FORMAT_HEV1; + } else { + /* otherwise assume bytestream input */ + GST_WARNING_OBJECT (self, + "video/x-h265 caps without codec_data or stream-format"); + format = GST_H265_DECODER_FORMAT_BYTE; + } + } + + if (format == GST_H265_DECODER_FORMAT_HEV1 || + format == GST_H265_DECODER_FORMAT_HVC1) { + if (codec_data_value == NULL) { + /* Try it with size 4 anyway */ + priv->nal_length_size = 4; + GST_WARNING_OBJECT (self, + "packetized format without codec data, assuming nal length size is 4"); + } + + /* packetized hvc1/hev1 implies alignment=au */ + if (align == GST_H265_DECODER_ALIGN_NONE) + align = GST_H265_DECODER_ALIGN_AU; + } + + if (format == GST_H265_DECODER_FORMAT_BYTE) { + if (codec_data_value != NULL) { + GST_WARNING_OBJECT (self, "bytestream with codec data"); + } + } + + priv->in_format = format; + priv->align = align; + } + + if (priv->codec_data) { + GstMapInfo map; + + gst_buffer_map (priv->codec_data, &map, GST_MAP_READ); + gst_h265_decoder_parse_codec_data (self, map.data, map.size); + gst_buffer_unmap (priv->codec_data, &map); + } + + return TRUE; +} + +static gboolean +gst_h265_decoder_flush (GstVideoDecoder * decoder) +{ + GstH265Decoder *self = GST_H265_DECODER (decoder); + gboolean ret = TRUE; + + if (!gst_h265_decoder_output_all_remaining_pics (self)) + ret = FALSE; + + gst_h265_decoder_clear_dpb (self); + + return ret; +} + +static GstFlowReturn +gst_h265_decoder_drain (GstVideoDecoder * decoder) +{ + GstH265Decoder *self = GST_H265_DECODER (decoder); + GstH265DecoderPrivate *priv = self->priv; + + gst_h265_decoder_flush (decoder); + + return priv->last_ret; +} + +static GstFlowReturn +gst_h265_decoder_finish (GstVideoDecoder * decoder) +{ + return gst_h265_decoder_drain (decoder); +} + +static gboolean +gst_h265_decoder_fill_picture_from_slice (GstH265Decoder * self, + const GstH265Slice * slice, GstH265Picture * picture) +{ + GstH265DecoderPrivate *priv = self->priv; + const GstH265SliceHdr *slice_hdr = &slice->header; + const GstH265NalUnit *nalu = &slice->nalu; + + if (nalu->type >= GST_H265_NAL_SLICE_BLA_W_LP && + nalu->type <= GST_H265_NAL_SLICE_CRA_NUT) + picture->RapPicFlag = TRUE; + + /* FIXME: Use SEI header values */ + picture->field = GST_H265_PICTURE_FIELD_FRAME; + + /* NoRaslOutputFlag == 1 if the current picture is + * 1) an IDR picture + * 2) a BLA picture + * 3) a CRA picture that is the first access unit in the bitstream + * 4) first picture that follows an end of sequence NAL unit in decoding order + * 5) has HandleCraAsBlaFlag == 1 (set by external means, so not considered here) + */ + if (IS_IDR (nalu->type) || IS_BLA (nalu->type) || IS_CRA (nalu->type)) { + picture->NoRaslOutputFlag = TRUE; + } + + if (IS_IRAP (nalu->type)) { + picture->IntraPicFlag = TRUE; + priv->associated_irap_NoRaslOutputFlag = 
picture->NoRaslOutputFlag; + } + + if (IS_RASL (nalu->type) && priv->associated_irap_NoRaslOutputFlag) { + picture->output_flag = FALSE; + } else { + picture->output_flag = slice_hdr->pic_output_flag; + } + + return TRUE; +} + +#define RSV_VCL_N10 10 +#define RSV_VCL_N12 12 +#define RSV_VCL_N14 14 + +static gboolean +nal_is_ref (guint8 nal_type) +{ + gboolean ret = FALSE; + switch (nal_type) { + case GST_H265_NAL_SLICE_TRAIL_N: + case GST_H265_NAL_SLICE_TSA_N: + case GST_H265_NAL_SLICE_STSA_N: + case GST_H265_NAL_SLICE_RADL_N: + case GST_H265_NAL_SLICE_RASL_N: + case RSV_VCL_N10: + case RSV_VCL_N12: + case RSV_VCL_N14: + ret = FALSE; + break; + default: + ret = TRUE; + break; + } + return ret; +} + +static gboolean +gst_h265_decoder_calculate_poc (GstH265Decoder * self, + const GstH265Slice * slice, GstH265Picture * picture) +{ + GstH265DecoderPrivate *priv = self->priv; + const GstH265SliceHdr *slice_hdr = &slice->header; + const GstH265NalUnit *nalu = &slice->nalu; + const GstH265SPS *sps = priv->active_sps; + gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4); + + GST_DEBUG_OBJECT (self, "decode PicOrderCntVal"); + + priv->prev_poc_lsb = priv->poc_lsb; + priv->prev_poc_msb = priv->poc_msb; + + if (!(IS_IRAP (nalu->type) && picture->NoRaslOutputFlag)) { + priv->prev_poc_lsb = priv->prev_tid0pic_poc_lsb; + priv->prev_poc_msb = priv->prev_tid0pic_poc_msb; + } + + /* Finding PicOrderCntMsb */ + if (IS_IRAP (nalu->type) && picture->NoRaslOutputFlag) + priv->poc_msb = 0; + else { + /* (8-1) */ + if ((slice_hdr->pic_order_cnt_lsb < priv->prev_poc_lsb) && + ((priv->prev_poc_lsb - slice_hdr->pic_order_cnt_lsb) >= + (MaxPicOrderCntLsb / 2))) + priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb; + + else if ((slice_hdr->pic_order_cnt_lsb > priv->prev_poc_lsb) && + ((slice_hdr->pic_order_cnt_lsb - priv->prev_poc_lsb) > + (MaxPicOrderCntLsb / 2))) + priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb; + + else + priv->poc_msb = priv->prev_poc_msb; + } + + /* (8-2) */ + priv->poc = picture->pic_order_cnt = + priv->poc_msb + slice_hdr->pic_order_cnt_lsb; + priv->poc_lsb = picture->pic_order_cnt_lsb = slice_hdr->pic_order_cnt_lsb; + + if (IS_IDR (nalu->type)) { + picture->pic_order_cnt = 0; + picture->pic_order_cnt_lsb = 0; + priv->poc_lsb = 0; + priv->poc_msb = 0; + priv->prev_poc_lsb = 0; + priv->prev_poc_msb = 0; + priv->prev_tid0pic_poc_lsb = 0; + priv->prev_tid0pic_poc_msb = 0; + } + + GST_DEBUG_OBJECT (self, + "PicOrderCntVal %d, (lsb %d)", picture->pic_order_cnt, + picture->pic_order_cnt_lsb); + + if (nalu->temporal_id_plus1 == 1 && !IS_RASL (nalu->type) && + !IS_RADL (nalu->type) && nal_is_ref (nalu->type)) { + priv->prev_tid0pic_poc_lsb = slice_hdr->pic_order_cnt_lsb; + priv->prev_tid0pic_poc_msb = priv->poc_msb; + } + + return TRUE; +} + +static gboolean +gst_h265_decoder_init_current_picture (GstH265Decoder * self) +{ + GstH265DecoderPrivate *priv = self->priv; + + if (!gst_h265_decoder_fill_picture_from_slice (self, &priv->current_slice, + priv->current_picture)) { + return FALSE; + } + + if (!gst_h265_decoder_calculate_poc (self, + &priv->current_slice, priv->current_picture)) + return FALSE; + + return TRUE; +} + +static gboolean +has_entry_in_rps (GstH265Picture * dpb_pic, + GstH265Picture ** rps_list, guint rps_list_length) +{ + guint i; + + if (!dpb_pic || !rps_list || !rps_list_length) + return FALSE; + + for (i = 0; i < rps_list_length; i++) { + if (rps_list[i] && rps_list[i]->pic_order_cnt == dpb_pic->pic_order_cnt) + return TRUE; + } + return FALSE; 
+} + +static void +gst_h265_decoder_derive_and_mark_rps (GstH265Decoder * self, + GstH265Picture * picture, gint32 * CurrDeltaPocMsbPresentFlag, + gint32 * FollDeltaPocMsbPresentFlag) +{ + GstH265DecoderPrivate *priv = self->priv; + guint i; + GArray *dpb_array; + + for (i = 0; i < 16; i++) { + gst_h265_picture_replace (&self->RefPicSetLtCurr[i], NULL); + gst_h265_picture_replace (&self->RefPicSetLtFoll[i], NULL); + gst_h265_picture_replace (&self->RefPicSetStCurrBefore[i], NULL); + gst_h265_picture_replace (&self->RefPicSetStCurrAfter[i], NULL); + gst_h265_picture_replace (&self->RefPicSetStFoll[i], NULL); + } + + /* (8-6) */ + for (i = 0; i < self->NumPocLtCurr; i++) { + if (!CurrDeltaPocMsbPresentFlag[i]) { + self->RefPicSetLtCurr[i] = + gst_h265_dpb_get_ref_by_poc_lsb (priv->dpb, priv->PocLtCurr[i]); + } else { + self->RefPicSetLtCurr[i] = + gst_h265_dpb_get_ref_by_poc (priv->dpb, priv->PocLtCurr[i]); + } + } + + for (i = 0; i < self->NumPocLtFoll; i++) { + if (!FollDeltaPocMsbPresentFlag[i]) { + self->RefPicSetLtFoll[i] = + gst_h265_dpb_get_ref_by_poc_lsb (priv->dpb, priv->PocLtFoll[i]); + } else { + self->RefPicSetLtFoll[i] = + gst_h265_dpb_get_ref_by_poc (priv->dpb, priv->PocLtFoll[i]); + } + } + + /* Mark all ref pics in RefPicSetLtCurr and RefPicSetLtFol as long_term_refs */ + for (i = 0; i < self->NumPocLtCurr; i++) { + if (self->RefPicSetLtCurr[i]) { + self->RefPicSetLtCurr[i]->ref = TRUE; + self->RefPicSetLtCurr[i]->long_term = TRUE; + } + } + + for (i = 0; i < self->NumPocLtFoll; i++) { + if (self->RefPicSetLtFoll[i]) { + self->RefPicSetLtFoll[i]->ref = TRUE; + self->RefPicSetLtFoll[i]->long_term = TRUE; + } + } + + /* (8-7) */ + for (i = 0; i < self->NumPocStCurrBefore; i++) { + self->RefPicSetStCurrBefore[i] = + gst_h265_dpb_get_short_ref_by_poc (priv->dpb, priv->PocStCurrBefore[i]); + } + + for (i = 0; i < self->NumPocStCurrAfter; i++) { + self->RefPicSetStCurrAfter[i] = + gst_h265_dpb_get_short_ref_by_poc (priv->dpb, priv->PocStCurrAfter[i]); + } + + for (i = 0; i < self->NumPocStFoll; i++) { + self->RefPicSetStFoll[i] = + gst_h265_dpb_get_short_ref_by_poc (priv->dpb, priv->PocStFoll[i]); + } + + /* Mark all dpb pics not beloging to RefPicSet*[] as unused for ref */ + dpb_array = gst_h265_dpb_get_pictures_all (priv->dpb); + for (i = 0; i < dpb_array->len; i++) { + GstH265Picture *dpb_pic = g_array_index (dpb_array, GstH265Picture *, i); + + if (dpb_pic && + !has_entry_in_rps (dpb_pic, self->RefPicSetLtCurr, self->NumPocLtCurr) + && !has_entry_in_rps (dpb_pic, self->RefPicSetLtFoll, + self->NumPocLtFoll) + && !has_entry_in_rps (dpb_pic, self->RefPicSetStCurrAfter, + self->NumPocStCurrAfter) + && !has_entry_in_rps (dpb_pic, self->RefPicSetStCurrBefore, + self->NumPocStCurrBefore) + && !has_entry_in_rps (dpb_pic, self->RefPicSetStFoll, + self->NumPocStFoll)) { + GST_LOG_OBJECT (self, "Mark Picture %p (poc %d) as non-ref", dpb_pic, + dpb_pic->pic_order_cnt); + dpb_pic->ref = FALSE; + dpb_pic->long_term = FALSE; + } + } + + g_array_unref (dpb_array); +} + +static gboolean +gst_h265_decoder_prepare_rps (GstH265Decoder * self, const GstH265Slice * slice, + GstH265Picture * picture) +{ + GstH265DecoderPrivate *priv = self->priv; + gint32 CurrDeltaPocMsbPresentFlag[16] = { 0, }; + gint32 FollDeltaPocMsbPresentFlag[16] = { 0, }; + const GstH265SliceHdr *slice_hdr = &slice->header; + const GstH265NalUnit *nalu = &slice->nalu; + const GstH265SPS *sps = priv->active_sps; + guint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4); + gint i, j, k; + + /* if it is an 
irap pic, set all ref pics in dpb as unused for ref */ + if (IS_IRAP (nalu->type) && picture->NoRaslOutputFlag) { + GST_DEBUG_OBJECT (self, "Mark all pictures in DPB as non-ref"); + gst_h265_dpb_mark_all_non_ref (priv->dpb); + } + + /* Reset everything for IDR */ + if (IS_IDR (nalu->type)) { + memset (priv->PocStCurrBefore, 0, sizeof (priv->PocStCurrBefore)); + memset (priv->PocStCurrAfter, 0, sizeof (priv->PocStCurrAfter)); + memset (priv->PocStFoll, 0, sizeof (priv->PocStFoll)); + memset (priv->PocLtCurr, 0, sizeof (priv->PocLtCurr)); + memset (priv->PocLtFoll, 0, sizeof (priv->PocLtFoll)); + self->NumPocStCurrBefore = self->NumPocStCurrAfter = self->NumPocStFoll = 0; + self->NumPocLtCurr = self->NumPocLtFoll = 0; + } else { + const GstH265ShortTermRefPicSet *stRefPic = NULL; + gint32 num_lt_pics, pocLt; + gint32 PocLsbLt[16] = { 0, }; + gint32 UsedByCurrPicLt[16] = { 0, }; + gint32 DeltaPocMsbCycleLt[16] = { 0, }; + gint numtotalcurr = 0; + + /* this is based on CurrRpsIdx described in spec */ + if (!slice_hdr->short_term_ref_pic_set_sps_flag) + stRefPic = &slice_hdr->short_term_ref_pic_sets; + else if (sps->num_short_term_ref_pic_sets) + stRefPic = + &sps->short_term_ref_pic_set[slice_hdr->short_term_ref_pic_set_idx]; + + g_assert (stRefPic != NULL); + + GST_LOG_OBJECT (self, + "NumDeltaPocs: %d, NumNegativePics: %d, NumPositivePics %d", + stRefPic->NumDeltaPocs, stRefPic->NumNegativePics, + stRefPic->NumPositivePics); + + for (i = 0, j = 0, k = 0; i < stRefPic->NumNegativePics; i++) { + if (stRefPic->UsedByCurrPicS0[i]) { + priv->PocStCurrBefore[j++] = + picture->pic_order_cnt + stRefPic->DeltaPocS0[i]; + numtotalcurr++; + } else + priv->PocStFoll[k++] = picture->pic_order_cnt + stRefPic->DeltaPocS0[i]; + } + self->NumPocStCurrBefore = j; + for (i = 0, j = 0; i < stRefPic->NumPositivePics; i++) { + if (stRefPic->UsedByCurrPicS1[i]) { + priv->PocStCurrAfter[j++] = + picture->pic_order_cnt + stRefPic->DeltaPocS1[i]; + numtotalcurr++; + } else + priv->PocStFoll[k++] = picture->pic_order_cnt + stRefPic->DeltaPocS1[i]; + } + self->NumPocStCurrAfter = j; + self->NumPocStFoll = k; + num_lt_pics = slice_hdr->num_long_term_sps + slice_hdr->num_long_term_pics; + /* The variables PocLsbLt[i] and UsedByCurrPicLt[i] are derived as follows: */ + for (i = 0; i < num_lt_pics; i++) { + if (i < slice_hdr->num_long_term_sps) { + PocLsbLt[i] = sps->lt_ref_pic_poc_lsb_sps[slice_hdr->lt_idx_sps[i]]; + UsedByCurrPicLt[i] = + sps->used_by_curr_pic_lt_sps_flag[slice_hdr->lt_idx_sps[i]]; + } else { + PocLsbLt[i] = slice_hdr->poc_lsb_lt[i]; + UsedByCurrPicLt[i] = slice_hdr->used_by_curr_pic_lt_flag[i]; + } + if (UsedByCurrPicLt[i]) + numtotalcurr++; + } + + self->NumPocTotalCurr = numtotalcurr; + + /* The variable DeltaPocMsbCycleLt[i] is derived as follows: (7-38) */ + for (i = 0; i < num_lt_pics; i++) { + if (i == 0 || i == slice_hdr->num_long_term_sps) + DeltaPocMsbCycleLt[i] = slice_hdr->delta_poc_msb_cycle_lt[i]; + else + DeltaPocMsbCycleLt[i] = + slice_hdr->delta_poc_msb_cycle_lt[i] + DeltaPocMsbCycleLt[i - 1]; + } + + /* (8-5) */ + for (i = 0, j = 0, k = 0; i < num_lt_pics; i++) { + pocLt = PocLsbLt[i]; + if (slice_hdr->delta_poc_msb_present_flag[i]) + pocLt += + picture->pic_order_cnt - DeltaPocMsbCycleLt[i] * MaxPicOrderCntLsb - + slice_hdr->pic_order_cnt_lsb; + if (UsedByCurrPicLt[i]) { + priv->PocLtCurr[j] = pocLt; + CurrDeltaPocMsbPresentFlag[j++] = + slice_hdr->delta_poc_msb_present_flag[i]; + } else { + priv->PocLtFoll[k] = pocLt; + FollDeltaPocMsbPresentFlag[k++] = + 
slice_hdr->delta_poc_msb_present_flag[i]; + } + } + self->NumPocLtCurr = j; + self->NumPocLtFoll = k; + } + + GST_LOG_OBJECT (self, "NumPocStCurrBefore: %d", self->NumPocStCurrBefore); + GST_LOG_OBJECT (self, "NumPocStCurrAfter: %d", self->NumPocStCurrAfter); + GST_LOG_OBJECT (self, "NumPocStFoll: %d", self->NumPocStFoll); + GST_LOG_OBJECT (self, "NumPocLtCurr: %d", self->NumPocLtCurr); + GST_LOG_OBJECT (self, "NumPocLtFoll: %d", self->NumPocLtFoll); + GST_LOG_OBJECT (self, "NumPocTotalCurr: %d", self->NumPocTotalCurr); + + /* the derivation process for the RPS and the picture marking */ + gst_h265_decoder_derive_and_mark_rps (self, picture, + CurrDeltaPocMsbPresentFlag, FollDeltaPocMsbPresentFlag); + + return TRUE; +} + +static void +gst_h265_decoder_clear_dpb (GstH265Decoder * self) +{ + GstH265DecoderPrivate *priv = self->priv; + + gst_h265_dpb_clear (priv->dpb); + priv->last_output_poc = -1; +} + +static void +gst_h265_decoder_do_output_picture (GstH265Decoder * self, + GstH265Picture * picture) +{ + GstH265DecoderPrivate *priv = self->priv; + GstH265DecoderClass *klass; + + picture->outputted = TRUE; + + if (picture->pic_order_cnt < priv->last_output_poc) { + GST_WARNING_OBJECT (self, + "Outputting out of order %d -> %d, likely a broken stream", + priv->last_output_poc, picture->pic_order_cnt); + } + + priv->last_output_poc = picture->pic_order_cnt; + + klass = GST_H265_DECODER_GET_CLASS (self); + + if (klass->output_picture) + priv->last_ret = klass->output_picture (self, picture); +} + +static gint +poc_asc_compare (const GstH265Picture * a, const GstH265Picture * b) +{ + return a->pic_order_cnt > b->pic_order_cnt; +} + +static gboolean +gst_h265_decoder_output_all_remaining_pics (GstH265Decoder * self) +{ + GstH265DecoderPrivate *priv = self->priv; + GList *to_output = NULL; + GList *iter; + + gst_h265_dpb_get_pictures_not_outputted (priv->dpb, &to_output); + + to_output = g_list_sort (to_output, (GCompareFunc) poc_asc_compare); + + for (iter = to_output; iter; iter = g_list_next (iter)) { + GstH265Picture *picture = (GstH265Picture *) iter->data; + + GST_LOG_OBJECT (self, "Output picture %p (poc %d)", picture, + picture->pic_order_cnt); + gst_h265_decoder_do_output_picture (self, picture); + } + + if (to_output) + g_list_free_full (to_output, (GDestroyNotify) gst_h265_picture_unref); + + return TRUE; +} + +/* C.5.2.2 */ +static gboolean +gst_h265_decoder_dpb_init (GstH265Decoder * self, const GstH265Slice * slice, + GstH265Picture * picture) +{ + const GstH265SliceHdr *slice_hdr = &slice->header; + const GstH265NalUnit *nalu = &slice->nalu; + + if (IS_IRAP (nalu->type) && picture->NoRaslOutputFlag) { + + if (nalu->type == GST_H265_NAL_SLICE_CRA_NUT) + picture->NoOutputOfPriorPicsFlag = TRUE; + else + picture->NoOutputOfPriorPicsFlag = + slice_hdr->no_output_of_prior_pics_flag; + + if (picture->NoOutputOfPriorPicsFlag) { + GST_DEBUG_OBJECT (self, "Clear dpb"); + gst_h265_decoder_flush (GST_VIDEO_DECODER (self)); + } + } + + return TRUE; +} + +static gboolean +gst_h265_decoder_start_current_picture (GstH265Decoder * self) +{ + GstH265DecoderClass *klass; + GstH265DecoderPrivate *priv = self->priv; + gboolean ret = TRUE; + + g_assert (priv->current_picture != NULL); + g_assert (priv->active_sps != NULL); + g_assert (priv->active_pps != NULL); + + if (!gst_h265_decoder_init_current_picture (self)) + return FALSE; + + /* Drop all RASL pictures having NoRaslOutputFlag is TRUE for the + * associated IRAP picture */ + if (IS_RASL (priv->current_slice.nalu.type) && + 
priv->associated_irap_NoRaslOutputFlag) { + GST_DEBUG_OBJECT (self, "Drop current picture"); + gst_h265_picture_replace (&priv->current_picture, NULL); + return TRUE; + } + + gst_h265_decoder_prepare_rps (self, &priv->current_slice, + priv->current_picture); + + gst_h265_decoder_dpb_init (self, &priv->current_slice, priv->current_picture); + + klass = GST_H265_DECODER_GET_CLASS (self); + if (klass->start_picture) + ret = klass->start_picture (self, priv->current_picture, + &priv->current_slice, priv->dpb); + + if (!ret) { + GST_ERROR_OBJECT (self, "subclass does not want to start picture"); + return FALSE; + } + + return TRUE; +} + +static gboolean +gst_h265_decoder_finish_picture (GstH265Decoder * self, + GstH265Picture * picture) +{ + GstH265DecoderPrivate *priv = self->priv; + const GstH265SPS *sps = priv->active_sps; + GList *not_outputted = NULL; + guint num_remaining; + GList *iter; +#ifndef GST_DISABLE_GST_DEBUG + gint i; +#endif + + /* Remove unused (for reference or later output) pictures from DPB, marking + * them as such */ + gst_h265_dpb_delete_unused (priv->dpb); + + GST_LOG_OBJECT (self, + "Finishing picture %p (poc %d), entries in DPB %d", + picture, picture->pic_order_cnt, gst_h265_dpb_get_size (priv->dpb)); + + /* The ownership of pic will either be transferred to DPB - if the picture is + * still needed (for output and/or reference) - or we will release it + * immediately if we manage to output it here and won't have to store it for + * future reference */ + + /* Get all pictures that haven't been outputted yet */ + gst_h265_dpb_get_pictures_not_outputted (priv->dpb, ¬_outputted); + + /* C.5.2.3 */ + for (iter = not_outputted; iter; iter = g_list_next (iter)) { + GstH265Picture *other = GST_H265_PICTURE (iter->data); + + if (!other->outputted) + other->pic_latency_cnt++; + } + + if (picture->output_flag) { + picture->outputted = FALSE; + picture->pic_latency_cnt = 0; + } else { + picture->outputted = TRUE; + } + + /* set pic as short_term_ref */ + picture->ref = TRUE; + picture->long_term = FALSE; + + /* Include the one we've just decoded */ + not_outputted = g_list_append (not_outputted, picture); + + /* for debugging */ +#ifndef GST_DISABLE_GST_DEBUG + GST_TRACE_OBJECT (self, "Before sorting not outputted list"); + i = 0; + for (iter = not_outputted; iter; iter = g_list_next (iter)) { + GstH265Picture *tmp = (GstH265Picture *) iter->data; + + GST_TRACE_OBJECT (self, + "\t%dth picture %p (poc %d)", i, tmp, tmp->pic_order_cnt); + i++; + } +#endif + + /* Sort in output order */ + not_outputted = g_list_sort (not_outputted, (GCompareFunc) poc_asc_compare); + +#ifndef GST_DISABLE_GST_DEBUG + GST_TRACE_OBJECT (self, + "After sorting not outputted list in poc ascending order"); + i = 0; + for (iter = not_outputted; iter; iter = g_list_next (iter)) { + GstH265Picture *tmp = (GstH265Picture *) iter->data; + + GST_TRACE_OBJECT (self, + "\t%dth picture %p (poc %d)", i, tmp, tmp->pic_order_cnt); + i++; + } +#endif + + /* Try to output as many pictures as we can. A picture can be output, + * if the number of decoded and not yet outputted pictures that would remain + * in DPB afterwards would at least be equal to max_num_reorder_frames. 
+ * If the outputted picture is not a reference picture, it doesn't have + * to remain in the DPB and can be removed */ + iter = not_outputted; + num_remaining = g_list_length (not_outputted); + + while (num_remaining > sps->max_num_reorder_pics[sps->max_sub_layers_minus1] + || (num_remaining && + sps->max_latency_increase_plus1[sps->max_sub_layers_minus1] && + !GST_H265_PICTURE (iter->data)->outputted && + GST_H265_PICTURE (iter->data)->pic_latency_cnt >= + priv->SpsMaxLatencyPictures)) { + GstH265Picture *to_output = GST_H265_PICTURE (iter->data); + + GST_LOG_OBJECT (self, + "Output picture %p (poc %d)", to_output, to_output->pic_order_cnt); + gst_h265_decoder_do_output_picture (self, to_output); + if (!to_output->ref) { + /* Current picture hasn't been inserted into DPB yet, so don't remove it + * if we managed to output it immediately */ + gint outputted_poc = to_output->pic_order_cnt; + if (outputted_poc != picture->pic_order_cnt) { + GST_LOG_OBJECT (self, "Delete picture %p (poc %d) from DPB", + to_output, to_output->pic_order_cnt); + gst_h265_dpb_delete_by_poc (priv->dpb, outputted_poc); + } + } + + iter = g_list_next (iter); + num_remaining--; + } + + /* If we haven't managed to output the picture that we just decoded, or if + * it's a reference picture, we have to store it in DPB */ + if (!picture->outputted || picture->ref) { + if (gst_h265_dpb_is_full (priv->dpb)) { + /* If we haven't managed to output anything to free up space in DPB + * to store this picture, it's an error in the stream */ + GST_WARNING_OBJECT (self, "Could not free up space in DPB"); + return FALSE; + } + + GST_TRACE_OBJECT (self, + "Put picture %p (outputted %d, ref %d, poc %d) to dpb", + picture, picture->outputted, picture->ref, picture->pic_order_cnt); + gst_h265_dpb_add (priv->dpb, gst_h265_picture_ref (picture)); + } + + if (not_outputted) + g_list_free_full (not_outputted, (GDestroyNotify) gst_h265_picture_unref); + + return TRUE; +} + +static gboolean +gst_h265_decoder_finish_current_picture (GstH265Decoder * self) +{ + GstH265DecoderPrivate *priv = self->priv; + GstH265DecoderClass *klass; + GstH265Picture *picture; + gboolean ret = TRUE; + + if (!priv->current_picture) + return TRUE; + + picture = priv->current_picture; + priv->current_picture = NULL; + + klass = GST_H265_DECODER_GET_CLASS (self); + + if (klass->end_picture) + ret = klass->end_picture (self, picture); + + if (picture->output_flag) { + gst_video_decoder_have_frame (GST_VIDEO_DECODER (self)); + } else { + GST_DEBUG_OBJECT (self, "Skip have_frame for picture %p", picture); + } + + /* finish picture takes ownership of the picture */ + if (!gst_h265_decoder_finish_picture (self, picture)) { + GST_ERROR_OBJECT (self, "Failed to finish picture"); + return FALSE; + } + + return ret; +} diff --git a/sys/d3d11/gsth265decoder.h b/sys/d3d11/gsth265decoder.h new file mode 100644 index 0000000000..0641b63aff --- /dev/null +++ b/sys/d3d11/gsth265decoder.h @@ -0,0 +1,143 @@ +/* GStreamer + * Copyright (C) 2019 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifndef __GST_H265_DECODER_H__ +#define __GST_H265_DECODER_H__ + +#include <gst/gst.h> +#include <gst/video/video.h> +#include <gst/video/gstvideodecoder.h> +#include "gsth265picture.h" + +G_BEGIN_DECLS + +#define GST_TYPE_H265_DECODER (gst_h265_decoder_get_type()) +#define GST_H265_DECODER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_H265_DECODER,GstH265Decoder)) +#define GST_H265_DECODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_H265_DECODER,GstH265DecoderClass)) +#define GST_H265_DECODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_H265_DECODER,GstH265DecoderClass)) +#define GST_IS_H265_DECODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_H265_DECODER)) +#define GST_IS_H265_DECODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_H265_DECODER)) +#define GST_H265_DECODER_CAST(obj) ((GstH265Decoder*)obj) + +typedef struct _GstH265Decoder GstH265Decoder; +typedef struct _GstH265DecoderClass GstH265DecoderClass; +typedef struct _GstH265DecoderPrivate GstH265DecoderPrivate; + +#define IS_IDR(nal_type) \ + ((nal_type) == GST_H265_NAL_SLICE_IDR_W_RADL || (nal_type) == GST_H265_NAL_SLICE_IDR_N_LP) + +/* reserved IRAP NAL unit types 22 and 23, see H.265 Table 7-1 */ +#define RESERVED_IRAP_NAL_TYPE_MAX 23 + +#define IS_IRAP(nal_type) \ + ((nal_type) >= GST_H265_NAL_SLICE_BLA_W_LP && (nal_type) <= RESERVED_IRAP_NAL_TYPE_MAX) + +#define IS_BLA(nal_type) \ + ((nal_type) >= GST_H265_NAL_SLICE_BLA_W_LP && (nal_type) <= GST_H265_NAL_SLICE_BLA_N_LP) + +#define IS_CRA(nal_type) \ + ((nal_type) == GST_H265_NAL_SLICE_CRA_NUT) + +#define IS_RADL(nal_type) \ + ((nal_type) >= GST_H265_NAL_SLICE_RADL_N && (nal_type) <= GST_H265_NAL_SLICE_RADL_R) + +#define IS_RASL(nal_type) \ + ((nal_type) >= GST_H265_NAL_SLICE_RASL_N && (nal_type) <= GST_H265_NAL_SLICE_RASL_R) + +/** + * GstH265Decoder: + * + * The opaque #GstH265Decoder data structure. + */ +struct _GstH265Decoder +{ + /*< private >*/ + GstVideoDecoder parent; + + /*< protected >*/ + GstVideoCodecState * input_state; + + GstH265Picture *RefPicSetStCurrBefore[16]; + GstH265Picture *RefPicSetStCurrAfter[16]; + GstH265Picture *RefPicSetStFoll[16]; + GstH265Picture *RefPicSetLtCurr[16]; + GstH265Picture *RefPicSetLtFoll[16]; + + guint NumPocStCurrBefore; + guint NumPocStCurrAfter; + guint NumPocStFoll; + guint NumPocLtCurr; + guint NumPocLtFoll; + guint NumPocTotalCurr; + + /*< private >*/ + GstH265DecoderPrivate *priv; + gpointer padding[GST_PADDING_LARGE]; +}; + +/** + * GstH265DecoderClass: + * @new_sequence: Notifies subclass of SPS update + * @new_picture: Optional. + * Called whenever new #GstH265Picture is created. + * Subclass can set implementation specific user data + * on the #GstH265Picture via gst_h265_picture_set_user_data() + * @output_picture: Optional. + * Called just before gst_video_decoder_have_frame(). + * Subclass should be prepared for handle_frame() + * @start_picture: Optional. + * Called per one #GstH265Picture to notify subclass to prepare + * decoding process for the #GstH265Picture + * @decode_slice: Provides per slice data with parsed slice header and + * required raw bitstream for subclass to decode it + * @end_picture: Optional. 
+ * Called per one #GstH265Picture to notify subclass to finish + * decoding process for the #GstH265Picture + */ +struct _GstH265DecoderClass +{ + GstVideoDecoderClass parent_class; + + gboolean (*new_sequence) (GstH265Decoder * decoder, + const GstH265SPS * sps); + + gboolean (*new_picture) (GstH265Decoder * decoder, + GstH265Picture * picture); + + GstFlowReturn (*output_picture) (GstH265Decoder * decoder, + GstH265Picture * picture); + + gboolean (*start_picture) (GstH265Decoder * decoder, + GstH265Picture * picture, + GstH265Slice * slice, + GstH265Dpb * dpb); + + gboolean (*decode_slice) (GstH265Decoder * decoder, + GstH265Picture * picture, + GstH265Slice * slice); + + gboolean (*end_picture) (GstH265Decoder * decoder, + GstH265Picture * picture); + + /*< private >*/ + gpointer padding[GST_PADDING_LARGE]; +}; + +GType gst_h265_decoder_get_type (void); + +G_END_DECLS + +#endif /* __GST_H265_DECODER_H__ */ diff --git a/sys/d3d11/gsth265picture.c b/sys/d3d11/gsth265picture.c new file mode 100644 index 0000000000..0df7b469e2 --- /dev/null +++ b/sys/d3d11/gsth265picture.c @@ -0,0 +1,486 @@ +/* GStreamer + * Copyright (C) 2019 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifdef HAVE_CONFIG_H +#include <config.h> +#endif + +#include "gsth265picture.h" + +GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_h265_dec_debug); +#define GST_CAT_DEFAULT gst_d3d11_h265_dec_debug + +GST_DEFINE_MINI_OBJECT_TYPE (GstH265Picture, gst_h265_picture); + +static void +_gst_h265_picture_free (GstH265Picture * picture) +{ + if (picture->notify) + picture->notify (picture->user_data); + + g_free (picture); +} + +/** + * gst_h265_picture_new: + * + * Create new #GstH265Picture + * + * Returns: a new #GstH265Picture + */ +GstH265Picture * +gst_h265_picture_new (void) +{ + GstH265Picture *pic; + + pic = g_new0 (GstH265Picture, 1); + + pic->pts = GST_CLOCK_TIME_NONE; + pic->field = GST_H265_PICTURE_FIELD_FRAME; + + gst_mini_object_init (GST_MINI_OBJECT_CAST (pic), 0, + GST_TYPE_H265_PICTURE, NULL, NULL, + (GstMiniObjectFreeFunction) _gst_h265_picture_free); + + return pic; +} + +/** + * gst_h265_picture_set_user_data: + * @picture: a #GstH265Picture + * @user_data: private data + * @notify: (closure user_data): a #GDestroyNotify + * + * Sets @user_data on the picture and the #GDestroyNotify that will be called when + * the picture is freed. + * + * If a @user_data was previously set, then the previously set @notify will be called + * before the @user_data is replaced. 
+ */ +void +gst_h265_picture_set_user_data (GstH265Picture * picture, gpointer user_data, + GDestroyNotify notify) +{ + g_return_if_fail (GST_IS_H265_PICTURE (picture)); + + if (picture->notify) + picture->notify (picture->user_data); + + picture->user_data = user_data; + picture->notify = notify; +} + +/** + * gst_h265_picture_get_user_data: + * @picture: a #GstH265Picture + * + * Gets private data set on the picture via + * gst_h265_picture_set_user_data() previously. + * + * Returns: (transfer none): The previously set user_data + */ +gpointer +gst_h265_picture_get_user_data (GstH265Picture * picture) +{ + return picture->user_data; +} + +struct _GstH265Dpb +{ + GArray *pic_list; + gint max_num_pics; +}; + +/** + * gst_h265_dpb_new: + * + * Create new #GstH265Dpb + * + * Returns: a new #GstH265Dpb + */ +GstH265Dpb * +gst_h265_dpb_new (void) +{ + GstH265Dpb *dpb; + + dpb = g_new0 (GstH265Dpb, 1); + + dpb->pic_list = + g_array_sized_new (FALSE, TRUE, sizeof (GstH265Picture *), + GST_H265_DPB_MAX_SIZE); + g_array_set_clear_func (dpb->pic_list, + (GDestroyNotify) gst_h265_picture_clear); + + return dpb; +} + +/** + * gst_h265_dpb_set_max_num_pics: + * @dpb: a #GstH265Dpb + * @max_num_pics: the maximum number of picture + * + * Set the number of maximum allowed pictures to store + */ +void +gst_h265_dpb_set_max_num_pics (GstH265Dpb * dpb, gint max_num_pics) +{ + g_return_if_fail (dpb != NULL); + + dpb->max_num_pics = max_num_pics; +} + +/** + * gst_h265_dpb_get_max_num_pics: + * @dpb: a #GstH265Dpb + * + * Returns: the number of maximum pictures + */ +gint +gst_h265_dpb_get_max_num_pics (GstH265Dpb * dpb) +{ + g_return_val_if_fail (dpb != NULL, 0); + + return dpb->max_num_pics; +} + +/** + * gst_h265_dpb_free: + * @dpb: a #GstH265Dpb to free + * + * Free the @dpb + */ +void +gst_h265_dpb_free (GstH265Dpb * dpb) +{ + g_return_if_fail (dpb != NULL); + + gst_h265_dpb_clear (dpb); + g_free (dpb); +} + +/** + * gst_h265_dpb_clear: + * @dpb: a #GstH265Dpb + * + * Clear all stored #GstH265Picture + */ +void +gst_h265_dpb_clear (GstH265Dpb * dpb) +{ + g_return_if_fail (dpb != NULL); + + g_array_set_size (dpb->pic_list, 0); +} + +/** + * gst_h265_dpb_add: + * @dpb: a #GstH265Dpb + * @picture: (transfer full): a #GstH265Picture + * + * Store the @picture + */ +void +gst_h265_dpb_add (GstH265Dpb * dpb, GstH265Picture * picture) +{ + g_return_if_fail (dpb != NULL); + g_return_if_fail (GST_IS_H265_PICTURE (picture)); + + g_array_append_val (dpb->pic_list, picture); +} + +/** + * gst_h265_dpb_delete_unused: + * @dpb: a #GstH265Dpb + * + * Delete already outputted and not referenced all pictures from dpb + */ +void +gst_h265_dpb_delete_unused (GstH265Dpb * dpb) +{ + gint i; + + g_return_if_fail (dpb != NULL); + + for (i = 0; i < dpb->pic_list->len; i++) { + GstH265Picture *picture = + g_array_index (dpb->pic_list, GstH265Picture *, i); + + if (picture->outputted && !picture->ref) { + GST_TRACE ("remove picture %p (poc %d) from dpb", + picture, picture->pic_order_cnt); + g_array_remove_index (dpb->pic_list, i); + i--; + } + } +} + +/** + * gst_h265_dpb_delete_by_poc: + * @dpb: a #GstH265Dpb + * @poc: a poc of #GstH265Picture to remove + * + * Delete a #GstH265Dpb by @poc + */ +void +gst_h265_dpb_delete_by_poc (GstH265Dpb * dpb, gint poc) +{ + gint i; + + g_return_if_fail (dpb != NULL); + + for (i = 0; i < dpb->pic_list->len; i++) { + GstH265Picture *picture = + g_array_index (dpb->pic_list, GstH265Picture *, i); + + if (picture->pic_order_cnt == poc) { + g_array_remove_index (dpb->pic_list, i); + return; + 
} + } + + GST_WARNING ("Couldn't find picture with poc %d", poc); +} + +/** + * gst_h265_dpb_num_ref_pictures: + * @dpb: a #GstH265Dpb + * + * Returns: The number of referenced pictures + */ +gint +gst_h265_dpb_num_ref_pictures (GstH265Dpb * dpb) +{ + gint i; + gint ret = 0; + + g_return_val_if_fail (dpb != NULL, -1); + + for (i = 0; i < dpb->pic_list->len; i++) { + GstH265Picture *picture = + g_array_index (dpb->pic_list, GstH265Picture *, i); + + if (picture->ref) + ret++; + } + + return ret; +} + +/** + * gst_h265_dpb_mark_all_non_ref: + * @dpb: a #GstH265Dpb + * + * Mark all pictures as not referenced + */ +void +gst_h265_dpb_mark_all_non_ref (GstH265Dpb * dpb) +{ + gint i; + + g_return_if_fail (dpb != NULL); + + for (i = 0; i < dpb->pic_list->len; i++) { + GstH265Picture *picture = + g_array_index (dpb->pic_list, GstH265Picture *, i); + + picture->ref = FALSE; + } +} + +/** + * gst_h265_dpb_get_ref_by_poc: + * @dpb: a #GstH265Dpb + * @poc: a picture order count + * + * Find a short or long term reference picture which has matching poc + * + * Returns: (nullable) (transfer full): a #GstH265Picture + */ +GstH265Picture * +gst_h265_dpb_get_ref_by_poc (GstH265Dpb * dpb, gint poc) +{ + gint i; + + g_return_val_if_fail (dpb != NULL, NULL); + + for (i = 0; i < dpb->pic_list->len; i++) { + GstH265Picture *picture = + g_array_index (dpb->pic_list, GstH265Picture *, i); + + if (picture->ref && picture->pic_order_cnt == poc) + return gst_h265_picture_ref (picture); + } + + GST_DEBUG ("No reference picture for %d", poc); + + return NULL; +} + +/** + * gst_h265_dpb_get_ref_by_poc_lsb: + * @dpb: a #GstH265Dpb + * @poc_lsb: a picture order count lsb + * + * Find a short or long term reference picture which has matching poc_lsb + * + * Returns: (nullable) (transfer full): a #GstH265Picture + */ +GstH265Picture * +gst_h265_dpb_get_ref_by_poc_lsb (GstH265Dpb * dpb, gint poc_lsb) +{ + gint i; + + g_return_val_if_fail (dpb != NULL, NULL); + + for (i = 0; i < dpb->pic_list->len; i++) { + GstH265Picture *picture = + g_array_index (dpb->pic_list, GstH265Picture *, i); + + if (picture->ref && picture->pic_order_cnt_lsb == poc_lsb) + return gst_h265_picture_ref (picture); + } + + GST_DEBUG ("No reference picture for %d", poc_lsb); + + return NULL; +} + +/** + * gst_h265_dpb_get_short_ref_by_poc: + * @dpb: a #GstH265Dpb + * @poc: a picture order count + * + * Find a short term reference picture which has matching poc + * + * Returns: (nullable) (transfer full): a #GstH265Picture + */ +GstH265Picture * +gst_h265_dpb_get_short_ref_by_poc (GstH265Dpb * dpb, gint poc) +{ + gint i; + + g_return_val_if_fail (dpb != NULL, NULL); + + for (i = 0; i < dpb->pic_list->len; i++) { + GstH265Picture *picture = + g_array_index (dpb->pic_list, GstH265Picture *, i); + + if (picture->ref && !picture->long_term && picture->pic_order_cnt == poc) + return gst_h265_picture_ref (picture); + } + + GST_DEBUG ("No short term reference picture for %d", poc); + + return NULL; +} + +/** + * gst_h265_dpb_get_long_ref_by_poc: + * @dpb: a #GstH265Dpb + * @poc: a picture order count + * + * Find a long term reference picture which has matching poc + * + * Returns: (nullable) (transfer full): a #GstH265Picture + */ +GstH265Picture * +gst_h265_dpb_get_long_ref_by_poc (GstH265Dpb * dpb, gint poc) +{ + gint i; + + g_return_val_if_fail (dpb != NULL, NULL); + + for (i = 0; i < dpb->pic_list->len; i++) { + GstH265Picture *picture = + g_array_index (dpb->pic_list, GstH265Picture *, i); + + if (picture->ref && picture->long_term && 
picture->pic_order_cnt == poc) + return gst_h265_picture_ref (picture); + } + + GST_DEBUG ("No long term reference picture for %d", poc); + + return NULL; +} + +/** + * gst_h265_dpb_get_pictures_not_outputted: + * @dpb: a #GstH265Dpb + * @out: (out): a list of #GstH265Picture + * + * Retrieve all not-outputted pictures from @dpb + */ +void +gst_h265_dpb_get_pictures_not_outputted (GstH265Dpb * dpb, GList ** out) +{ + gint i; + + g_return_if_fail (dpb != NULL); + g_return_if_fail (out != NULL); + + for (i = 0; i < dpb->pic_list->len; i++) { + GstH265Picture *picture = + g_array_index (dpb->pic_list, GstH265Picture *, i); + + if (!picture->outputted) + *out = g_list_append (*out, gst_h265_picture_ref (picture)); + } +} + +/** + * gst_h265_dpb_get_pictures_all: + * @dpb: a #GstH265Dpb + * + * Returns: (transfer full): a #GArray of #GstH265Picture stored in @dpb + */ +GArray * +gst_h265_dpb_get_pictures_all (GstH265Dpb * dpb) +{ + g_return_val_if_fail (dpb != NULL, NULL); + + return g_array_ref (dpb->pic_list); +} + +/** + * gst_h265_dpb_get_size: + * @dpb: a #GstH265Dpb + * + * Returns: the number of pictures stored in @dpb + */ +gint +gst_h265_dpb_get_size (GstH265Dpb * dpb) +{ + g_return_val_if_fail (dpb != NULL, -1); + + return dpb->pic_list->len; +} + +/** + * gst_h265_dpb_is_full: + * @dpb: a #GstH265Dpb + * + * Returns: %TRUE if @dpb is full + */ +gboolean +gst_h265_dpb_is_full (GstH265Dpb * dpb) +{ + g_return_val_if_fail (dpb != NULL, FALSE); + + return dpb->pic_list->len >= dpb->max_num_pics; +} diff --git a/sys/d3d11/gsth265picture.h b/sys/d3d11/gsth265picture.h new file mode 100644 index 0000000000..23e597eeb9 --- /dev/null +++ b/sys/d3d11/gsth265picture.h @@ -0,0 +1,199 @@ +/* GStreamer + * Copyright (C) 2019 Seungha Yang + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifndef __GST_H265_PICTURE_H__ +#define __GST_H265_PICTURE_H__ + +#include <gst/gst.h> +#include <gst/codecparsers/gsth265parser.h> + +G_BEGIN_DECLS + +#define GST_TYPE_H265_PICTURE (gst_h265_picture_get_type()) +#define GST_IS_H265_PICTURE(obj) (GST_IS_MINI_OBJECT_TYPE(obj, GST_TYPE_H265_PICTURE)) +#define GST_H265_PICTURE(obj) ((GstH265Picture *)obj) +#define GST_H265_PICTURE_CAST(obj) (GST_H265_PICTURE(obj)) + +typedef struct _GstH265Slice GstH265Slice; +typedef struct _GstH265Picture GstH265Picture; + +#define GST_H265_DPB_MAX_SIZE 16 + +struct _GstH265Slice +{ + GstH265SliceHdr header; + + /* parsed nal unit (doesn't take ownership of raw data) */ + GstH265NalUnit nalu; +}; + +typedef enum +{ + GST_H265_PICTURE_FIELD_FRAME, + GST_H265_PICTURE_FIELD_TOP_FIELD, + GST_H265_PICTURE_FIELD_BOTTOM_FIELD, +} GstH265PictureField; + +struct _GstH265Picture +{ + GstMiniObject parent; + + GstH265SliceType type; + + GstClockTime pts; + + gint pic_order_cnt; + gint pic_order_cnt_msb; + gint pic_order_cnt_lsb; + + guint32 pic_latency_cnt; /* PicLatencyCount */ + + gboolean output_flag; + gboolean NoRaslOutputFlag; + gboolean NoOutputOfPriorPicsFlag; + gboolean RapPicFlag; /* nalu type between 16 and 21 */ + gboolean IntraPicFlag; /* Intra pic (only Intra slices) */ + + gboolean ref; + gboolean long_term; + gboolean outputted; + + GstH265PictureField field; + + gpointer user_data; + GDestroyNotify notify; +}; + +G_GNUC_INTERNAL +GType gst_h265_picture_get_type (void); + +G_GNUC_INTERNAL +GstH265Picture * gst_h265_picture_new (void); + +G_GNUC_INTERNAL +static inline GstH265Picture * +gst_h265_picture_ref (GstH265Picture * picture) +{ + return (GstH265Picture *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (picture)); +} + +G_GNUC_INTERNAL +static inline void +gst_h265_picture_unref (GstH265Picture * picture) +{ + gst_mini_object_unref (GST_MINI_OBJECT_CAST (picture)); +} + +G_GNUC_INTERNAL +static inline gboolean +gst_h265_picture_replace (GstH265Picture ** old_picture, + GstH265Picture * new_picture) +{ + return gst_mini_object_replace ((GstMiniObject **) old_picture, + (GstMiniObject *) new_picture); +} + +G_GNUC_INTERNAL +static inline void +gst_h265_picture_clear (GstH265Picture ** picture) +{ + if (picture && *picture) { + gst_h265_picture_unref (*picture); + *picture = NULL; + } +} + +G_GNUC_INTERNAL +void gst_h265_picture_set_user_data (GstH265Picture * picture, + gpointer user_data, + GDestroyNotify notify); + +G_GNUC_INTERNAL +gpointer gst_h265_picture_get_user_data (GstH265Picture * picture); + +/******************* + * GstH265Dpb * + *******************/ +typedef struct _GstH265Dpb GstH265Dpb; + +G_GNUC_INTERNAL +GstH265Dpb * gst_h265_dpb_new (void); + +G_GNUC_INTERNAL +void gst_h265_dpb_set_max_num_pics (GstH265Dpb * dpb, + gint max_num_pics); + +G_GNUC_INTERNAL +gint gst_h265_dpb_get_max_num_pics (GstH265Dpb * dpb); + +G_GNUC_INTERNAL +void gst_h265_dpb_free (GstH265Dpb * dpb); + +G_GNUC_INTERNAL +void gst_h265_dpb_clear (GstH265Dpb * dpb); + +G_GNUC_INTERNAL +void gst_h265_dpb_add (GstH265Dpb * dpb, + GstH265Picture * picture); + +G_GNUC_INTERNAL +void gst_h265_dpb_delete_unused (GstH265Dpb * dpb); + +G_GNUC_INTERNAL +void gst_h265_dpb_delete_by_poc (GstH265Dpb * dpb, + gint poc); + +G_GNUC_INTERNAL +gint gst_h265_dpb_num_ref_pictures (GstH265Dpb * dpb); + +G_GNUC_INTERNAL +void gst_h265_dpb_mark_all_non_ref (GstH265Dpb * dpb); + +G_GNUC_INTERNAL +GstH265Picture * gst_h265_dpb_get_ref_by_poc (GstH265Dpb * dpb, + gint poc); + +G_GNUC_INTERNAL +GstH265Picture * gst_h265_dpb_get_ref_by_poc_lsb (GstH265Dpb * dpb, + gint poc_lsb); + 
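+/* Note: gst_h265_dpb_get_ref_by_poc() and gst_h265_dpb_get_ref_by_poc_lsb() + * above match both short-term and long-term reference pictures; the _lsb() + * variant compares pic_order_cnt_lsb and is used while deriving + * RefPicSetLtCurr/RefPicSetLtFoll for entries whose + * delta_poc_msb_present_flag is 0 (8-6). The _short_ and _long_ getters + * below additionally check the long_term flag. */ + 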
+G_GNUC_INTERNAL +GstH265Picture * gst_h265_dpb_get_short_ref_by_poc (GstH265Dpb * dpb, + gint poc); + +G_GNUC_INTERNAL +GstH265Picture * gst_h265_dpb_get_long_ref_by_poc (GstH265Dpb * dpb, + gint poc); + +G_GNUC_INTERNAL +void gst_h265_dpb_get_pictures_not_outputted (GstH265Dpb * dpb, + GList ** out); + +G_GNUC_INTERNAL +GArray * gst_h265_dpb_get_pictures_all (GstH265Dpb * dpb); + +G_GNUC_INTERNAL +gint gst_h265_dpb_get_size (GstH265Dpb * dpb); + +G_GNUC_INTERNAL +gboolean gst_h265_dpb_is_full (GstH265Dpb * dpb); + +G_END_DECLS + +#endif /* __GST_H265_PICTURE_H__ */ diff --git a/sys/d3d11/meson.build b/sys/d3d11/meson.build index d8a470a31e..52e235cac0 100644 --- a/sys/d3d11/meson.build +++ b/sys/d3d11/meson.build @@ -25,6 +25,9 @@ d3d11_dec_sources = [ 'gstvp9picture.c', 'gstvp9decoder.c', 'gstd3d11vp9dec.c', + 'gsth265picture.c', + 'gsth265decoder.c', + 'gstd3d11h265dec.c', ] dxgi_headers = [ diff --git a/sys/d3d11/plugin.c b/sys/d3d11/plugin.c index bab3dfeb20..cd9f7e68d0 100644 --- a/sys/d3d11/plugin.c +++ b/sys/d3d11/plugin.c @@ -32,6 +32,7 @@ #ifdef HAVE_DXVA_H #include "gstd3d11utils.h" #include "gstd3d11h264dec.h" +#include "gstd3d11h265dec.h" #include "gstd3d11vp9dec.h" #endif @@ -48,6 +49,7 @@ GST_DEBUG_CATEGORY (gst_d3d11_debug_layer_debug); #ifdef HAVE_DXVA_H GST_DEBUG_CATEGORY (gst_d3d11_h264_dec_debug); +GST_DEBUG_CATEGORY (gst_d3d11_h265_dec_debug); GST_DEBUG_CATEGORY (gst_d3d11_vp9_dec_debug); #endif @@ -91,11 +93,15 @@ plugin_init (GstPlugin * plugin) "d3d11h264dec", 0, "Direct3D11 H.264 Video Decoder"); GST_DEBUG_CATEGORY_INIT (gst_d3d11_vp9_dec_debug, "d3d11vp9dec", 0, "Direct3D11 VP9 Video Decoder"); + GST_DEBUG_CATEGORY_INIT (gst_d3d11_h265_dec_debug, + "d3d11h265dec", 0, "Direct3D11 H.265 Video Decoder"); gst_element_register (plugin, "d3d11h264dec", GST_RANK_SECONDARY, GST_TYPE_D3D11_H264_DEC); gst_element_register (plugin, "d3d11vp9dec", GST_RANK_SECONDARY, GST_TYPE_D3D11_VP9_DEC); + gst_element_register (plugin, + "d3d11h265dec", GST_RANK_SECONDARY, GST_TYPE_D3D11_H265_DEC); } #endif