qsv: Add JPEG encoder

Note that YUY2 is not supported yet, since support for that format has
not been implemented.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/2142>
Seungha Yang 2022-04-09 23:32:19 +09:00
parent c2ac57f6c9
commit ad434759a9
7 changed files with 622 additions and 3 deletions
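
Not part of the commit itself: a minimal usage sketch, assuming the qsv plugin is built and an Intel GPU with JPEG encode support is present. The element name (qsvjpegenc) and the "quality" property come from this commit; the 640x480 test source and output file name are illustrative values only.

/* Sketch: push one NV12 frame through the new qsvjpegenc element and write a JPEG file */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  /* "quality" (1-100, default 85) is the property added by this commit */
  pipeline = gst_parse_launch ("videotestsrc num-buffers=1 ! "
      "video/x-raw,format=NV12,width=640,height=480 ! "
      "qsvjpegenc quality=90 ! filesink location=sample.jpg", NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      (GstMessageType) (GST_MESSAGE_EOS | GST_MESSAGE_ERROR));

  gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}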


@@ -390,8 +390,11 @@ gst_qsv_allocator_lock (mfxHDL pthis, mfxMemId mid, mfxFrameData * ptr)
 }
 if (frame->map_count == 0) {
+  guint map_flags = (guint) frame->map_flags;
+  map_flags &= ~((guint) GST_MAP_QSV);
   gst_video_frame_map (&frame->frame, &frame->info, frame->buffer,
-      frame->map_flags);
+      (GstMapFlags) map_flags);
 }
 frame->map_count++;
@@ -418,6 +421,20 @@ gst_qsv_allocator_lock (mfxHDL pthis, mfxMemId mid, mfxFrameData * ptr)
ptr->PitchLow = (mfxU16) (stride % (1 << 16));
ptr->Y410 = (mfxY410 *) GST_VIDEO_FRAME_PLANE_DATA (&frame->frame, 0);
break;
case GST_VIDEO_FORMAT_BGRA:
ptr->Pitch = (mfxU16) stride;
ptr->B = (mfxU8 *) GST_VIDEO_FRAME_PLANE_DATA (&frame->frame, 0);
ptr->G = ptr->B + 1;
ptr->R = ptr->B + 2;
ptr->A = ptr->B + 3;
break;
case GST_VIDEO_FORMAT_RGBA:
ptr->Pitch = (mfxU16) stride;
ptr->R = (mfxU8 *) GST_VIDEO_FRAME_PLANE_DATA (&frame->frame, 0);
ptr->G = ptr->R + 1;
ptr->B = ptr->R + 2;
ptr->A = ptr->R + 3;
break;
default:
break;
}
@@ -448,6 +465,13 @@ gst_qsv_allocator_unlock (mfxHDL pthis, mfxMemId mid, mfxFrameData * ptr)
g_mutex_unlock (&frame->lock);
/* SDK will not re-lock unless we clear data pointer here. It happens
* on Linux with BGRA JPEG encoding */
ptr->R = nullptr;
ptr->G = nullptr;
ptr->B = nullptr;
ptr->A = nullptr;
return MFX_ERR_NONE;
}

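The new BGRA/RGBA cases above hand the SDK per-channel pointers that are plain byte offsets into one packed plane. The following is an illustration of that layout assumption only, not code from the commit.

/* Illustration only: how the B/G/R/A pointers set up above address one packed
 * BGRA plane. Each pixel is 4 bytes, so the channel pointers differ by 1 byte
 * and step through the image with the same 4-byte pixel stride. */
#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  /* a 2x1 BGRA row: pixel 0 = (B,G,R,A), pixel 1 = (B,G,R,A) */
  uint8_t plane[8] = { 0x10, 0x20, 0x30, 0xff, 0x40, 0x50, 0x60, 0xff };
  uint8_t *b = plane;     /* ptr->B = plane data  */
  uint8_t *g = b + 1;     /* ptr->G = ptr->B + 1  */
  uint8_t *r = b + 2;     /* ptr->R = ptr->B + 2  */
  uint8_t *a = b + 3;     /* ptr->A = ptr->B + 3  */

  printf ("pixel 0: B=%02x G=%02x R=%02x A=%02x\n", b[0], g[0], r[0], a[0]);
  printf ("pixel 1: B=%02x G=%02x R=%02x A=%02x\n", b[4], g[4], r[4], a[4]);
  return 0;
}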

@@ -105,6 +105,12 @@ gst_qsv_d3d11_allocator_alloc (GstQsvAllocator * allocator,
case MFX_FOURCC_Y410:
dxgi_format = DXGI_FORMAT_Y410;
break;
case MFX_FOURCC_RGB4:
dxgi_format = DXGI_FORMAT_B8G8R8A8_UNORM;
break;
case MFX_FOURCC_BGR4:
dxgi_format = DXGI_FORMAT_R8G8B8A8_UNORM;
break;
default:
/* TODO: add more formats */
break;


@@ -1041,8 +1041,26 @@ gst_qsv_encoder_init_encode_session (GstQsvEncoder * self)
priv->next_surface_index = 0;
g_array_set_size (priv->task_pool, param.AsyncDepth);
if (klass->codec_id == MFX_CODEC_JPEG) {
gdouble factor = 4.0;
/* the JPEG query returns a zero buffer size, so estimate it from the raw frame size */
switch (GST_VIDEO_INFO_FORMAT (info)) {
case GST_VIDEO_FORMAT_NV12:
factor = 1.5;
break;
case GST_VIDEO_FORMAT_YUY2:
factor = 2.0;
break;
default:
break;
}
bitstream_size = (guint)
(factor * GST_VIDEO_INFO_WIDTH (info) * GST_VIDEO_INFO_HEIGHT (info));
} else {
bitstream_size =
(guint) param.mfx.BufferSizeInKB * param.mfx.BRCParamMultiplier * 1024;
}
for (guint i = 0; i < priv->task_pool->len; i++) {
GstQsvEncoderTask *task = &g_array_index (priv->task_pool,

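For reference only (not in the commit): with the factors chosen above, the worst-case bitstream buffer for JPEG is simply the raw frame size; a 1920x1080 NV12 input reserves about 3 MiB.

/* Illustration only: the worst-case JPEG output buffer reserved by the branch
 * above for a 1920x1080 NV12 frame (1.5 bytes per pixel). */
#include <stdio.h>

int
main (void)
{
  double factor = 1.5;            /* NV12; 2.0 for YUY2, 4.0 otherwise */
  unsigned int width = 1920, height = 1080;
  unsigned int bitstream_size = (unsigned int) (factor * width * height);

  printf ("%u bytes (%.2f MiB)\n", bitstream_size,
      bitstream_size / (1024.0 * 1024.0));  /* 3110400 bytes, ~2.97 MiB */
  return 0;
}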

@@ -0,0 +1,531 @@
/* GStreamer
* Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstqsvjpegenc.h"
#include <vector>
#include <string>
#include <set>
#include <string.h>
#ifdef G_OS_WIN32
#include <gst/d3d11/gstd3d11.h>
#else
#include <gst/va/gstva.h>
#endif
GST_DEBUG_CATEGORY_EXTERN (gst_qsv_jpeg_enc_debug);
#define GST_CAT_DEFAULT gst_qsv_jpeg_enc_debug
enum
{
PROP_0,
PROP_ADAPTER_LUID,
PROP_DEVICE_PATH,
PROP_QUALITY,
};
#define DEFAULT_JPEG_QUALITY 85
typedef struct _GstQsvJpegEncClassData
{
GstCaps *sink_caps;
GstCaps *src_caps;
guint impl_index;
gint64 adapter_luid;
gchar *display_path;
gboolean interleaved;
} GstQsvJpegEncClassData;
typedef struct _GstQsvJpegEnc
{
GstQsvEncoder parent;
GMutex prop_lock;
/* protected by prop_lock */
gboolean property_updated;
/* properties */
guint quality;
} GstQsvJpegEnc;
typedef struct _GstQsvJpegEncClass
{
GstQsvEncoderClass parent_class;
gboolean interleaved;
} GstQsvJpegEncClass;
static GstElementClass *parent_class = nullptr;
#define GST_QSV_JPEG_ENC(object) ((GstQsvJpegEnc *) (object))
#define GST_QSV_JPEG_ENC_GET_CLASS(object) \
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstQsvJpegEncClass))
static void gst_qsv_jpeg_enc_finalize (GObject * object);
static void gst_qsv_jpeg_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_qsv_jpeg_enc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_qsv_jpeg_enc_set_format (GstQsvEncoder * encoder,
GstVideoCodecState * state, mfxVideoParam * param,
GPtrArray * extra_params);
static gboolean gst_qsv_jpeg_enc_set_output_state (GstQsvEncoder * encoder,
GstVideoCodecState * state, mfxSession session);
static GstQsvEncoderReconfigure
gst_qsv_jpeg_enc_check_reconfigure (GstQsvEncoder * encoder, mfxSession session,
mfxVideoParam * param, GPtrArray * extra_params);
static void
gst_qsv_jpeg_enc_class_init (GstQsvJpegEncClass * klass, gpointer data)
{
GObjectClass *object_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstQsvEncoderClass *qsvenc_class = GST_QSV_ENCODER_CLASS (klass);
GstQsvJpegEncClassData *cdata = (GstQsvJpegEncClassData *) data;
qsvenc_class->codec_id = MFX_CODEC_JPEG;
qsvenc_class->impl_index = cdata->impl_index;
qsvenc_class->adapter_luid = cdata->adapter_luid;
qsvenc_class->display_path = cdata->display_path;
object_class->finalize = gst_qsv_jpeg_enc_finalize;
object_class->set_property = gst_qsv_jpeg_enc_set_property;
object_class->get_property = gst_qsv_jpeg_enc_get_property;
#ifdef G_OS_WIN32
g_object_class_install_property (object_class, PROP_ADAPTER_LUID,
g_param_spec_int64 ("adapter-luid", "Adapter LUID",
"DXGI Adapter LUID (Locally Unique Identifier) of created device",
G_MININT64, G_MAXINT64, qsvenc_class->adapter_luid,
(GParamFlags) (GST_PARAM_CONDITIONALLY_AVAILABLE | G_PARAM_READABLE |
G_PARAM_STATIC_STRINGS)));
#else
g_object_class_install_property (object_class, PROP_DEVICE_PATH,
g_param_spec_string ("device-path", "Device Path",
"DRM device path", cdata->display_path,
(GParamFlags) (GST_PARAM_CONDITIONALLY_AVAILABLE |
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
#endif
g_object_class_install_property (object_class, PROP_QUALITY,
g_param_spec_uint ("quality", "Quality",
"Encoding quality, 100 for best quality",
1, 100, DEFAULT_JPEG_QUALITY, (GParamFlags)
(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
gst_element_class_set_static_metadata (element_class,
"Intel Quick Sync Video JPEG Encoder",
"Codec/Encoder/Video/Hardware",
"Intel Quick Sync Video JPEG Encoder",
"Seungha Yang <seungha@centricular.com>");
gst_element_class_add_pad_template (element_class,
gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
cdata->sink_caps));
gst_element_class_add_pad_template (element_class,
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
cdata->src_caps));
qsvenc_class->set_format = GST_DEBUG_FUNCPTR (gst_qsv_jpeg_enc_set_format);
qsvenc_class->set_output_state =
GST_DEBUG_FUNCPTR (gst_qsv_jpeg_enc_set_output_state);
qsvenc_class->check_reconfigure =
GST_DEBUG_FUNCPTR (gst_qsv_jpeg_enc_check_reconfigure);
klass->interleaved = cdata->interleaved;
gst_caps_unref (cdata->sink_caps);
gst_caps_unref (cdata->src_caps);
g_free (cdata);
}
static void
gst_qsv_jpeg_enc_init (GstQsvJpegEnc * self)
{
self->quality = DEFAULT_JPEG_QUALITY;
g_mutex_init (&self->prop_lock);
}
static void
gst_qsv_jpeg_enc_finalize (GObject * object)
{
GstQsvJpegEnc *self = GST_QSV_JPEG_ENC (object);
g_mutex_clear (&self->prop_lock);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_qsv_jpeg_enc_check_update_uint (GstQsvJpegEnc * self, guint * old_val,
guint new_val)
{
if (*old_val == new_val)
return;
g_mutex_lock (&self->prop_lock);
*old_val = new_val;
self->property_updated = TRUE;
g_mutex_unlock (&self->prop_lock);
}
static void
gst_qsv_jpeg_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstQsvJpegEnc *self = GST_QSV_JPEG_ENC (object);
switch (prop_id) {
case PROP_QUALITY:
gst_qsv_jpeg_enc_check_update_uint (self, &self->quality,
g_value_get_uint (value));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_qsv_jpeg_enc_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstQsvJpegEnc *self = GST_QSV_JPEG_ENC (object);
GstQsvEncoderClass *klass = GST_QSV_ENCODER_GET_CLASS (self);
switch (prop_id) {
case PROP_ADAPTER_LUID:
g_value_set_int64 (value, klass->adapter_luid);
break;
case PROP_DEVICE_PATH:
g_value_set_string (value, klass->display_path);
break;
case PROP_QUALITY:
g_value_set_uint (value, self->quality);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static gboolean
gst_qsv_jpeg_enc_set_format (GstQsvEncoder * encoder,
GstVideoCodecState * state, mfxVideoParam * param, GPtrArray * extra_params)
{
GstQsvJpegEnc *self = GST_QSV_JPEG_ENC (encoder);
GstQsvJpegEncClass *klass = GST_QSV_JPEG_ENC_GET_CLASS (self);
GstVideoInfo *info = &state->info;
mfxFrameInfo *frame_info;
frame_info = &param->mfx.FrameInfo;
frame_info->Width = frame_info->CropW = info->width;
frame_info->Height = frame_info->CropH = info->height;
frame_info->PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
if (GST_VIDEO_INFO_FPS_N (info) > 0 && GST_VIDEO_INFO_FPS_D (info) > 0) {
frame_info->FrameRateExtN = GST_VIDEO_INFO_FPS_N (info);
frame_info->FrameRateExtD = GST_VIDEO_INFO_FPS_D (info);
} else {
/* HACK: Same as x264enc */
frame_info->FrameRateExtN = 25;
frame_info->FrameRateExtD = 1;
}
frame_info->AspectRatioW = GST_VIDEO_INFO_PAR_N (info);
frame_info->AspectRatioH = GST_VIDEO_INFO_PAR_D (info);
switch (GST_VIDEO_INFO_FORMAT (info)) {
case GST_VIDEO_FORMAT_NV12:
frame_info->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
frame_info->FourCC = MFX_FOURCC_NV12;
frame_info->BitDepthLuma = 8;
frame_info->BitDepthChroma = 8;
break;
case GST_VIDEO_FORMAT_BGRA:
frame_info->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
frame_info->FourCC = MFX_FOURCC_RGB4;
break;
default:
GST_ERROR_OBJECT (self, "Unexpected format %s",
gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (info)));
return FALSE;
}
g_mutex_lock (&self->prop_lock);
param->mfx.CodecId = MFX_CODEC_JPEG;
param->mfx.CodecProfile = MFX_PROFILE_JPEG_BASELINE;
param->mfx.Quality = self->quality;
if (klass->interleaved)
param->mfx.Interleaved = 1;
else
param->mfx.Interleaved = 0;
param->mfx.RestartInterval = 0;
param->ExtParam = (mfxExtBuffer **) extra_params->pdata;
param->NumExtParam = extra_params->len;
self->property_updated = FALSE;
g_mutex_unlock (&self->prop_lock);
return TRUE;
}
static gboolean
gst_qsv_jpeg_enc_set_output_state (GstQsvEncoder * encoder,
GstVideoCodecState * state, mfxSession session)
{
GstCaps *caps;
GstTagList *tags;
GstVideoCodecState *out_state;
caps = gst_caps_from_string ("image/jpeg");
out_state = gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (encoder),
caps, state);
gst_video_codec_state_unref (out_state);
tags = gst_tag_list_new_empty ();
gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER, "qsvjpegenc",
nullptr);
gst_video_encoder_merge_tags (GST_VIDEO_ENCODER (encoder),
tags, GST_TAG_MERGE_REPLACE);
gst_tag_list_unref (tags);
return TRUE;
}
static GstQsvEncoderReconfigure
gst_qsv_jpeg_enc_check_reconfigure (GstQsvEncoder * encoder, mfxSession session,
mfxVideoParam * param, GPtrArray * extra_params)
{
GstQsvJpegEnc *self = GST_QSV_JPEG_ENC (encoder);
GstQsvEncoderReconfigure ret = GST_QSV_ENCODER_RECONFIGURE_NONE;
g_mutex_lock (&self->prop_lock);
if (self->property_updated)
ret = GST_QSV_ENCODER_RECONFIGURE_FULL;
self->property_updated = FALSE;
g_mutex_unlock (&self->prop_lock);
return ret;
}
typedef struct
{
guint width;
guint height;
} Resolution;
void
gst_qsv_jpeg_enc_register (GstPlugin * plugin, guint rank, guint impl_index,
GstObject * device, mfxSession session)
{
mfxVideoParam param;
mfxInfoMFX *mfx;
static const Resolution resolutions_to_check[] = {
{1280, 720}, {1920, 1088}, {2560, 1440}, {3840, 2160}, {4096, 2160},
{7680, 4320}, {8192, 4320}
};
std::vector < std::string > supported_formats;
Resolution max_resolution;
mfxStatus status;
gboolean interleaved = TRUE;
memset (&param, 0, sizeof (mfxVideoParam));
memset (&max_resolution, 0, sizeof (Resolution));
param.AsyncDepth = 4;
param.IOPattern = MFX_IOPATTERN_IN_VIDEO_MEMORY;
mfx = &param.mfx;
mfx->LowPower = MFX_CODINGOPTION_UNKNOWN;
mfx->CodecId = MFX_CODEC_JPEG;
mfx->CodecProfile = MFX_PROFILE_JPEG_BASELINE;
mfx->Quality = DEFAULT_JPEG_QUALITY;
mfx->Interleaved = 1;
mfx->RestartInterval = 0;
mfx->FrameInfo.Width = mfx->FrameInfo.CropW = GST_ROUND_UP_16 (320);
mfx->FrameInfo.Height = mfx->FrameInfo.CropH = GST_ROUND_UP_16 (240);
mfx->FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
mfx->FrameInfo.FourCC = MFX_FOURCC_NV12;
mfx->FrameInfo.FrameRateExtN = 30;
mfx->FrameInfo.FrameRateExtD = 1;
mfx->FrameInfo.AspectRatioW = 1;
mfx->FrameInfo.AspectRatioH = 1;
mfx->FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
status = MFXVideoENCODE_Query (session, &param, &param);
if (status == MFX_WRN_PARTIAL_ACCELERATION) {
mfx->Interleaved = 0;
interleaved = FALSE;
status = MFXVideoENCODE_Query (session, &param, &param);
}
if (status != MFX_ERR_NONE)
return;
supported_formats.push_back ("NV12");
mfx->FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
mfx->FrameInfo.FourCC = MFX_FOURCC_RGB4;
status = MFXVideoENCODE_Query (session, &param, &param);
/* TODO: Add YUY2 support, d3d11 doesn't support the format yet */
if (status == MFX_ERR_NONE)
supported_formats.push_back ("BGRA");
mfx->FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
mfx->FrameInfo.FourCC = MFX_FOURCC_NV12;
/* Check max-resolution */
for (guint i = 0; i < G_N_ELEMENTS (resolutions_to_check); i++) {
mfx->FrameInfo.Width = mfx->FrameInfo.CropW = resolutions_to_check[i].width;
mfx->FrameInfo.Height = mfx->FrameInfo.CropH =
resolutions_to_check[i].height;
if (MFXVideoENCODE_Query (session, &param, &param) != MFX_ERR_NONE)
break;
max_resolution.width = resolutions_to_check[i].width;
max_resolution.height = resolutions_to_check[i].height;
}
GST_INFO ("Maximum supported resolution: %dx%d",
max_resolution.width, max_resolution.height);
/* To cover both landscape and portrait,
* select max value (width in this case) */
guint resolution = MAX (max_resolution.width, max_resolution.height);
std::string sink_caps_str = "video/x-raw";
sink_caps_str += ", width=(int) [ 16, " + std::to_string (resolution) + " ]";
sink_caps_str += ", height=(int) [ 16, " + std::to_string (resolution) + " ]";
/* *INDENT-OFF* */
if (supported_formats.size () > 1) {
sink_caps_str += ", format=(string) { ";
bool first = true;
for (const auto &iter: supported_formats) {
if (!first) {
sink_caps_str += ", ";
}
sink_caps_str += iter;
first = false;
}
sink_caps_str += " }";
} else {
sink_caps_str += ", format=(string) " + supported_formats[0];
}
/* *INDENT-ON* */
GstCaps *sink_caps = gst_caps_from_string (sink_caps_str.c_str ());
#ifdef G_OS_WIN32
GstCaps *d3d11_caps = gst_caps_copy (sink_caps);
GstCapsFeatures *caps_features =
gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, nullptr);
gst_caps_set_features_simple (d3d11_caps, caps_features);
gst_caps_append (d3d11_caps, sink_caps);
sink_caps = d3d11_caps;
#else
GstCaps *va_caps = gst_caps_copy (sink_caps);
GstCapsFeatures *caps_features =
gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_VA, nullptr);
gst_caps_set_features_simple (va_caps, caps_features);
gst_caps_append (va_caps, sink_caps);
sink_caps = va_caps;
#endif
std::string src_caps_str = "image/jpeg";
src_caps_str += ", width=(int) [ 16, " + std::to_string (resolution) + " ]";
src_caps_str += ", height=(int) [ 16, " + std::to_string (resolution) + " ]";
GstCaps *src_caps = gst_caps_from_string (src_caps_str.c_str ());
GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
GstQsvJpegEncClassData *cdata = g_new0 (GstQsvJpegEncClassData, 1);
cdata->sink_caps = sink_caps;
cdata->src_caps = src_caps;
cdata->impl_index = impl_index;
cdata->interleaved = interleaved;
#ifdef G_OS_WIN32
gint64 device_luid;
g_object_get (device, "adapter-luid", &device_luid, nullptr);
cdata->adapter_luid = device_luid;
#else
gchar *display_path;
g_object_get (device, "path", &display_path, nullptr);
cdata->display_path = display_path;
#endif
GType type;
gchar *type_name;
gchar *feature_name;
GTypeInfo type_info = {
sizeof (GstQsvJpegEncClass),
nullptr,
nullptr,
(GClassInitFunc) gst_qsv_jpeg_enc_class_init,
nullptr,
cdata,
sizeof (GstQsvJpegEnc),
0,
(GInstanceInitFunc) gst_qsv_jpeg_enc_init,
};
type_name = g_strdup ("GstQsvJpegEnc");
feature_name = g_strdup ("qsvjpegenc");
gint index = 0;
while (g_type_from_name (type_name)) {
index++;
g_free (type_name);
g_free (feature_name);
type_name = g_strdup_printf ("GstQsvJpegDevice%dEnc", index);
feature_name = g_strdup_printf ("qsvjpegdevice%denc", index);
}
type = g_type_register_static (GST_TYPE_QSV_ENCODER, type_name, &type_info,
(GTypeFlags) 0);
if (rank > 0 && index != 0)
rank--;
if (!gst_element_register (plugin, feature_name, rank, type))
GST_WARNING ("Failed to register plugin '%s'", type_name);
g_free (type_name);
g_free (feature_name);
}

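A usage note, not part of the commit: since check_reconfigure() reports GST_QSV_ENCODER_RECONFIGURE_FULL once property_updated is set, changing "quality" on a playing element simply re-opens the encode session before the next frame. A minimal sketch, assuming enc points at a qsvjpegenc instance:

#include <gst/gst.h>

/* Sketch only: drop the JPEG quality of an existing qsvjpegenc instance at
 * runtime; the property setter marks property_updated, and the next frame
 * triggers a full session reconfigure. */
static void
lower_jpeg_quality (GstElement * enc)
{
  g_object_set (enc, "quality", 60, NULL);
}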

@@ -0,0 +1,34 @@
/* GStreamer
* Copyright (C) 2022 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#pragma once
#include <gst/gst.h>
#include <gst/video/video.h>
#include "gstqsvencoder.h"
G_BEGIN_DECLS
void gst_qsv_jpeg_enc_register (GstPlugin * plugin,
guint rank,
guint impl_index,
GstObject * device,
mfxSession session);
G_END_DECLS


@@ -5,6 +5,7 @@ qsv_sources = [
'gstqsvh264dec.cpp',
'gstqsvh264enc.cpp',
'gstqsvh265enc.cpp',
'gstqsvjpegenc.cpp',
'gstqsvutils.cpp',
'gstqsvvp9enc.cpp',
'plugin.cpp',


@@ -27,6 +27,7 @@
#include "gstqsvh264dec.h"
#include "gstqsvh264enc.h"
#include "gstqsvh265enc.h"
#include "gstqsvjpegenc.h"
#include "gstqsvvp9enc.h"
#include <string.h>
@@ -47,6 +48,7 @@ GST_DEBUG_CATEGORY (gst_qsv_encoder_debug);
GST_DEBUG_CATEGORY (gst_qsv_h264_dec_debug);
GST_DEBUG_CATEGORY (gst_qsv_h264_enc_debug);
GST_DEBUG_CATEGORY (gst_qsv_h265_enc_debug);
GST_DEBUG_CATEGORY (gst_qsv_jpeg_enc_debug);
GST_DEBUG_CATEGORY (gst_qsv_vp9_enc_debug);
#define GST_CAT_DEFAULT gst_qsv_debug
@@ -228,6 +230,8 @@ plugin_init (GstPlugin * plugin)
"qsvh264enc", 0, "qsvh264enc");
GST_DEBUG_CATEGORY_INIT (gst_qsv_h265_enc_debug,
"qsvh265enc", 0, "qsvh265enc");
GST_DEBUG_CATEGORY_INIT (gst_qsv_jpeg_enc_debug,
"qsvjpegenc", 0, "qsvjpegenc");
GST_DEBUG_CATEGORY_INIT (gst_qsv_vp9_enc_debug, "qsvvp9enc", 0, "qsvvp9enc");
do {
@@ -257,6 +261,7 @@ plugin_init (GstPlugin * plugin)
gst_qsv_h264_enc_register (plugin, GST_RANK_NONE, i, device, session);
gst_qsv_h265_enc_register (plugin, GST_RANK_NONE, i, device, session);
gst_qsv_jpeg_enc_register (plugin, GST_RANK_NONE, i, device, session);
gst_qsv_vp9_enc_register (plugin, GST_RANK_NONE, i, device, session);
next: