applemedia: don't call CVPixelBufferLockBaseAddress when doing texture sharing
When doing texture sharing we don't need to call CVPixelBufferLockBaseAddress to map the buffer into CPU memory. This cuts about 10% of relative CPU time from a vtdec ! glimagesink pipeline.
parent 5f547c5600
commit 8734abb1ce

5 changed files with 23 additions and 20 deletions
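
For context, here is a minimal sketch (not part of the commit) of how the new "map" flag gates the CPU mapping of a CVPixelBuffer. In the avfvideosrc change below the caller passes textureCache == NULL, so the pixel buffer is locked and wrapped into CPU-visible memory only when no GL texture cache is in use; the helper name wrap_pixel_buffer_sketch and the bare plane loop are illustrative stand-ins for the real gst_core_media_buffer_wrap_pixel_buffer.

/* Illustrative sketch only: mirrors the map gating added in
 * coremediabuffer.c using public CoreVideo API; not the actual
 * GStreamer implementation. */
#include <CoreVideo/CoreVideo.h>
#include <stdbool.h>
#include <stddef.h>

static bool
wrap_pixel_buffer_sketch (CVPixelBufferRef pixel_buf, bool map)
{
  /* Texture sharing (map == false): never lock the buffer, so the
   * CPU mapping cost the commit message cites is not paid. */
  if (map && CVPixelBufferLockBaseAddress (pixel_buf, 0) != kCVReturnSuccess)
    return false;

  if (map) {
    size_t n_planes = CVPixelBufferGetPlaneCount (pixel_buf);
    for (size_t i = 0; i < n_planes; i++) {
      /* In the real code each plane is wrapped into a GstMemory; here we
       * only show that the base address is touched when mapping. */
      void *plane = CVPixelBufferGetBaseAddressOfPlane (pixel_buf, i);
      (void) plane;
    }
  }
  return true;
}

On the encoder side (gst_vtenc_enqueue_buffer) the call keeps map = TRUE, since block buffers are always consumed from CPU memory.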
@@ -1083,7 +1083,7 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
     return NULL;
   }

-  buf = gst_core_media_buffer_new (cmbuf, FALSE);
+  buf = gst_core_media_buffer_new (cmbuf, FALSE, TRUE);
   dur = CMSampleBufferGetDuration (cmbuf);
   ts = CMSampleBufferGetPresentationTimeStamp (cmbuf);
   if (dur.value != 0) {
@@ -863,7 +863,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     }
   }

-  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta);
+  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache == NULL);
   if (format == GST_VIDEO_FORMAT_RGBA) {
     /* So now buf contains BGRA data (!) . Since downstream is actually going to
      * use the GL upload meta to get RGBA textures (??), we need to override the
@@ -86,7 +86,7 @@ gst_core_media_buffer_get_video_format (OSType format)

 static gboolean
 gst_core_media_buffer_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
-    CVPixelBufferRef pixel_buf, gboolean * has_padding)
+    CVPixelBufferRef pixel_buf, gboolean * has_padding, gboolean map)
 {
   guint n_planes;
   gsize offset[GST_VIDEO_MAX_PLANES] = { 0 };
@@ -94,7 +94,7 @@ gst_core_media_buffer_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
   GstVideoMeta *video_meta;
   UInt32 size;

-  if (CVPixelBufferLockBaseAddress (pixel_buf, 0) != kCVReturnSuccess) {
+  if (map && CVPixelBufferLockBaseAddress (pixel_buf, 0) != kCVReturnSuccess) {
     GST_ERROR ("Could not lock pixel buffer base address");
     return FALSE;
   }
@@ -116,10 +116,12 @@ gst_core_media_buffer_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
       offset[i] = plane_offset;
       plane_offset += size;

-      gst_buffer_append_memory (buf,
-          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
-              CVPixelBufferGetBaseAddressOfPlane (pixel_buf, i), size, 0, size,
-              NULL, NULL));
+      if (map) {
+        gst_buffer_append_memory (buf,
+            gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
+                CVPixelBufferGetBaseAddressOfPlane (pixel_buf, i), size, 0,
+                size, NULL, NULL));
+      }
     }
   } else {
@@ -128,10 +130,12 @@ gst_core_media_buffer_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
     offset[0] = 0;
     size = stride[0] * CVPixelBufferGetHeight (pixel_buf);

-    gst_buffer_append_memory (buf,
-        gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
-            CVPixelBufferGetBaseAddress (pixel_buf), size, 0, size, NULL,
-            NULL));
+    if (map) {
+      gst_buffer_append_memory (buf,
+          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
+              CVPixelBufferGetBaseAddress (pixel_buf), size, 0, size, NULL,
+              NULL));
+    }
   }

   video_meta =
@@ -237,7 +241,7 @@ gst_video_info_init_from_pixel_buffer (GstVideoInfo * info,

 GstBuffer *
 gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
-    gboolean use_video_meta)
+    gboolean use_video_meta, gboolean map)
 {
   CVImageBufferRef image_buf;
   CMBlockBufferRef block_buf;
@@ -271,13 +275,13 @@ gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
     }

     if (!gst_core_media_buffer_wrap_pixel_buffer (buf, &info, meta->pixel_buf,
-            &has_padding)) {
+            &has_padding, map)) {
       goto error;
     }

     /* If the video meta API is not supported, remove padding by
      * copying the core media buffer to a system memory buffer */
-    if (has_padding && !use_video_meta) {
+    if (map && has_padding && !use_video_meta) {
       GstBuffer *copy_buf;
       copy_buf = gst_core_media_buffer_new_from_buffer (buf, &info);
       if (!copy_buf) {
@@ -289,11 +293,9 @@ gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
     }

   } else if (block_buf != NULL) {
-
-    if (!gst_core_media_buffer_wrap_block_buffer (buf, block_buf)) {
+    if (map && !gst_core_media_buffer_wrap_block_buffer (buf, block_buf)) {
       goto error;
     }
-
   } else {
     goto error;
   }
@@ -43,7 +43,8 @@ typedef struct _GstCoreMediaMeta


 GstBuffer * gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
-    gboolean use_video_meta);
+    gboolean use_video_meta,
+    gboolean map);
 CVPixelBufferRef gst_core_media_buffer_get_pixel_buffer
     (GstBuffer * buf);
 GType gst_core_media_meta_api_get_type (void);
@@ -1272,7 +1272,7 @@ gst_vtenc_enqueue_buffer (void *outputCallbackRefCon,

   /* We are dealing with block buffers here, so we don't need
    * to enable the use of the video meta API on the core media buffer */
-  frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE);
+  frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE, TRUE);

   g_async_queue_push (self->cur_outframes, frame);
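
The 10% figure in the commit message was measured on a vtdec ! glimagesink pipeline; a launcher along these lines can reproduce that kind of run. The media file name, demuxer and parser elements here are assumptions for illustration and are not part of the commit.

/* Illustrative launcher for a vtdec ! glimagesink pipeline. */
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* sample.mov is a placeholder; any H.264 file the demuxer can handle works. */
  pipeline = gst_parse_launch ("filesrc location=sample.mov ! qtdemux ! "
      "h264parse ! vtdec ! glimagesink", &error);
  if (pipeline == NULL) {
    g_printerr ("Failed to build pipeline: %s\n", error->message);
    g_clear_error (&error);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Run until error or end of stream. */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);

  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}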