Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git (synced 2025-02-17)
applemedia: always fill GstBuffers with GstMemory
Always fill buffers with our custom memory; the custom memory avoids mapping the underlying CV/CM buffers unless a map is actually required.
Parent: 8577224c74
Commit: f6154b4cc0
10 changed files with 56 additions and 91 deletions
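The point of the custom memory is that mapping is deferred: wrapping a CVPixelBuffer in a GstMemory is cheap, and CVPixelBufferLockBaseAddress() only runs if a downstream element actually maps the memory. Below is a minimal, hypothetical sketch of that pattern in C; the type and function names (LazyCVMemory, lazy_cv_mem_*) are illustrative only and are not the identifiers used by the applemedia plugin, which implements this with GstAppleCoreVideoPixelBuffer and gst_apple_core_video_memory_new_wrapped (visible in the hunks below).

/* Illustrative sketch only -- not the plugin's actual implementation.
 * A GstMemory backed by a CVPixelBuffer whose pixels are locked lazily:
 * the pixel buffer is only locked/unlocked when the memory is mapped. */

#include <gst/gst.h>
#include <CoreVideo/CoreVideo.h>

typedef struct
{
  GstMemory mem;                   /* parent GstMemory (must come first) */
  CVPixelBufferRef pixbuf;         /* retained backing pixel buffer */
  CVPixelBufferLockFlags lock_flags;  /* flags used for the current lock */
} LazyCVMemory;

/* GstMemoryMapFunction: runs only when somebody maps the memory. */
static gpointer
lazy_cv_mem_map (GstMemory * gmem, gsize maxsize, GstMapFlags flags)
{
  LazyCVMemory *mem = (LazyCVMemory *) gmem;

  mem->lock_flags = (flags & GST_MAP_WRITE) ? 0 : kCVPixelBufferLock_ReadOnly;
  if (CVPixelBufferLockBaseAddress (mem->pixbuf, mem->lock_flags) != kCVReturnSuccess)
    return NULL;

  return CVPixelBufferGetBaseAddress (mem->pixbuf);
}

/* GstMemoryUnmapFunction: undo the lock taken in map. */
static void
lazy_cv_mem_unmap (GstMemory * gmem)
{
  LazyCVMemory *mem = (LazyCVMemory *) gmem;

  CVPixelBufferUnlockBaseAddress (mem->pixbuf, mem->lock_flags);
}

/* Wrap a pixel buffer without touching its pixels. Elements that only
 * forward the buffer (e.g. into a GL texture cache) never trigger the
 * lock. "allocator" is assumed to have lazy_cv_mem_map/unmap installed
 * in its mem_map/mem_unmap fields and to release the retained pixel
 * buffer in its free function. */
static GstMemory *
lazy_cv_mem_new (GstAllocator * allocator, CVPixelBufferRef pixbuf, gsize size)
{
  LazyCVMemory *mem = g_new0 (LazyCVMemory, 1);

  gst_memory_init (GST_MEMORY_CAST (mem), 0, allocator, NULL,
      size, 0, 0, size);
  mem->pixbuf = CVPixelBufferRetain (pixbuf);

  return GST_MEMORY_CAST (mem);
}

In this sketch the callbacks would sit on a custom GstAllocator (its mem_map/mem_unmap fields); per-plane wrapping, as the real plugin does for planar formats, works the same way via CVPixelBufferGetBaseAddressOfPlane().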
@@ -1079,7 +1079,7 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
       return NULL;
     }

-    buf = gst_core_media_buffer_new (cmbuf, FALSE, TRUE);
+    buf = gst_core_media_buffer_new (cmbuf, FALSE);
     CFRelease (cmbuf);
     if (buf == NULL)
       return NULL;

@@ -939,7 +939,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     }
   }

-  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache == NULL);
+  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta);
   if (*buf == NULL) {
     CFRelease (sbuf);
     return GST_FLOW_ERROR;

@@ -228,7 +228,7 @@ gst_video_info_init_from_pixel_buffer (GstVideoInfo * info,

 GstBuffer *
 gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
-    gboolean use_video_meta, gboolean map)
+    gboolean use_video_meta)
 {
   CVImageBufferRef image_buf;
   CMBlockBufferRef block_buf;

@@ -250,11 +250,11 @@ gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
       goto error;
     }

-    gst_core_video_wrap_pixel_buffer (buf, &info, pixel_buf, &has_padding, map);
+    gst_core_video_wrap_pixel_buffer (buf, &info, pixel_buf, &has_padding);

     /* If the video meta API is not supported, remove padding by
      * copying the core media buffer to a system memory buffer */
-    if (map && has_padding && !use_video_meta) {
+    if (has_padding && !use_video_meta) {
       GstBuffer *copy_buf;
       copy_buf = gst_core_media_buffer_new_from_buffer (buf, &info);
       if (!copy_buf) {

@@ -266,7 +266,7 @@ gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
     }

   } else if (block_buf != NULL) {
-    if (map && !gst_core_media_buffer_wrap_block_buffer (buf, block_buf)) {
+    if (!gst_core_media_buffer_wrap_block_buffer (buf, block_buf)) {
       goto error;
     }
   } else {

@@ -43,8 +43,7 @@ typedef struct _GstCoreMediaMeta


 GstBuffer * gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
-    gboolean use_video_meta,
-    gboolean map);
+    gboolean use_video_meta);
 CVPixelBufferRef gst_core_media_buffer_get_pixel_buffer
     (GstBuffer * buf);
 GType gst_core_media_meta_api_get_type (void);

@@ -85,22 +85,19 @@ gst_core_video_meta_get_info (void)

 void
 gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
-    CVPixelBufferRef pixel_buf, gboolean * has_padding, gboolean map)
+    CVPixelBufferRef pixel_buf, gboolean * has_padding)
 {
   guint n_planes;
   gsize offset[GST_VIDEO_MAX_PLANES] = { 0 };
   gint stride[GST_VIDEO_MAX_PLANES] = { 0 };
   UInt32 size;
+  GstAppleCoreVideoPixelBuffer *gpixbuf;

+  gpixbuf = gst_apple_core_video_pixel_buffer_new (pixel_buf);
   *has_padding = FALSE;

   if (CVPixelBufferIsPlanar (pixel_buf)) {
     gint i, size = 0, plane_offset = 0;
-    GstAppleCoreVideoPixelBuffer *gpixbuf;
-
-    if (map) {
-      gpixbuf = gst_apple_core_video_pixel_buffer_new (pixel_buf);
-    }

     n_planes = CVPixelBufferGetPlaneCount (pixel_buf);
     for (i = 0; i < n_planes; i++) {

@@ -114,33 +111,22 @@ gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
       offset[i] = plane_offset;
       plane_offset += size;

-      if (map) {
-        gst_buffer_append_memory (buf,
-            gst_apple_core_video_memory_new_wrapped (gpixbuf, i, size));
-      }
-    }
-
-    if (map) {
-      gst_apple_core_video_pixel_buffer_unref (gpixbuf);
+      gst_buffer_append_memory (buf,
+          gst_apple_core_video_memory_new_wrapped (gpixbuf, i, size));
     }
   } else {

     n_planes = 1;
     stride[0] = CVPixelBufferGetBytesPerRow (pixel_buf);
     offset[0] = 0;
     size = stride[0] * CVPixelBufferGetHeight (pixel_buf);

-    if (map) {
-      GstAppleCoreVideoPixelBuffer *gpixbuf;
-
-      gpixbuf = gst_apple_core_video_pixel_buffer_new (pixel_buf);
-      gst_buffer_append_memory (buf,
-          gst_apple_core_video_memory_new_wrapped (gpixbuf,
-              GST_APPLE_CORE_VIDEO_NO_PLANE, size));
-      gst_apple_core_video_pixel_buffer_unref (gpixbuf);
-    }
+    gst_buffer_append_memory (buf,
+        gst_apple_core_video_memory_new_wrapped (gpixbuf,
+            GST_APPLE_CORE_VIDEO_NO_PLANE, size));
   }

+  gst_apple_core_video_pixel_buffer_unref (gpixbuf);
+
   if (info) {
     GstVideoMeta *video_meta;

@@ -152,8 +138,7 @@ gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
 }

 GstBuffer *
-gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo,
-    gboolean map)
+gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo)
 {
   CVPixelBufferRef pixbuf = NULL;
   GstBuffer *buf;

@@ -174,7 +159,7 @@ gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo,
   meta->cvbuf = CVBufferRetain (cvbuf);
   meta->pixbuf = pixbuf;

-  gst_core_video_wrap_pixel_buffer (buf, vinfo, pixbuf, &has_padding, map);
+  gst_core_video_wrap_pixel_buffer (buf, vinfo, pixbuf, &has_padding);

   return buf;
 }

@@ -41,13 +41,11 @@ typedef struct _GstCoreVideoMeta
 } GstCoreVideoMeta;

 GstBuffer * gst_core_video_buffer_new (CVBufferRef cvbuf,
-    GstVideoInfo *info,
-    gboolean map);
+    GstVideoInfo *info);
 void gst_core_video_wrap_pixel_buffer (GstBuffer * buf,
     GstVideoInfo * info,
     CVPixelBufferRef pixel_buf,
-    gboolean * has_padding,
-    gboolean map);
+    gboolean * has_padding);
 GType gst_core_video_meta_api_get_type (void);

 G_END_DECLS

@@ -434,7 +434,7 @@ openFailed:
   [queueLock unlockWithCondition:
       ([queue count] == 0) ? NO_FRAMES : HAS_FRAME_OR_STOP_REQUEST];

-  *buf = gst_core_video_buffer_new ((CVBufferRef)frame, NULL, TRUE);
+  *buf = gst_core_video_buffer_new ((CVBufferRef)frame, NULL);
   CVBufferRelease (frame);

   [self timestampBuffer:*buf];

@@ -145,20 +145,20 @@ cv_pixel_buffer_from_gst_buffer (GstBuffer * buffer)
 }

 #if HAVE_IOS
-static gboolean
-gl_mem_from_buffer (GstVideoTextureCache * cache,
-    GstBuffer * buffer, GstMemory **mem1, GstMemory **mem2)
+static void
+_do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
 {
   CVOpenGLESTextureRef texture = NULL;
-  CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (buffer);
+  GstVideoTextureCache *cache = data->cache;
+  CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (data->input_buffer);
   GstGLTextureTarget gl_target;
   GstGLBaseMemoryAllocator *base_mem_alloc;
   GstGLVideoAllocationParams *params;
+  GstBuffer *output_buffer;

   base_mem_alloc = GST_GL_BASE_MEMORY_ALLOCATOR (gst_gl_memory_allocator_get_default (cache->ctx));

-  *mem1 = NULL;
-  *mem2 = NULL;
+  output_buffer = gst_buffer_new ();
+  gst_buffer_copy_into (output_buffer, data->input_buffer, GST_BUFFER_COPY_METADATA, 0, -1);

   CVOpenGLESTextureCacheFlush (cache->cache, 0);

@@ -178,8 +178,9 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,
           CVOpenGLESTextureGetName (texture), texture,
           (GDestroyNotify) CFRelease);

-      *mem1 = (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
-          (GstGLAllocationParams *) params);
+      gst_buffer_append_memory (output_buffer,
+          (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
+              (GstGLAllocationParams *) params));
       gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
       break;
     case GST_VIDEO_FORMAT_NV12: {

@@ -204,8 +205,9 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,
           CVOpenGLESTextureGetName (texture), texture,
           (GDestroyNotify) CFRelease);

-      *mem1 = (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
-          (GstGLAllocationParams *) params);
+      gst_buffer_append_memory (output_buffer,
+          (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
+              (GstGLAllocationParams *) params));
       gst_gl_allocation_params_free ((GstGLAllocationParams *) params);

       textype = gst_gl_texture_type_from_format (cache->ctx, GST_VIDEO_FORMAT_NV12, 1);

@@ -225,8 +227,9 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,
           CVOpenGLESTextureGetName (texture), texture,
           (GDestroyNotify) CFRelease);

-      *mem2 = (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
-          (GstGLAllocationParams *) params);
+      gst_buffer_append_memory (output_buffer,
+          (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
+              (GstGLAllocationParams *) params));
       gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
       break;
     }

@@ -237,21 +240,23 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,

   gst_object_unref (base_mem_alloc);

-  return TRUE;
+  data->output_buffer = output_buffer;
+
+  return;

 error:
-  return FALSE;
+  data->output_buffer = NULL;
 }
 #else /* !HAVE_IOS */

-static gboolean
-gl_mem_from_buffer (GstVideoTextureCache * cache,
-    GstBuffer * buffer, GstMemory **mem1, GstMemory **mem2)
+static void
+_do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
 {
-  CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (buffer);
-  IOSurfaceRef surface = CVPixelBufferGetIOSurface(pixel_buf);
+  GstVideoTextureCache *cache = data->cache;
+  CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (data->input_buffer);
+  IOSurfaceRef surface = CVPixelBufferGetIOSurface (pixel_buf);

-  *mem1 = *mem2 = NULL;
+  data->output_buffer = gst_buffer_new ();
+  gst_buffer_copy_into (data->output_buffer, data->input_buffer, GST_BUFFER_COPY_METADATA, 0, -1);
   for (int i = 0; i < GST_VIDEO_INFO_N_PLANES (&cache->input_info); i++) {
     GstIOSurfaceMemory *mem;

@@ -260,41 +265,21 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,
         surface, GST_GL_TEXTURE_TARGET_RECTANGLE, &cache->input_info,
         i, NULL, pixel_buf, (GDestroyNotify) CFRelease);

-    if (i == 0)
-      *mem1 = (GstMemory *) mem;
-    else
-      *mem2 = (GstMemory *) mem;
+    gst_buffer_append_memory (data->output_buffer, (GstMemory *) mem);
   }
-
-  return TRUE;
 }
 #endif

-static void
-_do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
-{
-  GstMemory *mem1 = NULL, *mem2 = NULL;
-  GstVideoTextureCache *cache = data->cache;
-  GstBuffer *buffer = data->input_buffer;
-
-  if (!gl_mem_from_buffer (cache, buffer, &mem1, &mem2)) {
-    gst_buffer_unref (buffer);
-    return;
-  }
-
-  gst_buffer_append_memory (buffer, mem1);
-  if (mem2)
-    gst_buffer_append_memory (buffer, mem2);
-
-  data->output_buffer = buffer;
-}
-
 GstBuffer *
 gst_video_texture_cache_get_gl_buffer (GstVideoTextureCache * cache,
     GstBuffer * cv_buffer)
 {
   ContextThreadData data = {cache, cv_buffer, NULL};

   gst_gl_context_thread_add (cache->ctx,
       (GstGLContextThreadFunc) _do_get_gl_buffer, &data);

   gst_buffer_unref (cv_buffer);

   return data.output_buffer;
 }

@@ -767,9 +767,7 @@ gst_vtdec_session_output_callback (void *decompression_output_ref_con,
     GST_WARNING_OBJECT (vtdec, "Output state not configured, release buffer");
     frame->flags &= VTDEC_FRAME_FLAG_SKIP;
   } else {
-    buf =
-        gst_core_video_buffer_new (image_buffer, &state->info,
-        vtdec->texture_cache == NULL);
+    buf = gst_core_video_buffer_new (image_buffer, &state->info);
     gst_video_codec_state_unref (state);
     GST_BUFFER_PTS (buf) = pts.value;
     GST_BUFFER_DURATION (buf) = duration.value;

@@ -1295,7 +1295,7 @@ gst_vtenc_enqueue_buffer (void *outputCallbackRefCon,

   /* We are dealing with block buffers here, so we don't need
    * to enable the use of the video meta API on the core media buffer */
-  frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE, TRUE);
+  frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE);

 beach:
   /* needed anyway so the frame will be released */