diff --git a/sys/applemedia/avfassetsrc.m b/sys/applemedia/avfassetsrc.m
index ac77900628..0a40c0dbcc 100644
--- a/sys/applemedia/avfassetsrc.m
+++ b/sys/applemedia/avfassetsrc.m
@@ -1083,7 +1083,7 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
     return NULL;
   }
 
-  buf = gst_core_media_buffer_new (cmbuf, FALSE);
+  buf = gst_core_media_buffer_new (cmbuf, FALSE, TRUE);
   dur = CMSampleBufferGetDuration (cmbuf);
   ts = CMSampleBufferGetPresentationTimeStamp (cmbuf);
   if (dur.value != 0) {
diff --git a/sys/applemedia/avfvideosrc.m b/sys/applemedia/avfvideosrc.m
index 0f375c26e2..72d9586997 100644
--- a/sys/applemedia/avfvideosrc.m
+++ b/sys/applemedia/avfvideosrc.m
@@ -863,7 +863,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     }
   }
 
-  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta);
+  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache == NULL);
   if (format == GST_VIDEO_FORMAT_RGBA) {
     /* So now buf contains BGRA data (!) . Since downstream is actually going to
      * use the GL upload meta to get RGBA textures (??), we need to override the
diff --git a/sys/applemedia/coremediabuffer.c b/sys/applemedia/coremediabuffer.c
index ac9703dfaf..1070da5265 100644
--- a/sys/applemedia/coremediabuffer.c
+++ b/sys/applemedia/coremediabuffer.c
@@ -86,7 +86,7 @@ gst_core_media_buffer_get_video_format (OSType format)
 
 static gboolean
 gst_core_media_buffer_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
-    CVPixelBufferRef pixel_buf, gboolean * has_padding)
+    CVPixelBufferRef pixel_buf, gboolean * has_padding, gboolean map)
 {
   guint n_planes;
   gsize offset[GST_VIDEO_MAX_PLANES] = { 0 };
@@ -94,7 +94,7 @@ gst_core_media_buffer_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
   GstVideoMeta *video_meta;
   UInt32 size;
 
-  if (CVPixelBufferLockBaseAddress (pixel_buf, 0) != kCVReturnSuccess) {
+  if (map && CVPixelBufferLockBaseAddress (pixel_buf, 0) != kCVReturnSuccess) {
     GST_ERROR ("Could not lock pixel buffer base address");
     return FALSE;
   }
@@ -116,10 +116,12 @@ gst_core_media_buffer_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
       offset[i] = plane_offset;
       plane_offset += size;
 
-      gst_buffer_append_memory (buf,
-          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
-              CVPixelBufferGetBaseAddressOfPlane (pixel_buf, i), size, 0, size,
-              NULL, NULL));
+      if (map) {
+        gst_buffer_append_memory (buf,
+            gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
+                CVPixelBufferGetBaseAddressOfPlane (pixel_buf, i), size, 0,
+                size, NULL, NULL));
+      }
     }
   } else {
 
@@ -128,10 +130,12 @@
     offset[0] = 0;
     size = stride[0] * CVPixelBufferGetHeight (pixel_buf);
 
-    gst_buffer_append_memory (buf,
-        gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
-            CVPixelBufferGetBaseAddress (pixel_buf), size, 0, size, NULL,
-            NULL));
+    if (map) {
+      gst_buffer_append_memory (buf,
+          gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
+              CVPixelBufferGetBaseAddress (pixel_buf), size, 0, size, NULL,
+              NULL));
+    }
   }
 
   video_meta =
@@ -237,7 +241,7 @@ gst_video_info_init_from_pixel_buffer (GstVideoInfo * info,
 
 GstBuffer *
 gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
-    gboolean use_video_meta)
+    gboolean use_video_meta, gboolean map)
 {
   CVImageBufferRef image_buf;
   CMBlockBufferRef block_buf;
@@ -271,13 +275,13 @@ gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
     }
 
     if (!gst_core_media_buffer_wrap_pixel_buffer (buf, &info, meta->pixel_buf,
-            &has_padding)) {
+            &has_padding, map)) {
       goto error;
     }
 
     /* If the video meta API is not supported, remove padding by
      * copying the core media buffer to a system memory buffer */
-    if (has_padding && !use_video_meta) {
+    if (map && has_padding && !use_video_meta) {
       GstBuffer *copy_buf;
       copy_buf = gst_core_media_buffer_new_from_buffer (buf, &info);
       if (!copy_buf) {
@@ -289,11 +293,9 @@ gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
     }
 
   } else if (block_buf != NULL) {
-
-    if (!gst_core_media_buffer_wrap_block_buffer (buf, block_buf)) {
+    if (map && !gst_core_media_buffer_wrap_block_buffer (buf, block_buf)) {
       goto error;
     }
-
   } else {
     goto error;
   }
diff --git a/sys/applemedia/coremediabuffer.h b/sys/applemedia/coremediabuffer.h
index bc18acfd95..2b3b74d6e3 100644
--- a/sys/applemedia/coremediabuffer.h
+++ b/sys/applemedia/coremediabuffer.h
@@ -43,7 +43,8 @@ typedef struct _GstCoreMediaMeta
 
 GstBuffer *      gst_core_media_buffer_new              (CMSampleBufferRef sample_buf,
-                                                         gboolean use_video_meta);
+                                                         gboolean use_video_meta,
+                                                         gboolean map);
 CVPixelBufferRef gst_core_media_buffer_get_pixel_buffer (GstBuffer * buf);
 
 GType            gst_core_media_meta_api_get_type       (void);
diff --git a/sys/applemedia/vtenc.c b/sys/applemedia/vtenc.c
index 269172b363..94bc32d4bf 100644
--- a/sys/applemedia/vtenc.c
+++ b/sys/applemedia/vtenc.c
@@ -1272,7 +1272,7 @@ gst_vtenc_enqueue_buffer (void *outputCallbackRefCon,
 
   /* We are dealing with block buffers here, so we don't need
    * to enable the use of the video meta API on the core media buffer */
-  frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE);
+  frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE, TRUE);
 
   g_async_queue_push (self->cur_outframes, frame);
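
For reference, a minimal caller sketch (not part of the patch) of the extended gst_core_media_buffer_new() signature introduced above; the wrap_sample() helper and its have_texture_cache flag are hypothetical, mirroring the avfvideosrc hunk where map is passed as textureCache == NULL:

/* Hypothetical sketch only: with map = FALSE the modified code path skips
 * CVPixelBufferLockBaseAddress() and does not wrap the pixel-buffer planes
 * as GstMemory, leaving the data in the CVPixelBuffer for a downstream
 * consumer such as a texture cache. */
#include "coremediabuffer.h"

static GstBuffer *
wrap_sample (CMSampleBufferRef sample_buf, gboolean have_texture_cache)
{
  /* Map into system memory only when no texture cache will handle the data. */
  return gst_core_media_buffer_new (sample_buf, TRUE, !have_texture_cache);
}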