openni2src: Some random cleanup and minor fixes

Sebastian Dröge 2013-12-02 16:59:14 +01:00
parent 64675f0712
commit 1ba3edf682


@@ -41,20 +41,14 @@
 #endif
 
 #include "gstopenni2src.h"
 
 GST_DEBUG_CATEGORY_STATIC (openni2src_debug);
 
 static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
     GST_PAD_SRC,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS ("video/x-raw, "
-        "format = (string) {RGBA, RGB, GRAY16_LE} "
-        "framerate = (fraction) [0/1, MAX], "
-        "width = (int) [ 1, MAX ], "
-        "height = (int) [ 1, MAX ]")
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{RGBA, RGB, GRAY16_LE}"))
     );
 
-static GstElementClass *parent_class = NULL;
-
 enum
 {
   PROP_0,
@@ -80,8 +74,9 @@ gst_openni2_src_sourcetype_get_type (void)
     static const GEnumValue values[] = {
       {SOURCETYPE_DEPTH, "Get depth readings", "depth"},
       {SOURCETYPE_COLOR, "Get color readings", "color"},
-      {SOURCETYPE_BOTH, "Get color and depth (as alpha) readings - EXPERIMENTAL",
-          "both"},
+      {SOURCETYPE_BOTH,
+          "Get color and depth (as alpha) readings - EXPERIMENTAL",
+          "both"},
       {0, NULL, NULL},
     };
     etype = g_enum_register_static ("GstOpenni2SrcSourcetype", values);
@@ -93,9 +88,9 @@ gst_openni2_src_sourcetype_get_type (void)
 static void gst_openni2_src_dispose (GObject * object);
 static void gst_openni2_src_finalize (GObject * gobject);
 static void gst_openni2_src_set_property (GObject * object, guint prop_id,
     const GValue * value, GParamSpec * pspec);
 static void gst_openni2_src_get_property (GObject * object, guint prop_id,
     GValue * value, GParamSpec * pspec);
 
 /* basesrc methods */
 static gboolean gst_openni2_src_start (GstBaseSrc * bsrc);
@@ -104,23 +99,22 @@ static GstCaps *gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter);
 
 /* element methods */
 static GstStateChangeReturn gst_openni2_src_change_state (GstElement * element,
     GstStateChange transition);
 
 /* pushsrc method */
-static GstFlowReturn gst_openni2src_fill (GstPushSrc * src,
-    GstBuffer * buf);
+static GstFlowReturn gst_openni2src_fill (GstPushSrc * src, GstBuffer * buf);
 
 /* OpenNI2 interaction methods */
 static gboolean openni2_initialise_library ();
 static GstFlowReturn openni2_initialise_devices (GstOpenni2Src * src);
 static GstFlowReturn openni2_read_gstbuffer (GstOpenni2Src * src,
     GstBuffer * buf);
 static void openni2_finalise (GstOpenni2Src * src);
 
+#define parent_class gst_openni2_src_parent_class
 G_DEFINE_TYPE (GstOpenni2Src, gst_openni2_src, GST_TYPE_PUSH_SRC)
-static void
-gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
+static void gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
 {
   GObjectClass *gobject_class;
   GstPushSrcClass *pushsrc_class;
@@ -130,7 +124,6 @@ gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
   gobject_class = (GObjectClass *) klass;
   basesrc_class = (GstBaseSrcClass *) klass;
   pushsrc_class = (GstPushSrcClass *) klass;
-  parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
 
   gobject_class->dispose = gst_openni2_src_dispose;
   gobject_class->finalize = gst_openni2_src_finalize;
@@ -139,9 +132,8 @@ gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
   g_object_class_install_property
       (gobject_class, PROP_LOCATION,
       g_param_spec_string ("location", "Location",
-          "Source uri, can be a file or a device.", "",
-          (GParamFlags)
-          (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+          "Source uri, can be a file or a device.", "", (GParamFlags)
+          (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
   g_object_class_install_property (gobject_class, PROP_SOURCETYPE,
       g_param_spec_enum ("sourcetype",
          "Device source type",
@@ -170,7 +162,7 @@ gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
       "OpenNI2 Device Source");
   /* OpenNI2 initialisation inside this function */
-  openni2_initialise_library();
+  openni2_initialise_library ();
 }
 
 static void
@@ -183,8 +175,10 @@ static void
 gst_openni2_src_dispose (GObject * object)
 {
   GstOpenni2Src *ni2src = GST_OPENNI2_SRC (object);
+
   if (ni2src->gst_caps)
     gst_caps_unref (ni2src->gst_caps);
+
   G_OBJECT_CLASS (parent_class)->dispose (object);
 }
@@ -200,7 +194,7 @@ gst_openni2_src_finalize (GObject * gobject)
     ni2src->uri_name = NULL;
   }
   if (ni2src->gst_caps)
-    gst_caps_unref(ni2src->gst_caps);
+    gst_caps_unref (ni2src->gst_caps);
   ni2src->gst_caps = NULL;
 
   G_OBJECT_CLASS (parent_class)->finalize (gobject);
@@ -225,6 +219,7 @@ gst_openni2_src_set_property (GObject * object, guint prop_id,
         openni2src->uri_name = NULL;
       }
       openni2src->uri_name = g_value_dup_string (value);
+
       /* Action! */
       openni2_initialise_devices (openni2src);
       break;
@@ -270,22 +265,25 @@ gst_openni2_src_start (GstBaseSrc * bsrc)
 {
   GstOpenni2Src *src = GST_OPENNI2_SRC (bsrc);
   openni::Status rc = openni::STATUS_OK;
-  if (src->depth.isValid ()){
-    rc = src->depth.start();
-    if (rc != openni::STATUS_OK){
-      GST_ERROR_OBJECT( src, "Couldn't start the depth stream\n%s\n",
-          openni::OpenNI::getExtendedError());
+
+  if (src->depth.isValid ()) {
+    rc = src->depth.start ();
+    if (rc != openni::STATUS_OK) {
+      GST_ERROR_OBJECT (src, "Couldn't start the depth stream\n%s\n",
+          openni::OpenNI::getExtendedError ());
       return FALSE;
     }
   }
-  if (src->color.isValid ()){
-    rc = src->color.start();
-    if (rc != openni::STATUS_OK){
-      GST_ERROR_OBJECT( src, "Couldn't start the color stream\n%s\n",
-          openni::OpenNI::getExtendedError());
+
+  if (src->color.isValid ()) {
+    rc = src->color.start ();
+    if (rc != openni::STATUS_OK) {
+      GST_ERROR_OBJECT (src, "Couldn't start the color stream\n%s\n",
+          openni::OpenNI::getExtendedError ());
       return FALSE;
     }
   }
+
   return TRUE;
 }
@@ -295,9 +293,10 @@ gst_openni2_src_stop (GstBaseSrc * bsrc)
 {
   GstOpenni2Src *src = GST_OPENNI2_SRC (bsrc);
+
   if (src->depth.isValid ())
-    src->depth.stop();
+    src->depth.stop ();
   if (src->color.isValid ())
-    src->color.stop();
+    src->color.stop ();
 
   return TRUE;
 }
@@ -306,19 +305,21 @@ gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter)
 {
   GstOpenni2Src *ni2src;
   GstCaps *caps;
+
   ni2src = GST_OPENNI2_SRC (src);
   GST_OBJECT_LOCK (ni2src);
   if (ni2src->gst_caps) {
     GST_OBJECT_UNLOCK (ni2src);
     return (filter)
-        ? gst_caps_intersect_full (filter, ni2src->gst_caps, GST_CAPS_INTERSECT_FIRST)
+        ? gst_caps_intersect_full (filter, ni2src->gst_caps,
+        GST_CAPS_INTERSECT_FIRST)
         : gst_caps_ref (ni2src->gst_caps);
   }
 
   // If we are here, we need to compose the caps and return them.
-  caps = gst_caps_new_empty();
+  caps = gst_caps_new_empty ();
   if (ni2src->colorpixfmt != openni::PIXEL_FORMAT_RGB888)
     return caps;                /* Uh oh, not RGB :? Not supported. */
 
   if (ni2src->depth.isValid () && ni2src->color.isValid () &&
       ni2src->sourcetype == SOURCETYPE_BOTH) {
@@ -327,30 +328,28 @@ gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter)
         "framerate", GST_TYPE_FRACTION, ni2src->fps, 1,
         "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
         "width", G_TYPE_INT, ni2src->width,
-        "height", G_TYPE_INT, ni2src->height,
-        NULL);
+        "height", G_TYPE_INT, ni2src->height, NULL);
   } else if (ni2src->depth.isValid () && ni2src->sourcetype == SOURCETYPE_DEPTH) {
     caps = gst_caps_new_simple ("video/x-raw",
         "format", G_TYPE_STRING, "GRAY16_LE",
         "framerate", GST_TYPE_FRACTION, ni2src->fps, 1,
         "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
         "width", G_TYPE_INT, ni2src->width,
-        "height", G_TYPE_INT, ni2src->height,
-        NULL);
+        "height", G_TYPE_INT, ni2src->height, NULL);
   } else if (ni2src->color.isValid () && ni2src->sourcetype == SOURCETYPE_COLOR) {
     caps = gst_caps_new_simple ("video/x-raw",
         "format", G_TYPE_STRING, "RGB",
         "framerate", GST_TYPE_FRACTION, ni2src->fps, 1,
         "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
         "width", G_TYPE_INT, ni2src->width,
-        "height", G_TYPE_INT, ni2src->height,
-        NULL);
+        "height", G_TYPE_INT, ni2src->height, NULL);
   }
 
   GST_INFO_OBJECT (ni2src, "probed caps: %" GST_PTR_FORMAT, caps);
-  ni2src->gst_caps = gst_caps_ref(caps);
+  ni2src->gst_caps = gst_caps_ref (caps);
   GST_OBJECT_UNLOCK (ni2src);
   return (filter)
-      ? gst_caps_intersect_full (filter, ni2src->gst_caps, GST_CAPS_INTERSECT_FIRST)
+      ? gst_caps_intersect_full (filter, ni2src->gst_caps,
+      GST_CAPS_INTERSECT_FIRST)
       : gst_caps_ref (ni2src->gst_caps);
 }
@@ -382,7 +381,7 @@ gst_openni2_src_change_state (GstElement * element, GstStateChange transition)
   switch (transition) {
     case GST_STATE_CHANGE_READY_TO_NULL:
-      gst_openni2_src_stop(GST_BASE_SRC(src));
+      gst_openni2_src_stop (GST_BASE_SRC (src));
       if (src->gst_caps) {
         gst_caps_unref (src->gst_caps);
         src->gst_caps = NULL;
@@ -420,7 +419,8 @@ openni2_initialise_library ()
   openni::Status rc = openni::STATUS_OK;
   rc = openni::OpenNI::initialize ();
   if (rc != openni::STATUS_OK) {
-    GST_ERROR("Initialization failed: %s", openni::OpenNI::getExtendedError ());
+    GST_ERROR ("Initialization failed: %s",
+        openni::OpenNI::getExtendedError ());
     openni::OpenNI::shutdown ();
     return GST_FLOW_ERROR;
   }
@@ -432,6 +432,7 @@ openni2_initialise_devices (GstOpenni2Src * src)
 {
   openni::Status rc = openni::STATUS_OK;
   const char *deviceURI = openni::ANY_DEVICE;
+
   if (src->uri_name)
     deviceURI = src->uri_name;
@@ -452,9 +453,10 @@ openni2_initialise_devices (GstOpenni2Src * src)
       GST_ERROR_OBJECT (src, "%s", openni::OpenNI::getExtendedError ());
       src->depth.destroy ();
     }
-  } else
+  } else {
     GST_WARNING_OBJECT (src, "Couldn't find depth stream: %s",
         openni::OpenNI::getExtendedError ());
+  }
 
   /** color sensor **/
   rc = src->color.create (src->device, openni::SENSOR_COLOR);
@@ -465,10 +467,10 @@ openni2_initialise_devices (GstOpenni2Src * src)
           openni::OpenNI::getExtendedError ());
       src->color.destroy ();
     }
-  } else
+  } else {
     GST_WARNING_OBJECT (src, "Couldn't find color stream: %s",
         openni::OpenNI::getExtendedError ());
+  }
 
   if (!src->depth.isValid () && !src->color.isValid ()) {
     GST_ERROR_OBJECT (src, "No valid streams. Exiting\n");
@@ -489,9 +491,9 @@ openni2_initialise_devices (GstOpenni2Src * src)
     if (depthWidth == colorWidth && depthHeight == colorHeight) {
       src->width = depthWidth;
      src->height = depthHeight;
-      src->fps = src->depthVideoMode.getFps();
-      src->colorpixfmt = src->colorVideoMode.getPixelFormat();
-      src->depthpixfmt = src->depthVideoMode.getPixelFormat();
+      src->fps = src->depthVideoMode.getFps ();
+      src->colorpixfmt = src->colorVideoMode.getPixelFormat ();
+      src->depthpixfmt = src->depthVideoMode.getPixelFormat ();
     } else {
       GST_ERROR_OBJECT (src, "Error - expect color and depth to be"
           " in same resolution: D: %dx%d vs C: %dx%d",
@@ -499,20 +501,20 @@ openni2_initialise_devices (GstOpenni2Src * src)
       return GST_FLOW_ERROR;
     }
     GST_INFO_OBJECT (src, "DEPTH&COLOR resolution: %dx%d",
        src->width, src->height);
   } else if (src->depth.isValid ()) {
     src->depthVideoMode = src->depth.getVideoMode ();
     src->width = src->depthVideoMode.getResolutionX ();
     src->height = src->depthVideoMode.getResolutionY ();
-    src->fps = src->depthVideoMode.getFps();
-    src->depthpixfmt = src->depthVideoMode.getPixelFormat();
+    src->fps = src->depthVideoMode.getFps ();
+    src->depthpixfmt = src->depthVideoMode.getPixelFormat ();
     GST_INFO_OBJECT (src, "DEPTH resolution: %dx%d", src->width, src->height);
   } else if (src->color.isValid ()) {
     src->colorVideoMode = src->color.getVideoMode ();
     src->width = src->colorVideoMode.getResolutionX ();
     src->height = src->colorVideoMode.getResolutionY ();
-    src->fps = src->colorVideoMode.getFps();
-    src->colorpixfmt = src->colorVideoMode.getPixelFormat();
+    src->fps = src->colorVideoMode.getFps ();
+    src->colorpixfmt = src->colorVideoMode.getPixelFormat ();
     GST_INFO_OBJECT (src, "COLOR resolution: %dx%d", src->width, src->height);
   } else {
     GST_ERROR_OBJECT (src, "Expected at least one of the streams to be valid.");
@@ -528,100 +530,105 @@ openni2_read_gstbuffer (GstOpenni2Src * src, GstBuffer * buf)
   openni::Status rc = openni::STATUS_OK;
   openni::VideoStream * pStream = &(src->depth);
   int changedStreamDummy;
+  GstMapInfo info;
 
   /* Block until we get some data */
   rc = openni::OpenNI::waitForAnyStream (&pStream, 1, &changedStreamDummy,
       SAMPLE_READ_WAIT_TIMEOUT);
   if (rc != openni::STATUS_OK) {
     GST_ERROR_OBJECT (src, "Frame read timeout: %s",
         openni::OpenNI::getExtendedError ());
     return GST_FLOW_ERROR;
   }
-  GstMapInfo info;
 
   if (src->depth.isValid () && src->color.isValid () &&
       src->sourcetype == SOURCETYPE_BOTH) {
     rc = src->depth.readFrame (&src->depthFrame);
     if (rc != openni::STATUS_OK) {
       GST_ERROR_OBJECT (src, "Frame read error: %s",
           openni::OpenNI::getExtendedError ());
       return GST_FLOW_ERROR;
     }
     rc = src->color.readFrame (&src->colorFrame);
     if (rc != openni::STATUS_OK) {
       GST_ERROR_OBJECT (src, "Frame read error: %s",
           openni::OpenNI::getExtendedError ());
       return GST_FLOW_ERROR;
     }
-    if ((src->colorFrame.getStrideInBytes() != src->colorFrame.getWidth()) ||
-        (src->depthFrame.getStrideInBytes() != 2*src->depthFrame.getWidth())) {
+    if ((src->colorFrame.getStrideInBytes () != src->colorFrame.getWidth ()) ||
+        (src->depthFrame.getStrideInBytes () !=
+            2 * src->depthFrame.getWidth ())) {
       // This case is not handled - yet :B
-      GST_ERROR_OBJECT(src, "Stride does not coincide with width");
+      GST_ERROR_OBJECT (src, "Stride does not coincide with width");
       return GST_FLOW_ERROR;
     }
-    int framesize = src->colorFrame.getDataSize() + src->depthFrame.getDataSize()/2;
-    buf = gst_buffer_new_and_alloc(framesize);
+    int framesize =
+        src->colorFrame.getDataSize () + src->depthFrame.getDataSize () / 2;
+    buf = gst_buffer_new_and_alloc (framesize);
     /* Copy colour information */
-    gst_buffer_map(buf, &info, (GstMapFlags)(GST_MAP_WRITE));
-    memcpy(info.data, src->colorFrame.getData(), src->colorFrame.getDataSize());
-    guint8* pData = info.data + src->colorFrame.getDataSize();
+    gst_buffer_map (buf, &info, (GstMapFlags) (GST_MAP_WRITE));
+    memcpy (info.data, src->colorFrame.getData (),
+        src->colorFrame.getDataSize ());
+    guint8 *pData = info.data + src->colorFrame.getDataSize ();
     /* Add depth as 8bit alpha channel, depth is 16bit samples. */
-    guint16* pDepth = (guint16*) src->depthFrame.getData();
-    for( int i=0; i < src->depthFrame.getDataSize()/2; ++i)
+    guint16 *pDepth = (guint16 *) src->depthFrame.getData ();
+    for (int i = 0; i < src->depthFrame.getDataSize () / 2; ++i)
       pData[i] = pDepth[i] >> 8;
-    GST_WARNING_OBJECT (src, "sending buffer (%d+%d)B [%08llu]",
-        src->colorFrame.getDataSize(),
+    GST_BUFFER_PTS (buf) = src->colorFrame.getTimestamp () * 1000;
+    GST_LOG_OBJECT (src, "sending buffer (%d+%d)B [%08llu]",
+        src->colorFrame.getDataSize (),
         src->depthFrame.getDataSize (),
         (long long) src->depthFrame.getTimestamp ());
-    gst_buffer_unmap(buf, &info);
+    gst_buffer_unmap (buf, &info);
   } else if (src->depth.isValid () && src->sourcetype == SOURCETYPE_DEPTH) {
     rc = src->depth.readFrame (&src->depthFrame);
     if (rc != openni::STATUS_OK) {
       GST_ERROR_OBJECT (src, "Frame read error: %s",
           openni::OpenNI::getExtendedError ());
       return GST_FLOW_ERROR;
     }
-    if (src->depthFrame.getStrideInBytes() != 2*src->depthFrame.getWidth()) {
+    if (src->depthFrame.getStrideInBytes () != 2 * src->depthFrame.getWidth ()) {
      // This case is not handled - yet :B
-      GST_ERROR_OBJECT(src, "Stride does not coincide with width");
+      GST_ERROR_OBJECT (src, "Stride does not coincide with width");
      return GST_FLOW_ERROR;
    }
-    int framesize = src->depthFrame.getDataSize();
-    buf = gst_buffer_new_and_alloc(framesize);
-    gst_buffer_map(buf, &info, (GstMapFlags)(GST_MAP_WRITE));
-    memcpy(info.data, src->depthFrame.getData(), framesize);
-    GST_BUFFER_PTS(buf) = src->depthFrame.getTimestamp() * 1000;
-    GST_WARNING_OBJECT (src, "sending buffer (%dx%d)=%dB [%08llu]",
+    int framesize = src->depthFrame.getDataSize ();
+    buf = gst_buffer_new_and_alloc (framesize);
+    gst_buffer_map (buf, &info, (GstMapFlags) (GST_MAP_WRITE));
+    memcpy (info.data, src->depthFrame.getData (), framesize);
+    GST_BUFFER_PTS (buf) = src->depthFrame.getTimestamp () * 1000;
+    GST_LOG_OBJECT (src, "sending buffer (%dx%d)=%dB [%08llu]",
         src->depthFrame.getWidth (),
         src->depthFrame.getHeight (),
         src->depthFrame.getDataSize (),
         (long long) src->depthFrame.getTimestamp ());
-    gst_buffer_unmap(buf, &info);
+    gst_buffer_unmap (buf, &info);
   } else if (src->color.isValid () && src->sourcetype == SOURCETYPE_COLOR) {
     rc = src->color.readFrame (&src->colorFrame);
     if (rc != openni::STATUS_OK) {
       GST_ERROR_OBJECT (src, "Frame read error: %s",
           openni::OpenNI::getExtendedError ());
       return GST_FLOW_ERROR;
     }
-    if (src->colorFrame.getStrideInBytes() != src->colorFrame.getWidth()) {
+    if (src->colorFrame.getStrideInBytes () != src->colorFrame.getWidth ()) {
       // This case is not handled - yet :B
-      GST_ERROR_OBJECT(src, "Stride does not coincide with width");
+      GST_ERROR_OBJECT (src, "Stride does not coincide with width");
       return GST_FLOW_ERROR;
     }
-    int framesize = src->colorFrame.getDataSize();
-    buf = gst_buffer_new_and_alloc(framesize);
-    gst_buffer_map(buf, &info, (GstMapFlags)(GST_MAP_WRITE));
-    memcpy(info.data, src->depthFrame.getData(), framesize);
-    GST_BUFFER_PTS(buf) = src->colorFrame.getTimestamp() * 1000;
-    GST_WARNING_OBJECT (src, "sending buffer (%dx%d)=%dB [%08llu]",
+
+    int framesize = src->colorFrame.getDataSize ();
+    buf = gst_buffer_new_and_alloc (framesize);
+    gst_buffer_map (buf, &info, (GstMapFlags) (GST_MAP_WRITE));
+    memcpy (info.data, src->depthFrame.getData (), framesize);
+    GST_BUFFER_PTS (buf) = src->colorFrame.getTimestamp () * 1000;
+    GST_LOG_OBJECT (src, "sending buffer (%dx%d)=%dB [%08llu]",
        src->colorFrame.getWidth (),
        src->colorFrame.getHeight (),
        src->colorFrame.getDataSize (),
        (long long) src->colorFrame.getTimestamp ());
-    gst_buffer_unmap(buf, &info);
+    gst_buffer_unmap (buf, &info);
   }
   return GST_FLOW_OK;
 }
@@ -629,7 +636,7 @@ openni2_read_gstbuffer (GstOpenni2Src * src, GstBuffer * buf)
 static void
 openni2_finalise (GstOpenni2Src * src)
 {
-  src->depth.destroy();
-  src->color.destroy();
+  src->depth.destroy ();
+  src->color.destroy ();
   openni::OpenNI::shutdown ();
 }
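
Reference sketch (not part of the commit): the one-line pad template above relies on the GST_VIDEO_CAPS_MAKE helper from gst-plugins-base, which expands a format list into the full video/x-raw caps string with unrestricted width, height and framerate, i.e. roughly what the old hand-written caps spelled out. The other structural change, dropping the hand-managed parent_class pointer, works because G_DEFINE_TYPE already defines a static gst_openni2_src_parent_class variable; the new #define simply maps the existing parent_class references onto it.

/* Sketch only, not from the commit: what the new template is roughly
 * equivalent to. GST_VIDEO_CAPS_MAKE comes from gst-plugins-base and is
 * pulled in here via the umbrella header <gst/video/video.h>. */
#include <gst/gst.h>
#include <gst/video/video.h>

static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    /* GST_VIDEO_CAPS_MAKE ("{RGBA, RGB, GRAY16_LE}") expands to roughly
     * "video/x-raw, format = (string) {RGBA, RGB, GRAY16_LE}, "
     * "width = (int) [ 1, max ], height = (int) [ 1, max ], "
     * "framerate = (fraction) [ 0, max ]" */
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{RGBA, RGB, GRAY16_LE}")));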