openni2src: Some random cleanup and minor fixes

Sebastian Dröge 2013-12-02 16:59:14 +01:00
parent 64675f0712
commit 1ba3edf682


@@ -41,20 +41,14 @@
#endif
#include "gstopenni2src.h"
GST_DEBUG_CATEGORY_STATIC (openni2src_debug);
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw, "
"format = (string) {RGBA, RGB, GRAY16_LE} "
"framerate = (fraction) [0/1, MAX], "
"width = (int) [ 1, MAX ], "
"height = (int) [ 1, MAX ]")
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{RGBA, RGB, GRAY16_LE}"))
);
static GstElementClass *parent_class = NULL;
enum
{
PROP_0,
@@ -80,8 +74,9 @@ gst_openni2_src_sourcetype_get_type (void)
static const GEnumValue values[] = {
{SOURCETYPE_DEPTH, "Get depth readings", "depth"},
{SOURCETYPE_COLOR, "Get color readings", "color"},
{SOURCETYPE_BOTH, "Get color and depth (as alpha) readings - EXPERIMENTAL",
"both"},
{SOURCETYPE_BOTH,
"Get color and depth (as alpha) readings - EXPERIMENTAL",
"both"},
{0, NULL, NULL},
};
etype = g_enum_register_static ("GstOpenni2SrcSourcetype", values);
@@ -93,9 +88,9 @@ gst_openni2_src_sourcetype_get_type (void)
static void gst_openni2_src_dispose (GObject * object);
static void gst_openni2_src_finalize (GObject * gobject);
static void gst_openni2_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
const GValue * value, GParamSpec * pspec);
static void gst_openni2_src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
GValue * value, GParamSpec * pspec);
/* basesrc methods */
static gboolean gst_openni2_src_start (GstBaseSrc * bsrc);
@@ -104,23 +99,22 @@ static GstCaps *gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter);
/* element methods */
static GstStateChangeReturn gst_openni2_src_change_state (GstElement * element,
GstStateChange transition);
GstStateChange transition);
/* pushsrc method */
static GstFlowReturn gst_openni2src_fill (GstPushSrc * src,
GstBuffer * buf);
static GstFlowReturn gst_openni2src_fill (GstPushSrc * src, GstBuffer * buf);
/* OpenNI2 interaction methods */
static gboolean openni2_initialise_library ();
static GstFlowReturn openni2_initialise_devices (GstOpenni2Src * src);
static GstFlowReturn openni2_read_gstbuffer (GstOpenni2Src * src,
GstBuffer * buf);
GstBuffer * buf);
static void openni2_finalise (GstOpenni2Src * src);
#define parent_class gst_openni2_src_parent_class
G_DEFINE_TYPE (GstOpenni2Src, gst_openni2_src, GST_TYPE_PUSH_SRC)
static void
gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
static void gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
{
GObjectClass *gobject_class;
GstPushSrcClass *pushsrc_class;
@@ -130,7 +124,6 @@ gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
gobject_class = (GObjectClass *) klass;
basesrc_class = (GstBaseSrcClass *) klass;
pushsrc_class = (GstPushSrcClass *) klass;
parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
gobject_class->dispose = gst_openni2_src_dispose;
gobject_class->finalize = gst_openni2_src_finalize;
@@ -139,9 +132,8 @@ gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
g_object_class_install_property
(gobject_class, PROP_LOCATION,
g_param_spec_string ("location", "Location",
"Source uri, can be a file or a device.", "",
(GParamFlags)
(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
"Source uri, can be a file or a device.", "", (GParamFlags)
(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_SOURCETYPE,
g_param_spec_enum ("sourcetype",
"Device source type",
@@ -170,7 +162,7 @@ gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
"OpenNI2 Device Source");
/* OpenNI2 initialisation inside this function */
openni2_initialise_library();
openni2_initialise_library ();
}
static void
@@ -183,8 +175,10 @@ static void
gst_openni2_src_dispose (GObject * object)
{
GstOpenni2Src *ni2src = GST_OPENNI2_SRC (object);
if (ni2src->gst_caps)
gst_caps_unref (ni2src->gst_caps);
G_OBJECT_CLASS (parent_class)->dispose (object);
}
@@ -200,7 +194,7 @@ gst_openni2_src_finalize (GObject * gobject)
ni2src->uri_name = NULL;
}
if (ni2src->gst_caps)
gst_caps_unref(ni2src->gst_caps);
gst_caps_unref (ni2src->gst_caps);
ni2src->gst_caps = NULL;
G_OBJECT_CLASS (parent_class)->finalize (gobject);
@@ -225,6 +219,7 @@ gst_openni2_src_set_property (GObject * object, guint prop_id,
openni2src->uri_name = NULL;
}
openni2src->uri_name = g_value_dup_string (value);
/* Action! */
openni2_initialise_devices (openni2src);
break;
@@ -270,22 +265,25 @@ gst_openni2_src_start (GstBaseSrc * bsrc)
{
GstOpenni2Src *src = GST_OPENNI2_SRC (bsrc);
openni::Status rc = openni::STATUS_OK;
if (src->depth.isValid ()){
rc = src->depth.start();
if (rc != openni::STATUS_OK){
GST_ERROR_OBJECT( src, "Couldn't start the depth stream\n%s\n",
openni::OpenNI::getExtendedError());
if (src->depth.isValid ()) {
rc = src->depth.start ();
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Couldn't start the depth stream\n%s\n",
openni::OpenNI::getExtendedError ());
return FALSE;
}
}
if (src->color.isValid ()){
rc = src->color.start();
if (rc != openni::STATUS_OK){
GST_ERROR_OBJECT( src, "Couldn't start the color stream\n%s\n",
openni::OpenNI::getExtendedError());
if (src->color.isValid ()) {
rc = src->color.start ();
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Couldn't start the color stream\n%s\n",
openni::OpenNI::getExtendedError ());
return FALSE;
}
}
return TRUE;
}
@@ -295,9 +293,10 @@ gst_openni2_src_stop (GstBaseSrc * bsrc)
GstOpenni2Src *src = GST_OPENNI2_SRC (bsrc);
if (src->depth.isValid ())
src->depth.stop();
src->depth.stop ();
if (src->color.isValid ())
src->color.stop();
src->color.stop ();
return TRUE;
}
@@ -306,19 +305,21 @@ gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter)
{
GstOpenni2Src *ni2src;
GstCaps *caps;
ni2src = GST_OPENNI2_SRC (src);
GST_OBJECT_LOCK (ni2src);
if (ni2src->gst_caps) {
GST_OBJECT_UNLOCK (ni2src);
return (filter)
? gst_caps_intersect_full (filter, ni2src->gst_caps, GST_CAPS_INTERSECT_FIRST)
? gst_caps_intersect_full (filter, ni2src->gst_caps,
GST_CAPS_INTERSECT_FIRST)
: gst_caps_ref (ni2src->gst_caps);
}
// If we are here, we need to compose the caps and return them.
caps = gst_caps_new_empty();
caps = gst_caps_new_empty ();
if (ni2src->colorpixfmt != openni::PIXEL_FORMAT_RGB888)
return caps; /* Uh oh, not RGB :? Not supported. */
return caps; /* Uh oh, not RGB :? Not supported. */
if (ni2src->depth.isValid () && ni2src->color.isValid () &&
ni2src->sourcetype == SOURCETYPE_BOTH) {
@@ -327,30 +328,28 @@ gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter)
"framerate", GST_TYPE_FRACTION, ni2src->fps, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
"width", G_TYPE_INT, ni2src->width,
"height", G_TYPE_INT, ni2src->height,
NULL);
"height", G_TYPE_INT, ni2src->height, NULL);
} else if (ni2src->depth.isValid () && ni2src->sourcetype == SOURCETYPE_DEPTH) {
caps = gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "GRAY16_LE",
"framerate", GST_TYPE_FRACTION, ni2src->fps, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
"width", G_TYPE_INT, ni2src->width,
"height", G_TYPE_INT, ni2src->height,
NULL);
"height", G_TYPE_INT, ni2src->height, NULL);
} else if (ni2src->color.isValid () && ni2src->sourcetype == SOURCETYPE_COLOR) {
caps = gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "RGB",
"framerate", GST_TYPE_FRACTION, ni2src->fps, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
"width", G_TYPE_INT, ni2src->width,
"height", G_TYPE_INT, ni2src->height,
NULL);
"height", G_TYPE_INT, ni2src->height, NULL);
}
GST_INFO_OBJECT (ni2src, "probed caps: %" GST_PTR_FORMAT, caps);
ni2src->gst_caps = gst_caps_ref(caps);
ni2src->gst_caps = gst_caps_ref (caps);
GST_OBJECT_UNLOCK (ni2src);
return (filter)
? gst_caps_intersect_full (filter, ni2src->gst_caps, GST_CAPS_INTERSECT_FIRST)
? gst_caps_intersect_full (filter, ni2src->gst_caps,
GST_CAPS_INTERSECT_FIRST)
: gst_caps_ref (ni2src->gst_caps);
}
@@ -382,7 +381,7 @@ gst_openni2_src_change_state (GstElement * element, GstStateChange transition)
switch (transition) {
case GST_STATE_CHANGE_READY_TO_NULL:
gst_openni2_src_stop(GST_BASE_SRC(src));
gst_openni2_src_stop (GST_BASE_SRC (src));
if (src->gst_caps) {
gst_caps_unref (src->gst_caps);
src->gst_caps = NULL;
@@ -420,7 +419,8 @@ openni2_initialise_library ()
openni::Status rc = openni::STATUS_OK;
rc = openni::OpenNI::initialize ();
if (rc != openni::STATUS_OK) {
GST_ERROR("Initialization failed: %s", openni::OpenNI::getExtendedError ());
GST_ERROR ("Initialization failed: %s",
openni::OpenNI::getExtendedError ());
openni::OpenNI::shutdown ();
return GST_FLOW_ERROR;
}
@@ -432,6 +432,7 @@ openni2_initialise_devices (GstOpenni2Src * src)
{
openni::Status rc = openni::STATUS_OK;
const char *deviceURI = openni::ANY_DEVICE;
if (src->uri_name)
deviceURI = src->uri_name;
@@ -452,9 +453,10 @@ openni2_initialise_devices (GstOpenni2Src * src)
GST_ERROR_OBJECT (src, "%s", openni::OpenNI::getExtendedError ());
src->depth.destroy ();
}
} else
} else {
GST_WARNING_OBJECT (src, "Couldn't find depth stream: %s",
openni::OpenNI::getExtendedError ());
}
/** color sensor **/
rc = src->color.create (src->device, openni::SENSOR_COLOR);
@@ -465,10 +467,10 @@ openni2_initialise_devices (GstOpenni2Src * src)
openni::OpenNI::getExtendedError ());
src->color.destroy ();
}
} else
} else {
GST_WARNING_OBJECT (src, "Couldn't find color stream: %s",
openni::OpenNI::getExtendedError ());
}
if (!src->depth.isValid () && !src->color.isValid ()) {
GST_ERROR_OBJECT (src, "No valid streams. Exiting\n");
@@ -489,9 +491,9 @@ openni2_initialise_devices (GstOpenni2Src * src)
if (depthWidth == colorWidth && depthHeight == colorHeight) {
src->width = depthWidth;
src->height = depthHeight;
src->fps = src->depthVideoMode.getFps();
src->colorpixfmt = src->colorVideoMode.getPixelFormat();
src->depthpixfmt = src->depthVideoMode.getPixelFormat();
src->fps = src->depthVideoMode.getFps ();
src->colorpixfmt = src->colorVideoMode.getPixelFormat ();
src->depthpixfmt = src->depthVideoMode.getPixelFormat ();
} else {
GST_ERROR_OBJECT (src, "Error - expect color and depth to be"
" in same resolution: D: %dx%d vs C: %dx%d",
@@ -499,20 +501,20 @@ openni2_initialise_devices (GstOpenni2Src * src)
return GST_FLOW_ERROR;
}
GST_INFO_OBJECT (src, "DEPTH&COLOR resolution: %dx%d",
src->width, src->height);
src->width, src->height);
} else if (src->depth.isValid ()) {
src->depthVideoMode = src->depth.getVideoMode ();
src->width = src->depthVideoMode.getResolutionX ();
src->height = src->depthVideoMode.getResolutionY ();
src->fps = src->depthVideoMode.getFps();
src->depthpixfmt = src->depthVideoMode.getPixelFormat();
src->fps = src->depthVideoMode.getFps ();
src->depthpixfmt = src->depthVideoMode.getPixelFormat ();
GST_INFO_OBJECT (src, "DEPTH resolution: %dx%d", src->width, src->height);
} else if (src->color.isValid ()) {
src->colorVideoMode = src->color.getVideoMode ();
src->width = src->colorVideoMode.getResolutionX ();
src->height = src->colorVideoMode.getResolutionY ();
src->fps = src->colorVideoMode.getFps();
src->colorpixfmt = src->colorVideoMode.getPixelFormat();
src->fps = src->colorVideoMode.getFps ();
src->colorpixfmt = src->colorVideoMode.getPixelFormat ();
GST_INFO_OBJECT (src, "COLOR resolution: %dx%d", src->width, src->height);
} else {
GST_ERROR_OBJECT (src, "Expected at least one of the streams to be valid.");
@@ -528,100 +530,105 @@ openni2_read_gstbuffer (GstOpenni2Src * src, GstBuffer * buf)
openni::Status rc = openni::STATUS_OK;
openni::VideoStream * pStream = &(src->depth);
int changedStreamDummy;
GstMapInfo info;
/* Block until we get some data */
rc = openni::OpenNI::waitForAnyStream (&pStream, 1, &changedStreamDummy,
SAMPLE_READ_WAIT_TIMEOUT);
SAMPLE_READ_WAIT_TIMEOUT);
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Frame read timeout: %s",
openni::OpenNI::getExtendedError ());
return GST_FLOW_ERROR;
}
GstMapInfo info;
if (src->depth.isValid () && src->color.isValid () &&
src->sourcetype == SOURCETYPE_BOTH) {
rc = src->depth.readFrame (&src->depthFrame);
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Frame read error: %s",
openni::OpenNI::getExtendedError ());
openni::OpenNI::getExtendedError ());
return GST_FLOW_ERROR;
}
rc = src->color.readFrame (&src->colorFrame);
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Frame read error: %s",
openni::OpenNI::getExtendedError ());
openni::OpenNI::getExtendedError ());
return GST_FLOW_ERROR;
}
if ((src->colorFrame.getStrideInBytes() != src->colorFrame.getWidth()) ||
(src->depthFrame.getStrideInBytes() != 2*src->depthFrame.getWidth())) {
if ((src->colorFrame.getStrideInBytes () != src->colorFrame.getWidth ()) ||
(src->depthFrame.getStrideInBytes () !=
2 * src->depthFrame.getWidth ())) {
// This case is not handled - yet :B
GST_ERROR_OBJECT(src, "Stride does not coincide with width");
GST_ERROR_OBJECT (src, "Stride does not coincide with width");
return GST_FLOW_ERROR;
}
int framesize = src->colorFrame.getDataSize() + src->depthFrame.getDataSize()/2;
buf = gst_buffer_new_and_alloc(framesize);
int framesize =
src->colorFrame.getDataSize () + src->depthFrame.getDataSize () / 2;
buf = gst_buffer_new_and_alloc (framesize);
/* Copy colour information */
gst_buffer_map(buf, &info, (GstMapFlags)(GST_MAP_WRITE));
memcpy(info.data, src->colorFrame.getData(), src->colorFrame.getDataSize());
guint8* pData = info.data + src->colorFrame.getDataSize();
gst_buffer_map (buf, &info, (GstMapFlags) (GST_MAP_WRITE));
memcpy (info.data, src->colorFrame.getData (),
src->colorFrame.getDataSize ());
guint8 *pData = info.data + src->colorFrame.getDataSize ();
/* Add depth as 8bit alpha channel, depth is 16bit samples. */
guint16* pDepth = (guint16*) src->depthFrame.getData();
for( int i=0; i < src->depthFrame.getDataSize()/2; ++i)
guint16 *pDepth = (guint16 *) src->depthFrame.getData ();
for (int i = 0; i < src->depthFrame.getDataSize () / 2; ++i)
pData[i] = pDepth[i] >> 8;
GST_WARNING_OBJECT (src, "sending buffer (%d+%d)B [%08llu]",
src->colorFrame.getDataSize(),
src->depthFrame.getDataSize (),
(long long) src->depthFrame.getTimestamp ());
gst_buffer_unmap(buf, &info);
GST_BUFFER_PTS (buf) = src->colorFrame.getTimestamp () * 1000;
GST_LOG_OBJECT (src, "sending buffer (%d+%d)B [%08llu]",
src->colorFrame.getDataSize (),
src->depthFrame.getDataSize (),
(long long) src->depthFrame.getTimestamp ());
gst_buffer_unmap (buf, &info);
} else if (src->depth.isValid () && src->sourcetype == SOURCETYPE_DEPTH) {
rc = src->depth.readFrame (&src->depthFrame);
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Frame read error: %s",
openni::OpenNI::getExtendedError ());
openni::OpenNI::getExtendedError ());
return GST_FLOW_ERROR;
}
if (src->depthFrame.getStrideInBytes() != 2*src->depthFrame.getWidth()) {
if (src->depthFrame.getStrideInBytes () != 2 * src->depthFrame.getWidth ()) {
// This case is not handled - yet :B
GST_ERROR_OBJECT(src, "Stride does not coincide with width");
GST_ERROR_OBJECT (src, "Stride does not coincide with width");
return GST_FLOW_ERROR;
}
int framesize = src->depthFrame.getDataSize();
buf = gst_buffer_new_and_alloc(framesize);
gst_buffer_map(buf, &info, (GstMapFlags)(GST_MAP_WRITE));
memcpy(info.data, src->depthFrame.getData(), framesize);
GST_BUFFER_PTS(buf) = src->depthFrame.getTimestamp() * 1000;
GST_WARNING_OBJECT (src, "sending buffer (%dx%d)=%dB [%08llu]",
src->depthFrame.getWidth (),
src->depthFrame.getHeight (),
src->depthFrame.getDataSize (),
(long long) src->depthFrame.getTimestamp ());
gst_buffer_unmap(buf, &info);
int framesize = src->depthFrame.getDataSize ();
buf = gst_buffer_new_and_alloc (framesize);
gst_buffer_map (buf, &info, (GstMapFlags) (GST_MAP_WRITE));
memcpy (info.data, src->depthFrame.getData (), framesize);
GST_BUFFER_PTS (buf) = src->depthFrame.getTimestamp () * 1000;
GST_LOG_OBJECT (src, "sending buffer (%dx%d)=%dB [%08llu]",
src->depthFrame.getWidth (),
src->depthFrame.getHeight (),
src->depthFrame.getDataSize (),
(long long) src->depthFrame.getTimestamp ());
gst_buffer_unmap (buf, &info);
} else if (src->color.isValid () && src->sourcetype == SOURCETYPE_COLOR) {
rc = src->color.readFrame (&src->colorFrame);
if (rc != openni::STATUS_OK) {
GST_ERROR_OBJECT (src, "Frame read error: %s",
openni::OpenNI::getExtendedError ());
openni::OpenNI::getExtendedError ());
return GST_FLOW_ERROR;
}
if (src->colorFrame.getStrideInBytes() != src->colorFrame.getWidth()) {
if (src->colorFrame.getStrideInBytes () != src->colorFrame.getWidth ()) {
// This case is not handled - yet :B
GST_ERROR_OBJECT(src, "Stride does not coincide with width");
GST_ERROR_OBJECT (src, "Stride does not coincide with width");
return GST_FLOW_ERROR;
}
int framesize = src->colorFrame.getDataSize();
buf = gst_buffer_new_and_alloc(framesize);
gst_buffer_map(buf, &info, (GstMapFlags)(GST_MAP_WRITE));
memcpy(info.data, src->depthFrame.getData(), framesize);
GST_BUFFER_PTS(buf) = src->colorFrame.getTimestamp() * 1000;
GST_WARNING_OBJECT (src, "sending buffer (%dx%d)=%dB [%08llu]",
src->colorFrame.getWidth (),
src->colorFrame.getHeight (),
src->colorFrame.getDataSize (),
(long long) src->colorFrame.getTimestamp ());
gst_buffer_unmap(buf, &info);
int framesize = src->colorFrame.getDataSize ();
buf = gst_buffer_new_and_alloc (framesize);
gst_buffer_map (buf, &info, (GstMapFlags) (GST_MAP_WRITE));
memcpy (info.data, src->depthFrame.getData (), framesize);
GST_BUFFER_PTS (buf) = src->colorFrame.getTimestamp () * 1000;
GST_LOG_OBJECT (src, "sending buffer (%dx%d)=%dB [%08llu]",
src->colorFrame.getWidth (),
src->colorFrame.getHeight (),
src->colorFrame.getDataSize (),
(long long) src->colorFrame.getTimestamp ());
gst_buffer_unmap (buf, &info);
}
return GST_FLOW_OK;
}
@@ -629,7 +636,7 @@ openni2_read_gstbuffer (GstOpenni2Src * src, GstBuffer * buf)
static void
openni2_finalise (GstOpenni2Src * src)
{
src->depth.destroy();
src->color.destroy();
src->depth.destroy ();
src->color.destroy ();
openni::OpenNI::shutdown ();
}