OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/0000700000175000017500000000000012240433507020370 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/0000700000175000017500000000000012240433507021753 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/OniCEnums.h0000600000175000017500000000544112240433507023772 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _ONI_C_ENUMS_H_ #define _ONI_C_ENUMS_H_ /** Possible failure values */ typedef enum { ONI_STATUS_OK = 0, ONI_STATUS_ERROR = 1, ONI_STATUS_NOT_IMPLEMENTED = 2, ONI_STATUS_NOT_SUPPORTED = 3, ONI_STATUS_BAD_PARAMETER = 4, ONI_STATUS_OUT_OF_FLOW = 5, ONI_STATUS_NO_DEVICE = 6, ONI_STATUS_TIME_OUT = 102, } OniStatus; /** The source of the stream */ typedef enum { ONI_SENSOR_IR = 1, ONI_SENSOR_COLOR = 2, ONI_SENSOR_DEPTH = 3, } OniSensorType; /** All available formats of the output of a stream */ typedef enum { // Depth ONI_PIXEL_FORMAT_DEPTH_1_MM = 100, ONI_PIXEL_FORMAT_DEPTH_100_UM = 101, ONI_PIXEL_FORMAT_SHIFT_9_2 = 102, ONI_PIXEL_FORMAT_SHIFT_9_3 = 103, // Color ONI_PIXEL_FORMAT_RGB888 = 200, ONI_PIXEL_FORMAT_YUV422 = 201, ONI_PIXEL_FORMAT_GRAY8 = 202, ONI_PIXEL_FORMAT_GRAY16 = 203, ONI_PIXEL_FORMAT_JPEG = 204, ONI_PIXEL_FORMAT_YUYV = 205, } OniPixelFormat; typedef enum { ONI_DEVICE_STATE_OK = 0, ONI_DEVICE_STATE_ERROR = 1, ONI_DEVICE_STATE_NOT_READY = 2, ONI_DEVICE_STATE_EOF = 3 } OniDeviceState; typedef enum { ONI_IMAGE_REGISTRATION_OFF = 0, ONI_IMAGE_REGISTRATION_DEPTH_TO_COLOR = 1, } OniImageRegistrationMode; enum { ONI_TIMEOUT_NONE = 0, ONI_TIMEOUT_FOREVER = -1, }; #endif // _ONI_C_ENUMS_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/OniPlatform.h0000600000175000017500000000602412240433507024362 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. 
* * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #ifndef _ONI_PLATFORM_H_ #define _ONI_PLATFORM_H_ // Supported platforms #define ONI_PLATFORM_WIN32 1 #define ONI_PLATFORM_LINUX_X86 2 #define ONI_PLATFORM_LINUX_ARM 3 #define ONI_PLATFORM_MACOSX 4 #define ONI_PLATFORM_ANDROID_ARM 5 #if (defined _WIN32) # ifndef RC_INVOKED # if _MSC_VER < 1300 # error OpenNI Platform Abstraction Layer - Win32 - Microsoft Visual Studio version below 2003 (7.0) are not supported! # endif # endif # include "Win32/OniPlatformWin32.h" #elif defined (ANDROID) && defined (__arm__) # include "Android-Arm/OniPlatformAndroid-Arm.h" #elif (__linux__ && (i386 || __x86_64__)) # include "Linux-x86/OniPlatformLinux-x86.h" #elif (__linux__ && __arm__) # include "Linux-Arm/OniPlatformLinux-Arm.h" #elif _ARC # include "ARC/OniPlaformARC.h" #elif (__APPLE__) # include "MacOSX/OniPlatformMacOSX.h" #else # error Xiron Platform Abstraction Layer - Unsupported Platform! 
#endif #ifdef __cplusplus # define ONI_C extern "C" # define ONI_C_API_EXPORT ONI_C ONI_API_EXPORT # define ONI_C_API_IMPORT ONI_C ONI_API_IMPORT # define ONI_CPP_API_EXPORT ONI_API_EXPORT # define ONI_CPP_API_IMPORT ONI_API_IMPORT #else // __cplusplus # define ONI_C_API_EXPORT ONI_API_EXPORT # define ONI_C_API_IMPORT ONI_API_IMPORT #endif // __cplusplus #ifdef OPENNI2_EXPORT # define ONI_C_API ONI_C_API_EXPORT # define ONI_CPP_API ONI_CPP_API_EXPORT #else // OPENNI2_EXPORT # define ONI_C_API ONI_C_API_IMPORT # define ONI_CPP_API ONI_CPP_API_IMPORT #endif // OPENNI2_EXPORT #endif // _ONI_PLATFORM_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/OniVersion.h0000600000175000017500000000520012240433507024216 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #include "OniPlatform.h" #define ONI_VERSION_MAJOR 2 #define ONI_VERSION_MINOR 2 #define ONI_VERSION_MAINTENANCE 0 #define ONI_VERSION_BUILD 33 /** OpenNI version (in brief string format): "Major.Minor.Maintenance (Build)" */ #define ONI_BRIEF_VERSION_STRING \ ONI_STRINGIFY(ONI_VERSION_MAJOR) "." \ ONI_STRINGIFY(ONI_VERSION_MINOR) "." 
\ ONI_STRINGIFY(ONI_VERSION_MAINTENANCE) \ " (Build " ONI_STRINGIFY(ONI_VERSION_BUILD) ")" /** OpenNI version (in numeric format): (OpenNI major version * 100000000 + OpenNI minor version * 1000000 + OpenNI maintenance version * 10000 + OpenNI build version). */ #define ONI_VERSION (ONI_VERSION_MAJOR*100000000 + ONI_VERSION_MINOR*1000000 + ONI_VERSION_MAINTENANCE*10000 + ONI_VERSION_BUILD) #define ONI_CREATE_API_VERSION(major, minor) ((major)*1000 + (minor)) #define ONI_API_VERSION ONI_CREATE_API_VERSION(ONI_VERSION_MAJOR, ONI_VERSION_MINOR) /** OpenNI version (in string format): "Major.Minor.Maintenance.Build-Platform (MMM DD YYYY HH:MM:SS)". */ #define ONI_VERSION_STRING \ ONI_BRIEF_VERSION_STRING "-" \ ONI_PLATFORM_STRING " (" ONI_TIMESTAMP ")" OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Linux-Arm/0000700000175000017500000000000012240433507023567 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Linux-Arm/OniPlatformLinux-Arm.h0000600000175000017500000000407412240433507027736 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _ONI_PLATFORM_LINUX_ARM_H_ #define _ONI_PLATFORM_LINUX_ARM_H_ // Start with Linux-x86, and override what's different #include "../Linux-x86/OniPlatformLinux-x86.h" //--------------------------------------------------------------------------- // Platform Basic Definition //--------------------------------------------------------------------------- #undef ONI_PLATFORM #undef ONI_PLATFORM_STRING #define ONI_PLATFORM ONI_PLATFORM_LINUX_ARM #define ONI_PLATFORM_STRING "Linux-Arm" #endif //_ONI_PLATFORM_LINUX_ARM_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Linux-x86/0000700000175000017500000000000012240433507023475 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Linux-x86/OniPlatformLinux-x86.h0000600000175000017500000001074012240433507027547 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _ONI_PLATFORM_LINUX_X86_H_ #define _ONI_PLATFORM_LINUX_X86_H_ //--------------------------------------------------------------------------- // Prerequisites //--------------------------------------------------------------------------- //--------------------------------------------------------------------------- // Includes //--------------------------------------------------------------------------- #include #include #include #include #include #include #include //--------------------------------------------------------------------------- // Platform Basic Definition //--------------------------------------------------------------------------- #define ONI_PLATFORM ONI_PLATFORM_LINUX_X86 #define ONI_PLATFORM_STRING "Linux-x86" //--------------------------------------------------------------------------- // Platform Capabilities //--------------------------------------------------------------------------- #define ONI_PLATFORM_ENDIAN_TYPE ONI_PLATFORM_IS_LITTLE_ENDIAN #define ONI_PLATFORM_SUPPORTS_DYNAMIC_LIBS 1 //--------------------------------------------------------------------------- // Memory //--------------------------------------------------------------------------- /** The default memory alignment. */ #define ONI_DEFAULT_MEM_ALIGN 16 /** The thread static declarator (using TLS). */ #define ONI_THREAD_STATIC __thread //--------------------------------------------------------------------------- // Files //--------------------------------------------------------------------------- /** The maximum allowed file path size (in bytes). */ #define ONI_FILE_MAX_PATH 256 //--------------------------------------------------------------------------- // Call back //--------------------------------------------------------------------------- /** The std call type. */ #define ONI_STDCALL __stdcall /** The call back calling convention. 
*/ #define ONI_CALLBACK_TYPE /** The C and C++ calling convension. */ #define ONI_C_DECL //--------------------------------------------------------------------------- // Macros //--------------------------------------------------------------------------- /** Returns the date and time at compile time. */ #define ONI_TIMESTAMP __DATE__ " " __TIME__ /** Converts n into a pre-processor string. */ #define ONI_STRINGIFY(n) ONI_STRINGIFY_HELPER(n) #define ONI_STRINGIFY_HELPER(n) #n //--------------------------------------------------------------------------- // API Export/Import Macros //--------------------------------------------------------------------------- /** Indicates an exported shared library function. */ #define ONI_API_EXPORT __attribute__ ((visibility("default"))) /** Indicates an imported shared library function. */ #define ONI_API_IMPORT /** Indicates a deprecated function */ #define ONI_API_DEPRECATED(msg) __attribute__((warning("This function is deprecated: " msg))) #endif //_ONI_PLATFORM_LINUX_X86_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/PS1080.h0000600000175000017500000004726612240433507023000 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _PS1080_H_ #define _PS1080_H_ #include /** The maximum permitted Xiron device name string length. */ #define XN_DEVICE_MAX_STRING_LENGTH 200 /* * private properties of PS1080 devices. * * @remarks * properties structure is 0x1080XXYY where XX is range and YY is code. * range values: * F0 - device properties * E0 - device commands * 00 - common stream properties * 10 - depth stream properties * 20 - color stream properties */ enum { /*******************************************************************/ /* Device properties */ /*******************************************************************/ /** unsigned long long (XnSensorUsbInterface) */ XN_MODULE_PROPERTY_USB_INTERFACE = 0x1080F001, // "UsbInterface" /** Boolean */ XN_MODULE_PROPERTY_MIRROR = 0x1080F002, // "Mirror" /** unsigned long long, get only */ XN_MODULE_PROPERTY_RESET_SENSOR_ON_STARTUP = 0x1080F004, // "ResetSensorOnStartup" /** unsigned long long, get only */ XN_MODULE_PROPERTY_LEAN_INIT = 0x1080F005, // "LeanInit" /** char[XN_DEVICE_MAX_STRING_LENGTH], get only */ XN_MODULE_PROPERTY_SERIAL_NUMBER = 0x1080F006, // "ID" /** XnVersions, get only */ XN_MODULE_PROPERTY_VERSION = 0x1080F007, // "Version" /** Boolean */ XN_MODULE_PROPERTY_FIRMWARE_FRAME_SYNC = 0x1080F008, /** Boolean */ XN_MODULE_PROPERTY_HOST_TIMESTAMPS = 0x1080FF77, // "HostTimestamps" /** Boolean */ XN_MODULE_PROPERTY_CLOSE_STREAMS_ON_SHUTDOWN = 0x1080FF78, // "CloseStreamsOnShutdown" /** Integer */ XN_MODULE_PROPERTY_FIRMWARE_LOG_INTERVAL = 0x1080FF7F, // "FirmwareLogInterval" /** Boolean */ XN_MODULE_PROPERTY_PRINT_FIRMWARE_LOG = 0x1080FF80, // "FirmwareLogPrint" /** Integer */ XN_MODULE_PROPERTY_FIRMWARE_LOG_FILTER = 0x1080FF81, // "FirmwareLogFilter" /** String, get only */ XN_MODULE_PROPERTY_FIRMWARE_LOG = 0x1080FF82, // "FirmwareLog" /** Integer */ XN_MODULE_PROPERTY_FIRMWARE_CPU_INTERVAL = 0x1080FF83, // "FirmwareCPUInterval" /** String, 
get only */ XN_MODULE_PROPERTY_PHYSICAL_DEVICE_NAME = 0x1080FF7A, // "PhysicalDeviceName" /** String, get only */ XN_MODULE_PROPERTY_VENDOR_SPECIFIC_DATA = 0x1080FF7B, // "VendorSpecificData" /** String, get only */ XN_MODULE_PROPERTY_SENSOR_PLATFORM_STRING = 0x1080FF7C, // "SensorPlatformString" /*******************************************************************/ /* Device commands (activated via SetProperty/GetProperty) */ /*******************************************************************/ /** XnInnerParam */ XN_MODULE_PROPERTY_FIRMWARE_PARAM = 0x1080E001, // "FirmwareParam" /** unsigned long long, set only */ XN_MODULE_PROPERTY_RESET = 0x1080E002, // "Reset" /** XnControlProcessingData */ XN_MODULE_PROPERTY_IMAGE_CONTROL = 0x1080E003, // "ImageControl" /** XnControlProcessingData */ XN_MODULE_PROPERTY_DEPTH_CONTROL = 0x1080E004, // "DepthControl" /** XnAHBData */ XN_MODULE_PROPERTY_AHB = 0x1080E005, // "AHB" /** XnLedState */ XN_MODULE_PROPERTY_LED_STATE = 0x1080E006, // "LedState" /** Boolean */ XN_MODULE_PROPERTY_EMITTER_STATE = 0x1080E007, // "EmitterState" /** XnCmosBlankingUnits */ XN_MODULE_PROPERTY_CMOS_BLANKING_UNITS = 0x1080FF74, // "CmosBlankingUnits" /** XnCmosBlankingTime */ XN_MODULE_PROPERTY_CMOS_BLANKING_TIME = 0x1080FF75, // "CmosBlankingTime" /** XnFlashFileList, get only */ XN_MODULE_PROPERTY_FILE_LIST = 0x1080FF84, // "FileList" /** XnParamFlashData, get only */ XN_MODULE_PROPERTY_FLASH_CHUNK = 0x1080FF85, // "FlashChunk" XN_MODULE_PROPERTY_FILE = 0x1080FF86, // "FlashFile" /** Integer */ XN_MODULE_PROPERTY_DELETE_FILE = 0x1080FF87, // "DeleteFile" XN_MODULE_PROPERTY_FILE_ATTRIBUTES = 0x1080FF88, // "FileAttributes" XN_MODULE_PROPERTY_TEC_SET_POINT = 0x1080FF89, // "TecSetPoint" /** get only */ XN_MODULE_PROPERTY_TEC_STATUS = 0x1080FF8A, // "TecStatus" /** get only */ XN_MODULE_PROPERTY_TEC_FAST_CONVERGENCE_STATUS = 0x1080FF8B, // "TecFastConvergenceStatus" XN_MODULE_PROPERTY_EMITTER_SET_POINT = 0x1080FF8C, // "EmitterSetPoint" /** get only 
*/ XN_MODULE_PROPERTY_EMITTER_STATUS = 0x1080FF8D, // "EmitterStatus" XN_MODULE_PROPERTY_I2C = 0x1080FF8E, // "I2C" /** Integer, set only */ XN_MODULE_PROPERTY_BIST = 0x1080FF8F, // "BIST" /** XnProjectorFaultData, set only */ XN_MODULE_PROPERTY_PROJECTOR_FAULT = 0x1080FF90, // "ProjectorFault" /** Boolean, set only */ XN_MODULE_PROPERTY_APC_ENABLED = 0x1080FF91, // "APCEnabled" /** Boolean */ XN_MODULE_PROPERTY_FIRMWARE_TEC_DEBUG_PRINT = 0x1080FF92, // "TecDebugPrint" /*******************************************************************/ /* Common stream properties */ /*******************************************************************/ /** unsigned long long */ XN_STREAM_PROPERTY_INPUT_FORMAT = 0x10800001, // "InputFormat" /** unsigned long long (XnCroppingMode) */ XN_STREAM_PROPERTY_CROPPING_MODE = 0x10800002, // "CroppingMode" /*******************************************************************/ /* Depth stream properties */ /*******************************************************************/ /** unsigned long long */ XN_STREAM_PROPERTY_CLOSE_RANGE = 0x1080F003, // "CloseRange" /** XnPixelRegistration - get only */ XN_STREAM_PROPERTY_PIXEL_REGISTRATION = 0x10801001, // "PixelRegistration" /** unsigned long long */ XN_STREAM_PROPERTY_WHITE_BALANCE_ENABLED = 0x10801002, // "WhiteBalancedEnabled" /** unsigned long long */ XN_STREAM_PROPERTY_GAIN = 0x10801003, // "Gain" /** unsigned long long */ XN_STREAM_PROPERTY_HOLE_FILTER = 0x10801004, // "HoleFilter" /** unsigned long long (XnProcessingType) */ XN_STREAM_PROPERTY_REGISTRATION_TYPE = 0x10801005, // "RegistrationType" /** XnDepthAGCBin* */ XN_STREAM_PROPERTY_AGC_BIN = 0x10801006, // "AGCBin" /** unsigned long long, get only */ XN_STREAM_PROPERTY_CONST_SHIFT = 0x10801007, // "ConstShift" /** unsigned long long, get only */ XN_STREAM_PROPERTY_PIXEL_SIZE_FACTOR = 0x10801008, // "PixelSizeFactor" /** unsigned long long, get only */ XN_STREAM_PROPERTY_MAX_SHIFT = 0x10801009, // "MaxShift" /** unsigned long long, get 
only */ XN_STREAM_PROPERTY_PARAM_COEFF = 0x1080100A, // "ParamCoeff" /** unsigned long long, get only */ XN_STREAM_PROPERTY_SHIFT_SCALE = 0x1080100B, // "ShiftScale" /** unsigned long long, get only */ XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE = 0x1080100C, // "ZPD" /** double, get only */ XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE = 0x1080100D, // "ZPPS" /** double, get only */ XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE = 0x1080100E, // "LDDIS" /** double, get only */ XN_STREAM_PROPERTY_DCMOS_RCMOS_DISTANCE = 0x1080100F, // "DCRCDIS" /** OniDepthPixel[], get only */ XN_STREAM_PROPERTY_S2D_TABLE = 0x10801010, // "S2D" /** unsigned short[], get only */ XN_STREAM_PROPERTY_D2S_TABLE = 0x10801011, // "D2S" /** get only */ XN_STREAM_PROPERTY_DEPTH_SENSOR_CALIBRATION_INFO = 0x10801012, /** Boolean */ XN_STREAM_PROPERTY_GMC_MODE = 0x1080FF44, // "GmcMode" /** Boolean */ XN_STREAM_PROPERTY_GMC_DEBUG = 0x1080FF45, // "GmcDebug" /** Boolean */ XN_STREAM_PROPERTY_WAVELENGTH_CORRECTION = 0x1080FF46, // "WavelengthCorrection" /** Boolean */ XN_STREAM_PROPERTY_WAVELENGTH_CORRECTION_DEBUG = 0x1080FF47, // "WavelengthCorrectionDebug" /*******************************************************************/ /* Color stream properties */ /*******************************************************************/ /** Integer */ XN_STREAM_PROPERTY_FLICKER = 0x10802001, // "Flicker" }; typedef enum { XN_SENSOR_FW_VER_UNKNOWN = 0, XN_SENSOR_FW_VER_0_17 = 1, XN_SENSOR_FW_VER_1_1 = 2, XN_SENSOR_FW_VER_1_2 = 3, XN_SENSOR_FW_VER_3_0 = 4, XN_SENSOR_FW_VER_4_0 = 5, XN_SENSOR_FW_VER_5_0 = 6, XN_SENSOR_FW_VER_5_1 = 7, XN_SENSOR_FW_VER_5_2 = 8, XN_SENSOR_FW_VER_5_3 = 9, XN_SENSOR_FW_VER_5_4 = 10, XN_SENSOR_FW_VER_5_5 = 11, XN_SENSOR_FW_VER_5_6 = 12, XN_SENSOR_FW_VER_5_7 = 13, XN_SENSOR_FW_VER_5_8 = 14, } XnFWVer; typedef enum { XN_SENSOR_VER_UNKNOWN = 0, XN_SENSOR_VER_2_0 = 1, XN_SENSOR_VER_3_0 = 2, XN_SENSOR_VER_4_0 = 3, XN_SENSOR_VER_5_0 = 4 } XnSensorVer; typedef enum { XN_SENSOR_HW_VER_UNKNOWN = 0, 
XN_SENSOR_HW_VER_FPDB_10 = 1, XN_SENSOR_HW_VER_CDB_10 = 2, XN_SENSOR_HW_VER_RD_3 = 3, XN_SENSOR_HW_VER_RD_5 = 4, XN_SENSOR_HW_VER_RD1081 = 5, XN_SENSOR_HW_VER_RD1082 = 6, XN_SENSOR_HW_VER_RD109 = 7 } XnHWVer; typedef enum { XN_SENSOR_CHIP_VER_UNKNOWN = 0, XN_SENSOR_CHIP_VER_PS1000 = 1, XN_SENSOR_CHIP_VER_PS1080 = 2, XN_SENSOR_CHIP_VER_PS1080A6 = 3 } XnChipVer; typedef enum { XN_CMOS_TYPE_IMAGE = 0, XN_CMOS_TYPE_DEPTH = 1, XN_CMOS_COUNT } XnCMOSType; typedef enum { XN_IO_IMAGE_FORMAT_BAYER = 0, XN_IO_IMAGE_FORMAT_YUV422 = 1, XN_IO_IMAGE_FORMAT_JPEG = 2, XN_IO_IMAGE_FORMAT_JPEG_420 = 3, XN_IO_IMAGE_FORMAT_JPEG_MONO = 4, XN_IO_IMAGE_FORMAT_UNCOMPRESSED_YUV422 = 5, XN_IO_IMAGE_FORMAT_UNCOMPRESSED_BAYER = 6, XN_IO_IMAGE_FORMAT_UNCOMPRESSED_YUYV = 7, } XnIOImageFormats; typedef enum { XN_IO_DEPTH_FORMAT_UNCOMPRESSED_16_BIT = 0, XN_IO_DEPTH_FORMAT_COMPRESSED_PS = 1, XN_IO_DEPTH_FORMAT_UNCOMPRESSED_10_BIT = 2, XN_IO_DEPTH_FORMAT_UNCOMPRESSED_11_BIT = 3, XN_IO_DEPTH_FORMAT_UNCOMPRESSED_12_BIT = 4, } XnIODepthFormats; typedef enum { XN_RESET_TYPE_POWER = 0, XN_RESET_TYPE_SOFT = 1, XN_RESET_TYPE_SOFT_FIRST = 2, } XnParamResetType; typedef enum XnSensorUsbInterface { XN_SENSOR_USB_INTERFACE_DEFAULT = 0, XN_SENSOR_USB_INTERFACE_ISO_ENDPOINTS = 1, XN_SENSOR_USB_INTERFACE_BULK_ENDPOINTS = 2, XN_SENSOR_USB_INTERFACE_ISO_ENDPOINTS_LOW_DEPTH = 3, } XnSensorUsbInterface; typedef enum XnProcessingType { XN_PROCESSING_DONT_CARE = 0, XN_PROCESSING_HARDWARE = 1, XN_PROCESSING_SOFTWARE = 2, } XnProcessingType; typedef enum XnCroppingMode { XN_CROPPING_MODE_NORMAL = 1, XN_CROPPING_MODE_INCREASED_FPS = 2, XN_CROPPING_MODE_SOFTWARE_ONLY = 3, } XnCroppingMode; enum { XN_ERROR_STATE_OK = 0, XN_ERROR_STATE_DEVICE_PROJECTOR_FAULT = 1, XN_ERROR_STATE_DEVICE_OVERHEAT = 2, }; typedef enum XnFirmwareCroppingMode { XN_FIRMWARE_CROPPING_MODE_DISABLED = 0, XN_FIRMWARE_CROPPING_MODE_NORMAL = 1, XN_FIRMWARE_CROPPING_MODE_INCREASED_FPS = 2, } XnFirmwareCroppingMode; typedef enum { XnLogFilterDebug = 
0x0001, XnLogFilterInfo = 0x0002, XnLogFilterError = 0x0004, XnLogFilterProtocol = 0x0008, XnLogFilterAssert = 0x0010, XnLogFilterConfig = 0x0020, XnLogFilterFrameSync = 0x0040, XnLogFilterAGC = 0x0080, XnLogFilterTelems = 0x0100, XnLogFilterAll = 0xFFFF } XnLogFilter; typedef enum { XnFileAttributeReadOnly = 0x8000 } XnFilePossibleAttributes; typedef enum { XnFlashFileTypeFileTable = 0x00, XnFlashFileTypeScratchFile = 0x01, XnFlashFileTypeBootSector = 0x02, XnFlashFileTypeBootManager = 0x03, XnFlashFileTypeCodeDownloader = 0x04, XnFlashFileTypeMonitor = 0x05, XnFlashFileTypeApplication = 0x06, XnFlashFileTypeFixedParams = 0x07, XnFlashFileTypeDescriptors = 0x08, XnFlashFileTypeDefaultParams = 0x09, XnFlashFileTypeImageCmos = 0x0A, XnFlashFileTypeDepthCmos = 0x0B, XnFlashFileTypeAlgorithmParams = 0x0C, XnFlashFileTypeReferenceQVGA = 0x0D, XnFlashFileTypeReferenceVGA = 0x0E, XnFlashFileTypeMaintenance = 0x0F, XnFlashFileTypeDebugParams = 0x10, XnFlashFileTypePrimeProcessor = 0x11, XnFlashFileTypeGainControl = 0x12, XnFlashFileTypeRegistartionParams = 0x13, XnFlashFileTypeIDParams = 0x14, XnFlashFileTypeSensorTECParams = 0x15, XnFlashFileTypeSensorAPCParams = 0x16, XnFlashFileTypeSensorProjectorFaultParams = 0x17, XnFlashFileTypeProductionFile = 0x18, XnFlashFileTypeUpgradeInProgress = 0x19, XnFlashFileTypeWavelengthCorrection = 0x1A, XnFlashFileTypeGMCReferenceOffset = 0x1B, XnFlashFileTypeSensorNESAParams = 0x1C, XnFlashFileTypeSensorFault = 0x1D, XnFlashFileTypeVendorData = 0x1E, } XnFlashFileType; typedef enum XnBistType { //Auto tests XN_BIST_IMAGE_CMOS = 1 << 0, XN_BIST_IR_CMOS = 1 << 1, XN_BIST_POTENTIOMETER = 1 << 2, XN_BIST_FLASH = 1 << 3, XN_BIST_FULL_FLASH = 1 << 4, XN_BIST_PROJECTOR_TEST_MASK = 1 << 5, XN_BIST_TEC_TEST_MASK = 1 << 6, // Manual tests XN_BIST_NESA_TEST_MASK = 1 << 7, XN_BIST_NESA_UNLIMITED_TEST_MASK = 1 << 8, // Mask of all the auto tests XN_BIST_ALL = (0xFFFFFFFF & ~XN_BIST_NESA_TEST_MASK & ~XN_BIST_NESA_UNLIMITED_TEST_MASK), } XnBistType; 
typedef enum XnBistError { XN_BIST_RAM_TEST_FAILURE = 1 << 0, XN_BIST_IR_CMOS_CONTROL_BUS_FAILURE = 1 << 1, XN_BIST_IR_CMOS_DATA_BUS_FAILURE = 1 << 2, XN_BIST_IR_CMOS_BAD_VERSION = 1 << 3, XN_BIST_IR_CMOS_RESET_FAILUE = 1 << 4, XN_BIST_IR_CMOS_TRIGGER_FAILURE = 1 << 5, XN_BIST_IR_CMOS_STROBE_FAILURE = 1 << 6, XN_BIST_COLOR_CMOS_CONTROL_BUS_FAILURE = 1 << 7, XN_BIST_COLOR_CMOS_DATA_BUS_FAILURE = 1 << 8, XN_BIST_COLOR_CMOS_BAD_VERSION = 1 << 9, XN_BIST_COLOR_CMOS_RESET_FAILUE = 1 << 10, XN_BIST_FLASH_WRITE_LINE_FAILURE = 1 << 11, XN_BIST_FLASH_TEST_FAILURE = 1 << 12, XN_BIST_POTENTIOMETER_CONTROL_BUS_FAILURE = 1 << 13, XN_BIST_POTENTIOMETER_FAILURE = 1 << 14, XN_BIST_AUDIO_TEST_FAILURE = 1 << 15, XN_BIST_PROJECTOR_TEST_LD_FAIL = 1 << 16, XN_BIST_PROJECTOR_TEST_LD_FAILSAFE_TRIG_FAIL = 1 << 17, XN_BIST_PROJECTOR_TEST_FAILSAFE_HIGH_FAIL = 1 << 18, XN_BIST_PROJECTOR_TEST_FAILSAFE_LOW_FAIL = 1 << 19, XN_TEC_TEST_HEATER_CROSSED = 1 << 20, XN_TEC_TEST_HEATER_DISCONNETED = 1 << 21, XN_TEC_TEST_TEC_CROSSED = 1 << 22, XN_TEC_TEST_TEC_FAULT = 1 << 23, } XnBistError; typedef enum XnDepthCMOSType { XN_DEPTH_CMOS_NONE = 0, XN_DEPTH_CMOS_MT9M001 = 1, XN_DEPTH_CMOS_AR130 = 2, } XnDepthCMOSType; typedef enum XnImageCMOSType { XN_IMAGE_CMOS_NONE = 0, XN_IMAGE_CMOS_MT9M112 = 1, XN_IMAGE_CMOS_MT9D131 = 2, XN_IMAGE_CMOS_MT9M114 = 3, } XnImageCMOSType; #define XN_IO_MAX_I2C_BUFFER_SIZE 10 #define XN_MAX_LOG_SIZE (6*1024) #pragma pack (push, 1) typedef struct XnSDKVersion { unsigned char nMajor; unsigned char nMinor; unsigned char nMaintenance; unsigned short nBuild; } XnSDKVersion; typedef struct { unsigned char nMajor; unsigned char nMinor; unsigned short nBuild; unsigned int nChip; unsigned short nFPGA; unsigned short nSystemVersion; XnSDKVersion SDK; XnHWVer HWVer; XnFWVer FWVer; XnSensorVer SensorVer; XnChipVer ChipVer; } XnVersions; typedef struct { unsigned short nParam; unsigned short nValue; } XnInnerParamData; typedef struct XnDepthAGCBin { unsigned short nBin; unsigned short 
nMin; unsigned short nMax; } XnDepthAGCBin; typedef struct XnControlProcessingData { unsigned short nRegister; unsigned short nValue; } XnControlProcessingData; typedef struct XnAHBData { unsigned int nRegister; unsigned int nValue; unsigned int nMask; } XnAHBData; typedef struct XnPixelRegistration { unsigned int nDepthX; unsigned int nDepthY; uint16_t nDepthValue; unsigned int nImageXRes; unsigned int nImageYRes; unsigned int nImageX; // out unsigned int nImageY; // out } XnPixelRegistration; typedef struct XnLedState { uint16_t nLedID; uint16_t nState; } XnLedState; typedef struct XnCmosBlankingTime { XnCMOSType nCmosID; float nTimeInMilliseconds; uint16_t nNumberOfFrames; } XnCmosBlankingTime; typedef struct XnCmosBlankingUnits { XnCMOSType nCmosID; uint16_t nUnits; uint16_t nNumberOfFrames; } XnCmosBlankingUnits; typedef struct XnI2CWriteData { uint16_t nBus; uint16_t nSlaveAddress; uint16_t cpWriteBuffer[XN_IO_MAX_I2C_BUFFER_SIZE]; uint16_t nWriteSize; } XnI2CWriteData; typedef struct XnI2CReadData { uint16_t nBus; uint16_t nSlaveAddress; uint16_t cpReadBuffer[XN_IO_MAX_I2C_BUFFER_SIZE]; uint16_t cpWriteBuffer[XN_IO_MAX_I2C_BUFFER_SIZE]; uint16_t nReadSize; uint16_t nWriteSize; } XnI2CReadData; typedef struct XnTecData { uint16_t m_SetPointVoltage; uint16_t m_CompensationVoltage; uint16_t m_TecDutyCycle; //duty cycle on heater/cooler uint16_t m_HeatMode; //TRUE - heat, FALSE - cool int32_t m_ProportionalError; int32_t m_IntegralError; int32_t m_DerivativeError; uint16_t m_ScanMode; //0 - crude, 1 - precise } XnTecData; typedef struct XnTecFastConvergenceData { int16_t m_SetPointTemperature; // set point temperature in celsius, // scaled by factor of 100 (extra precision) int16_t m_MeasuredTemperature; // measured temperature in celsius, // scaled by factor of 100 (extra precision) int32_t m_ProportionalError; // proportional error in system clocks int32_t m_IntegralError; // integral error in system clocks int32_t m_DerivativeError; // derivative error in 
system clocks uint16_t m_ScanMode; // 0 - initial, 1 - crude, 2 - precise uint16_t m_HeatMode; // 0 - idle, 1 - heat, 2 - cool uint16_t m_TecDutyCycle; // duty cycle on heater/cooler in percents uint16_t m_TemperatureRange; // 0 - cool, 1 - room, 2 - warm } XnTecFastConvergenceData; typedef struct XnEmitterData { uint16_t m_State; //idle, calibrating uint16_t m_SetPointVoltage; //this is what should be written to the XML uint16_t m_SetPointClocks; //target cross duty cycle uint16_t m_PD_Reading; //current cross duty cycle in system clocks(high time) uint16_t m_EmitterSet; //duty cycle on emitter set in system clocks (high time). uint16_t m_EmitterSettingLogic; //TRUE = positive logic, FALSE = negative logic uint16_t m_LightMeasureLogic; //TRUE - positive logic, FALSE - negative logic uint16_t m_IsAPCEnabled; uint16_t m_EmitterSetStepSize; // in MilliVolts uint16_t m_ApcTolerance; // in system clocks (only valid up till v5.2) uint16_t m_SubClocking; //in system clocks (only valid from v5.3) uint16_t m_Precision; // (only valid from v5.3) } XnEmitterData; typedef struct { uint16_t nId; uint16_t nAttribs; } XnFileAttributes; typedef struct { uint32_t nOffset; const char* strFileName; uint16_t nAttributes; } XnParamFileData; typedef struct { uint32_t nOffset; uint32_t nSize; unsigned char* pData; } XnParamFlashData; typedef struct { uint16_t nId; uint16_t nType; uint32_t nVersion; uint32_t nOffset; uint32_t nSize; uint16_t nCrc; uint16_t nAttributes; uint16_t nReserve; } XnFlashFile; typedef struct { XnFlashFile* pFiles; uint16_t nFiles; } XnFlashFileList; typedef struct XnProjectorFaultData { uint16_t nMinThreshold; uint16_t nMaxThreshold; int32_t bProjectorFaultEvent; } XnProjectorFaultData; typedef struct XnBist { uint32_t nTestsMask; uint32_t nFailures; } XnBist; #pragma pack (pop) #endif //_PS1080_H_OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Android-Arm/0000700000175000017500000000000012240433507024050 5ustar 
jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Android-Arm/OniPlatformAndroid-Arm.h0000600000175000017500000000437212240433507030501 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #ifndef _ONI_PLATFORM_ANDROID_ARM_H_ #define _ONI_PLATFORM_ANDROID_ARM_H_ // Start with Linux-x86, and override what's different #include "../Linux-x86/OniPlatformLinux-x86.h" //--------------------------------------------------------------------------- // Platform Basic Definition //--------------------------------------------------------------------------- #undef ONI_PLATFORM #undef ONI_PLATFORM_STRING #define ONI_PLATFORM ONI_PLATFORM_ANDROID_ARM #define ONI_PLATFORM_STRING "Android-Arm" #ifdef HAVE_ANDROID_OS #define ONI_PLATFORM_ANDROID_OS #undef ONI_PLATFORM_STRING #define ONI_PLATFORM_STRING "AndroidOS-Arm" #endif #endif //_ONI_PLATFORM_LINUX_ARM_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/PSLink.h0000600000175000017500000001302312240433507023265 0ustar jsprickejspricke#ifndef __XN_PRIME_CLIENT_PROPS_H__ #define __XN_PRIME_CLIENT_PROPS_H__ #include enum { /**** Device properties ****/ /* XnDetailedVersion, get only */ LINK_PROP_FW_VERSION = 
0x12000001, // "FWVersion" /* Int, get only */ LINK_PROP_VERSIONS_INFO_COUNT = 0x12000002, // "VersionsInfoCount" /* General - array - XnComponentVersion * count elements, get only */ LINK_PROP_VERSIONS_INFO = 0x12000003, // "VersionsInfo" /* Int - 0 means off, 1 means on. */ LINK_PROP_EMITTER_ACTIVE = 0x12000008, // "EmitterActive" /* String. Set only */ LINK_PROP_PRESET_FILE = 0x1200000a, // "PresetFile" /* Get only */ LINK_PROP_BOOT_STATUS = 0x1200000b, /**** Device commands ****/ /* XnCommandGetFwStreams */ LINK_COMMAND_GET_FW_STREAM_LIST = 0x1200F001, /* XnCommandCreateStream */ LINK_COMMAND_CREATE_FW_STREAM = 0x1200F002, /* XnCommandDestroyStream */ LINK_COMMAND_DESTROY_FW_STREAM = 0x1200F003, /* XnCommandStartStream */ LINK_COMMAND_START_FW_STREAM = 0x1200F004, /* XnCommandStopStream */ LINK_COMMAND_STOP_FW_STREAM = 0x1200F005, /* XnCommandGetFwStreamVideoModeList */ LINK_COMMAND_GET_FW_STREAM_VIDEO_MODE_LIST = 0x1200F006, /* XnCommandSetFwStreamVideoMode */ LINK_COMMAND_SET_FW_STREAM_VIDEO_MODE = 0x1200F007, /* XnCommandGetFwStreamVideoMode */ LINK_COMMAND_GET_FW_STREAM_VIDEO_MODE = 0x1200F008, /**** Stream properties ****/ /* Int. 1 - Shifts 9.3, 2 - Grayscale16, 3 - YUV422, 4 - Bayer8 */ LINK_PROP_PIXEL_FORMAT = 0x12001001, // "PixelFormat" /* Int. 
0 - None, 1 - 8z, 2 - 16z, 3 - 24z, 4 - 6-bit, 5 - 10-bit, 6 - 11-bit, 7 - 12-bit */ LINK_PROP_COMPRESSION = 0x12001002, // "Compression" /**** Depth Stream properties ****/ /* Real, get only */ LINK_PROP_DEPTH_SCALE = 0x1200000b, // "DepthScale" /* Int, get only */ LINK_PROP_MAX_SHIFT = 0x12002001, // "MaxShift" /* Int, get only */ LINK_PROP_ZERO_PLANE_DISTANCE = 0x12002002, // "ZPD" /* Int, get only */ LINK_PROP_CONST_SHIFT = 0x12002003, // "ConstShift" /* Int, get only */ LINK_PROP_PARAM_COEFF = 0x12002004, // "ParamCoeff" /* Int, get only */ LINK_PROP_SHIFT_SCALE = 0x12002005, // "ShiftScale" /* Real, get only */ LINK_PROP_ZERO_PLANE_PIXEL_SIZE = 0x12002006, // "ZPPS" /* Real, get only */ LINK_PROP_ZERO_PLANE_OUTPUT_PIXEL_SIZE = 0x12002007, // "ZPOPS" /* Real, get only */ LINK_PROP_EMITTER_DEPTH_CMOS_DISTANCE = 0x12002008, // "LDDIS" /* General - array - MaxShift * XnDepthPixel elements, get only */ LINK_PROP_SHIFT_TO_DEPTH_TABLE = 0x12002009, // "S2D" /* General - array - MaxDepth * uint16_t elements, get only */ LINK_PROP_DEPTH_TO_SHIFT_TABLE = 0x1200200a, // "D2S" }; typedef enum XnFileZone { XN_ZONE_FACTORY = 0x0000, XN_ZONE_UPDATE = 0x0001, } XnFileZone; typedef enum XnBootErrorCode { XN_BOOT_OK = 0x0000, XN_BOOT_BAD_CRC = 0x0001, XN_BOOT_UPLOAD_IN_PROGRESS = 0x0002, XN_BOOT_FW_LOAD_FAILED = 0x0003, } XnBootErrorCode; typedef enum XnFwStreamType { XN_FW_STREAM_TYPE_COLOR = 0x0001, XN_FW_STREAM_TYPE_IR = 0x0002, XN_FW_STREAM_TYPE_SHIFTS = 0x0003, XN_FW_STREAM_TYPE_AUDIO = 0x0004, XN_FW_STREAM_TYPE_DY = 0x0005, XN_FW_STREAM_TYPE_LOG = 0x0008, } XnFwStreamType; typedef enum XnFwPixelFormat { XN_FW_PIXEL_FORMAT_NONE = 0x0000, XN_FW_PIXEL_FORMAT_SHIFTS_9_3 = 0x0001, XN_FW_PIXEL_FORMAT_GRAYSCALE16 = 0x0002, XN_FW_PIXEL_FORMAT_YUV422 = 0x0003, XN_FW_PIXEL_FORMAT_BAYER8 = 0x0004, } XnFwPixelFormat; typedef enum XnFwCompressionType { XN_FW_COMPRESSION_NONE = 0x0000, XN_FW_COMPRESSION_8Z = 0x0001, XN_FW_COMPRESSION_16Z = 0x0002, XN_FW_COMPRESSION_24Z = 0x0003, 
XN_FW_COMPRESSION_6_BIT_PACKED = 0x0004, XN_FW_COMPRESSION_10_BIT_PACKED = 0x0005, XN_FW_COMPRESSION_11_BIT_PACKED = 0x0006, XN_FW_COMPRESSION_12_BIT_PACKED = 0x0007, } XnFwCompressionType; #pragma pack (push, 1) #define XN_MAX_VERSION_MODIFIER_LENGTH 16 typedef struct XnDetailedVersion { uint8_t m_nMajor; uint8_t m_nMinor; uint16_t m_nMaintenance; uint32_t m_nBuild; char m_strModifier[XN_MAX_VERSION_MODIFIER_LENGTH]; } XnDetailedVersion; typedef struct XnBootStatus { XnFileZone zone; XnBootErrorCode errorCode; } XnBootStatus; typedef struct XnFwStreamInfo { XnFwStreamType type; char creationInfo[80]; } XnFwStreamInfo; typedef struct XnFwStreamVideoMode { uint32_t m_nXRes; uint32_t m_nYRes; uint32_t m_nFPS; XnFwPixelFormat m_nPixelFormat; XnFwCompressionType m_nCompression; } XnFwStreamVideoMode; typedef struct XnCommandGetFwStreamList { uint32_t count; // in: number of allocated elements in streams array. out: number of written elements in the array XnFwStreamInfo* streams; } XnCommandGetFwStreamList; typedef struct XnCommandCreateStream { XnFwStreamType type; const char* creationInfo; uint32_t id; // out } XnCommandCreateStream; typedef struct XnCommandDestroyStream { uint32_t id; } XnCommandDestroyStream; typedef struct XnCommandStartStream { uint32_t id; } XnCommandStartStream; typedef struct XnCommandStopStream { uint32_t id; } XnCommandStopStream; typedef struct XnCommandGetFwStreamVideoModeList { int streamId; uint32_t count; // in: number of allocated elements in videoModes array. 
out: number of written elements in the array XnFwStreamVideoMode* videoModes; } XnCommandGetFwStreamVideoModeList; typedef struct XnCommandSetFwStreamVideoMode { int streamId; XnFwStreamVideoMode videoMode; } XnCommandSetFwStreamVideoMode; typedef struct XnCommandGetFwStreamVideoMode { int streamId; XnFwStreamVideoMode videoMode; // out } XnCommandGetFwStreamVideoMode; #pragma pack (pop) #endif //__XN_PRIME_CLIENT_PROPS_H__ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/MacOSX/0000700000175000017500000000000012240433507023045 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/MacOSX/OniPlatformMacOSX.h0000600000175000017500000000414012240433507026464 0ustar jsprickejspricke/***************************************************************************** * * * PrimeSense PSCommon Library * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of PSCommon. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _ONI_PLATFORM_MACOSX_H_ #define _ONI_PLATFORM_MACOSX_H_ // Start with Linux-x86, and override what's different #include "../Linux-x86/OniPlatformLinux-x86.h" #include #undef ONI_PLATFORM #undef ONI_PLATFORM_STRING #define ONI_PLATFORM ONI_PLATFORM_MACOSX #define ONI_PLATFORM_STRING "MacOSX" #define ONI_PLATFORM_HAS_NO_TIMED_OPS #define ONI_PLATFORM_HAS_NO_CLOCK_GETTIME #define ONI_PLATFORM_HAS_NO_SCHED_PARAM #define ONI_PLATFORM_HAS_BUILTIN_SEMUN #undef ONI_THREAD_STATIC #define ONI_THREAD_STATIC #endif //_ONI_PLATFORM_MACOSX_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Win32/0000700000175000017500000000000012240433507022655 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Win32/OniPlatformWin32.h0000600000175000017500000001311612240433507026107 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _ONI_PLATFORM_WIN32_H_ #define _ONI_PLATFORM_WIN32_H_ //--------------------------------------------------------------------------- // Prerequisites //--------------------------------------------------------------------------- #ifndef WINVER // Allow use of features specific to Windows XP or later #define WINVER 0x0501 #endif #ifndef _WIN32_WINNT // Allow use of features specific to Windows XP or later #define _WIN32_WINNT 0x0501 #endif #ifndef _WIN32_WINDOWS // Allow use of features specific to Windows 98 or later #define _WIN32_WINDOWS 0x0410 #endif #ifndef _WIN32_IE // Allow use of features specific to IE 6.0 or later #define _WIN32_IE 0x0600 #endif #define WIN32_LEAN_AND_MEAN // Exclude rarely-used stuff from Windows headers // Undeprecate CRT functions #ifndef _CRT_SECURE_NO_DEPRECATE #define _CRT_SECURE_NO_DEPRECATE 1 #endif //--------------------------------------------------------------------------- // Includes //--------------------------------------------------------------------------- #include #include #include #include #include #include #include #include #include #if _MSC_VER < 1600 // Visual Studio 2008 and older doesn't have stdint.h... 
typedef signed char int8_t; typedef short int16_t; typedef int int32_t; typedef __int64 int64_t; typedef unsigned char uint8_t; typedef unsigned short uint16_t; typedef unsigned int uint32_t; typedef unsigned __int64 uint64_t; #else #include #endif //--------------------------------------------------------------------------- // Platform Basic Definition //--------------------------------------------------------------------------- #define ONI_PLATFORM ONI_PLATFORM_WIN32 #define ONI_PLATFORM_STRING "Win32" //--------------------------------------------------------------------------- // Platform Capabilities //--------------------------------------------------------------------------- #define ONI_PLATFORM_ENDIAN_TYPE ONI_PLATFORM_IS_LITTLE_ENDIAN #define ONI_PLATFORM_SUPPORTS_DYNAMIC_LIBS 1 //--------------------------------------------------------------------------- // Memory //--------------------------------------------------------------------------- /** The default memory alignment. */ #define ONI_DEFAULT_MEM_ALIGN 16 /** The thread static declarator (using TLS). */ #define ONI_THREAD_STATIC __declspec(thread) //--------------------------------------------------------------------------- // Files //--------------------------------------------------------------------------- /** The maximum allowed file path size (in bytes). */ #define ONI_FILE_MAX_PATH MAX_PATH //--------------------------------------------------------------------------- // Call backs //--------------------------------------------------------------------------- /** The std call type. */ #define ONI_STDCALL __stdcall /** The call back calling convention. */ #define ONI_CALLBACK_TYPE ONI_STDCALL /** The C and C++ calling convension. */ #define ONI_C_DECL __cdecl //--------------------------------------------------------------------------- // Macros //--------------------------------------------------------------------------- /** Returns the date and time at compile time. 
*/ #define ONI_TIMESTAMP __DATE__ " " __TIME__ /** Converts n into a pre-processor string. */ #define ONI_STRINGIFY(n) ONI_STRINGIFY_HELPER(n) #define ONI_STRINGIFY_HELPER(n) #n //--------------------------------------------------------------------------- // API Export/Import Macros //--------------------------------------------------------------------------- /** Indicates an exported shared library function. */ #define ONI_API_EXPORT __declspec(dllexport) /** Indicates an imported shared library function. */ #define ONI_API_IMPORT __declspec(dllimport) /** Indicates a deprecated function */ #if _MSC_VER < 1400 // Before VS2005 there was no support for declspec deprecated... #define ONI_API_DEPRECATED(msg) #else #define ONI_API_DEPRECATED(msg) __declspec(deprecated(msg)) #endif #endif //_ONI_PLATFORM_WIN32_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/OniProperties.h0000600000175000017500000000554312240433507024737 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _ONI_PROPERTIES_H_ #define _ONI_PROPERTIES_H_ namespace openni { // Device properties enum { DEVICE_PROPERTY_FIRMWARE_VERSION = 0, // string DEVICE_PROPERTY_DRIVER_VERSION = 1, // OniVersion DEVICE_PROPERTY_HARDWARE_VERSION = 2, // int DEVICE_PROPERTY_SERIAL_NUMBER = 3, // string DEVICE_PROPERTY_ERROR_STATE = 4, // ?? DEVICE_PROPERTY_IMAGE_REGISTRATION = 5, // OniImageRegistrationMode // Files DEVICE_PROPERTY_PLAYBACK_SPEED = 100, // float DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED = 101, // OniBool }; // Stream properties enum { STREAM_PROPERTY_CROPPING = 0, // OniCropping* STREAM_PROPERTY_HORIZONTAL_FOV = 1, // float: radians STREAM_PROPERTY_VERTICAL_FOV = 2, // float: radians STREAM_PROPERTY_VIDEO_MODE = 3, // OniVideoMode* STREAM_PROPERTY_MAX_VALUE = 4, // int STREAM_PROPERTY_MIN_VALUE = 5, // int STREAM_PROPERTY_STRIDE = 6, // int STREAM_PROPERTY_MIRRORING = 7, // OniBool STREAM_PROPERTY_NUMBER_OF_FRAMES = 8, // int // Camera STREAM_PROPERTY_AUTO_WHITE_BALANCE = 100, // OniBool STREAM_PROPERTY_AUTO_EXPOSURE = 101, // OniBool STREAM_PROPERTY_EXPOSURE = 102, // int STREAM_PROPERTY_GAIN = 103, // int }; // Device commands (for Invoke) enum { DEVICE_COMMAND_SEEK = 1, // OniSeek }; } // namespace openni #endif // _ONI_PROPERTIES_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/OniEnums.h0000600000175000017500000000532712240433507023672 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. 
* * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #ifndef _ONI_ENUMS_H_ #define _ONI_ENUMS_H_ namespace openni { /** Possible failure values */ typedef enum { STATUS_OK = 0, STATUS_ERROR = 1, STATUS_NOT_IMPLEMENTED = 2, STATUS_NOT_SUPPORTED = 3, STATUS_BAD_PARAMETER = 4, STATUS_OUT_OF_FLOW = 5, STATUS_NO_DEVICE = 6, STATUS_TIME_OUT = 102, } Status; /** The source of the stream */ typedef enum { SENSOR_IR = 1, SENSOR_COLOR = 2, SENSOR_DEPTH = 3, } SensorType; /** All available formats of the output of a stream */ typedef enum { // Depth PIXEL_FORMAT_DEPTH_1_MM = 100, PIXEL_FORMAT_DEPTH_100_UM = 101, PIXEL_FORMAT_SHIFT_9_2 = 102, PIXEL_FORMAT_SHIFT_9_3 = 103, // Color PIXEL_FORMAT_RGB888 = 200, PIXEL_FORMAT_YUV422 = 201, PIXEL_FORMAT_GRAY8 = 202, PIXEL_FORMAT_GRAY16 = 203, PIXEL_FORMAT_JPEG = 204, PIXEL_FORMAT_YUYV = 205, } PixelFormat; typedef enum { DEVICE_STATE_OK = 0, DEVICE_STATE_ERROR = 1, DEVICE_STATE_NOT_READY = 2, DEVICE_STATE_EOF = 3 } DeviceState; typedef enum { IMAGE_REGISTRATION_OFF = 0, IMAGE_REGISTRATION_DEPTH_TO_COLOR = 1, } ImageRegistrationMode; static const int TIMEOUT_NONE = 0; static const int TIMEOUT_FOREVER = -1; } // namespace openni #endif // _ONI_ENUMS_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/OniCTypes.h0000600000175000017500000001304212240433507024003 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. 
* * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #ifndef _ONI_TYPES_H_ #define _ONI_TYPES_H_ #include "OniPlatform.h" #include "OniCEnums.h" /** Basic types **/ typedef int OniBool; #ifndef TRUE #define TRUE 1 #endif //TRUE #ifndef FALSE #define FALSE 0 #endif //FALSE #define ONI_MAX_STR 256 #define ONI_MAX_SENSORS 10 struct OniCallbackHandleImpl; typedef struct OniCallbackHandleImpl* OniCallbackHandle; /** Holds an OpenNI version number, which consists of four separate numbers in the format: @c major.minor.maintenance.build. For example: 2.0.0.20. */ typedef struct { /** Major version number, incremented for major API restructuring. */ int major; /** Minor version number, incremented when significant new features added. */ int minor; /** Maintenance build number, incremented for new releases that primarily provide minor bug fixes. */ int maintenance; /** Build number. Incremented for each new API build. Generally not shown on the installer and download site. 
*/ int build; } OniVersion; typedef int OniHardwareVersion; /** Description of the output: format and resolution */ typedef struct { OniPixelFormat pixelFormat; int resolutionX; int resolutionY; int fps; } OniVideoMode; /** List of supported video modes by a specific source */ typedef struct { OniSensorType sensorType; int numSupportedVideoModes; OniVideoMode *pSupportedVideoModes; } OniSensorInfo; /** Basic description of a device */ typedef struct { char uri[ONI_MAX_STR]; char vendor[ONI_MAX_STR]; char name[ONI_MAX_STR]; uint16_t usbVendorId; uint16_t usbProductId; } OniDeviceInfo; struct _OniDevice; typedef _OniDevice* OniDeviceHandle; struct _OniStream; typedef _OniStream* OniStreamHandle; struct _OniRecorder; typedef _OniRecorder* OniRecorderHandle; /** All information of the current frame */ typedef struct { int dataSize; void* data; OniSensorType sensorType; uint64_t timestamp; int frameIndex; int width; int height; OniVideoMode videoMode; OniBool croppingEnabled; int cropOriginX; int cropOriginY; int stride; } OniFrame; typedef void (ONI_CALLBACK_TYPE* OniNewFrameCallback)(OniStreamHandle stream, void* pCookie); typedef void (ONI_CALLBACK_TYPE* OniGeneralCallback)(void* pCookie); typedef void (ONI_CALLBACK_TYPE* OniDeviceInfoCallback)(const OniDeviceInfo* pInfo, void* pCookie); typedef void (ONI_CALLBACK_TYPE* OniDeviceStateCallback)(const OniDeviceInfo* pInfo, OniDeviceState deviceState, void* pCookie); typedef void* (ONI_CALLBACK_TYPE* OniFrameAllocBufferCallback)(int size, void* pCookie); typedef void (ONI_CALLBACK_TYPE* OniFrameFreeBufferCallback)(void* data, void* pCookie); typedef struct { OniDeviceInfoCallback deviceConnected; OniDeviceInfoCallback deviceDisconnected; OniDeviceStateCallback deviceStateChanged; } OniDeviceCallbacks; typedef struct { int enabled; int originX; int originY; int width; int height; } OniCropping; // Pixel types /** Pixel type used to store depth images. 
*/ typedef uint16_t OniDepthPixel; /** Pixel type used to store 16-bit grayscale images */ typedef uint16_t OniGrayscale16Pixel; /** Pixel type used to store 8-bit grayscale/bayer images */ typedef uint8_t OniGrayscale8Pixel; #pragma pack (push, 1) /** Holds the value of a single color image pixel in 24-bit RGB format. */ typedef struct { /* Red value of this pixel. */ uint8_t r; /* Green value of this pixel. */ uint8_t g; /* Blue value of this pixel. */ uint8_t b; } OniRGB888Pixel; /** Holds the value of two pixels in YUV422 format (Luminance/Chrominance,16-bits/pixel). The first pixel has the values y1, u, v. The second pixel has the values y2, u, v. */ typedef struct { /** First chrominance value for two pixels, stored as blue luminance difference signal. */ uint8_t u; /** Overall luminance value of first pixel. */ uint8_t y1; /** Second chrominance value for two pixels, stored as red luminance difference signal. */ uint8_t v; /** Overall luminance value of second pixel. */ uint8_t y2; } OniYUV422DoublePixel; #pragma pack (pop) typedef struct { int frameIndex; OniStreamHandle stream; } OniSeek; #endif // _ONI_TYPES_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/OniCAPI.h0000600000175000017500000003154712240433507023322 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #ifndef _ONI_C_API_H_ #define _ONI_C_API_H_ #include "OniPlatform.h" #include "OniCTypes.h" #include "OniCProperties.h" #include "OniVersion.h" /******************************************** General APIs */ /** Initialize OpenNI2. Use ONI_API_VERSION as the version. */ ONI_C_API OniStatus oniInitialize(int apiVersion); /** Shutdown OpenNI2 */ ONI_C_API void oniShutdown(); /** * Get the list of currently connected device. * Each device is represented by its OniDeviceInfo. * pDevices will be allocated inside. */ ONI_C_API OniStatus oniGetDeviceList(OniDeviceInfo** pDevices, int* pNumDevices); /** Release previously allocated device list */ ONI_C_API OniStatus oniReleaseDeviceList(OniDeviceInfo* pDevices); ONI_C_API OniStatus oniRegisterDeviceCallbacks(OniDeviceCallbacks* pCallbacks, void* pCookie, OniCallbackHandle* pHandle); ONI_C_API void oniUnregisterDeviceCallbacks(OniCallbackHandle handle); /** Wait for any of the streams to have a new frame */ ONI_C_API OniStatus oniWaitForAnyStream(OniStreamHandle* pStreams, int numStreams, int* pStreamIndex, int timeout); /** Get the current version of OpenNI2 */ ONI_C_API OniVersion oniGetVersion(); /** Translate from format to number of bytes per pixel. Will return 0 for formats in which the number of bytes per pixel isn't fixed. */ ONI_C_API int oniFormatBytesPerPixel(OniPixelFormat format); /** Get internal error */ ONI_C_API const char* oniGetExtendedError(); /******************************************** Device APIs */ /** Open a device. Uri can be taken from the matching OniDeviceInfo. 
*/ ONI_C_API OniStatus oniDeviceOpen(const char* uri, OniDeviceHandle* pDevice); /** Close a device */ ONI_C_API OniStatus oniDeviceClose(OniDeviceHandle device); /** Get the possible configurations available for a specific source, or NULL if the source does not exist. */ ONI_C_API const OniSensorInfo* oniDeviceGetSensorInfo(OniDeviceHandle device, OniSensorType sensorType); /** Get the OniDeviceInfo of a certain device. */ ONI_C_API OniStatus oniDeviceGetInfo(OniDeviceHandle device, OniDeviceInfo* pInfo); /** Create a new stream in the device. The stream will originate from the source. */ ONI_C_API OniStatus oniDeviceCreateStream(OniDeviceHandle device, OniSensorType sensorType, OniStreamHandle* pStream); ONI_C_API OniStatus oniDeviceEnableDepthColorSync(OniDeviceHandle device); ONI_C_API void oniDeviceDisableDepthColorSync(OniDeviceHandle device); ONI_C_API OniBool oniDeviceGetDepthColorSyncEnabled(OniDeviceHandle device); /** Set property in the device. Use the properties listed in OniTypes.h: ONI_DEVICE_PROPERTY_..., or specific ones supplied by the device. */ ONI_C_API OniStatus oniDeviceSetProperty(OniDeviceHandle device, int propertyId, const void* data, int dataSize); /** Get property in the device. Use the properties listed in OniTypes.h: ONI_DEVICE_PROPERTY_..., or specific ones supplied by the device. */ ONI_C_API OniStatus oniDeviceGetProperty(OniDeviceHandle device, int propertyId, void* data, int* pDataSize); /** Check if the property is supported by the device. Use the properties listed in OniTypes.h: ONI_DEVICE_PROPERTY_..., or specific ones supplied by the device. */ ONI_C_API OniBool oniDeviceIsPropertySupported(OniDeviceHandle device, int propertyId); /** Invoke an internal functionality of the device. 
*/ ONI_C_API OniStatus oniDeviceInvoke(OniDeviceHandle device, int commandId, void* data, int dataSize); /** Check if a command is supported, for invoke */ ONI_C_API OniBool oniDeviceIsCommandSupported(OniDeviceHandle device, int commandId); ONI_C_API OniBool oniDeviceIsImageRegistrationModeSupported(OniDeviceHandle device, OniImageRegistrationMode mode); /** @internal */ ONI_C_API OniStatus oniDeviceOpenEx(const char* uri, const char* mode, OniDeviceHandle* pDevice); /******************************************** Stream APIs */ /** Destroy an existing stream */ ONI_C_API void oniStreamDestroy(OniStreamHandle stream); /** Get the OniSensorInfo of the certain stream. */ ONI_C_API const OniSensorInfo* oniStreamGetSensorInfo(OniStreamHandle stream); /** Start generating data from the stream. */ ONI_C_API OniStatus oniStreamStart(OniStreamHandle stream); /** Stop generating data from the stream. */ ONI_C_API void oniStreamStop(OniStreamHandle stream); /** Get the next frame from the stream. This function is blocking until there is a new frame from the stream. For timeout, use oniWaitForStreams() first */ ONI_C_API OniStatus oniStreamReadFrame(OniStreamHandle stream, OniFrame** pFrame); /** Register a callback to when the stream has a new frame. */ ONI_C_API OniStatus oniStreamRegisterNewFrameCallback(OniStreamHandle stream, OniNewFrameCallback handler, void* pCookie, OniCallbackHandle* pHandle); /** Unregister a previously registered callback to when the stream has a new frame. */ ONI_C_API void oniStreamUnregisterNewFrameCallback(OniStreamHandle stream, OniCallbackHandle handle); /** Set property in the stream. Use the properties listed in OniTypes.h: ONI_STREAM_PROPERTY_..., or specific ones supplied by the device for its streams. */ ONI_C_API OniStatus oniStreamSetProperty(OniStreamHandle stream, int propertyId, const void* data, int dataSize); /** Get property in the stream. 
Use the properties listed in OniTypes.h: ONI_STREAM_PROPERTY_..., or specific ones supplied by the device for its streams. */ ONI_C_API OniStatus oniStreamGetProperty(OniStreamHandle stream, int propertyId, void* data, int* pDataSize); /** Check if the property is supported the stream. Use the properties listed in OniTypes.h: ONI_STREAM_PROPERTY_..., or specific ones supplied by the device for its streams. */ ONI_C_API OniBool oniStreamIsPropertySupported(OniStreamHandle stream, int propertyId); /** Invoke an internal functionality of the stream. */ ONI_C_API OniStatus oniStreamInvoke(OniStreamHandle stream, int commandId, void* data, int dataSize); /** Check if a command is supported, for invoke */ ONI_C_API OniBool oniStreamIsCommandSupported(OniStreamHandle stream, int commandId); /** Sets the stream buffer allocation functions. Note that this function may only be called while stream is not started. */ ONI_C_API OniStatus oniStreamSetFrameBuffersAllocator(OniStreamHandle stream, OniFrameAllocBufferCallback alloc, OniFrameFreeBufferCallback free, void* pCookie); //// /** Mark another user of the frame. */ ONI_C_API void oniFrameAddRef(OniFrame* pFrame); /** Mark that the frame is no longer needed. */ ONI_C_API void oniFrameRelease(OniFrame* pFrame); // ONI_C_API OniStatus oniConvertRealWorldToProjective(OniStreamHandle stream, OniFloatPoint3D* pRealWorldPoint, OniFloatPoint3D* pProjectivePoint); // ONI_C_API OniStatus oniConvertProjectiveToRealWorld(OniStreamHandle stream, OniFloatPoint3D* pProjectivePoint, OniFloatPoint3D* pRealWorldPoint); /** * Creates a recorder that records to a file. * @param [in] fileName The name of the file that will contain the recording. * @param [out] pRecorder Points to the handle to the newly created recorder. * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. 
*/ ONI_C_API OniStatus oniCreateRecorder(const char* fileName, OniRecorderHandle* pRecorder); /** * Attaches a stream to a recorder. The amount of attached streams is virtually * infinite. You cannot attach a stream after you have started a recording, if * you do: an error will be returned by oniRecorderAttachStream. * @param [in] recorder The handle to the recorder. * @param [in] stream The handle to the stream. * @param [in] allowLossyCompression Allows/denies lossy compression * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. */ ONI_C_API OniStatus oniRecorderAttachStream( OniRecorderHandle recorder, OniStreamHandle stream, OniBool allowLossyCompression); /** * Starts recording. There must be at least one stream attached to the recorder, * if not: oniRecorderStart will return an error. * @param[in] recorder The handle to the recorder. * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. */ ONI_C_API OniStatus oniRecorderStart(OniRecorderHandle recorder); /** * Stops recording. You can resume recording via oniRecorderStart. * @param[in] recorder The handle to the recorder. * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. */ ONI_C_API void oniRecorderStop(OniRecorderHandle recorder); /** * Stops recording if needed, and destroys a recorder. * @param [in,out] recorder The handle to the recorder, the handle will be * invalidated (nullified) when the function returns. * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. 
*/ ONI_C_API OniStatus oniRecorderDestroy(OniRecorderHandle* pRecorder); ONI_C_API OniStatus oniCoordinateConverterDepthToWorld(OniStreamHandle depthStream, float depthX, float depthY, float depthZ, float* pWorldX, float* pWorldY, float* pWorldZ); ONI_C_API OniStatus oniCoordinateConverterWorldToDepth(OniStreamHandle depthStream, float worldX, float worldY, float worldZ, float* pDepthX, float* pDepthY, float* pDepthZ); ONI_C_API OniStatus oniCoordinateConverterDepthToColor(OniStreamHandle depthStream, OniStreamHandle colorStream, int depthX, int depthY, OniDepthPixel depthZ, int* pColorX, int* pColorY); /******************************************** Log APIs */ /** * Change the log output folder * @param const char * strOutputFolder [in] path to the desirebale folder * * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. */ ONI_C_API OniStatus oniSetLogOutputFolder(const char* strOutputFolder); /** * Get the current log file name * @param char * strFileName [out] hold the returned file name * @param int nBufferSize [in] size of strFileName * * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. */ ONI_C_API OniStatus oniGetLogFileName(char* strFileName, int nBufferSize); /** * Set the Minimum severity for log produce * @param const char * strMask [in] Name of the logger * * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. */ ONI_C_API OniStatus oniSetLogMinSeverity(int nMinSeverity); /** * Configures if log entries will be printed to console. * @param OniBool bConsoleOutput [in] TRUE to print log entries to console, FALSE otherwise. * * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. */ ONI_C_API OniStatus oniSetLogConsoleOutput(OniBool bConsoleOutput); /** * Configures if log entries will be printed to a log file. 
* @param OniBool bFileOutput [in] TRUE to print log entries to the file, FALSE otherwise. * * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. */ ONI_C_API OniStatus oniSetLogFileOutput(OniBool bFileOutput); #if ONI_PLATFORM == ONI_PLATFORM_ANDROID_ARM /** * Configures if log entries will be printed to the Android log. * @param OniBool bAndroidOutput [in] TRUE to print log entries to the Android log, FALSE otherwise. * * @retval ONI_STATUS_OK Upon successful completion. * @retval ONI_STATUS_ERROR Upon any kind of failure. */ ONI_C_API OniStatus oniSetLogAndroidOutput(OniBool bAndroidOutput); #endif #endif // _ONI_C_API_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Driver/0000700000175000017500000000000012240433507023206 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Driver/OniDriverTypes.h0000600000175000017500000000512712240433507026314 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _ONI_DRIVER_TYPES_H_ #define _ONI_DRIVER_TYPES_H_ #include #include #define ONI_STREAM_PROPERTY_PRIVATE_BASE XN_MAX_UINT16 typedef struct { int dataSize; void* data; } OniGeneralBuffer; /////// DriverServices struct OniDriverServices { void* driverServices; void (ONI_CALLBACK_TYPE* errorLoggerAppend)(void* driverServices, const char* format, va_list args); void (ONI_CALLBACK_TYPE* errorLoggerClear)(void* driverServices); void (ONI_CALLBACK_TYPE* log)(void* driverServices, int severity, const char* file, int line, const char* mask, const char* message); }; struct OniStreamServices { void* streamServices; int (ONI_CALLBACK_TYPE* getDefaultRequiredFrameSize)(void* streamServices); OniFrame* (ONI_CALLBACK_TYPE* acquireFrame)(void* streamServices); // returns a frame with size corresponding to getRequiredFrameSize() void (ONI_CALLBACK_TYPE* addFrameRef)(void* streamServices, OniFrame* pframe); void (ONI_CALLBACK_TYPE* releaseFrame)(void* streamServices, OniFrame* pframe); }; #endif // _ONI_DRIVER_TYPES_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/Driver/OniDriverAPI.h0000600000175000017500000004405412240433507025623 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #ifndef _ONI_DRIVER_API_H_ #define _ONI_DRIVER_API_H_ #include "OniPlatform.h" #include "OniCTypes.h" #include "OniCProperties.h" #include "OniDriverTypes.h" #include namespace oni { namespace driver { class DeviceBase; class StreamBase; typedef void (ONI_CALLBACK_TYPE* DeviceConnectedCallback)(const OniDeviceInfo*, void* pCookie); typedef void (ONI_CALLBACK_TYPE* DeviceDisconnectedCallback)(const OniDeviceInfo*, void* pCookie); typedef void (ONI_CALLBACK_TYPE* DeviceStateChangedCallback)(const OniDeviceInfo* deviceId, int errorState, void* pCookie); typedef void (ONI_CALLBACK_TYPE* NewFrameCallback)(StreamBase* streamId, OniFrame*, void* pCookie); typedef void (ONI_CALLBACK_TYPE* PropertyChangedCallback)(void* sender, int propertyId, const void* data, int dataSize, void* pCookie); class StreamServices : public OniStreamServices { public: int getDefaultRequiredFrameSize() { return OniStreamServices::getDefaultRequiredFrameSize(streamServices); } OniFrame* acquireFrame() { return OniStreamServices::acquireFrame(streamServices); } void addFrameRef(OniFrame* pFrame) { OniStreamServices::addFrameRef(streamServices, pFrame); } void releaseFrame(OniFrame* pFrame) { OniStreamServices::releaseFrame(streamServices, pFrame); } }; class StreamBase { public: StreamBase() : m_newFrameCallback(NULL), m_propertyChangedCallback(NULL) {} virtual ~StreamBase() {} virtual void setServices(StreamServices* pStreamServices) { m_pServices = pStreamServices; } virtual OniStatus setProperty(int /*propertyId*/, const void* /*data*/, int /*dataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;} virtual OniStatus getProperty(int /*propertyId*/, void* /*data*/, int* /*pDataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;} virtual OniBool isPropertySupported(int /*propertyId*/) {return FALSE;} virtual OniStatus 
invoke(int /*commandId*/, void* /*data*/, int /*dataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;} virtual OniBool isCommandSupported(int /*commandId*/) {return FALSE;} virtual int getRequiredFrameSize() { return getServices().getDefaultRequiredFrameSize(); } virtual OniStatus start() = 0; virtual void stop() = 0; virtual void setNewFrameCallback(NewFrameCallback handler, void* pCookie) { m_newFrameCallback = handler; m_newFrameCallbackCookie = pCookie; } virtual void setPropertyChangedCallback(PropertyChangedCallback handler, void* pCookie) { m_propertyChangedCallback = handler; m_propertyChangedCookie = pCookie; } virtual void notifyAllProperties() { return; } virtual OniStatus convertDepthToColorCoordinates(StreamBase* /*colorStream*/, int /*depthX*/, int /*depthY*/, OniDepthPixel /*depthZ*/, int* /*pColorX*/, int* /*pColorY*/) { return ONI_STATUS_NOT_SUPPORTED; } protected: void raiseNewFrame(OniFrame* pFrame) { (*m_newFrameCallback)(this, pFrame, m_newFrameCallbackCookie); } void raisePropertyChanged(int propertyId, const void* data, int dataSize) { (*m_propertyChangedCallback)(this, propertyId, data, dataSize, m_propertyChangedCookie); } StreamServices& getServices() { return *m_pServices; } private: StreamServices* m_pServices; NewFrameCallback m_newFrameCallback; void* m_newFrameCallbackCookie; PropertyChangedCallback m_propertyChangedCallback; void* m_propertyChangedCookie; }; class DeviceBase { public: DeviceBase() {} virtual ~DeviceBase() {} virtual OniStatus getSensorInfoList(OniSensorInfo** pSensorInfos, int* numSensors) = 0; virtual StreamBase* createStream(OniSensorType) = 0; virtual void destroyStream(StreamBase* pStream) = 0; virtual OniStatus setProperty(int /*propertyId*/, const void* /*data*/, int /*dataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;} virtual OniStatus getProperty(int /*propertyId*/, void* /*data*/, int* /*pDataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;} virtual OniBool isPropertySupported(int /*propertyId*/) {return FALSE;} virtual 
OniStatus invoke(int /*commandId*/, void* /*data*/, int /*dataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;} virtual OniBool isCommandSupported(int /*commandId*/) {return FALSE;} virtual OniStatus tryManualTrigger() {return ONI_STATUS_OK;} virtual void setPropertyChangedCallback(PropertyChangedCallback handler, void* pCookie) { m_propertyChangedCallback = handler; m_propertyChangedCookie = pCookie; } virtual void notifyAllProperties() { return; } virtual OniBool isImageRegistrationModeSupported(OniImageRegistrationMode mode) { return (mode == ONI_IMAGE_REGISTRATION_OFF); } protected: void raisePropertyChanged(int propertyId, const void* data, int dataSize) { (*m_propertyChangedCallback)(this, propertyId, data, dataSize, m_propertyChangedCookie); } private: PropertyChangedCallback m_propertyChangedCallback; void* m_propertyChangedCookie; }; class DriverServices { public: DriverServices(OniDriverServices* pDriverServices) : m_pDriverServices(pDriverServices) {} void errorLoggerAppend(const char* format, ...) 
{ va_list args; va_start(args, format); m_pDriverServices->errorLoggerAppend(m_pDriverServices->driverServices, format, args); va_end(args); } void errorLoggerClear() { m_pDriverServices->errorLoggerClear(m_pDriverServices->driverServices); } void log(int severity, const char* file, int line, const char* mask, const char* message) { m_pDriverServices->log(m_pDriverServices->driverServices, severity, file, line, mask, message); } private: OniDriverServices* m_pDriverServices; }; class DriverBase { public: DriverBase(OniDriverServices* pDriverServices) : m_services(pDriverServices) {} virtual ~DriverBase() {} virtual OniStatus initialize(DeviceConnectedCallback connectedCallback, DeviceDisconnectedCallback disconnectedCallback, DeviceStateChangedCallback deviceStateChangedCallback, void* pCookie) { m_deviceConnectedEvent = connectedCallback; m_deviceDisconnectedEvent = disconnectedCallback; m_deviceStateChangedEvent = deviceStateChangedCallback; m_pCookie = pCookie; return ONI_STATUS_OK; } virtual DeviceBase* deviceOpen(const char* uri, const char* mode) = 0; virtual void deviceClose(DeviceBase* pDevice) = 0; virtual void shutdown() = 0; virtual OniStatus tryDevice(const char* /*uri*/) { return ONI_STATUS_ERROR;} virtual void* enableFrameSync(StreamBase** /*pStreams*/, int /*streamCount*/) { return NULL; } virtual void disableFrameSync(void* /*frameSyncGroup*/) {} protected: void deviceConnected(const OniDeviceInfo* pInfo) { (m_deviceConnectedEvent)(pInfo, m_pCookie); } void deviceDisconnected(const OniDeviceInfo* pInfo) { (m_deviceDisconnectedEvent)(pInfo, m_pCookie); } void deviceStateChanged(const OniDeviceInfo* pInfo, int errorState) { (m_deviceStateChangedEvent)(pInfo, errorState, m_pCookie); } DriverServices& getServices() { return m_services; } private: DeviceConnectedCallback m_deviceConnectedEvent; DeviceDisconnectedCallback m_deviceDisconnectedEvent; DeviceStateChangedCallback m_deviceStateChangedEvent; void* m_pCookie; DriverServices m_services; }; }} // 
oni::driver #define ONI_EXPORT_DRIVER(DriverClass) \ \ oni::driver::DriverBase* g_pDriver = NULL; \ \ /* As Driver */ \ ONI_C_API_EXPORT void oniDriverCreate(OniDriverServices* driverServices) { \ g_pDriver = XN_NEW(DriverClass, driverServices); \ } \ ONI_C_API_EXPORT void oniDriverDestroy() \ { \ g_pDriver->shutdown(); \ XN_DELETE(g_pDriver); g_pDriver = NULL; \ } \ ONI_C_API_EXPORT OniStatus oniDriverInitialize(oni::driver::DeviceConnectedCallback deviceConnectedCallback, \ oni::driver::DeviceDisconnectedCallback deviceDisconnectedCallback, \ oni::driver::DeviceStateChangedCallback deviceStateChangedCallback, \ void* pCookie) \ { \ return g_pDriver->initialize(deviceConnectedCallback, deviceDisconnectedCallback, deviceStateChangedCallback, pCookie); \ } \ \ ONI_C_API_EXPORT OniStatus oniDriverTryDevice(const char* uri) \ { \ return g_pDriver->tryDevice(uri); \ } \ \ /* As Device */ \ ONI_C_API_EXPORT oni::driver::DeviceBase* oniDriverDeviceOpen(const char* uri, const char* mode) \ { \ return g_pDriver->deviceOpen(uri, mode); \ } \ ONI_C_API_EXPORT void oniDriverDeviceClose(oni::driver::DeviceBase* pDevice) \ { \ g_pDriver->deviceClose(pDevice); \ } \ \ ONI_C_API_EXPORT OniStatus oniDriverDeviceGetSensorInfoList(oni::driver::DeviceBase* pDevice, OniSensorInfo** pSensorInfos, \ int* numSensors) \ { \ return pDevice->getSensorInfoList(pSensorInfos, numSensors); \ } \ \ ONI_C_API_EXPORT oni::driver::StreamBase* oniDriverDeviceCreateStream(oni::driver::DeviceBase* pDevice, \ OniSensorType sensorType) \ { \ return pDevice->createStream(sensorType); \ } \ \ ONI_C_API_EXPORT void oniDriverDeviceDestroyStream(oni::driver::DeviceBase* pDevice, oni::driver::StreamBase* pStream) \ { \ return pDevice->destroyStream(pStream); \ } \ \ ONI_C_API_EXPORT OniStatus oniDriverDeviceSetProperty(oni::driver::DeviceBase* pDevice, int propertyId, \ const void* data, int dataSize) \ { \ return pDevice->setProperty(propertyId, data, dataSize); \ } \ ONI_C_API_EXPORT OniStatus 
oniDriverDeviceGetProperty(oni::driver::DeviceBase* pDevice, int propertyId, \ void* data, int* pDataSize) \ { \ return pDevice->getProperty(propertyId, data, pDataSize); \ } \ ONI_C_API_EXPORT OniBool oniDriverDeviceIsPropertySupported(oni::driver::DeviceBase* pDevice, int propertyId) \ { \ return pDevice->isPropertySupported(propertyId); \ } \ ONI_C_API_EXPORT void oniDriverDeviceSetPropertyChangedCallback(oni::driver::DeviceBase* pDevice, \ oni::driver::PropertyChangedCallback handler, void* pCookie) \ { \ pDevice->setPropertyChangedCallback(handler, pCookie); \ } \ ONI_C_API_EXPORT void oniDriverDeviceNotifyAllProperties(oni::driver::DeviceBase* pDevice) \ { \ pDevice->notifyAllProperties(); \ } \ ONI_C_API_EXPORT OniStatus oniDriverDeviceInvoke(oni::driver::DeviceBase* pDevice, int commandId, \ void* data, int dataSize) \ { \ return pDevice->invoke(commandId, data, dataSize); \ } \ ONI_C_API_EXPORT OniBool oniDriverDeviceIsCommandSupported(oni::driver::DeviceBase* pDevice, int commandId) \ { \ return pDevice->isCommandSupported(commandId); \ } \ ONI_C_API_EXPORT OniStatus oniDriverDeviceTryManualTrigger(oni::driver::DeviceBase* pDevice) \ { \ return pDevice->tryManualTrigger(); \ } \ ONI_C_API_EXPORT OniBool oniDriverDeviceIsImageRegistrationModeSupported(oni::driver::DeviceBase* pDevice, \ OniImageRegistrationMode mode) \ { \ return pDevice->isImageRegistrationModeSupported(mode); \ } \ \ /* As Stream */ \ ONI_C_API_EXPORT void oniDriverStreamSetServices(oni::driver::StreamBase* pStream, OniStreamServices* pServices) \ { \ pStream->setServices((oni::driver::StreamServices*)pServices); \ } \ \ ONI_C_API_EXPORT OniStatus oniDriverStreamSetProperty(oni::driver::StreamBase* pStream, int propertyId, \ const void* data, int dataSize) \ { \ return pStream->setProperty(propertyId, data, dataSize); \ } \ ONI_C_API_EXPORT OniStatus oniDriverStreamGetProperty(oni::driver::StreamBase* pStream, int propertyId, void* data, \ int* pDataSize) \ { \ return 
pStream->getProperty(propertyId, data, pDataSize); \ } \ ONI_C_API_EXPORT OniBool oniDriverStreamIsPropertySupported(oni::driver::StreamBase* pStream, int propertyId) \ { \ return pStream->isPropertySupported(propertyId); \ } \ ONI_C_API_EXPORT void oniDriverStreamSetPropertyChangedCallback(oni::driver::StreamBase* pStream, \ oni::driver::PropertyChangedCallback handler, void* pCookie) \ { \ pStream->setPropertyChangedCallback(handler, pCookie); \ } \ ONI_C_API_EXPORT void oniDriverStreamNotifyAllProperties(oni::driver::StreamBase* pStream) \ { \ pStream->notifyAllProperties(); \ } \ ONI_C_API_EXPORT OniStatus oniDriverStreamInvoke(oni::driver::StreamBase* pStream, int commandId, \ void* data, int dataSize) \ { \ return pStream->invoke(commandId, data, dataSize); \ } \ ONI_C_API_EXPORT OniBool oniDriverStreamIsCommandSupported(oni::driver::StreamBase* pStream, int commandId) \ { \ return pStream->isCommandSupported(commandId); \ } \ \ ONI_C_API_EXPORT OniStatus oniDriverStreamStart(oni::driver::StreamBase* pStream) \ { \ return pStream->start(); \ } \ ONI_C_API_EXPORT void oniDriverStreamStop(oni::driver::StreamBase* pStream) \ { \ pStream->stop(); \ } \ \ ONI_C_API_EXPORT int oniDriverStreamGetRequiredFrameSize(oni::driver::StreamBase* pStream) \ { \ return pStream->getRequiredFrameSize(); \ } \ \ ONI_C_API_EXPORT void oniDriverStreamSetNewFrameCallback(oni::driver::StreamBase* pStream, \ oni::driver::NewFrameCallback handler, void* pCookie) \ { \ pStream->setNewFrameCallback(handler, pCookie); \ } \ \ ONI_C_API_EXPORT OniStatus oniDriverStreamConvertDepthToColorCoordinates(oni::driver::StreamBase* pDepthStream, \ oni::driver::StreamBase* pColorStream, int depthX, int depthY, OniDepthPixel depthZ, int* pColorX, int* pColorY) \ { \ return pDepthStream->convertDepthToColorCoordinates(pColorStream, depthX, depthY, depthZ, pColorX, pColorY); \ } \ \ ONI_C_API_EXPORT void* oniDriverEnableFrameSync(oni::driver::StreamBase** pStreams, int streamCount) \ { \ return 
g_pDriver->enableFrameSync(pStreams, streamCount); \ } \ \ ONI_C_API_EXPORT void oniDriverDisableFrameSync(void* frameSyncGroup) \ { \ return g_pDriver->disableFrameSync(frameSyncGroup); \ } \ #endif // _ONI_DRIVER_API_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/OniCProperties.h0000600000175000017500000000563412240433507025043 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #ifndef _ONI_C_PROPERTIES_H_ #define _ONI_C_PROPERTIES_H_ // Device properties enum { ONI_DEVICE_PROPERTY_FIRMWARE_VERSION = 0, // By implementation ONI_DEVICE_PROPERTY_DRIVER_VERSION = 1, // OniVersion ONI_DEVICE_PROPERTY_HARDWARE_VERSION = 2, // int ONI_DEVICE_PROPERTY_SERIAL_NUMBER = 3, // string ONI_DEVICE_PROPERTY_ERROR_STATE = 4, // ?? 
ONI_DEVICE_PROPERTY_IMAGE_REGISTRATION = 5, // OniImageRegistrationMode // Files ONI_DEVICE_PROPERTY_PLAYBACK_SPEED = 100, // float ONI_DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED = 101, // OniBool }; // Stream properties enum { ONI_STREAM_PROPERTY_CROPPING = 0, // OniCropping* ONI_STREAM_PROPERTY_HORIZONTAL_FOV = 1, // float: radians ONI_STREAM_PROPERTY_VERTICAL_FOV = 2, // float: radians ONI_STREAM_PROPERTY_VIDEO_MODE = 3, // OniVideoMode* ONI_STREAM_PROPERTY_MAX_VALUE = 4, // int ONI_STREAM_PROPERTY_MIN_VALUE = 5, // int ONI_STREAM_PROPERTY_STRIDE = 6, // int ONI_STREAM_PROPERTY_MIRRORING = 7, // OniBool ONI_STREAM_PROPERTY_NUMBER_OF_FRAMES = 8, // int // Camera ONI_STREAM_PROPERTY_AUTO_WHITE_BALANCE = 100, // OniBool ONI_STREAM_PROPERTY_AUTO_EXPOSURE = 101, // OniBool ONI_STREAM_PROPERTY_EXPOSURE = 102, // int ONI_STREAM_PROPERTY_GAIN = 103, // int }; // Device commands (for Invoke) enum { ONI_DEVICE_COMMAND_SEEK = 1, // OniSeek }; #endif // _ONI_C_PROPERTIES_H_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/PrimeSense.h0000600000175000017500000001611012240433507024177 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _PRIME_SENSE_H_ #define _PRIME_SENSE_H_ #include /** * Additional properties for PrimeSense devices * * @remarks * properties structure is 0x1D27XXYY where XX is range and YY is code. * range values: * 00 - common stream properties * 10 - depth stream properties * E0 - device commands * F0 - device properties */ enum { // Stream Properties PS_PROPERTY_DUMP_DATA = 0x1d270001, // boolean // Device Properties PS_PROPERTY_USB_INTERFACE = 0x1d27F001, // values from XnUsbInterfaceType }; /** * Additional commands for PrimeSense devices * * @remarks * Commands structure is 0x1D27XXYY where XX is range and YY is code. * range values: * E0 - device commands */ enum { // Device Commands - use via invoke() PS_COMMAND_AHB_READ = 0x1d27E001, // XnCommandAHB PS_COMMAND_AHB_WRITE = 0x1d27E002, // XnCommandAHB PS_COMMAND_I2C_READ = 0x1d27E003, // XnCommandI2C PS_COMMAND_I2C_WRITE = 0x1d27E004, // XnCommandI2C PS_COMMAND_SOFT_RESET = 0x1d27E005, // no arguments PS_COMMAND_POWER_RESET = 0x1d27E006, // no arguments PS_COMMAND_BEGIN_FIRMWARE_UPDATE = 0x1d27E007, // no arguments PS_COMMAND_END_FIRMWARE_UPDATE = 0x1d27E008, // no arguments PS_COMMAND_UPLOAD_FILE = 0x1d27E009, // XnCommandUploadFile PS_COMMAND_DOWNLOAD_FILE = 0x1d27E00A, // XnCommandDownloadFile PS_COMMAND_GET_FILE_LIST = 0x1d27E00B, // an array of XnFileEntry PS_COMMAND_FORMAT_ZONE = 0x1d27E00C, // XnCommandFormatZone PS_COMMAND_DUMP_ENDPOINT = 0x1d27E00D, // XnCommandDumpEndpoint PS_COMMAND_GET_I2C_DEVICE_LIST = 0x1d27E00E, // XnCommandGetI2CDevices PS_COMMAND_GET_BIST_LIST = 0x1d27E00F, // XnCommandGetBistList PS_COMMAND_EXECUTE_BIST = 0x1d27E010, // XnCommandExecuteBist PS_COMMAND_USB_TEST = 0x1d27E011, // XnCommandUsbTest PS_COMMAND_GET_LOG_MASK_LIST = 0x1d27E012, // XnCommandGetLogMaskList PS_COMMAND_SET_LOG_MASK_STATE = 0x1d27E013, // XnCommandSetLogMaskState PS_COMMAND_START_LOG = 0x1d27E014, // no arguments 
PS_COMMAND_STOP_LOG = 0x1d27E015, // no arguments }; typedef enum XnUsbInterfaceType { PS_USB_INTERFACE_DONT_CARE = 0, PS_USB_INTERFACE_ISO_ENDPOINTS = 1, PS_USB_INTERFACE_BULK_ENDPOINTS = 2, } XnUsbInterfaceType; #pragma pack (push, 1) // Data Types typedef struct XnFwFileVersion { uint8_t major; uint8_t minor; uint8_t maintenance; uint8_t build; } XnFwFileVersion; typedef enum XnFwFileFlags { XN_FILE_FLAG_BAD_CRC = 0x0001, } XnFwFileFlags; typedef struct XnFwFileEntry { char name[32]; XnFwFileVersion version; uint32_t address; uint32_t size; uint16_t crc; uint16_t zone; XnFwFileFlags flags; // bitmap } XnFwFileEntry; typedef struct XnI2CDeviceInfo { uint32_t id; char name[32]; } XnI2CDeviceInfo; typedef struct XnBistInfo { uint32_t id; char name[32]; } XnBistInfo; typedef struct XnFwLogMask { uint32_t id; char name[32]; } XnFwLogMask; typedef struct XnUsbTestEndpointResult { double averageBytesPerSecond; uint32_t lostPackets; } XnUsbTestEndpointResult; // Commands typedef struct XnCommandAHB { uint32_t address; // Address of this register uint32_t offsetInBits; // Offset of the field in bits within address uint32_t widthInBits; // Width of the field in bits uint32_t value; // For read requests, this is where the actual value will be filled. For write requests, the value to write. } XnCommandAHB; typedef struct XnCommandI2C { uint32_t deviceID; // Device to communicate with uint32_t addressSize; // Size of the address, in bytes (1-4) uint32_t address; // Address uint32_t valueSize; // Size of the value, in bytes (1-4) uint32_t mask; // For write request - a mask to be applied to the value. For read requests - ignored. uint32_t value; // For write request - the value to be written. 
For read requests - the place where the actual value is written to } XnCommandI2C; typedef struct XnCommandUploadFile { const char* filePath; uint32_t uploadToFactory; } XnCommandUploadFile; typedef struct XnCommandDownloadFile { uint16_t zone; const char* firmwareFileName; const char* targetPath; } XnCommandDownloadFile; typedef struct XnCommandGetFileList { uint32_t count; // in: number of allocated elements in files array. out: number of written elements in the array XnFwFileEntry* files; } XnCommandGetFileList; typedef struct XnCommandFormatZone { uint8_t zone; } XnCommandFormatZone; typedef struct XnCommandDumpEndpoint { uint8_t endpoint; bool enabled; } XnCommandDumpEndpoint; typedef struct XnCommandGetI2CDeviceList { uint32_t count; // in: number of allocated elements in devices array. out: number of written elements in the array XnI2CDeviceInfo* devices; } XnCommandGetI2CDeviceList; typedef struct XnCommandGetBistList { uint32_t count; // in: number of allocated elements in tests array. out: number of written elements in the array XnBistInfo* tests; } XnCommandGetBistList; typedef struct XnCommandExecuteBist { uint32_t id; uint32_t errorCode; uint32_t extraDataSize; // in: number of allocated bytes in extraData. out: number of written bytes in extraData uint8_t* extraData; } XnCommandExecuteBist; typedef struct XnCommandUsbTest { uint32_t seconds; uint32_t endpointCount; // in: number of allocated bytes in endpoints array. out: number of written bytes in array XnUsbTestEndpointResult* endpoints; } XnCommandUsbTest; typedef struct XnCommandGetLogMaskList { uint32_t count; // in: number of allocated elements in masks array. 
out: number of written elements in the array XnFwLogMask* masks; } XnCommandGetLogMaskList; typedef struct XnCommandSetLogMaskState { uint32_t mask; bool enabled; } XnCommandSetLogMaskState; #pragma pack (pop) #endif //_PRIME_SENSE_H_OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Include/OpenNI.h0000600000175000017500000026762612240433507023301 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #ifndef _OPENNI_H_ #define _OPENNI_H_ #include "OniPlatform.h" #include "OniProperties.h" #include "OniEnums.h" #include "OniCAPI.h" #include "OniCProperties.h" /** openni is the namespace of the entire C++ API of OpenNI */ namespace openni { /** Pixel type used to store depth images. */ typedef uint16_t DepthPixel; /** Pixel type used to store IR images. */ typedef uint16_t Grayscale16Pixel; // structs /** Holds an OpenNI version number, which consists of four separate numbers in the format: @c major.minor.maintenance.build. For example: 2.0.0.20. */ typedef struct { /** Major version number, incremented for major API restructuring. */ int major; /** Minor version number, incremented when significant new features added. 
*/ int minor; /** Maintenance build number, incremented for new releases that primarily provide minor bug fixes. */ int maintenance; /** Build number. Incremented for each new API build. Generally not shown on the installer and download site. */ int build; } Version; /** Holds the value of a single color image pixel in 24-bit RGB format. */ typedef struct { /* Red value of this pixel. */ uint8_t r; /* Green value of this pixel. */ uint8_t g; /* Blue value of this pixel. */ uint8_t b; } RGB888Pixel; /** Holds the value of two pixels in YUV422 format (Luminance/Chrominance,16-bits/pixel). The first pixel has the values y1, u, v. The second pixel has the values y2, u, v. */ typedef struct { /** First chrominance value for two pixels, stored as blue luminance difference signal. */ uint8_t u; /** Overall luminance value of first pixel. */ uint8_t y1; /** Second chrominance value for two pixels, stored as red luminance difference signal. */ uint8_t v; /** Overall luminance value of second pixel. */ uint8_t y2; } YUV422DoublePixel; /** This special URI can be passed to @ref Device::open() when the application has no concern for a specific device. */ #if ONI_PLATFORM != ONI_PLATFORM_WIN32 #pragma GCC diagnostic ignored "-Wunused-variable" #pragma GCC diagnostic push #endif static const char* ANY_DEVICE = NULL; #if ONI_PLATFORM != ONI_PLATFORM_WIN32 #pragma GCC diagnostic pop #endif /** Provides a simple array class used throughout the API. Wraps a primitive array of objects, holding the elements and their count. */ template class Array { public: /** Default constructor. Creates an empty Array and sets the element count to zero. */ Array() : m_data(NULL), m_count(0), m_owner(false) {} /** Constructor. Creates new Array from an existing primitive array of known size. @tparam [in] T Object type this Array will contain. @param [in] data Pointer to a primitive array of objects of type T. @param [in] count Number of elements in the primitive array pointed to by data. 
*/ Array(const T* data, int count) : m_owner(false) { _setData(data, count); } /** Destructor. Destroys the Array object. */ ~Array() { clear(); } /** Getter function for the Array size. @returns Current number of elements in the Array. */ int getSize() const { return m_count; } /** Implements the array indexing operator for the Array class. */ const T& operator[](int index) const {return m_data[index];} /** @internal Setter function for data. Causes this array to wrap an existing primitive array of specified type. The optional data ownership flag controls whether the primitive array this Array wraps will be destroyed when this Array is deconstructed. @param [in] T Type of objects array will contain. @param [in] data Pointer to first object in list. @param [in] count Number of objects in list. @param [in] isOwner Optional flag to indicate data ownership */ void _setData(const T* data, int count, bool isOwner = false) { clear(); m_count = count; m_owner = isOwner; if (!isOwner) { m_data = data; } else { m_data = new T[count]; memcpy((void*)m_data, data, count*sizeof(T)); } } private: Array(const Array&); Array& operator=(const Array&); void clear() { if (m_owner && m_data != NULL) delete []m_data; m_owner = false; m_data = NULL; m_count = 0; } const T* m_data; int m_count; bool m_owner; }; // Forward declaration of all class SensorInfo; class VideoStream; class VideoFrameRef; class Device; class OpenNI; class CameraSettings; class PlaybackControl; /** Encapsulates a group of settings for a @ref VideoStream. Settings stored include frame rate, resolution, and pixel format. This class is used as an input for changing the settings of a @ref VideoStream, as well as an output for reporting the current settings of that class. It is also used by @ref SensorInfo to report available video modes of a stream. 
Recommended practice is to use @ref SensorInfo::getSupportedVideoModes() to obtain a list of valid
video modes, and then to use items from that list to pass new settings to @ref VideoStream.  This
is much less likely to produce an invalid video mode than instantiating and manually changing
objects of this class.
*/
class VideoMode : private OniVideoMode
{
public:
	/**
	Default constructor, creates an empty VideoMode object.  Application programs should, in most
	cases, use the copy constructor to copy an existing valid video mode.  This is much less error
	prone than creating and attempting to configure a new VideoMode from scratch.
	*/
	VideoMode()
	{}

	/**
	Copy constructor, creates a new VideoMode identical to an existing VideoMode.
	@param [in] other Existing VideoMode to copy.
	*/
	VideoMode(const VideoMode& other)
	{
		*this = other;
	}

	/**
	Assignment operator.  Sets the pixel format, frame rate, and resolution of this VideoMode to
	equal that of a different VideoMode.
	@param [in] other Existing VideoMode to copy settings from.
	*/
	VideoMode& operator=(const VideoMode& other)
	{
		setPixelFormat(other.getPixelFormat());
		setResolution(other.getResolutionX(), other.getResolutionY());
		setFps(other.getFps());
		return *this;
	}

	/**
	Getter function for the pixel format of this VideoMode.
	@returns Current pixel format setting of this VideoMode.
	*/
	PixelFormat getPixelFormat() const { return (PixelFormat)pixelFormat; }

	/**
	Getter function for the X resolution of this VideoMode.
	@returns Current horizontal resolution of this VideoMode, in pixels.
	*/
	int getResolutionX() const { return resolutionX; }

	/**
	Getter function for the Y resolution of this VideoMode.
	@returns Current vertical resolution of this VideoMode, in pixels.
	*/
	int getResolutionY() const {return resolutionY;}

	/**
	Getter function for the frame rate of this VideoMode.
	@returns Current frame rate, measured in frames per second.
	*/
	int getFps() const { return fps; }

	/**
	Setter function for the pixel format of this VideoMode.  Application use of this function is
	not recommended.  Instead, use @ref SensorInfo::getSupportedVideoModes() to obtain a list of
	valid video modes.
	@param [in] format Desired new pixel format for this VideoMode.
	*/
	void setPixelFormat(PixelFormat format) { this->pixelFormat = (OniPixelFormat)format; }

	/**
	Setter function for the resolution of this VideoMode.  Application use of this function is not
	recommended.  Instead, use @ref SensorInfo::getSupportedVideoModes() to obtain a list of valid
	video modes.
	@param [in] resolutionX Desired new horizontal resolution in pixels.
	@param [in] resolutionY Desired new vertical resolution in pixels.
	*/
	void setResolution(int resolutionX, int resolutionY)
	{
		this->resolutionX = resolutionX;
		this->resolutionY = resolutionY;
	}

	/**
	Setter function for the frame rate.  Application use of this function is not recommended.
	Instead, use @ref SensorInfo::getSupportedVideoModes() to obtain a list of valid video modes.
	@param [in] fps Desired new frame rate, measured in frames per second.
	*/
	void setFps(int fps) { this->fps = fps; }

	// These classes need direct access to the underlying OniVideoMode fields.
	friend class SensorInfo;
	friend class VideoStream;
	friend class VideoFrameRef;
};

/**
The SensorInfo class encapsulates all info related to a specific sensor in a specific device.

A @ref Device object holds a SensorInfo object for each sensor it contains.  A @ref VideoStream
object holds one SensorInfo object, describing the sensor used to produce that stream.

A given SensorInfo object will contain the type of the sensor (Depth, IR or Color), and a list of
all video modes that the sensor can support.  Each available video mode will have a single
VideoMode object that can be queried to get the details of that mode.

SensorInfo objects should be the only source of VideoMode objects for the vast majority of
application programs.

Application programs will never directly instantiate objects of type SensorInfo.  In fact, no
public constructors are provided.
SensorInfo objects should be obtained either from a Device or @ref VideoStream, and in turn be used to provide available video modes for that sensor. */ class SensorInfo { public: /** Provides the sensor type of the sensor this object is associated with. @returns Type of the sensor. */ SensorType getSensorType() const { return (SensorType)m_pInfo->sensorType; } /** Provides a list of video modes that this sensor can support. This function is the recommended method to be used by applications to obtain @ref VideoMode objects. @returns Reference to an array of @ref VideoMode objects, one for each supported video mode. */ const Array& getSupportedVideoModes() const { return m_videoModes; } private: SensorInfo(const SensorInfo&); SensorInfo& operator=(const SensorInfo&); SensorInfo() : m_pInfo(NULL), m_videoModes(NULL, 0) {} SensorInfo(const OniSensorInfo* pInfo) : m_pInfo(NULL), m_videoModes(NULL, 0) { _setInternal(pInfo); } void _setInternal(const OniSensorInfo* pInfo) { m_pInfo = pInfo; if (pInfo == NULL) { m_videoModes._setData(NULL, 0); } else { m_videoModes._setData(static_cast(pInfo->pSupportedVideoModes), pInfo->numSupportedVideoModes); } } const OniSensorInfo* m_pInfo; Array m_videoModes; friend class VideoStream; friend class Device; }; /** The DeviceInfo class encapsulates info related to a specific device. Applications will generally obtain objects of this type via calls to @ref OpenNI::enumerateDevices() or @ref openni::Device::getDeviceInfo(), and then use the various accessor functions to obtain specific information on that device. There should be no reason for application code to instantiate this object directly. */ class DeviceInfo : private OniDeviceInfo { public: /** Returns the device URI. URI can be used by @ref Device::open to open a specific device. The URI string format is determined by the driver. */ const char* getUri() const { return uri; } /** Returns a the vendor name for this device. 
*/
	const char* getVendor() const { return vendor; }
	/** Returns the device name for this device. */
	const char* getName() const { return name; }
	/** Returns the USB VID code for this device. */
	uint16_t getUsbVendorId() const { return usbVendorId; }
	/** Returns the USB PID code for this device. */
	uint16_t getUsbProductId() const { return usbProductId; }

	friend class Device;
	friend class OpenNI;
};

/**
The @ref VideoFrameRef class encapsulates a single video frame - the output of a @ref VideoStream
at a specific time.  The data contained will be a single frame of color, IR, or depth video, along
with associated meta data.

An object of type @ref VideoFrameRef does not actually hold the data of the frame, but only a
reference to it.  The reference can be released by destroying the @ref VideoFrameRef object, or by
calling the @ref release() method.  The actual data of the frame is freed when the last reference
to it is released.

The usual way to obtain @ref VideoFrameRef objects is by a call to @ref VideoStream::readFrame().

All data referenced by a @ref VideoFrameRef is stored as a primitive array of pixels.  Each pixel
will be of a type according to the configured pixel format (see @ref VideoMode).
*/
class VideoFrameRef
{
public:
	/**
	Default constructor.  Creates a new empty @ref VideoFrameRef object.  This object will be invalid
	until initialized by a call to @ref VideoStream::readFrame().
	*/
	VideoFrameRef()
	{
		m_pFrame = NULL;
	}

	/**
	Destroy this object and release the reference to the frame.
	*/
	~VideoFrameRef()
	{
		release();
	}

	/**
	Copy constructor.  Creates a new @ref VideoFrameRef object.  The newly created object will
	reference the same frame the current object references.
	@param [in] other Another @ref VideoFrameRef object.
	*/
	VideoFrameRef(const VideoFrameRef& other) : m_pFrame(NULL)
	{
		_setFrame(other.m_pFrame);
	}

	/**
	Make this @ref VideoFrameRef object reference the same frame that the @c other frame references.
If this object referenced another frame before calling this method, the previous frame will be released. @param [in] other Another @ref VideoFrameRef object. */ VideoFrameRef& operator=(const VideoFrameRef& other) { _setFrame(other.m_pFrame); return *this; } /** Getter function for the size of the data contained by this object. Useful primarily when allocating buffers. @returns Current size of data pointed to by this object, measured in bytes. */ inline int getDataSize() const { return m_pFrame->dataSize; } /** Getter function for the array of data pointed to by this object. @returns Pointer to the actual frame data array. Type of data pointed to can be determined according to the pixel format (can be obtained by calling @ref getVideoMode()). */ inline const void* getData() const { return m_pFrame->data; } /** Getter function for the sensor type used to produce this frame. Used to determine whether this is an IR, Color or Depth frame. See the @ref SensorType enumeration for all possible return values from this function. @returns The type of sensor used to produce this frame. */ inline SensorType getSensorType() const { return (SensorType)m_pFrame->sensorType; } /** Returns a reference to the @ref VideoMode object assigned to this frame. This object describes the video mode the sensor was configured to when the frame was produced and can be used to determine the pixel format and resolution of the data. It will also provide the frame rate that the sensor was running at when it recorded this frame. @returns Reference to the @ref VideoMode assigned to this frame. */ inline const VideoMode& getVideoMode() const { return static_cast(m_pFrame->videoMode); } /** Provides a timestamp for the frame. The 'zero' point for this stamp is implementation specific, but all streams from the same device are guaranteed to use the same zero. This value can therefore be used to compute time deltas between frames from the same device, regardless of whether they are from the same stream. 
	@returns Timestamp of frame, measured in microseconds from an arbitrary zero
	*/
	inline uint64_t getTimestamp() const
	{
		return m_pFrame->timestamp;
	}

	/**
	Frames are provided sequential frame ID numbers by the sensor that produced them.  If frame
	synchronization has been enabled for a device via @ref Device::setDepthColorSyncEnabled(), then
	frame numbers for corresponding frames of depth and color are guaranteed to match.

	If frame synchronization is not enabled, then there is no guarantee of matching frame indexes
	between @ref VideoStream "VideoStreams".  In the latter case, applications should use timestamps
	instead of frame indexes to align frames in time.
	@returns Index number for this frame.
	*/
	inline int getFrameIndex() const
	{
		return m_pFrame->frameIndex;
	}

	/**
	Gives the current width of this frame, measured in pixels.  If cropping is enabled, this will be
	the width of the cropping window.  If cropping is not enabled, then this will simply be equal to
	the X resolution of the @ref VideoMode used to produce this frame.
	@returns Width of this frame in pixels.
	*/
	inline int getWidth() const
	{
		return m_pFrame->width;
	}

	/**
	Gives the current height of this frame, measured in pixels.  If cropping is enabled, this will
	be the length of the cropping window.  If cropping is not enabled, then this will simply be
	equal to the Y resolution of the @ref VideoMode used to produce this frame.
	@returns Height of this frame in pixels.
	*/
	inline int getHeight() const
	{
		return m_pFrame->height;
	}

	/**
	Indicates whether cropping was enabled when the frame was produced.
	@return true if cropping is enabled, false otherwise
	*/
	inline bool getCroppingEnabled() const
	{
		return m_pFrame->croppingEnabled == TRUE;
	}

	/**
	Indicates the X coordinate of the upper left corner of the crop window.
	@return Distance of crop origin from left side of image, in pixels.
	*/
	inline int getCropOriginX() const
	{
		return m_pFrame->cropOriginX;
	}

	/**
	Indicates the Y coordinate of the upper left corner of the crop window.
	@return Distance of crop origin from top of image, in pixels.
	*/
	inline int getCropOriginY() const
	{
		return m_pFrame->cropOriginY;
	}

	/**
	Gives the length of one row of pixels, measured in bytes.  Primarily useful for indexing the
	array which contains the data.
	@returns Stride of the array which contains the image for this frame, in bytes
	*/
	inline int getStrideInBytes() const
	{
		return m_pFrame->stride;
	}

	/**
	Check if this object references an actual frame.
	*/
	inline bool isValid() const
	{
		return m_pFrame != NULL;
	}

	/**
	Release the reference to the frame.  Once this method is called, the object becomes invalid,
	and no method should be called other than the assignment operator, or passing this object to
	a @ref VideoStream::readFrame() call.
	*/
	void release()
	{
		if (m_pFrame != NULL)
		{
			oniFrameRelease(m_pFrame);
			m_pFrame = NULL;
		}
	}

	/** @internal
	Replace the referenced frame and take an additional reference on the new one (the reference
	implicitly held by the caller is left untouched).
	*/
	void _setFrame(OniFrame* pFrame)
	{
		setReference(pFrame);
		if (pFrame != NULL)
		{
			oniFrameAddRef(pFrame);
		}
	}

	/** @internal Raw access to the underlying C frame, for use with the C API. */
	OniFrame* _getFrame()
	{
		return m_pFrame;
	}

private:
	friend class VideoStream;
	// Takes over a frame without adding a reference: this adopts the single reference that
	// the OpenNI C layer already handed to the caller (e.g. from oniStreamReadFrame).
	inline void setReference(OniFrame* pFrame)
	{
		// Initial - don't addref. This is the reference from OpenNI
		release();
		m_pFrame = pFrame;
	}

	OniFrame* m_pFrame; // NOTE(review): candidate for const-qualification; left mutable as the C API takes OniFrame*
};

/**
The @ref VideoStream object encapsulates a single video stream from a device.  Once created, it is
used to start data flow from the device, and to read individual frames of data.  This is the
central class used to obtain data in OpenNI.  It provides the ability to manually read data in a
polling loop, as well as providing events and a Listener class that can be used to implement event
driven data acquisition.

Aside from the video data frames themselves, the class offers a number of functions used for
obtaining information about a @ref VideoStream.  Field of view, available video modes, and minimum
and maximum valid pixel values can all be obtained.
In addition to obtaining data, the @ref VideoStream object is used to set all configuration
properties that apply to a specific stream (rather than to an entire device).  In particular, it is
used to control cropping, mirroring, and video modes.

A pointer to a valid, initialized device that provides the desired stream type is required to
create a stream.

Several video streams can be created to stream data from the same sensor.  This is useful if
several components of an application need to read frames separately.

While some devices might allow different streams from the same sensor to have different
configurations, most devices will have a single configuration for the sensor, shared by all
streams.
*/
class VideoStream
{
public:
	/**
	The @ref VideoStream::NewFrameListener class is provided to allow the implementation of event
	driven frame reading.  To use it, create a class that inherits from it and override the
	onNewFrame() method.  Then, register your created class with an active @ref VideoStream using
	the @ref VideoStream::addNewFrameListener() function.  Once this is done, the event handler
	function you implemented will be called whenever a new frame becomes available.  You may call
	@ref VideoStream::readFrame() from within the event handler.
	*/
	class NewFrameListener
	{
	public:
		/**
		Default constructor.
		*/
		NewFrameListener() : m_callbackHandle(NULL)
		{
		}

		virtual ~NewFrameListener()
		{
		}

		/**
		Derived classes should implement this function to handle new frames.
		*/
		virtual void onNewFrame(VideoStream&) = 0;

	private:
		friend class VideoStream;

		// Trampoline registered with the C API: wraps the raw handle in a temporary,
		// non-owning VideoStream before dispatching to the virtual handler.
		static void ONI_CALLBACK_TYPE callback(OniStreamHandle streamHandle, void* pCookie)
		{
			NewFrameListener* pListener = (NewFrameListener*)pCookie;
			VideoStream stream;
			stream._setHandle(streamHandle);
			pListener->onNewFrame(stream);
			stream._setHandle(NULL);
		}
		OniCallbackHandle m_callbackHandle;
	};

	/**
	Interface for user-supplied frame buffer allocation.  Implement both methods and pass an
	instance to @ref setFrameBuffersAllocator() to control how frame memory is obtained.
	*/
	class FrameAllocator
	{
	public:
		virtual ~FrameAllocator() {}
		virtual void* allocateFrameBuffer(int size) = 0;
		virtual void freeFrameBuffer(void* data) = 0;

	private:
		friend class VideoStream;

		// C-API trampolines forwarding to the virtual allocate/free methods.
		static void* ONI_CALLBACK_TYPE allocateFrameBufferCallback(int size, void* pCookie)
		{
			FrameAllocator* pThis = (FrameAllocator*)pCookie;
			return pThis->allocateFrameBuffer(size);
		}

		static void ONI_CALLBACK_TYPE freeFrameBufferCallback(void* data, void* pCookie)
		{
			FrameAllocator* pThis = (FrameAllocator*)pCookie;
			pThis->freeFrameBuffer(data);
		}
	};

	/**
	Default constructor.  Creates a new, non-valid @ref VideoStream object.  The object created will
	be invalid until its create() function is called with a valid Device.
	*/
	VideoStream() : m_stream(NULL), m_sensorInfo(), m_pCameraSettings(NULL), m_isOwner(true)
	{}

	/**
	Handle constructor.  Creates a VideoStream object based on the given initialized handle.
	This object will not destroy the underlying handle when @ref destroy() or destructor is called.
	*/
	explicit VideoStream(OniStreamHandle handle) : m_stream(NULL), m_sensorInfo(), m_pCameraSettings(NULL), m_isOwner(false)
	{
		_setHandle(handle);
	}

	/**
	Destructor.  The destructor calls the destroy() function, but it is considered a best practice
	for applications to call destroy() manually on any @ref VideoStream that they run create() on.
	*/
	~VideoStream()
	{
		destroy();
	}

	/**
	Checks to see if this object has been properly initialized and currently points to a valid
	stream.
	@returns true if this object has been previously initialized, false otherwise.
	*/
	bool isValid() const
	{
		return m_stream != NULL;
	}

	/**
	Creates a stream of frames from a specific sensor type of a specific device.  You must supply
	a reference to a Device that supplies the sensor type requested.  You can use
	@ref Device::hasSensor() to check whether a given sensor is available on your target device
	before calling create().
	@param [in] device A reference to the @ref Device you want to create the stream on.
	@param [in] sensorType The type of sensor the stream should produce data from.
	@returns Status code indicating success or failure for this operation.
	*/
	inline Status create(const Device& device, SensorType sensorType);

	/**
	Destroy this stream.  This function is currently called automatically by the destructor, but it
	is considered a best practice for applications to manually call this function on any
	@ref VideoStream that they call create() for.
	*/
	inline void destroy();

	/**
	Provides the @ref SensorInfo object associated with the sensor that is producing this
	@ref VideoStream.  Note that before the stream has been initialized with the create() function,
	the returned SensorInfo is empty.  @ref SensorInfo is useful primarily as a means of learning
	which video modes are valid for this VideoStream.
	@returns Reference to the SensorInfo object associated with the sensor providing this stream.
	*/
	const SensorInfo& getSensorInfo() const
	{
		return m_sensorInfo;
	}

	/**
	Starts data generation from this video stream.
	*/
	Status start()
	{
		if (!isValid())
		{
			return STATUS_ERROR;
		}
		return (Status)oniStreamStart(m_stream);
	}

	/**
	Stops data generation from this video stream.
	*/
	void stop()
	{
		if (!isValid())
		{
			return;
		}
		oniStreamStop(m_stream);
	}

	/**
	Read the next frame from this video stream, delivered as a @ref VideoFrameRef.  This is the
	primary method for manually obtaining frames of video data.  If no new frame is available, the
	call will block until one is available.  To avoid blocking, use
	@ref VideoStream::NewFrameListener to implement an event driven architecture.
	Another alternative is to use @ref OpenNI::waitForAnyStream() to wait for new frames from
	several streams.
	@param [out] pFrame Pointer to a @ref VideoFrameRef object to hold the reference to the new
	frame.
	@returns Status code to indicated success or failure of this function.
	*/
	Status readFrame(VideoFrameRef* pFrame)
	{
		if (!isValid())
		{
			return STATUS_ERROR;
		}
		OniFrame* pOniFrame;
		Status rc = (Status)oniStreamReadFrame(m_stream, &pOniFrame);
		// Adopt the single reference handed out by the C layer (no extra addref).
		pFrame->setReference(pOniFrame);
		return rc;
	}

	/**
	Adds a new Listener to receive this VideoStream onNewFrame event.  See
	@ref VideoStream::NewFrameListener for more information on implementing an event driven frame
	reading architecture.  An instance of a listener can be added to only one source.
	@param [in] pListener Pointer to a @ref VideoStream::NewFrameListener object (or a derivative)
	that will respond to this event.
	@returns Status code indicating success or failure of the operation.
	*/
	Status addNewFrameListener(NewFrameListener* pListener)
	{
		if (!isValid())
		{
			return STATUS_ERROR;
		}
		return (Status)oniStreamRegisterNewFrameCallback(m_stream, pListener->callback, pListener, &pListener->m_callbackHandle);
	}

	/**
	Removes a Listener from this video stream list.  The listener removed will no longer receive
	new frame events from this stream.
	@param [in] pListener Pointer to the listener object to be removed.
	*/
	void removeNewFrameListener(NewFrameListener* pListener)
	{
		if (!isValid())
		{
			return;
		}
		oniStreamUnregisterNewFrameCallback(m_stream, pListener->m_callbackHandle);
		pListener->m_callbackHandle = NULL;
	}

	/**
	Sets the frame buffers allocator for this video stream.
	@param [in] pAllocator Pointer to the frame buffers allocator object.  Pass NULL to return to
	default frame allocator.
	@returns ONI_STATUS_OUT_OF_FLOW The frame buffers allocator cannot be set while stream is
	streaming.
	*/
	Status setFrameBuffersAllocator(FrameAllocator* pAllocator)
	{
		if (!isValid())
		{
			return STATUS_ERROR;
		}
		if (pAllocator == NULL)
		{
			// NULL callbacks restore the default internal allocator.
			return (Status)oniStreamSetFrameBuffersAllocator(m_stream, NULL, NULL, NULL);
		}
		else
		{
			return (Status)oniStreamSetFrameBuffersAllocator(m_stream, pAllocator->allocateFrameBufferCallback, pAllocator->freeFrameBufferCallback, pAllocator);
		}
	}

	/** @internal
	Get an internal handle.  This handle can be used via the C API.
	*/
	OniStreamHandle _getHandle() const
	{
		return m_stream;
	}

	/**
	Gets an object through which several camera settings can be configured.
	@returns NULL if the stream doesn't support camera settings.
	*/
	CameraSettings* getCameraSettings() {return m_pCameraSettings;}

	/**
	General function for obtaining the value of stream specific properties.  There are convenience
	functions available for all commonly used properties, so it is not expected that applications
	will make direct use of the getProperty function very often.
	@param [in] propertyId The numerical ID of the property to be queried.
	@param [out] data Place to store the value of the property.
	@param [in,out] dataSize IN: Size of the buffer passed in the @c data argument.  OUT: the
	actual written size.
	@returns Status code indicating success or failure of this operation.
	*/
	Status getProperty(int propertyId, void* data, int* dataSize) const
	{
		if (!isValid())
		{
			return STATUS_ERROR;
		}
		return (Status)oniStreamGetProperty(m_stream, propertyId, data, dataSize);
	}

	/**
	General function for setting the value of stream specific properties.  There are convenience
	functions available for all commonly used properties, so it is not expected that applications
	will make direct use of the setProperty function very often.
	@param [in] propertyId The numerical ID of the property to be set.
	@param [in] data Place to store the data to be written to the property.
	@param [in] dataSize Size of the data to be written to the property.
	@returns Status code indicating success or failure of this operation.
*/ Status setProperty(int propertyId, const void* data, int dataSize) { if (!isValid()) { return STATUS_ERROR; } return (Status)oniStreamSetProperty(m_stream, propertyId, data, dataSize); } /** Get the current video mode information for this video stream. This includes its resolution, fps and stream format. @returns Current video mode information for this video stream. */ VideoMode getVideoMode() const { VideoMode videoMode; getProperty(STREAM_PROPERTY_VIDEO_MODE, static_cast(&videoMode)); return videoMode; } /** Changes the current video mode of this stream. Recommended practice is to use @ref Device::getSensorInfo(), and then @ref SensorInfo::getSupportedVideoModes() to obtain a list of valid video mode settings for this stream. Then, pass a valid @ref VideoMode to @ref setVideoMode to ensure correct operation. @param [in] videoMode Desired new video mode for this stream. returns Status code indicating success or failure of this operation. */ Status setVideoMode(const VideoMode& videoMode) { return setProperty(STREAM_PROPERTY_VIDEO_MODE, static_cast(videoMode)); } /** Provides the maximum possible value for pixels obtained by this stream. This is most useful for getting the maximum possible value of depth streams. @returns Maximum possible pixel value. */ int getMaxPixelValue() const { int maxValue; Status rc = getProperty(STREAM_PROPERTY_MAX_VALUE, &maxValue); if (rc != STATUS_OK) { return 0; } return maxValue; } /** Provides the smallest possible value for pixels obtains by this VideoStream. This is most useful for getting the minimum possible value that will be reported by a depth stream. @returns Minimum possible pixel value that can come from this stream. */ int getMinPixelValue() const { int minValue; Status rc = getProperty(STREAM_PROPERTY_MIN_VALUE, &minValue); if (rc != STATUS_OK) { return 0; } return minValue; } /** Checks whether this stream supports cropping. @returns true if the stream supports cropping, false if it does not. 
	*/
	bool isCroppingSupported() const
	{
		return isPropertySupported(STREAM_PROPERTY_CROPPING);
	}

	/**
	Obtains the current cropping settings for this stream.
	@param [out] pOriginX X coordinate of the upper left corner of the cropping window
	@param [out] pOriginY Y coordinate of the upper left corner of the cropping window
	@param [out] pWidth Horizontal width of the cropping window, in pixels
	@param [out] pHeight Vertical height of the cropping window, in pixels
	@returns true if cropping is currently enabled, false if it is not.  Note that the output
	parameters are only written when the underlying property read succeeds.
	*/
	bool getCropping(int* pOriginX, int* pOriginY, int* pWidth, int* pHeight) const
	{
		OniCropping cropping;
		bool enabled = false;

		Status rc = getProperty(STREAM_PROPERTY_CROPPING, &cropping);

		if (rc == STATUS_OK)
		{
			*pOriginX = cropping.originX;
			*pOriginY = cropping.originY;
			*pWidth = cropping.width;
			*pHeight = cropping.height;
			enabled = (cropping.enabled == TRUE);
		}

		return enabled;
	}

	/**
	Changes the cropping settings for this stream.  You can use the @ref isCroppingSupported()
	function to make sure cropping is supported before calling this function.
	@param [in] originX New X coordinate of the upper left corner of the cropping window.
	@param [in] originY New Y coordinate of the upper left corner of the cropping window.
	@param [in] width New horizontal width for the cropping window, in pixels.
	@param [in] height New vertical height for the cropping window, in pixels.
	@returns Status code indicating success or failure of this operation.
	*/
	Status setCropping(int originX, int originY, int width, int height)
	{
		OniCropping cropping;

		cropping.enabled = true;

		cropping.originX = originX;
		cropping.originY = originY;
		cropping.width = width;
		cropping.height = height;

		return setProperty(STREAM_PROPERTY_CROPPING, cropping);
	}

	/**
	Disables cropping.
	@returns Status code indicating success or failure of this operation.
*/ Status resetCropping() { OniCropping cropping; cropping.enabled = false; return setProperty(STREAM_PROPERTY_CROPPING, cropping); } /** Check whether mirroring is currently turned on for this stream. @returns true if mirroring is currently enabled, false otherwise. */ bool getMirroringEnabled() const { OniBool enabled; Status rc = getProperty(STREAM_PROPERTY_MIRRORING, &enabled); if (rc != STATUS_OK) { return false; } return enabled == TRUE; } /** Enable or disable mirroring for this stream. @param [in] isEnabled true to enable mirroring, false to disable it. @returns Status code indicating the success or failure of this operation. */ Status setMirroringEnabled(bool isEnabled) { return setProperty(STREAM_PROPERTY_MIRRORING, isEnabled ? TRUE : FALSE); } /** Gets the horizontal field of view of frames received from this stream. @returns Horizontal field of view, in radians. */ float getHorizontalFieldOfView() const { float horizontal = 0; getProperty(STREAM_PROPERTY_HORIZONTAL_FOV, &horizontal); return horizontal; } /** Gets the vertical field of view of frames received from this stream. @returns Vertical field of view, in radians. */ float getVerticalFieldOfView() const { float vertical = 0; getProperty(STREAM_PROPERTY_VERTICAL_FOV, &vertical); return vertical; } /** Function for setting a value of a stream property using an arbitrary input type. There are convenience functions available for all commonly used properties, so it is not expected that applications will make direct use of this function very often. @tparam [in] T Data type of the value to be passed to the property. @param [in] propertyId The numerical ID of the property to be set. @param [in] value Data to be sent to the property. @returns Status code indicating success or failure of this operation. */ template Status setProperty(int propertyId, const T& value) { return setProperty(propertyId, &value, sizeof(T)); } /** Function for getting the value from a property using an arbitrary output type. 
There are convenience functions available for all commonly used properties, so it is not expected that applications will make direct use of this function very often. @tparam [in] T Data type of the value to be read. @param [in] propertyId The numerical ID of the property to be read. @param [in, out] value Pointer to a place to store the value read from the property. @returns Status code indicating success or failure of this operation. */ template Status getProperty(int propertyId, T* value) const { int size = sizeof(T); return getProperty(propertyId, value, &size); } /** Checks if a specific property is supported by the video stream. @param [in] propertyId Property to be checked. @returns true if the property is supported, false otherwise. */ bool isPropertySupported(int propertyId) const { if (!isValid()) { return false; } return oniStreamIsPropertySupported(m_stream, propertyId) == TRUE; } /** Invokes a command that takes an arbitrary data type as its input. It is not expected that application code will need this function frequently, as all commonly used properties have higher level functions provided. @param [in] commandId Numerical code of the property to be invoked. @param [in] data Data to be passed to the property. @param [in] dataSize size of the buffer passed in @c data. @returns Status code indicating success or failure of this operation. */ Status invoke(int commandId, void* data, int dataSize) { if (!isValid()) { return STATUS_ERROR; } return (Status)oniStreamInvoke(m_stream, commandId, data, dataSize); } /** Invokes a command that takes an arbitrary data type as its input. It is not expected that application code will need this function frequently, as all commonly used properties have higher level functions provided. @tparam [in] T Type of data to be passed to the property. @param [in] commandId Numerical code of the property to be invoked. @param [in] value Data to be passed to the property. 
@returns Status code indicating success or failure of this operation. */ template Status invoke(int commandId, T& value) { return invoke(commandId, &value, sizeof(T)); } /** Checks if a specific command is supported by the video stream. @param [in] commandId Command to be checked. @returns true if the command is supported, false otherwise. */ bool isCommandSupported(int commandId) const { if (!isValid()) { return false; } return (Status)oniStreamIsCommandSupported(m_stream, commandId) == TRUE; } private: friend class Device; void _setHandle(OniStreamHandle stream) { m_sensorInfo._setInternal(NULL); m_stream = stream; if (stream != NULL) { m_sensorInfo._setInternal(oniStreamGetSensorInfo(m_stream)); } } private: VideoStream(const VideoStream& other); VideoStream& operator=(const VideoStream& other); OniStreamHandle m_stream; SensorInfo m_sensorInfo; CameraSettings* m_pCameraSettings; bool m_isOwner; }; /** The Device object abstracts a specific device; either a single hardware device, or a file device holding a recording from a hardware device. It offers the ability to connect to the device, and obtain information about its configuration and the data streams it can offer. It provides the means to query and change all configuration parameters that apply to the device as a whole. This includes enabling depth/color image registration and frame synchronization. Devices are used when creating and initializing @ref VideoStream "VideoStreams" -- you will need a valid pointer to a Device in order to use the VideoStream.create() function. This, along with configuration, is the primary use of this class for application developers. Before devices can be created, @ref OpenNI::initialize() must have been run to make the device drivers on the system available to the API. */ class Device { public: /** Default constructor. Creates a new empty Device object. This object will be invalid until it is initialized by calling its open() function. 
*/ Device() : m_pPlaybackControl(NULL), m_device(NULL), m_isOwner(true) { clearSensors(); } /** Handle constructor. Creates a Device object based on the given initialized handle. This object will not destroy the underlying handle when @ref close() or destructor is called */ explicit Device(OniDeviceHandle handle) : m_pPlaybackControl(NULL), m_device(NULL), m_isOwner(false) { _setHandle(handle); } /** The destructor calls the @ref close() function, but it is considered a best practice for applications to call @ref close() manually on any @ref Device that they run @ref open() on. */ ~Device() { if (m_device != NULL) { close(); } } /** Opens a device. This can either open a device chosen arbitrarily from all devices on the system, or open a specific device selected by passing this function the device URI. To open any device, simply pass the constant@ref ANY_DEVICE to this function. If multiple devices are connected to the system, then one of them will be opened. This procedure is most useful when it is known that exactly one device is (or can be) connected to the system. In that case, requesting a list of all devices and iterating through it would be a waste of effort. If multiple devices are (or may be) connected to a system, then a URI will be required to select a specific device to open. There are two ways to obtain a URI: from a DeviceConnected event, or by calling @ref OpenNI::enumerateDevices(). In the case of a DeviceConnected event, the @ref OpenNI::Listener will be provided with a DeviceInfo object as an argument to its @ref OpenNI::Listener::onDeviceConnected "onDeviceConnected()" function. The DeviceInfo.getUri() function can then be used to obtain the URI. If the application is not using event handlers, then it can also call the static function @ref OpenNI::enumerateDevices(). This will return an array of @ref DeviceInfo objects, one for each device currently available to the system. 
The application can then iterate through this list and select the desired device. The URI is again obtained via the @ref DeviceInfo::getUri() function. Standard codes of type Status are returned indicating whether opening was successful. @param [in] uri String containing the URI of the device to be opened, or @ref ANY_DEVICE. @returns Status code with the outcome of the open operation. @remark For opening a recording file, pass the file path as a uri. */ inline Status open(const char* uri); /** Closes the device. This properly closes any files or shuts down hardware, as appropriate. This function is currently called by the destructor if not called manually by application code, but it is considered a best practice to manually close any device that was opened. */ inline void close(); /** Provides information about this device in the form of a DeviceInfo object. This object can be used to access the URI of the device, as well as various USB descriptor strings that might be useful to an application. Note that valid device info will not be available if this device has not yet been opened. If you are trying to obtain a URI to open a device, use OpenNI::enumerateDevices() instead. @returns DeviceInfo object for this Device */ const DeviceInfo& getDeviceInfo() const { return m_deviceInfo; } /** This function checks to see if one of the specific sensor types defined in @ref SensorType is available on this device. This allows an application to, for example, query for the presence of a depth sensor, or color sensor. @param [in] sensorType of sensor to query for @returns true if the Device supports the sensor queried, false otherwise. 
*/ bool hasSensor(SensorType sensorType) { int i; for (i = 0; (i < ONI_MAX_SENSORS) && (m_aSensorInfo[i].m_pInfo != NULL); ++i) { if (m_aSensorInfo[i].getSensorType() == sensorType) { return true; } } if (i == ONI_MAX_SENSORS) { return false; } const OniSensorInfo* pInfo = oniDeviceGetSensorInfo(m_device, (OniSensorType)sensorType); if (pInfo == NULL) { return false; } m_aSensorInfo[i]._setInternal(pInfo); return true; } /** Get the @ref SensorInfo for a specific sensor type on this device. The @ref SensorInfo is useful primarily for determining which video modes are supported by the sensor. @param [in] sensorType of sensor to get information about. @returns SensorInfo object corresponding to the sensor type specified, or NULL if such a sensor is not available from this device. */ const SensorInfo* getSensorInfo(SensorType sensorType) { int i; for (i = 0; (i < ONI_MAX_SENSORS) && (m_aSensorInfo[i].m_pInfo != NULL); ++i) { if (m_aSensorInfo[i].getSensorType() == sensorType) { return &m_aSensorInfo[i]; } } // not found. check to see we have additional space if (i == ONI_MAX_SENSORS) { return NULL; } const OniSensorInfo* pInfo = oniDeviceGetSensorInfo(m_device, (OniSensorType)sensorType); if (pInfo == NULL) { return NULL; } m_aSensorInfo[i]._setInternal(pInfo); return &m_aSensorInfo[i]; } /** @internal Get an internal handle. This handle can be used via the C API. */ OniDeviceHandle _getHandle() const { return m_device; } /** Gets an object through which playback of a file device can be controlled. @returns NULL if this device is not a file device. */ PlaybackControl* getPlaybackControl() {return m_pPlaybackControl;} /** Get the value of a general property of the device. There are convenience functions for all the commonly used properties, such as image registration and frame synchronization. It is expected for this reason that this function will rarely be directly used by applications. @param [in] propertyId Numerical ID of the property you would like to check. 
@param [out] data Place to store the value of the property. @param [in,out] dataSize IN: Size of the buffer passed in the @c data argument. OUT: the actual written size. @returns Status code indicating results of this operation. */ Status getProperty(int propertyId, void* data, int* dataSize) const { return (Status)oniDeviceGetProperty(m_device, propertyId, data, dataSize); } /** Sets the value of a general property of the device. There are convenience functions for all the commonly used properties, such as image registration and frame synchronization. It is expected for this reason that this function will rarely be directly used by applications. @param [in] propertyId The numerical ID of the property to be set. @param [in] data Place to store the data to be written to the property. @param [in] dataSize Size of the data to be written to the property. @returns Status code indicating results of this operation. */ Status setProperty(int propertyId, const void* data, int dataSize) { return (Status)oniDeviceSetProperty(m_device, propertyId, data, dataSize); } /** Checks to see if this device can support registration of color video and depth video. Image registration is used to properly superimpose two images from cameras located at different points in space. Please see the OpenNi 2.0 Programmer's Guide for more information about registration. @returns true if image registration is supported by this device, false otherwise. */ bool isImageRegistrationModeSupported(ImageRegistrationMode mode) const { return (oniDeviceIsImageRegistrationModeSupported(m_device, (OniImageRegistrationMode)mode) == TRUE); } /** Gets the current image registration mode of this device. Image registration is used to properly superimpose two images from cameras located at different points in space. Please see the OpenNi 2.0 Programmer's Guide for more information about registration. @returns Current image registration mode. See @ref ImageRegistrationMode for possible return values. 
*/ ImageRegistrationMode getImageRegistrationMode() const { ImageRegistrationMode mode; Status rc = getProperty(DEVICE_PROPERTY_IMAGE_REGISTRATION, &mode); if (rc != STATUS_OK) { return IMAGE_REGISTRATION_OFF; } return mode; } /** Sets the image registration on this device. Image registration is used to properly superimpose two images from cameras located at different points in space. Please see the OpenNi 2.0 Programmer's Guide for more information about registration. See @ref ImageRegistrationMode for a list of valid settings to pass to this function. It is a good practice to first check if the mode is supported by calling @ref isImageRegistrationModeSupported(). @param [in] mode Desired new value for the image registration mode. @returns Status code for the operation. */ Status setImageRegistrationMode(ImageRegistrationMode mode) { return setProperty(DEVICE_PROPERTY_IMAGE_REGISTRATION, mode); } /** Checks whether this Device object is currently connected to an actual file or hardware device. @returns true if the Device is connected, false otherwise. */ bool isValid() const { return m_device != NULL; } /** Checks whether this device is a file device (i.e. a recording). @returns true if this is a file device, false otherwise. */ bool isFile() const { return isPropertySupported(DEVICE_PROPERTY_PLAYBACK_SPEED) && isPropertySupported(DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED) && isCommandSupported(DEVICE_COMMAND_SEEK); } /** Used to turn the depth/color frame synchronization feature on and off. When frame synchronization is enabled, the device will deliver depth and image frames that are separated in time by some maximum value. When disabled, the phase difference between depth and image frame generation cannot be guaranteed. 
@param [in] isEnabled Set to TRUE to enable synchronization, FALSE to disable it @returns Status code indicating success or failure of this operation */ Status setDepthColorSyncEnabled(bool isEnabled) { Status rc = STATUS_OK; if (isEnabled) { rc = (Status)oniDeviceEnableDepthColorSync(m_device); } else { oniDeviceDisableDepthColorSync(m_device); } return rc; } bool getDepthColorSyncEnabled() { return oniDeviceGetDepthColorSyncEnabled(m_device) == TRUE; } /** Sets a property that takes an arbitrary data type as its input. It is not expected that application code will need this function frequently, as all commonly used properties have higher level functions provided. @tparam T Type of data to be passed to the property. @param [in] propertyId The numerical ID of the property to be set. @param [in] value Place to store the data to be written to the property. @returns Status code indicating success or failure of this operation. */ template Status setProperty(int propertyId, const T& value) { return setProperty(propertyId, &value, sizeof(T)); } /** Checks a property that provides an arbitrary data type as its output. It is not expected that application code will need this function frequently, as all commonly used properties have higher level functions provided. @tparam [in] T Data type of the value to be read. @param [in] propertyId The numerical ID of the property to be read. @param [in, out] value Pointer to a place to store the value read from the property. @returns Status code indicating success or failure of this operation. */ template Status getProperty(int propertyId, T* value) const { int size = sizeof(T); return getProperty(propertyId, value, &size); } /** Checks if a specific property is supported by the device. @param [in] propertyId Property to be checked. @returns true if the property is supported, false otherwise. 
*/ bool isPropertySupported(int propertyId) const { return oniDeviceIsPropertySupported(m_device, propertyId) == TRUE; } /** Invokes a command that takes an arbitrary data type as its input. It is not expected that application code will need this function frequently, as all commonly used properties have higher level functions provided. @param [in] commandId Numerical code of the property to be invoked. @param [in] data Data to be passed to the property. @param [in] dataSize size of the buffer passed in @c data. @returns Status code indicating success or failure of this operation. */ Status invoke(int commandId, void* data, int dataSize) { return (Status)oniDeviceInvoke(m_device, commandId, data, dataSize); } /** Invokes a command that takes an arbitrary data type as its input. It is not expected that application code will need this function frequently, as all commonly used properties have higher level functions provided. @tparam [in] T Type of data to be passed to the property. @param [in] propertyId Numerical code of the property to be invoked. @param [in] value Data to be passed to the property. @returns Status code indicating success or failure of this operation. */ template Status invoke(int propertyId, T& value) { return invoke(propertyId, &value, sizeof(T)); } /** Checks if a specific command is supported by the device. @param [in] commandId Command to be checked. @returns true if the command is supported, false otherwise. 
*/ bool isCommandSupported(int commandId) const { return oniDeviceIsCommandSupported(m_device, commandId) == TRUE; } /** @internal **/ inline Status _openEx(const char* uri, const char* mode); private: Device(const Device&); Device& operator=(const Device&); void clearSensors() { for (int i = 0; i < ONI_MAX_SENSORS; ++i) { m_aSensorInfo[i]._setInternal(NULL); } } inline Status _setHandle(OniDeviceHandle deviceHandle); private: PlaybackControl* m_pPlaybackControl; OniDeviceHandle m_device; DeviceInfo m_deviceInfo; SensorInfo m_aSensorInfo[ONI_MAX_SENSORS]; bool m_isOwner; }; /** * The PlaybackControl class provides access to a series of specific to playing back * a recording from a file device. * * When playing a stream back from a recording instead of playing from a live device, * it is possible to vary playback speed, change the current time location (ie * fast forward / rewind / seek), specify whether the playback should be repeated at the end * of the recording, and query the total size of the recording. * * Since none of these functions make sense in the context of a physical device, they are * split out into a seperate playback control class. To use, simply create your file device, * create a PlaybackControl, and then attach the PlaybackControl to the file device. */ class PlaybackControl { public: /** * Deconstructor. Destroys a PlaybackControl class. The deconstructor presently detaches * from its recording automatically, but it is considered a best practice for applications to * manually detach from any stream that was attached to. */ ~PlaybackControl() { detach(); } /** * Getter function for the current playback speed of this device. * * This value is expressed as a multiple of the speed the original * recording was taken at. For example, if the original recording was at 30fps, and * playback speed is set to 0.5, then the recording will play at 15fps. If playback speed * is set to 2.0, then the recording would playback at 60fps. 
* * In addition, there are two "special" values. A playback speed of 0.0 indicates that the * playback should occur as fast as the system is capable of returning frames. This is * most useful when testing algorithms on large datasets, as it enables playback to be * done at a much higher rate than would otherwise be possible. * * A value of -1 indicates that speed is "manual". In this mode, new frames will only * become available when an application manually reads them. If used in a polling loop, * this setting also enables systems to read and process frames limited only by * available processing speeds. * * @returns Current playback speed of the device, measured as ratio of recording speed. */ float getSpeed() const { if (!isValid()) { return 0.0f; } float speed; Status rc = m_pDevice->getProperty(DEVICE_PROPERTY_PLAYBACK_SPEED, &speed); if (rc != STATUS_OK) { return 1.0f; } return speed; } /** * Setter function for the playback speed of the device. For a full explaination of * what this value means @see PlaybackControl::getSpeed(). * * @param [in] speed Desired new value of playback speed, as ratio of original recording. * @returns Status code indicating success or failure of this operation. */ Status setSpeed(float speed) { if (!isValid()) { return STATUS_NO_DEVICE; } return m_pDevice->setProperty(DEVICE_PROPERTY_PLAYBACK_SPEED, speed); } /** * Gets the current repeat setting of the file device. * * @returns true if repeat is enabled, false if not enabled. */ bool getRepeatEnabled() const { if (!isValid()) { return false; } OniBool repeat; Status rc = m_pDevice->getProperty(DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED, &repeat); if (rc != STATUS_OK) { return false; } return repeat == TRUE; } /** * Changes the current repeat mode of the device. If repeat mode is turned on, then the recording will * begin playback again at the beginning after the last frame is read. If turned off, no more frames * will become available after last frame is read. 
* * @param [in] repeat New value for repeat -- true to enable, false to disable * @returns Status code indicating success or failure of this operations. */ Status setRepeatEnabled(bool repeat) { if (!isValid()) { return STATUS_NO_DEVICE; } return m_pDevice->setProperty(DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED, repeat ? TRUE : FALSE); } /** * Seeks within a VideoStream to a given FrameID. Note that when this function is called on one * stream, all other streams will also be changed to the corresponding place in the recording. The FrameIDs * of different streams may not match, since FrameIDs may differ for streams that are not synchronized, but * the recording will set all streams to the same moment in time. * * @param [in] stream Stream for which the frameIndex value is valid. * @param [in] frameIndex Frame index to move playback to * @returns Status code indicating success or failure of this operation */ Status seek(const VideoStream& stream, int frameIndex) { if (!isValid()) { return STATUS_NO_DEVICE; } OniSeek seek; seek.frameIndex = frameIndex; seek.stream = stream._getHandle(); return m_pDevice->invoke(DEVICE_COMMAND_SEEK, seek); } /** * Provides the a count of frames that this recording contains for a given stream. This is useful * both to determine the length of the recording, and to ensure that a valid Frame Index is set when using * the @ref PlaybackControl::seek() function. 
* * @param [in] stream The video stream to count frames for * @returns Number of frames in provided @ref VideoStream, or 0 if the stream is not part of the recording */ int getNumberOfFrames(const VideoStream& stream) const { int numOfFrames = -1; Status rc = stream.getProperty(STREAM_PROPERTY_NUMBER_OF_FRAMES, &numOfFrames); if (rc != STATUS_OK) { return 0; } return numOfFrames; } bool isValid() const { return m_pDevice != NULL; } private: Status attach(Device* device) { if (!device->isValid() || !device->isFile()) { return STATUS_ERROR; } detach(); m_pDevice = device; return STATUS_OK; } void detach() { m_pDevice = NULL; } friend class Device; PlaybackControl(Device* pDevice) : m_pDevice(NULL) { if (pDevice != NULL) { attach(pDevice); } } Device* m_pDevice; }; class CameraSettings { public: // setters Status setAutoExposureEnabled(bool enabled) { return setProperty(STREAM_PROPERTY_AUTO_EXPOSURE, enabled ? TRUE : FALSE); } Status setAutoWhiteBalanceEnabled(bool enabled) { return setProperty(STREAM_PROPERTY_AUTO_WHITE_BALANCE, enabled ? 
TRUE : FALSE); } bool getAutoExposureEnabled() const { OniBool enabled = FALSE; Status rc = getProperty(STREAM_PROPERTY_AUTO_EXPOSURE, &enabled); return rc == STATUS_OK && enabled == TRUE; } bool getAutoWhiteBalanceEnabled() const { OniBool enabled = FALSE; Status rc = getProperty(STREAM_PROPERTY_AUTO_WHITE_BALANCE, &enabled); return rc == STATUS_OK && enabled == TRUE; } Status setGain(int gain) { return setProperty(STREAM_PROPERTY_GAIN, gain); } Status setExposure(int exposure) { return setProperty(STREAM_PROPERTY_EXPOSURE, exposure); } int getGain() { int gain; Status rc = getProperty(STREAM_PROPERTY_GAIN, &gain); if (rc != STATUS_OK) { return 100; } return gain; } int getExposure() { int exposure; Status rc = getProperty(STREAM_PROPERTY_EXPOSURE, &exposure); if (rc != STATUS_OK) { return 0; } return exposure; } bool isValid() const {return m_pStream != NULL;} private: template Status getProperty(int propertyId, T* value) const { if (!isValid()) return STATUS_NOT_SUPPORTED; return m_pStream->getProperty(propertyId, value); } template Status setProperty(int propertyId, const T& value) { if (!isValid()) return STATUS_NOT_SUPPORTED; return m_pStream->setProperty(propertyId, value); } friend class VideoStream; CameraSettings(VideoStream* pStream) { m_pStream = pStream; } VideoStream* m_pStream; }; /** * The OpenNI class is a static entry point to the library. It is used by every OpenNI 2.0 * application to initialize the SDK and drivers to enable creation of valid device objects. * * It also defines a listener class and events that enable for event driven notification of * device connection, device disconnection, and device configuration changes. 
* * In addition, it gives access to SDK version information and provides a function that allows * you to wait for data to become available on any one of a list of streams (as opposed to * waiting for data on one specific stream with functions provided by the VideoStream class) * */ class OpenNI { public: /** * The OpenNI::DeviceConnectedListener class provides a means of registering for, and responding to * when a device is connected. * * onDeviceConnected is called whenever a new device is connected to the system (ie this event * would be triggered when a new sensor is manually plugged into the host system running the * application) * * To use this class, you should write a new class that inherits from it, and override the * onDeviceConnected method. Once you instantiate your class, use the * OpenNI::addDeviceConnectedListener() function to add your listener object to OpenNI's list of listeners. Your * handler function will then be called whenever the event occurs. A OpenNI::removeDeviceConnectedListener() * function is also provided, if you want to have your class stop listening to these events for any * reason. */ class DeviceConnectedListener { public: DeviceConnectedListener() { m_deviceConnectedCallbacks.deviceConnected = deviceConnectedCallback; m_deviceConnectedCallbacks.deviceDisconnected = NULL; m_deviceConnectedCallbacks.deviceStateChanged = NULL; m_deviceConnectedCallbacksHandle = NULL; } virtual ~DeviceConnectedListener() { } /** * Callback function for the onDeviceConnected event. This function will be * called whenever this event occurs. When this happens, a pointer to the @ref DeviceInfo * object for the newly connected device will be supplied. Note that once a * device is removed, if it was opened by a @ref Device object, that object can no longer be * used to access the device, even if it was reconnected. Once a device was reconnected, * @ref Device::open() should be called again in order to use this device. 
* * If you wish to open the new device as it is connected, simply query the provided DeviceInfo * object to obtain the URI of the device, and pass this URI to the Device.Open() function. */ virtual void onDeviceConnected(const DeviceInfo*) = 0; private: static void ONI_CALLBACK_TYPE deviceConnectedCallback(const OniDeviceInfo* pInfo, void* pCookie) { DeviceConnectedListener* pListener = (DeviceConnectedListener*)pCookie; pListener->onDeviceConnected(static_cast(pInfo)); } friend class OpenNI; OniDeviceCallbacks m_deviceConnectedCallbacks; OniCallbackHandle m_deviceConnectedCallbacksHandle; }; /** * The OpenNI::DeviceDisconnectedListener class provides a means of registering for, and responding to * when a device is disconnected. * * onDeviceDisconnected is called when a device is removed from the system. Note that once a * device is removed, if it was opened by a @ref Device object, that object can no longer be * used to access the device, even if it was reconnected. Once a device was reconnected, * @ref Device::open() should be called again in order to use this device. * * To use this class, you should write a new class that inherits from it, and override the * onDeviceDisconnected method. Once you instantiate your class, use the * OpenNI::addDeviceDisconnectedListener() function to add your listener object to OpenNI's list of listeners. Your * handler function will then be called whenever the event occurs. A OpenNI::removeDeviceDisconnectedListener() * function is also provided, if you want to have your class stop listening to these events for any * reason. 
*/ class DeviceDisconnectedListener { public: DeviceDisconnectedListener() { m_deviceDisconnectedCallbacks.deviceConnected = NULL; m_deviceDisconnectedCallbacks.deviceDisconnected = deviceDisconnectedCallback; m_deviceDisconnectedCallbacks.deviceStateChanged = NULL; m_deviceDisconnectedCallbacksHandle = NULL; } virtual ~DeviceDisconnectedListener() { } /** * Callback function for the onDeviceDisconnected event. This function will be * called whenever this event occurs. When this happens, a pointer to the DeviceInfo * object for the newly disconnected device will be supplied. Note that once a * device is removed, if it was opened by a @ref Device object, that object can no longer be * used to access the device, even if it was reconnected. Once a device was reconnected, * @ref Device::open() should be called again in order to use this device. */ virtual void onDeviceDisconnected(const DeviceInfo*) = 0; private: static void ONI_CALLBACK_TYPE deviceDisconnectedCallback(const OniDeviceInfo* pInfo, void* pCookie) { DeviceDisconnectedListener* pListener = (DeviceDisconnectedListener*)pCookie; pListener->onDeviceDisconnected(static_cast(pInfo)); } friend class OpenNI; OniDeviceCallbacks m_deviceDisconnectedCallbacks; OniCallbackHandle m_deviceDisconnectedCallbacksHandle; }; /** * The OpenNI::DeviceStateChangedListener class provides a means of registering for, and responding to * when a device's state is changed. * * onDeviceStateChanged is triggered whenever the state of a connected device is changed. * * To use this class, you should write a new class that inherits from it, and override the * onDeviceStateChanged method. Once you instantiate your class, use the * OpenNI::addDeviceStateChangedListener() function to add your listener object to OpenNI's list of listeners. Your * handler function will then be called whenever the event occurs. 
A OpenNI::removeDeviceStateChangedListener() * function is also provided, if you want to have your class stop listening to these events for any * reason. */ class DeviceStateChangedListener { public: DeviceStateChangedListener() { m_deviceStateChangedCallbacks.deviceConnected = NULL; m_deviceStateChangedCallbacks.deviceDisconnected = NULL; m_deviceStateChangedCallbacks.deviceStateChanged = deviceStateChangedCallback; m_deviceStateChangedCallbacksHandle = NULL; } virtual ~DeviceStateChangedListener() { } /** * Callback function for the onDeviceStateChanged event. This function will be * called whenever this event occurs. When this happens, a pointer to a DeviceInfo * object for the affected device will be supplied, as well as the new DeviceState * value of that device. */ virtual void onDeviceStateChanged(const DeviceInfo*, DeviceState) = 0; private: static void ONI_CALLBACK_TYPE deviceStateChangedCallback(const OniDeviceInfo* pInfo, OniDeviceState state, void* pCookie) { DeviceStateChangedListener* pListener = (DeviceStateChangedListener*)pCookie; pListener->onDeviceStateChanged(static_cast(pInfo), DeviceState(state)); } friend class OpenNI; OniDeviceCallbacks m_deviceStateChangedCallbacks; OniCallbackHandle m_deviceStateChangedCallbacksHandle; }; /** Initialize the library. This will load all available drivers, and see which devices are available It is forbidden to call any other method in OpenNI before calling @ref initialize(). */ static Status initialize() { return (Status)oniInitialize(ONI_API_VERSION); // provide version of API, to make sure proper struct sizes are used } /** Stop using the library. Unload all drivers, close all streams and devices. Once @ref shutdown was called, no other calls to OpenNI is allowed. 
*/ static void shutdown() { oniShutdown(); } /** * Returns the version of OpenNI */ static Version getVersion() { OniVersion oniVersion = oniGetVersion(); Version version; version.major = oniVersion.major; version.minor = oniVersion.minor; version.maintenance = oniVersion.maintenance; version.build = oniVersion.build; return version; } /** * Retrieves the calling thread's last extended error information. The last extended error information is maintained * on a per-thread basis. Multiple threads do not overwrite each other's last extended error information. * * The extended error information is cleared on every call to an OpenNI method, so you should call this method * immediately after a call to an OpenNI method which have failed. */ static const char* getExtendedError() { return oniGetExtendedError(); } /** Fills up an array of @ref DeviceInfo objects with devices that are available. @param [in,out] deviceInfoList An array to be filled with devices. */ static void enumerateDevices(Array* deviceInfoList) { OniDeviceInfo* m_pDeviceInfos; int m_deviceInfoCount; oniGetDeviceList(&m_pDeviceInfos, &m_deviceInfoCount); deviceInfoList->_setData((DeviceInfo*)m_pDeviceInfos, m_deviceInfoCount, true); oniReleaseDeviceList(m_pDeviceInfos); } /** Wait for a new frame from any of the streams provided. The function blocks until any of the streams has a new frame available, or the timeout has passed. @param [in] pStreams An array of streams to wait for. @param [in] streamCount The number of streams in @c pStreams @param [out] pReadyStreamIndex The index of the first stream that has new frame available. @param [in] timeout [Optional] A timeout before returning if no stream has new data. Default value is @ref TIMEOUT_FOREVER. 
*/ static Status waitForAnyStream(VideoStream** pStreams, int streamCount, int* pReadyStreamIndex, int timeout = TIMEOUT_FOREVER) { static const int ONI_MAX_STREAMS = 50; OniStreamHandle streams[ONI_MAX_STREAMS]; if (streamCount > ONI_MAX_STREAMS) { printf("Too many streams for wait: %d > %d\n", streamCount, ONI_MAX_STREAMS); return STATUS_BAD_PARAMETER; } *pReadyStreamIndex = -1; for (int i = 0; i < streamCount; ++i) { if (pStreams[i] != NULL) { streams[i] = pStreams[i]->_getHandle(); } else { streams[i] = NULL; } } Status rc = (Status)oniWaitForAnyStream(streams, streamCount, pReadyStreamIndex, timeout); return rc; } /** * Add a listener to the list of objects that receive the event when a device is connected. See the * @ref OpenNI::DeviceConnectedListener class for details on utilizing the events provided by OpenNI. * * @param pListener Pointer to the Listener to be added to the list * @returns Status code indicating success or failure of this operation. */ static Status addDeviceConnectedListener(DeviceConnectedListener* pListener) { if (pListener->m_deviceConnectedCallbacksHandle != NULL) { return STATUS_ERROR; } return (Status)oniRegisterDeviceCallbacks(&pListener->m_deviceConnectedCallbacks, pListener, &pListener->m_deviceConnectedCallbacksHandle); } /** * Add a listener to the list of objects that receive the event when a device is disconnected. See the * @ref OpenNI::DeviceDisconnectedListener class for details on utilizing the events provided by OpenNI. * * @param pListener Pointer to the Listener to be added to the list * @returns Status code indicating success or failure of this operation. 
*/ static Status addDeviceDisconnectedListener(DeviceDisconnectedListener* pListener) { if (pListener->m_deviceDisconnectedCallbacksHandle != NULL) { return STATUS_ERROR; } return (Status)oniRegisterDeviceCallbacks(&pListener->m_deviceDisconnectedCallbacks, pListener, &pListener->m_deviceDisconnectedCallbacksHandle); } /** * Add a listener to the list of objects that receive the event when a device's state changes. See the * @ref OpenNI::DeviceStateChangedListener class for details on utilizing the events provided by OpenNI. * * @param pListener Pointer to the Listener to be added to the list * @returns Status code indicating success or failure of this operation. */ static Status addDeviceStateChangedListener(DeviceStateChangedListener* pListener) { if (pListener->m_deviceStateChangedCallbacksHandle != NULL) { return STATUS_ERROR; } return (Status)oniRegisterDeviceCallbacks(&pListener->m_deviceStateChangedCallbacks, pListener, &pListener->m_deviceStateChangedCallbacksHandle); } /** * Remove a listener from the list of objects that receive the event when a device is connected. See * the @ref OpenNI::DeviceConnectedListener class for details on utilizing the events provided by OpenNI. * * @param pListener Pointer to the Listener to be removed from the list * @returns Status code indicating the success or failure of this operation. */ static void removeDeviceConnectedListener(DeviceConnectedListener* pListener) { oniUnregisterDeviceCallbacks(pListener->m_deviceConnectedCallbacksHandle); pListener->m_deviceConnectedCallbacksHandle = NULL; } /** * Remove a listener from the list of objects that receive the event when a device is disconnected. See * the @ref OpenNI::DeviceDisconnectedListener class for details on utilizing the events provided by OpenNI. * * @param pListener Pointer to the Listener to be removed from the list * @returns Status code indicating the success or failure of this operation. 
*/ static void removeDeviceDisconnectedListener(DeviceDisconnectedListener* pListener) { oniUnregisterDeviceCallbacks(pListener->m_deviceDisconnectedCallbacksHandle); pListener->m_deviceDisconnectedCallbacksHandle = NULL; } /** * Remove a listener from the list of objects that receive the event when a device's state changes. See * the @ref OpenNI::DeviceStateChangedListener class for details on utilizing the events provided by OpenNI. * * @param pListener Pointer to the Listener to be removed from the list * @returns Status code indicating the success or failure of this operation. */ static void removeDeviceStateChangedListener(DeviceStateChangedListener* pListener) { oniUnregisterDeviceCallbacks(pListener->m_deviceStateChangedCallbacksHandle); pListener->m_deviceStateChangedCallbacksHandle = NULL; } /** * Change the log output folder * @param const char * strLogOutputFolder [in] log required folder * * @retval STATUS_OK Upon successful completion. * @retval STATUS_ERROR Upon any kind of failure. */ static Status setLogOutputFolder(const char *strLogOutputFolder) { return (Status)oniSetLogOutputFolder(strLogOutputFolder); } /** * Get current log file name * @param char * strFileName [out] returned file name buffer * @param int nBufferSize [in] Buffer size * * @retval STATUS_OK Upon successful completion. * @retval STATUS_ERROR Upon any kind of failure. */ static Status getLogFileName(char *strFileName, int nBufferSize) { return (Status)oniGetLogFileName(strFileName, nBufferSize); } /** * Set minimum severity for log produce * @param const char * strMask [in] Logger name * @param int nMinSeverity [in] Logger severity * * @retval STATUS_OK Upon successful completion. * @retval STATUS_ERROR Upon any kind of failure. */ static Status setLogMinSeverity(int nMinSeverity) { return(Status) oniSetLogMinSeverity(nMinSeverity); } /** * Configures if log entries will be printed to console. 
* @param const OniBool bConsoleOutput [in] TRUE to print log entries to console, FALSE otherwise. * * @retval STATUS_OK Upon successful completion. * @retval STATUS_ERROR Upon any kind of failure. */ static Status setLogConsoleOutput(bool bConsoleOutput) { return (Status)oniSetLogConsoleOutput(bConsoleOutput); } /** * Configures if log entries will be printed to file. * @param const OniBool bConsoleOutput [in] TRUE to print log entries to file, FALSE otherwise. * * @retval STATUS_OK Upon successful completion. * @retval STATUS_ERROR Upon any kind of failure. */ static Status setLogFileOutput(bool bFileOutput) { return (Status)oniSetLogFileOutput(bFileOutput); } #if ONI_PLATFORM == ONI_PLATFORM_ANDROID_ARM /** * Configures if log entries will be printed to the Android log. * @param OniBool bAndroidOutput bAndroidOutput [in] TRUE to print log entries to the Android log, FALSE otherwise. * * @retval STATUS_OK Upon successful completion. * @retval STATUS_ERROR Upon any kind of failure. */ static Status setLogAndroidOutput(bool bAndroidOutput) { return (Status)oniSetLogAndroidOutput(bAndroidOutput); } #endif private: OpenNI() { } }; /** The CoordinateConverter class converts points between the different coordinate systems. Depth and World coordinate systems OpenNI applications commonly use two different coordinate systems to represent depth. These two systems are referred to as Depth and World representation. Depth coordinates are the native data representation. In this system, the frame is a map (two dimensional array), and each pixel is assigned a depth value. This depth value represents the distance between the camera plane and whatever object is in the given pixel. The X and Y coordinates are simply the location in the map, where the origin is the top-left corner of the field of view. World coordinates superimpose a more familiar 3D Cartesian coordinate system on the world, with the camera lens at the origin. 
In this system, every point is specified by 3 points -- x, y and z. The x axis of this system is along a line that passes through the infrared projector and CMOS imager of the camera. The y axis is parallel to the front face of the camera, and perpendicular to the x axis (it will also be perpendicular to the ground if the camera is upright and level). The z axis runs into the scene, perpendicular to both the x and y axis. From the perspective of the camera, an object moving from left to right is moving along the increasing x axis. An object moving up is moving along the increasing y axis, and an object moving away from the camera is moving along the increasing z axis. Mathematically, the Depth coordinate system is the projection of the scene on the CMOS. If the sensor's angular field of view and resolution are known, then an angular size can be calculated for each pixel. This is how the conversion algorithms work. The dependence of this calculation on FoV and resolution is the reason that a @ref VideoStream pointer must be provided to these functions. The @ref VideoStream pointer is used to determine parameters for the specific points to be converted. Since Depth coordinates are a projective, the apparent size of objects in depth coordinates (measured in pixels) will increase as an object moves closer to the sensor. The size of objects in the World coordinate system is independent of distance from the sensor. Note that converting from Depth to World coordinates is relatively expensive computationally. It is generally not practical to convert the entire raw depth map to World coordinates. A better approach is to have your computer vision algorithm work in Depth coordinates for as long as possible, and only converting a few specific points to World coordinates right before output. Note that when converting from Depth to World or vice versa, the Z value remains the same. 
*/ class CoordinateConverter { public: /** Converts a single point from the World coordinate system to the Depth coordinate system. @param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates @param [in] worldX The X coordinate of the point to be converted, measured in millimeters in World coordinates @param [in] worldY The Y coordinate of the point to be converted, measured in millimeters in World coordinates @param [in] worldZ The Z coordinate of the point to be converted, measured in millimeters in World coordinates @param [out] pDepthX Pointer to a place to store the X coordinate of the output value, measured in pixels with 0 at far left of image @param [out] pDepthY Pointer to a place to store the Y coordinate of the output value, measured in pixels with 0 at top of image @param [out] pDepthZ Pointer to a place to store the Z(depth) coordinate of the output value, measured in the @ref PixelFormat of depthStream */ static Status convertWorldToDepth(const VideoStream& depthStream, float worldX, float worldY, float worldZ, int* pDepthX, int* pDepthY, DepthPixel* pDepthZ) { float depthX, depthY, depthZ; Status rc = (Status)oniCoordinateConverterWorldToDepth(depthStream._getHandle(), worldX, worldY, worldZ, &depthX, &depthY, &depthZ); *pDepthX = (int)depthX; *pDepthY = (int)depthY; *pDepthZ = (DepthPixel)depthZ; return rc; } /** Converts a single point from the World coordinate system to a floating point representation of the Depth coordinate system @param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates @param [in] worldX The X coordinate of the point to be converted, measured in millimeters in World coordinates @param [in] worldY The Y coordinate of the point to be converted, measured in millimeters in World coordinates @param [in] worldZ The Z coordinate of the point to be converted, measured in millimeters in World 
coordinates @param [out] pDepthX Pointer to a place to store the X coordinate of the output value, measured in pixels with 0.0 at far left of the image @param [out] pDepthY Pointer to a place to store the Y coordinate of the output value, measured in pixels with 0.0 at the top of the image @param [out] pDepthZ Pointer to a place to store the Z(depth) coordinate of the output value, measured in millimeters with 0.0 at the camera lens */ static Status convertWorldToDepth(const VideoStream& depthStream, float worldX, float worldY, float worldZ, float* pDepthX, float* pDepthY, float* pDepthZ) { return (Status)oniCoordinateConverterWorldToDepth(depthStream._getHandle(), worldX, worldY, worldZ, pDepthX, pDepthY, pDepthZ); } /** Converts a single point from the Depth coordinate system to the World coordinate system. @param [in] depthStream Reference to an openi::VideoStream that will be used to determine the format of the Depth coordinates @param [in] depthX The X coordinate of the point to be converted, measured in pixels with 0 at the far left of the image @param [in] depthY The Y coordinate of the point to be converted, measured in pixels with 0 at the top of the image @param [in] depthZ the Z(depth) coordinate of the point to be converted, measured in the @ref PixelFormat of depthStream @param [out] pWorldX Pointer to a place to store the X coordinate of the output value, measured in millimeters in World coordinates @param [out] pWorldY Pointer to a place to store the Y coordinate of the output value, measured in millimeters in World coordinates @param [out] pWorldZ Pointer to a place to store the Z coordinate of the output value, measured in millimeters in World coordinates */ static Status convertDepthToWorld(const VideoStream& depthStream, int depthX, int depthY, DepthPixel depthZ, float* pWorldX, float* pWorldY, float* pWorldZ) { return (Status)oniCoordinateConverterDepthToWorld(depthStream._getHandle(), float(depthX), float(depthY), float(depthZ), pWorldX, 
pWorldY, pWorldZ); } /** Converts a single point from a floating point representation of the Depth coordinate system to the World coordinate system. @param [in] depthStream Reference to an openi::VideoStream that will be used to determine the format of the Depth coordinates @param [in] depthX The X coordinate of the point to be converted, measured in pixels with 0.0 at the far left of the image @param [in] depthY The Y coordinate of the point to be converted, measured in pixels with 0.0 at the top of the image @param [in] depthZ Z(depth) coordinate of the point to be converted, measured in the @ref PixelFormat of depthStream @param [out] pWorldX Pointer to a place to store the X coordinate of the output value, measured in millimeters in World coordinates @param [out] pWorldY Pointer to a place to store the Y coordinate of the output value, measured in millimeters in World coordinates @param [out] pWorldZ Pointer to a place to store the Z coordinate of the output value, measured in millimeters in World coordinates */ static Status convertDepthToWorld(const VideoStream& depthStream, float depthX, float depthY, float depthZ, float* pWorldX, float* pWorldY, float* pWorldZ) { return (Status)oniCoordinateConverterDepthToWorld(depthStream._getHandle(), depthX, depthY, depthZ, pWorldX, pWorldY, pWorldZ); } /** For a given depth point, provides the coordinates of the corresponding color value. Useful for superimposing the depth and color images. This operation is the same as turning on registration, but is performed on a single pixel rather than the whole image. 
@param [in] depthStream Reference to a openni::VideoStream that produced the depth value @param [in] colorStream Reference to a openni::VideoStream that we want to find the appropriate color pixel in @param [in] depthX X value of the depth point, given in Depth coordinates and measured in pixels @param [in] depthY Y value of the depth point, given in Depth coordinates and measured in pixels @param [in] depthZ Z(depth) value of the depth point, given in the @ref PixelFormat of depthStream @param [out] pColorX The X coordinate of the color pixel that overlaps the given depth pixel, measured in pixels @param [out] pColorY The Y coordinate of the color pixel that overlaps the given depth pixel, measured in pixels */ static Status convertDepthToColor(const VideoStream& depthStream, const VideoStream& colorStream, int depthX, int depthY, DepthPixel depthZ, int* pColorX, int* pColorY) { return (Status)oniCoordinateConverterDepthToColor(depthStream._getHandle(), colorStream._getHandle(), depthX, depthY, depthZ, pColorX, pColorY); } }; /** * The Recorder class is used to record streams to an ONI file. * * After a recorder is instantiated, it must be initialized with a specific filename where * the recording will be stored. The recorder is then attached to one or more streams. Once * this is complete, the recorder can be told to start recording. The recorder will store * every frame from every stream to the specified file. Later, this file can be used to * initialize a file Device, and used to play back the same data that was recorded. * * Opening a file device is done by passing its path as the uri to the @ref Device::open() method. * * @see PlaybackControl for options available to play a reorded file. * */ class Recorder { public: /** * Creates a recorder. The recorder is not valid, i.e. @ref isValid() returns * false. You must initialize the recorder before use with @ref create(). */ Recorder() : m_recorder(NULL) { } /** * Destroys a recorder. 
This will also stop recording. */ ~Recorder() { destroy(); } /** * Initializes a recorder. You can initialize the recorder only once. Attempts * to intialize more than once will result in an error code being returned. * * Initialization assigns the recorder to an output file that will be used for * recording. Before use, the @ref attach() function must also be used to assign input * data to the Recorder. * * @param [in] fileName The name of a file which will contain the recording. * @returns Status code which indicates success or failure of the operation. */ Status create(const char* fileName) { if (!isValid()) { return (Status)oniCreateRecorder(fileName, &m_recorder); } return STATUS_ERROR; } /** * Verifies if the recorder is valid, i.e. if one can record with this recorder. A * recorder object is not valid until the @ref create() method is called. * * @returns true if the recorder has been intialized, false otherwise. */ bool isValid() const { return NULL != getHandle(); } /** * Attaches a stream to the recorder. Note, this won't start recording, you * should explicitly start it using @ref start() method. As soon as the recording * process has been started, no more streams can be attached to the recorder. * * @param [in] stream The stream to be recorded. * @param [in] allowLossyCompression [Optional] If this value is true, the recorder might use * a lossy compression, which means that when the recording will be played-back, there might * be small differences from the original frame. Default value is false. */ Status attach(VideoStream& stream, bool allowLossyCompression = false) { if (!isValid() || !stream.isValid()) { return STATUS_ERROR; } return (Status)oniRecorderAttachStream( m_recorder, stream._getHandle(), allowLossyCompression); } /** * Starts recording. * Once this method is called, the recorder will take all subsequent frames from the attached streams * and store them in the file. * You may not attach additional streams once recording was started. 
*/ Status start() { if (!isValid()) { return STATUS_ERROR; } return (Status)oniRecorderStart(m_recorder); } /** * Stops recording. You may use @ref start() to resume the recording. */ void stop() { if (isValid()) { oniRecorderStop(m_recorder); } } /** Destroys the recorder object. */ void destroy() { if (isValid()) { oniRecorderDestroy(&m_recorder); } } private: Recorder(const Recorder&); Recorder& operator=(const Recorder&); /** * Returns a handle of this recorder. */ OniRecorderHandle getHandle() const { return m_recorder; } OniRecorderHandle m_recorder; }; // Implemetation Status VideoStream::create(const Device& device, SensorType sensorType) { OniStreamHandle streamHandle; Status rc = (Status)oniDeviceCreateStream(device._getHandle(), (OniSensorType)sensorType, &streamHandle); if (rc != STATUS_OK) { return rc; } m_isOwner = true; _setHandle(streamHandle); if (isPropertySupported(STREAM_PROPERTY_AUTO_WHITE_BALANCE) && isPropertySupported(STREAM_PROPERTY_AUTO_EXPOSURE)) { m_pCameraSettings = new CameraSettings(this); } return STATUS_OK; } void VideoStream::destroy() { if (!isValid()) { return; } if (m_pCameraSettings != NULL) { delete m_pCameraSettings; m_pCameraSettings = NULL; } if (m_stream != NULL) { if(m_isOwner) oniStreamDestroy(m_stream); m_stream = NULL; } } Status Device::open(const char* uri) { //If we are not the owners, we stick with our own device if(!m_isOwner) { if(isValid()){ return STATUS_OK; }else{ return STATUS_OUT_OF_FLOW; } } OniDeviceHandle deviceHandle; Status rc = (Status)oniDeviceOpen(uri, &deviceHandle); if (rc != STATUS_OK) { return rc; } _setHandle(deviceHandle); return STATUS_OK; } Status Device::_openEx(const char* uri, const char* mode) { //If we are not the owners, we stick with our own device if(!m_isOwner) { if(isValid()){ return STATUS_OK; }else{ return STATUS_OUT_OF_FLOW; } } OniDeviceHandle deviceHandle; Status rc = (Status)oniDeviceOpenEx(uri, mode, &deviceHandle); if (rc != STATUS_OK) { return rc; } 
_setHandle(deviceHandle); return STATUS_OK; } Status Device::_setHandle(OniDeviceHandle deviceHandle) { if (m_device == NULL) { m_device = deviceHandle; clearSensors(); oniDeviceGetInfo(m_device, &m_deviceInfo); if (isFile()) { m_pPlaybackControl = new PlaybackControl(this); } // Read deviceInfo return STATUS_OK; } return STATUS_OUT_OF_FLOW; } void Device::close() { if (m_pPlaybackControl != NULL) { delete m_pPlaybackControl; m_pPlaybackControl = NULL; } if (m_device != NULL) { if(m_isOwner) { oniDeviceClose(m_device); } m_device = NULL; } } } #endif // _OPEN_NI_HPP_ OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/ReleaseNotes.txt0000600000175000017500000000225412240433507023527 0ustar jsprickejsprickeOpenNI 2.2.0 Build 33 November 12 2013 Minimum Requirements: --------------------- - Operating Systems: - Windows XP with SP2 and above, Windows 7, Windows 8, on x86 (32/64 bit) - Ubuntu 12.04 (32/64/arm) and above - Android 2.3 and above - Mac OSX 10.7 and above - Processors: - Pentium 4, 1.4GHz and above - AMD Athlon 64/FX 1GHz and above - Arm Cortex A8 and above - Memory: at least 64MB available. - 250MB free hard disk space. - Available USB 2.0 high-speed port. - Development Environment: - Microsoft Visual Studio 2008 and 2010. The compiler can be MSVC compiler or an Intel Compiler 11 and above. - GCC 4.x - Some of the sample applications require a graphics card equivalent to: ATI RADEON x1300 or NVIDIA GeForce 7300. Notes: ------ - On Android, only native support (and samples) is currently provided. Please note that as bionic (Android linker) does not support the rpath option, the samples cannot start as is. 
To solve this, do one of the following: - Copy OpenNI libraries (libOpenNI2.so, libPS1080.so and libOniFile.so) to /system/lib (requires root) - or - - run `export LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH` before starting the native executeable OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/LICENSE0000600000175000017500000002613612240433507021407 0ustar jsprickejspricke Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/0000700000175000017500000000000012240433507021774 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleRead/0000700000175000017500000000000012240433507024021 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleRead/Android.mk0000600000175000017500000000210612240433507025733 0ustar jsprickejspricke# OpenNI 2.x Android makefile. # Copyright (C) 2012 PrimeSense Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) # Sources MY_SRC_FILES := \ $(LOCAL_PATH)/*.cpp MY_SRC_FILE_EXPANDED := $(wildcard $(MY_SRC_FILES)) LOCAL_SRC_FILES := $(MY_SRC_FILE_EXPANDED:$(LOCAL_PATH)/%=%) # C/CPP Flags LOCAL_CFLAGS += $(OPENNI2_CFLAGS) # Includes LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/../../Include \ $(LOCAL_PATH)/../Common # Dependencies LOCAL_SHARED_LIBRARIES := libOpenNI2 # Output LOCAL_MODULE := SimpleRead include $(BUILD_EXECUTABLE) OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleRead/main.cpp0000600000175000017500000000651412240433507025461 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #include #include #include "OniSampleUtilities.h" #define SAMPLE_READ_WAIT_TIMEOUT 2000 //2000ms using namespace openni; int main() { Status rc = OpenNI::initialize(); if (rc != STATUS_OK) { printf("Initialize failed\n%s\n", OpenNI::getExtendedError()); return 1; } Device device; rc = device.open(ANY_DEVICE); if (rc != STATUS_OK) { printf("Couldn't open device\n%s\n", OpenNI::getExtendedError()); return 2; } VideoStream depth; if (device.getSensorInfo(SENSOR_DEPTH) != NULL) { rc = depth.create(device, SENSOR_DEPTH); if (rc != STATUS_OK) { printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError()); return 3; } } rc = depth.start(); if (rc != STATUS_OK) { printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError()); return 4; } VideoFrameRef frame; while (!wasKeyboardHit()) { int changedStreamDummy; VideoStream* pStream = &depth; rc = OpenNI::waitForAnyStream(&pStream, 1, &changedStreamDummy, SAMPLE_READ_WAIT_TIMEOUT); if (rc != STATUS_OK) { printf("Wait failed! 
(timeout is %d ms)\n%s\n", SAMPLE_READ_WAIT_TIMEOUT, OpenNI::getExtendedError()); continue; } rc = depth.readFrame(&frame); if (rc != STATUS_OK) { printf("Read failed!\n%s\n", OpenNI::getExtendedError()); continue; } if (frame.getVideoMode().getPixelFormat() != PIXEL_FORMAT_DEPTH_1_MM && frame.getVideoMode().getPixelFormat() != PIXEL_FORMAT_DEPTH_100_UM) { printf("Unexpected frame format\n"); continue; } DepthPixel* pDepth = (DepthPixel*)frame.getData(); int middleIndex = (frame.getHeight()+1)*frame.getWidth()/2; printf("[%08llu] %8d\n", (long long)frame.getTimestamp(), pDepth[middleIndex]); } depth.stop(); depth.destroy(); device.close(); OpenNI::shutdown(); return 0; } OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleRead/SimpleRead.vcxproj0000600000175000017500000002217612240433507027475 0ustar jsprickejspricke Debug Win32 Debug x64 Release Win32 Release x64 {BDA3BF24-550A-4BF9-83E5-7B56134EDD40} SimpleRead Application true MultiByte Application true MultiByte Application false true MultiByte Application false true MultiByte $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ Disabled ..\Common;..\..\Include;%(AdditionalIncludeDirectories) _WINDLL;%(PreprocessorDefinitions) Level4 true true true OpenNI2.lib $(OutDir) true Disabled ..\Common;..\..\Include;%(AdditionalIncludeDirectories) _WINDLL;%(PreprocessorDefinitions) Level4 true true true OpenNI2.lib $(OutDir) true Level4 MaxSpeed true ..\Common;..\..\Include;%(AdditionalIncludeDirectories) true true AnySuitable Speed true true false StreamingSIMDExtensions2 
Fast true true true OpenNI2.lib $(OutDir) true Level4 MaxSpeed true ..\Common;..\..\Include;%(AdditionalIncludeDirectories) true true AnySuitable Speed true true false StreamingSIMDExtensions2 Fast true true true OpenNI2.lib $(OutDir) true OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleRead/Makefile0000600000175000017500000000041612240433507025464 0ustar jsprickejsprickeinclude ../../ThirdParty/PSCommon/BuildSystem/CommonDefs.mak BIN_DIR = ../../Bin INC_DIRS = \ ../../Include \ ../Common SRC_FILES = *.cpp USED_LIBS += OpenNI2 EXE_NAME = SimpleRead CFLAGS += -Wall include ../../ThirdParty/PSCommon/BuildSystem/CommonCppMakefile OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MultipleStreamRead/0000700000175000017500000000000012240433507025537 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MultipleStreamRead/Android.mk0000600000175000017500000000211612240433507027452 0ustar jsprickejspricke# OpenNI 2.x Android makefile. # Copyright (C) 2012 PrimeSense Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) # Sources MY_SRC_FILES := \ $(LOCAL_PATH)/*.cpp MY_SRC_FILE_EXPANDED := $(wildcard $(MY_SRC_FILES)) LOCAL_SRC_FILES := $(MY_SRC_FILE_EXPANDED:$(LOCAL_PATH)/%=%) # C/CPP Flags LOCAL_CFLAGS += $(OPENNI2_CFLAGS) # Includes LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/../../Include \ $(LOCAL_PATH)/../Common # Dependencies LOCAL_SHARED_LIBRARIES := libOpenNI2 # Output LOCAL_MODULE := MultipleStreamRead include $(BUILD_EXECUTABLE) ././@LongLink0000644000000000000000000000014700000000000011605 Lustar rootrootOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MultipleStreamRead/MultipleStreamRead.vcxprojOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MultipleStreamRead/MultipleStreamRead.vcxpr0000600000175000017500000002220612240433507032372 0ustar jsprickejspricke Debug Win32 Debug x64 Release Win32 Release x64 {920D08AC-452C-4326-BC6E-86FE65848587} MultipleStreamRead Application true MultiByte Application true MultiByte Application false true MultiByte Application false true MultiByte $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ Disabled ..\..\Include;..\Common;%(AdditionalIncludeDirectories) _WINDLL;%(PreprocessorDefinitions) Level4 true true true OpenNI2.lib $(OutDir) true Disabled ..\..\Include;..\Common;%(AdditionalIncludeDirectories) _WINDLL;%(PreprocessorDefinitions) Level4 true true true OpenNI2.lib $(OutDir) true Level4 MaxSpeed true ..\..\Include;..\Common;%(AdditionalIncludeDirectories) true true AnySuitable Speed true true false StreamingSIMDExtensions2 Fast 
true true true OpenNI2.lib $(OutDir) true Level4 MaxSpeed true ..\..\Include;..\Common;%(AdditionalIncludeDirectories) true true AnySuitable Speed true true false StreamingSIMDExtensions2 Fast true true true OpenNI2.lib $(OutDir) true OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MultipleStreamRead/main.cpp0000600000175000017500000001011712240433507027171 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #include #include #include "OniSampleUtilities.h" #define SAMPLE_READ_WAIT_TIMEOUT 2000 //2000ms using namespace openni; void analyzeFrame(const VideoFrameRef& frame) { DepthPixel* pDepth; RGB888Pixel* pColor; int middleIndex = (frame.getHeight()+1)*frame.getWidth()/2; switch (frame.getVideoMode().getPixelFormat()) { case PIXEL_FORMAT_DEPTH_1_MM: case PIXEL_FORMAT_DEPTH_100_UM: pDepth = (DepthPixel*)frame.getData(); printf("[%08llu] %8d\n", (long long)frame.getTimestamp(), pDepth[middleIndex]); break; case PIXEL_FORMAT_RGB888: pColor = (RGB888Pixel*)frame.getData(); printf("[%08llu] 0x%02x%02x%02x\n", (long long)frame.getTimestamp(), pColor[middleIndex].r&0xff, pColor[middleIndex].g&0xff, pColor[middleIndex].b&0xff); break; default: printf("Unknown format\n"); } } int main() { Status rc = OpenNI::initialize(); if (rc != STATUS_OK) { printf("Initialize failed\n%s\n", OpenNI::getExtendedError()); return 1; } Device device; rc = device.open(ANY_DEVICE); if (rc != STATUS_OK) { printf("Couldn't open device\n%s\n", OpenNI::getExtendedError()); return 2; } VideoStream depth, color; if (device.getSensorInfo(SENSOR_DEPTH) != NULL) { rc = depth.create(device, SENSOR_DEPTH); if (rc == STATUS_OK) { rc = depth.start(); if (rc != STATUS_OK) { printf("Couldn't start the color stream\n%s\n", OpenNI::getExtendedError()); } } else { printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError()); } } if (device.getSensorInfo(SENSOR_COLOR) != NULL) { rc = color.create(device, SENSOR_COLOR); if (rc == STATUS_OK) { rc = color.start(); if (rc != STATUS_OK) { printf("Couldn't start the color stream\n%s\n", OpenNI::getExtendedError()); } } else { printf("Couldn't create color stream\n%s\n", OpenNI::getExtendedError()); } } VideoFrameRef frame; VideoStream* streams[] = {&depth, &color}; while (!wasKeyboardHit()) { int readyStream = -1; rc = OpenNI::waitForAnyStream(streams, 2, &readyStream, 
SAMPLE_READ_WAIT_TIMEOUT); if (rc != STATUS_OK) { printf("Wait failed! (timeout is %d ms)\n%s\n", SAMPLE_READ_WAIT_TIMEOUT, OpenNI::getExtendedError()); break; } switch (readyStream) { case 0: // Depth depth.readFrame(&frame); break; case 1: // Color color.readFrame(&frame); break; default: printf("Unxpected stream\n"); } analyzeFrame(frame); } depth.stop(); color.stop(); depth.destroy(); color.destroy(); device.close(); OpenNI::shutdown(); return 0; } OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MultipleStreamRead/Makefile0000600000175000017500000000042612240433507027203 0ustar jsprickejsprickeinclude ../../ThirdParty/PSCommon/BuildSystem/CommonDefs.mak BIN_DIR = ../../Bin INC_DIRS = \ ../../Include \ ../Common SRC_FILES = *.cpp USED_LIBS += OpenNI2 EXE_NAME = MultipleStreamRead CFLAGS += -Wall include ../../ThirdParty/PSCommon/BuildSystem/CommonCppMakefile OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/Common/0000700000175000017500000000000012240433507023224 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/Common/OniSampleUtilities.h0000600000175000017500000000673312240433507027173 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _ONI_SAMPLE_UTILITIES_H_ #define _ONI_SAMPLE_UTILITIES_H_ #include #include #ifdef WIN32 #include int wasKeyboardHit() { return (int)_kbhit(); } #else // linux #include #include #include #include int wasKeyboardHit() { struct termios oldt, newt; int ch; int oldf; // don't echo and don't wait for ENTER tcgetattr(STDIN_FILENO, &oldt); newt = oldt; newt.c_lflag &= ~(ICANON | ECHO); tcsetattr(STDIN_FILENO, TCSANOW, &newt); oldf = fcntl(STDIN_FILENO, F_GETFL, 0); // make it non-blocking (so we can check without waiting) if (0 != fcntl(STDIN_FILENO, F_SETFL, oldf | O_NONBLOCK)) { return 0; } ch = getchar(); tcsetattr(STDIN_FILENO, TCSANOW, &oldt); if (0 != fcntl(STDIN_FILENO, F_SETFL, oldf)) { return 0; } if(ch != EOF) { ungetc(ch, stdin); return 1; } return 0; } void Sleep(int millisecs) { usleep(millisecs * 1000); } #endif // WIN32 void calculateHistogram(float* pHistogram, int histogramSize, const openni::VideoFrameRef& frame) { const openni::DepthPixel* pDepth = (const openni::DepthPixel*)frame.getData(); // Calculate the accumulative histogram (the yellow display...) 
memset(pHistogram, 0, histogramSize*sizeof(float)); int restOfRow = frame.getStrideInBytes() / sizeof(openni::DepthPixel) - frame.getWidth(); int height = frame.getHeight(); int width = frame.getWidth(); unsigned int nNumberOfPoints = 0; for (int y = 0; y < height; ++y) { for (int x = 0; x < width; ++x, ++pDepth) { if (*pDepth != 0) { pHistogram[*pDepth]++; nNumberOfPoints++; } } pDepth += restOfRow; } for (int nIndex=1; nIndex Debug Win32 Debug x64 Release Win32 Release x64 {BDA3BF24-550A-4BF9-83E5-0006134EED40} ClosestPointViewer Application true MultiByte Application true MultiByte Application false true MultiByte Application false true MultiByte $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ Disabled ..\..\Include;..\..\ThirdParty\GL;..\;..\Common;%(AdditionalIncludeDirectories) _WINDLL;%(PreprocessorDefinitions) Level4 true true true glut32.lib;OpenNI2.lib;MWClosestPoint.lib $(OutDir);..\..\ThirdParty\GL true ..\..\Include Disabled ..\..\Include;..\..\ThirdParty\GL;..\;..\Common;%(AdditionalIncludeDirectories) _WINDLL;%(PreprocessorDefinitions) Level4 true true true glut64.lib;OpenNI2.lib;MWClosestPoint.lib $(OutDir);..\..\ThirdParty\GL true ..\..\Include Level4 MaxSpeed true ..\..\Include;..\..\ThirdParty\GL;..\;..\Common;%(AdditionalIncludeDirectories) true AnySuitable Speed true true false StreamingSIMDExtensions2 Fast true true true true glut32.lib;OpenNI2.lib;MWClosestPoint.lib $(OutDir);..\..\ThirdParty\GL true ..\..\Include Level4 MaxSpeed true 
..\..\Include;..\..\ThirdParty\GL;..\;..\Common;%(AdditionalIncludeDirectories) true AnySuitable Speed true true false StreamingSIMDExtensions2 Fast true true true true glut64.lib;OpenNI2.lib;MWClosestPoint.lib $(OutDir);..\..\ThirdParty\GL true ..\..\Include OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/ClosestPointViewer/Viewer.h0000600000175000017500000000667312240433507027234 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #ifndef _ONI_SAMPLE_VIEWER_H_ #define _ONI_SAMPLE_VIEWER_H_ #include "MWClosestPoint/MWClosestPoint.h" #include #define MAX_DEPTH 10000 class MyMwListener : public closest_point::ClosestPoint::Listener { public: MyMwListener() : m_ready(false) {} virtual ~MyMwListener() {} void readyForNextData(closest_point::ClosestPoint* pClosestPoint) { openni::Status rc = pClosestPoint->getNextData(m_closest, m_frame); if (rc == openni::STATUS_OK) { // printf("%d, %d, %d\n", m_closest.X, m_closest.Y, m_closest.Z); } else { printf("Update failed\n"); } m_ready = true; } const openni::VideoFrameRef& getFrame() {return m_frame;} const closest_point::IntPoint3D& getClosestPoint() {return m_closest;} bool isAvailable() const {return m_ready;} void setUnavailable() {m_ready = false;} private: openni::VideoFrameRef m_frame; closest_point::IntPoint3D m_closest; bool m_ready; }; class SampleViewer { public: SampleViewer(const char* strSampleName, const char* deviceUri); virtual ~SampleViewer(); virtual openni::Status init(int argc, char **argv); virtual openni::Status run(); //Does not return protected: virtual void display(); virtual void displayPostDraw(){}; // Overload to draw over the screen image virtual void onKey(unsigned char key, int x, int y); virtual openni::Status initOpenGL(int argc, char **argv); void initOpenGLHooks(); void finalize(); private: SampleViewer(const SampleViewer&); SampleViewer& operator=(SampleViewer&); static SampleViewer* ms_self; static void glutIdle(); static void glutDisplay(); static void glutKeyboard(unsigned char key, int x, int y); float m_pDepthHist[MAX_DEPTH]; char m_strSampleName[ONI_MAX_STR]; openni::RGB888Pixel*m_pTexMap; unsigned int m_nTexMapX; unsigned int m_nTexMapY; closest_point::ClosestPoint* m_pClosestPoint; MyMwListener* m_pClosestPointListener; }; #endif // _ONI_SAMPLE_VIEWER_H_ 
OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/ClosestPointViewer/Viewer.cpp0000600000175000017500000001645412240433507027565 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * *****************************************************************************/ #include "Viewer.h" #if (ONI_PLATFORM == ONI_PLATFORM_MACOSX) #include #else #include #endif #include "OniSampleUtilities.h" #define GL_WIN_SIZE_X 1280 #define GL_WIN_SIZE_Y 1024 #define TEXTURE_SIZE 512 #define DEFAULT_DISPLAY_MODE DISPLAY_MODE_DEPTH #define MIN_NUM_CHUNKS(data_size, chunk_size) ((((data_size)-1) / (chunk_size) + 1)) #define MIN_CHUNKS_SIZE(data_size, chunk_size) (MIN_NUM_CHUNKS(data_size, chunk_size) * (chunk_size)) SampleViewer* SampleViewer::ms_self = NULL; void SampleViewer::glutIdle() { glutPostRedisplay(); } void SampleViewer::glutDisplay() { SampleViewer::ms_self->display(); } void SampleViewer::glutKeyboard(unsigned char key, int x, int y) { SampleViewer::ms_self->onKey(key, x, y); } SampleViewer::SampleViewer(const char* strSampleName, const char* deviceUri) : m_pClosestPoint(NULL), m_pClosestPointListener(NULL) { ms_self = this; strncpy(m_strSampleName, strSampleName, ONI_MAX_STR); m_pClosestPoint = new closest_point::ClosestPoint(deviceUri); } 
SampleViewer::~SampleViewer() { finalize(); delete[] m_pTexMap; ms_self = NULL; } void SampleViewer::finalize() { if (m_pClosestPoint != NULL) { m_pClosestPoint->resetListener(); delete m_pClosestPoint; m_pClosestPoint = NULL; } if (m_pClosestPointListener != NULL) { delete m_pClosestPointListener; m_pClosestPointListener = NULL; } } openni::Status SampleViewer::init(int argc, char **argv) { m_pTexMap = NULL; if (!m_pClosestPoint->isValid()) { return openni::STATUS_ERROR; } m_pClosestPointListener = new MyMwListener; m_pClosestPoint->setListener(*m_pClosestPointListener); return initOpenGL(argc, argv); } openni::Status SampleViewer::run() //Does not return { glutMainLoop(); return openni::STATUS_OK; } void SampleViewer::display() { if (!m_pClosestPointListener->isAvailable()) { return; } openni::VideoFrameRef depthFrame = m_pClosestPointListener->getFrame(); const closest_point::IntPoint3D& closest = m_pClosestPointListener->getClosestPoint(); m_pClosestPointListener->setUnavailable(); if (m_pTexMap == NULL) { // Texture map init m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getWidth(), TEXTURE_SIZE); m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getHeight(), TEXTURE_SIZE); m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY]; } glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glMatrixMode(GL_PROJECTION); glPushMatrix(); glLoadIdentity(); glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0); if (depthFrame.isValid()) { calculateHistogram(m_pDepthHist, MAX_DEPTH, depthFrame); } memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel)); float factor[3] = {1, 1, 1}; // check if we need to draw depth frame to texture if (depthFrame.isValid()) { const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData(); openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX; int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel); int width = depthFrame.getWidth(); int height = 
depthFrame.getHeight(); for (int y = 0; y < height; ++y) { const openni::DepthPixel* pDepth = pDepthRow; openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX(); for (int x = 0; x < width; ++x, ++pDepth, ++pTex) { if (*pDepth != 0) { if (*pDepth == closest.Z) { factor[0] = factor[1] = 0; } // // Add debug lines - every 10cm // else if ((*pDepth / 10) % 10 == 0) // { // factor[0] = factor[2] = 0; // } int nHistValue = m_pDepthHist[*pDepth]; pTex->r = nHistValue*factor[0]; pTex->g = nHistValue*factor[1]; pTex->b = nHistValue*factor[2]; factor[0] = factor[1] = factor[2] = 1; } } pDepthRow += rowSize; pTexRow += m_nTexMapX; } } glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap); // Display the OpenGL texture map glColor4f(1,1,1,1); glEnable(GL_TEXTURE_2D); glBegin(GL_QUADS); int nXRes = depthFrame.getWidth(); int nYRes = depthFrame.getHeight(); // upper left glTexCoord2f(0, 0); glVertex2f(0, 0); // upper right glTexCoord2f((float)nXRes/(float)m_nTexMapX, 0); glVertex2f(GL_WIN_SIZE_X, 0); // bottom right glTexCoord2f((float)nXRes/(float)m_nTexMapX, (float)nYRes/(float)m_nTexMapY); glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y); // bottom left glTexCoord2f(0, (float)nYRes/(float)m_nTexMapY); glVertex2f(0, GL_WIN_SIZE_Y); glEnd(); glDisable(GL_TEXTURE_2D); float closestCoordinates[3] = {closest.X*GL_WIN_SIZE_X/float(depthFrame.getWidth()), closest.Y*GL_WIN_SIZE_Y/float(depthFrame.getHeight()), 0}; glVertexPointer(3, GL_FLOAT, 0, closestCoordinates); glColor3f(1.f, 0.f, 0.f); glPointSize(10); glDrawArrays(GL_POINTS, 0, 1); glFlush(); // Swap the OpenGL display buffers glutSwapBuffers(); } void SampleViewer::onKey(unsigned char key, int /*x*/, int /*y*/) { switch (key) { case 27: finalize(); exit (1); } } 
openni::Status SampleViewer::initOpenGL(int argc, char **argv) { glutInit(&argc, argv); glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH); glutInitWindowSize(GL_WIN_SIZE_X, GL_WIN_SIZE_Y); glutCreateWindow (m_strSampleName); // glutFullScreen(); glutSetCursor(GLUT_CURSOR_NONE); initOpenGLHooks(); glDisable(GL_DEPTH_TEST); glEnable(GL_TEXTURE_2D); glEnableClientState(GL_VERTEX_ARRAY); glDisableClientState(GL_COLOR_ARRAY); return openni::STATUS_OK; } void SampleViewer::initOpenGLHooks() { glutKeyboardFunc(glutKeyboard); glutDisplayFunc(glutDisplay); glutIdleFunc(glutIdle); } OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/ClosestPointViewer/main.cpp0000600000175000017500000000362012240433507027237 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #include "Viewer.h" int main(int argc, char** argv) { openni::Status rc = openni::STATUS_OK; const char* deviceURI = openni::ANY_DEVICE; if (argc > 1) { deviceURI = argv[1]; } SampleViewer sampleViewer("ClosestPoint Viewer", deviceURI); rc = sampleViewer.init(argc, argv); if (rc != openni::STATUS_OK) { return 1; } sampleViewer.run(); }OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/ClosestPointViewer/Makefile0000600000175000017500000000075112240433507027251 0ustar jsprickejsprickeinclude ../../ThirdParty/PSCommon/BuildSystem/CommonDefs.mak BIN_DIR = ../../Bin INC_DIRS = \ ../../Include \ ../ \ ../../ThirdParty/GL/ \ ../Common SRC_FILES = *.cpp ifeq ("$(OSTYPE)","Darwin") CFLAGS += -DMACOS LDFLAGS += -framework OpenGL -framework GLUT else CFLAGS += -DUNIX -DGLX_GLXEXT_LEGACY USED_LIBS += glut GL endif USED_LIBS += OpenNI2 MWClosestPoint EXE_NAME = ClosestPointViewer CFLAGS += -Wall include ../../ThirdParty/PSCommon/BuildSystem/CommonCppMakefile OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MWClosestPointApp/0000700000175000017500000000000012240433507025327 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MWClosestPointApp/MWClosestPointApp.vcxproj0000600000175000017500000002303112240433507032300 0ustar jsprickejspricke Debug Win32 Debug x64 Release Win32 Release x64 {A0DB36C9-CE6C-4F61-933C-E53A630D3C7E} MWClosestPointApp Application true MultiByte Application true MultiByte Application false true MultiByte Application false true MultiByte $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ 
$(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ Level3 Disabled ..\MWClosestPoint;..\..\Include true true true $(OutDir) MWClosestPoint.lib;OpenNI2.lib;%(AdditionalDependencies) true ..\..\Include Level3 Disabled ..\MWClosestPoint;..\..\Include true true true $(OutDir) MWClosestPoint.lib;OpenNI2.lib;%(AdditionalDependencies) true ..\..\Include Level3 MaxSpeed true ..\MWClosestPoint;..\..\Include true AnySuitable Speed true true false StreamingSIMDExtensions2 Fast true true true true MWClosestPoint.lib;OpenNI2.lib;%(AdditionalDependencies) $(OutDir) true ..\..\Include Level3 MaxSpeed true ..\MWClosestPoint;..\..\Include true AnySuitable Speed true true false StreamingSIMDExtensions2 Fast true true true true MWClosestPoint.lib;OpenNI2.lib;%(AdditionalDependencies) $(OutDir) true ..\..\Include OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MWClosestPointApp/main.cpp0000600000175000017500000000657612240433507026777 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #include #include #ifdef WIN32 #include int wasKeyboardHit() { return (int)_kbhit(); } #else // linux #include #include #include #include int wasKeyboardHit() { struct termios oldt, newt; int ch; int oldf; // don't echo and don't wait for ENTER tcgetattr(STDIN_FILENO, &oldt); newt = oldt; newt.c_lflag &= ~(ICANON | ECHO); tcsetattr(STDIN_FILENO, TCSANOW, &newt); oldf = fcntl(STDIN_FILENO, F_GETFL, 0); // make it non-blocking (so we can check without waiting) if (0 != fcntl(STDIN_FILENO, F_SETFL, oldf | O_NONBLOCK)) { return 0; } ch = getchar(); tcsetattr(STDIN_FILENO, TCSANOW, &oldt); if (0 != fcntl(STDIN_FILENO, F_SETFL, oldf)) { return 0; } if(ch != EOF) { ungetc(ch, stdin); return 1; } return 0; } void Sleep(int ms) { usleep(ms*1000); } #endif // WIN32 class MyMwListener : public closest_point::ClosestPoint::Listener { public: void readyForNextData(closest_point::ClosestPoint* pClosestPoint) { openni::VideoFrameRef frame; closest_point::IntPoint3D closest; openni::Status rc = pClosestPoint->getNextData(closest, frame); if (rc == openni::STATUS_OK) { printf("%d, %d, %d\n", closest.X, closest.Y, closest.Z); } else { printf("Update failed\n"); } } }; int main() { closest_point::ClosestPoint closestPoint; if (!closestPoint.isValid()) { printf("ClosestPoint: error in initialization\n"); return 1; } MyMwListener myListener; closestPoint.setListener(myListener); while (!wasKeyboardHit()) { Sleep(1000); } closestPoint.resetListener(); return 0; } ././@LongLink0000644000000000000000000000015500000000000011604 Lustar rootrootOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MWClosestPointApp/MWClosestPointApp.vcxproj.filtersOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MWClosestPointApp/MWClosestPointApp.vcxproj0000600000175000017500000000164412240433507032306 0ustar jsprickejspricke {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx 
{93995380-89BD-4b04-88EB-625FBE52EBFB} h;hpp;hxx;hm;inl;inc;xsd {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms Source Files OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/MWClosestPointApp/Makefile0000600000175000017500000000047112240433507026773 0ustar jsprickejsprickeinclude ../../ThirdParty/PSCommon/BuildSystem/CommonDefs.mak BIN_DIR = ../../Bin INC_DIRS = \ ../../Include \ ../MWClosestPoint \ ../Common SRC_FILES = *.cpp USED_LIBS += OpenNI2 MWClosestPoint EXE_NAME = MWClosestPointApp CFLAGS += -Wall include ../../ThirdParty/PSCommon/BuildSystem/CommonCppMakefile OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/Android.mk0000600000175000017500000000136212240433507023711 0ustar jsprickejspricke# OpenNI 2.x Android makefile. # Copyright (C) 2012 PrimeSense Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ifdef OPENNI2_ANDROID_OS_BUILD $(info OpenNI2: Skipping samples in OS build...) 
else include $(call all-subdir-makefiles) endif OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/EventBasedRead/0000700000175000017500000000000012240433507024610 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/EventBasedRead/EventBasedRead.vcxproj0000600000175000017500000002220212240433507031041 0ustar jsprickejspricke Debug Win32 Debug x64 Release Win32 Release x64 {BDA3BF24-5555-4BF9-83E5-7B56134EDD40} EventBasedRead Application true MultiByte Application true MultiByte Application false true MultiByte Application false true MultiByte $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ $(SolutionDir)Bin\$(Platform)-$(Configuration)\ $(SolutionDir)Bin\Intermediate\$(Platform)-$(Configuration)\$(ProjectName)\ Disabled ..\..\Include;..\Common;%(AdditionalIncludeDirectories) _WINDLL;%(PreprocessorDefinitions) Level4 true true true OpenNI2.lib $(OutDir) true Disabled ..\..\Include;..\Common;%(AdditionalIncludeDirectories) _WINDLL;%(PreprocessorDefinitions) Level4 true true true OpenNI2.lib $(OutDir) true Level4 MaxSpeed true ..\..\Include;..\Common;%(AdditionalIncludeDirectories) true true AnySuitable Speed true true false StreamingSIMDExtensions2 Fast true true true OpenNI2.lib $(OutDir) true Level4 MaxSpeed true ..\..\Include;..\Common;%(AdditionalIncludeDirectories) true true AnySuitable Speed true true false StreamingSIMDExtensions2 Fast true true true OpenNI2.lib $(OutDir) true OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/EventBasedRead/Android.mk0000600000175000017500000000211212240433507026517 0ustar jsprickejspricke# OpenNI 2.x Android makefile. # Copyright (C) 2012 PrimeSense Ltd. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) # Sources MY_SRC_FILES := \ $(LOCAL_PATH)/*.cpp MY_SRC_FILE_EXPANDED := $(wildcard $(MY_SRC_FILES)) LOCAL_SRC_FILES := $(MY_SRC_FILE_EXPANDED:$(LOCAL_PATH)/%=%) # C/CPP Flags LOCAL_CFLAGS += $(OPENNI2_CFLAGS) # Includes LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/../../Include \ $(LOCAL_PATH)/../Common # Dependencies LOCAL_SHARED_LIBRARIES := libOpenNI2 # Output LOCAL_MODULE := EventBasedRead include $(BUILD_EXECUTABLE) OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/EventBasedRead/main.cpp0000600000175000017500000001077312240433507026252 0ustar jsprickejspricke/***************************************************************************** * * * OpenNI 2.x Alpha * * Copyright (C) 2012 PrimeSense Ltd. * * * * This file is part of OpenNI. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. 
* * * *****************************************************************************/ #include #include "OpenNI.h" #include "OniSampleUtilities.h" using namespace openni; void analyzeFrame(const VideoFrameRef& frame) { DepthPixel* pDepth; RGB888Pixel* pColor; int middleIndex = (frame.getHeight()+1)*frame.getWidth()/2; switch (frame.getVideoMode().getPixelFormat()) { case PIXEL_FORMAT_DEPTH_1_MM: case PIXEL_FORMAT_DEPTH_100_UM: pDepth = (DepthPixel*)frame.getData(); printf("[%08llu] %8d\n", (long long)frame.getTimestamp(), pDepth[middleIndex]); break; case PIXEL_FORMAT_RGB888: pColor = (RGB888Pixel*)frame.getData(); printf("[%08llu] 0x%02x%02x%02x\n", (long long)frame.getTimestamp(), pColor[middleIndex].r&0xff, pColor[middleIndex].g&0xff, pColor[middleIndex].b&0xff); break; default: printf("Unknown format\n"); } } class PrintCallback : public VideoStream::NewFrameListener { public: void onNewFrame(VideoStream& stream) { stream.readFrame(&m_frame); analyzeFrame(m_frame); } private: VideoFrameRef m_frame; }; class OpenNIDeviceListener : public OpenNI::DeviceConnectedListener, public OpenNI::DeviceDisconnectedListener, public OpenNI::DeviceStateChangedListener { public: virtual void onDeviceStateChanged(const DeviceInfo* pInfo, DeviceState state) { printf("Device \"%s\" error state changed to %d\n", pInfo->getUri(), state); } virtual void onDeviceConnected(const DeviceInfo* pInfo) { printf("Device \"%s\" connected\n", pInfo->getUri()); } virtual void onDeviceDisconnected(const DeviceInfo* pInfo) { printf("Device \"%s\" disconnected\n", pInfo->getUri()); } }; int main() { Status rc = OpenNI::initialize(); if (rc != STATUS_OK) { printf("Initialize failed\n%s\n", OpenNI::getExtendedError()); return 1; } OpenNIDeviceListener devicePrinter; OpenNI::addDeviceConnectedListener(&devicePrinter); OpenNI::addDeviceDisconnectedListener(&devicePrinter); OpenNI::addDeviceStateChangedListener(&devicePrinter); openni::Array deviceList; openni::OpenNI::enumerateDevices(&deviceList); for 
(int i = 0; i < deviceList.getSize(); ++i) { printf("Device \"%s\" already connected\n", deviceList[i].getUri()); } Device device; rc = device.open(ANY_DEVICE); if (rc != STATUS_OK) { printf("Couldn't open device\n%s\n", OpenNI::getExtendedError()); return 2; } VideoStream depth; if (device.getSensorInfo(SENSOR_DEPTH) != NULL) { rc = depth.create(device, SENSOR_DEPTH); if (rc != STATUS_OK) { printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError()); } } rc = depth.start(); if (rc != STATUS_OK) { printf("Couldn't start the depth stream\n%s\n", OpenNI::getExtendedError()); } PrintCallback depthPrinter; // Register to new frame depth.addNewFrameListener(&depthPrinter); // Wait while we're getting frames through the printer while (!wasKeyboardHit()) { Sleep(100); } depth.removeNewFrameListener(&depthPrinter); depth.stop(); depth.destroy(); device.close(); OpenNI::shutdown(); return 0; } OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/EventBasedRead/Makefile0000600000175000017500000000042212240433507026250 0ustar jsprickejsprickeinclude ../../ThirdParty/PSCommon/BuildSystem/CommonDefs.mak BIN_DIR = ../../Bin INC_DIRS = \ ../../Include \ ../Common SRC_FILES = *.cpp USED_LIBS += OpenNI2 EXE_NAME = EventBasedRead CFLAGS += -Wall include ../../ThirdParty/PSCommon/BuildSystem/CommonCppMakefile OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleViewer.java/0000700000175000017500000000000012240433507025327 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleViewer.java/src/0000700000175000017500000000000012240433507026116 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleViewer.java/src/org/0000700000175000017500000000000012240433507026705 5ustar jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleViewer.java/src/org/openni/0000700000175000017500000000000012240433507030175 5ustar 
jsprickejsprickeOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleViewer.java/src/org/openni/Samples/0000700000175000017500000000000012240433507031601 5ustar jsprickejspricke././@LongLink0000644000000000000000000000016000000000000011600 Lustar rootrootOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleViewer.java/src/org/openni/Samples/SimpleViewer/OpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleViewer.java/src/org/openni/Samples/Si0000700000175000017500000000000012240433507032075 5ustar jsprickejspricke././@LongLink0000644000000000000000000000021400000000000011600 Lustar rootrootOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleViewer.java/src/org/openni/Samples/SimpleViewer/SimpleViewerApplication.javaOpenNI2-7bef8f639e4d64a85a794e85fe3049dbb2acd32e/Samples/SimpleViewer.java/src/org/openni/Samples/Si0000700000175000017500000001442312240433507032106 0ustar jsprickejsprickepackage org.openni.Samples.SimpleViewer; import org.openni.*; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.util.ArrayList; import java.util.List; import javax.swing.JComboBox; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.JOptionPane; public class SimpleViewerApplication implements ItemListener { private JFrame mFrame; private JPanel mPanel; private SimpleViewer mViewer; private boolean mShouldRun = true; private Device mDevice; private VideoStream mVideoStream; private ArrayList mDeviceSensors; private ArrayList mSupportedModes; private JComboBox mComboBoxStreams; private JComboBox mComboBoxVideoModes; public SimpleViewerApplication(Device device) { mDevice = device; mFrame = new JFrame("OpenNI Simple Viewer"); mPanel = new JPanel(); mViewer = new SimpleViewer(); // register to key events mFrame.addKeyListener(new KeyListener() { @Override 
public void keyTyped(KeyEvent arg0) {} @Override public void keyReleased(KeyEvent arg0) {} @Override public void keyPressed(KeyEvent arg0) { if (arg0.getKeyCode() == KeyEvent.VK_ESCAPE) { mShouldRun = false; } } }); // register to closing event mFrame.addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { mShouldRun = false; } }); mComboBoxStreams = new JComboBox(); mComboBoxVideoModes = new JComboBox(); mComboBoxStreams.addItem(""); mDeviceSensors = new ArrayList(); if (device.getSensorInfo(SensorType.COLOR) != null) { mDeviceSensors.add(SensorType.COLOR); mComboBoxStreams.addItem("Color"); } if (device.getSensorInfo(SensorType.DEPTH) != null) { mDeviceSensors.add(SensorType.DEPTH); mComboBoxStreams.addItem("Depth"); } mComboBoxStreams.addItemListener(this); mComboBoxVideoModes.addItemListener(this); mViewer.setSize(800,600); mPanel.add("West", mComboBoxStreams); mPanel.add("East", mComboBoxVideoModes); mFrame.add("North", mPanel); mFrame.add("Center", mViewer); mFrame.setSize(mViewer.getWidth() + 20, mViewer.getHeight() + 80); mFrame.setVisible(true); } @Override public void itemStateChanged(ItemEvent e) { if (e.getStateChange() == ItemEvent.DESELECTED) return; if (e.getSource() == mComboBoxStreams) { selectedStreamChanged(); } else if (e.getSource() == mComboBoxVideoModes) { selectedVideoModeChanged(); } } void selectedStreamChanged() { if (mVideoStream != null) { mVideoStream.stop(); mViewer.setStream(null); mVideoStream.destroy(); mVideoStream = null; } int sensorIndex = mComboBoxStreams.getSelectedIndex() - 1; if (sensorIndex == -1) { return; } SensorType type = mDeviceSensors.get(sensorIndex); mVideoStream = VideoStream.create(mDevice, type); List supportedModes = mVideoStream.getSensorInfo().getSupportedVideoModes(); mSupportedModes = new ArrayList(); // now only keeo the ones that our application supports for (VideoMode mode : supportedModes) { switch (mode.getPixelFormat()) { case DEPTH_1_MM: case DEPTH_100_UM: case 
SHIFT_9_2: case SHIFT_9_3: case RGB888: mSupportedModes.add(mode); break; } } // and add them to combo box mComboBoxVideoModes.removeAllItems(); mComboBoxVideoModes.addItem("