OpenNI introduction_第1页
OpenNI introduction_第2页
OpenNI introduction_第3页
OpenNI introduction_第4页
OpenNI introduction_第5页
已阅读5页,还剩7页未读 继续免费阅读

下载本文档

版权说明:本文档由用户提供并上传,收益归属内容提供方,若内容存在侵权,请进行举报或认领

文档简介

1、Open NI introduction 个人学习整理 OpenNI Data Structures Supported platforms #define ONI_PLATFORM_WIN32 1 #define ONI_PLATFORM_LINUX_X86 2 #define ONI_PLATFORM_LINUX_ARM 3 #define ONI_PLATFORM_MACOSX 4 #define ONI_PLATFORM_ANDROID_ARM 5 Callback definitions #define ONI_STDCALL __stdcall #define ONI_C_DECL

2、#define ONI_API_EXPORT __attribute__((visibility("default"))) #ifdef __cplusplus #define ONI_C extern "C" #define ONI_C_API_EXPORT ONI_C ONI_API_EXPORT #define ONI_C_API_IMPORT ONI_C ONI_API_IMPORT #define ONI_CPP_API_EXPORT ONI_API_EXPORT #define ONI_CPP_API_IMPORT ONI_API_IMPORT #else 就不用添加extern "C"了 typ

3、edef struct char uri[ONI_MAX_STR]; char vendor[ONI_MAX_STR]; char name[ONI_MAX_STR]; uint16_t usbVendorId; uint16_t usbProductId; OniDeviceInfo; typedef struct OniPixelFormat pixelFormat; int resolutionX; int resolutionY; int fps; OniVideoMode; typedef enum /* Depth */ ONI_PIXEL_FORMAT_DEPTH_1_MM = 100, ONI_PI

4、XEL_FORMAT_DEPTH_100_UM = 101, ONI_PIXEL_FORMAT_SHIFT_9_2 = 102, ONI_PIXEL_FORMAT_SHIFT_9_3 = 103, / Color ONI_PIXEL_FORMAT_RGB888 = 200, ONI_PIXEL_FORMAT_YUV422 = 201, ONI_PIXEL_FORMAT_GRAY8 = 202, ONI_PIXEL_FORMAT_GRAY16 = 203, ONI_PIXEL_FORMAT_JPEG = 204, OniPixelFormat; typedef struct OniSensorT

5、ype sensorType; int numSupportedVideoModes; OniVideoMode *pSupportedVideoModes; OniSensorInfo; typedef enum ONI_SENSOR_IR = 1, ONI_SENSOR_COLOR = 2, ONI_SENSOR_DEPTH = 3, OniSensorType; typedef struct int dataSize; void* data; OniSensorType sensorType; uint64_t timestamp; int frameIndex; int width;i

6、nt height; OniVideoMode videoMode; OniBool croppingEnabled; int cropOriginX; int cropOriginY; int stride; OniFrame; typedef struct OniDeviceInfoCallback deviceConnected; OniDeviceInfoCallback deviceDisconnected; OniDeviceStateCallback deviceStateChanged; OniDeviceCallbacks; struct _OniDevice; typede

7、f _OniDevice* OniDeviceHandle; struct _OniStream; typedef _OniStream* OniStreamHandle; struct _OniRecorder; typedef _OniRecorder* OniRecorderHandle; OpenNI C interfaces General APIs OniStatus oniInitialize(int apiVersion); OniStatus oniGetDeviceList(OniDeviceInfo* pDevices, int* pNumDevices); OniSta

8、tus oniRegisterDeviceCallbacks(OniDeviceCallbacks* pCallbacks, void* pCookie, OniCallbackHandle* pHandle); OniStatus oniWaitForAnyStream(OniStreamHandle* pStreams, int numStreams, int* pStreamIndex, int timeout); Device APIs OniStatus oniDeviceOpen(const char* uri, OniDeviceHandle* pDevice); const O

9、niSensorInfo* oniDeviceGetSensorInfo(OniDeviceHandle device, OniSensorType sensorType); OniStatus oniDeviceGetInfo(OniDeviceHandle device, OniDeviceInfo* pInfo); OniStatus oniDeviceCreateStream(OniDeviceHandle device, OniSensorType sensorType, OniStreamHandle* pStream); OniStatus oniDeviceEnableDept

10、hColorSync(OniDeviceHandle device); 两者的采集时间差限 定在一定的范围内 OniStatus oniDeviceSetProperty(OniDeviceHandle device, int propertyId, const void* data, int dataSize); OniBool oniDeviceIsPropertySupported(OniDeviceHandle device, int propertyId); OniStatus oniDeviceInvoke(OniDeviceHandle device, int commandId

11、, const void* data, int dataSize); /* Invoke an internal functionality of the device. */ OniBool oniDeviceIsCommandSupported(OniDeviceHandle device, int commandId); OniBool oniDeviceIsImageRegistrationModeSupported(OniDeviceHandle device, OniImageRegistrationMode mode); /不同位置的camera图像叠加 OpenNI C int

12、erfaces2 Stream APIs const OniSensorInfo* oniStreamGetSensorInfo(OniStreamHandle stream); OniStatus oniStreamStart(OniStreamHandle stream); OniStatus oniStreamReadFrame(OniStreamHandle stream, OniFrame* pFrame); oniStreamRegisterNewFrameCallback(OniStreamHandle stream, OniNewFrameCallback handler, v

13、oid* pCookie, OniCallbackHandle* pHandle); OniStatus oniStreamSetProperty(OniStreamHandle stream, int propertyId, const void* data, int dataSize); OniBool oniStreamIsPropertySupported(OniStreamHandle stream, int propertyId); OniStatus oniStreamInvoke(OniStreamHandle stream, int commandId, const void

14、* data, int dataSize); OniBool oniStreamIsCommandSupported(OniStreamHandle stream, int commandId); void oniFrameAddRef(OniFrame* pFrame); /* Mark another user of the frame. */ Record Stream APIS: 录制到文件 OniStatus oniCreateRecorder(const char* fileName, OniRecorderHandle* pRecorder); OniStatus oniReco

15、rderAttachStream( OniRecorderHandle recorder, OniStreamHandle stream, OniBool allowLossyCompression); OniStatus oniRecorderStart(OniRecorderHandle recorder); 坐标转换 OniStatus oniCoordinateConverterDepthToWorld(OniStreamHandle depthStream, float depthX, float depthY, float depthZ, float* pWorldX, float

16、* pWorldY, float* pWorldZ); OniStatus oniCoordinateConverterWorldToDepth(OniStreamHandle depthStream, float worldX, float worldY, float worldZ, float* pDepthX, float* pDepthY, float* pDepthZ); OniStatus oniCoordinateConverterDepthToColor(OniStreamHandle depthStream, OniStreamHandle colorStream, int

17、depthX, int depthY, OniDepthPixel depthZ, int* pColorX, int* pColorY); OpenNI C++ interfaces class VideoMode : private OniVideoMode 对C 数据结构OniVideoMode的简单OO 封装 class SensorInfo: SensorInfo objects should be the only source of VideoMode objects for the vast majority of application programs. SensorInfo

18、objects should be obtained either from a Device or VideoStream, and in turn be used to provide available video modes for that sensor. class DeviceInfo: Applications will generally obtain objects of this type via calls to openni::OpenNI::enumerateDevices() or openni::Device::getDeviceInfo(), and then use the var

19、ious accessor functions to obtain specific information on that device. class VideoFrameRef: The VideoFrameRef class encapsulates a single video frame - the output of a VideoStream at a specific time.The data contained will be a single frame of color, IR, or depth video, along with associated meta dat

20、a. class VideoStream: 对C 数据结构OniVideoStream及其相关操作的简单OO封装 class Device The Device object abstracts a specific device; either a single hardware device, or a file device holding a recording from a hardware device. It offers the ability to connect to the device, and obtain information about its configur

21、ation and the data streams it can offer. class PlaybackControl class PlaybackControl: 对录制的文件的回放操作 class OpenNI class DeviceConnectedListener class DeviceDisconnectedListener class DeviceStateChangedListener static Status initialize() static void shutdown() static Version getVersion() static const ch

22、ar* getExtendedError() static void enumerateDevices(Array&lt;DeviceInfo&gt;* deviceInfoList) static Status waitForAnyStream(VideoStream** pStreams, int streamCount, int* pReadyStreamIndex, int timeout = FOREVER) static Status addDeviceConnectedListener(DeviceConnectedListener* pListener) static Status addDeviceDisconnecte

23、dListener(DeviceDisconnectedListener* pListener) static Status addDeviceStateChangedListener(DeviceStateChangedListener* pListener) 入口Class OpenNI C+ interfaces2 typedef struct int handCount; NiteHandData* pHands; int gestureCount; NiteGestureData* pGestures; OniFrame* pDepthFrame; unsigned long lon

24、g timestamp; int frameIndex; NiteHandTrackerFrame; typedef struct NiteHandId id; NitePoint3f position; int state; NiteHandData; typedef enum NITE_HAND_STATE_LOST = 0, NITE_HAND_STATE_NEW = 1, NITE_HAND_STATE_TRACKED = 2, NITE_HAND_STATE_TOUCHING_FOV = 4, NiteHandState; typedef struct NiteGestureType

25、 type; NitePoint3f currentPosition; int state; NiteGestureData; typedef enum NITE_GESTURE_STATE_NEW = 1, NITE_GESTURE_STATE_IN_PROGRESS = 2, NITE_GESTURE_STATE_COMPLETED = 4 NiteGestureState; typedef enum NITE_GESTURE_WAVE, NITE_GESTURE_CLICK, NITE_GESTURE_HAND_RAISE NiteGestureType; typedef struct

26、int userCount; NiteUserData* pUser; NiteUserMap userMap; OniFrame* pDepthFrame; unsigned long long timestamp; int frameIndex; float floorConfidence; NitePlane floor; NiteUserTrackerFrame; typedef struct NiteUserId id; NiteBoundingBox boundingBox; NitePoint3f centerOfMass; int state; NiteSkeleton ske

27、leton; NitePoseData poses[2]; NiteUserData; typedef enum NITE_USER_STATE_VISIBLE = 1, NITE_USER_STATE_NEW = 2, NITE_USER_STATE_LOST = 4, NiteUserState; typedef struct NiteSkeletonJoint joints[15]; NiteSkeletonState state; NiteSkeleton; typedef struct NitePoseType type; int state; NitePoseData; typedef e

28、num NITE_POSE_STATE_DETECTING = 1, NITE_POSE_STATE_IN_POSE = 2, NITE_POSE_STATE_ENTER = 4, NITE_POSE_STATE_EXIT = 8 NitePoseState; typedef enum NITE_POSE_PSI, NITE_POSE_CROSSED_HANDS NitePoseType; typedef struct NiteJointType jointType; NitePoint3f position; 真实的 float positionConfidence; NiteQuatern

29、ion orientation; float orientationConfidence; NiteSkeletonJoint; typedef enum NITE_SKELETON_NONE, NITE_SKELETON_CALIBRATING, NITE_SKELETON_TRACKED, NITE_SKELETON_CALIBRATION_xxx, NiteSkeletonState; typedef struct NiteUserId* pixels; int width; int height; int stride; NiteUserMap; typedef struct Nite

30、Point3f point; NitePoint3f normal; NitePlane; typedef enum HEAD, NECK, LEFT_SHOULDER, RIGHT_SHOULDER, LEFT_ELBOW, RIGHT_ELBOW, LEFT_HAND, RIGHT_HAND, TORSO, LEFT_HIP, RIGHT_HIP, LEFT_KNEE, RIGHT_KNEE, LEFT_FOOT, RIGHT_FOOT, NiteJointType; NiTE C interfaces General APIs NiteStatus niteInitialize(); -

31、NiteVersion niteGetVersion(); UserTracker APIs NiteStatus niteInitializeUserTracker(NiteUserTrackerHandle*); NiteStatus niteInitializeUserTrackerByDevice(void*, NiteUserTrackerHandle*); NiteStatus niteStartSkeletonTracking(NiteUserTrackerHandle, NiteUserId); bool niteIsSkeletonTracking(NiteUserTrack

32、erHandle, NiteUserId); NiteStatus niteSetSkeletonSmoothing(NiteUserTrackerHandle, float); NiteStatus niteGetSkeletonSmoothing(NiteUserTrackerHandle, float*); NiteStatus niteStartPoseDetection(NiteUserTrackerHandle, NiteUserId, NitePoseType); NiteStatus niteRegisterUserTrackerCallbacks(NiteUserTracke

33、rHandle, NiteUserTrackerCallbacks*, void*); NiteStatus niteReadUserTrackerFrame(NiteUserTrackerHandle, NiteUserTrackerFrame*); NiteStatus niteUserTrackerFrameAddRef(NiteUserTrackerHandle, NiteUserTrackerFrame*); NiteStatus niteUserTrackerFrameRelease(NiteUserTrackerHandle, NiteUserTrackerFrame*); Ha

34、ndTracker APIs NiteStatus niteInitializeHandTracker(NiteHandTrackerHandle*); NiteStatus niteInitializeHandTrackerByDevice(void*, NiteHandTrackerHandle*); NiteStatus niteStartHandTracking(NiteHandTrackerHandle, const NitePoint3f*, NiteHandId* pNewHandId); NiteStatus niteSetHandSmoothingFactor(NiteHan

35、dTrackerHandle, float); NiteStatus niteGetHandSmoothingFactor(NiteHandTrackerHandle, float*); NiteStatus niteRegisterHandTrackerCallbacks(NiteHandTrackerHandle, NiteHandTrackerCallbacks*, void*); NiteStatus niteReadHandTrackerFrame(NiteHandTrackerHandle, NiteHandTrackerFrame*); NiteStatus niteHandTr

36、ackerFrameAddRef(NiteHandTrackerHandle, NiteHandTrackerFrame*); NiteStatus niteHandTrackerFrameRelease(NiteHandTrackerHandle, NiteHandTrackerFrame*); NiteStatus niteStartGestureDetection(NiteHandTrackerHandle, NiteGestureType); Convert NiteStatus niteConvertJointCoordinatesToDepth(NiteUserTrackerHan

37、dle userTracker, float x, float y, float z, float* pX, float* pY); NiteStatus niteConvertDepthCoordinatesToJoint(NiteUserTrackerHandle userTracker, int x, int y, int z, float* pX, float* pY); NiteStatus niteConvertHandCoordinatesToDepth(NiteHandTrackerHandle handTracker, float x, float y, float z, f

38、loat* pX, float* pY); NiteStatus niteConvertDepthCoordinatesToHand(NiteHandTrackerHandle handTracker, int x, int y, int z, float* pX, float* pY); NiTE C++ interfaces class Point3f: 对坐标点的OO化 class Plane : 对NitePlane的OO化 class Quaternion : 对 NiteQuaternion的OO化 class BoundingBox : 对 NiteBoundingBox的OO化

39、class PoseData : 对 NitePoseData的OO化 class UserMap : 对NiteUserMap的OO化 class SkeletonJointclass Skeletonclass UserData class UserTrackerFrameRef Snapshot of the User Tracker algorithm. It holds all the users identified at this time, including their position, skeleton and such, as well as the floor pla

40、ne class UserTracker This is the main object of the User Tracker algorithm. Through it all the users are accessible. class HandTracker This is the main object of the Hand Tracker algorithm. Through it all the hands and gestures are accessible. class NiTE The NiTE class is a static entry point to the

41、 library. Through it you can initialize the library, as well as create User Trackers and Hand Trackers. HandTracker Flow openni::OpenNI::initialize(); openni::Device m_device; nite::HandTracker* m_pHandTracker; openni::Status rc = m_device.open(deviceUri); nite::NiTE::initialize(); m_pHandTracker-&gt;create( m

42、_pHandTracker-&gt;startGestureDetection(nite::GESTURE_WAVE); m_pHandTracker-&gt;startGestureDetection(nite::GESTURE_CLICK); nite::HandTrackerFrameRef handFrame; openni::VideoFrameRef depthFrame; nite::Status rc = m_pHandTracker-&gt;readFrame( depthFrame = handFrame.getDepthFrame(); depthFrame.getVideoMode().getResol

43、utionX() depthFrame.getVideoMode().getResolutionY() openni::DepthPixel* pDepth = depthFrame.getData(); int width = depthFrame.getWidth(); int height = depthFrame.getHeight(); Array for (int i = 0; i startHandTracking (gestures[i].getCurrentPosition(), Request a hand in a specific position, assuming the

44、re really is a hand there. For instance, the position received from a gesture can be used. const nite::Array for (int i = 0; i &lt; hands.getSize(); ++i) const nite::HandData if (!user.isTracking()) printf("Lost hand %d\n", user.getId()); nite::HandId id = user.getId(); HistoryBuffer* pHistory = g_histories[id]; g_histories.erase(g_histories.find(id)); delete pHistory; else if (user.isNew()) printf("Found hand %d\n", user.getId()); g_histories[user.getId()] = new HistoryBuffer; HistoryBuffer* pHistory = g_histo

温馨提示

  • 1. 本站所有资源如无特殊说明,都需要本地电脑安装OFFICE2007和PDF阅读器。图纸软件为CAD,CAXA,PROE,UG,SolidWorks等.压缩文件请下载最新的WinRAR软件解压。
  • 2. 本站的文档不包含任何第三方提供的附件图纸等,如果需要附件,请联系上传者。文件的所有权益归上传用户所有。
  • 3. 本站RAR压缩包中若带图纸,网页内容里面会有图纸预览,若没有图纸预览就没有图纸。
  • 4. 未经权益所有人同意不得将文件中的内容挪作商业或盈利用途。
  • 5. 人人文库网仅提供信息存储空间,仅对用户上传内容的表现方式做保护处理,对用户上传分享的文档内容本身不做任何修改或编辑,并不能对任何下载内容负责。
  • 6. 下载文件中如有侵权或不适当内容,请与我们联系,我们立即纠正。
  • 7. 本站不保证下载资源的准确性、安全性和完整性, 同时也不承担用户因使用这些下载资源对自己和他人造成任何形式的伤害或损失。

评论

0/150

提交评论