官方说明(太简单)
http://msdn.microsoft.com/en-us/library/dn188671.aspx
微软提供的视频
Kinect for Windows 开发初体验:Kinect Interaction功能介绍(22)
Kinect for Windows 开发初体验:Kinect Interaction 编程接口(23)
通过这两个视频可以了解交互的概念,便于之后的开发。但显然,关于到底如何使用,依旧需要自己去琢磨。
交互数据生成流程图
日本人的C++代码(Need OpenCV),简单检测Grip或者Grip Release的手势状态。
源地址https://github.com/kaorun55/KinectSDKv17Sample (还有其它许多代码,我就不研究了,总之代码十分简洁,很不错!)
他的运行效果就是,你手抓取就显示Grip在界面上,又放开,再显示一个Release在界面上,就不配图了。还是比较准确的,只是有时会误判。
(以下为完整源代码)
// http://social.msdn.microsoft.com/Forums/ja-JP/kinectsdknuiapi/thread/e4f5a696-ed4f-4a5f-8e54-4b3706f62ad0 #include <iostream> #include <Windows.h> #include <NuiApi.h> #include <KinectInteraction.h> #include <opencv2/opencv.hpp> #define ERROR_CHECK( ret ) \ if ( ret != S_OK ) { \ std::stringstream ss; \ ss << "failed " #ret " " << std::hex << ret << std::endl; \ throw std::runtime_error( ss.str().c_str() ); \ } const NUI_IMAGE_RESOLUTION CAMERA_RESOLUTION = NUI_IMAGE_RESOLUTION_640x480; class KinectAdapter : public INuiInteractionClient { public: HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void **ppv) { std::cout << __FUNCTION__ << std::endl; return S_OK; } ULONG STDMETHODCALLTYPE AddRef() { std::cout << __FUNCTION__ << std::endl; return 1; } ULONG STDMETHODCALLTYPE Release() { std::cout << __FUNCTION__ << std::endl; return 0; } HRESULT STDMETHODCALLTYPE GetInteractionInfoAtLocation(DWORD skeletonTrackingId, NUI_HAND_TYPE handType, FLOAT x, FLOAT y, _Out_ NUI_INTERACTION_INFO *pInteractionInfo) { //std::cout << __FUNCTION__ << std::endl; pInteractionInfo->IsGripTarget = TRUE; return S_OK; } }; class KinectSample { private: INuiSensor* kinect; INuiInteractionStream* stream; KinectAdapter adapter; HANDLE imageStreamHandle; HANDLE depthStreamHandle; HANDLE streamEvent; DWORD width; DWORD height; public: KinectSample() { } ~KinectSample() { // 廔椆張棟 if ( kinect != 0 ) { kinect->NuiShutdown(); kinect->Release(); } } void initialize() { createInstance(); // Kinect偺愝掕傪弶婜壔偡傞 ERROR_CHECK( kinect->NuiInitialize( NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON ) ); // RGB僇儊儔傪弶婜壔偡傞 ERROR_CHECK( kinect->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, CAMERA_RESOLUTION, 0, 2, 0, &imageStreamHandle ) ); // 嫍棧僇儊儔傪弶婜壔偡傞 ERROR_CHECK( kinect->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, CAMERA_RESOLUTION, 0, 2, 0, &depthStreamHandle ) ); // Near儌乕僪 //ERROR_CHECK( 
kinect->NuiImageStreamSetImageFrameFlags( // depthStreamHandle, NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE ) ); // 僗働儖僩儞傪弶婜壔偡傞 ERROR_CHECK( kinect->NuiSkeletonTrackingEnable( 0, NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT ) ); // 僼儗乕儉峏怴僀儀儞僩偺僴儞僪儖傪嶌惉偡傞 streamEvent = ::CreateEvent( 0, TRUE, FALSE, 0 ); ERROR_CHECK( kinect->NuiSetFrameEndEvent( streamEvent, 0 ) ); // 巜掕偟偨夝憸搙偺丄夋柺僒僀僘傪庢摼偡傞 ::NuiImageResolutionToSize(CAMERA_RESOLUTION, width, height ); // 僀儞僞儔僋僔儑儞儔僀僽儔儕偺弶婜壔 ERROR_CHECK( ::NuiCreateInteractionStream( kinect, &adapter, &stream ) ); ERROR_CHECK( stream->Enable( 0 ) ); } void run() { cv::Mat image; // 儊僀儞儖乕僾 while ( 1 ) { // 僨乕僞偺峏怴傪懸偮 DWORD ret = ::WaitForSingleObject( streamEvent, INFINITE ); ::ResetEvent( streamEvent ); drawRgbImage( image ); processDepth(); processSkeleton(); processInteraction(); // 夋憸傪昞帵偡傞 cv::imshow( "KinectSample", image ); // 廔椆偺偨傔偺僉乕擖椡僠僃僢僋寭丄昞帵偺偨傔偺僂僃僀僩 int key = cv::waitKey( 10 ); if ( key == 'q' ) { break; } } } private: void createInstance() { // 愙懕偝傟偰偄傞Kinect偺悢傪庢摼偡傞 int count = 0; ERROR_CHECK( ::NuiGetSensorCount( &count ) ); if ( count == 0 ) { throw std::runtime_error( "Kinect 傪愙懕偟偰偔偩偝偄" ); } // 嵟弶偺Kinect偺僀儞僗僞儞僗傪嶌惉偡傞 ERROR_CHECK( ::NuiCreateSensorByIndex( 0, &kinect ) ); // Kinect偺忬懺傪庢摼偡傞 HRESULT status = kinect->NuiStatus(); if ( status != S_OK ) { throw std::runtime_error( "Kinect 偑棙梡壜擻偱偼偁傝傑偣傫" ); } } void drawRgbImage( cv::Mat& image ) { // RGB僇儊儔偺僼儗乕儉僨乕僞傪庢摼偡傞 NUI_IMAGE_FRAME imageFrame = { 0 }; ERROR_CHECK( kinect->NuiImageStreamGetNextFrame( imageStreamHandle, 0, &imageFrame ) ); // 夋憸僨乕僞傪庢摼偡傞 NUI_LOCKED_RECT colorData; imageFrame.pFrameTexture->LockRect( 0, &colorData, 0, 0 ); // 夋憸僨乕僞傪僐僺乕偡傞 image = cv::Mat( height, width, CV_8UC4, colorData.pBits ); // 僼儗乕儉僨乕僞傪夝曻偡傞 ERROR_CHECK( kinect->NuiImageStreamReleaseFrame( imageStreamHandle, &imageFrame ) ); } void processDepth() { // 嫍棧僇儊儔偺僼儗乕儉僨乕僞傪庢摼偡傞 NUI_IMAGE_FRAME depthFrame = { 0 }; ERROR_CHECK( kinect->NuiImageStreamGetNextFrame( depthStreamHandle, 0, &depthFrame ) ); // 
僼儗乕儉僨乕僞傪尦偵丄奼挘嫍棧僨乕僞傪庢摼偡傞 BOOL nearMode = FALSE; INuiFrameTexture *frameTexture = 0; ERROR_CHECK( kinect->NuiImageFrameGetDepthImagePixelFrameTexture( depthStreamHandle, &depthFrame, &nearMode, &frameTexture ) ); // 嫍棧僨乕僞傪庢摼偡傞 NUI_LOCKED_RECT depthData = { 0 }; frameTexture->LockRect( 0, &depthData, 0, 0 ); // Depth僨乕僞傪愝掕偡傞 ERROR_CHECK( stream->ProcessDepth( depthData.size, depthData.pBits, depthFrame.liTimeStamp ) ); // 僼儗乕儉僨乕僞傪夝曻偡傞 frameTexture->UnlockRect( 0 ); ERROR_CHECK( kinect->NuiImageStreamReleaseFrame( depthStreamHandle, &depthFrame ) ); } LARGE_INTEGER skeletonTimeStamp; void processSkeleton() { // 僗働儖僩儞偺僼儗乕儉傪庢摼偡傞 NUI_SKELETON_FRAME skeletonFrame = { 0 }; auto ret = kinect->NuiSkeletonGetNextFrame( 0, &skeletonFrame ); if ( ret != S_OK ) { std::cout << "not skeleton!!" << std::endl; return; } //std::cout << "skeleton!!" << std::endl; // 僗働儖僩儞僨乕僞傪愝掕偡傞 skeletonTimeStamp = skeletonFrame.liTimeStamp; Vector4 reading = { 0 }; ERROR_CHECK( kinect->NuiAccelerometerGetCurrentReading( &reading ) ); ERROR_CHECK( stream->ProcessSkeleton( NUI_SKELETON_COUNT, skeletonFrame.SkeletonData, &reading, skeletonTimeStamp ) ); } void processInteraction() { // 僀儞僞儔僋僔儑儞僼儗乕儉傪庢摼偡傞 NUI_INTERACTION_FRAME interactionFrame = { 0 } ; auto ret = stream->GetNextFrame( 0, &interactionFrame ); if ( ret != S_OK ) { if ( ret == E_POINTER ) { std::cout << "E_POINTER" << std::endl; } else if ( ret == E_NUI_FRAME_NO_DATA ) { std::cout << "E_NUI_FRAME_NO_DATA" << std::endl; } return; } //std::cout << "interaction!!" 
<< std::endl; for ( auto user : interactionFrame.UserInfos ) { if ( user.SkeletonTrackingId != 0 ) { for ( auto hand : user.HandPointerInfos ) { if ( hand.HandEventType != NUI_HAND_EVENT_TYPE::NUI_HAND_EVENT_TYPE_NONE ) { std::cout << EventTypeToString( hand.HandEventType ) << " " << std::endl; } } } } } std::string EventTypeToString( NUI_HAND_EVENT_TYPE eventType ) { if ( eventType == NUI_HAND_EVENT_TYPE::NUI_HAND_EVENT_TYPE_GRIP ) { return "Grip"; } else if ( eventType == NUI_HAND_EVENT_TYPE::NUI_HAND_EVENT_TYPE_GRIPRELEASE ) { return "GripRelease"; } return "None"; } }; void main() { try { KinectSample kinect; kinect.initialize(); kinect.run(); } catch ( std::exception& ex ) { std::cout << ex.what() << std::endl; } } |
从他给的程序第一行的参考可以知道,还是参考了别人的代码(主要是我的,而我又是参考了一部分别人的)
箫不鸣的程序(依旧只能检测Grip and Grip Release状态!效果和日本人的一样)
运行效果图,也就是信息只有一页,实时更新,用来测试的。
(以下为完整源代码)
// ControlBasicCPPFinal.cpp : Defines the entry point for the console application. //from internet http://social.msdn.microsoft.com/Forums/en-US/kinectsdknuiapi/thread/e4f5a696-ed4f-4a5f-8e54-4b3706f62ad0 //DsoTsen #include "stdafx.h" #include <conio.h> #include <Windows.h> #include <iostream> #include <NuiApi.h> #include <KinectInteraction.h> using namespace std; #define SafeRelease(X) if(X) delete X; //---------------------------------------------------- //#define _WINDOWS INuiSensor *m_pNuiSensor; INuiInteractionStream *m_nuiIStream; class CIneractionClient:public INuiInteractionClient { public: CIneractionClient() {;} ~CIneractionClient() {;} STDMETHOD(GetInteractionInfoAtLocation)(THIS_ DWORD skeletonTrackingId, NUI_HAND_TYPE handType, FLOAT x, FLOAT y, _Out_ NUI_INTERACTION_INFO *pInteractionInfo) { if(pInteractionInfo) { pInteractionInfo->IsPressTarget = FALSE;//must add pInteractionInfo->IsGripTarget = TRUE; //must add // pInteractionInfo->PressTargetControlId = 0; // pInteractionInfo->PressAttractionPointX = 0.f; // pInteractionInfo->PressAttractionPointY = 0.f; return S_OK; } return E_POINTER; //return S_OK; } STDMETHODIMP_(ULONG) AddRef() { return 2; } STDMETHODIMP_(ULONG) Release() { return 1; } STDMETHODIMP QueryInterface(REFIID riid, void **ppv) { return S_OK; } }; CIneractionClient m_nuiIClient; //-------------------------------------------------------------------- HANDLE m_hNextColorFrameEvent; HANDLE m_hNextDepthFrameEvent; HANDLE m_hNextSkeletonEvent; HANDLE m_hNextInteractionEvent; HANDLE m_pColorStreamHandle; HANDLE m_pDepthStreamHandle; HANDLE m_hEvNuiProcessStop; //----------------------------------------------------------------------------------- int DrawColor(HANDLE h) { return 0; } int DrawDepth(HANDLE h) { NUI_IMAGE_FRAME pImageFrame; INuiFrameTexture* pDepthImagePixelFrame; HRESULT hr = m_pNuiSensor->NuiImageStreamGetNextFrame( h, 0, &pImageFrame ); BOOL nearMode = TRUE; 
m_pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture(m_pDepthStreamHandle, &pImageFrame, &nearMode, &pDepthImagePixelFrame); INuiFrameTexture * pTexture = pDepthImagePixelFrame; NUI_LOCKED_RECT LockedRect; pTexture->LockRect( 0, &LockedRect, NULL, 0 ); if( LockedRect.Pitch != 0 ) { HRESULT hr = m_nuiIStream->ProcessDepth(LockedRect.size,PBYTE(LockedRect.pBits),pImageFrame.liTimeStamp); if( FAILED( hr ) ) { cout<<"Process Depth failed"<<endl; } } pTexture->UnlockRect(0); m_pNuiSensor->NuiImageStreamReleaseFrame( h, &pImageFrame ); return 0; } int DrawSkeleton() { NUI_SKELETON_FRAME SkeletonFrame = {0}; HRESULT hr = m_pNuiSensor->NuiSkeletonGetNextFrame( 0, &SkeletonFrame ); if( FAILED( hr ) ) { cout<<"Get Skeleton Image Frame Failed"<<endl; return -1; } bool bFoundSkeleton = true; static int static_one_is_enough=0; if(static_one_is_enough==0) { cout<<"find skeleton !"<<endl; static_one_is_enough++; } m_pNuiSensor->NuiTransformSmooth(&SkeletonFrame,NULL); Vector4 vTemp; m_pNuiSensor->NuiAccelerometerGetCurrentReading(&vTemp); hr =m_nuiIStream->ProcessSkeleton(NUI_SKELETON_COUNT, SkeletonFrame.SkeletonData, &vTemp, SkeletonFrame.liTimeStamp); if( FAILED( hr ) ) { cout<<"Process Skeleton failed"<<endl; } return 0; } int ShowInteraction() { NUI_INTERACTION_FRAME Interaction_Frame; HRESULT hr = m_nuiIStream->GetNextFrame( 0,&Interaction_Frame ); if(hr != S_OK) { if(hr == E_POINTER) cout<<"E_POINTER "<<endl; else if(hr == E_NUI_FRAME_NO_DATA) { cout<<"E_NUI_FRAME_NO_DATA"<<endl; } return -1; } int trackingID = 0; int event; for(int i=0 ; i<NUI_SKELETON_COUNT ; i++) { COORD pos = {0,0}; HANDLE hOut = GetStdHandle(STD_OUTPUT_HANDLE); //函数句柄 SetConsoleCursorPosition(hOut, pos); static int frameCount=0; frameCount++; for(int j=0;j<NUI_USER_HANDPOINTER_COUNT;j++) { if( ( frameCount%3 )==1 ) { trackingID = Interaction_Frame.UserInfos[i].SkeletonTrackingId; event = Interaction_Frame.UserInfos[i].HandPointerInfos[j].HandEventType; DWORD state = 
Interaction_Frame.UserInfos[i].HandPointerInfos[j].State; NUI_HAND_TYPE type = Interaction_Frame.UserInfos[i].HandPointerInfos[j].HandType; if(type==NUI_HAND_TYPE_NONE) continue; if((state&&NUI_HANDPOINTER_STATE_TRACKED)==0) continue; if((state&&NUI_HANDPOINTER_STATE_ACTIVE)==0) continue; cout<<"id="<<trackingID<<"--------HandEventType="; if(event == NUI_HAND_EVENT_TYPE_GRIP) { cout<<"Grip !!! "; } else if(event == NUI_HAND_EVENT_TYPE_GRIPRELEASE) { cout<<"Grip Release "; } else { cout<<"No Event! "; } cout<<" HandType="; if(type==NUI_HAND_TYPE_NONE) cout<<"No Hand"; else if(type==NUI_HAND_TYPE_LEFT) cout<<"Left Hand"; else if(type==NUI_HAND_TYPE_RIGHT) cout<<"Right Hand"; cout<<endl; //////NUI_HANDPOINTER_STATE cout<<"STATE_TRACKED = "; if((state&&NUI_HANDPOINTER_STATE_TRACKED)==1) cout<<" TRACKED!"; else cout<<" No TRACKED"; cout<<endl; cout<<"STATE_ACTIVE = "; if((state&&NUI_HANDPOINTER_STATE_ACTIVE)==1) cout<<" ACTIVE"; else cout<<" Not ACTIVE"; cout<<endl; cout<<"STATE_INTERACTIVE = "; if((state&&NUI_HANDPOINTER_STATE_INTERACTIVE)==1) cout<<" INTERACTIVE!"; else cout<<" Not INTERACTIVE"; cout<<endl; cout<<"STATE_PRESSED = "; if((state&&NUI_HANDPOINTER_STATE_PRESSED)==1) cout<<" PRESSED!"; else cout<<" Not PRESSED"; cout<<endl; cout<<"PRIMARY_FOR_USER = "; if((state&&NUI_HANDPOINTER_STATE_PRIMARY_FOR_USER)==1) cout<<" PRIMARY!"; else cout<<" Not PRIMARY"; cout<<endl; // system("\f"); } } } return 0; } DWORD WINAPI KinectDataThread(LPVOID pParam) { HANDLE hEvents[5] = {m_hEvNuiProcessStop,m_hNextColorFrameEvent, m_hNextDepthFrameEvent,m_hNextSkeletonEvent,m_hNextInteractionEvent}; while(1) { int nEventIdx; nEventIdx=WaitForMultipleObjects(sizeof(hEvents)/sizeof(hEvents[0]), hEvents,FALSE,100); if (WAIT_OBJECT_0 == WaitForSingleObject(m_hEvNuiProcessStop, 0)) { break; } // Process signal events if (WAIT_OBJECT_0 == WaitForSingleObject(m_hNextColorFrameEvent, 0)) { DrawColor(m_pColorStreamHandle); } if (WAIT_OBJECT_0 == WaitForSingleObject(m_hNextDepthFrameEvent, 
0)) { DrawDepth(m_pDepthStreamHandle); } if (WAIT_OBJECT_0 == WaitForSingleObject(m_hNextSkeletonEvent, 0)) { DrawSkeleton(); } if (WAIT_OBJECT_0 == WaitForSingleObject(m_hNextInteractionEvent, 0)) { ShowInteraction(); } } CloseHandle(m_hEvNuiProcessStop); m_hEvNuiProcessStop = NULL; CloseHandle( m_hNextSkeletonEvent ); CloseHandle( m_hNextDepthFrameEvent ); CloseHandle( m_hNextColorFrameEvent ); CloseHandle( m_hNextInteractionEvent ); return 0; } DWORD ConnectKinect() { INuiSensor * pNuiSensor; HRESULT hr; int iSensorCount = 0; hr = NuiGetSensorCount(&iSensorCount); if (FAILED(hr)) { return hr; } // Look at each Kinect sensor for (int i = 0; i < iSensorCount; ++i) { // Create the sensor so we can check status, if we can't create it, move on to the next hr = NuiCreateSensorByIndex(i, &pNuiSensor); if (FAILED(hr)) { continue; } // Get the status of the sensor, and if connected, then we can initialize it hr = pNuiSensor->NuiStatus(); if (S_OK == hr) { m_pNuiSensor = pNuiSensor; break; } // This sensor wasn't OK, so release it since we're not using it pNuiSensor->Release(); } if (NULL != m_pNuiSensor) { if (SUCCEEDED(hr)) { hr = m_pNuiSensor->NuiInitialize(\ NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX|\ NUI_INITIALIZE_FLAG_USES_COLOR|\ NUI_INITIALIZE_FLAG_USES_SKELETON); if( hr != S_OK ) { cout<<"NuiInitialize failed"<<endl; return hr; } m_hNextColorFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL ); m_pColorStreamHandle = NULL; hr = m_pNuiSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR,NUI_IMAGE_RESOLUTION_640x480, 0, 2, m_hNextColorFrameEvent, &m_pColorStreamHandle); if( FAILED( hr ) ) { cout<<"Could not open image stream video"<<endl; return hr; } m_hNextDepthFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL ); m_pDepthStreamHandle = NULL; hr = m_pNuiSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_640x480, 0, 2, m_hNextDepthFrameEvent, &m_pDepthStreamHandle); if( FAILED( hr ) ) { cout<<"Could not open depth stream 
video"<<endl; return hr; } m_hNextSkeletonEvent = CreateEvent( NULL, TRUE, FALSE, NULL ); hr = m_pNuiSensor->NuiSkeletonTrackingEnable( m_hNextSkeletonEvent, NUI_SKELETON_TRACKING_FLAG_ENABLE_IN_NEAR_RANGE//| ); if( FAILED( hr ) ) { cout<<"Could not open skeleton stream video"<<endl; return hr; } } } if (NULL == m_pNuiSensor || FAILED(hr)) { cout<<"No ready Kinect found!"<<endl; return E_FAIL; } return hr; } int main() { ConnectKinect(); HRESULT hr; m_hNextInteractionEvent = CreateEvent( NULL,TRUE,FALSE,NULL ); m_hEvNuiProcessStop = CreateEvent(NULL,TRUE,FALSE,NULL); hr = NuiCreateInteractionStream(m_pNuiSensor,(INuiInteractionClient *)&m_nuiIClient,&m_nuiIStream); if( FAILED( hr ) ) { cout<<"Could not open Interation stream video"<<endl; return hr; } hr = m_nuiIStream->Enable(m_hNextInteractionEvent);////////???? if( FAILED( hr ) ) { cout<<"Could not open Interation stream video"<<endl; return hr; } HANDLE m_hProcesss = CreateThread(NULL, 0, KinectDataThread, 0, 0, 0); while(1) { Sleep(1); } m_pNuiSensor->NuiShutdown(); SafeRelease(m_pNuiSensor); return 0; } |
2013-5-25 仅能检测Grip 和 Grip Release状态
google了一下,发现也有其他人遇到同样问题,即得到的交互信息永远都是Press状态,囧。应该是自己不会用GetInteractionInfoAtLocation等函数吧,希望整出来的人教下我。(如果做出来就更新)
其它相关代码,无意google到的
How to do Push to Press with Kinect SDK 1.7 (C#)
Kinect Interactions with WPF – Part I: Getting Started (C#,粗略看了下十分详细,很适合大家学习使用Kinect interaction!)