谈谈NiTE 2手部跟踪在彩色图像上的显示
主要內容:
- NiTE2手部跟蹤流程
- 代碼演示
- 總結
一、NiTE2手部跟蹤流程
我自己都感覺到天天在重復著相同的代碼,但我覺得沒什么不好的,對于新東西的學習只有在重復再重復的過程中,才能積累經驗,減少犯“低級錯誤”的幾率,所以在開始之前,讓我們再熟練熟練NITE 2的手部跟蹤流程,主要包括以下幾個步驟:
1. 初始化NITE環(huán)境: nite::NiTE::initialize();
2. 創(chuàng)建HandTracker手部跟蹤器: HandTracker mHandTracker; mHandTracker.create(&mDevice);
3. 設定手勢探測(GESTURE_WAVE、GESTURE_CLICK和GESTURE_HAND_RAISE):mHandTracker.startGestureDetection( GESTURE_WAVE );等等;
4. 創(chuàng)建并讀取HandTracker Frame信息:HandTrackerFrameRef mHandFrame; mHandTracker.readFrame( &mHandFrame );
5. 對整幀信息進行分析,統(tǒng)計得到符合的手勢信息:const nite::Array<GestureData>& aGestures = mHandFrame.getGestures();
6. 通過跟蹤得到的手勢信息,開始對該特定手進行手部跟蹤:const Point3f& rPos = rGesture.getCurrentPosition(); HandId mHandID; mHandTracker.startHandTracking( rPos, &mHandID );
7. 讀取并統(tǒng)計目前被跟蹤的手信息:const nite::Array<HandData>& aHands = mHandFrame.getHands();
8. 確定手部是否屬于跟蹤狀態(tài),開始自己的操作:
if( rHand.isTracking() )
{
    // 得到手心坐標
    const Point3f& rPos = rHand.getPosition();
    // ……
}
9. 關閉跟蹤器:mHandTracker.destroy();
10. 最后關閉NITE環(huán)境:nite::NiTE::shutdown();
二、代碼演示
在談談NITE 2與OpenCV結合的第一個程序和談談NITE 2與OpenCV結合提取指尖坐標中我們都是在深度圖像中對獲得的手部信息進行處理,但不知道在彩色圖像中,手部跟蹤獲得手心坐標是怎么樣的?是否也和深度圖像顯示一樣,能夠很好的定位到真正的手心中?為了回答自己的這些問題,模仿談談人體骨骼坐標在彩色圖像中顯示中的方法,將通過NiTE2手部跟蹤得到的手心坐標映射到彩色圖像和深度圖像中,并顯示對比。具體解釋和代碼如下:
#include "stdafx.h"
#include <iostream>

// OpenCV headers
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

// OpenNI / NiTE headers
#include <OpenNI.h>
#include <NiTE.h>

using namespace std;
using namespace openni;
using namespace nite;

// Track hands with NiTE 2 and draw the detected palm position on both the
// depth image and the (depth-registered) color image so the two can be
// compared side by side.
int main( int argc, char **argv )
{
    // Initialize OpenNI and open the first available device.
    // (Status checks added for consistency with the trajectory demo below.)
    if( OpenNI::initialize() != openni::STATUS_OK )
    {
        cerr << "OpenNI initial error" << endl;
        return -1;
    }
    Device mDevice;
    if( mDevice.open( ANY_DEVICE ) != openni::STATUS_OK )
    {
        cerr << "Can't open device" << endl;
        return -1;
    }

    // Depth stream: 640x480 @ 30fps, 1mm depth units.
    VideoStream mDepthStream;
    mDepthStream.create( mDevice, SENSOR_DEPTH );
    VideoMode mDepthMode;
    mDepthMode.setResolution( 640, 480 );
    mDepthMode.setFps( 30 );
    mDepthMode.setPixelFormat( PIXEL_FORMAT_DEPTH_1_MM );
    mDepthStream.setVideoMode( mDepthMode );

    // Color stream: same resolution and frame rate, RGB888.
    VideoStream mColorStream;
    mColorStream.create( mDevice, SENSOR_COLOR );
    VideoMode mColorMode;
    mColorMode.setResolution( 640, 480 );
    mColorMode.setFps( 30 );
    mColorMode.setPixelFormat( PIXEL_FORMAT_RGB888 );
    mColorStream.setVideoMode( mColorMode );

    // Register depth to color so a point in depth coordinates can be drawn
    // directly onto the color frame.
    mDevice.setImageRegistrationMode( IMAGE_REGISTRATION_DEPTH_TO_COLOR );

    // Initialize NiTE and create the hand tracker on this device.
    if( NiTE::initialize() != nite::STATUS_OK )
    {
        cerr << "NiTE initial error" << endl;
        return -1;
    }
    HandTracker mHandTracker;
    if( mHandTracker.create( &mDevice ) != nite::STATUS_OK )
    {
        cerr << "Can't create hand tracker" << endl;
        return -1;
    }

    // Gestures that trigger hand tracking (GESTURE_WAVE, GESTURE_CLICK;
    // GESTURE_HAND_RAISE is left disabled).
    mHandTracker.startGestureDetection( GESTURE_WAVE );
    mHandTracker.startGestureDetection( GESTURE_CLICK );
    //mHandTracker.startGestureDetection( GESTURE_HAND_RAISE );
    mHandTracker.setSmoothingFactor( 0.1f );

    // Display windows for the depth and color images.
    cv::namedWindow( "Depth Image", CV_WINDOW_AUTOSIZE );
    cv::namedWindow( "Hand Image", CV_WINDOW_AUTOSIZE );

    // Start both streams and query the maximum depth value for scaling.
    mDepthStream.start();
    mColorStream.start();
    int iMaxDepth = mDepthStream.getMaxPixelValue();

    while( true )
    {
        // Read one depth frame and one color frame.
        VideoFrameRef mDepthFrame;
        mDepthStream.readFrame( &mDepthFrame );
        VideoFrameRef mColorFrame;
        mColorStream.readFrame( &mColorFrame );

        // Wrap the 16-bit depth data and rescale to 8-bit for display.
        const cv::Mat mImageDepth( mDepthFrame.getHeight(), mDepthFrame.getWidth(),
                                   CV_16UC1, (void*)mDepthFrame.getData() );
        cv::Mat mScaledDepth;
        mImageDepth.convertTo( mScaledDepth, CV_8U, 255.0 / iMaxDepth );

        // Wrap the RGB data (CV_8UC3) and convert RGB ==> BGR for OpenCV.
        const cv::Mat mImageRGB( mColorFrame.getHeight(), mColorFrame.getWidth(),
                                 CV_8UC3, (void*)mColorFrame.getData() );
        cv::Mat cImageBGR;
        cv::cvtColor( mImageRGB, cImageBGR, CV_RGB2BGR );

        // Read the hand-tracker frame and scan it for detected gestures.
        HandTrackerFrameRef mHandFrame;
        mHandTracker.readFrame( &mHandFrame );
        const nite::Array<GestureData>& aGestures = mHandFrame.getGestures();
        for( int i = 0; i < aGestures.getSize(); ++ i )
        {
            const GestureData& rGesture = aGestures[i];
            // A recognized gesture carries its current 3D position.
            const Point3f& rPos = rGesture.getCurrentPosition();
            cout << " 手勢位置為: (" << rPos.x << ", " << rPos.y << ", " << rPos.z << ")" << endl;
            // Start tracking the hand at the gesture position.
            HandId mHandID;
            mHandTracker.startHandTracking( rPos, &mHandID );
            cout << "確定手勢位置,開始手部跟蹤" << endl;
        }

        // For every tracked hand, project the palm position into depth-image
        // coordinates and draw it on both images.
        const nite::Array<HandData>& aHands = mHandFrame.getHands();
        for( int i = 0; i < aHands.getSize(); ++ i )
        {
            const HandData& rHand = aHands[i];
            if( rHand.isNew() )
                cout << " Start tracking";
            else if( rHand.isLost() )
                cout << " Lost";

            // Only draw hands that are currently being tracked.
            if( rHand.isTracking() )
            {
                // Palm center in real-world coordinates.
                const Point3f& rPos = rHand.getPosition();
                cout << " at " << rPos.x << ", " << rPos.y << ", " << rPos.z;
                cv::Point2f aPoint;
                mHandTracker.convertHandCoordinatesToDepth( rPos.x, rPos.y, rPos.z,
                                                            &aPoint.x, &aPoint.y );

                // Depth is registered to color, so the same 2D point is valid
                // on both images.
                cv::circle( cImageBGR, aPoint, 3, cv::Scalar( 0, 0, 255 ), 4 );
                cv::circle( mScaledDepth, aPoint, 3, cv::Scalar( 0, 0, 255 ), 4 );

                // Draw a 200x200 box centered on the palm. The original built
                // the four edges by hand with mislabeled corner variables;
                // cv::rectangle draws the same axis-aligned box in one call
                // per image.
                const cv::Point2f boxTL( aPoint.x - 100, aPoint.y - 100 );
                const cv::Point2f boxBR( aPoint.x + 100, aPoint.y + 100 );
                cv::rectangle( cImageBGR, boxTL, boxBR, cv::Scalar( 255, 0, 0 ), 3 );
                cv::rectangle( mScaledDepth, boxTL, boxBR, cv::Scalar( 255, 0, 0 ), 3 );
            }
        }

        // Show both images; press 'q' to quit.
        cv::imshow( "Depth Image", mScaledDepth );
        cv::imshow( "Hand Image", cImageBGR );
        if( cv::waitKey( 1 ) == 'q' )
            break;
    }

    // Tear down in reverse order of creation.
    mHandTracker.destroy();
    mColorStream.destroy();
    mDepthStream.destroy();
    mDevice.close();
    NiTE::shutdown();
    OpenNI::shutdown();
    return 0;
}
接著畫出手部運動軌跡,直接上代碼:
#include <array> #include <iostream> #include <map> #include <vector>// OpenCV 頭文件 #include <opencv2/core/core.hpp> #include <opencv2/highgui/highgui.hpp> #include <opencv2/imgproc/imgproc.hpp>// NiTE 頭文件 #include <OpenNI.h> #include <NiTE.h>using namespace std; using namespace openni; using namespace nite;int main( int argc, char **argv ) {// 初始化OpenNI OpenNI::initialize();// 打開Kinect設備 Device mDevice;mDevice.open( ANY_DEVICE );// 創(chuàng)建深度數(shù)據(jù)流 VideoStream mDepthStream;mDepthStream.create( mDevice, SENSOR_DEPTH );// 設置VideoMode模式 VideoMode mDepthMode;mDepthMode.setResolution( 640, 480 );mDepthMode.setFps( 30 );mDepthMode.setPixelFormat( PIXEL_FORMAT_DEPTH_1_MM );mDepthStream.setVideoMode(mDepthMode);// 同樣的設置彩色數(shù)據(jù)流 VideoStream mColorStream;mColorStream.create( mDevice, SENSOR_COLOR );// 設置VideoMode模式 VideoMode mColorMode;mColorMode.setResolution( 640, 480 );mColorMode.setFps( 30 );mColorMode.setPixelFormat( PIXEL_FORMAT_RGB888 );mColorStream.setVideoMode( mColorMode);// 設置深度圖像映射到彩色圖像 mDevice.setImageRegistrationMode( IMAGE_REGISTRATION_DEPTH_TO_COLOR );// 初始化 NiTEif( NiTE::initialize() != nite::STATUS_OK ){cerr << "NiTE initial error" << endl;return -1;}// 創(chuàng)建HandTracker跟蹤器 HandTracker mHandTracker;if( mHandTracker.create() != nite::STATUS_OK ){cerr << "Can't create user tracker" << endl;return -1;}// 設定手勢探測(GESTURE_WAVE、GESTURE_CLICK和GESTURE_HAND_RAISE) mHandTracker.startGestureDetection( GESTURE_WAVE );mHandTracker.startGestureDetection( GESTURE_CLICK );//mHandTracker.startGestureDetection( GESTURE_HAND_RAISE ); mHandTracker.setSmoothingFactor(0.1f);// 創(chuàng)建深度圖像顯示cv::namedWindow("Depth Image", CV_WINDOW_AUTOSIZE);// 創(chuàng)建彩色圖像顯示cv::namedWindow( "Color Image", CV_WINDOW_AUTOSIZE );// 保存點坐標map< HandId,vector<cv::Point2f> > mapHandData;vector<cv::Point2f> vWaveList;vector<cv::Point2f> vClickList;cv::Point2f ptSize( 3, 3 );array<cv::Scalar,8> aHandColor;aHandColor[0] = cv::Scalar( 255, 0, 0 );aHandColor[1] = cv::Scalar( 0, 255, 0 
);aHandColor[2] = cv::Scalar( 0, 0, 255 );aHandColor[3] = cv::Scalar( 255, 255, 0 );aHandColor[4] = cv::Scalar( 255, 0, 255 );aHandColor[5] = cv::Scalar( 0, 255, 255 );aHandColor[6] = cv::Scalar( 255, 255, 255 );aHandColor[7] = cv::Scalar( 0, 0, 0 );// 環(huán)境初始化后,開始獲取深度數(shù)據(jù)流和彩色數(shù)據(jù)流 mDepthStream.start();mColorStream.start();// 獲得最大深度值int iMaxDepth = mDepthStream.getMaxPixelValue();// startwhile( true ){// 創(chuàng)建OpenCV::Mat,用于顯示彩色數(shù)據(jù)圖像 cv::Mat cImageBGR;// 讀取彩色數(shù)據(jù)幀信息流 VideoFrameRef mColorFrame;mColorStream.readFrame( &mColorFrame );// 將彩色數(shù)據(jù)流轉換為OpenCV格式,記得格式是:CV_8UC3(含R\G\B)const cv::Mat mImageRGB( mColorFrame.getHeight(), mColorFrame.getWidth(),CV_8UC3, (void*)mColorFrame.getData() );// RGB ==> BGR cv::cvtColor( mImageRGB, cImageBGR, CV_RGB2BGR );// 獲取手Frame HandTrackerFrameRef mHandFrame;if( mHandTracker.readFrame( &mHandFrame ) == nite::STATUS_OK ){openni::VideoFrameRef mDepthFrame = mHandFrame.getDepthFrame();// 將深度數(shù)據(jù)轉換成OpenCV格式const cv::Mat mImageDepth( mDepthFrame.getHeight(), mDepthFrame.getWidth(), CV_16UC1, (void*)mDepthFrame.getData() );// 為了讓深度圖像顯示的更加明顯一些,將CV_16UC1 ==> CV_8U格式 cv::Mat mScaledDepth, mImageBGR;mImageDepth.convertTo( mScaledDepth, CV_8U, 255.0 / 10000 );// 將灰度圖轉換成BGR格式,為了畫出點的顏色坐標和軌跡 cv::cvtColor( mScaledDepth, mImageBGR, CV_GRAY2BGR );// 檢測手勢const nite::Array<GestureData>& aGestures = mHandFrame.getGestures();for( int i = 0; i < aGestures.getSize(); ++ i ){const GestureData& rGesture = aGestures[i];const Point3f& rPos = rGesture.getCurrentPosition();cv::Point2f rPos2D;mHandTracker.convertHandCoordinatesToDepth( rPos.x, rPos.y, rPos.z, &rPos2D.x, &rPos2D.y );// 畫點switch( rGesture.getType() ){case GESTURE_WAVE:vWaveList.push_back( rPos2D );break;case GESTURE_CLICK:vClickList.push_back( rPos2D );break;}// 手部跟蹤 HandId mHandID;if( mHandTracker.startHandTracking( rPos, &mHandID ) != nite::STATUS_OK )cerr << "Can't track hand" << endl;}// 得到手心坐標const nite::Array<HandData>& aHands = 
mHandFrame.getHands();for( int i = 0; i < aHands.getSize(); ++ i ){const HandData& rHand = aHands[i];HandId uID = rHand.getId();if( rHand.isNew() ){mapHandData.insert( make_pair( uID, vector<cv::Point2f>() ) );}if( rHand.isTracking() ){// 將手心坐標映射到彩色圖像和深度圖像中const Point3f& rPos = rHand.getPosition();cv::Point2f rPos2D;mHandTracker.convertHandCoordinatesToDepth( rPos.x, rPos.y, rPos.z, &rPos2D.x, &rPos2D.y );mapHandData[uID].push_back( rPos2D );}if( rHand.isLost() )mapHandData.erase( uID );}// 畫點和軌跡for( auto itHand = mapHandData.begin(); itHand != mapHandData.end(); ++ itHand ){const cv::Scalar& rColor = aHandColor[ itHand->first % aHandColor.size() ];const vector<cv::Point2f>& rPoints = itHand->second;for( int i = 1; i < rPoints.size(); ++ i ){cv::line( mImageBGR, rPoints[i-1], rPoints[i], rColor, 2 );cv::line( cImageBGR, rPoints[i-1], rPoints[i], rColor, 2 );}}// 畫 click gesture 軌跡for( auto itPt = vClickList.begin(); itPt != vClickList.end(); ++ itPt ){cv::circle( mImageBGR, *itPt, 5, cv::Scalar( 0, 0, 255 ), 2 );cv::circle( cImageBGR, *itPt, 5, cv::Scalar( 0, 0, 255 ), 2 );}// 畫 wave gesture 軌跡for( auto itPt = vWaveList.begin(); itPt != vWaveList.end(); ++ itPt ){cv::rectangle( mImageBGR, *itPt - ptSize, *itPt + ptSize, cv::Scalar( 0, 255, 0 ), 2 );cv::rectangle( cImageBGR, *itPt - ptSize, *itPt + ptSize, cv::Scalar( 0, 255, 0 ), 2 );}// 顯示imagecv::imshow( "Depth Image", mImageBGR );cv::imshow("Color Image", cImageBGR);mHandFrame.release();}else{cerr << "Can't get new frame" << endl;}// 按鍵“q”退出循環(huán)if( cv::waitKey( 1 ) == 'q' )break;}mHandTracker.destroy();mColorStream.destroy();NiTE::shutdown();OpenNI::shutdown();return 0; }運行結果:
三、總結
最后說明的是:根據(jù)自己的感覺寫代碼,沒做封裝、優(yōu)化、重構,完全是面向過程,而且肯定還存在細節(jié)的問題,會在后面進一步優(yōu)化的。??? 寫的粗糙,歡迎指正批評~~~
轉載于:https://www.cnblogs.com/yemeishu/archive/2013/01/29/2881445.html
總結
以上是生活随笔為你收集整理的谈谈NiTE 2手部跟踪在彩色图像上的显示的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: 汽车功能安全标准“ISO 26262”导
- 下一篇: DWTagList