Kinect学习(七):综合提取彩色、深度、人体骨骼点
生活随笔收集整理的這篇文章主要介紹了《Kinect学习(七):综合提取彩色、深度、人体骨骼点》,小編覺得挺不錯的,現在分享給大家,幫大家做個參考。
前言
前面的博客中介紹了如何通過Kinect獲得彩色圖像、深度圖像以及人體骨骼點:
- Kinect學習(三):獲取RGB顏色數據
- Kinect學習(四):提取深度數據
- Kinect學習(五):提取帶用戶ID的深度數據
- Kinect學習(六):提取人體關節點數據
這次要將這幾者綜合起來,同時從Kinect那里拿來這些數據。
代碼
這里的代碼只是將前面幾篇博客中的內容整合了一下,就不做過多說明了。
#include <Windows.h>
#include <iostream>
#include <NuiApi.h>
#include <opencv2/opencv.hpp>

using namespace std;
using namespace cv;

// One output pixel: BGR color plus the Kinect player index (0 = no player)
// decoded from the depth-and-player-index stream.
typedef struct structBGR
{
    BYTE blue;
    BYTE green;
    BYTE red;
    BYTE player;
} BGR;

// Per-skeleton bookkeeping. NUI_SKELETON_COUNT is 6 (max people detected);
// NUI_SKELETON_POSITION_COUNT is 20 joints per tracked skeleton.
bool tracked[NUI_SKELETON_COUNT] = { FALSE };
cv::Point skeletonPoint[NUI_SKELETON_COUNT][NUI_SKELETON_POSITION_COUNT] = { cv::Point(0, 0) }; // joint coords in depth space
cv::Point colorPoint[NUI_SKELETON_COUNT][NUI_SKELETON_POSITION_COUNT] = { cv::Point(0, 0) };    // joint coords in color space

void getColorImage(HANDLE & colorStreamHandle, cv::Mat & colorImg);
BGR Depth2RGB(USHORT depthID);
void getDepthImage(HANDLE & depthStreamHandle, cv::Mat & depthImg, cv::Mat & mask);
void drawSkeleton(cv::Mat &img, cv::Point pointSet[], int which_one);
void getSkeletonImage(cv::Mat & skeletonImg, cv::Mat & colorImg);

// Initializes the sensor for color + depth/player-index + skeleton, then loops:
// whenever a stream's event is signaled, grab that frame and refresh the windows.
// ESC exits.
int main(int argc, char* argv[])
{
    cv::Mat colorImg;
    colorImg.create(480, 640, CV_8UC3);    // color stream is 640x480
    cv::Mat depthImg;
    depthImg.create(240, 320, CV_8UC3);    // depth stream is 320x240
    cv::Mat skeletonImg;
    skeletonImg.create(240, 320, CV_8UC3); // skeleton drawn in depth-space coords
    cv::Mat mask;
    mask.create(240, 320, CV_8UC3);        // per-player segmentation mask

    // Manual-reset events the runtime signals when a new frame is ready.
    HANDLE colorEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    HANDLE depthEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    HANDLE skeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    HANDLE colorStreamHandle = NULL;
    HANDLE depthStreamHandle = NULL;

    HRESULT hr;
    hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX
                       | NUI_INITIALIZE_FLAG_USES_SKELETON);
    if (FAILED(hr))
    {
        cout << "Nui initialize failed." << endl;
        return hr;
    }

    hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2,
                            colorEvent, &colorStreamHandle);
    if (FAILED(hr))
    {
        cout << "Can not open color stream." << endl;
        return hr;
    }

    hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_320x240, 0, 2,
                            depthEvent, &depthStreamHandle);
    if (FAILED(hr))
    {
        cout << "Can not open depth stream." << endl;
        return hr;
    }

    hr = NuiSkeletonTrackingEnable(skeletonEvent, 0);
    if (FAILED(hr))
    {
        cout << "Can not enable skeleton tracking." << endl;
        return hr;
    }

    cv::namedWindow("mask", CV_WINDOW_AUTOSIZE);
    cv::namedWindow("colorImg", CV_WINDOW_AUTOSIZE);
    cv::namedWindow("depthImg", CV_WINDOW_AUTOSIZE);
    cv::namedWindow("skeletonImg", CV_WINDOW_AUTOSIZE);

    while (1)
    {
        // Zero timeout: poll each event; only fetch a stream when it has a frame.
        if (WaitForSingleObject(colorEvent, 0) == 0)
        {
            getColorImage(colorStreamHandle, colorImg);
        }
        if (WaitForSingleObject(depthEvent, 0) == 0)
        {
            getDepthImage(depthStreamHandle, depthImg, mask);
        }
        if (WaitForSingleObject(skeletonEvent, 0) == 0)
        {
            getSkeletonImage(skeletonImg, colorImg);
        }

        cv::imshow("mask", mask);
        cv::imshow("colorImg", colorImg);
        cv::imshow("depthImg", depthImg);
        cv::imshow("skeletonImg", skeletonImg);

        if (cv::waitKey(20) == 27) // ESC quits
            break;
    }

    NuiShutdown();
    cv::destroyAllWindows();
    return 0;
}

// Fetches the next color frame and copies it into colorImg (BGR).
// The locked buffer is 4 bytes per pixel (B, G, R, unused); we drop byte 3.
void getColorImage(HANDLE & colorStreamHandle, cv::Mat & colorImg)
{
    const NUI_IMAGE_FRAME * pImageFrame = NULL;
    HRESULT hr = NuiImageStreamGetNextFrame(colorStreamHandle, 0, &pImageFrame);
    if (FAILED(hr))
    {
        // NOTE(review): shutting down the whole sensor on one missed frame is
        // drastic — a transient failure kills every stream. Kept as authored.
        cout << "Could not get color image" << endl;
        NuiShutdown();
        return;
    }

    INuiFrameTexture * pTexture = pImageFrame->pFrameTexture;
    NUI_LOCKED_RECT LockedRect;
    pTexture->LockRect(0, &LockedRect, NULL, 0);
    if (LockedRect.Pitch != 0)
    {
        for (int i = 0; i < colorImg.rows; i++)
        {
            uchar *ptr = colorImg.ptr<uchar>(i); // pointer to row i of the output
            // Each byte is one channel, so walk the locked buffer as uchar.
            uchar *pBuffer = (uchar*)(LockedRect.pBits) + i * LockedRect.Pitch;
            for (int j = 0; j < colorImg.cols; j++)
            {
                // Source layout is 4 bytes/pixel: 0-1-2 are BGR, byte 3 unused.
                ptr[3 * j] = pBuffer[4 * j];
                ptr[3 * j + 1] = pBuffer[4 * j + 1];
                ptr[3 * j + 2] = pBuffer[4 * j + 2];
            }
        }
    }
    else
    {
        cout << "捕獲彩色圖像出錯" << endl;
    }
    pTexture->UnlockRect(0);
    NuiImageStreamReleaseFrame(colorStreamHandle, pImageFrame);
}

// Maps one depth+player pixel to a display color. Pixels belonging to a player
// get a per-player hue; pixels with no player (index 0) show the raw depth as gray.
BGR Depth2RGB(USHORT depthID)
{
    // Each pixel carries 16 bits: the low 3 bits are the player index,
    // the high 13 bits are the depth value.
    USHORT realDepth = (depthID & 0xfff8) >> 3; // depth: high 13 bits
    USHORT player = depthID & 0x0007;           // player index: low 3 bits

    // Normalize the distance to 0-255 for display.
    BYTE depth = (BYTE)(255 * realDepth / 0x1fff);

    BGR color_data;
    color_data.blue = color_data.green = color_data.red = 0;
    color_data.player = player;

    // Equal B/G/R gives gray (no player). The sensor can detect up to 6 people
    // but only 2 can have full skeleton tracking at once.
    switch (player)
    {
    case 0:
        color_data.blue = depth / 2;
        color_data.green = depth / 2;
        color_data.red = depth / 2;
        break;
    case 1:
        color_data.blue = depth;
        break;
    case 2:
        color_data.green = depth;
        break;
    case 3:
        color_data.red = depth;
        break;
    case 4:
        color_data.blue = depth;
        color_data.green = depth;
        color_data.red = depth / 4;
        break;
    case 5:
        color_data.blue = depth;
        color_data.green = depth / 4;
        color_data.red = depth;
        break;
    case 6:
        color_data.blue = depth / 4;
        color_data.green = depth;
        color_data.red = depth;
        break;
    }
    return color_data;
}

// Fetches the next depth frame; writes the colorized depth into depthImg and a
// flat per-player color into mask (black where no player is present).
void getDepthImage(HANDLE & depthStreamHandle, cv::Mat & depthImg, cv::Mat & mask)
{
    // Flat BGR mask color per player index (index 7 / default -> black).
    static const uchar maskColor[8][3] = {
        { 0, 0, 0 },       // no player
        { 255, 0, 0 },     // player 1
        { 0, 255, 0 },     // player 2
        { 0, 0, 255 },     // player 3
        { 255, 255, 0 },   // player 4
        { 255, 0, 255 },   // player 5
        { 0, 255, 255 },   // player 6
        { 0, 0, 0 }        // default
    };

    const NUI_IMAGE_FRAME * pImageFrame = NULL;
    HRESULT hr = NuiImageStreamGetNextFrame(depthStreamHandle, 0, &pImageFrame);
    if (FAILED(hr))
    {
        cout << "Could not get depth image" << endl;
        NuiShutdown();
        return;
    }

    INuiFrameTexture * pTexture = pImageFrame->pFrameTexture;
    NUI_LOCKED_RECT LockedRect;
    pTexture->LockRect(0, &LockedRect, NULL, 0);
    if (LockedRect.Pitch != 0)
    {
        for (int i = 0; i < depthImg.rows; i++)
        {
            uchar * ptr = depthImg.ptr<uchar>(i);
            uchar * ptr_mask = mask.ptr<uchar>(i);
            uchar *pBufferRun = (uchar*)(LockedRect.pBits) + i * LockedRect.Pitch;
            USHORT * pBuffer = (USHORT*)pBufferRun; // 16 bits per depth pixel
            for (int j = 0; j < depthImg.cols; j++)
            {
                BGR rgb = Depth2RGB(pBuffer[j]);
                ptr[3 * j] = rgb.blue;
                ptr[3 * j + 1] = rgb.green;
                ptr[3 * j + 2] = rgb.red;

                const uchar *mc = maskColor[rgb.player & 0x07];
                ptr_mask[3 * j] = mc[0];
                ptr_mask[3 * j + 1] = mc[1];
                ptr_mask[3 * j + 2] = mc[2];
            }
        }
    }
    else
    {
        cout << "捕獲深度圖像出錯" << endl;
    }
    pTexture->UnlockRect(0);
    NuiImageStreamReleaseFrame(depthStreamHandle, pImageFrame);
}

// Draws one bone: a line between joints a and b, but only if both endpoints
// were actually set (a (0,0) point means the joint was never transformed).
static void drawBone(cv::Mat &img, cv::Point pointSet[], int a, int b, const cv::Scalar &color)
{
    if ((pointSet[a].x != 0 || pointSet[a].y != 0) &&
        (pointSet[b].x != 0 || pointSet[b].y != 0))
        cv::line(img, pointSet[a], pointSet[b], color, 2);
}

// Draws the 20-joint stick figure for skeleton slot which_one (0-5), each slot
// in its own color: spine, both arms, both legs.
void drawSkeleton(cv::Mat &img, cv::Point pointSet[], int which_one)
{
    cv::Scalar color;
    switch (which_one)
    {
    case 0: color = cv::Scalar(255, 0, 0); break;
    case 1: color = cv::Scalar(0, 255, 0); break;
    case 2: color = cv::Scalar(0, 0, 255); break;
    case 3: color = cv::Scalar(255, 255, 0); break;
    case 4: color = cv::Scalar(255, 0, 255); break;
    case 5: color = cv::Scalar(0, 255, 255); break;
    }

    // Spine
    drawBone(img, pointSet, NUI_SKELETON_POSITION_HEAD, NUI_SKELETON_POSITION_SHOULDER_CENTER, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SPINE, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_SPINE, NUI_SKELETON_POSITION_HIP_CENTER, color);
    // Left arm
    drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_LEFT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_LEFT, NUI_SKELETON_POSITION_ELBOW_LEFT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_ELBOW_LEFT, NUI_SKELETON_POSITION_WRIST_LEFT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_WRIST_LEFT, NUI_SKELETON_POSITION_HAND_LEFT, color);
    // Right arm
    drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_RIGHT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_SHOULDER_RIGHT, NUI_SKELETON_POSITION_ELBOW_RIGHT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_ELBOW_RIGHT, NUI_SKELETON_POSITION_WRIST_RIGHT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_WRIST_RIGHT, NUI_SKELETON_POSITION_HAND_RIGHT, color);
    // Left leg
    drawBone(img, pointSet, NUI_SKELETON_POSITION_HIP_CENTER, NUI_SKELETON_POSITION_HIP_LEFT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_HIP_LEFT, NUI_SKELETON_POSITION_KNEE_LEFT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_KNEE_LEFT, NUI_SKELETON_POSITION_ANKLE_LEFT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_ANKLE_LEFT, NUI_SKELETON_POSITION_FOOT_LEFT, color);
    // Right leg
    drawBone(img, pointSet, NUI_SKELETON_POSITION_HIP_CENTER, NUI_SKELETON_POSITION_HIP_RIGHT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_HIP_RIGHT, NUI_SKELETON_POSITION_KNEE_RIGHT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_KNEE_RIGHT, NUI_SKELETON_POSITION_ANKLE_RIGHT, color);
    drawBone(img, pointSet, NUI_SKELETON_POSITION_ANKLE_RIGHT, NUI_SKELETON_POSITION_FOOT_RIGHT, color);
}

// Fetches the next skeleton frame; for each tracked skeleton, projects every
// joint into depth space, marks joints on skeletonImg and (via depth->color
// coordinate mapping) on colorImg, then draws the stick figure on both.
void getSkeletonImage(cv::Mat & skeletonImg, cv::Mat & colorImg)
{
    NUI_SKELETON_FRAME skeletonFrame = { 0 };
    bool foundSkeleton = false;

    HRESULT hr = NuiSkeletonGetNextFrame(0, &skeletonFrame);
    if (SUCCEEDED(hr))
    {
        // Up to 6 skeleton slots; only slots in state NUI_SKELETON_TRACKED
        // carry full joint data (at most 2 at a time).
        for (int i = 0; i < NUI_SKELETON_COUNT; i++)
        {
            NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
            if (trackingState == NUI_SKELETON_TRACKED)
            {
                foundSkeleton = true;
            }
        }
    }
    if (!foundSkeleton)
    {
        return;
    }

    NuiTransformSmooth(&skeletonFrame, NULL); // default joint smoothing
    skeletonImg.setTo(0);

    for (int i = 0; i < NUI_SKELETON_COUNT; i++)
    {
        // Accept a skeleton only if it is tracked AND its shoulder-center
        // (neck) joint is tracked.
        if (skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED &&
            skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)
        {
            float fx, fy;
            // Convert all 20 joints from skeleton space to depth-image coords,
            // since the joints are marked on the depth-sized image.
            for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
            {
                NuiTransformSkeletonToDepthImage(skeletonFrame.SkeletonData[i].SkeletonPositions[j], &fx, &fy);
                skeletonPoint[i][j].x = (int)fx;
                skeletonPoint[i][j].y = (int)fy;
            }

            for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
            {
                if (skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[j] != NUI_SKELETON_POSITION_NOT_TRACKED)
                {
                    cv::circle(skeletonImg, skeletonPoint[i][j], 3, cv::Scalar(0, 255, 255), 1, 8, 0);
                    tracked[i] = true;

                    // Also mark the joint on the color image: map the depth
                    // pixel to its 640x480 color-space coordinates.
                    LONG color_x, color_y;
                    NuiImageGetColorPixelCoordinatesFromDepthPixel(NUI_IMAGE_RESOLUTION_640x480, 0,
                        skeletonPoint[i][j].x, skeletonPoint[i][j].y, 0, &color_x, &color_y);
                    colorPoint[i][j].x = (int)color_x;
                    colorPoint[i][j].y = (int)color_y;
                    cv::circle(colorImg, colorPoint[i][j], 4, cv::Scalar(0, 255, 255), 1, 8, 0);
                }
            }

            drawSkeleton(skeletonImg, skeletonPoint[i], i);
            drawSkeleton(colorImg, colorPoint[i], i);
        }
    }
}
結果
(由于有我同學的照片,所以打了碼)
參考資料
總結
以上是生活随笔為你收集整理的Kinect学习(七):综合提取彩色、深度、人体骨骼点的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: Kinect学习(六):提取人体关节点数
- 下一篇: Eigen库基本操作