双目相机下目标三维坐标计算(四)
生活随笔
收集整理的這篇文章主要介紹了
双目相机下目标三维坐标计算(四)
小編覺得挺不錯的,現在分享給大家,幫大家做個參考.
本文來自公眾號:機器人視覺
完成雙目相機標定以后,獲得雙目相機的參數矩陣
包括左右相機的內參數矩陣、左右相機的畸變系數矩陣、右相機相對于左相機的旋轉矩陣與平移矩陣
已知左右相機圖像中的對應點坐標,獲取目標在雙目視覺傳感器下三維坐標的流程如下:
1、將雙目相機標定參數整理如下:
// ---- Stereo calibration parameters (left camera is the reference frame) ----

// Left camera intrinsic matrix K_l (fx, skew, cx / 0, fy, cy / 0, 0, 1)
float leftIntrinsic[3][3] = { 3061.6936, -0.8869, 641.3042,
                              0,          3058.8751, 508.9555,
                              0,          0,         1 };
// Left camera distortion coefficients (k1, k2, p1, p2, k3)
float leftDistortion[1][5] = { -0.0133, 0.6503, 0.0029, -0.0049, -16.8704 };
// Left camera rotation: identity, since the world frame is the left camera frame
float leftRotation[3][3] = { 1, 0, 0,
                             0, 1, 0,
                             0, 0, 1 };
// Left camera translation: zero for the same reason
float leftTranslation[1][3] = { 0, 0, 0 };

// Right camera intrinsic matrix K_r
float rightIntrinsic[3][3] = { 3069.2482, -0.8951, 620.5357,
                               0,          3069.2450, 532.7122,
                               0,          0,         1 };
// Right camera distortion coefficients (k1, k2, p1, p2, k3)
float rightDistortion[1][5] = { -0.0593, 3.4501, 0.0003, -8.5614, -58.3116 };
// Rotation of the right camera relative to the left camera
float rightRotation[3][3] = { 0.9989,  0.0131, -0.0439,
                              -0.0121, 0.9996,  0.0233,
                              0.0441,  -0.0228, 0.9987 };
// Translation of the right camera relative to the left camera (baseline, in mm presumably — confirm units)
float rightTranslation[1][3] = { -73.8389, 2.6712, 3.3792 };

// 2. Converting 2D pixel coordinates to 3D camera-frame coordinates
//************************************ // Description: 根據左右相機中成像坐標求解空間坐標 // Method: uv2xyz // FullName: uv2xyz // Parameter: Point2f uvLeft // Parameter: Point2f uvRight // Returns: cv::Point3f //************************************ Point3f uv2xyz(Point2f uvLeft, Point2f uvRight) {// [u1] |X| [u2] |X|//Z*|v1| = Ml*|Y| Z*|v2| = Mr*|Y|// [ 1] |Z| [ 1] |Z|// |1| |1|Mat mLeftRotation = Mat(3, 3, CV_32F, leftRotation);Mat mLeftTranslation = Mat(3, 1, CV_32F, leftTranslation);Mat mLeftRT = Mat(3, 4, CV_32F);//左相機M矩陣hconcat(mLeftRotation, mLeftTranslation, mLeftRT);Mat mLeftIntrinsic = Mat(3, 3, CV_32F, leftIntrinsic);Mat mLeftM = mLeftIntrinsic * mLeftRT;//cout<<"左相機M矩陣 = "<<endl<<mLeftM<<endl;Mat mRightRotation = Mat(3, 3, CV_32F, rightRotation);Mat mRightTranslation = Mat(3, 1, CV_32F, rightTranslation);Mat mRightRT = Mat(3, 4, CV_32F);//右相機M矩陣hconcat(mRightRotation, mRightTranslation, mRightRT);Mat mRightIntrinsic = Mat(3, 3, CV_32F, rightIntrinsic);Mat mRightM = mRightIntrinsic * mRightRT;//cout<<"右相機M矩陣 = "<<endl<<mRightM<<endl;//最小二乘法A矩陣Mat A = Mat(4, 3, CV_32F);A.at<float>(0, 0) = uvLeft.x * mLeftM.at<float>(2, 0) - mLeftM.at<float>(0, 0);A.at<float>(0, 1) = uvLeft.x * mLeftM.at<float>(2, 1) - mLeftM.at<float>(0, 1);A.at<float>(0, 2) = uvLeft.x * mLeftM.at<float>(2, 2) - mLeftM.at<float>(0, 2);A.at<float>(1, 0) = uvLeft.y * mLeftM.at<float>(2, 0) - mLeftM.at<float>(1, 0);A.at<float>(1, 1) = uvLeft.y * mLeftM.at<float>(2, 1) - mLeftM.at<float>(1, 1);A.at<float>(1, 2) = uvLeft.y * mLeftM.at<float>(2, 2) - mLeftM.at<float>(1, 2);A.at<float>(2, 0) = uvRight.x * mRightM.at<float>(2, 0) - mRightM.at<float>(0, 0);A.at<float>(2, 1) = uvRight.x * mRightM.at<float>(2, 1) - mRightM.at<float>(0, 1);A.at<float>(2, 2) = uvRight.x * mRightM.at<float>(2, 2) - mRightM.at<float>(0, 2);A.at<float>(3, 0) = uvRight.y * mRightM.at<float>(2, 0) - mRightM.at<float>(1, 0);A.at<float>(3, 1) = uvRight.y * mRightM.at<float>(2, 1) - mRightM.at<float>(1, 1);A.at<float>(3, 2) = uvRight.y * 
mRightM.at<float>(2, 2) - mRightM.at<float>(1, 2);//最小二乘法B矩陣Mat B = Mat(4, 1, CV_32F);B.at<float>(0, 0) = mLeftM.at<float>(0, 3) - uvLeft.x * mLeftM.at<float>(2, 3);B.at<float>(1, 0) = mLeftM.at<float>(1, 3) - uvLeft.y * mLeftM.at<float>(2, 3);B.at<float>(2, 0) = mRightM.at<float>(0, 3) - uvRight.x * mRightM.at<float>(2, 3);B.at<float>(3, 0) = mRightM.at<float>(1, 3) - uvRight.y * mRightM.at<float>(2, 3);Mat XYZ = Mat(3, 1, CV_32F);//采用SVD最小二乘法求解XYZsolve(A, B, XYZ, DECOMP_SVD);//cout<<"空間坐標為 = "<<endl<<XYZ<<endl;//世界坐標系中坐標Point3f world;world.x = XYZ.at<float>(0, 0);world.y = XYZ.at<float>(1, 0);world.z = XYZ.at<float>(2, 0);return world; }//************************************ // Description: 將世界坐標系中的點投影到左右相機成像坐標系中 // Method: xyz2uv // FullName: xyz2uv // Parameter: Point3f worldPoint // Parameter: float intrinsic[3][3] // Parameter: float translation[1][3] // Parameter: float rotation[3][3] // Returns: cv::Point2f //************************************ Point2f xyz2uv(Point3f worldPoint, float intrinsic[3][3], float translation[1][3], float rotation[3][3]) {// [fx s x0] [Xc] [Xw] [u] 1 [Xc]//K = |0 fy y0| TEMP = [R T] |Yc| = TEMP*|Yw| | | = —*K *|Yc|// [ 0 0 1 ] [Zc] |Zw| [v] Zc [Zc]// [1 ]Point3f c;c.x = rotation[0][0] * worldPoint.x + rotation[0][1] * worldPoint.y + rotation[0][2] * worldPoint.z + translation[0][0] * 1;c.y = rotation[1][0] * worldPoint.x + rotation[1][1] * worldPoint.y + rotation[1][2] * worldPoint.z + translation[0][1] * 1;c.z = rotation[2][0] * worldPoint.x + rotation[2][1] * worldPoint.y + rotation[2][2] * worldPoint.z + translation[0][2] * 1;Point2f uv;uv.x = (intrinsic[0][0] * c.x + intrinsic[0][1] * c.y + intrinsic[0][2] * c.z) / c.z;uv.y = (intrinsic[1][0] * c.x + intrinsic[1][1] * c.y + intrinsic[1][2] * c.z) / c.z;return uv; }3、由像素坐標獲取三維坐標
Point2f l = (638, 393);Point2f r = (85, 502);Point3f worldPoint;worldPoint = uv2xyz(l, r);cout << "空間坐標為:" << endl << uv2xyz(l, r) << endl;
更換點對測試:
線結構光傳感器標定(相機標定+結構光標定)完整流程(一)
https://blog.csdn.net/qq_27353621/article/details/120787942
UR機器人手眼標定(二)
https://blog.csdn.net/qq_27353621/article/details/121603215
雙目相機標定(三)
https://blog.csdn.net/qq_27353621/article/details/121031972
公眾號:機器人視覺
總結
以上是生活随笔為你收集整理的双目相机下目标三维坐标计算(四)的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: 通过commons-email-1.5简
- 下一篇: redis GEO 结构 坐标计算