Better Late Than Never: Hand-Eye Calibration, the Code Implementation
Hi, I'm Xiaoyu. It's the weekend, and under my girlfriend's direction I got a sharp new haircut. Today let's talk about the code implementation of hand-eye calibration.
I previously covered the principles of the Tsai hand-eye calibration algorithm; today I'll walk through the code, with Python, C++, and Matlab implementations.
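A one-paragraph refresher on the principle post: for any two robot poses i and j, the gripper motion A = inv(Hg_j) * Hg_i and the camera motion B = Hc_j * inv(Hc_i) are tied to the unknown hand-eye transform X by the classic equation AX = XB. Tsai-Lenz solves it in two linear steps: first the rotation of X, from a least-squares system built on the quaternion form of the rotations (eq. 12 in the paper), then the translation (eq. 15). You will see exactly this two-step structure in all three implementations below.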
- The algorithm works both when the camera is mounted on the gripper (eye-in-hand) and when it is mounted externally (eye-to-hand); see the sketch just after this list.
- The paper has been uploaded to the git repo: https://gitee.com/ohhuo/handeye-tsai
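To make the first point concrete, here is a minimal sketch (my illustration, not code from the repo) of how the two mounting cases differ only in what you feed the solver; the inversion trick for the fixed-camera case is the same one described in OpenCV's calibrateHandEye documentation:

```python
import numpy as np

# Eye-in-hand (camera mounted on the gripper):
#   feed gripper->base poses and target->camera poses;
#   the solver returns the camera->gripper transform.
#
# Eye-to-hand (camera mounted externally, fixed in the workcell):
#   invert the robot poses first so the solver sees base->gripper motions;
#   the result is then the camera->base transform.
def to_base2gripper(gripper2base_poses):
    """Invert each 4x4 gripper->base pose to base->gripper (eye-to-hand case)."""
    return [np.linalg.inv(H) for H in gripper2base_poses]
```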
Python Version
Install the required libraries first:
```
pip3 install transforms3d
pip3 install numpy
```

```python
#!/usr/bin/env python
# coding: utf-8
import transforms3d as tfs
import numpy as np
import math

def get_matrix_eular_radu(x, y, z, rx, ry, rz):
    # Build a 4x4 homogeneous transform from an xyz translation and Euler angles in degrees
    rmat = tfs.euler.euler2mat(math.radians(rx), math.radians(ry), math.radians(rz))
    return tfs.affines.compose(np.squeeze(np.asarray((x, y, z))), rmat, [1, 1, 1])

def skew(v):
    # Skew-symmetric (cross-product) matrix of a 3-vector
    return np.array([[0, -v[2], v[1]],
                     [v[2], 0, -v[0]],
                     [-v[1], v[0], 0]])

def rot2quat_minimal(m):
    # Vector (x, y, z) part of the unit quaternion of a rotation matrix
    quat = tfs.quaternions.mat2quat(m[0:3, 0:3])
    return quat[1:]

def quatMinimal2rot(q):
    # Recover the full unit quaternion from its vector part, then convert to a rotation matrix
    p = np.dot(q.T, q)
    w = np.sqrt(np.subtract(1, p[0][0]))
    return tfs.quaternions.quat2mat([w, q[0], q[1], q[2]])

# Gripper->base poses: x, y, z, rx, ry, rz (angles in degrees), 6 numbers per pose
hand = [1.1988093940033604, -0.42405585264804424, 0.18828251788562061, 151.3390418721659, -18.612399542280507, 153.05074895025035,
        1.1684831621733476, -0.183273375514656, 0.12744868246620855, -161.57083804238462, 9.07159838346732, 89.1641128844487,
        1.1508343174145468, -0.22694301453461405, 0.26625166858469146, 177.8815855486261, 0.8991159570568988, 77.67286224959672]
# Target->camera poses in the same format
camera = [-0.16249272227287292, -0.047310635447502136, 0.4077761471271515, -56.98037030812389, -6.16739631361851, -115.84333735802369,
          0.03955405578017235, -0.013497642241418362, 0.33975949883461, -100.87129330834215, -17.192685528625265, -173.07354634882094,
          -0.08517949283123016, 0.00957852229475975, 0.46546608209609985, -90.85270962096058, 0.9315977976503153, 175.2059707654342]

Hgs, Hcs = [], []
for i in range(0, len(hand), 6):
    Hgs.append(get_matrix_eular_radu(hand[i], hand[i+1], hand[i+2], hand[i+3], hand[i+4], hand[i+5]))
    Hcs.append(get_matrix_eular_radu(camera[i], camera[i+1], camera[i+2], camera[i+3], camera[i+4], camera[i+5]))

# Step 1: solve the rotation from skew(Pgij+Pcij) * Pcg_ = Pcij - Pgij (eq 12)
Hgijs = []
Hcijs = []
A = []
B = []
size = 0
for i in range(len(Hgs)):
    for j in range(i+1, len(Hgs)):
        size += 1
        Hgij = np.dot(np.linalg.inv(Hgs[j]), Hgs[i])  # gripper motion from pose i to pose j (eq 6)
        Hgijs.append(Hgij)
        Pgij = np.dot(2, rot2quat_minimal(Hgij))

        Hcij = np.dot(Hcs[j], np.linalg.inv(Hcs[i]))  # camera motion from pose i to pose j (eq 7)
        Hcijs.append(Hcij)
        Pcij = np.dot(2, rot2quat_minimal(Hcij))

        A.append(skew(np.add(Pgij, Pcij)))
        B.append(np.subtract(Pcij, Pgij))

MA = np.asarray(A).reshape(size*3, 3)
MB = np.asarray(B).reshape(size*3, 1)
Pcg_ = np.dot(np.linalg.pinv(MA), MB)
pcg_norm = np.dot(np.conjugate(Pcg_).T, Pcg_)
# Rescale the non-unit quaternion back to unit length (eq 14)
Pcg = np.sqrt(np.add(1, pcg_norm))
Pcg = np.dot(np.dot(2, Pcg_), np.linalg.inv(Pcg))
Rcg = quatMinimal2rot(np.divide(Pcg, 2)).reshape(3, 3)

# Step 2: solve the translation from (Rgij - I) * Tcg = Rcg*Tcij - Tgij (eq 15)
A = []
B = []
idx = 0
for i in range(len(Hgs)):
    for j in range(i+1, len(Hgs)):
        Hgij = Hgijs[idx]
        Hcij = Hcijs[idx]
        A.append(np.subtract(Hgij[0:3, 0:3], np.eye(3, 3)))
        B.append(np.subtract(np.dot(Rcg, Hcij[0:3, 3:4]), Hgij[0:3, 3:4]))
        idx += 1

MA = np.asarray(A).reshape(size*3, 3)
MB = np.asarray(B).reshape(size*3, 1)
Tcg = np.dot(np.linalg.pinv(MA), MB).reshape(3,)
print(tfs.affines.compose(Tcg, np.squeeze(Rcg), [1, 1, 1]))
```

Running it produces:
```
python3 tsai.py
[[-0.01522186 -0.99983174 -0.01023609 -0.02079774]
 [ 0.99976822 -0.01506342 -0.01538198  0.00889827]
 [ 0.0152252  -0.01046786  0.99982929  0.08324514]
 [ 0.          0.          0.          1.        ]]
```
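Before moving on, a quick sanity check I like to run (a sketch of mine, not part of the original script): substitute the estimated transform X back into AX = XB for every pose pair and inspect the residuals, which should be near zero for consistent data. This assumes Hgs, Hcs, Rcg, and Tcg from the script above are still in scope:

```python
# Residual check: for each pose pair, Hgij @ X should match X @ Hcij.
X = tfs.affines.compose(Tcg, np.squeeze(Rcg), [1, 1, 1])
for i in range(len(Hgs)):
    for j in range(i + 1, len(Hgs)):
        Hgij = np.dot(np.linalg.inv(Hgs[j]), Hgs[i])
        Hcij = np.dot(Hcs[j], np.linalg.inv(Hcs[i]))
        print("pair (%d,%d): residual = %.6f"
              % (i, j, np.linalg.norm(np.dot(Hgij, X) - np.dot(X, Hcij))))
```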
C++ Version:

```cpp
//Reference:
//R. Y. Tsai and R. K. Lenz, "A new technique for fully autonomous and efficient 3D
//robotics hand/eye calibration." In IEEE Transactions on Robotics and Automation,
//vol. 5, no. 3, pp. 345-358, June 1989.
//C++ code converted from Zoran Lazarevic's Matlab code:
//http://lazax.com/www.cs.columbia.edu/~laza/html/Stewart/matlab/handEye.m
static void calibrateHandEyeTsai(const std::vector<Mat>& Hg, const std::vector<Mat>& Hc,
                                 Mat& R_cam2gripper, Mat& t_cam2gripper)
{
    //Number of unique camera position pairs
    int K = static_cast<int>((Hg.size()*Hg.size() - Hg.size()) / 2.0);
    //Will store: skew(Pgij+Pcij)
    Mat A(3*K, 3, CV_64FC1);
    //Will store: Pcij - Pgij
    Mat B(3*K, 1, CV_64FC1);

    std::vector<Mat> vec_Hgij, vec_Hcij;
    vec_Hgij.reserve(static_cast<size_t>(K));
    vec_Hcij.reserve(static_cast<size_t>(K));

    int idx = 0;
    for (size_t i = 0; i < Hg.size(); i++)
    {
        for (size_t j = i+1; j < Hg.size(); j++, idx++)
        {
            //Defines coordinate transformation from Gi to Gj
            //Hgi is from Gi (gripper) to RW (robot base)
            //Hgj is from Gj (gripper) to RW (robot base)
            Mat Hgij = homogeneousInverse(Hg[j]) * Hg[i]; //eq 6
            vec_Hgij.push_back(Hgij);
            //Rotation axis for Rgij which is the 3D rotation from gripper coordinate frame Gi to Gj
            Mat Pgij = 2*rot2quatMinimal(Hgij);

            //Defines coordinate transformation from Ci to Cj
            //Hci is from CW (calibration target) to Ci (camera)
            //Hcj is from CW (calibration target) to Cj (camera)
            Mat Hcij = Hc[j] * homogeneousInverse(Hc[i]); //eq 7
            vec_Hcij.push_back(Hcij);
            //Rotation axis for Rcij
            Mat Pcij = 2*rot2quatMinimal(Hcij);

            //Left-hand side: skew(Pgij+Pcij)
            skew(Pgij+Pcij).copyTo(A(Rect(0, idx*3, 3, 3)));
            //Right-hand side: Pcij - Pgij
            Mat diff = Pcij - Pgij;
            diff.copyTo(B(Rect(0, idx*3, 1, 3)));
        }
    }

    Mat Pcg_;
    //Rotation from camera to gripper is obtained from the set of equations:
    //    skew(Pgij+Pcij) * Pcg_ = Pcij - Pgij    (eq 12)
    solve(A, B, Pcg_, DECOMP_SVD);

    Mat Pcg_norm = Pcg_.t() * Pcg_;
    //Obtained non-unit quaternion is scaled back to unit value that
    //designates camera-gripper rotation
    Mat Pcg = 2 * Pcg_ / sqrt(1 + Pcg_norm.at<double>(0,0)); //eq 14

    Mat Rcg = quatMinimal2rot(Pcg/2.0);

    idx = 0;
    for (size_t i = 0; i < Hg.size(); i++)
    {
        for (size_t j = i+1; j < Hg.size(); j++, idx++)
        {
            //Defines coordinate transformation from Gi to Gj
            //Hgi is from Gi (gripper) to RW (robot base)
            //Hgj is from Gj (gripper) to RW (robot base)
            Mat Hgij = vec_Hgij[static_cast<size_t>(idx)];
            //Defines coordinate transformation from Ci to Cj
            //Hci is from CW (calibration target) to Ci (camera)
            //Hcj is from CW (calibration target) to Cj (camera)
            Mat Hcij = vec_Hcij[static_cast<size_t>(idx)];

            //Left-hand side: (Rgij - I)
            Mat diff = Hgij(Rect(0,0,3,3)) - Mat::eye(3,3,CV_64FC1);
            diff.copyTo(A(Rect(0, idx*3, 3, 3)));

            //Right-hand side: Rcg*Tcij - Tgij
            diff = Rcg*Hcij(Rect(3, 0, 1, 3)) - Hgij(Rect(3, 0, 1, 3));
            diff.copyTo(B(Rect(0, idx*3, 1, 3)));
        }
    }

    Mat Tcg;
    //Translation from camera to gripper is obtained from the set of equations:
    //    (Rgij - I) * Tcg = Rcg*Tcij - Tgij    (eq 15)
    solve(A, B, Tcg, DECOMP_SVD);

    R_cam2gripper = Rcg;
    t_cam2gripper = Tcg;
}
```

How to use the C++ version:
Terminal commands:
```
git clone https://gitee.com/ohhuo/handeye-tsai.git
cd handeye-tsai/cpp
mkdir build
cd build
cmake ..
make
./opencv_example
```
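As far as I can tell from the reference comments, this C++ function is the same Tsai solver that ships inside OpenCV, exposed in Python as cv2.calibrateHandEye with method CALIB_HAND_EYE_TSAI (OpenCV 4.1+). That makes for a convenient cross-check of the hand-rolled Python script; a sketch, assuming the Hgs and Hcs lists from the Python section are in scope:

```python
import cv2
import numpy as np

# Split each 4x4 homogeneous pose into the rotation and translation blocks
# that cv2.calibrateHandEye expects.
R_g2b = [np.asarray(H)[:3, :3] for H in Hgs]   # gripper->base rotations
t_g2b = [np.asarray(H)[:3, 3] for H in Hgs]    # gripper->base translations
R_t2c = [np.asarray(H)[:3, :3] for H in Hcs]   # target->camera rotations
t_t2c = [np.asarray(H)[:3, 3] for H in Hcs]    # target->camera translations

R_cam2gripper, t_cam2gripper = cv2.calibrateHandEye(
    R_g2b, t_g2b, R_t2c, t_t2c, method=cv2.CALIB_HAND_EYE_TSAI)
print(R_cam2gripper)
print(t_cam2gripper.ravel())
```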
Matlab Version:
```matlab
% handEye - performs hand/eye calibration
%
%     gHc = handEye(bHg, wHc)
%
%     bHg - pose of gripper relative to the robot base..
%           (Gripper center is at: g0 = Hbg * [0;0;0;1] )
%           Matrix dimensions are 4x4xM, where M is ..
%           .. number of camera positions.
%           Algorithm gives a non-singular solution when ..
%           .. at least 3 positions are given
%           Hbg(:,:,i) is i-th homogeneous transformation matrix
%     wHc - pose of camera relative to the world ..
%           (relative to the calibration block)
%           Dimension: size(Hwc) = size(Hbg)
%     gHc - 4x4 homogeneous transformation from gripper to camera
%           , that is the camera position relative to the gripper.
%           Focal point of the camera is positioned, ..
%           .. relative to the gripper, at
%                 f = gHc*[0;0;0;1];
%
% References: R.Tsai, R.K.Lenz "A new Technique for Fully Autonomous
%           and Efficient 3D Robotics Hand/Eye calibration", IEEE
%           trans. on Robotics and Automation, Vol.5, No.3, June 1989
%
% Notation: wHc - pose of camera frame (c) in the world (w) coordinate system
%           .. If a point coordinates in camera frame (cP) are known
%           ..     wP = wHc * cP
%           .. we get the point coordinates (wP) in world coord.sys.
%           .. Also referred to as transformation from camera to world

function gHc = handEye(bHg, wHc)

M = size(bHg,3);

K = (M*M-M)/2;             % Number of unique camera position pairs
A = zeros(3*K,3);          % will store: skew(Pgij+Pcij)
B = zeros(3*K,1);          % will store: Pcij - Pgij
k = 0;

% Now convert from wHc notation to Hc notation used in Tsai paper.
Hg = bHg;
% Hc = cHw = inv(wHc); We do it in a loop because wHc is given, not cHw
Hc = zeros(4,4,M);
for i = 1:M, Hc(:,:,i) = inv(wHc(:,:,i)); end;

for i = 1:M,
    for j = i+1:M;
        Hgij = inv(Hg(:,:,j))*Hg(:,:,i);  % Transformation from i-th to j-th gripper pose
        Pgij = 2*rot2quat(Hgij);          % ... and the corresponding quaternion

        Hcij = Hc(:,:,j)*inv(Hc(:,:,i));  % Transformation from i-th to j-th camera pose
        Pcij = 2*rot2quat(Hcij);          % ... and the corresponding quaternion

        k = k+1;                          % Form linear system of equations
        A((3*k-3)+(1:3), 1:3) = skew(Pgij+Pcij); % left-hand side
        B((3*k-3)+(1:3))      = Pcij - Pgij;     % right-hand side

    end;
end;

% Rotation from camera to gripper is obtained from the set of equations:
%    skew(Pgij+Pcij) * Pcg_ = Pcij - Pgij
% Gripper with camera is first moved to M different poses, then the gripper
% .. and camera poses are obtained for all poses. The above equation uses
% .. invariances present between each pair of i-th and j-th pose.

Pcg_ = A \ B;              % Solve the equation A*Pcg_ = B

% Obtained non-unit quaternion is scaled back to unit value that
% .. designates camera-gripper rotation
Pcg = 2 * Pcg_ / sqrt(1 + Pcg_'*Pcg_);

Rcg = quat2rot(Pcg/2);     % Rotation matrix

% Calculate translational component
k = 0;
for i = 1:M,
    for j = i+1:M;
        Hgij = inv(Hg(:,:,j))*Hg(:,:,i);  % Transformation from i-th to j-th gripper pose
        Hcij = Hc(:,:,j)*inv(Hc(:,:,i));  % Transformation from i-th to j-th camera pose

        k = k+1;                          % Form linear system of equations
        A((3*k-3)+(1:3), 1:3) = Hgij(1:3,1:3)-eye(3);                   % left-hand side
        B((3*k-3)+(1:3))      = Rcg(1:3,1:3)*Hcij(1:3,4) - Hgij(1:3,4); % right-hand side

    end;
end;

Tcg = A \ B;

gHc = transl(Tcg) * Rcg;   % incorporate translation with rotation

return
```

If you spot any mistakes, please point them out and I'll fix them as soon as possible~
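One practical note on the Matlab version: it relies on four helpers not shown here: rot2quat, quat2rot, skew, and transl. The first three play the same roles as rot2quat_minimal, quatMinimal2rot, and skew in the Python script; transl builds a 4x4 homogeneous translation matrix (a function of the same name exists in Peter Corke's Robotics Toolbox). As far as I can tell, they ship alongside Lazarevic's original handEye.m linked in the C++ comments.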
About the author:
I'm Xiaoyu, a seasoned player in the robotics field, currently a robotics algorithm engineer at a unicorn company in Shenzhen.
I started programming in middle school and robotics in high school; during college, robotics competition prize money brought in over ¥20,000 a month.
These days I publish robotics learning guides, paper annotations, and notes from work experience. Follow me to talk tech and learn robotics together.