Better late than never: hand-eye calibration, the code implementation
A previous post covered the theory behind Tsai's hand-eye calibration algorithm; this one walks through the implementation, with Python, C++, and Matlab versions.
• The algorithm covers both setups: camera mounted on the gripper (eye-in-hand) and camera mounted externally (eye-to-hand).
• The paper has been uploaded to the Git repository:
https://gitee.com/ohhuo/handeye-tsai
Python version
Install the required libraries first:
pip3 install transforms3d
pip3 install numpy
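Before running the script below, a quick import check confirms the environment is set up (a convenience step, not part of the original post):
python3 -c "import transforms3d, numpy; print('ok')"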
#!/usr/bin/env python
# coding: utf-8
import transforms3d as tfs
import numpy as np
import math

def get_matrix_eular_radu(x,y,z,rx,ry,rz):
    rmat = tfs.euler.euler2mat(math.radians(rx),math.radians(ry),math.radians(rz))
    rmat = tfs.affines.compose(np.squeeze(np.asarray((x,y,z))), rmat, [1, 1, 1])
    return rmat

def skew(v):
    return np.array([[0,-v[2],v[1]],
                     [v[2],0,-v[0]],
                     [-v[1],v[0],0]])

def rot2quat_minimal(m):
    quat = tfs.quaternions.mat2quat(m[0:3,0:3])
    return quat[1:]

def quatMinimal2rot(q):
    p = np.dot(q.T,q)
    w = np.sqrt(np.subtract(1,p[0][0]))
    return tfs.quaternions.quat2mat([w,q[0],q[1],q[2]])
hand = [1.1988093940033604, -0.42405585264804424, 0.18828251788562061, 151.3390418721659, -18.612399542280507, 153.05074895025035,
1.1684831621733476, -0.183273375514656, 0.12744868246620855, -161.57083804238462, 9.07159838346732, 89.1641128844487,
1.1508343174145468, -0.22694301453461405, 0.26625166858469146, 177.8815855486261, 0.8991159570568988, 77.67286224959672]
camera = [-0.16249272227287292, -0.047310635447502136, 0.4077761471271515, -56.98037030812389, -6.16739631361851, -115.84333735802369,
0.03955405578017235, -0.013497642241418362, 0.33975949883461, -100.87129330834215, -17.192685528625265, -173.07354634882094,
-0.08517949283123016, 0.00957852229475975, 0.46546608209609985, -90.85270962096058, 0.9315977976503153, 175.2059707654342]
Hgs,Hcs = [],[]
for i in range(0,len(hand),6):
    Hgs.append(get_matrix_eular_radu(hand[i],hand[i+1],hand[i+2],hand[i+3],hand[i+4],hand[i+5]))
    Hcs.append(get_matrix_eular_radu(camera[i],camera[i+1],camera[i+2],camera[i+3],camera[i+4],camera[i+5]))
Hgijs = []
Hcijs = []
A = []
B = []
size = 0
for i in range(len(Hgs)):
    for j in range(i+1,len(Hgs)):
        size += 1
        Hgij = np.dot(np.linalg.inv(Hgs[j]),Hgs[i])
        Hgijs.append(Hgij)
        Pgij = np.dot(2,rot2quat_minimal(Hgij))
        Hcij = np.dot(Hcs[j],np.linalg.inv(Hcs[i]))
        Hcijs.append(Hcij)
        Pcij = np.dot(2,rot2quat_minimal(Hcij))
        A.append(skew(np.add(Pgij,Pcij)))
        B.append(np.subtract(Pcij,Pgij))
MA = np.asarray(A).reshape(size*3,3)
MB = np.asarray(B).reshape(size*3,1)
# Solve skew(Pgij+Pcij) * Pcg_ = Pcij - Pgij (eq 12), then rescale to a unit quaternion (eq 14)
Pcg_ = np.dot(np.linalg.pinv(MA),MB)
pcg_norm = np.dot(np.conjugate(Pcg_).T,Pcg_)
Pcg = np.sqrt(np.add(1,np.dot(Pcg_.T,Pcg_)))
Pcg = np.dot(np.dot(2,Pcg_),np.linalg.inv(Pcg))
Rcg = quatMinimal2rot(np.divide(Pcg,2)).reshape(3,3)
A = []
B = []
id = 0
for i in range(len(Hgs)):
    for j in range(i+1,len(Hgs)):
        Hgij = Hgijs[id]
        Hcij = Hcijs[id]
        A.append(np.subtract(Hgij[0:3,0:3],np.eye(3,3)))
        B.append(np.subtract(np.dot(Rcg,Hcij[0:3,3:4]),Hgij[0:3,3:4]))
        id += 1
MA = np.asarray(A).reshape(size*3,3)
MB = np.asarray(B).reshape(size*3,1)
# Solve (Rgij - I) * Tcg = Rcg*Tcij - Tgij (eq 15) for the translation
Tcg = np.dot(np.linalg.pinv(MA),MB).reshape(3,)
print(tfs.affines.compose(Tcg,np.squeeze(Rcg),[1,1,1]))
Output:
python3 tsai.py
[[-0.01522186 -0.99983174 -0.01023609 -0.02079774]
[ 0.99976822 -0.01506342 -0.01538198 0.00889827]
[ 0.0152252 -0.01046786 0.99982929 0.08324514]
[ 0. 0. 0. 1. ]]
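The printed 4x4 matrix is the estimated camera-to-gripper transform X. A quick sanity check is the hand-eye relation itself, Hgij * X = X * Hcij, which every pose pair should satisfy up to measurement noise. The lines below are a minimal sketch, not part of the original tsai.py; they assume they are appended to the script so that Hgijs, Hcijs, Rcg and Tcg are still in scope:
# Sanity check (appended after the script above): X = cam->gripper should satisfy Hgij * X = X * Hcij up to noise
X = tfs.affines.compose(Tcg, np.squeeze(Rcg), [1, 1, 1])
for Hgij, Hcij in zip(Hgijs, Hcijs):
    print("AX - XB residual:", np.linalg.norm(np.dot(Hgij, X) - np.dot(X, Hcij)))
Small residuals indicate the recorded robot and camera poses are consistent with the estimated transform; a large one usually points at a pose logged in the wrong frame or units.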
C++ version:
//Reference:
//R. Y. Tsai and R. K. Lenz, "A new technique for fully autonomous and efficient 3D robotics hand/eye calibration."
//In IEEE Transactions on Robotics and Automation, vol. 5, no. 3, pp. 345-358, June 1989.
//C++ code converted from Zoran Lazarevic's Matlab code:
//http://lazax.com/www.cs.columbia.edu/~laza/html/Stewart/matlab/handEye.m
static void calibrateHandEyeTsai(const std::vector<Mat>& Hg, const std::vector<Mat>& Hc,
                                 Mat& R_cam2gripper, Mat& t_cam2gripper)
{
    //Number of unique camera position pairs
    int K = static_cast<int>((Hg.size()*Hg.size() - Hg.size()) / 2.0);
    //Will store: skew(Pgij+Pcij)
    Mat A(3*K, 3, CV_64FC1);
    //Will store: Pcij - Pgij
    Mat B(3*K, 1, CV_64FC1);
    std::vector<Mat> vec_Hgij, vec_Hcij;
    vec_Hgij.reserve(static_cast<size_t>(K));
    vec_Hcij.reserve(static_cast<size_t>(K));

    int idx = 0;
    for (size_t i = 0; i < Hg.size(); i++)
    {
        for (size_t j = i+1; j < Hg.size(); j++, idx++)
        {
            //Defines coordinate transformation from Gi to Gj
            //Hgi is from Gi (gripper) to RW (robot base)
            //Hgj is from Gj (gripper) to RW (robot base)
            Mat Hgij = homogeneousInverse(Hg[j]) * Hg[i]; //eq 6
            vec_Hgij.push_back(Hgij);
            //Rotation axis for Rgij which is the 3D rotation from gripper coordinate frame Gi to Gj
            Mat Pgij = 2*rot2quatMinimal(Hgij);
            //Defines coordinate transformation from Ci to Cj
            //Hci is from CW (calibration target) to Ci (camera)
            //Hcj is from CW (calibration target) to Cj (camera)
            Mat Hcij = Hc[j] * homogeneousInverse(Hc[i]); //eq 7
            vec_Hcij.push_back(Hcij);
            //Rotation axis for Rcij
            Mat Pcij = 2*rot2quatMinimal(Hcij);
            //Left-hand side: skew(Pgij+Pcij)
            skew(Pgij+Pcij).copyTo(A(Rect(0, idx*3, 3, 3)));
            //Right-hand side: Pcij - Pgij
            Mat diff = Pcij - Pgij;
            diff.copyTo(B(Rect(0, idx*3, 1, 3)));
        }
    }
    Mat Pcg_;
    //Rotation from camera to gripper is obtained from the set of equations:
    //    skew(Pgij+Pcij) * Pcg_ = Pcij - Pgij    (eq 12)
    solve(A, B, Pcg_, DECOMP_SVD);

    Mat Pcg_norm = Pcg_.t() * Pcg_;
    //Obtained non-unit quaternion is scaled back to unit value that
    //designates camera-gripper rotation
    Mat Pcg = 2 * Pcg_ / sqrt(1 + Pcg_norm.at<double>(0,0)); //eq 14
    Mat Rcg = quatMinimal2rot(Pcg/2.0);

    idx = 0;
    for (size_t i = 0; i < Hg.size(); i++)
    {
        for (size_t j = i+1; j < Hg.size(); j++, idx++)
        {
            //Defines coordinate transformation from Gi to Gj
            //Hgi is from Gi (gripper) to RW (robot base)
            //Hgj is from Gj (gripper) to RW (robot base)
            Mat Hgij = vec_Hgij[static_cast<size_t>(idx)];
            //Defines coordinate transformation from Ci to Cj
            //Hci is from CW (calibration target) to Ci (camera)
            //Hcj is from CW (calibration target) to Cj (camera)
            Mat Hcij = vec_Hcij[static_cast<size_t>(idx)];
            //Left-hand side: (Rgij - I)
            Mat diff = Hgij(Rect(0,0,3,3)) - Mat::eye(3,3,CV_64FC1);
            diff.copyTo(A(Rect(0, idx*3, 3, 3)));
            //Right-hand side: Rcg*Tcij - Tgij
            diff = Rcg*Hcij(Rect(3, 0, 1, 3)) - Hgij(Rect(3, 0, 1, 3));
            diff.copyTo(B(Rect(0, idx*3, 1, 3)));
        }
    }

    Mat Tcg;
    //Translation from camera to gripper is obtained from the set of equations:
    //    (Rgij - I) * Tcg = Rcg*Tcij - Tgij    (eq 15)
    solve(A, B, Tcg, DECOMP_SVD);

    R_cam2gripper = Rcg;
    t_cam2gripper = Tcg;
}
How to build and run the C++ version:
Terminal commands:
git clone https://gitee.com/ohhuo/handeye-tsai.git
cd handeye-tsai/cpp
mkdir build
cd build
cmake ..
make
./opencv_example
Example session:
sangxin@sangxin-ubu~ git clone https://gitee.com/ohhuo/handeye-tsai.git
Cloning into 'handeye-tsai'...
remote: Enumerating objects: 60, done.
remote: Counting objects: 100% (60/60), done.
remote: Compressing objects: 100% (57/57), done.
remote: Total 60 (delta 9), reused 0 (delta 0), pack-reused 0
Unpacking objects: 100% (60/60), done.
sangxin@sangxin-ubu~ cd handeye-tsai/cpp
sangxin@sangxin-ubu~ mkdir build
sangxin@sangxin-ubu~ cd build
sangxin@sangxin-ubu~ cmake ..
-- The C compiler identification is GNU 7.5.0
-- The CXX compiler identification is GNU 7.5.0
-- Check for working C compiler: /usr/bin/cc
-- Check for working C compiler: /usr/bin/cc -- works
-- Detecting C compiler ABI info
-- Detecting C compiler ABI info - done
-- Detecting C compile features
-- Detecting C compile features - done
-- Check for working CXX compiler: /usr/bin/c++
-- Check for working CXX compiler: /usr/bin/c++ -- works
-- Detecting CXX compiler ABI info
-- Detecting CXX compiler ABI info - done
-- Detecting CXX compile features
-- Detecting CXX compile features - done
-- Found OpenCV: /usr/local (found version "4.5.1")
-- OpenCV library status:
-- config: /usr/local/lib/cmake/opencv4
-- version: 4.5.1
-- libraries: opencv_calib3d;opencv_core;opencv_dnn;opencv_features2d;opencv_flann;opencv_gapi;opencv_highgui;opencv_imgcodecs;opencv_imgproc;opencv_ml;opencv_objdetect;opencv_photo;opencv_stitching;opencv_video;opencv_videoio
-- include path: /usr/local/include/opencv4
-- Configuring done
-- Generating done
-- Build files have been written to: /home/sangxin/code/ramp/other/handeye-tsai/cpp/build
sangxin@sangxin-ubu~ make
Scanning dependencies of target opencv_example
[ 33%] Building CXX object CMakeFiles/opencv_example.dir/example.cpp.o
[ 66%] Building CXX object CMakeFiles/opencv_example.dir/calibration_handeye.cpp.o
[100%] Linking CXX executable opencv_example
[100%] Built target opencv_example
sangxin@sangxin-ubu~ ./opencv_example
Hand eye calibration
[0.02534592279128711, -0.999507800830298, -0.01848621857599331, 0.03902588103574497;
0.99953544041497, 0.02502485833258339, 0.01739712102291752, 0.002933439485668206;
-0.01692594317342544, -0.01891857671220042, 0.9996777480282706, -0.01033683416650518;
0, 0, 0, 1]
Homo_cam2gripper contains a valid rotation matrix: 1
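The C++ routine above is essentially the internal Tsai branch behind OpenCV's public calibrateHandEye API, and the final "1" simply reports that the 3x3 block of Homo_cam2gripper passed a rotation-matrix check (orthonormal, determinant +1). As a cross-check, the same computation can be run from Python through the public API; the following is a minimal sketch, not part of the repository, assuming OpenCV >= 4.1 and that the Hgs/Hcs pose lists from the Python section above are available (i.e. the camera data holds the calibration-target pose expressed in the camera frame):
# Cross-check with OpenCV's public API, reusing the Hgs / Hcs lists built in the Python section.
import cv2

R_gripper2base = [H[0:3, 0:3] for H in Hgs]   # gripper pose in the robot base frame
t_gripper2base = [H[0:3, 3] for H in Hgs]
R_target2cam   = [H[0:3, 0:3] for H in Hcs]   # calibration target pose in the camera frame
t_target2cam   = [H[0:3, 3] for H in Hcs]

R_cam2gripper, t_cam2gripper = cv2.calibrateHandEye(
    R_gripper2base, t_gripper2base,
    R_target2cam, t_target2cam,
    method=cv2.CALIB_HAND_EYE_TSAI)
print(R_cam2gripper)
print(t_cam2gripper)
Fed the same pose pairs, this should match the hand-written implementations up to numerical precision.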
Matlab version:
% handEye - performs hand/eye calibration
%
% gHc = handEye(bHg, wHc)
%
% bHg - pose of gripper relative to the robot base..
% (Gripper center is at: g0 = Hbg [0;0;0;1] )
% Matrix dimensions are 4x4xM, where M is ..
% .. number of camera positions.
% Algorithm gives a non-singular solution when ..
% .. at least 3 positions are given
% Hbg(:,:,i) is i-th homogeneous transformation matrix
% wHc - pose of camera relative to the world ..
% (relative to the calibration block)
% Dimension: size(Hwc) = size(Hbg)
% gHc - 4x4 homogeneous transformation from gripper to camera
% , that is the camera position relative to the gripper.
% Focal point of the camera is positioned, ..
% .. relative to the gripper, at
% f = gHc[0;0;0;1];
%
% References: R.Tsai, R.K.Lenz "A new Technique for Fully Autonomous
% and Efficient 3D Robotics Hand/Eye calibration", IEEE
% trans. on Robotics and Automation, Vol.5, No.3, June 1989
%
% Notation: wHc - pose of camera frame (c) in the world (w) coordinate system
% .. If a point coordinates in camera frame (cP) are known
% .. wP = wHc * cP
% .. we get the point coordinates (wP) in world coord.sys.
% .. Also referred to as transformation from camera to world
%
function gHc = handEye(bHg, wHc)
M = size(bHg,3);
K = (M*M-M)/2; % Number of unique camera position pairs
A = zeros(3*K,3); % will store: skew(Pgij+Pcij)
B = zeros(3*K,1); % will store: Pcij - Pgij
k = 0;
% Now convert from wHc notation to Hc notation used in Tsai paper.
Hg = bHg;
% Hc = cHw = inv(wHc); We do it in a loop because wHc is given, not cHw
Hc = zeros(4,4,M); for i = 1:M, Hc(:,:,i) = inv(wHc(:,:,i)); end;
for i = 1:M,
    for j = i+1:M;
        Hgij = inv(Hg(:,:,j))*Hg(:,:,i);    % Transformation from i-th to j-th gripper pose
        Pgij = 2*rot2quat(Hgij);            % ... and the corresponding quaternion
        Hcij = Hc(:,:,j)*inv(Hc(:,:,i));    % Transformation from i-th to j-th camera pose
        Pcij = 2*rot2quat(Hcij);            % ... and the corresponding quaternion
        k = k+1;                            % Form linear system of equations
        A((3*k-3)+(1:3), 1:3) = skew(Pgij+Pcij); % left-hand side
        B((3*k-3)+(1:3))      = Pcij - Pgij;     % right-hand side
    end;
end;
% Rotation from camera to gripper is obtained from the set of equations:
% skew(Pgij+Pcij) * Pcg_ = Pcij - Pgij
% Gripper with camera is first moved to M different poses, then the gripper
% .. and camera poses are obtained for all poses. The above equation uses
% .. invariances present between each pair of i-th and j-th pose.
Pcg_ = A \ B; % Solve the equation A*Pcg_ = B
% Obtained non-unit quaternin is scaled back to unit value that
% .. designates camera-gripper rotation
Pcg = 2 * Pcg_ / sqrt(1 + Pcg_'*Pcg_);
Rcg = quat2rot(Pcg/2); % Rotation matrix
% Calculate translational component
k = 0;
for i = 1:M,
    for j = i+1:M;
        Hgij = inv(Hg(:,:,j))*Hg(:,:,i);    % Transformation from i-th to j-th gripper pose
        Hcij = Hc(:,:,j)*inv(Hc(:,:,i));    % Transformation from i-th to j-th camera pose
        k = k+1;                            % Form linear system of equations
        A((3*k-3)+(1:3), 1:3) = Hgij(1:3,1:3)-eye(3);                    % left-hand side
        B((3*k-3)+(1:3))      = Rcg(1:3,1:3)*Hcij(1:3,4) - Hgij(1:3,4);  % right-hand side
    end;
end;
Tcg = A \ B;
gHc = transl(Tcg) * Rcg; % incorporate translation with rotation
return
If you spot any mistakes, please point them out and I will fix them as soon as possible.