(1) Hand eye calibration of face scanner and manipulator (eye on hand)
2022-07-26 09:33:00 【Flying 123_ one hundred and twenty-three】
1、 Configure the OpenCV and Eigen libraries
OpenCV is needed for the hand-eye calibration solver (cv::calibrateHandEye).
Eigen is needed for the SVD decomposition used to register the point sets (Eigen::JacobiSVD).
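As a quick sanity check of the environment (a minimal sketch that is not part of the original program; the project layout and build settings are assumed), the following program only verifies that both libraries are found and that a cv::Mat can be handed to Eigen's SVD through cv2eigen:

#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/core/eigen.hpp>
#include <Eigen/Core>
#include <Eigen/SVD>

int main()
{
    // Build a small test matrix in OpenCV and convert it to Eigen.
    cv::Mat H = (cv::Mat_<double>(3, 3) << 2, 0, 0,
                                           0, 3, 0,
                                           0, 0, 5);
    Eigen::MatrixXd He(3, 3);
    cv::cv2eigen(H, He);

    // Run the same kind of SVD that the calibration code relies on.
    Eigen::JacobiSVD<Eigen::MatrixXd> svd(He, Eigen::ComputeThinU | Eigen::ComputeThinV);
    std::cout << "singular values: " << svd.singularValues().transpose() << std::endl;
    return 0;
}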
2、 Collect the point data
Coordinates of the points in the calibration board coordinate system:
30 30 0
30 60 0
30 150 0
60 30 0
60 150 0
150 30 0
150 60 0
150 150 0
Save these as board1.txt.
Coordinates of the same points in the camera coordinate system:
63.5676 355.2545 81.8691
32.552 356.6499 79.4237
-56.0291 359.4486 74.1348
66.4339 356.5228 50.7242
-53.22 360.1268 42.9512
71.6217 356.7818 -37.7328
40.2484 357.923 -40.4321
-48.236 360.6457 -45.6185
Save these as camera1.txt.
Then change the pose of the manipulator and measure the same points in the camera coordinate system again, saving them as camera2.txt.
Repeat this for several poses to collect multiple sets of data.
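For context, here is the relationship the data sets have to satisfy (a standard summary of eye-in-hand calibration, not text from the original post). Each data set gives the board-to-camera transform ${}^{cam}T_{board,i}$ from the point correspondences, and the robot controller gives the gripper-to-base transform ${}^{base}T_{gripper,i}$. Because the calibration board stays fixed relative to the robot base, the unknown camera-to-gripper transform $X = {}^{gripper}T_{cam}$ satisfies, for any two poses $i$ and $j$:

$$ {}^{base}T_{gripper,i}\,X\,{}^{cam}T_{board,i} \;=\; {}^{base}T_{gripper,j}\,X\,{}^{cam}T_{board,j} \quad\Longrightarrow\quad A_{ij}\,X = X\,B_{ij}, $$
$$ A_{ij} = \left({}^{base}T_{gripper,j}\right)^{-1}{}^{base}T_{gripper,i}, \qquad B_{ij} = {}^{cam}T_{board,j}\left({}^{cam}T_{board,i}\right)^{-1}. $$

cv::calibrateHandEye is given the absolute gripper-to-base and board-to-camera transforms and handles the relative motions internally.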
3、 Solver
#include <vector>
#include <opencv2/opencv.hpp>
#include <iostream>
#include <math.h>
#include <map>
#include <fstream>
#include <sstream>
#include <Eigen/SVD>
#include <Eigen/Core>
#include<opencv2/core/eigen.hpp>
using namespace std;
using namespace cv;
using namespace Eigen;
// End (tool) poses of the manipulator: x, y, z, rx, ry, rz (Euler angles in degrees), one row per pose
Mat_<double> ToolPose = (cv::Mat_<double>(7, 6) <<
350, 80, 480, 0, 178, -86,
355, 55, 500, 0, 184, -94,
365, 65, 490, 0, 179, -82,
385, 90, 465, 0, 183, -96,
385, 70, 485, 0, 181, -94,
370, 80, 470, 0, 178, -80,
365, 85, 480, 0, 182, -82);
class coordinate {
public:
double x;
double y;
double z;
};
// Solve the rotation matrix and translation matrix from the calibration plate to the camera
void Solve_board2camera(int num, Mat& T_board2camera, Mat& R_board2camera);
// Solve the rotation matrix and translation matrix from the end of the manipulator to the base
void Solve_gripper2base(const Mat& m, Mat& T_gripper2base, Mat& R_gripper2base);
// Convert Euler angles to a rotation matrix
Mat eulerAngleToRotateMatrix(const Mat& eulerAngle);
// test
void testResult(Mat& R_camera2gripper, Mat& T_camera2gripper, Mat& tempH_T);
vector<Mat> v_H_board2camera, v_H_camera2gripper, v_H_gripper2base;
vector<Mat> v_Pc; // Stores the point coordinates measured in the camera frame
int main()
{
cv::Mat T_gripper2base;
cv::Mat R_gripper2base;
cv::Mat H_gripper2base = Mat::eye(4, 4, CV_64FC1);
cv::Mat T_board2camera;
cv::Mat R_board2camera;
cv::Mat H_board2camera = Mat::eye(4, 4, CV_64FC1);
cv::Mat T_camera2gripper;
cv::Mat R_camera2gripper;
cv::Mat H_camera2gripper = Mat::eye(4, 4, CV_64FC1);
cv::Mat tempR, tempT, tempH;
vector<Mat> v_T_gripper2base;
vector<Mat> v_R_gripper2base;
vector<Mat> v_T_board2camera;
vector<Mat> v_R_board2camera;
// Compute board2camera
// i.e. the transform from the calibration board to the camera for each data set
int num = 0;
for (int i = 0; i < ToolPose.rows; i++)
{
num = i + 1; // index of the camera data file (camera1.txt, camera2.txt, ...)
// Solve the rotation matrix and translation matrix from the calibration plate to the camera
Solve_board2camera(num, T_board2camera, R_board2camera);
tempT = T_board2camera.clone();
tempR = R_board2camera.clone();
// Store the rotation matrix and translation matrix from the calibration plate to the camera
v_T_board2camera.push_back(tempT);
v_R_board2camera.push_back(tempR);
// Store transformation matrix
tempR({ 0,0,3,3 }).copyTo(H_board2camera({ 0,0,3,3 }));
tempT({ 0,0,1,3 }).copyTo(H_board2camera({ 3,0,1,3 }));
tempH = H_board2camera;
v_H_board2camera.push_back(tempH);
}
cout << "**************************************************************" << endl;
// Compute gripper2base
// i.e. the transform from the manipulator end (gripper) to the base for each pose
for (int i = 0; i < ToolPose.rows; i++)
{
// Solve the rotation matrix and translation matrix from the end of the manipulator to the base
Solve_gripper2base(ToolPose.row(i), T_gripper2base, R_gripper2base);
tempT = T_gripper2base.clone();
tempR = R_gripper2base.clone();
// Store the rotation matrix and translation matrix from the end of the manipulator to the base
v_T_gripper2base.push_back(tempT);
v_R_gripper2base.push_back(tempR);
// Store transformation matrix
tempR({ 0,0,3,3 }).copyTo(H_gripper2base({ 0,0,3,3 }));
tempT({ 0,0,1,3 }).copyTo(H_gripper2base({ 3,0,1,3 }));
tempH = H_gripper2base.clone();
v_H_gripper2base.push_back(tempH);
}
// Sanity check: convert each rotation matrix to a rotation vector (Rodrigues) and back, and verify it is unchanged
//Mat temp_R1;
//Mat temp_R2;
//Mat temp;
//for (int i = 0; i < v_R_board2camera.size(); i++)
//{
// cout << "_________________" << i+1 << "_________________" << endl;
// temp_R1 = v_R_board2camera[i];
// cout << "temp_R1: " << temp_R1 << endl;
// Rodrigues(temp_R1, temp);
// cout << "temp: " << temp << endl;
// Rodrigues(temp, temp_R2);
// cout << "temp_R2: " << temp_R2 << endl;
//}
//system("pause");
// Solve for the camera-to-gripper transform
// OpenCV provides five hand-eye calibration methods:
cout << "*************************** solve ***************************" << endl;
//cv::calibrateHandEye(v_R_gripper2base, v_T_gripper2base, v_R_board2camera, v_T_board2camera, R_camera2gripper, T_camera2gripper, CALIB_HAND_EYE_TSAI);
//cv::calibrateHandEye(v_R_gripper2base, v_T_gripper2base, v_R_board2camera, v_T_board2camera, R_camera2gripper, T_camera2gripper, CALIB_HAND_EYE_PARK);
cv::calibrateHandEye(v_R_gripper2base, v_T_gripper2base, v_R_board2camera, v_T_board2camera, R_camera2gripper, T_camera2gripper, CALIB_HAND_EYE_HORAUD);
//cv::calibrateHandEye(v_R_gripper2base, v_T_gripper2base, v_R_board2camera, v_T_board2camera, R_camera2gripper, T_camera2gripper, CALIB_HAND_EYE_ANDREFF);
//cv::calibrateHandEye(v_R_gripper2base, v_T_gripper2base, v_R_board2camera, v_T_board2camera, R_camera2gripper, T_camera2gripper, CALIB_HAND_EYE_DANIILIDIS);
cout << "R_camera2gripper: " << endl;
cout << R_camera2gripper << endl;
cout << "T_camera2gripper: " << endl;
cout << T_camera2gripper << endl;
tempR = R_camera2gripper.clone();
tempT = T_camera2gripper.clone();
cout << "*************** test *******************" << endl;
// test result
testResult(tempR, tempT,tempH);
system("pause");
return 0;
}
// Calculation H_board2camera
//**************************************************************************
void Solve_board2camera(int num,Mat& T_board2camera, Mat& R_board2camera)
{
// Source point set A is P_board; target point set B is P_camera
// Compute the transform from P_board to P_camera
string str = to_string(num);
coordinate P_board, P_camera, P_centroid;
vector<coordinate> v_board, v_camera;
cv::Mat PA(3, 1, CV_64FC1);
cv::Mat PB(3, 1, CV_64FC1);
cv::Mat centroidA(3, 1, CV_64FC1);
cv::Mat centroidB(3, 1, CV_64FC1);
cv::Mat H = Mat::zeros(3, 3, CV_64FC1);
cv::Mat tempD, tempU, tempV;
cv::Mat tempH = Mat::eye(4, 4, CV_64FC1);
// Read the point data and compute the centroids
//------------------------------------------
P_centroid.x = 0;
P_centroid.y = 0;
P_centroid.z = 0;
// Read the coordinates of the points on the board and calculate the centroid
ifstream fin;
fin.open("..//board1.txt", ios::in);
while (fin >> P_board.x >> P_board.y >> P_board.z) // stream-extraction loop avoids reading the last line twice
{
v_board.push_back(P_board); // Store
P_centroid.x = P_centroid.x + P_board.x;
P_centroid.y = P_centroid.y + P_board.y;
P_centroid.z = P_centroid.z + P_board.z;
}
fin.close();
// Calculating the center of mass
centroidA.at<double>(0, 0) = P_centroid.x / v_board.size();
centroidA.at<double>(1, 0) = P_centroid.y / v_board.size();
centroidA.at<double>(2, 0) = P_centroid.z / v_board.size();
// Read the coordinates of the points in the camera frame
P_centroid.x = 0;
P_centroid.y = 0;
P_centroid.z = 0;
fin.open("..//camera"+str +".txt", ios::in);
while (fin >> P_camera.x >> P_camera.y >> P_camera.z)
{
//P_camera.x = P_camera.x * 1000;
//P_camera.y = P_camera.y * 1000;
//P_camera.z = P_camera.z * 1000;
//cout << P_camera.x << " " << P_camera.y << " " << P_camera.z << endl;
v_camera.push_back(P_camera); // Store
P_centroid.x = P_centroid.x + P_camera.x;
P_centroid.y = P_centroid.y + P_camera.y;
P_centroid.z = P_centroid.z + P_camera.z;
}
fin.close();
// Calculating the center of mass
centroidB.at<double>(0, 0) = P_centroid.x / v_camera.size();
centroidB.at<double>(1, 0) = P_centroid.y / v_camera.size();
centroidB.at<double>(2, 0) = P_centroid.z / v_camera.size();
// Build the cross-covariance matrix H
//--------------------------------------------------------------
for (int i = 0; i < v_board.size(); i++)
{
PA.at<double>(0, 0) = v_board[i].x;
PA.at<double>(1, 0) = v_board[i].y;
PA.at<double>(2, 0) = v_board[i].z;
PB.at<double>(0, 0) = v_camera[i].x;
PB.at<double>(1, 0) = v_camera[i].y;
PB.at<double>(2, 0) = v_camera[i].z;
H = H + (PA - centroidA)*((PB - centroidB).t());
}
Eigen::MatrixXd SVD_H(3, 3);
cv2eigen(H, SVD_H); // Convert the cv::Mat to an Eigen matrix
// SVD decomposition of H to obtain the rotation and translation
//------------------------------------------------------------------
SVD::compute(H, tempD, tempU, tempV, 0); // OpenCV SVD; its output is overwritten by the Eigen results below
//-------------------------------------------------------------------
// Perform the SVD with Eigen
Eigen::JacobiSVD<Eigen::MatrixXd> svd_holder(SVD_H,
Eigen::ComputeThinU |
Eigen::ComputeThinV);
// Extract the SVD factors
Eigen::MatrixXd U = svd_holder.matrixU();
Eigen::MatrixXd V = svd_holder.matrixV();
Eigen::MatrixXd D = svd_holder.singularValues();
//--------------------------------------------------------------------
// Convert the Eigen matrices back to cv::Mat
eigen2cv(U, tempU);
eigen2cv(V, tempV);
eigen2cv(D, tempD);
// Calculate the rotation matrix and translation matrix from the calibration plate to the camera
R_board2camera = tempV * (tempU.t());
T_board2camera = centroidB - (R_board2camera * centroidA);
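// Note (not in the original post): the textbook SVD/Kabsch registration also checks det(V * U^T);
// if the determinant is -1 the result is a reflection, and the last column of V should be negated
// before forming R. That safeguard is omitted here.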
// Save all camera points into v_Pc for the later test
cv::Mat PC(4, 1, CV_64FC1);
cv::Mat PD;
for (int i = 0; i < v_camera.size(); i++)
{
///cout << i << endl;
PC.at<double>(0, 0) = v_camera[i].x;
PC.at<double>(1, 0) = v_camera[i].y;
PC.at<double>(2, 0) = v_camera[i].z;
PC.at<double>(3, 0) = 1;
PD = PC.clone();
v_Pc.push_back(PD);
}
// Test: transform the camera points back into the calibration board frame
//cout << " test *******************************" << endl;
//R_board2camera({ 0,0,3,3 }).copyTo(tempH({ 0,0,3,3 }));
//T_board2camera({ 0,0,1,3 }).copyTo(tempH({ 3,0,1,3 })); // Get the transformation matrix from the end of the manipulator to the base of the manipulator
//tempH = tempH.inv();
//double Point[1][4] = { -31.8856, -29.4328, 352.0000 ,1 };
//cv::Mat tempPoint(4, 1, CV_64FC1, Point);
//Mat temp;
//temp = tempH*tempPoint;
//cout << "temp: " << temp << endl;
//system("pause");
/*
ofstream fout;
fout.open("..//testc2b.txt", ios::app);
double tempAA[1][3];
Mat temp;
tempH = tempH.inv();
for (int i = 0; i < v_camera.size(); i++)
{
cout << i << endl;
PC.at<double>(0, 0) = v_camera[i].x;
PC.at<double>(1, 0) = v_camera[i].y;
PC.at<double>(2, 0) = v_camera[i].z;
PC.at<double>(3, 0) = 1;
PD = PC.clone();
v_Pc.push_back(PD);
temp = tempH * PD;
tempAA[0][0] = temp.at<double>(0, 0);
tempAA[0][1] = temp.at<double>(1, 0);
tempAA[0][2] = temp.at<double>(2, 0);
fout << tempAA[0][0] << " " << tempAA[0][1] << " " << tempAA[0][2] << endl;
}
v_camera.clear();
fout.close();
*/
//system("pause");
}
// Calculation H_gripper2base
//***********************************************************************
void Solve_gripper2base(const Mat& m, Mat& T_gripper2base, Mat& R_gripper2base)
{
cv::Mat EulerAngle;
T_gripper2base = m({ 0,0,3,1 }).t();
EulerAngle = m({ 3,0,3,1 });
R_gripper2base = eulerAngleToRotateMatrix(EulerAngle);
}
// test result
void testResult(Mat& R_camera2gripper, Mat& T_camera2gripper, Mat& tempH_T)
{
cv::Mat R_vector; // Euler angles of the manipulator pose (not used below)
cv::Mat T_vector; // Translation vector of the manipulator pose (not used below)
cv::Mat R_gripper2base;
cv::Mat T_gripper2base;
cv::Mat H_gripper2base = Mat::eye(4, 4, CV_64FC1);
cv::Mat H_camera2gripper = Mat::eye(4, 4, CV_64FC1);
cv::Mat temp;
// Calculate the conversion matrix from the camera to the end of the manipulator
R_camera2gripper({ 0,0,3,3 }).copyTo(H_camera2gripper({ 0,0,3,3 }));
T_camera2gripper({ 0,0,1,3 }).copyTo(H_camera2gripper({ 3,0,1,3 }));
cout << "H_camera2gripper" << H_camera2gripper << endl;
// Save the camera-to-gripper transform to a file
ofstream fout;
double H_cam2gri[4][4];
fout.open("..//H_camera2gripper.txt", ios::trunc);
for (int i = 0; i < 4; i++)
{
H_cam2gri[i][0] = H_camera2gripper.at<double>(i, 0);
H_cam2gri[i][1] = H_camera2gripper.at<double>(i, 1);
H_cam2gri[i][2] = H_camera2gripper.at<double>(i, 2);
H_cam2gri[i][3] = H_camera2gripper.at<double>(i, 3);
fout << H_cam2gri[i][0] << '\t' << H_cam2gri[i][1] << '\t' << H_cam2gri[i][2] << '\t' << H_cam2gri[i][3] << endl;
}
fout.close();
//++++++++++++++++++++++++++++++++++++++++++++++++++
// Save the test results to a file
// Test: transform the camera points into the robot base coordinate system
cv::Mat mat_P(4, 1, CV_64FC1);
double tempAA[1][3];
int n;
fout.open("..//test123.txt", ios::trunc);
Mat tempH;
for (int i = 0; i < v_H_gripper2base.size(); i++) // v_H_gripper2base: gripper-to-base transform for each pose
{
cout << "------------------------------------" << endl;
for (int j = 0; j < 8; j++) // for each manipulator pose, transform the 8 points measured by the camera
{
n = i * 8 + j;
temp = v_H_gripper2base[i] * H_camera2gripper *v_Pc[n];
//cout << "temp: " << temp << endl;
tempAA[0][0] = temp.at<double>(0, 0);
tempAA[0][1] = temp.at<double>(1, 0);
tempAA[0][2] = temp.at<double>(2, 0);
fout << tempAA[0][0] << " " << tempAA[0][1] << " " << tempAA[0][2] << endl;
}
}
fout.close();
cout << " End of calculation !" << endl;
/*
cout << "++++++++++++++++++++++++++++++++++++++++++++++++" << endl;
cout << "++++++++++++++++++++++++++++++++++++++++++++++++" << endl;
cout << "++++++++++++++++++++++++++++++++++++++++++++++++" << endl;
double test_dou[3][3] = { 180,180,90 };
cv::Mat test_mat(1, 3, CV_64FC1, test_dou);
cout << test_mat << endl;
cv::Mat test_result = eulerAngleToRotateMatrix(test_mat);
cout << "test_result: " << test_result << endl;
*/
}
// Convert Euler angles to a rotation matrix
//********************************************************************************************
Mat eulerAngleToRotateMatrix(const Mat& eulerAngle)
{
Mat angle = eulerAngle / (180 / CV_PI); // Degrees to radians (scale a copy; the parameter is const)
Matx13d m(angle); //<double, 1, 3>
auto rx = m(0, 0), ry = m(0, 1), rz = m(0, 2);
auto rxs = sin(rx), rxc = cos(rx);
auto rys = sin(ry), ryc = cos(ry);
auto rzs = sin(rz), rzc = cos(rz);
//XYZ rotation matrices (note: the RotX actually used below has the form of a rotation about the Z axis; the standard X-axis form is kept in the commented-out line)
/*Mat RotX = (Mat_<double>(3, 3) << 1, 0, 0, 0, rxc, -rxs, 0, rxs, rxc);*/
Mat RotX = (Mat_<double>(3, 3) << rxc, -rxs, 0,
rxs, rxc, 0,
0, 0, 1);
Mat RotY = (Mat_<double>(3, 3) << ryc, 0, rys,
0, 1, 0,
-rys, 0, ryc);
Mat RotZ = (Mat_<double>(3, 3) << rzc, -rzs, 0,
rzs, rzc, 0,
0, 0, 1);
// Compose the full rotation matrix in RotX * RotY * RotZ order
cv::Mat rotMat;
rotMat = RotX * RotY * RotZ;
return rotMat;
}
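After calibration, the saved transform can be reused to map scanned points into the robot base frame. Below is a minimal usage sketch (not part of the original post): it reads the H_camera2gripper.txt file written by testResult, uses an identity gripper-to-base matrix as a placeholder that must be replaced by the transform built from the robot's current x, y, z, rx, ry, rz, and takes the first point of camera1.txt purely as an example input.

#include <fstream>
#include <iostream>
#include <opencv2/opencv.hpp>

int main()
{
    // Load the 4x4 camera-to-gripper transform saved by the calibration program.
    cv::Mat H_camera2gripper = cv::Mat::eye(4, 4, CV_64FC1);
    std::ifstream fin("..//H_camera2gripper.txt");
    for (int i = 0; i < 4; i++)
        for (int j = 0; j < 4; j++)
            fin >> H_camera2gripper.at<double>(i, j);

    // Placeholder: gripper-to-base transform for the current robot pose.
    // In practice, build it from the controller readout (translation + Euler angles).
    cv::Mat H_gripper2base = cv::Mat::eye(4, 4, CV_64FC1);

    // A point measured by the scanner, in homogeneous camera coordinates
    // (here the first point of camera1.txt, for illustration only).
    cv::Mat P_camera = (cv::Mat_<double>(4, 1) << 63.5676, 355.2545, 81.8691, 1.0);

    // Chain the transforms: camera -> gripper -> base.
    cv::Mat P_base = H_gripper2base * H_camera2gripper * P_camera;
    std::cout << "point in base frame: " << P_base.t() << std::endl;
    return 0;
}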