Hello, I'm Xiaozhi. It's the weekend, and with my girlfriend leading the way I got a sharp new haircut. Today let's talk about implementing hand-eye calibration in code.

I previously covered the principle behind the Tsai hand-eye calibration algorithm; today I'll walk through the algorithm's code, with Python, C++, and Matlab implementations.
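
As a quick recap of that earlier post: writing Hgij for the relative gripper motion and Hcij for the relative camera motion between poses i and j, Tsai-Lenz reduces the calibration to two linear least-squares solves (the eq numbers come from the paper and reappear as comments in the code below):

    skew(Pgij + Pcij) * Pcg' = Pcij - Pgij    (eq 12, rotation via a minimal quaternion)
    (Rgij - I) * Tcg = Rcg*Tcij - Tgij        (eq 15, translation)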

  • The algorithm applies both when the camera is mounted on the gripper (eye-in-hand) and when it is mounted externally (eye-to-hand)
  • The paper has been uploaded to Git: gitee.com/ohhuo/hande…
  • Python version

    Before use, install the required libraries:

    pip3 install transforms3d
    pip3 install numpy
    
    #!/usr/bin/env python
    # coding: utf-8
    import transforms3d as tfs
    import numpy as np
    import math

    def get_matrix_eular_radu(x, y, z, rx, ry, rz):
        # Build a 4x4 homogeneous transform from a translation (x, y, z)
        # and Euler angles given in degrees
        rmat = tfs.euler.euler2mat(math.radians(rx), math.radians(ry), math.radians(rz))
        rmat = tfs.affines.compose(np.squeeze(np.asarray((x, y, z))), rmat, [1, 1, 1])
        return rmat

    def skew(v):
        # Skew-symmetric (cross-product) matrix of a 3-vector
        return np.array([[0, -v[2], v[1]],
                         [v[2], 0, -v[0]],
                         [-v[1], v[0], 0]])

    def rot2quat_minimal(m):
        # Vector (x, y, z) part of the unit quaternion of a rotation matrix
        quat = tfs.quaternions.mat2quat(m[0:3, 0:3])
        return quat[1:]

    def quatMinimal2rot(q):
        # Recover w from the vector part and convert back to a rotation matrix
        p = np.dot(q.T, q)
        w = np.sqrt(np.subtract(1, p[0][0]))
        return tfs.quaternions.quat2mat([w, q[0], q[1], q[2]])

    # Gripper poses in the robot-base frame, 6 values (x, y, z, rx, ry, rz) per pose
    hand = [1.1988093940033604, -0.42405585264804424, 0.18828251788562061,
            151.3390418721659, -18.612399542280507, 153.05074895025035,
            1.1684831621733476, -0.183273375514656, 0.12744868246620855,
            -161.57083804238462, 9.07159838346732, 89.1641128844487,
            1.1508343174145468, -0.22694301453461405, 0.26625166858469146,
            177.8815855486261, 0.8991159570568988, 77.67286224959672]
    # Calibration-target poses in the camera frame, 6 values per pose
    camera = [-0.16249272227287292, -0.047310635447502136, 0.4077761471271515,
              -56.98037030812389, -6.16739631361851, -115.84333735802369,
              0.03955405578017235, -0.013497642241418362, 0.33975949883461,
              -100.87129330834215, -17.192685528625265, -173.07354634882094,
              -0.08517949283123016, 0.00957852229475975, 0.46546608209609985,
              -90.85270962096058, 0.9315977976503153, 175.2059707654342]

    Hgs, Hcs = [], []
    for i in range(0, len(hand), 6):
        Hgs.append(get_matrix_eular_radu(hand[i], hand[i+1], hand[i+2], hand[i+3], hand[i+4], hand[i+5]))
        Hcs.append(get_matrix_eular_radu(camera[i], camera[i+1], camera[i+2], camera[i+3], camera[i+4], camera[i+5]))

    Hgijs = []
    Hcijs = []
    A = []
    B = []
    size = 0
    for i in range(len(Hgs)):
        for j in range(i+1, len(Hgs)):
            size += 1
            Hgij = np.dot(np.linalg.inv(Hgs[j]), Hgs[i])   # relative gripper motion (eq 6)
            Hgijs.append(Hgij)
            Pgij = np.dot(2, rot2quat_minimal(Hgij))

            Hcij = np.dot(Hcs[j], np.linalg.inv(Hcs[i]))   # relative camera motion (eq 7)
            Hcijs.append(Hcij)
            Pcij = np.dot(2, rot2quat_minimal(Hcij))

            A.append(skew(np.add(Pgij, Pcij)))
            B.append(np.subtract(Pcij, Pgij))

    # Solve skew(Pgij+Pcij) * Pcg_ = Pcij - Pgij for the rotation (eq 12)
    MA = np.asarray(A).reshape(size*3, 3)
    MB = np.asarray(B).reshape(size*3, 1)
    Pcg_ = np.dot(np.linalg.pinv(MA), MB)
    pcg_norm = np.dot(np.conjugate(Pcg_).T, Pcg_)
    Pcg = np.sqrt(np.add(1, np.dot(Pcg_.T, Pcg_)))
    Pcg = np.dot(np.dot(2, Pcg_), np.linalg.inv(Pcg))      # scale back to a unit quaternion (eq 14)
    Rcg = quatMinimal2rot(np.divide(Pcg, 2)).reshape(3, 3)

    A = []
    B = []
    id = 0
    for i in range(len(Hgs)):
        for j in range(i+1, len(Hgs)):
            Hgij = Hgijs[id]
            Hcij = Hcijs[id]
            A.append(np.subtract(Hgij[0:3, 0:3], np.eye(3, 3)))
            B.append(np.subtract(np.dot(Rcg, Hcij[0:3, 3:4]), Hgij[0:3, 3:4]))
            id += 1

    # Solve (Rgij - I) * Tcg = Rcg*Tcij - Tgij for the translation (eq 15)
    MA = np.asarray(A).reshape(size*3, 3)
    MB = np.asarray(B).reshape(size*3, 1)
    Tcg = np.dot(np.linalg.pinv(MA), MB).reshape(3,)
    print(tfs.affines.compose(Tcg, np.squeeze(Rcg), [1, 1, 1]))
    

    Running it prints the 4x4 camera-to-gripper homogeneous transform:

    python3 tsai.py                             
    [[-0.01522186 -0.99983174 -0.01023609 -0.02079774] 
    [ 0.99976822 -0.01506342 -0.01538198  0.00889827] 
    [ 0.0152252  -0.01046786  0.99982929  0.08324514] 
    [ 0.          0.          0.          1.        ]]
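
    To sanity-check the result, you can verify that the recovered transform X satisfies Hgij * X ≈ X * Hcij for every pose pair. Here is a minimal sketch reusing Hgs, Hcs, and the final matrix from the script above (the helper name is my own):

    import numpy as np

    def check_calibration(Hgs, Hcs, X):
        # For every pose pair (i, j), the relative gripper motion A and the
        # relative camera motion B should satisfy A @ X == X @ B up to noise
        errs = []
        for i in range(len(Hgs)):
            for j in range(i + 1, len(Hgs)):
                A = np.linalg.inv(Hgs[j]) @ Hgs[i]   # gripper motion (eq 6)
                B = Hcs[j] @ np.linalg.inv(Hcs[i])   # camera motion (eq 7)
                errs.append(np.linalg.norm(A @ X - X @ B))
        return max(errs)

    # X = tfs.affines.compose(Tcg, np.squeeze(Rcg), [1, 1, 1])
    # print(check_calibration(Hgs, Hcs, X))  # small values indicate a consistent calibration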
    

    C++ version:

    //Reference:
    //R. Y. Tsai and R. K. Lenz, "A new technique for fully autonomous and efficient 3D robotics hand/eye calibration."
    //In IEEE Transactions on Robotics and Automation, vol. 5, no. 3, pp. 345-358, June 1989.
    //C++ code converted from Zoran Lazarevic's Matlab code:
    //http://lazax.com/www.cs.columbia.edu/~laza/html/Stewart/matlab/handEye.m
    static void calibrateHandEyeTsai(const std::vector<Mat>& Hg, const std::vector<Mat>& Hc,
                                     Mat& R_cam2gripper, Mat& t_cam2gripper)
    {
        //Number of unique camera position pairs
        int K = static_cast<int>((Hg.size()*Hg.size() - Hg.size()) / 2.0);
        //Will store: skew(Pgij+Pcij)
        Mat A(3*K, 3, CV_64FC1);
        //Will store: Pcij - Pgij
        Mat B(3*K, 1, CV_64FC1);

        std::vector<Mat> vec_Hgij, vec_Hcij;
        vec_Hgij.reserve(static_cast<size_t>(K));
        vec_Hcij.reserve(static_cast<size_t>(K));

        int idx = 0;
        for (size_t i = 0; i < Hg.size(); i++)
        {
            for (size_t j = i+1; j < Hg.size(); j++, idx++)
            {
                //Defines coordinate transformation from Gi to Gj
                //Hgi is from Gi (gripper) to RW (robot base)
                //Hgj is from Gj (gripper) to RW (robot base)
                Mat Hgij = homogeneousInverse(Hg[j]) * Hg[i]; //eq 6
                vec_Hgij.push_back(Hgij);
                //Rotation axis for Rgij which is the 3D rotation from gripper coordinate frame Gi to Gj
                Mat Pgij = 2*rot2quatMinimal(Hgij);

                //Defines coordinate transformation from Ci to Cj
                //Hci is from CW (calibration target) to Ci (camera)
                //Hcj is from CW (calibration target) to Cj (camera)
                Mat Hcij = Hc[j] * homogeneousInverse(Hc[i]); //eq 7
                vec_Hcij.push_back(Hcij);
                //Rotation axis for Rcij
                Mat Pcij = 2*rot2quatMinimal(Hcij);

                //Left-hand side: skew(Pgij+Pcij)
                skew(Pgij+Pcij).copyTo(A(Rect(0, idx*3, 3, 3)));
                //Right-hand side: Pcij - Pgij
                Mat diff = Pcij - Pgij;
                diff.copyTo(B(Rect(0, idx*3, 1, 3)));
            }
        }

        Mat Pcg_;
        //Rotation from camera to gripper is obtained from the set of equations:
        //    skew(Pgij+Pcij) * Pcg_ = Pcij - Pgij    (eq 12)
        solve(A, B, Pcg_, DECOMP_SVD);

        Mat Pcg_norm = Pcg_.t() * Pcg_;
        //Obtained non-unit quaternion is scaled back to unit value that
        //designates camera-gripper rotation
        Mat Pcg = 2 * Pcg_ / sqrt(1 + Pcg_norm.at<double>(0,0)); //eq 14

        Mat Rcg = quatMinimal2rot(Pcg/2.0);

        idx = 0;
        for (size_t i = 0; i < Hg.size(); i++)
        {
            for (size_t j = i+1; j < Hg.size(); j++, idx++)
            {
                //Defines coordinate transformation from Gi to Gj
                //Hgi is from Gi (gripper) to RW (robot base)
                //Hgj is from Gj (gripper) to RW (robot base)
                Mat Hgij = vec_Hgij[static_cast<size_t>(idx)];
                //Defines coordinate transformation from Ci to Cj
                //Hci is from CW (calibration target) to Ci (camera)
                //Hcj is from CW (calibration target) to Cj (camera)
                Mat Hcij = vec_Hcij[static_cast<size_t>(idx)];

                //Left-hand side: (Rgij - I)
                Mat diff = Hgij(Rect(0,0,3,3)) - Mat::eye(3,3,CV_64FC1);
                diff.copyTo(A(Rect(0, idx*3, 3, 3)));

                //Right-hand side: Rcg*Tcij - Tgij
                diff = Rcg*Hcij(Rect(3, 0, 1, 3)) - Hgij(Rect(3, 0, 1, 3));
                diff.copyTo(B(Rect(0, idx*3, 1, 3)));
            }
        }

        Mat Tcg;
        //Translation from camera to gripper is obtained from the set of equations:
        //    (Rgij - I) * Tcg = Rcg*Tcij - Tgij    (eq 15)
        solve(A, B, Tcg, DECOMP_SVD);

        R_cam2gripper = Rcg;
        t_cam2gripper = Tcg;
    }
    

    How to use the C++ version:

    git clone https://gitee.com/ohhuo/handeye-tsai.git
    cd handeye-tsai/cpp
    mkdir build
    cd build
    cmake ..
    make
    ./opencv_example
    
    sangxin@sangxin-ubu~ git clone https://gitee.com/ohhuo/handeye-tsai.git
    Cloning into 'handeye-tsai'...
    remote: Enumerating objects: 60, done.
    remote: Counting objects: 100% (60/60), done.
    remote: Compressing objects: 100% (57/57), done.
    remote: Total 60 (delta 9), reused 0 (delta 0), pack-reused 0
    Unpacking objects: 100% (60/60), done.
    sangxin@sangxin-ubu~ cd handeye-tsai/cpp
    sangxin@sangxin-ubu~ mkdir build
    sangxin@sangxin-ubu~ cd build
    sangxin@sangxin-ubu~ cmake ..
    -- The C compiler identification is GNU 7.5.0
    -- The CXX compiler identification is GNU 7.5.0
    -- Check for working C compiler: /usr/bin/cc
    -- Check for working C compiler: /usr/bin/cc -- works
    -- Detecting C compiler ABI info
    -- Detecting C compiler ABI info - done
    -- Detecting C compile features
    -- Detecting C compile features - done
    -- Check for working CXX compiler: /usr/bin/c++
    -- Check for working CXX compiler: /usr/bin/c++ -- works
    -- Detecting CXX compiler ABI info
    -- Detecting CXX compiler ABI info - done
    -- Detecting CXX compile features
    -- Detecting CXX compile features - done
    -- Found OpenCV: /usr/local (found version "4.5.1")
    -- OpenCV library status:
    --     config: /usr/local/lib/cmake/opencv4
    --     version: 4.5.1
    --     libraries: opencv_calib3d;opencv_core;opencv_dnn;opencv_features2d;opencv_flann;opencv_gapi;opencv_highgui;opencv_imgcodecs;opencv_imgproc;opencv_ml;opencv_objdetect;opencv_photo;opencv_stitching;opencv_video;opencv_videoio
    --     include path: /usr/local/include/opencv4
    -- Configuring done
    -- Generating done
    -- Build files have been written to: /home/sangxin/code/ramp/other/handeye-tsai/cpp/build
    sangxin@sangxin-ubu~ make
    Scanning dependencies of target opencv_example
    [ 33%] Building CXX object CMakeFiles/opencv_example.dir/example.cpp.o
    [ 66%] Building CXX object CMakeFiles/opencv_example.dir/calibration_handeye.cpp.o
    [100%] Linking CXX executable opencv_example
    [100%] Built target opencv_example
    sangxin@sangxin-ubu~ ./opencv_example
    Hand eye calibration
    [0.02534592279128711, -0.999507800830298, -0.01848621857599331, 0.03902588103574497;
     0.99953544041497, 0.02502485833258339, 0.01739712102291752, 0.002933439485668206;
     -0.01692594317342544, -0.01891857671220042, 0.9996777480282706, -0.01033683416650518;
     0, 0, 0, 1]
    Homo_cam2gripper contains a rotation matrix: 1
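
    Incidentally, this C++ routine is essentially what ships in OpenCV (4.1+) as cv::calibrateHandEye with the CALIB_HAND_EYE_TSAI method, so in practice you can call the library directly. A minimal Python sketch, assuming the same pose conventions as above (Hgs = gripper-to-base, Hcs = target-to-camera):

    import cv2

    # Split the 4x4 homogeneous matrices into rotation/translation lists,
    # reusing Hgs and Hcs from the Python script above
    R_gripper2base = [H[:3, :3] for H in Hgs]
    t_gripper2base = [H[:3, 3] for H in Hgs]
    R_target2cam   = [H[:3, :3] for H in Hcs]
    t_target2cam   = [H[:3, 3] for H in Hcs]

    R_cam2gripper, t_cam2gripper = cv2.calibrateHandEye(
        R_gripper2base, t_gripper2base,
        R_target2cam, t_target2cam,
        method=cv2.CALIB_HAND_EYE_TSAI)
    print(R_cam2gripper, t_cam2gripper)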
    

    Matlab version:

    % handEye - performs hand/eye calibration
    %
    %     gHc = handEye(bHg, wHc)
    %
    %     bHg - pose of gripper relative to the robot base..
    %           (Gripper center is at: g0 = Hbg * [0;0;0;1] )
    %           Matrix dimensions are 4x4xM, where M is ..
    %           .. number of camera positions.
    %           Algorithm gives a non-singular solution when ..
    %           .. at least 3 positions are given
    %           Hbg(:,:,i) is i-th homogeneous transformation matrix
    %     wHc - pose of camera relative to the world ..
    %           (relative to the calibration block)
    %           Dimension: size(Hwc) = size(Hbg)
    %     gHc - 4x4 homogeneous transformation from gripper to camera
    %           , that is the camera position relative to the gripper.
    %           Focal point of the camera is positioned, ..
    %           .. relative to the gripper, at
    %                 f = gHc*[0;0;0;1];
    %
    % References: R.Tsai, R.K.Lenz "A new Technique for Fully Autonomous
    %           and Efficient 3D Robotics Hand/Eye calibration", IEEE
    %           trans. on Robotics and Automation, Vol.5, No.3, June 1989
    %
    % Notation: wHc - pose of camera frame (c) in the world (w) coordinate system
    %                 .. If a point coordinates in camera frame (cP) are known
    %                 ..     wP = wHc * cP
    %                 .. we get the point coordinates (wP) in world coord.sys.
    %                 .. Also referred to as transformation from camera to world
    function gHc = handEye(bHg, wHc)

    M = size(bHg,3);

    K = (M*M-M)/2;               % Number of unique camera position pairs
    A = zeros(3*K,3);            % will store: skew(Pgij+Pcij)
    B = zeros(3*K,1);            % will store: Pcij - Pgij
    k = 0;

    % Now convert from wHc notation to Hc notation used in Tsai paper.
    Hg = bHg;
    % Hc = cHw = inv(wHc); We do it in a loop because wHc is given, not cHw
    Hc = zeros(4,4,M);
    for i = 1:M, Hc(:,:,i) = inv(wHc(:,:,i)); end;

    for i = 1:M,
       for j = i+1:M;
          Hgij = inv(Hg(:,:,j))*Hg(:,:,i);    % Transformation from i-th to j-th gripper pose
          Pgij = 2*rot2quat(Hgij);            % ... and the corresponding quaternion

          Hcij = Hc(:,:,j)*inv(Hc(:,:,i));    % Transformation from i-th to j-th camera pose
          Pcij = 2*rot2quat(Hcij);            % ... and the corresponding quaternion

          k = k+1;                            % Form linear system of equations
          A((3*k-3)+(1:3), 1:3) = skew(Pgij+Pcij); % left-hand side
          B((3*k-3)+(1:3))      = Pcij - Pgij;     % right-hand side
       end;
    end;
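
    % --- The listing above stops after building the linear system for the
    % --- rotation. The remaining steps are reconstructed here to mirror the
    % --- Python/C++ versions (eq 12/14 for the rotation, eq 15 for the
    % --- translation); quat2rot is assumed to be the inverse companion
    % --- helper of the rot2quat used above.
    Pcg_ = A \ B;                          % Solve skew(Pgij+Pcij)*Pcg_ = Pcij-Pgij (eq 12)
    Pcg = 2 * Pcg_ / sqrt(1 + Pcg_'*Pcg_); % Scale back to a unit quaternion (eq 14)
    Rcg = quat2rot(Pcg/2);                 % Camera-to-gripper rotation

    k = 0;
    for i = 1:M,
       for j = i+1:M;
          Hgij = inv(Hg(:,:,j))*Hg(:,:,i);
          Hcij = Hc(:,:,j)*inv(Hc(:,:,i));
          k = k+1;
          A((3*k-3)+(1:3), 1:3) = Hgij(1:3,1:3) - eye(3);                 % left-hand side (Rgij - I)
          B((3*k-3)+(1:3))      = Rcg(1:3,1:3)*Hcij(1:3,4) - Hgij(1:3,4); % right-hand side (eq 15)
       end;
    end;
    Tcg = A \ B;                           % Camera-to-gripper translation
    gHc = [Rcg(1:3,1:3) Tcg; 0 0 0 1];     % Compose the 4x4 gripper-to-camera transform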
    

    If you spot any mistakes, please point them out and Xiaozhi will fix them right away~

    About the author:

    I'm Xiaozhi, a long-time robotics enthusiast, currently a robotics algorithm engineer at a unicorn company in Shenzhen.

    I started programming in middle school and robotics in high school, and during college robotics competition prizes earned me 20k+ RMB a month.

    These days I write robotics study guides, paper annotations, and notes from work experience. Follow Xiaozhi to talk tech and learn robotics together.
