FPFH Feature-Based Point Cloud Registration

诗远 2022-02-03

1. Computing FPFH descriptors

#include <pcl/point_types.h>
#include <pcl/features/fpfh.h>
#include <pcl/io/pcd_io.h>
#include <iostream>
#include <pcl/features/normal_3d.h>
#include <pcl/visualization/pcl_plotter.h> // histogram visualization
using namespace std;
int main()
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    if (pcl::io::loadPCDFile("source.pcd", *cloud) == -1)
    {
        cout << "加载失败" << endl;
        return -1;
    }
    //pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>());
    // Estimate normals
    pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
    ne.setInputCloud(cloud);
    // Create an empty kdtree object and pass it to the normal estimation object;
    // the tree is built from the given input dataset
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>());
    ne.setSearchMethod(tree);
    // Output dataset
    pcl::PointCloud<pcl::Normal>::Ptr cloud_normals(new pcl::PointCloud<pcl::Normal>);
    // Use all neighbors within a radius of 0.2 (in the cloud's units) around each query point
    ne.setRadiusSearch(0.2);
    // Compute the normals
    ne.compute(*cloud_normals);

    // Create the FPFH estimation class, and pass the input dataset+normals to it
    pcl::FPFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::FPFHSignature33> fpfh;
    fpfh.setInputCloud(cloud);
    fpfh.setInputNormals(cloud_normals);
    // alternatively, if cloud is of type PointNormal, do fpfh.setInputNormals (cloud);

    // Create an empty kdtree representation, and pass it to the FPFH estimation object.
    // Its content will be filled inside the object, based on the given input dataset (as no other search surface is given).
    //pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>);

    fpfh.setSearchMethod(tree);

    // Output datasets
    pcl::PointCloud<pcl::FPFHSignature33>::Ptr fpfhs(new pcl::PointCloud<pcl::FPFHSignature33>());

    // Use all neighbors within a radius of 0.2 (in the cloud's units)
    // IMPORTANT: the radius used here has to be larger than the radius used to estimate the surface normals!
    fpfh.setRadiusSearch(0.2);

    // Compute the features
    fpfh.compute(*fpfhs);
    cout << "计算完毕";
}
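The pcl_plotter.h header included above is never actually used in this listing. As a minimal sketch (continuing from the end of main above and assuming fpfhs is non-empty), the 33-bin histogram of the first point's FPFH descriptor could be displayed like this:

// Plot the FPFH histogram of the first point (33 bins); blocks until the window is closed.
pcl::visualization::PCLPlotter plotter("FPFH histogram");
plotter.addFeatureHistogram(*fpfhs, 33);
plotter.plot();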

2. FPFH correspondences

#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/features/fpfh_omp.h> // OMP-accelerated FPFH (multi-core)
#include <pcl/registration/correspondence_estimation.h> // base class for correspondence estimation
#include <pcl/visualization/pcl_visualizer.h>
#include <boost/thread/thread.hpp>
#include <pcl/filters/filter.h>
#include <pcl/filters/voxel_grid.h>
pcl::PointCloud<pcl::PointXYZ> voxelGrid(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_in, float leaf)
{
    pcl::VoxelGrid<pcl::PointXYZ> sor;
    sor.setInputCloud(cloud_in);
    sor.setLeafSize(leaf, leaf, leaf);
    pcl::PointCloud<pcl::PointXYZ> cloud_out;
    sor.filter(cloud_out);
    return cloud_out;
}
using namespace std;

typedef pcl::PointCloud<pcl::FPFHSignature33> fpfhFeature;


fpfhFeature::Ptr compute_fpfh_feature(pcl::PointCloud<pcl::PointXYZ>::Ptr input_cloud)
{
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>());
    //------------------------- Normal estimation -----------------------
    pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
    pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::Normal> n;
    n.setInputCloud(input_cloud);
    n.setNumberOfThreads(8); // number of OpenMP threads
    n.setSearchMethod(tree);
    n.setKSearch(10);
    n.compute(*normals);
    //------------------------- FPFH estimation -------------------------
    fpfhFeature::Ptr fpfh(new fpfhFeature);
    pcl::FPFHEstimationOMP<pcl::PointXYZ, pcl::Normal, pcl::FPFHSignature33> f;
    f.setNumberOfThreads(8); // compute on 8 threads
    f.setInputCloud(input_cloud);
    f.setInputNormals(normals);
    f.setSearchMethod(tree);
    f.setKSearch(10);
    f.compute(*fpfh);

    return fpfh;

}

int
main(int argc, char** argv)
{

    pcl::PointCloud<pcl::PointXYZ>::Ptr source_cloud(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr target_cloud(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::io::loadPCDFile<pcl::PointXYZ>("source.pcd", *source_cloud);
    std::cout << "source:" << source_cloud->size() << endl;

    pcl::io::loadPCDFile<pcl::PointXYZ>("target.pcd", *target_cloud);
    std::cout << "target:" << target_cloud->size() << endl;

    std::vector<int> nan_indices;
    std::vector<int> nan_indices2;

    pcl::removeNaNFromPointCloud(*source_cloud, *source_cloud, nan_indices);
    pcl::removeNaNFromPointCloud(*target_cloud, *target_cloud, nan_indices2);
    pcl::PointCloud<pcl::PointXYZ>::Ptr vox_source_cloud = voxelGrid(source_cloud, 0.7).makeShared(); // downsampled clouds
    pcl::PointCloud<pcl::PointXYZ>::Ptr vox_target_cloud = voxelGrid(target_cloud, 0.7).makeShared();


    std::cout << "source_voxed"<<vox_source_cloud->size() << endl;

    std::cout << "target_voxed" << vox_target_cloud->size() << endl;
    //--------------------- Compute FPFH for the source and target clouds -----------------
    fpfhFeature::Ptr source_fpfh = compute_fpfh_feature(vox_source_cloud);
    fpfhFeature::Ptr target_fpfh = compute_fpfh_feature(vox_target_cloud);

    //-------------------- Estimate correspondences from the feature descriptors ------------------
    pcl::registration::CorrespondenceEstimation<pcl::FPFHSignature33, pcl::FPFHSignature33> crude_cor_est;
    boost::shared_ptr<pcl::Correspondences> cru_correspondences(new pcl::Correspondences);
    crude_cor_est.setInputSource(source_fpfh);
    crude_cor_est.setInputTarget(target_fpfh);
    // crude_cor_est.determineCorrespondences(*cru_correspondences, 1); // the second argument is the maximum allowed distance between corresponding features
    crude_cor_est.determineReciprocalCorrespondences(*cru_correspondences); // keep only mutually nearest (reciprocal) correspondences

    cout << "source cloud (after NaN removal): " << source_cloud->size() << " points" << endl;
    cout << "target cloud (after NaN removal): " << target_cloud->points.size() << " points" << endl;
    cout << "number of matched point pairs: " << cru_correspondences->size() << endl;

    //--------------------- Visualize the clouds and the correspondences ----------------------
    boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer(new pcl::visualization::PCLVisualizer("point clouds"));
    viewer->setBackgroundColor(0, 0, 0);  // black background
    // Target cloud in red
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> target_color(target_cloud, 255, 0, 0);
    viewer->addPointCloud<pcl::PointXYZ>(target_cloud, target_color, "target");
    viewer->setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "target");
    // Source cloud in green
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> source_color(source_cloud, 0, 255, 0);
    viewer->addPointCloud<pcl::PointXYZ>(source_cloud, source_color, "source");
    viewer->setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "source");
    // Draw the correspondences; the indices refer to the downsampled clouds,
    // so the downsampled clouds must be passed here rather than the full-resolution ones
    viewer->setWindowName("correspondences from feature descriptors");
    viewer->addCorrespondences<pcl::PointXYZ>(vox_source_cloud, vox_target_cloud, *cru_correspondences, "correspondence");
    //viewer->initCameraParameters();
    while (!viewer->wasStopped())
    {
        viewer->spinOnce(100);
        boost::this_thread::sleep(boost::posix_time::microseconds(100000));
    }

    return 0;
}// FPFH correspondence visualization
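A possible next step (a sketch, not part of the original listing) is to turn the reciprocal FPFH correspondences into a rigid transformation with SVD; it reuses the vox_source_cloud, vox_target_cloud and cru_correspondences variables from the listing above and needs #include <pcl/registration/transformation_estimation_svd.h> at the top of the file:

// Estimate a rigid transform from the correspondences found on the downsampled clouds.
pcl::registration::TransformationEstimationSVD<pcl::PointXYZ, pcl::PointXYZ> svd;
Eigen::Matrix4f transform = Eigen::Matrix4f::Identity();
svd.estimateRigidTransformation(*vox_source_cloud, *vox_target_cloud, *cru_correspondences, transform);
std::cout << "transform estimated from the FPFH correspondences:\n" << transform << std::endl;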

Note: the printed size of the original clouds changes most likely because pcl::removeNaNFromPointCloud filters the clouds in place, so the counts printed after NaN removal are smaller than the counts printed right after loading.

(1) Point-pair correspondence estimation

(pcl::registration::CorrespondenceEstimation<pcl::PointXYZ, pcl::PointXYZ> core;)

#include <pcl/io/pcd_io.h>
#include <pcl/kdtree/io.h>
#include <pcl/filters/extract_indices.h>
#include <pcl/registration/correspondence_estimation.h>

using namespace std;

int main()
{

    pcl::PointCloud<pcl::PointXYZ>::Ptr source(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr target(new pcl::PointCloud<pcl::PointXYZ>);

    //---------------------- Load the clouds -------------------------
    pcl::io::loadPCDFile<pcl::PointXYZ>("source.pcd", *source);
    pcl::io::loadPCDFile<pcl::PointXYZ>("target.pcd", *target);
    //-------------------- Estimate correspondences ------------------
    pcl::registration::CorrespondenceEstimation<pcl::PointXYZ, pcl::PointXYZ> core;
    core.setInputSource(source);
    core.setInputTarget(target);
    boost::shared_ptr<pcl::Correspondences> cor(new pcl::Correspondences);
    core.determineCorrespondences(*cor, 0.5);   // maximum allowed distance between corresponding points

    double mean = 0.0, stddev = 0.0;

    pcl::registration::getCorDistMeanStd(*cor, mean, stddev);

    cout << "mean of the correspondence distances: " << mean << "\n"
        << "standard deviation of the correspondence distances: " << stddev << endl;

    return 0;

}// mean and standard deviation of the correspondence distances
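Note that the distance field filled by determineCorrespondences holds the squared Euclidean distance (the 0.5 passed above is still a metric distance; it is squared internally before the comparison), so the statistics returned by getCorDistMeanStd are statistics of squared distances. A small follow-up sketch, continuing from the listing above (needs #include <cmath>), converts the mean to an RMS distance in the cloud's units:

// mean holds the mean squared correspondence distance, so its square root is the RMS distance.
double rms = std::sqrt(mean);
cout << "RMS correspondence distance: " << rms << endl;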

(2) Correspondence rejection based on distance

#include <iostream>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <boost/thread/thread.hpp>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/registration/correspondence_estimation.h> // base class for correspondence estimation
#include <pcl/registration/correspondence_rejection_distance.h> // rejects correspondences by distance

using namespace std;

int
main(int argc, char** argv)
{
    // Load the target cloud
    pcl::PointCloud<pcl::PointXYZ>::Ptr target(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::io::loadPCDFile<pcl::PointXYZ>("plant.pcd", *target);
    cout << "read " << target->size() << " points from the target cloud" << endl;

    // Load the source cloud
    pcl::PointCloud<pcl::PointXYZ>::Ptr source(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::io::loadPCDFile<pcl::PointXYZ>("plant1.pcd", *source);
    cout << "read " << source->size() << " points from the source cloud" << endl;

    //------------------- Estimate correspondences ---------------------------
    pcl::CorrespondencesPtr correspondences(new pcl::Correspondences);
    pcl::registration::CorrespondenceEstimation<pcl::PointXYZ, pcl::PointXYZ> corr_est;
    corr_est.setInputSource(source);
    corr_est.setInputTarget(target);
    corr_est.determineCorrespondences(*correspondences);    // estimate correspondences
    //---------------- Reject correspondences by distance ----------------------
    pcl::CorrespondencesPtr correspondences_result_rej_dist(new pcl::Correspondences);
    pcl::registration::CorrespondenceRejectorDistance corr_rej_dist;
    corr_rej_dist.setInputCorrespondences(correspondences); // input correspondences
    corr_rej_dist.setMaximumDistance(0.1);                  // maximum allowed distance between corresponding points
    corr_rej_dist.getCorrespondences(*correspondences_result_rej_dist); // filtered correspondences

    cout << "correspondences found: " << correspondences->size() << endl;
    cout << "correspondences remaining after distance-based rejection: " << correspondences_result_rej_dist->size() << endl;

    //----------------- Visualize the clouds and the correspondences --------------------
    boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer(new pcl::visualization::PCLVisualizer("point clouds"));
    viewer->setBackgroundColor(0, 0, 0);  // black background
    // Target cloud in red
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> target_color(target, 255, 0, 0);
    viewer->addPointCloud<pcl::PointXYZ>(target, target_color, "target");
    viewer->setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "target");
    // Source cloud in green
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> input_color(source, 0, 255, 0);
    viewer->addPointCloud<pcl::PointXYZ>(source, input_color, "source");
    viewer->setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "source");
    // Draw the correspondences
    viewer->setWindowName("distance-based correspondence rejection");
    viewer->addCorrespondences<pcl::PointXYZ>(source, target, *correspondences_result_rej_dist, "correspondence");
    //viewer->initCameraParameters();
    while (!viewer->wasStopped())
    {
        viewer->spinOnce(100);
        boost::this_thread::sleep(boost::posix_time::microseconds(100000));
    }

    system("pause");

    return 0;
}// distance-based correspondence rejection
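Besides the distance-based rejector, PCL also provides a RANSAC-based rejector that keeps only correspondences consistent with a single rigid transformation. A hedged sketch, reusing the source, target and correspondences variables from the listing above and needing #include <pcl/registration/correspondence_rejection_sample_consensus.h> (the threshold and iteration count are assumed values, not tuned):

pcl::registration::CorrespondenceRejectorSampleConsensus<pcl::PointXYZ> rej_sac;
rej_sac.setInputSource(source);
rej_sac.setInputTarget(target);
rej_sac.setInlierThreshold(0.05);   // inlier distance threshold, in the cloud's units (assumed)
rej_sac.setMaximumIterations(1000); // number of RANSAC iterations (assumed)
rej_sac.setInputCorrespondences(correspondences);
pcl::CorrespondencesPtr correspondences_sac(new pcl::Correspondences);
rej_sac.getCorrespondences(*correspondences_sac);
cout << "correspondences remaining after RANSAC-based rejection: " << correspondences_sac->size() << endl;
Eigen::Matrix4f best_transform = rej_sac.getBestTransformation(); // rigid transform estimated from the inliers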

(3) Correspondences via normal shooting with surface-normal rejection

#include <iostream>
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/registration/correspondence_estimation_normal_shooting.h>
#include <pcl/registration/correspondence_rejection_surface_normal.h>
#include <pcl/registration/transformation_estimation_lm.h>
#include <pcl/common/transforms.h> // pcl::transformPointCloud
#include <boost/thread/thread.hpp>
#include <pcl/visualization/pcl_visualizer.h>

using namespace std;

void compute_normal(pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud, pcl::PointCloud<pcl::PointNormal>::Ptr& normals)
{
    pcl::NormalEstimationOMP<pcl::PointXYZ, pcl::PointNormal> n; // OMP-accelerated
    // kdtree used for the nearest-neighbor searches
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>());
    n.setNumberOfThreads(6); // number of OpenMP threads
    //n.setViewPoint(0,0,0); // viewpoint, defaults to (0,0,0)
    n.setInputCloud(cloud);
    n.setSearchMethod(tree);
    n.setKSearch(10); // number of neighbors used for the normal estimation
    //n.setRadiusSearch(0.03); // radius-search alternative
    n.compute(*normals); // compute the normals
}


int
main(int argc, char** argv)
{
    // Load the source cloud
    pcl::PointCloud<pcl::PointXYZ>::Ptr source(new pcl::PointCloud<pcl::PointXYZ>);
    if (pcl::io::loadPCDFile<pcl::PointXYZ>("plant.pcd", *source) == -1)
    {
        PCL_ERROR("failed to read the source cloud \n");
        return (-1);
    }
    cout << "read " << source->size() << " points from the source cloud" << endl;

    // ---------------------- Load the target cloud -----------------------
    pcl::PointCloud<pcl::PointXYZ>::Ptr target(new pcl::PointCloud<pcl::PointXYZ>);
    if (pcl::io::loadPCDFile<pcl::PointXYZ>("plant1.pcd", *target) == -1)
    {
        PCL_ERROR("failed to read the target cloud \n");
        return (-1);
    }
    cout << "read " << target->size() << " points from the target cloud" << endl;

    //------------------------- Normal estimation --------------------------
    pcl::PointCloud<pcl::PointNormal>::Ptr source_normals(new pcl::PointCloud<pcl::PointNormal>);
    compute_normal(source, source_normals);

    pcl::PointCloud<pcl::PointNormal>::Ptr target_normals(new pcl::PointCloud<pcl::PointNormal>);
    compute_normal(target, target_normals);
    //---------------------- Estimate correspondences (normal shooting) -------------------------
    pcl::registration::CorrespondenceEstimationNormalShooting<pcl::PointXYZ, pcl::PointXYZ, pcl::PointNormal> core;
    core.setInputSource(source);
    core.setSourceNormals(source_normals);
    core.setInputTarget(target);
    core.setKSearch(10);
    pcl::CorrespondencesPtr all_correspondences(new pcl::Correspondences);
    core.determineCorrespondences(*all_correspondences, 0.5); // maximum distance between a source normal ray and the corresponding target point
    //core.determineReciprocalCorrespondences(*all_correspondences); // reciprocal (mutual) correspondences between the input and target clouds
    //------------------- Reject bad correspondences -----------------------
    pcl::registration::CorrespondenceRejectorSurfaceNormal rejector;
    rejector.initializeDataContainer<pcl::PointNormal, pcl::PointNormal>(); // initialize the data container for the point and normal types
    rejector.setInputSource<pcl::PointNormal>(source_normals);
    rejector.setInputTarget<pcl::PointNormal>(target_normals);
    rejector.setInputNormals<pcl::PointNormal, pcl::PointNormal>(source_normals);
    rejector.setTargetNormals<pcl::PointNormal, pcl::PointNormal>(target_normals);
    rejector.setInputCorrespondences(all_correspondences);

    float threshold = cos(M_PI * 10 / 180); // cos(10 deg)
    rejector.setThreshold(threshold); // threshold on the angle between corresponding normals, given as a cosine
    pcl::CorrespondencesPtr correspondences_after_rejector(new pcl::Correspondences);
    rejector.getCorrespondences(*correspondences_after_rejector);
    //cout << "Correspondences (After) : " << correspondences_after_rejector->size() << "\n";

    Eigen::Matrix4f transformation = Eigen::Matrix4f::Identity();

    pcl::registration::TransformationEstimationLM<pcl::PointXYZ, pcl::PointXYZ> trans_est_lm;
    trans_est_lm.estimateRigidTransformation(*source, *target, *correspondences_after_rejector, transformation);
    pcl::PointCloud<pcl::PointXYZ>::Ptr out(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::transformPointCloud(*source, *out, transformation);
    pcl::io::savePCDFile("left.pcd", *out);
    cout << "原始匹配点对有:" << all_correspondences->size() << "对" << endl;
    cout << "剔除错误点对后有:" << correspondences_after_rejector->size() << "对" << endl;
    cout << "LM求解的变换矩阵为:\n" << transformation << endl;

    //--------------------------- Visualization -------------------------
    boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer(new pcl::visualization::PCLVisualizer("point clouds"));
    viewer->setBackgroundColor(0, 0, 0);  // black background
    // Source cloud in green
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> input_color(source, 0, 255, 0);
    viewer->addPointCloud<pcl::PointXYZ>(source, input_color, "source");
    viewer->setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "source");
    // Target cloud in red
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> target_color(target, 255, 0, 0);
    viewer->addPointCloud<pcl::PointXYZ>(target, target_color, "target");
    viewer->setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "target");

    //---------------------- Visualize the correspondences ---------------------
    viewer->addCorrespondences<pcl::PointXYZ>(source, target, *correspondences_after_rejector, "correspondence");
    viewer->initCameraParameters();
    viewer->addText("RejectorSurfaceNormal", 10, 10, "text");
    while (!viewer->wasStopped())
    {
        viewer->spinOnce(100);
        boost::this_thread::sleep(boost::posix_time::microseconds(100000));
    }

    return 0;
}// normal-shooting correspondences

(4) Extracting the non-overlapping part of two clouds

#include <pcl/io/pcd_io.h>
#include <pcl/kdtree/io.h>
#include <pcl/filters/extract_indices.h>
#include <pcl/registration/correspondence_estimation.h>
#include <pcl/filters/filter.h>
using namespace std;

int main()
{

    pcl::PointCloud<pcl::PointXYZ>::Ptr source(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr target(new pcl::PointCloud<pcl::PointXYZ>);

    //---------------------- Load the clouds -------------------------
    pcl::io::loadPCDFile<pcl::PointXYZ>("source.pcd", *source);
    pcl::io::loadPCDFile<pcl::PointXYZ>("target.pcd", *target);
    //-------------------- Find the overlapping part -----------------------
    std::vector<int> nan_indices;
    pcl::removeNaNFromPointCloud(*source, *source, nan_indices);
    pcl::removeNaNFromPointCloud(*target, *target, nan_indices);

    pcl::registration::CorrespondenceEstimation<pcl::PointXYZ, pcl::PointXYZ> core;
    core.setInputSource(source);
    core.setInputTarget(target);
    boost::shared_ptr<pcl::Correspondences> cor(new pcl::Correspondences);
    core.determineReciprocalCorrespondences(*cor, 0.1);   // maximum allowed distance between corresponding points

    //------------------ Build the indices of the overlapping points ------------------

    vector<int> pointIdxVec_A;
    vector<int> pointIdxVec_B;

    pcl::registration::getQueryIndices(*cor, pointIdxVec_A); // indices of the overlap in the cloud passed to setInputSource()
    pcl::registration::getMatchIndices(*cor, pointIdxVec_B); // indices of the overlap in the cloud passed to setInputTarget()

    //------------------ Extract points by index --------------------
    pcl::PointCloud<pcl::PointXYZ>::Ptr non_overlapA(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr non_overlapB(new pcl::PointCloud<pcl::PointXYZ>);
    int non_A = target->points.size() - pointIdxVec_B.size(); // number of non-overlapping points in the target
    int non_B = source->points.size() - pointIdxVec_A.size(); // number of non-overlapping points in the source
    if (non_A < non_B) {

        pcl::ExtractIndices<pcl::PointXYZ> extrA;
        extrA.setInputCloud(source); // input cloud
        extrA.setIndices(std::make_shared<vector<int>>(pointIdxVec_A)); // indices of the overlapping points
        extrA.setNegative(true);   // extract the points outside the given indices
        extrA.filter(*non_overlapA);
        pcl::io::savePCDFileASCII("non_overlap.pcd", *non_overlapA);
    }
    else {
        pcl::ExtractIndices<pcl::PointXYZ> extrB;
        extrB.setInputCloud(target);
        extrB.setIndices(std::make_shared<vector<int>>(pointIdxVec_B));
        extrB.setNegative(true);
        extrB.filter(*non_overlapB);

        pcl::io::savePCDFileASCII("non_overlap.pcd", *non_overlapB);

    };

    return 0;
}// non-overlapping part of the clouds
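The same index machinery can keep the overlapping part instead of discarding it; a short sketch reusing source and pointIdxVec_A from the listing above:

pcl::PointCloud<pcl::PointXYZ>::Ptr overlapA(new pcl::PointCloud<pcl::PointXYZ>);
pcl::ExtractIndices<pcl::PointXYZ> extr;
extr.setInputCloud(source);
extr.setIndices(std::make_shared<vector<int>>(pointIdxVec_A)); // indices of the overlap in the source cloud
extr.setNegative(false); // false: keep only the indexed (overlapping) points
extr.filter(*overlapA);
pcl::io::savePCDFileASCII("overlap.pcd", *overlapA);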

3. Robust pose estimation of rigid objects

The point type here is PointNormal (pcl::SampleConsensusPrerejective<PointNT, PointNT, FeatureT> align;).

#include <Eigen/Core>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/common/time.h>
#include <pcl/console/print.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/fpfh.h>
#include <pcl/filters/filter.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/io/pcd_io.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/sample_consensus_prerejective.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/visualization/pcl_visualizer.h>

// Types
typedef pcl::PointNormal PointNT;
typedef pcl::PointCloud<PointNT> PointCloudT;
typedef pcl::FPFHSignature33 FeatureT; // FPFH descriptor type
typedef pcl::FPFHEstimation<PointNT, PointNT, FeatureT> FeatureEstimationT; // FPFH estimation class
typedef pcl::PointCloud<FeatureT> FeatureCloudT;
typedef pcl::visualization::PointCloudColorHandlerCustom<PointNT> ColorHandlerT; // custom color handler

// Align a rigid object to a scene with clutter and occlusions
int
main(int argc, char** argv)
{
    // Point clouds
    PointCloudT::Ptr object(new PointCloudT); // object model (used as the source of the alignment)
    PointCloudT::Ptr object_aligned(new PointCloudT);
    PointCloudT::Ptr scene(new PointCloudT); // scene (used as the target)
    FeatureCloudT::Ptr object_features(new FeatureCloudT); // object descriptors
    FeatureCloudT::Ptr scene_features(new FeatureCloudT); // scene descriptors

    // Get input object and scene
    if (argc != 3)
    {
        pcl::console::print_error("Syntax is: %s object.pcd scene.pcd\n", argv[0]);
        return (1);
    }

    // Load the object and scene clouds
    pcl::console::print_highlight("Loading point clouds...\n");
    if (pcl::io::loadPCDFile<PointNT>(argv[1], *object) < 0 ||
        pcl::io::loadPCDFile<PointNT>(argv[2], *scene) < 0)
    {
        pcl::console::print_error("Error loading object/scene file!\n");
        return (1);
    }
    // Remove NaN points (this belongs after a successful load, not inside the error branch)
    std::vector<int> nan_indices;
    pcl::removeNaNFromPointCloud(*object, *object, nan_indices);
    pcl::removeNaNFromPointCloud(*scene, *scene, nan_indices);

    // Downsample
    pcl::console::print_highlight("Downsampling...\n");
    pcl::VoxelGrid<PointNT> grid;
    const float leaf = 0.005f;
    grid.setLeafSize(leaf, leaf, leaf); // voxel size used by the filter
    grid.setInputCloud(object);
    grid.filter(*object); // filter in place
    grid.setInputCloud(scene);
    grid.filter(*scene);

    // Estimate normals for the scene
    pcl::console::print_highlight("Estimating scene normals...\n");
    pcl::NormalEstimation<PointNT, PointNT> nest; // normal estimation object
    nest.setRadiusSearch(0.01); // typically at least 5x the cloud resolution
    nest.setInputCloud(scene);
    nest.compute(*scene);

    // Feature estimation
    pcl::console::print_highlight("Estimating features...\n");
    FeatureEstimationT fest; // FPFH estimation object
    fest.setRadiusSearch(0.025);
    fest.setInputCloud(object);
    fest.setInputNormals(object);
    fest.compute(*object_features);
    fest.setInputCloud(scene);
    fest.setInputNormals(scene);
    fest.compute(*scene_features);

    // Perform alignment
    pcl::console::print_highlight("Starting alignment...\n");
    pcl::SampleConsensusPrerejective<PointNT, PointNT, FeatureT> align;
    align.setInputSource(object);
    align.setSourceFeatures(object_features);
    align.setInputTarget(scene);
    align.setTargetFeatures(scene_features);
    align.setMaximumIterations(50000); // number of RANSAC iterations
    align.setNumberOfSamples(3); // number of points sampled to generate a pose hypothesis
    align.setCorrespondenceRandomness(5); // number of nearest features to pick a match from
    align.setSimilarityThreshold(0.9f); // polygonal edge-length similarity threshold
    align.setMaxCorrespondenceDistance(2.5f * 0.005); // inlier distance threshold (2.5 x leaf)
    align.setInlierFraction(0.25f); // required inlier fraction for accepting a pose hypothesis
    {
        pcl::ScopeTime t("Alignment");
        align.align(*object_aligned);
    }

    if (align.hasConverged())
    {
        // Print results
        printf("\n");
        Eigen::Matrix4f transformation = align.getFinalTransformation();
        pcl::console::print_info("    | %6.3f %6.3f %6.3f | \n", transformation(0, 0), transformation(0, 1), transformation(0, 2));
        pcl::console::print_info("R = | %6.3f %6.3f %6.3f | \n", transformation(1, 0), transformation(1, 1), transformation(1, 2));
        pcl::console::print_info("    | %6.3f %6.3f %6.3f | \n", transformation(2, 0), transformation(2, 1), transformation(2, 2));
        pcl::console::print_info("\n");
        pcl::console::print_info("t = < %0.3f, %0.3f, %0.3f >\n", transformation(0, 3), transformation(1, 3), transformation(2, 3));
        pcl::console::print_info("\n");
        pcl::console::print_info("Inliers: %i/%i\n", align.getInliers().size(), object->size());

        // Show alignment
        pcl::visualization::PCLVisualizer visu("PCL Tutorial (2nd edition) - robust pose estimation");
        int v1(0), v2(0);
        visu.createViewPort(0, 0, 0.5, 1, v1);
        visu.createViewPort(0.5, 0, 1, 1, v2);
        visu.setBackgroundColor(255, 255, 255, v1);
        visu.addPointCloud(scene, ColorHandlerT(scene, 0.0, 255.0, 0.0), "scene", v1);
        visu.addPointCloud(object_aligned, ColorHandlerT(object_aligned, 0.0, 0.0, 255.0), "object_aligned", v1);

        visu.addPointCloud(object, ColorHandlerT(object, 0.0, 255.0, 0.0), "object_before_aligned", v2);
        visu.addPointCloud(scene, ColorHandlerT(scene, 0.0, 0.0, 255.0), "scene_v2", v2);
        visu.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "scene");
        visu.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "object_aligned");
        visu.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "object_before_aligned");
        visu.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "scene_v2");
        visu.spin();
    }
    else
    {
        pcl::console::print_error("Alignment failed!\n");
        return (1);
    }

    return (0);
}

ICP refinement after the coarse alignment

#include <Eigen/Core>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/io/pcd_io.h>

#include <pcl/common/time.h> // timing utilities
#include <pcl/console/print.h> // PCL console output

#include <pcl/features/normal_3d_omp.h> // OMP-accelerated normal estimation
#include <pcl/features/fpfh.h>
#include <pcl/filters/filter.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/registration/sample_consensus_prerejective.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/registration/icp.h>
// Types
typedef pcl::PointXYZ PointT;
typedef pcl::PointCloud<PointT> PointCloud;
typedef pcl::PointNormal PointNT;
typedef pcl::PointCloud<PointNT> PointCloudT;
typedef pcl::FPFHSignature33 FeatureT;
typedef pcl::FPFHEstimation<PointNT, PointNT, FeatureT> FeatureEstimationT;
typedef pcl::PointCloud<FeatureT> FeatureCloudT;
typedef pcl::visualization::PointCloudColorHandlerCustom<PointNT> ColorHandlerT;
// Align a rigid object to a scene with clutter and occlusions
int
main(int argc, char** argv)
{
    // Point clouds
    PointCloudT::Ptr object(new PointCloudT);
    PointCloudT::Ptr object_aligned(new PointCloudT);
    PointCloudT::Ptr scene(new PointCloudT);
    FeatureCloudT::Ptr object_features(new FeatureCloudT);
    FeatureCloudT::Ptr scene_features(new FeatureCloudT);


    // Load the object and scene clouds
    pcl::console::print_highlight("Loading point clouds\n");

    if (pcl::io::loadPCDFile<PointNT>("target.pcd", *object) < 0 ||
        pcl::io::loadPCDFile<PointNT>("source.pcd", *scene) < 0)
    {
        pcl::console::print_error("Error loading the point clouds!\n");
        return (1);
    }
    std::vector<int> nan_indices;
    pcl::removeNaNFromPointCloud(*object, *object, nan_indices);
    pcl::removeNaNFromPointCloud(*scene, *scene, nan_indices);
    std::cout << object->size() << endl;
    std::cout << scene->size() << endl;


    // Downsample
    pcl::console::print_highlight("Downsampling\n");
    pcl::VoxelGrid<PointNT> grid;
    const float leaf = 0.8f;
    grid.setLeafSize(leaf, leaf, leaf);
    grid.setInputCloud(object);
    grid.filter(*object);
    grid.setInputCloud(scene);
    grid.filter(*scene);
    std::cout << object->size() << endl;
    std::cout << scene->size() << endl;
    // Estimate normals for the scene and the object
    pcl::console::print_highlight("Estimating normals\n");
    pcl::NormalEstimationOMP<PointNT, PointNT> n;
    n.setNumberOfThreads(4); // number of OpenMP threads
    n.setRadiusSearch(1.25);
    n.setInputCloud(scene);
    n.compute(*scene);
    n.setInputCloud(object);
    n.compute(*object);

    // Feature estimation
    pcl::console::print_highlight("Computing FPFH features\n");
    FeatureEstimationT f;
    f.setRadiusSearch(1.25);
    f.setInputCloud(object);
    f.setInputNormals(object);
    f.compute(*object_features);
    f.setInputCloud(scene);
    f.setInputNormals(scene);
    f.compute(*scene_features);

    // Perform alignment
    pcl::console::print_highlight("Starting alignment\n");
    pcl::SampleConsensusPrerejective<PointNT, PointNT, FeatureT> align;
    align.setInputSource(scene);
    align.setSourceFeatures(scene_features);
    align.setInputTarget(object);
    align.setTargetFeatures(object_features);
    align.setMaximumIterations(50000);      // number of RANSAC iterations
    align.setNumberOfSamples(3);            // number of points sampled to generate a pose hypothesis
    align.setCorrespondenceRandomness(5);   // number of nearest features to pick a match from
    align.setSimilarityThreshold(0.8f);     // polygonal edge-length similarity threshold
    align.setMaxCorrespondenceDistance(2.6f * 1.0); // inlier distance threshold
    align.setInlierFraction(0.25f);         // required inlier fraction for accepting a pose hypothesis
    {
        pcl::ScopeTime t("Alignment");
        align.align(*object_aligned);
    }
    pcl::io::savePCDFileASCII("object_aligned.pcd", *object_aligned);
    PointCloudT::Ptr icp_result(new PointCloudT);
    pcl::IterativeClosestPoint<PointNT, PointNT> icp;
    icp.setInputSource(object_aligned);
    icp.setInputTarget(object);
    icp.setMaxCorrespondenceDistance(1.5);
    // maximum number of iterations
    icp.setMaximumIterations(100);

    icp.setTransformationEpsilon(1e-10); // minimum change between consecutive transformations

    icp.setEuclideanFitnessEpsilon(0.02); // minimum change of the mean squared error between iterations
    icp.align(*icp_result);
    pcl::io::savePCDFile("icp_result.pcd", *icp_result);
    std::cout << "all aligned!" << endl;
    if (align.hasConverged())
    {
        // Print results
        printf("\n");
        Eigen::Matrix4f transformation = align.getFinalTransformation();
        pcl::console::print_info("    | %6.3f %6.3f %6.3f | \n", transformation(0, 0), transformation(0, 1), transformation(0, 2));
        pcl::console::print_info("R = | %6.3f %6.3f %6.3f | \n", transformation(1, 0), transformation(1, 1), transformation(1, 2));
        pcl::console::print_info("    | %6.3f %6.3f %6.3f | \n", transformation(2, 0), transformation(2, 1), transformation(2, 2));
        pcl::console::print_info("\n");
        pcl::console::print_info("t = < %0.3f, %0.3f, %0.3f >\n", transformation(0, 3), transformation(1, 3), transformation(2, 3));
        pcl::console::print_info("\n");
        pcl::console::print_info("Inliers: %i/%i\n", align.getInliers().size(), object->size());

        // Show alignment
        pcl::console::print_highlight("Left: before alignment, right: after alignment\n");
        pcl::visualization::PCLVisualizer visu("Robust pose estimation");
        visu.initCameraParameters();
        int v1(0), v2(0);
        visu.setWindowName("Robust pose estimation");
        visu.createViewPort(0, 0, 0.5, 1, v1);
        visu.createViewPort(0.5, 0, 1, 1, v2);
        visu.setBackgroundColor(255, 255, 255, v2);
        visu.addPointCloud(scene, ColorHandlerT(scene, 0.0, 255.0, 0.0), "scene", v2);
        visu.addPointCloud(object_aligned, ColorHandlerT(object_aligned, 0.0, 0.0, 255.0), "object_aligned", v2);

        visu.addPointCloud(object, ColorHandlerT(object, 0.0, 255.0, 0.0), "object_before_aligned", v1);
        visu.addPointCloud(scene, ColorHandlerT(scene, 0.0, 0.0, 255.0), "scene_v2", v1);
        visu.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "scene");
        visu.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "object_aligned");
        visu.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "object_before_aligned");
        visu.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "scene_v2");
        visu.spin();
    }
    else
    {
        pcl::console::print_error("Alignment failed!\n");
        return (1);
    }


    return (0);
}
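The listing above runs ICP after the prerejective RANSAC alignment but never reports the ICP result. A sketch of what could be added after icp.align(*icp_result), assuming the variables from the listing: since align maps the scene into the object frame and ICP refines that result, the total transform is the product of the two.

if (icp.hasConverged())
{
    // align.getFinalTransformation() maps the scene into the object frame;
    // icp.getFinalTransformation() refines that alignment, so the total transform is their product.
    Eigen::Matrix4f total_transform = icp.getFinalTransformation() * align.getFinalTransformation();
    std::cout << "ICP fitness score: " << icp.getFitnessScore() << std::endl;
    std::cout << "total scene-to-object transform (coarse alignment followed by ICP):\n" << total_transform << std::endl;
}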

RANSAC + FPFH with PointXYZ input clouds

#include <Eigen/Core>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/common/time.h>
#include <pcl/console/print.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/features/fpfh_omp.h>
#include <pcl/filters/filter.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/io/io.h>
#include <pcl/io/pcd_io.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/sample_consensus_prerejective.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/visualization/pcl_visualizer.h>
#include<pcl/features/normal_3d.h>
#include <pcl/features/fpfh.h>

// Align a rigid object to a scene with clutter and occlusions
int
main(int argc, char** argv)
{
    // Types
    typedef pcl::PointNormal PointNT;
    //typedef pcl::PointXYZ PointNT;
    typedef pcl::PointCloud<PointNT> PointCloudT;
    typedef pcl::FPFHSignature33 FeatureT;
    typedef pcl::FPFHEstimationOMP<PointNT, PointNT, FeatureT> FeatureEstimationT;
    typedef pcl::PointCloud<FeatureT> FeatureCloudT;
    typedef pcl::visualization::PointCloudColorHandlerCustom<PointNT> ColorHandlerT;

    // Point clouds
    PointCloudT::Ptr object(new PointCloudT);
    PointCloudT::Ptr object_aligned(new PointCloudT);
    PointCloudT::Ptr scene(new PointCloudT);
    PointCloudT::Ptr down_s(new PointCloudT);
    PointCloudT::Ptr down_t(new PointCloudT);

    PointCloudT::Ptr normal_s(new PointCloudT);
    PointCloudT::Ptr normal_t(new PointCloudT);

    FeatureCloudT::Ptr object_features(new FeatureCloudT);
    FeatureCloudT::Ptr scene_features(new FeatureCloudT);

    // Load object and scene
    /*pcl::console::print_highlight("Loading point clouds...\n");
    if (pcl::io::loadPCDFile<PointNT>("C:\\Users\\SCQ\\Desktop\\source.pcd", *object) < 0 ||
        pcl::io::loadPCDFile<PointNT>("C:\\Users\\SCQ\\Desktop\\target.pcd", *scene) < 0)
    {
        pcl::console::print_error("Error loading object/scene file!\n");
        return (1);
    }*/
    // Estimate normals for scene
    pcl::console::print_highlight("Estimating scene normals...\n");
    /*pcl::NormalEstimationOMP<PointNT, PointNT> nest;
    nest.setRadiusSearch(0.2);
    nest.setInputCloud(scene);
    nest.compute(*normal_s);
    nest.setRadiusSearch(0.5);
    nest.setInputCloud(object);
    nest.compute(*normal_t);
    pcl::io::savePCDFile("normal22.pcd", *object);
    pcl::io::savePCDFile("normal.pcd", *normal_t);*/

    // ---- the code actually used starts here (the commented-out block above was an earlier attempt) ----
    pcl::console::print_highlight("Loading point clouds...\n");
    pcl::PointCloud<pcl::PointXYZ>::Ptr source(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::PointCloud<pcl::PointXYZ>::Ptr target(new pcl::PointCloud<pcl::PointXYZ>);

    if (pcl::io::loadPCDFile("source.pcd", *source) < 0 ||
        pcl::io::loadPCDFile("target.pcd", *target) < 0)
    {
        pcl::console::print_error("Error loading object/scene file!\n");
        return (1);
    }
    std::vector<int> indices;
    pcl::removeNaNFromPointCloud(*source, *source, indices);
    pcl::removeNaNFromPointCloud(*target, *target, indices);

    //pcl::io::savePCDFileBinary("12300.pcd", *target);

    pcl::NormalEstimation<pcl::PointXYZ, pcl::PointNormal> ne;
    ne.setInputCloud(source);
    pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>());
    ne.setSearchMethod(tree);
    ne.setKSearch(15);
    pcl::PointCloud<pcl::PointNormal>::Ptr  cloud_normals(new pcl::PointCloud<pcl::PointNormal>);
    //ne.setRadiusSearch(2);

    ne.compute(*cloud_normals);
    pcl::copyPointCloud(*source, *cloud_normals);
    //pcl::io::savePCDFileBinary("normal.pcd", *cloud_normals);

    //target
    pcl::NormalEstimation<pcl::PointXYZ, pcl::PointNormal> ne2;
    ne2.setInputCloud(target);
    ne2.setSearchMethod(tree);

    pcl::PointCloud<pcl::PointNormal>::Ptr cloud_normalt(new pcl::PointCloud<pcl::PointNormal>);
    //ne.setRadiusSearch(2);
    ne2.setKSearch(15);
    ne2.compute(*cloud_normalt);
    pcl::copyPointCloud(*target, *cloud_normalt);


    // Downsample
    //std::cout << object->size() << endl;
    //std::cout << scene->size() << endl;

    pcl::console::print_highlight("Downsampling...\n");
    pcl::PointCloud<pcl::PointNormal>::Ptr cloud_normals_down(new pcl::PointCloud<pcl::PointNormal>);
    pcl::PointCloud<pcl::PointNormal>::Ptr cloud_normalt_down(new pcl::PointCloud<pcl::PointNormal>);
    pcl::VoxelGrid<pcl::PointNormal> grid;
    const float leaf = 0.8f;//0.005f;
    grid.setLeafSize(leaf, leaf, leaf);
    grid.setInputCloud(cloud_normals);
    grid.filter(*cloud_normals_down);
    grid.setInputCloud(cloud_normalt);
    grid.filter(*cloud_normalt_down);
    //pcl::io::savePCDFile("a0.pcd", *cloud_normalt);
    //pcl::io::savePCDFile("a.pcd", *cloud_normalt_down);

    // Estimate features
    pcl::console::print_highlight("Estimating features...\n");
    pcl::FPFHEstimation<pcl::PointNormal, pcl::PointNormal, pcl::FPFHSignature33> fpfh;
    //pcl::PointCloud<pcl::PointXYZ>::Ptr  cloud_normals_xyz(new pcl::PointCloud<pcl::PointXYZ>);
    fpfh.setInputCloud(cloud_normals_down);
    fpfh.setInputNormals(cloud_normals_down);

    //fpfh.setSearchMethod(tree);
    pcl::PointCloud<pcl::FPFHSignature33>::Ptr fpfhs(new pcl::PointCloud<pcl::FPFHSignature33>());
    pcl::PointCloud<pcl::FPFHSignature33>::Ptr fpfht(new pcl::PointCloud<pcl::FPFHSignature33>());
    fpfh.setRadiusSearch(3.5);
    fpfh.compute(*fpfhs);




    //pfh.setKSearch(30);
// Compute the target features
    fpfh.setInputCloud(cloud_normalt_down);
    fpfh.setInputNormals(cloud_normalt_down);
    //fpfh.setSearchMethod(tree);
    fpfh.setRadiusSearch(3.5);
    fpfh.compute(*fpfht);
    

    // Perform alignment
    pcl::console::print_highlight("Starting alignment...\n");
    pcl::SampleConsensusPrerejective<PointNT, PointNT, FeatureT> align;
    align.setInputSource(cloud_normals_down);
    align.setSourceFeatures(fpfhs);
    align.setInputTarget(cloud_normalt_down);
    align.setTargetFeatures(fpfht);

    align.setMaximumIterations(5000); // Number of RANSAC iterations
    align.setNumberOfSamples(3);// Number of points to sample for generating/prerejecting a pose
    align.setCorrespondenceRandomness(5); // Number of nearest features to use
    align.setSimilarityThreshold(0.9f); // Polygonal edge length similarity threshold
    align.setMaxCorrespondenceDistance(1.5f); // Inlier threshold 2.5f* leaf
    align.setInlierFraction(0.5f); // Required inlier fraction for accepting a pose hypothesis
    {
        //pcl::ScopeTime t("Alignment");
        align.align(*object_aligned);

    }

    cout << "计算完毕" << endl;

    if (align.hasConverged())
    {
        // Print results
        printf("\n");
        Eigen::Matrix4f transformation = align.getFinalTransformation();
        pcl::console::print_info("    | %6.3f %6.3f %6.3f | \n", transformation(0, 0), transformation(0, 1), transformation(0, 2));
        pcl::console::print_info("R = | %6.3f %6.3f %6.3f | \n", transformation(1, 0), transformation(1, 1), transformation(1, 2));
        pcl::console::print_info("    | %6.3f %6.3f %6.3f | \n", transformation(2, 0), transformation(2, 1), transformation(2, 2));
        pcl::console::print_info("\n");
        pcl::console::print_info("t = < %0.3f, %0.3f, %0.3f >\n", transformation(0, 3), transformation(1, 3), transformation(2, 3));
        pcl::console::print_info("\n");
        pcl::console::print_info("Inliers: %i/%i\n", align.getInliers().size(), object->size());
        pcl::transformPointCloud(*object_aligned, *object_aligned, transformation);
        pcl::io::savePCDFile("11111111.pcd", *object_aligned);
        // Show alignment
        pcl::visualization::PCLVisualizer visu("Alignment");
        visu.addPointCloud(scene, ColorHandlerT(scene, 0.0, 255.0, 0.0), "scene");
        visu.addPointCloud(object_aligned, ColorHandlerT(object_aligned, 0.0, 0.0, 255.0), "object_aligned");
        visu.spin();
    }
    else
    {
        pcl::console::print_error("Alignment failed!\n");
        return (1);
    }

    return (0);
}

The results with these hand-tuned parameters are still not ideal.
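The note above is fair: the radii and thresholds in these listings are hard-coded and do not scale with the data. One hedged rule of thumb, taken from the ratios used in the robust pose estimation listing (normal radius about 2x leaf, feature radius about 5x leaf, inlier distance about 2.5x leaf), is to derive every radius from the voxel leaf size; the factors below are assumptions, not fixed rules:

// A minimal sketch: derive the search radii from the voxel leaf size instead of hard-coding them.
const float leaf = 0.8f;                    // voxel size used for downsampling
const float normal_radius = 2.0f * leaf;    // radius for normal estimation (assumed factor)
const float feature_radius = 5.0f * leaf;   // radius for FPFH estimation; must be larger than normal_radius
const float inlier_threshold = 2.5f * leaf; // setMaxCorrespondenceDistance for SampleConsensusPrerejective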

SAC-IA initial alignment (pcl::SampleConsensusInitialAlignment<pcl::PointXYZ, pcl::PointXYZ, pcl::FPFHSignature33> scia;)

#include <pcl/registration/ia_ransac.h>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/features/normal_3d.h>
#include <pcl/features/fpfh.h>
#include <pcl/search/kdtree.h>
#include <pcl/io/pcd_io.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/filter.h>
#include <pcl/registration/icp.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <time.h>

using pcl::NormalEstimation;
using pcl::search::KdTree;
typedef pcl::PointXYZ PointT;
typedef pcl::PointCloud<PointT> PointCloud;

// Point cloud visualization
void visualize_pcd(PointCloud::Ptr pcd_src,
    PointCloud::Ptr pcd_tgt,
    PointCloud::Ptr pcd_final)
{
    //int vp_1, vp_2;
    // Create a PCLVisualizer object
    pcl::visualization::PCLVisualizer viewer("registration Viewer");
    viewer.initCameraParameters();
    //viewer.createViewPort (0.0, 0, 0.5, 1.0, vp_1);
   // viewer.createViewPort (0.5, 0, 1.0, 1.0, vp_2);
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> src_h(pcd_src, 0, 255, 0);
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> tgt_h(pcd_tgt, 255, 0, 0);
    pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> final_h(pcd_final, 0, 0, 255);
    viewer.addPointCloud(pcd_src, src_h, "source cloud");
    viewer.addPointCloud(pcd_tgt, tgt_h, "tgt cloud");
    viewer.addPointCloud(pcd_final, final_h, "final cloud");
    //viewer.addCoordinateSystem(1.0);
    while (!viewer.wasStopped())
    {
        viewer.spinOnce(100);

    }
}

// Compute the rotation angles from a rotation/translation matrix
void matrix2angle(Eigen::Matrix4f& result_trans, Eigen::Vector3f& result_angle)
{
    double ax, ay, az;
    if (result_trans(2, 0) == 1 || result_trans(2, 0) == -1)
    {
        az = 0;
        double dlta;
        dlta = atan2(result_trans(0, 1), result_trans(0, 2));
        if (result_trans(2, 0) == -1)
        {
            ay = M_PI / 2;
            ax = az + dlta;
        }
        else
        {
            ay = -M_PI / 2;
            ax = -az + dlta;
        }
    }
    else
    {
        ay = -asin(result_trans(2, 0));
        ax = atan2(result_trans(2, 1) / cos(ay), result_trans(2, 2) / cos(ay));
        az = atan2(result_trans(1, 0) / cos(ay), result_trans(0, 0) / cos(ay));
    }
    result_angle << ax, ay, az;
}

int
main(int argc, char** argv)
{
    // Load the point cloud files
    PointCloud::Ptr cloud_src_o(new PointCloud); // source cloud to be registered
    pcl::io::loadPCDFile("source.pcd", *cloud_src_o);
    PointCloud::Ptr cloud_tgt_o(new PointCloud); // target cloud
    pcl::io::loadPCDFile("target.pcd", *cloud_tgt_o);

    clock_t start = clock();
    // Remove NaN points
    std::vector<int> indices_src; // indices of the kept points
    pcl::removeNaNFromPointCloud(*cloud_src_o, *cloud_src_o, indices_src);
    pcl::removeNaNFromPointCloud(*cloud_tgt_o, *cloud_tgt_o, indices_src);
    std::cout << "remove *cloud_src_o nan" << endl;
    // Downsample with a voxel grid
    pcl::VoxelGrid<pcl::PointXYZ> voxel_grid;
    voxel_grid.setLeafSize(0.8, 0.8, 0.8);
    voxel_grid.setInputCloud(cloud_src_o);
    PointCloud::Ptr cloud_src(new PointCloud);
    voxel_grid.filter(*cloud_src);
    std::cout << "down size *cloud_src_o from " << cloud_src_o->size() << "to" << cloud_src->size() << endl;
    //pcl::io::savePCDFileASCII("bunny_src_down.pcd", *cloud_src);
    // Estimate surface normals
    pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne_src;
    ne_src.setInputCloud(cloud_src);
    pcl::search::KdTree< pcl::PointXYZ>::Ptr tree_src(new pcl::search::KdTree< pcl::PointXYZ>());
    ne_src.setSearchMethod(tree_src);
    pcl::PointCloud<pcl::Normal>::Ptr cloud_src_normals(new pcl::PointCloud< pcl::Normal>);
    ne_src.setRadiusSearch(1.25);
    ne_src.compute(*cloud_src_normals);

 

    pcl::VoxelGrid<pcl::PointXYZ> voxel_grid_2;
    voxel_grid_2.setLeafSize(0.8, 0.8, 0.8);
    voxel_grid_2.setInputCloud(cloud_tgt_o);
    PointCloud::Ptr cloud_tgt(new PointCloud);
    voxel_grid_2.filter(*cloud_tgt);
    std::cout << "down size *cloud_tgt_o.pcd from " << cloud_tgt_o->size() << "to" << cloud_tgt->size() << endl;
    //pcl::io::savePCDFileASCII("bunny_tgt_down.pcd", *cloud_tgt);

    pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> ne_tgt;
    ne_tgt.setInputCloud(cloud_tgt);
    pcl::search::KdTree< pcl::PointXYZ>::Ptr tree_tgt(new pcl::search::KdTree< pcl::PointXYZ>());
    ne_tgt.setSearchMethod(tree_tgt);
    pcl::PointCloud<pcl::Normal>::Ptr cloud_tgt_normals(new pcl::PointCloud< pcl::Normal>);
    //ne_tgt.setKSearch(20);
    ne_tgt.setRadiusSearch(1.25);
    ne_tgt.compute(*cloud_tgt_normals);

    // Compute FPFH descriptors
    pcl::FPFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::FPFHSignature33> fpfh_src;
    fpfh_src.setInputCloud(cloud_src);
    fpfh_src.setInputNormals(cloud_src_normals);
    pcl::search::KdTree<PointT>::Ptr tree_src_fpfh(new pcl::search::KdTree<PointT>);
    fpfh_src.setSearchMethod(tree_src_fpfh);
    pcl::PointCloud<pcl::FPFHSignature33>::Ptr fpfhs_src(new pcl::PointCloud<pcl::FPFHSignature33>());
    fpfh_src.setRadiusSearch(2.5);
    fpfh_src.compute(*fpfhs_src);
    std::cout << "compute *cloud_src fpfh" << endl;

    pcl::FPFHEstimation<pcl::PointXYZ, pcl::Normal, pcl::FPFHSignature33> fpfh_tgt;
    fpfh_tgt.setInputCloud(cloud_tgt);
    fpfh_tgt.setInputNormals(cloud_tgt_normals);
    pcl::search::KdTree<PointT>::Ptr tree_tgt_fpfh(new pcl::search::KdTree<PointT>);
    fpfh_tgt.setSearchMethod(tree_tgt_fpfh);
    pcl::PointCloud<pcl::FPFHSignature33>::Ptr fpfhs_tgt(new pcl::PointCloud<pcl::FPFHSignature33>());
    fpfh_tgt.setRadiusSearch(2.5);
    fpfh_tgt.compute(*fpfhs_tgt);
    std::cout << "compute *cloud_tgt fpfh" << endl;

    // SAC-IA coarse registration

    pcl::SampleConsensusInitialAlignment<pcl::PointXYZ, pcl::PointXYZ, pcl::FPFHSignature33> scia;
    scia.setInputSource(cloud_src);
    scia.setInputTarget(cloud_tgt);
    scia.setSourceFeatures(fpfhs_src);
    scia.setTargetFeatures(fpfhs_tgt);
    //scia.setMinSampleDistance(0.85);
    scia.setRANSACIterations(5000);
    scia.setNumberOfSamples(3);
    scia.setMaximumIterations(5000);
    scia.setCorrespondenceRandomness(5);
    scia.setMaxCorrespondenceDistance(2.6);
    PointCloud::Ptr sac_result(new PointCloud);
    scia.align(*sac_result);
    pcl::io::savePCDFile("align123.pcd", *sac_result);
    std::cout << "sac has converged:" << scia.hasConverged() << "  score: " << scia.getFitnessScore() << endl;
    Eigen::Matrix4f sac_trans;
    sac_trans = scia.getFinalTransformation();
    std::cout << sac_trans << endl;
    //pcl::io::savePCDFileASCII("bunny_transformed_sac.pcd", *sac_result);
    clock_t sac_time = clock();

    // ICP refinement (commented out by the author; the stray uncommented lines in the original
    // would not compile, so they are kept as comments here)
    //PointCloud::Ptr icp_result(new PointCloud);
    //pcl::IterativeClosestPoint<pcl::PointXYZ, pcl::PointXYZ> icp;
    //icp.setInputSource(sac_result);
    //icp.setInputTarget(cloud_tgt_o);
    // Set the max correspondence distance (correspondences with higher distances will be ignored)
    //icp.setMaxCorrespondenceDistance(0.55);
    // maximum number of iterations
    //icp.setMaximumIterations(100);
    // minimum change between consecutive transformations
    //icp.setTransformationEpsilon(1e-10);
    // minimum change of the mean squared error
    //icp.setEuclideanFitnessEpsilon(0.2);
    //icp.align(*icp_result);
    //pcl::io::savePCDFile("icp.pcd", *icp_result);
    //clock_t end = clock();
    //cout << "total time: " << (double)(end - start) / (double)CLOCKS_PER_SEC << " s" << endl;
    // Note: the time spent on normal estimation and FPFH computation is counted as part of the SAC time here
    //cout << "sac time: " << (double)(sac_time - start) / (double)CLOCKS_PER_SEC << " s" << endl;
    //cout << "icp time: " << (double)(end - sac_time) / (double)CLOCKS_PER_SEC << " s" << endl;

    //std::cout << "ICP has converged:" << icp.hasConverged()
    //    << " score: " << icp.getFitnessScore() << std::endl;
    //Eigen::Matrix4f icp_trans;
    //icp_trans = icp.getFinalTransformation();
    //cout << "TransformationProbability" << icp.getTransformationProbability() << endl; // note: getTransformationProbability() belongs to NDT, not ICP
    //std::cout << icp_trans << endl;
    // Transform the unfiltered input cloud with the estimated transformation
    //PointCloud::Ptr icptrans_result(new PointCloud);
    //pcl::transformPointCloud(*cloud_src_o, *icptrans_result, icp_trans);
    // Save the transformed input cloud
    //pcl::io::savePCDFileASCII("bunny_transformed_sac_ndt.pcd", *icp_result);

    // Compute the rotation error
    //Eigen::Vector3f ANGLE_origin;
    //ANGLE_origin << 0, 0, M_PI / 5;
    //double error_x, error_y, error_z;
    //Eigen::Vector3f ANGLE_result;
    //matrix2angle(icp_trans, ANGLE_result);
    //error_x = fabs(ANGLE_result(0)) - fabs(ANGLE_origin(0));
    //error_y = fabs(ANGLE_result(1)) - fabs(ANGLE_origin(1));
    //error_z = fabs(ANGLE_result(2)) - fabs(ANGLE_origin(2));
    //cout << "original angle in x y z:\n" << ANGLE_origin << endl;
    //cout << "error in aixs_x: " << error_x << "  error in aixs_y: " << error_y << "  error in aixs_z: " << error_z << endl;

    // Visualization
    //visualize_pcd(cloud_src_o, cloud_tgt_o, icptrans_result);
    return (0);
}
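Since the ICP block is commented out, the visualize_pcd helper defined at the top of this listing can still be used to inspect the SAC-IA result; a minimal sketch of a call that could be placed just before return (0):

visualize_pcd(cloud_src_o, cloud_tgt_o, sac_result); // green: source, red: target, blue: SAC-aligned source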