
I am working on a registration project: a chair with some objects on it, rotating in front of the Kinect.

Pairwise registration works fine, but as expected some drift accumulates (see the result in the image).

I would like to use LUM to globally minimize the accumulated error (and then "propagate" the result back into the frames), but I end up with frames floating in mid-air (code below the image).

Is there anything obviously wrong in the way I use LUM? I feed it keypoints + features instead of blindly feeding it the full point clouds.

Why do all the examples add unidirectional edges rather than bidirectional ones?

[image: pairwise registration result]

double PARAM_LUM_centroidDistTHRESH   = 0.30;
int    PARAM_LUM_MaxIterations        = 100;
float  PARAM_LUM_ConvergenceThreshold = 0.0f;
int    NeighborhoodNUMB               = 2;
int    FrameDistLOOPCLOSURE           = 5;
double PARAM_CORR_REJ_InlierThreshold = 0.020;

pcl::registration::LUM<pcl::PointXYZRGBNormal> lum;
lum.setMaxIterations(        PARAM_LUM_MaxIterations );
lum.setConvergenceThreshold( PARAM_LUM_ConvergenceThreshold );

QVector< pcl::PointCloud<pcl::PointXYZRGB>::Ptr >  cloudVector_ORGan_P_;



for (int iii=0; iii<totalClouds; iii++)
{                
        // read - iii_cloud_ORGan_P_
        // transform it with pairwise registration result
        cloudVector_ORGan_P_.append( iii_cloud_ORGan_P_ );
}

for (size_t iii=0; iii<totalClouds; iii++)
{        
    pcl::compute3DCentroid( *cloudVector_ORGan_P_[iii], centrVector[iii] );

    pcl::IntegralImageNormalEstimation<pcl::PointXYZRGB,pcl::Normal> ne;
    //blah blah parameters
    //compute normals with *ne*                            
    //pcl::removeNaNFromPointCloud
    //pcl::removeNaNNormalsFromPointCloud

    pcl::ISSKeypoint3D< pcl::PointXYZRGBNormal, pcl::PointXYZRGBNormal> keyPointDetector;
    //blah blah parameters;
    //keyPointDetector.compute
    //then remove  NAN keypoints

    pcl::SHOTColorEstimationOMP< pcl::PointXYZRGBNormal,pcl::PointXYZRGBNormal,pcl::SHOT1344 > featureDescriptor;
    //featureDescriptor.setSearchSurface( **ful_unorganized_cloud_in_here** );
    //featureDescriptor.setInputNormals(  **normals_from_above____in_here** );
    //featureDescriptor.setInputCloud(    **keypoints_from_above__in_here** );
    //blah blah parameters
    //featureDescriptor.compute
    //delete NAN *Feature* + corresp. *Keypoints* with *.erase*
}

for (size_t iii=0; iii<totalClouds; iii++)
{
    lum.addPointCloud( KEYptVector_UNorg_P_[iii] );
}

for (size_t iii=1; iii<totalClouds; iii++)
{
    for (size_t jjj=0; jjj<iii; jjj++)
    {
        double cloudCentrDISTANCE = ( centrVector[iii] - centrVector[jjj] ).norm();

            if (   (cloudCentrDISTANCE<PARAM_LUM_centroidDistTHRESH  &&  qAbs(iii-jjj)<=NeighborhoodNUMB)       ||
                   (cloudCentrDISTANCE<PARAM_LUM_centroidDistTHRESH  &&  qAbs(iii-jjj)> FrameDistLOOPCLOSURE)   )
            {

                int sourceID;
                int targetID;

                if (qAbs(iii-jjj)<=NeighborhoodNUMB) // so that connections are e.g. 0->1, 1->2, 2->3, 3->4, 4->5, 5->0
                {                                    // not sure if it helps
                    sourceID = jjj;
                    targetID = iii;
                }
                else
                {
                    sourceID = iii;
                    targetID = jjj;
                }


                *source_cloud_KEYpt_P_ = *lum.getPointCloud(sourceID);
                *target_cloud_KEYpt_P_ = *lum.getPointCloud(targetID);

                *source_cloud_FEATures = *FEATtVector_UNorg_P_[sourceID];
                *target_cloud_FEATures = *FEATtVector_UNorg_P_[targetID];

                // Correspondence estimation (on the feature descriptors)
                pcl::registration::CorrespondenceEstimation<keyPointTYPE,keyPointTYPE> corrEst;
                corrEst.setInputSource(            source_cloud_FEATures );
                corrEst.setInputTarget(            target_cloud_FEATures );
                corrEst.determineCorrespondences( *corrAll );

                // Correspondence rejection (RANSAC on the keypoint coordinates)
                pcl::registration::CorrespondenceRejectorSampleConsensus<pcl::PointXYZRGBNormal> corrRej;
                corrRej.setInputSource(           source_cloud_KEYpt_P_ );
                corrRej.setInputTarget(           target_cloud_KEYpt_P_ );
                corrRej.setInlierThreshold(       PARAM_CORR_REJ_InlierThreshold     );
                corrRej.setMaximumIterations(     10000    );
                corrRej.setRefineModel(           true     );
                corrRej.setInputCorrespondences(  corrAll  );
                corrRej.getCorrespondences(      *corrFilt );

                lum.setCorrespondences( sourceID, targetID, corrFilt );

            } // if

    } // jjj

} // iii

lum.compute();

// PCLVisualizer - show this - lum.getConcatenatedCloud()
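
For the "propagate it back into the frames" step, this is roughly what I have in mind (a sketch only; it reuses the variables from above and assumes lum.getTransformation() returns the per-cloud correction with respect to the pre-aligned input, since lum.getConcatenatedCloud() only contains the keypoint clouds I added):

#include <pcl/common/transforms.h>

pcl::PointCloud<pcl::PointXYZRGB>::Ptr globalMap( new pcl::PointCloud<pcl::PointXYZRGB> );

for (size_t iii=0; iii<totalClouds; iii++)
{
    // correction estimated by LUM for vertex iii (the clouds were already pre-aligned pairwise)
    Eigen::Affine3f T = lum.getTransformation( iii );

    pcl::PointCloud<pcl::PointXYZRGB> alignedFull;
    pcl::transformPointCloud( *cloudVector_ORGan_P_[iii], alignedFull, T );

    *globalMap += alignedFull; // accumulate / visualize the corrected full frames
}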

1 Answer


After experimenting with LUM for several days, I decided to move to another graph-optimization tool, namely g2o. You can see the result in the images below; it is not perfect (note the small translational drift in the front view), but it is reasonable and much better than plain pairwise incremental registration (no really noticeable rotational drift!).

If you are interested, I suggest downloading the GitHub version! It is the most up-to-date one, whereas other releases - like this one - are outdated; I personally ran into compilation problems both when building the library itself and when building my own sources against it.
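
In case it helps, the pose-graph setup I mean looks roughly like this (a sketch only, using g2o's slam3d types; the raw-pointer solver construction matches the API at the time of writing - newer releases take std::unique_ptr instead - and the identity information matrices as well as the Constraint struct are just placeholders):

#include <vector>
#include <Eigen/Geometry>
#include <g2o/core/sparse_optimizer.h>
#include <g2o/core/block_solver.h>
#include <g2o/core/optimization_algorithm_levenberg.h>
#include <g2o/solvers/eigen/linear_solver_eigen.h>
#include <g2o/types/slam3d/vertex_se3.h>
#include <g2o/types/slam3d/edge_se3.h>

typedef g2o::BlockSolver< g2o::BlockSolverTraits<6, 3> >        SlamBlockSolver;
typedef g2o::LinearSolverEigen<SlamBlockSolver::PoseMatrixType> SlamLinearSolver;

struct Constraint { int i, j; Eigen::Isometry3d T_ij; }; // relative pose of frame j expressed in frame i

void optimizePoseGraph(std::vector<Eigen::Isometry3d>& poses,       // initial guesses (pairwise result)
                       const std::vector<Constraint>&  constraints) // neighbor + loop-closure edges
{
    g2o::SparseOptimizer optimizer;
    SlamLinearSolver* linearSolver = new SlamLinearSolver();
    SlamBlockSolver*  blockSolver  = new SlamBlockSolver(linearSolver);
    optimizer.setAlgorithm(new g2o::OptimizationAlgorithmLevenberg(blockSolver));

    // one VertexSE3 per frame; fix the first one to anchor the graph
    for (size_t i=0; i<poses.size(); ++i)
    {
        g2o::VertexSE3* v = new g2o::VertexSE3();
        v->setId(int(i));
        v->setEstimate(poses[i]);   // VertexSE3 estimates are Eigen::Isometry3d in recent g2o
        v->setFixed(i == 0);
        optimizer.addVertex(v);
    }

    // one EdgeSE3 per pairwise / loop-closure constraint
    for (size_t k=0; k<constraints.size(); ++k)
    {
        g2o::EdgeSE3* e = new g2o::EdgeSE3();
        e->setVertex(0, optimizer.vertex(constraints[k].i));
        e->setVertex(1, optimizer.vertex(constraints[k].j));
        e->setMeasurement(constraints[k].T_ij);                    // T_i^{-1} * T_j
        e->setInformation(Eigen::Matrix<double,6,6>::Identity()); // placeholder weighting
        optimizer.addEdge(e);
    }

    optimizer.initializeOptimization();
    optimizer.optimize(100);

    // read back the optimized absolute poses and apply them to the full clouds
    for (size_t i=0; i<poses.size(); ++i)
        poses[i] = dynamic_cast<g2o::VertexSE3*>(optimizer.vertex(int(i)))->estimate();
}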

[images: g2o global registration result]

Answered 2013-10-10T11:45:59.017