An Initial Localization Method on a 2D Map by Mimicking SIFT Features

When I first entered the SLAM field, the first thing I worked with was 2D LiDAR navigation robots. After building a 2D occupancy grid map, such a robot localizes with amcl, but amcl is a dynamic process: the robot has to keep moving for the filter to converge. How to localize the robot directly right after power-on therefore becomes an interesting problem. The simplest workaround is to publish an initial pose from rviz, but that is not what we want either. In addition, while the robot is running, the localization can drift due to the sensors and other factors; the robot can detect this itself, but recovering from it is also hard. I was once asked a similar question in an interview, so it seems to be a fairly universal problem.

In earlier experiments a brute-force traversal match could be used, inspecting the positions with low matching scores, but it takes far too long. If each position could instead be associated with a quantized feature, both the time cost and the CPU load during the auto-localization phase would drop significantly.

So, after countless days and nights of procrastination, today we tackle the problem with a non-brute-force approach. The main idea is to mimic SIFT keypoints and descriptors: extract features from the variations of the point-cloud contour and, at the same time, compute a principal-component direction to obtain rotation invariance. With this approach we only need to traverse the map once, compute the descriptors, save them to a file, and at self-localization time simply read the file back and match.
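Since the code below simply recomputes the descriptors at start-up, here is a minimal sketch (not part of the original node) of how the precomputed descriptors could be written to and read back from a plain text file. saveDescriptors/loadDescriptors are hypothetical helpers; Descriptor and LineInfo are the structs defined in the header further down, and only the sampled pose plus the distance/angle pairs are actually needed by the matching step.

#include <fstream>
#include <string>
#include <vector>

//hypothetical helper: one line per sampled pose (x, y, number of lines, then distance/angle pairs)
void saveDescriptors(const std::string& path, const std::vector<Descriptor>& descriptors)
{
    std::ofstream out(path);
    for(const auto& d : descriptors)
    {
        out << d.pose.position.x << " " << d.pose.position.y << " " << d.line_info_vec_.size();
        for(const auto& l : d.line_info_vec_)
            out << " " << l.distance << " " << l.angle;
        out << "\n";
    }
}

//hypothetical helper: rebuild the descriptor vector from the same text format
std::vector<Descriptor> loadDescriptors(const std::string& path)
{
    std::vector<Descriptor> descriptors;
    std::ifstream in(path);
    double x, y;
    size_t n;
    while(in >> x >> y >> n)
    {
        Descriptor d;
        d.pose.position.x = x;
        d.pose.position.y = y;
        d.pose.orientation.w = 1.0;//samples are stored with the identity orientation
        d.line_info_vec_.resize(n);
        for(auto& l : d.line_info_vec_)
            in >> l.distance >> l.angle;//only distance and angle are used when matching
        descriptors.push_back(d);
    }
    return descriptors;
}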


Testing shows that the higher-scoring self-localization candidates cluster around the true position.

The header file of the test code is as follows:

#ifndef AUTO_LOCATION_H
#define AUTO_LOCATION_H

#include "ros/ros.h"
#include "geometry_msgs/PoseWithCovarianceStamped.h"
#include "geometry_msgs/PoseArray.h"
#include "geometry_msgs/PoseStamped.h"
#include "sensor_msgs/PointCloud.h"

#include "opencv2/opencv.hpp"

#include <pcl/ModelCoefficients.h>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/sample_consensus/method_types.h>   
#include <pcl/sample_consensus/model_types.h>   
#include <pcl/segmentation/sac_segmentation.h>   
#include <pcl/filters/extract_indices.h>
#include <pcl/common/transforms.h>

#include <queue>

typedef pcl::PointXYZ PointType;
typedef pcl::PointCloud<PointType> PointCloud;
typedef pcl::PointCloud<PointType>::Ptr PointPtr;

struct LineInfo
{
    float A, B, C = 1.0;//parameters of the line equation Ax + By + C = 0 (C is fixed to 1.0)
    float vec_x, vec_y;//vector from the sample point to the foot of the perpendicular on this line
    float distance;//distance from the sample point to this line
    float angle = 0.0;//angle between this vector and the principal-component direction (for rotation invariance)

    //priority queue: the line farthest from the sample point becomes the principal component
    bool operator< (const LineInfo& other) const
    {
        return (distance<other.distance);
    }
};

struct Descriptor
{
    geometry_msgs::Pose pose;

    std::vector<LineInfo> line_info_vec_;

    //check whether this descriptor is similar to another one
    //this could probably be solved with gradient descent; for now a crude check is used
    bool similar(const Descriptor& other)
    {
        int err = 0;
        for(auto& info:line_info_vec_)
        {
            bool found = false;
            for(auto& other_info:other.line_info_vec_)
            {
                bool c1 = (fabs(info.angle-other_info.angle)<2.0);
                bool c2 = (fabs(info.distance-other_info.distance)<0.1);
                if(c1 && c2)
                {
                    found = true;
                    break;
                }
            }

            if(!found)
            {
                err += 1;
            }
        }

        if(err<2)
            return true;
        else
            return false;
    }
};

namespace auto_location
{
class AutoLocation
{
public:
    AutoLocation(ros::NodeHandle&);
private:
    void getMap();
    void restoreDescriptor();

    std::vector<Descriptor> map_descriptor_vec_;

private:
    ros::Publisher potential_pos_pub_;
    geometry_msgs::PoseArray potential_pos_;

    ros::Publisher actual_pose_pub_;
    geometry_msgs::PoseStamped actual_pose_;

    ros::Publisher fake_laser_pub_;
    sensor_msgs::PointCloud fake_laser_cloud_;

    PointPtr laser_cloud_ptr_, line_cloud_ptr_;

    ros::Publisher map_pub_;
    sensor_msgs::PointCloud map_cloud_;

    ros::Subscriber initial_pose_sub_;
    void initialPoseCb(const geometry_msgs::PoseWithCovarianceStamped::ConstPtr&);

private:
    void getFakeLaser(geometry_msgs::Pose);
    Descriptor parseLaserCloud();
    void computeDescriptor();
    LineInfo computeLine(pcl::PointIndices::Ptr&);
    float computeAngle(LineInfo&, LineInfo&);
    void getProperOrientation(Descriptor&, const Descriptor&, geometry_msgs::Pose&);
private:
    cv::Mat map_img_;
    std::string map_path_ = "/home/ruoyu/location_ws/src/auto_location/maps/map.png";

    float scale_ = 0.02;
};
}/* end of namespace */
#endif

The source file is as follows:

#include "auto_location.h"

using namespace std;

namespace auto_location
{
AutoLocation::AutoLocation(ros::NodeHandle& nh)
{
    map_pub_ = nh.advertise<sensor_msgs::PointCloud>("/map_cloud", 1, true);
    fake_laser_pub_ = nh.advertise<sensor_msgs::PointCloud>("/fake_laser", 1, true);
    potential_pos_pub_ = nh.advertise<geometry_msgs::PoseArray>("/potential_pos", 1, true);
    actual_pose_pub_ = nh.advertise<geometry_msgs::PoseStamped>("/actual_pose", 1, true);

    initial_pose_sub_ = nh.subscribe<geometry_msgs::PoseWithCovarianceStamped>("/initialpose", 1, &AutoLocation::initialPoseCb, this);

    laser_cloud_ptr_.reset(new PointCloud);
    line_cloud_ptr_.reset(new PointCloud);

    getMap();
    restoreDescriptor();

    cout<<"auto location initialized"<<endl;
}

void AutoLocation::getMap()
{
    map_img_ = cv::imread(map_path_);

    cv::cvtColor(map_img_, map_img_, cv::COLOR_BGR2GRAY);

    cout<<map_img_.size()<<endl;

    geometry_msgs::Point32 p;
    
    for(int u = 0; u < map_img_.cols; ++u)
    {
        for(int v = 0; v < map_img_.rows; ++v)
        {
            int val = map_img_.at<unsigned char>(v,u);

            if(val==0)
            {
                p.x = u*scale_;
                p.y = v*scale_;
                p.z = 0.0;
                map_cloud_.points.push_back(p);
            }
        }
    }

    map_cloud_.header.frame_id = "map";
    fake_laser_cloud_.header.frame_id = "map";
    actual_pose_.header.frame_id = "map";
    potential_pos_.header.frame_id = "map";

    map_pub_.publish(map_cloud_);
}

void AutoLocation::restoreDescriptor()
{
    //sample over the entire map and store all the descriptors
    
    float sample_step = 0.1;

    for(float x = 0.6; x < 9.4; x+=sample_step)
    {
        for(float y = 0.6; y < 6.4; y+=sample_step)
        {
            geometry_msgs::Pose pose;
            pose.position.x = x;
            pose.position.y = y;
            pose.orientation.w = 1;//the pose used while traversing the map; the quaternion is the identity (initial) orientation

            getFakeLaser(pose);

            Descriptor d = parseLaserCloud();
            d.pose = pose;

            map_descriptor_vec_.push_back(d);
        }
    }
}

void AutoLocation::initialPoseCb(const geometry_msgs::PoseWithCovarianceStamped::ConstPtr& msg)
{
    actual_pose_.pose = msg->pose.pose;
    actual_pose_pub_.publish(actual_pose_);

    getFakeLaser(msg->pose.pose);

    Descriptor curr_descriptor = parseLaserCloud();

    potential_pos_.poses.clear();
    
    if(!curr_descriptor.line_info_vec_.empty())
    {
        for(const auto& d:map_descriptor_vec_)
        {
            if(curr_descriptor.similar(d))
            {
                geometry_msgs::Pose pose = d.pose;

                //recover the orientation from the principal-component direction angle
                getProperOrientation(curr_descriptor, d, pose);

                potential_pos_.poses.push_back(pose);
            }
        }
    }

    cout<<"potential_pos_.poses:"<<potential_pos_.poses.size()<<endl;

    potential_pos_pub_.publish(potential_pos_);
}

void AutoLocation::getProperOrientation(Descriptor& curr, const Descriptor& pot, geometry_msgs::Pose& pose)
{
    //recover the quaternion of the current pose from the principal-component direction
    //of the candidate position and that of the current point cloud
    auto& curr_principal = curr.line_info_vec_[0];
    LineInfo pot_principal = pot.line_info_vec_[0];//copy, so that computeAngle does not modify the stored map descriptor

    float angle = computeAngle(curr_principal, pot_principal)/180.0*M_PI;

    pose.orientation.z = sin(angle*0.5);
    pose.orientation.w = cos(angle*0.5);
}

Descriptor AutoLocation::parseLaserCloud()
{
    Descriptor descriptor;

    int line_thresh = 5;

    //extract line features from the point cloud one by one and compute the parameters of each line
    std::priority_queue<LineInfo> line_distance_queue;

    cout<<"linear points size:";
    while(laser_cloud_ptr_->size()>line_thresh)
    {
        pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients);
        pcl::PointIndices::Ptr inliers(new pcl::PointIndices);
        pcl::SACSegmentation<pcl::PointXYZ> seg;
        seg.setOptimizeCoefficients(true);
        seg.setModelType(pcl::SACMODEL_LINE);
        seg.setMethodType(pcl::SAC_RANSAC);
        seg.setDistanceThreshold(0.1);
        seg.setInputCloud(laser_cloud_ptr_);
        seg.segment(*inliers, *coefficients);

        cout<<inliers->indices.size()<<" ";

        if(inliers->indices.size()<line_thresh)
            break;

        //compute the equation of this line, Ax + By + C = 0
        LineInfo this_info = computeLine(inliers);
        line_distance_queue.push(this_info);

        pcl::ExtractIndices<PointType> extract;
        extract.setInputCloud(laser_cloud_ptr_);
        extract.setIndices(inliers);
        extract.setNegative(true);
        extract.filter(*laser_cloud_ptr_);
    }
    cout<<endl;

    //the line farthest from the sample point is taken as the principal component
    if(line_distance_queue.empty())
        return descriptor;

    auto principal_component = line_distance_queue.top();
    line_distance_queue.pop();
    descriptor.line_info_vec_.push_back(principal_component);//the principal direction defines the zero angle

    while(!line_distance_queue.empty())
    {
        auto this_line = line_distance_queue.top();
        line_distance_queue.pop();
        
        computeAngle(principal_component, this_line);
        descriptor.line_info_vec_.push_back(this_line);
    }

    for(auto info:descriptor.line_info_vec_)
    {
        cout<<info.distance<<","<<info.angle<<endl;
    }

    return descriptor;
}

float AutoLocation::computeAngle(LineInfo& ori, LineInfo& curr)
{
    //take the principal-component direction as the reference and compute the angle between the other vector and it
    float dot = (ori.vec_x*curr.vec_x+ori.vec_y*curr.vec_y);
    float norm_product = sqrt(ori.vec_x*ori.vec_x+ori.vec_y*ori.vec_y)*sqrt(curr.vec_x*curr.vec_x+curr.vec_y*curr.vec_y);
    curr.angle = 180.0/M_PI*acos(dot/norm_product);

    //use the cross product to decide whether the rotation is clockwise or counter-clockwise (sign of the angle)
    float cross = ori.vec_x*curr.vec_y-ori.vec_y*curr.vec_x;
    if(cross < 0)
    {
        curr.angle = 360.0-curr.angle;
    }

    return curr.angle;
}

LineInfo AutoLocation::computeLine(pcl::PointIndices::Ptr& indice)
{
    LineInfo info;
    
    auto sqDis = [&](int i, int j) -> float
    {
        auto& p1 = laser_cloud_ptr_->points[indice->indices[i]];
        auto& p2 = laser_cloud_ptr_->points[indice->indices[j]];
    
        return ((p1.x-p2.x)*(p1.x-p2.x)+(p1.y-p2.y)*(p1.y-p2.y));
    };

    int ind1, ind2;

    if(indice->indices.size()==2)
    {
        ind1 = 0;
        ind2 = 1;
    }
    else
    {
        //find the two inlier points of the fitted segment that are farthest apart
        float max_dis;
        ind1 = 0;
        ind2 = 1;
        max_dis = sqDis(ind1, ind2);
        for(size_t i = 2; i < indice->indices.size(); ++i)
        {
            float dis1 = sqDis(i, ind1);
            float dis2 = sqDis(i, ind2);

            if(dis1>dis2 && dis1>max_dis)
            {
                ind2 = i;
                max_dis = dis1;
            }
            else if(dis1<dis2 && dis2>max_dis)
            {
                ind1 = i;
                max_dis = dis2;
            }
        }
    }
    
    auto& p1 = laser_cloud_ptr_->points[indice->indices[ind1]];
    auto& p2 = laser_cloud_ptr_->points[indice->indices[ind2]];

    float x1 = p1.x;
    float x2 = p2.x;
    float y1 = p1.y;
    float y2 = p2.y;

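    //with C fixed to 1.0, A and B are solved from A*x1 + B*y1 + 1 = 0 and A*x2 + B*y2 + 1 = 0;
    //distance is then the perpendicular distance from the scan origin to the line,
    //and (vec_x, vec_y) is the vector from the origin to the foot of the perpendicular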
    info.B = (1.0-x1/x2)/(x1*y2/x2-y1);
    info.A = -(1.0+info.B*y1)/x1;

    info.distance = 1.0/sqrt(info.A*info.A+info.B*info.B);

    info.vec_x = -info.A/(info.A*info.A+info.B*info.B);
    info.vec_y = -info.B/(info.A*info.A+info.B*info.B);

    return info;
}

void AutoLocation::getFakeLaser(geometry_msgs::Pose pose)
{
    float center_x = pose.position.x;
    float center_y = pose.position.y;

    int pixel_x = center_x/scale_;
    int pixel_y = center_y/scale_;

    float dx, dy;

    fake_laser_cloud_.points.clear();
    laser_cloud_ptr_->clear();

    PointType p;

    //ray-cast from the sample position through the grid map to generate a fake laser scan
    for(int i = 0; i < 720; ++i)
    {
        float theta = i*0.5;

        if(theta == 90.0)
        {
            dx = 0.0;
            dy = 1.0;
        }
        else if(theta == 270.0)
        {
            dx = 0.0;
            dy = -1.0;
        }
        else
        {
            dx = cos(M_PI*theta/180.0);
            dy = dx*tan(M_PI*theta/180.0);
        }

        float x = pixel_x;
        float y = pixel_y;
        bool no_conflict = true;
        while(no_conflict)
        {
            x += dx;
            y += dy;

            if(x<0 || y<0 || x>=map_img_.cols || y>=map_img_.rows)
            {
                no_conflict = false;//the ray left the map without hitting an obstacle
            }
            else if((int)map_img_.at<unsigned char>((int)y,(int)x)==0)
            {
                no_conflict = false;

                p.x = x*scale_;
                p.y = y*scale_;
                p.z = 0.0;
                laser_cloud_ptr_->points.push_back(p);
            }
        }
    }

    //transform the scan back to the initial pose (the origin of the scan frame)
    auto r_q = pose.orientation;
    auto r_p = pose.position;
    Eigen::Quaternionf q(r_q.w, r_q.x, r_q.y, r_q.z);
    Eigen::Matrix4f pose_m = Eigen::Matrix4f::Identity();
    pose_m.block<3,3>(0,0) = q.matrix();
    pose_m(0,3) = r_p.x;
    pose_m(1,3) = r_p.y;
    pose_m(2,3) = r_p.z;

    Eigen::Matrix4f inv_m = pose_m.inverse();

    PointCloud cloud;
    pcl::transformPointCloud(*laser_cloud_ptr_, cloud, inv_m);
    
    *laser_cloud_ptr_ = cloud;

    geometry_msgs::Point32 g_p;
    for(size_t i = 0; i < cloud.points.size(); ++i)
    {
        g_p.x = cloud.points[i].x;
        g_p.y = cloud.points[i].y;
        g_p.z = cloud.points[i].z;

        fake_laser_cloud_.points.push_back(g_p);
    }
    
    fake_laser_pub_.publish(fake_laser_cloud_);
}

}/* end of namespace */

int main(int argc, char** argv)
{
    ros::init(argc, argv, "auto_location");
    ros::NodeHandle nh("~");

    auto_location::AutoLocation location(nh);

    ros::spin();
    return 0;
}

Here the point cloud sampled at each position is decomposed into a set of lines, and the normal vector of the farthest line is taken as the principal-component direction; the vectors from the origin to the other lines are then expressed relative to that principal direction. There may well be better rules for choosing the principal component, for example the longest line or the line with the most inlier points, as sketched below.
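For example, a hypothetical variant of LineInfo that prefers the longest fitted segment could look like the following; it assumes an extra length member that computeLine() would fill with the distance between the two endpoints it already finds (the square root of sqDis(ind1, ind2)):

struct LineInfo
{
    float A, B, C = 1.0;//parameters of the line equation Ax + By + C = 0 (C is fixed to 1.0)
    float vec_x, vec_y;//vector from the sample point to the foot of the perpendicular on this line
    float distance;//distance from the sample point to this line
    float angle = 0.0;//angle relative to the principal-component direction
    float length = 0.0;//hypothetical extra field: length of the fitted segment, set in computeLine()

    //std::priority_queue is a max-heap, so the longest segment ends up on top
    bool operator< (const LineInfo& other) const
    {
        return (length < other.length);
    }
};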
Also, the descriptor matching rule still needs improvement (one possible direction is sketched after this paragraph). My tests were all run in a fully structured environment, so the simulated laser points were ideal; with real sensor data the performance will certainly degrade. Consider this a starting point, to be refined in a later iteration~
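One possible refinement of the matching rule (just a sketch, not what the node above does) is to replace the binary similar() test with a matching cost over the same distance/angle pairs and keep only the lowest-cost candidates. matchCost() below is a hypothetical helper and the weighting between metres and degrees is ad hoc:

#include <algorithm>
#include <cmath>

//hypothetical helper: the smaller the returned cost, the more similar the two descriptors
float matchCost(const Descriptor& a, const Descriptor& b)
{
    float cost = 0.0f;
    for(const auto& la : a.line_info_vec_)
    {
        float best = 1e9f;
        for(const auto& lb : b.line_info_vec_)
        {
            //nearest line of b in distance/angle space; the 0.05 weight trades one degree against 5 cm
            float c = std::fabs(la.distance - lb.distance) + 0.05f*std::fabs(la.angle - lb.angle);
            best = std::min(best, c);
        }
        cost += best;
    }
    return cost;
}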
