The code read in this article is the latest master branch downloaded from GitHub on November 1, 2020.
If the code has been updated since then, please refer to GitHub.

1 CMakeLists.txt and package.xml

1.1 package.xml

Dependencies in package.xml: besides the usual ROS dependencies, there are cv_bridge, pcl_conversions, and GTSAM.

1.2 CMakeLists.txt

CMakeLists.txt also specifies additional dependencies:

find_package(OpenMP REQUIRED)
find_package(PCL REQUIRED QUIET)
find_package(OpenCV REQUIRED QUIET)
find_package(GTSAM REQUIRED QUIET)
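
A rough sketch of how these dependencies are typically consumed further down in a catkin CMakeLists.txt (the target names and flags here are illustrative, not copied from the repository):

include_directories(
  ${catkin_INCLUDE_DIRS}
  ${PCL_INCLUDE_DIRS}
  ${OpenCV_INCLUDE_DIRS}
  ${GTSAM_INCLUDE_DIR}
)

# each node links against the libraries found above; OpenMP is enabled through its compiler flag
add_executable(${PROJECT_NAME}_imageProjection src/imageProjection.cpp)
target_compile_options(${PROJECT_NAME}_imageProjection PRIVATE ${OpenMP_CXX_FLAGS})
target_link_libraries(${PROJECT_NAME}_imageProjection
  ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam)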

There is no install section.

A custom message is also generated; its contents are as follows:

# Cloud Info
Header header 

int32[] startRingIndex
int32[] endRingIndex

int32[]  pointColInd # point column index in range image
float32[] pointRange # point range 

int64 imuAvailable
int64 odomAvailable

# Attitude for LOAM initialization
float32 imuRollInit
float32 imuPitchInit
float32 imuYawInit

# Initial guess from imu pre-integration
float32 initialGuessX
float32 initialGuessY
float32 initialGuessZ
float32 initialGuessRoll
float32 initialGuessPitch
float32 initialGuessYaw

# Point cloud messages
sensor_msgs/PointCloud2 cloud_deskewed  # original cloud deskewed
sensor_msgs/PointCloud2 cloud_corner    # extracted corner features
sensor_msgs/PointCloud2 cloud_surface   # extracted surface features
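
For cloud_info.msg to be built, CMakeLists.txt also has to declare it to the catkin message generation machinery; a minimal sketch (the dependency list here is an assumption, check the actual file) looks like:

add_message_files(
  DIRECTORY msg
  FILES
  cloud_info.msg
)

generate_messages(
  DEPENDENCIES
  std_msgs
  sensor_msgs
)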

2 include/utility.h

The content of this file is simple and can be split into 3 parts.
Part 1: the various #include directives.
Part 2: the definition of a class named ParamServer.

  • Its member variables are all of the parameters in config/params.yaml.
  • It has only 2 member functions:
    – the constructor: reads all parameters from the ROS parameter server and assigns them to the member variables;
    – imuConverter(): transforms a ROS IMU message according to the IMU extrinsics (a sketch follows below).

Part 3: free functions that parse ROS IMU messages, a function that publishes a ROS topic, and 2 functions that compute point distances.
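
A rough sketch of what imuConverter() does (extRot and extQRPY are the extrinsic rotations read from the config; this is an illustration of the idea, not a copy of the source):

sensor_msgs::Imu imuConverter(const sensor_msgs::Imu& imu_in)
{
    sensor_msgs::Imu imu_out = imu_in;

    // rotate the linear acceleration into the lidar frame
    Eigen::Vector3d acc(imu_in.linear_acceleration.x, imu_in.linear_acceleration.y, imu_in.linear_acceleration.z);
    acc = extRot * acc;
    imu_out.linear_acceleration.x = acc.x();
    imu_out.linear_acceleration.y = acc.y();
    imu_out.linear_acceleration.z = acc.z();

    // rotate the angular velocity into the lidar frame
    Eigen::Vector3d gyr(imu_in.angular_velocity.x, imu_in.angular_velocity.y, imu_in.angular_velocity.z);
    gyr = extRot * gyr;
    imu_out.angular_velocity.x = gyr.x();
    imu_out.angular_velocity.y = gyr.y();
    imu_out.angular_velocity.z = gyr.z();

    // rotate the orientation quaternion as well
    Eigen::Quaterniond q(imu_in.orientation.w, imu_in.orientation.x, imu_in.orientation.y, imu_in.orientation.z);
    q = q * extQRPY;
    imu_out.orientation.w = q.w();
    imu_out.orientation.x = q.x();
    imu_out.orientation.y = q.y();
    imu_out.orientation.z = q.z();

    return imu_out;
}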

3 src/imageProjection.cpp

3.1 Two point types are declared first

// Velodyne
struct PointXYZIRT
{
    PCL_ADD_POINT4D
    PCL_ADD_INTENSITY;
    uint16_t ring; // which scan line (ring) the point lies on
    float time;
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW // keep the struct aligned for Eigen
} EIGEN_ALIGN16;

// Register PointXYZIRT with PCL; its fields are x, y, z, intensity, ring, time
POINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRT,
    (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
    (uint16_t, ring, ring) (float, time, time)
)

// The Ouster point type; since it reuses the name PointXYZIRT, only one of the two definitions can be enabled at a time
struct PointXYZIRT {
    PCL_ADD_POINT4D;
    float intensity;
    uint32_t t;
    uint16_t reflectivity;
    uint8_t ring;
    uint16_t noise;
    uint32_t range;
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
}EIGEN_ALIGN16;

POINT_CLOUD_REGISTER_POINT_STRUCT(PointXYZIRT,
    (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
    (uint32_t, t, t) (uint16_t, reflectivity, reflectivity)
    (uint8_t, ring, ring) (uint16_t, noise, noise) (uint32_t, range, range)
)

3.2 main()

As can be seen, 3 threads are used. These threads are managed by ROS itself and scheduled flexibly: the same callback function may be running on 2 threads at the same time (for different messages); it is not one dedicated thread per callback.

int main(int argc, char** argv)
{
    ros::init(argc, argv, "lio_sam");

    ImageProjection IP;
    
    ROS_INFO("\033[1;32m----> Image Projection Started.\033[0m");

    ros::MultiThreadedSpinner spinner(3);
    spinner.spin();
    
    return 0;
}

3.3 The ImageProjection class

Its member variables are:

// length of the IMU buffers below
const int queueLength = 2000;

class ImageProjection : public ParamServer
{
private:

    std::mutex imuLock;
    std::mutex odoLock;

    ros::Subscriber subLaserCloud;
    ros::Publisher  pubLaserCloud;
    
    ros::Publisher pubExtractedCloud;
    ros::Publisher pubLaserCloudInfo;

    // stores IMU data
    ros::Subscriber subImu;
    std::deque<sensor_msgs::Imu> imuQueue;

    // stores odometry data
    ros::Subscriber subOdom;
    std::deque<nav_msgs::Odometry> odomQueue;

    // stores lidar point clouds
    std::deque<sensor_msgs::PointCloud2> cloudQueue;
    sensor_msgs::PointCloud2 currentCloudMsg;
    
    double *imuTime = new double[queueLength];
    double *imuRotX = new double[queueLength];
    double *imuRotY = new double[queueLength];
    double *imuRotZ = new double[queueLength];

    int imuPointerCur;
    bool firstPointFlag;
    Eigen::Affine3f transStartInverse;

    pcl::PointCloud<PointXYZIRT>::Ptr laserCloudIn;
    pcl::PointCloud<PointType>::Ptr   fullCloud;    // all points of one lidar scan
    pcl::PointCloud<PointType>::Ptr   extractedCloud;

    int deskewFlag;
    cv::Mat rangeMat;

    bool odomDeskewFlag;
    float odomIncreX;
    float odomIncreY;
    float odomIncreZ;

    // the custom message type defined above
    lio_sam::cloud_info cloudInfo;

    double timeScanCur;
    double timeScanEnd;

    std_msgs::Header cloudHeader;
 };

4 Member functions of the ImageProjection class

4.1 Constructor

    ImageProjection() : deskewFlag(0)
    {
        // Subscribe to the topics; the transport hints tell roscpp to use TCP with no-delay (Nagle's algorithm disabled)
        subImu        = nh.subscribe<sensor_msgs::Imu>(imuTopic, 2000, &ImageProjection::imuHandler, this, ros::TransportHints().tcpNoDelay());
        subOdom       = nh.subscribe<nav_msgs::Odometry>(odomTopic+"_incremental", 2000, &ImageProjection::odometryHandler, this, ros::TransportHints().tcpNoDelay());
        subLaserCloud = nh.subscribe<sensor_msgs::PointCloud2>(pointCloudTopic, 5, &ImageProjection::cloudHandler, this, ros::TransportHints().tcpNoDelay());

        pubExtractedCloud = nh.advertise<sensor_msgs::PointCloud2> ("lio_sam/deskew/cloud_deskewed", 1);
        pubLaserCloudInfo = nh.advertise<lio_sam::cloud_info> ("lio_sam/deskew/cloud_info", 1);

        allocateMemory();       // allocate memory for the point cloud pointers
        resetParameters();      // initialize / reset the remaining parameters

        // let PCL print only error messages on the console
        pcl::console::setVerbosityLevel(pcl::console::L_ERROR);
    }

    // allocate memory for the 3 point cloud pointers
    void allocateMemory()
    {
        // presumably because these 3 (shared) pointers live for the whole lifetime of the node, they are never explicitly released...
        laserCloudIn.reset(new pcl::PointCloud<PointXYZIRT>());
        fullCloud.reset(new pcl::PointCloud<PointType>());
        extractedCloud.reset(new pcl::PointCloud<PointType>());

        fullCloud->points.resize(N_SCAN*Horizon_SCAN);

        cloudInfo.startRingIndex.assign(N_SCAN, 0);
        cloudInfo.endRingIndex.assign(N_SCAN, 0);

        cloudInfo.pointColInd.assign(N_SCAN*Horizon_SCAN, 0);
        cloudInfo.pointRange.assign(N_SCAN*Horizon_SCAN, 0);

        // note: resetParameters() is therefore called twice during construction (here and again in the constructor body)
        resetParameters();
    }

    // initialize / reset the remaining parameters
    void resetParameters()
    {
        laserCloudIn->clear();
        extractedCloud->clear();

        // reset range matrix for range image projection
        rangeMat = cv::Mat(N_SCAN, Horizon_SCAN, CV_32F, cv::Scalar::all(FLT_MAX));

        imuPointerCur = 0;
        firstPointFlag = true;
        odomDeskewFlag = false;

        for (int i = 0; i < queueLength; ++i)
        {
            imuTime[i] = 0;
            imuRotX[i] = 0;
            imuRotY[i] = 0;
            imuRotZ[i] = 0;
        }
    }

4.2 IMU and odometry callbacks

    // IMU callback: first transform the measurement into the lidar frame, then store it
    void imuHandler(const sensor_msgs::Imu::ConstPtr& imuMsg)
    {
        // transform the IMU measurement into the lidar frame using the extrinsics (imuConverter)
        sensor_msgs::Imu thisImu = imuConverter(*imuMsg);

        std::lock_guard<std::mutex> lock1(imuLock);
        imuQueue.push_back(thisImu);    // store the data
    }

    // odometry callback: only stores the data
    void odometryHandler(const nav_msgs::Odometry::ConstPtr& odometryMsg)
    {
        std::lock_guard<std::mutex> lock2(odoLock);
        odomQueue.push_back(*odometryMsg);
    }

4.3 Lidar point cloud callback

    // lidar point cloud callback
    void cloudHandler(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)
    {
        // First check whether more than 2 clouds are queued.
        // Then convert the ROS cloud into the custom PCL point type, get the start and end time of the scan, and check for NaNs.
        // When the third cloud arrives, the fields of the first cloud are checked for the ring and time channels; this check is done only once.
        if (!cachePointCloud(laserCloudMsg))
            return;

        // Compute the rotation and translation over the duration of this scan:
        // the rotation is obtained by integrating the IMU angular velocities, the translation from the odometry
        if (!deskewInfo())
            return;

        // project the deskewed cloud into a range image and store the points in fullCloud
        projectPointCloud();

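        // extract the valid points from fullCloud into extractedCloud; their ring start/end indices, column indices and ranges are recorded in cloudInfo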
        cloudExtraction();

        // publish cloud_deskewed and cloudInfo
        publishClouds();

        // re-initialize / reset the parameters for the next scan
        resetParameters();
    }

5 Functions called from cloudHandler()

5.1 cachePointCloud()

    // First check whether more than 2 clouds are queued.
    // Then convert the ROS cloud into the custom PCL point type, get the start and end time of the scan, and check for NaNs.
    // When the third cloud arrives, the fields of the first cloud are checked for the ring and time channels; this check is done only once.
    bool cachePointCloud(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)
    {
        // cache point cloud
        cloudQueue.push_back(*laserCloudMsg);

        // with 2 or fewer clouds in the queue, do not process anything yet
        if (cloudQueue.size() <= 2)
            return false;

        // convert cloud
        currentCloudMsg = cloudQueue.front();
        cloudQueue.pop_front();

        // convert the ROS cloud into the custom PCL point type
        pcl::fromROSMsg(currentCloudMsg, *laserCloudIn);

        // get timestamp
        cloudHeader = currentCloudMsg.header;
        timeScanCur = cloudHeader.stamp.toSec(); // the header stamp of the ROS message is taken as the start time of this scan
        timeScanEnd = timeScanCur + laserCloudIn->points.back().time; // Velodyne
        // timeScanEnd = timeScanCur + (float)laserCloudIn->points.back().t / 1000000000.0; // Ouster stores the per-point time in nanoseconds, unlike Velodyne

        // check dense flag: the cloud must not contain NaN points
        if (laserCloudIn->is_dense == false)
        {
            ROS_ERROR("Point cloud is not in dense format, please remove NaN points first!");
            ros::shutdown();
        }

        // check ring channel: make sure the ring field exists
        static int ringFlag = 0;
        if (ringFlag == 0)
        {
            ringFlag = -1;
            for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i)
            {
                if (currentCloudMsg.fields[i].name == "ring")
                {
                    ringFlag = 1;
                    break;
                }
            }
            if (ringFlag == -1)
            {
                ROS_ERROR("Point cloud ring channel not available, please configure your point cloud data!");
                ros::shutdown();
            }
        }   

        // check point time: make sure the time field exists
        if (deskewFlag == 0)
        {
            deskewFlag = -1;
            for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i)
            {
                if (currentCloudMsg.fields[i].name == timeField)
                {
                    deskewFlag = 1;
                    break;
                }
            }
            if (deskewFlag == -1)
                ROS_WARN("Point cloud timestamp not available, deskew function disabled, system will drift significantly!");
        }

        return true;
    }
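
If the driver publishes clouds that are not dense, the error above can be avoided by stripping NaN points before this node. A minimal sketch using PCL (where exactly this is done is up to you):

#include <pcl/filters/filter.h>   // pcl::removeNaNFromPointCloud

std::vector<int> indices;   // mapping from output to input indices (unused here)
pcl::removeNaNFromPointCloud(*laserCloudIn, *laserCloudIn, indices);   // also marks the cloud as dense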

5.2 deskewInfo()

    // Compute the rotation and translation over the duration of this scan:
    // the rotation is obtained by integrating the IMU angular velocities, the translation from the odometry
    bool deskewInfo()
    {
        std::lock_guard<std::mutex> lock1(imuLock);
        std::lock_guard<std::mutex> lock2(odoLock);

        // make sure IMU data available for the scan
        // the IMU queue must bracket the scan: its first message no later than the scan start, its last no earlier than the scan end
        if (imuQueue.empty() || 
            imuQueue.front().header.stamp.toSec() > timeScanCur || 
            imuQueue.back().header.stamp.toSec() < timeScanEnd)
        {
            ROS_DEBUG("Waiting for IMU data ...");
            return false;
        }

        imuDeskewInfo();

        odomDeskewInfo();

        return true;
    }

    // Trim the IMU queue until its messages fall within (just before) this scan's time span.
    // Then integrate the remaining IMU messages: angular velocity * time increment gives the accumulated rotation.
    void imuDeskewInfo()
    {
        // mark the IMU-based info as unavailable until the integration below succeeds
        cloudInfo.imuAvailable = false;

        // trim the IMU queue: drop messages more than 0.01 s older than the scan start
        while (!imuQueue.empty())
        {
            if (imuQueue.front().header.stamp.toSec() < timeScanCur - 0.01)
                imuQueue.pop_front();
            else
                break;
        }

        if (imuQueue.empty())
            return;

        imuPointerCur = 0;

        for (int i = 0; i < (int)imuQueue.size(); ++i)
        {
            sensor_msgs::Imu thisImuMsg = imuQueue[i];
            double currentImuTime = thisImuMsg.header.stamp.toSec();

            // get roll, pitch, and yaw estimation for this scan
            // while the IMU time is still before the scan start, keep updating the scan's initial roll/pitch/yaw from the IMU orientation
            if (currentImuTime <= timeScanCur)
                imuRPY2rosRPY(&thisImuMsg, &cloudInfo.imuRollInit, &cloudInfo.imuPitchInit, &cloudInfo.imuYawInit);

            if (currentImuTime > timeScanEnd + 0.01)
                break;

            if (imuPointerCur == 0){
                imuRotX[0] = 0;
                imuRotY[0] = 0;
                imuRotZ[0] = 0;
                imuTime[0] = currentImuTime;
                ++imuPointerCur;
                continue;
            }

            // get angular velocity
            double angular_x, angular_y, angular_z;
            imuAngular2rosAngular(&thisImuMsg, &angular_x, &angular_y, &angular_z);

            // integrate rotation
            // integrate the IMU angular velocity: angular velocity * (current IMU time - previous IMU time)
            double timeDiff = currentImuTime - imuTime[imuPointerCur-1];
            imuRotX[imuPointerCur] = imuRotX[imuPointerCur-1] + angular_x * timeDiff;
            imuRotY[imuPointerCur] = imuRotY[imuPointerCur-1] + angular_y * timeDiff;
            imuRotZ[imuPointerCur] = imuRotZ[imuPointerCur-1] + angular_z * timeDiff;
            imuTime[imuPointerCur] = currentImuTime;
            ++imuPointerCur;
        }

        --imuPointerCur;

        if (imuPointerCur <= 0)
            return;

        cloudInfo.imuAvailable = true;
    }

    // Find the odometry messages at the start and end of this scan; the one at the start is stored in cloudInfo as the initial guess.
    // Also compute the translation increment between the start and end odometry.
    void odomDeskewInfo()
    {
        cloudInfo.odomAvailable = false;

        // trim the odom queue: drop messages more than 0.01 s older than the scan start
        while (!odomQueue.empty())
        {
            if (odomQueue.front().header.stamp.toSec() < timeScanCur - 0.01)
                odomQueue.pop_front();
            else
                break;
        }

        if (odomQueue.empty())
            return;

        if (odomQueue.front().header.stamp.toSec() > timeScanCur)
            return;

        // get start odometry at the beginning of the scan
        nav_msgs::Odometry startOdomMsg;

        // find the first odometry message whose time is >= the scan start time
        for (int i = 0; i < (int)odomQueue.size(); ++i)
        {
            startOdomMsg = odomQueue[i];

            if (ROS_TIME(&startOdomMsg) < timeScanCur)
                continue;
            else
                break;
        }

        tf::Quaternion orientation;
        tf::quaternionMsgToTF(startOdomMsg.pose.pose.orientation, orientation);

        double roll, pitch, yaw;
        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);

        // Initial guess used in mapOptimization
        cloudInfo.initialGuessX = startOdomMsg.pose.pose.position.x;
        cloudInfo.initialGuessY = startOdomMsg.pose.pose.position.y;
        cloudInfo.initialGuessZ = startOdomMsg.pose.pose.position.z;
        cloudInfo.initialGuessRoll  = roll;
        cloudInfo.initialGuessPitch = pitch;
        cloudInfo.initialGuessYaw   = yaw;

        cloudInfo.odomAvailable = true;

        // get end odometry at the end of the scan
        odomDeskewFlag = false;

        // if the last odometry message is earlier than the scan end, it cannot cover the whole scan
        if (odomQueue.back().header.stamp.toSec() < timeScanEnd)
            return;

        // find the first odometry message later than the scan end time
        nav_msgs::Odometry endOdomMsg;

        for (int i = 0; i < (int)odomQueue.size(); ++i)
        {
            endOdomMsg = odomQueue[i];

            if (ROS_TIME(&endOdomMsg) < timeScanEnd)
                continue;
            else
                break;
        }

        if (int(round(startOdomMsg.pose.covariance[0])) != int(round(endOdomMsg.pose.covariance[0])))
            return;

        Eigen::Affine3f transBegin = pcl::getTransformation(
            startOdomMsg.pose.pose.position.x, 
            startOdomMsg.pose.pose.position.y, 
            startOdomMsg.pose.pose.position.z, 
            roll, pitch, yaw);

        tf::quaternionMsgToTF(endOdomMsg.pose.pose.orientation, orientation);
        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
        Eigen::Affine3f transEnd = pcl::getTransformation(endOdomMsg.pose.pose.position.x, endOdomMsg.pose.pose.position.y, endOdomMsg.pose.pose.position.z, roll, pitch, yaw);

        Eigen::Affine3f transBt = transBegin.inverse() * transEnd;

        // extract odomIncreX/Y/Z from transBt: the odometry increment over this scan
        float rollIncre, pitchIncre, yawIncre;
        pcl::getTranslationAndEulerAngles(transBt, 
            odomIncreX, odomIncreY, odomIncreZ, 
            rollIncre, pitchIncre, yawIncre);

        odomDeskewFlag = true;
    }

5.3 projectPointCloud()

    // project the deskewed cloud into a range image and store the points in fullCloud
    void projectPointCloud()
    {
        int cloudSize = laserCloudIn->points.size();
        // range image projection
        for (int i = 0; i < cloudSize; ++i)
        {
            PointType thisPoint;
            thisPoint.x = laserCloudIn->points[i].x;
            thisPoint.y = laserCloudIn->points[i].y;
            thisPoint.z = laserCloudIn->points[i].z;
            thisPoint.intensity = laserCloudIn->points[i].intensity;

            // range of the current point, i.e. its value in the range image
            float range = pointDistance(thisPoint);
            if (range < lidarMinRange || range > lidarMaxRange)
                continue;

            // row index of the current point in the range image
            int rowIdn = laserCloudIn->points[i].ring;
            if (rowIdn < 0 || rowIdn >= N_SCAN)
                continue;

            if (rowIdn % downsampleRate != 0)
                continue;

            // horizontal angle of the point (deg)
            float horizonAngle = atan2(thisPoint.x, thisPoint.y) * 180 / M_PI;
            // horizontal angular resolution
            static float ang_res_x = 360.0/float(Horizon_SCAN);
            // column index of the current point in the range image
            int columnIdn = -round((horizonAngle-90.0)/ang_res_x) + Horizon_SCAN/2;
            if (columnIdn >= Horizon_SCAN)
                columnIdn -= Horizon_SCAN;

            if (columnIdn < 0 || columnIdn >= Horizon_SCAN)
                continue;

            if (rangeMat.at<float>(rowIdn, columnIdn) != FLT_MAX)
                continue;

            // deskew (motion-compensate) this point
            thisPoint = deskewPoint(&thisPoint, laserCloudIn->points[i].time); // Velodyne
            // thisPoint = deskewPoint(&thisPoint, (float)laserCloudIn->points[i].t / 1000000000.0); // Ouster

            rangeMat.at<float>(rowIdn, columnIdn) = range;

            // store the point in fullCloud
            int index = columnIdn + rowIdn * Horizon_SCAN;
            fullCloud->points[index] = thisPoint;
        }
    }
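
    // Worked example of the column index formula, assuming Horizon_SCAN = 1800 (0.2 deg per column):
    //   a point straight ahead on the +x axis has horizonAngle = atan2(x, 0) = 90 deg,
    //   so columnIdn = -round((90-90)/0.2) + 900 = 900, the middle column;
    //   a point on the +y axis (horizonAngle = 0 deg)   -> -round(-90/0.2) + 900 = 1350;
    //   a point on the -y axis (horizonAngle = 180 deg) -> -round(90/0.2)  + 900 = 450;
    //   a point on the -x axis (horizonAngle = -90 deg) -> 1800, folded back to column 0 by the wrap-around check.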

    // give each point the rotation interpolated at its timestamp
    void findRotation(double pointTime, float *rotXCur, float *rotYCur, float *rotZCur)
    {
        *rotXCur = 0; *rotYCur = 0; *rotZCur = 0;

        // find the first IMU sample whose time is later than pointTime
        int imuPointerFront = 0;
        while (imuPointerFront < imuPointerCur)
        {
            if (pointTime < imuTime[imuPointerFront])
                break;
            ++imuPointerFront;
        }

        // if the loop ran off the end (pointTime is after the last IMU sample) or never advanced, just use that sample's accumulated rotation
        if (pointTime > imuTime[imuPointerFront] || imuPointerFront == 0)
        {
            
            *rotXCur = imuRotX[imuPointerFront];
            *rotYCur = imuRotY[imuPointerFront];
            *rotZCur = imuRotZ[imuPointerFront];
        } else {
            int imuPointerBack = imuPointerFront - 1;

            // linear interpolation: where the point's timestamp falls between the two neighbouring IMU samples
            double ratioFront = (pointTime - imuTime[imuPointerBack]) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
            double ratioBack = (imuTime[imuPointerFront] - pointTime) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
            
            // blend the two accumulated rotations according to these ratios
            *rotXCur = imuRotX[imuPointerFront] * ratioFront + imuRotX[imuPointerBack] * ratioBack;
            *rotYCur = imuRotY[imuPointerFront] * ratioFront + imuRotY[imuPointerBack] * ratioBack;
            *rotZCur = imuRotZ[imuPointerFront] * ratioFront + imuRotZ[imuPointerBack] * ratioBack;
        }
    }

    // if the sensor moves slowly, positional deskewing brings little benefit; the body is commented out and zero translation is returned
    void findPosition(double relTime, float *posXCur, float *posYCur, float *posZCur)
    {
        *posXCur = 0; *posYCur = 0; *posZCur = 0;

        // If the sensor moves relatively slow, like walking speed, positional deskew seems to have little benefits. Thus code below is commented.

        // if (cloudInfo.odomAvailable == false || odomDeskewFlag == false)
        //     return;

        // float ratio = relTime / (timeScanEnd - timeScanCur);

        // *posXCur = ratio * odomIncreX;
        // *posYCur = ratio * odomIncreY;
        // *posZCur = ratio * odomIncreZ;
    }

    // deskew (motion-compensate) a single point of the cloud
    PointType deskewPoint(PointType *point, double relTime)
    {
        if (deskewFlag == -1 || cloudInfo.imuAvailable == false)
            return *point;

        double pointTime = timeScanCur + relTime;

        float rotXCur, rotYCur, rotZCur;
        findRotation(pointTime, &rotXCur, &rotYCur, &rotZCur);

        float posXCur, posYCur, posZCur;
        findPosition(relTime, &posXCur, &posYCur, &posZCur);

        // compute transStartInverse from the first point of the scan; it stays fixed for the rest of this scan
        if (firstPointFlag == true)
        {
            transStartInverse = (pcl::getTransformation(posXCur, posYCur, posZCur, 
                rotXCur, rotYCur, rotZCur)).inverse();
            firstPointFlag = false;
        }

        // transform points to start
        Eigen::Affine3f transFinal = pcl::getTransformation(posXCur, posYCur, posZCur, 
            rotXCur, rotYCur, rotZCur);
        
        // transform of this point relative to the first point = (pose at the first point)^-1 * (pose at this point)
        Eigen::Affine3f transBt = transStartInverse * transFinal;

        // apply the relative transform to correct the point's position
        PointType newPoint;
        newPoint.x = transBt(0,0) * point->x + transBt(0,1) * point->y + transBt(0,2) * point->z + transBt(0,3);
        newPoint.y = transBt(1,0) * point->x + transBt(1,1) * point->y + transBt(1,2) * point->z + transBt(1,3);
        newPoint.z = transBt(2,0) * point->x + transBt(2,1) * point->y + transBt(2,2) * point->z + transBt(2,3);
        newPoint.intensity = point->intensity;

        return newPoint;
    }

5.4 publishClouds()

// publish cloud_deskewed and cloudInfo
void publishClouds()
{
    cloudInfo.header = cloudHeader;
    cloudInfo.cloud_deskewed  = publishCloud(&pubExtractedCloud, extractedCloud, cloudHeader.stamp, lidarFrame);
    pubLaserCloudInfo.publish(cloudInfo);
}
   
sensor_msgs::PointCloud2 publishCloud(
    ros::Publisher *thisPub, 
    pcl::PointCloud<PointType>::Ptr thisCloud, 
    ros::Time thisStamp, 
    std::string thisFrame)
{
    sensor_msgs::PointCloud2 tempCloud;
    pcl::toROSMsg(*thisCloud, tempCloud);
    tempCloud.header.stamp = thisStamp;
    tempCloud.header.frame_id = thisFrame;
    if (thisPub->getNumSubscribers() != 0)
        thisPub->publish(tempCloud);
    return tempCloud;
}

To summarize:

This class deskews the point cloud. In the default code only the IMU rotation is used for deskewing; to also use the odometry translation you have to uncomment the code in findPosition().

It then builds the range image and extracts the indices of the valid points, which are not used yet at this stage.

Finally, the deskewed cloud and the cloud_info message are published.

REFERENCES

https://blog.csdn.net/unlimitedai/article/details/107378759#t6