#include "utility.h"
#include "lio_sam/cloud_info.h"
struct VelodynePointXYZIRT
{
PCL_ADD_POINT4D
PCL_ADD_INTENSITY;
uint16_t ring;
float time;
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
} EIGEN_ALIGN16;
POINT_CLOUD_REGISTER_POINT_STRUCT (VelodynePointXYZIRT,
(float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
(uint16_t, ring, ring) (float, time, time)
)
struct OusterPointXYZIRT {
PCL_ADD_POINT4D;
float intensity;
uint32_t t;
uint16_t reflectivity;
uint8_t ring;
uint16_t noise;
uint32_t range;
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
} EIGEN_ALIGN16;
POINT_CLOUD_REGISTER_POINT_STRUCT(OusterPointXYZIRT,
(float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
(uint32_t, t, t) (uint16_t, reflectivity, reflectivity)
(uint8_t, ring, ring) (uint16_t, noise, noise) (uint32_t, range, range)
)
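// Note the layout differences between the two structs: the Velodyne layout carries a float "time"
// (seconds relative to the scan start) and a uint16_t "ring", while the Ouster layout carries a
// uint32_t "t" (nanoseconds relative to the scan start) and a uint8_t "ring". cachePointCloud()
// below converts Ouster points into the Velodyne layout so the rest of the pipeline only handles one format.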
// Use the Velodyne point format as a common representation
using PointXYZIRT = VelodynePointXYZIRT;
const int queueLength = 2000;
class ImageProjection : public ParamServer
{
private:
// Mutexes guarding the message queues
std::mutex imuLock;
std::mutex odoLock;
// !@Subscriber
ros::Subscriber subLaserCloud;
ros::Subscriber subImu;
ros::Subscriber subOdom;
// !@ Publisher
ros::Publisher pubLaserCloud; // unused
ros::Publisher pubExtractedCloud;
ros::Publisher pubLaserCloudInfo;
// !@ Queues that buffer the messages received in the callbacks
std::deque<sensor_msgs::Imu> imuQueue;
std::deque<nav_msgs::Odometry> odomQueue;
std::deque<sensor_msgs::PointCloud2> cloudQueue;
sensor_msgs::PointCloud2 currentCloudMsg; // current lidar cloud message popped from the queue
double *imuTime = new double[queueLength];
double *imuRotX = new double[queueLength];
double *imuRotY = new double[queueLength];
double *imuRotZ = new double[queueLength];
// current index into imuTime / imuRotX / imuRotY / imuRotZ (and, after integration, their final length)
int imuPointerCur;
// flag marking the first point of the current scan
bool firstPointFlag;
Eigen::Affine3f transStartInverse;
pcl::PointCloud<PointXYZIRT>::Ptr laserCloudIn; // raw input cloud
pcl::PointCloud<OusterPointXYZIRT>::Ptr tmpOusterCloudIn; // SensorType::OUSTER
pcl::PointCloud<PointType>::Ptr fullCloud; // deskewed points (each point projected back to the scan start)
pcl::PointCloud<PointType>::Ptr extractedCloud;
int deskewFlag; // 1 if the cloud has a per-point "time" or "t" field
cv::Mat rangeMat; // range image (N_SCAN x Horizon_SCAN)
bool odomDeskewFlag;
float odomIncreX;
float odomIncreY;
float odomIncreZ;
lio_sam::cloud_info cloudInfo;
double timeScanCur; // timestamp of the first point in the scan
double timeScanEnd; // timestamp of the last point in the scan, i.e. the start of the next scan
std_msgs::Header cloudHeader;
vector<int> columnIdnCountVec;
public:
ImageProjection():
deskewFlag(0)
{
subImu = nh.subscribe<sensor_msgs::Imu>(imuTopic, 2000, &ImageProjection::imuHandler, this, ros::TransportHints().tcpNoDelay());
subOdom = nh.subscribe<nav_msgs::Odometry>(odomTopic+"_incremental", 2000, &ImageProjection::odometryHandler, this, ros::TransportHints().tcpNoDelay());
subLaserCloud = nh.subscribe<sensor_msgs::PointCloud2>(pointCloudTopic, 5, &ImageProjection::cloudHandler, this, ros::TransportHints().tcpNoDelay());
// publish the deskewed point cloud
pubExtractedCloud = nh.advertise<sensor_msgs::PointCloud2> ("lio_sam/deskew/cloud_deskewed", 1);
// publish the cloud info message defined in msg/cloud_info.msg
pubLaserCloudInfo = nh.advertise<lio_sam::cloud_info> ("lio_sam/deskew/cloud_info", 1);
allocateMemory(); // allocate memory
resetParameters(); // reset parameters
pcl::console::setVerbosityLevel(pcl::console::L_ERROR);
}
void allocateMemory()
{
laserCloudIn.reset(new pcl::PointCloud<PointXYZIRT>());
tmpOusterCloudIn.reset(new pcl::PointCloud<OusterPointXYZIRT>()); // SensorType::OUSTER
fullCloud.reset(new pcl::PointCloud<PointType>());
extractedCloud.reset(new pcl::PointCloud<PointType>());
fullCloud->points.resize(N_SCAN*Horizon_SCAN);
cloudInfo.startRingIndex.assign(N_SCAN, 0);
cloudInfo.endRingIndex.assign(N_SCAN, 0);
cloudInfo.pointColInd.assign(N_SCAN*Horizon_SCAN, 0);
cloudInfo.pointRange.assign(N_SCAN*Horizon_SCAN, 0);
resetParameters();
}
void resetParameters()
{
laserCloudIn->clear();
extractedCloud->clear();
// reset range matrix for range image projection
rangeMat = cv::Mat(N_SCAN, Horizon_SCAN, CV_32F, cv::Scalar::all(FLT_MAX));
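// FLT_MAX acts as a "no return" sentinel: projectPointCloud() only writes a cell whose value is
// still FLT_MAX, and cloudExtraction() treats any cell != FLT_MAX as a valid point.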
imuPointerCur = 0;
firstPointFlag = true;
odomDeskewFlag = false;
for (int i = 0; i < queueLength; ++i)
{
imuTime[i] = 0;
imuRotX[i] = 0;
imuRotY[i] = 0;
imuRotZ[i] = 0;
}
columnIdnCountVec.assign(N_SCAN, 0);
}
~ImageProjection(){}
void imuHandler(const sensor_msgs::Imu::ConstPtr& imuMsg)
{
// imuConverter() rotates the IMU data from the IMU frame into the lidar frame
sensor_msgs::Imu thisImu = imuConverter(*imuMsg);
// lock so the queue is not modified concurrently
std::lock_guard<std::mutex> lock1(imuLock);
// push the converted IMU data onto the queue; all later pops operate on imuQueue as well
imuQueue.push_back(thisImu);
// debug IMU data
// cout << std::setprecision(6);
// cout << "IMU acc: " << endl;
// cout << "x: " << thisImu.linear_acceleration.x <<
// ", y: " << thisImu.linear_acceleration.y <<
// ", z: " << thisImu.linear_acceleration.z << endl;
// cout << "IMU gyro: " << endl;
// cout << "x: " << thisImu.angular_velocity.x <<
// ", y: " << thisImu.angular_velocity.y <<
// ", z: " << thisImu.angular_velocity.z << endl;
// double imuRoll, imuPitch, imuYaw;
// tf::Quaternion orientation;
// tf::quaternionMsgToTF(thisImu.orientation, orientation);
// tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw);
// cout << "IMU roll pitch yaw: " << endl;
// cout << "roll: " << imuRoll << ", pitch: " << imuPitch << ", yaw: " << imuYaw << endl << endl;
}
void odometryHandler(const nav_msgs::Odometry::ConstPtr& odometryMsg)
{
// The odometry callback is even simpler: lock, then push onto the queue.
// Earlier versions only published a single odometry message from the map optimization node; the added incremental odometry topic makes processing here easier.
std::lock_guard<std::mutex> lock2(odoLock);
odomQueue.push_back(*odometryMsg);
}
void cloudHandler(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)
{
// 1. Cache the incoming point cloud
if (!cachePointCloud(laserCloudMsg))
return;
// 2. Compute the parameters needed for deskewing
if (!deskewInfo())
return;
// The steps above only gather the deskew parameters; the actual deskewing happens in the steps below.
// 3. Project the cloud into the range image (a 2-D matrix)
projectPointCloud();
// 4. Extract the valid points
cloudExtraction();
// 5. Publish the clouds
publishClouds();
// 6. Reset parameters
resetParameters();
}
/**
* 1. Cache the point cloud: push it onto the queue and
* record the per-scan information (header, timestamps, ...)
* laserCloudIn   raw cloud
* cloudHeader    cloud header
* timeScanCur    scan start time
* timeScanEnd    scan end time
*/
bool cachePointCloud(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)
{
// cache point cloud
// push the received raw cloud laserCloudMsg onto cloudQueue
cloudQueue.push_back(*laserCloudMsg);
if (cloudQueue.size() <= 2)
return false;
// convert cloud
// pop the front of the queue and convert the ROS message into a PCL cloud stored in laserCloudIn
currentCloudMsg = std::move(cloudQueue.front());
cloudQueue.pop_front();
if (sensor == SensorType::VELODYNE || sensor == SensorType::LIVOX)
{
pcl::moveFromROSMsg(currentCloudMsg, *laserCloudIn);
}
else if (sensor == SensorType::OUSTER)
{
// Convert to Velodyne format
pcl::moveFromROSMsg(currentCloudMsg, *tmpOusterCloudIn);
laserCloudIn->points.resize(tmpOusterCloudIn->size());
laserCloudIn->is_dense = tmpOusterCloudIn->is_dense;
for (size_t i = 0; i < tmpOusterCloudIn->size(); i++)
{
auto &src = tmpOusterCloudIn->points[i];
auto &dst = laserCloudIn->points[i];
dst.x = src.x;
dst.y = src.y;
dst.z = src.z;
dst.intensity = src.intensity;
dst.ring = src.ring;
dst.time = src.t * 1e-9f;
}
}
else
{
ROS_ERROR_STREAM("Unknown sensor type: " << int(sensor));
ros::shutdown();
}
// get timestamp
cloudHeader = currentCloudMsg.header;
// scan start time: timestamp of the first point
timeScanCur = cloudHeader.stamp.toSec();
// scan end time: start time plus the relative time of the last point (back()), i.e. the start of the next scan
timeScanEnd = timeScanCur + laserCloudIn->points.back().time;
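// Note: this assumes the per-point "time" field is relative to the scan start and given in seconds,
// as in the Velodyne layout and in the Ouster conversion above.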
// check the dense flag: make sure the cloud contains no invalid (NaN) points
if (laserCloudIn->is_dense == false)
{
ROS_ERROR("Point cloud is not in dense format, please remove NaN points first!"); //去除 无效点
ros::shutdown();
}
// check ring channel: make sure the "ring" field exists
// both Velodyne and Ouster clouds provide it
static int ringFlag = 0;
if (ringFlag == 0)
{
ringFlag = -1;
for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i)
{
// if any field is named "ring", set the flag to 1 and stop searching
if (currentCloudMsg.fields[i].name == "ring")
{
ringFlag = 1;
break;
}
}
if (ringFlag == -1)
{
ROS_ERROR("Point cloud ring channel not available, please configure your point cloud data!");
ros::shutdown();
}
}
// check the per-point time field, analogous to the ring check above
if (deskewFlag == 0)
{
deskewFlag = -1;
for (auto &field : currentCloudMsg.fields)
{
if (field.name == "time" || field.name == "t")
{
deskewFlag = 1; // a "time" / "t" field exists
break;
}
}
if (deskewFlag == -1)
ROS_WARN("Point cloud timestamp not available, deskew function disabled, system will drift significantly!");
}
return true;
}
/**
* 2. Prepare for deskewing.
* The scan covers [timeScanCur, timeScanEnd]; make sure the IMU data brackets this interval from front to back.
*/
bool deskewInfo()
{
std::lock_guard<std::mutex> lock1(imuLock);
std::lock_guard<std::mutex> lock2(odoLock);
// make sure IMU data available for the scan
// exit if the IMU queue is empty;
// the front of the IMU queue must be earlier than the scan start time and the back later than the scan end time,
// i.e. the IMU data must exist and fully bracket the scan interval, so every point time can be interpolated later
if (imuQueue.empty() || imuQueue.front().header.stamp.toSec() > timeScanCur || imuQueue.back().header.stamp.toSec() < timeScanEnd)
{
ROS_DEBUG("Waiting for IMU data ...");
return false;
}
// compute the IMU deskew parameters
imuDeskewInfo();
// compute the odometry deskew parameters
odomDeskewInfo();
return true;
}
/**
* IMU deskew preparation.
*
* Iterates over the IMU queue and
* (1) uses the IMU orientation as the attitude estimate of this scan, stored in cloudInfo;
* (2) integrates the gyro to fill imuRotX/Y/Z.
* cloudInfo.imuAvailable only becomes true if the whole procedure completes.
*/
void imuDeskewInfo()
{
// This flag is consumed by mapOptmization.cpp: it starts as false and is only set to true at the end, once all steps below complete,
// marking that the IMU deskew information for this scan is available
cloudInfo.imuAvailable = false;
while (!imuQueue.empty()) // while the queue is not empty
{
// the front already satisfies imuQueue.front().header.stamp.toSec() < timeScanCur;
// additionally discard IMU data more than 0.01 s older than the scan start
if (imuQueue.front().header.stamp.toSec() < timeScanCur - 0.01)
imuQueue.pop_front();
else
break;
}
if (imuQueue.empty())
return; // returning here leaves cloudInfo.imuAvailable == false
imuPointerCur = 0;
// iterate over the IMU queue
for (int i = 0; i < (int)imuQueue.size(); ++i)
{
// thisImuMsg holds the current IMU message
sensor_msgs::Imu thisImuMsg = imuQueue[i];
// timestamp of the current IMU message
double currentImuTime = thisImuMsg.header.stamp.toSec();
// get roll, pitch, and yaw estimation for this scan
// key step: use the IMU orientation as the attitude estimate for this scan; the values go straight into cloudInfo and are used by the map optimization node
if (currentImuTime <= timeScanCur)
imuRPY2rosRPY(&thisImuMsg, &cloudInfo.imuRollInit, &cloudInfo.imuPitchInit, &cloudInfo.imuYawInit);
// stop once the IMU time passes the scan end time (plus a small margin)
if (currentImuTime > timeScanEnd + 0.01)
break;
// initial values for the first IMU sample
if (imuPointerCur == 0){
imuRotX[0] = 0;
imuRotY[0] = 0;
imuRotZ[0] = 0;
imuTime[0] = currentImuTime;
++imuPointerCur;
continue;
}
// get angular velocity
double angular_x, angular_y, angular_z;
// read the angular velocity from the IMU message into angular_x/y/z
imuAngular2rosAngular(&thisImuMsg, &angular_x, &angular_y, &angular_z);
// integrate rotation
// integrate the angular velocity over the time interval to get the rotation, used later for deskewing
double timeDiff = currentImuTime - imuTime[imuPointerCur-1]; // current IMU time minus the previous one
imuRotX[imuPointerCur] = imuRotX[imuPointerCur-1] + angular_x * timeDiff; // current rotation = previous rotation + angular velocity * dt
imuRotY[imuPointerCur] = imuRotY[imuPointerCur-1] + angular_y * timeDiff;
imuRotZ[imuPointerCur] = imuRotZ[imuPointerCur-1] + angular_z * timeDiff;
imuTime[imuPointerCur] = currentImuTime;
++imuPointerCur;
}
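// In effect the loop above performs a simple forward integration of the gyro:
//   imuRot(t_i) = imuRot(t_{i-1}) + omega_i * (t_i - t_{i-1})
// so imuRotX/Y/Z hold the rotation accumulated since the scan start, sampled at the IMU
// timestamps stored in imuTime.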
--imuPointerCur; // the loop increments once per sample, so step back to the index of the last valid entry
if (imuPointerCur <= 0)
return; // returning here also leaves cloudInfo.imuAvailable == false
cloudInfo.imuAvailable = true;
}
/**
* Odometry counterpart of the IMU preparation above.
*
* Uses the odometry messages to build an initial pose estimate, stored in cloudInfo and used by the map optimization node;
* on success, cloudInfo.odomAvailable = true.
*
* It finds the start pose startOdomMsg and the end pose endOdomMsg, computes the relative transform between them,
* and stores the resulting increments in odomIncreX, odomIncreY, odomIncreZ for the later (optional) positional deskew.
* Finally it sets odomDeskewFlag = true.
*/
void odomDeskewInfo()
{
cloudInfo.odomAvailable = false;
while (!odomQueue.empty())
{
// the front already satisfies odomQueue.front().header.stamp.toSec() < timeScanCur;
// additionally discard odometry more than 0.01 s older than the scan start
if (odomQueue.front().header.stamp.toSec() < timeScanCur - 0.01)
odomQueue.pop_front();
else
break;
}
if (odomQueue.empty())
return;
if (odomQueue.front().header.stamp.toSec() > timeScanCur) // the earliest odometry must not be later than the scan start time
return;
// get start odometry at the beginning of the scan
nav_msgs::Odometry startOdomMsg; // temporary: odometry message taken from the queue
// startOdomMsg ends up holding the first odometry message with a timestamp >= timeScanCur
// iterate over the odometry queue
for (int i = 0; i < (int)odomQueue.size(); ++i)
{
startOdomMsg = odomQueue[i];
if (ROS_TIME(&startOdomMsg) < timeScanCur)
continue;
else
break;
}
tf::Quaternion orientation;
tf::quaternionMsgToTF(startOdomMsg.pose.pose.orientation, orientation);
double roll, pitch, yaw;
tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
// use the odometry message as the initial pose estimate; see mapOptimization for how it is used
// Initial guess used in mapOptimization
cloudInfo.initialGuessX = startOdomMsg.pose.pose.position.x;
cloudInfo.initialGuessY = startOdomMsg.pose.pose.position.y;
cloudInfo.initialGuessZ = startOdomMsg.pose.pose.position.z;
cloudInfo.initialGuessRoll = roll;
cloudInfo.initialGuessPitch = pitch;
cloudInfo.initialGuessYaw = yaw;
cloudInfo.odomAvailable = true;
// get end odometry at the end of the scan
odomDeskewFlag = false;
// as with the IMU, the newest odometry in the queue must be later than the scan end time
if (odomQueue.back().header.stamp.toSec() < timeScanEnd)
return;
// endOdomMsg ends up holding the first odometry message with a timestamp >= timeScanEnd
nav_msgs::Odometry endOdomMsg;
for (int i = 0; i < (int)odomQueue.size(); ++i)
{
endOdomMsg = odomQueue[i];
if (ROS_TIME(&endOdomMsg) < timeScanEnd)
continue;
else
break;
}
// compare the pose covariance entries: if covariance[0] differs between the start and end messages, return
if (int(round(startOdomMsg.pose.covariance[0])) != int(round(endOdomMsg.pose.covariance[0])))
return;
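// Note (to this reading of the code): the incremental odometry published by mapOptimization appears to
// store its degeneracy flag in pose.covariance[0], so this check skips scans whose start and end
// odometry were estimated under different degeneracy conditions.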
// the flags match:
// build the start transform transBegin
Eigen::Affine3f transBegin = pcl::getTransformation(startOdomMsg.pose.pose.position.x,
startOdomMsg.pose.pose.position.y,
startOdomMsg.pose.pose.position.z,
roll, pitch, yaw);
// build the end transform transEnd
tf::quaternionMsgToTF(endOdomMsg.pose.pose.orientation, orientation);
tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
Eigen::Affine3f transEnd = pcl::getTransformation(endOdomMsg.pose.pose.position.x,
endOdomMsg.pose.pose.position.y,
endOdomMsg.pose.pose.position.z,
roll, pitch, yaw);
// relative transform between the start and the end of the scan, transBt
Eigen::Affine3f transBt = transBegin.inverse() * transEnd;
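// transBt = transBegin^-1 * transEnd expresses the end-of-scan pose in the frame of the
// start-of-scan pose, i.e. how far the sensor moved during this single scan.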
float rollIncre, pitchIncre, yawIncre;
// decompose transBt into the translation and rotation increments used later for deskewing
pcl::getTranslationAndEulerAngles(transBt, odomIncreX, odomIncreY, odomIncreZ, rollIncre, pitchIncre, yawIncre);
odomDeskewFlag = true;
}
/**
* Look up the IMU rotation at a given point time from the values previously stored in imuRotX/Y/Z and imuTime.
*/
void findRotation(double pointTime, float *rotXCur, float *rotYCur, float *rotZCur)
{
// initial values
*rotXCur = 0; *rotYCur = 0; *rotZCur = 0;
int imuPointerFront = 0;
while (imuPointerFront < imuPointerCur)
{
if (pointTime < imuTime[imuPointerFront])
break;
++imuPointerFront;
}
if (pointTime > imuTime[imuPointerFront] || imuPointerFront == 0)
{
*rotXCur = imuRotX[imuPointerFront];
*rotYCur = imuRotY[imuPointerFront];
*rotZCur = imuRotZ[imuPointerFront];
} else {
int imuPointerBack = imuPointerFront - 1;
double ratioFront = (pointTime - imuTime[imuPointerBack]) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
double ratioBack = (imuTime[imuPointerFront] - pointTime) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
// weight the two bracketing samples by their ratios (linear interpolation)
*rotXCur = imuRotX[imuPointerFront] * ratioFront + imuRotX[imuPointerBack] * ratioBack;
*rotYCur = imuRotY[imuPointerFront] * ratioFront + imuRotY[imuPointerBack] * ratioBack;
*rotZCur = imuRotZ[imuPointerFront] * ratioFront + imuRotZ[imuPointerBack] * ratioBack;
}
}
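// The two ratios sum to 1, so this is a plain linear interpolation between the two IMU samples
// that bracket pointTime:
//   rotCur = rot[front] * (pointTime - t[back]) / (t[front] - t[back])
//          + rot[back]  * (t[front] - pointTime) / (t[front] - t[back])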
void findPosition(double relTime, float *posXCur, float *posYCur, float *posZCur)
{
// The author left this disabled: positional deskew using the odometry increments may help at high speed, but gains little on slow platforms.
*posXCur = 0; *posYCur = 0; *posZCur = 0;
// If the sensor moves relatively slow, like walking speed, positional deskew seems to have little benefits. Thus code below is commented.
// if (cloudInfo.odomAvailable == false || odomDeskewFlag == false)
// return;
// float ratio = relTime / (timeScanEnd - timeScanCur);
// *posXCur = ratio * odomIncreX;
// *posYCur = ratio * odomIncreY;
// *posZCur = ratio * odomIncreZ;
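// If enabled, the interpolation would be linear in time: ratio grows from 0 at the scan start
// to 1 at the scan end, scaling the odometry increments odomIncreX/Y/Z accordingly.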
}
/**
* Deskew a single point.
*
* The relative transform transBt between two poses projects the point back to the start of the scan.
* (The two poses are the current point's pose transFinal and the first point's pose, whose inverse is transStartInverse;
* every point is transformed relative to the first one, which projects it to the scan start.)
*/
PointType deskewPoint(PointType *point, double relTime)
{
// cloudInfo.imuAvailable == false means the IMU deskew preparation above did not complete
// deskewFlag == -1 means there is no per-point "time" / "t" field
if (deskewFlag == -1 || cloudInfo.imuAvailable == false)
return *point; // return the point unchanged: without the data, deskewing cannot be performed
// absolute time of this point: scan start time timeScanCur + the point's relative time
double pointTime = timeScanCur + relTime;
// variables for the rotation and translation compensation
float rotXCur, rotYCur, rotZCur;
// look up the rotation at pointTime (from imuRotX/Y/Z and imuTime)
findRotation(pointTime, &rotXCur, &rotYCur, &rotZCur);
float posXCur, posYCur, posZCur;
findPosition(relTime, &posXCur, &posYCur, &posZCur);
// every pose is computed relative to the first point of the scan, so each point can be projected to the scan start
if (firstPointFlag == true)
{
// build the transform of the first point and store its inverse
transStartInverse = (pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur)).inverse();
firstPointFlag = false;
}
// transform points to start
// project the point to the start of the scan, following the LOAM approach
Eigen::Affine3f transFinal = pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur);
// relative transform between the scan-start pose (transStartInverse) and the current point's pose (transFinal)
Eigen::Affine3f transBt = transStartInverse * transFinal;
// the deskewed point
PointType newPoint;
newPoint.x = transBt(0,0) * point->x + transBt(0,1) * point->y + transBt(0,2) * point->z + transBt(0,3);
newPoint.y = transBt(1,0) * point->x + transBt(1,1) * point->y + transBt(1,2) * point->z + transBt(1,3);
newPoint.z = transBt(2,0) * point->x + transBt(2,1) * point->y + transBt(2,2) * point->z + transBt(2,3);
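// The three lines above apply the affine transform by hand: newPoint = R * point + t,
// where R is the top-left 3x3 block of transBt and t is its last column.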
newPoint.intensity = point->intensity;
return newPoint;
}
/**
* Project the point cloud into something like a 2-D range image.
* Compute the row index rowIdn and the column index columnIdn for each point and store its range in rangeMat.
*
* Each point thisPoint is deskewed with deskewPoint() (computing its transform relative to the first point and projecting it to the scan start).
*/
void projectPointCloud()
{
// laserCloudIn is the raw cloud from the point cloud topic (converted according to the sensor type in cachePointCloud)
int cloudSize = laserCloudIn->points.size();
// range image projection
// iterate over every point
for (int i = 0; i < cloudSize; ++i)
{
PointType thisPoint; // temporary point
// copy the xyz and intensity of the current point
thisPoint.x = laserCloudIn->points[i].x;
thisPoint.y = laserCloudIn->points[i].y;
thisPoint.z = laserCloudIn->points[i].z;
thisPoint.intensity = laserCloudIn->points[i].intensity;
// range = sqrt(x*x + y*y + z*z)
float range = pointDistance(thisPoint);
if (range < lidarMinRange || range > lidarMaxRange)
continue;
// row index = the laser ring the point belongs to; some lidars provide a ring field, otherwise it must be computed from the vertical angle (as done in LeGO-LOAM)
int rowIdn = laserCloudIn->points[i].ring;
if (rowIdn < 0 || rowIdn >= N_SCAN)
continue;
if (rowIdn % downsampleRate != 0)
continue;
// column index: the point's position along its ring
int columnIdn = -1;
// computed differently depending on the sensor type
if (sensor == SensorType::VELODYNE || sensor == SensorType::OUSTER)
{
float horizonAngle = atan2(thisPoint.x, thisPoint.y) * 180 / M_PI;
static float ang_res_x = 360.0/float(Horizon_SCAN);
columnIdn = -round((horizonAngle-90.0)/ang_res_x) + Horizon_SCAN/2;
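// Worked example (assuming Horizon_SCAN = 1800, so ang_res_x = 0.2 deg): a point straight ahead
// on the +x axis has horizonAngle = atan2(x, y) = 90 deg, giving
// columnIdn = -round((90 - 90) / 0.2) + 1800 / 2 = 900, i.e. the middle column of the image.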
if (columnIdn >= Horizon_SCAN)
columnIdn -= Horizon_SCAN;
}
else if (sensor == SensorType::LIVOX)
{
columnIdn = columnIdnCountVec[rowIdn];
columnIdnCountVec[rowIdn] += 1;
}
if (columnIdn < 0 || columnIdn >= Horizon_SCAN)
continue;
if (rangeMat.at<float>(rowIdn, columnIdn) != FLT_MAX)
continue;
// deskew the point with deskewPoint()
thisPoint = deskewPoint(&thisPoint, laserCloudIn->points[i].time);
// fill the cv::Mat range image rangeMat (N_SCAN x Horizon_SCAN, e.g. 16 x 1800)
rangeMat.at<float>(rowIdn, columnIdn) = range;
// flattened index, analogous to a pixel index in an image
int index = columnIdn + rowIdn * Horizon_SCAN;
// store the motion-compensated (deskewed) point in fullCloud
fullCloud->points[index] = thisPoint;
}
}
/**
* Extract the valid points.
*/
void cloudExtraction()
{
// number of valid laser points
int count = 0;
// extract segmented cloud for lidar odometry
for (int i = 0; i < N_SCAN; ++i)
{
// start index of the i-th ring:
// record in startRingIndex the index of the fifth valid point from the start of this ring
cloudInfo.startRingIndex[i] = count - 1 + 5;
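// The +5 here (and the -5 for endRingIndex below) leaves out the first and last five points of
// every ring; the curvature computation in featureExtraction.cpp, which consumes these indices,
// uses five neighbors on each side, so those border points cannot be scored as features.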
for (int j = 0; j < Horizon_SCAN; ++j)
{
if (rangeMat.at<float>(i,j) != FLT_MAX)
{
// mark the points' column index for marking occlusion later
// record the point's column index along the Horizon_SCAN direction
cloudInfo.pointColInd[count] = j;
// save range info
// record the point's range
cloudInfo.pointRange[count] = rangeMat.at<float>(i,j);
// save extracted cloud
// save the valid point into extractedCloud
extractedCloud->push_back(fullCloud->points[j + i*Horizon_SCAN]);
// size of extracted cloud
++count;
}
}
// end index of the i-th ring:
// record in endRingIndex the index of the fifth-from-last valid point of this ring
cloudInfo.endRingIndex[i] = count -1 - 5;
}
}
void publishClouds()
{
// publish cloud_info together with the valid points in extractedCloud
cloudInfo.header = cloudHeader;
cloudInfo.cloud_deskewed = publishCloud(pubExtractedCloud, extractedCloud, cloudHeader.stamp, lidarFrame);
pubLaserCloudInfo.publish(cloudInfo);
}
};
int main(int argc, char** argv)
{
ros::init(argc, argv, "lio_sam");
// construct the ImageProjection object; its constructor ImageProjection() sets everything up
ImageProjection IP;
ROS_INFO("\033[1;32m----> Image Projection Started.\033[0m");
ros::MultiThreadedSpinner spinner(3);
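// Three spinner threads let the IMU, odometry, and cloud callbacks run concurrently,
// which is why imuQueue and odomQueue are protected by mutexes above.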
spinner.spin();
return 0;
}