Abstract: Under ROS Kinetic, use a RealSense D435 depth camera to capture registered (aligned) RGB-D images, synthesize a point cloud from them, view the cloud in rviz, and finally save it as a pcd file.
I. Various bugs
After the code compiled successfully, I opened rviz and added a PointCloud2 display. When I subscribed to the synthesized point cloud, visualization failed and the display reported the following errors:
1)Data size (9394656 bytes) does not match width (640) times height (480) times point_step (32). Dropping message.
Interpretation: reading the raw cloud published by the camera node with rostopic echo /pointcloud_topic shows that each raw frame contains 307200 points (640 × 480). The synthesized cloud, however, contains 9394656 / 32 = 293583 points, i.e. roughly 290,000. Evidently, for some reason, every frame of the synthesized cloud was being dropped.
Cause: I had fixed the synthesized cloud's size in advance to height = 480, width = 640. During synthesis, however, invalid depth values (d = 0) are skipped, so the actual number of points no longer matches width × height, and the message is dropped as a result.
Solution: size the cloud from the actual point count instead, i.e. cloud->height = 1; cloud->width = cloud->points.size();
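For clarity, here is a minimal sketch of the fix (using the same variable names as the node in section II): the cloud is sized from whatever points survived the d = 0 filter, right before conversion.
PointCloud::Ptr cloud(new PointCloud);
// ... push_back one pcl::PointXYZRGB per valid (d != 0) depth pixel ...
cloud->height = 1;                     // unorganized cloud: a single row
cloud->width  = cloud->points.size();  // width = number of points actually kept
sensor_msgs::PointCloud2 pub_pointcloud;
pcl::toROSMsg(*cloud, pub_pointcloud); // data size now equals width * height * point_step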
2)transform xxxxx;
Interpretation: rostopic echo /pointcloud_topic shows that the raw point cloud carries the following header information,
header:
  seq: 50114
  stamp:
    secs: 1528439158
    nsecs: 557543003
  frame_id: "camera_color_optical_frame"
From this I inferred that the synthesized cloud was missing its reference frame, header.frame_id. The XYZ coordinates of the points are expressed relative to some coordinate frame, so the cloud has to specify which frame that is.
Solution: fill in the cloud's header information,
pub_pointcloud.header.frame_id = "camera_color_optical_frame";
pub_pointcloud.header.stamp = ros::Time::now();
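As an aside, the frame can also be set on the PCL cloud itself before conversion; pcl::toROSMsg() copies the cloud header into the message, so the published cloud carries the same frame_id. A minimal sketch (same variables as in section II):
cloud->header.frame_id = "camera_color_optical_frame";
pcl_conversions::toPCL(ros::Time::now(), cloud->header.stamp); // PCL stamps are in microseconds
pcl::toROSMsg(*cloud, pub_pointcloud);                         // header is carried over into the ROS message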
3) When saving the synthesized cloud to a pcd file I hit the following error:
[ INFO] [1528442016.931874649]: point cloud size = 0
terminate called after throwing an instance of 'pcl::IOException'
what(): : [pcl::PCDWriter::writeASCII] Input point cloud has no data!
Aborted (core dumped)
After a lot of searching I stumbled on a trick, which I share here. The real cause of the error is still unknown to me; pointers from more experienced readers are welcome.
Dr. Gao's (高博) original code is shown below, where cloud is a PCL data type:
pcl::io::savePCDFile( "./pointcloud.pcd", *cloud );
My code (see section II below) first converts the PCL cloud to the ROS message type with pcl::toROSMsg() and then writes that to the pcd file, which sidesteps the error.
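Since the log above shows the cloud size was 0 at the time of the crash, another workaround (my guess, not a confirmed diagnosis) is to skip saving while the cloud is still empty, e.g. before the first image callbacks have delivered data:
if (!cloud->points.empty())
{
    pcl::toROSMsg(*cloud, pub_pointcloud);
    pcl::io::savePCDFile("./pointcloud.pcd", pub_pointcloud);
}
else
{
    ROS_WARN("point cloud is empty, skipping savePCDFile");
}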
4) Camera intrinsics
Because the depth-to-color registration uses the color image's coordinate frame, the color camera's intrinsics are the ones to use when synthesizing the point cloud (i.e. when back-projecting pixel coordinates into camera coordinates).
The RealSense SDK ships a utility that can display the intrinsics of every video stream (a ROS-side alternative is sketched after the listing below):
gordon@gordon-5577:/usr/local/bin$ ./rs-sensor-control
Its output looks like this:
84 : Color #0 (Video Stream: Y16 640x480@ 60Hz)
85 : Color #0 (Video Stream: BGRA8 640x480@ 60Hz)
86 : Color #0 (Video Stream: RGBA8 640x480@ 60Hz)
87 : Color #0 (Video Stream: BGR8 640x480@ 60Hz)
88 : Color #0 (Video Stream: RGB8 640x480@ 60Hz)
89 : Color #0 (Video Stream: YUYV 640x480@ 60Hz)
90 : Color #0 (Video Stream: Y16 640x480@ 30Hz)
91 : Color #0 (Video Stream: BGRA8 640x480@ 30Hz)
92 : Color #0 (Video Stream: RGBA8 640x480@ 30Hz)
93 : Color #0 (Video Stream: BGR8 640x480@ 30Hz)
94 : Color #0 (Video Stream: RGB8 640x480@ 30Hz)
95 : Color #0 (Video Stream: YUYV 640x480@ 30Hz)
96 : Color #0 (Video Stream: Y16 640x480@ 15Hz)
97 : Color #0 (Video Stream: BGRA8 640x480@ 15Hz)
98 : Color #0 (Video Stream: RGBA8 640x480@ 15Hz)
99 : Color #0 (Video Stream: BGR8 640x480@ 15Hz)
100: Color #0 (Video Stream: RGB8 640x480@ 15Hz)
101: Color #0 (Video Stream: YUYV 640x480@ 15Hz)
102: Color #0 (Video Stream: Y16 640x480@ 6Hz)
103: Color #0 (Video Stream: BGRA8 640x480@ 6Hz)
104: Color #0 (Video Stream: RGBA8 640x480@ 6Hz)
105: Color #0 (Video Stream: BGR8 640x480@ 6Hz)
106: Color #0 (Video Stream: RGB8 640x480@ 6Hz)
107: Color #0 (Video Stream: YUYV 640x480@ 6Hz)
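If the ROS wrapper is already running, the same color intrinsics can also be read from the color stream's camera_info topic; a minimal sketch (the topic name below assumes the default realsense2_camera namespace):
#include <ros/ros.h>
#include <sensor_msgs/CameraInfo.h>

void info_Callback(const sensor_msgs::CameraInfoConstPtr& info_msg)
{
    // K = [fx 0 cx; 0 fy cy; 0 0 1], stored row-major in info_msg->K
    double fx = info_msg->K[0], fy = info_msg->K[4];
    double cx = info_msg->K[2], cy = info_msg->K[5];
    ROS_INFO("color intrinsics: fx=%.3f fy=%.3f cx=%.3f cy=%.3f", fx, fy, cx, cy);
}

// in main():
// ros::Subscriber info_sub = nh.subscribe("/camera/color/camera_info", 1, info_Callback);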
5) Converting the depth image from the ROS message type to the OpenCV type
See the linked reference.
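For reference, a small self-contained sketch of that conversion (the helper name depthToFloat is my own): the RealSense wrapper publishes depth as 16UC1 in millimetres by default, and it can be turned into a float matrix with cv_bridge plus an explicit OpenCV conversion.
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <opencv2/core/core.hpp>

// Convert a ROS depth image to a 32-bit float cv::Mat, whether it arrives
// as 16UC1 (millimetres) or already as 32FC1.
cv::Mat depthToFloat(const sensor_msgs::ImageConstPtr& depth_msg)
{
    cv_bridge::CvImagePtr ptr = cv_bridge::toCvCopy(depth_msg, depth_msg->encoding); // keep the native encoding
    cv::Mat depth_f;
    if (depth_msg->encoding == sensor_msgs::image_encodings::TYPE_16UC1)
        ptr->image.convertTo(depth_f, CV_32FC1); // values remain in millimetres
    else
        depth_f = ptr->image;                    // already 32FC1
    return depth_f;
}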
II. Program code
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <sensor_msgs/PointCloud2.h>
// PCL headers
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl_conversions/pcl_conversions.h>
// Point cloud type definition
typedef pcl::PointCloud<pcl::PointXYZRGB> PointCloud;
using namespace std;
//namespace enc = sensor_msgs::image_encodings;
// Camera intrinsics of the color stream
const double camera_factor = 1000;
const double camera_cx = 321.798;
const double camera_cy = 239.607;
const double camera_fx = 615.899;
const double camera_fy = 616.468;
// Global variables: image matrices and the point cloud
cv_bridge::CvImagePtr color_ptr, depth_ptr;
cv::Mat color_pic, depth_pic;
void color_Callback(const sensor_msgs::ImageConstPtr& color_msg)
{
//cv_bridge::CvImagePtr color_ptr;
try
{
cv::imshow("color_view", cv_bridge::toCvShare(color_msg, sensor_msgs::image_encodings::BGR8)->image);
color_ptr = cv_bridge::toCvCopy(color_msg, sensor_msgs::image_encodings::BGR8);
cv::waitKey(1050); // keep refreshing the image window; the int argument is the delay in ms
}
catch (cv_bridge::Exception& e )
{
ROS_ERROR("Could not convert from '%s' to 'bgr8'.", color_msg->encoding.c_str());
}
color_pic = color_ptr->image;
// output some info about the rgb image in cv format
cout<<"output some info about the rgb image in cv format"<<endl;
cout<<"rows of the rgb image = "<<color_pic.rows<<endl;
cout<<"cols of the rgb image = "<<color_pic.cols<<endl;
cout<<"type of rgb_pic's element = "<<color_pic.type()<<endl;
}
void depth_Callback(const sensor_msgs::ImageConstPtr& depth_msg)
{
//cv_bridge::CvImagePtr depth_ptr;
try
{
//cv::imshow("depth_view", cv_bridge::toCvShare(depth_msg, sensor_msgs::image_encodings::TYPE_16UC1)->image);
//depth_ptr = cv_bridge::toCvCopy(depth_msg, sensor_msgs::image_encodings::TYPE_16UC1);
cv::imshow("depth_view", cv_bridge::toCvShare(depth_msg, sensor_msgs::image_encodings::TYPE_32FC1)->image);
depth_ptr = cv_bridge::toCvCopy(depth_msg, sensor_msgs::image_encodings::TYPE_32FC1);
cv::waitKey(1050);
}
catch (cv_bridge::Exception& e)
{
ROS_ERROR("Could not convert from '%s' to 'mono16'.", depth_msg->encoding.c_str());
}
depth_pic = depth_ptr->image;
// output some info about the depth image in cv format
cout<<"output some info about the depth image in cv format"<<endl;
cout<<"rows of the depth image = "<<depth_pic.rows<<endl;
cout<<"cols of the depth image = "<<depth_pic.cols<<endl;
cout<<"type of depth_pic's element = "<<depth_pic.type()<<endl;
}
int main(int argc, char **argv)
{
ros::init(argc, argv, "image_listener");
ros::NodeHandle nh;
cv::namedWindow("color_view");
cv::namedWindow("depth_view");
cv::startWindowThread();
image_transport::ImageTransport it(nh);
image_transport::Subscriber sub = it.subscribe("/camera/color/image_raw", 1, color_Callback);
image_transport::Subscriber sub1 = it.subscribe("/camera/aligned_depth_to_color/image_raw", 1, depth_Callback);
ros::Publisher pointcloud_publisher = nh.advertise<sensor_msgs::PointCloud2>("generated_pc", 1);
// Point cloud variables
// Create an empty cloud through a smart pointer; it is released automatically when no longer referenced.
PointCloud::Ptr cloud ( new PointCloud );
sensor_msgs::PointCloud2 pub_pointcloud;
double sample_rate = 1.0; // 1 Hz: publish once per second
ros::Rate naptime(sample_rate); // use to regulate loop rate
cout<<"depth value of depth map : "<<endl;
while (ros::ok()) {
// Traverse the depth image
for (int m = 0; m < depth_pic.rows; m++){
for (int n = 0; n < depth_pic.cols; n++){
// Get the depth value at (m, n)
float d = depth_pic.ptr<float>(m)[n]; //ushort d = depth_pic.ptr<ushort>(m)[n]; (if the image stays 16UC1)
// d may be missing (0); if so, skip this pixel
if (d == 0)
continue;
// d is valid, so add a point to the cloud
pcl::PointXYZRGB p;
// Compute the 3D coordinates of this point
p.z = double(d) / camera_factor;
p.x = (n - camera_cx) * p.z / camera_fx;
p.y = (m - camera_cy) * p.z / camera_fy;
// Get its color from the rgb image
// color_pic is a 3-channel BGR image, so the channels are read in this order
p.b = color_pic.ptr<uchar>(m)[n*3];
p.g = color_pic.ptr<uchar>(m)[n*3+1];
p.r = color_pic.ptr<uchar>(m)[n*3+2];
// Add p to the cloud
cloud->points.push_back( p );
}
}
// Size the cloud, then save it
cloud->height = 1;
cloud->width = cloud->points.size();
ROS_INFO("point cloud size = %d",cloud->width);
cloud->is_dense = false;
// Convert the cloud to the ROS message type and write it to a pcd file
pcl::toROSMsg(*cloud,pub_pointcloud);
pub_pointcloud.header.frame_id = "camera_color_optical_frame";
pub_pointcloud.header.stamp = ros::Time::now();
pcl::io::savePCDFile("./pointcloud.pcd", pub_pointcloud);
cout<<"publish point_cloud height = "<<pub_pointcloud.height<<endl;
cout<<"publish point_cloud width = "<<pub_pointcloud.width<<endl;
// Publish the synthesized point cloud
pointcloud_publisher.publish(pub_pointcloud);
// Clear the points for the next cycle
cloud->points.clear();
ros::spinOnce(); //allow data update from callback;
naptime.sleep(); // wait for remainder of specified period;
}
cv::destroyWindow("color_view");
cv::destroyWindow("depth_view");
}
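To feed this node, the camera driver must publish the color stream and the depth stream aligned to color. With the realsense2_camera wrapper this is typically started as follows (adjust to your own setup):
roslaunch realsense2_camera rs_camera.launch align_depth:=true
Without align_depth, the /camera/aligned_depth_to_color/image_raw topic that this node subscribes to will not be published.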