Contents

  • Approach
  • Lidar coordinates at the mouse position
  • GPS at the mouse position
  • Results

Approach

A mouse callback retrieves the pixel coordinates under the cursor, which are then used to look up the point-cloud coordinates stored at that pixel. Because the point cloud is sparse, if no lidar point maps to that pixel, the neighbourhood is searched for the nearest pixel that does hold a lidar point.
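
As a rough illustration of that fallback, the sketch below scans outward from the hovered pixel of the cloud image until it finds a pixel whose first channel (the lidar X value, as in the code later in this post) is non-zero. The helper name nearestLidarPixel and the search radius are made up for illustration; this is not the exact cloud_position() logic used below.

// Illustrative only: find a nearby pixel of cloud_img that actually holds a lidar point.
#include <opencv2/core.hpp>

static bool nearestLidarPixel(const cv::Mat& cloud_img, int row, int col,
                              int max_radius, int& out_row, int& out_col)
{
    for (int d = 0; d <= max_radius; ++d)              // grow the search window
        for (int dr = -d; dr <= d; ++dr)
            for (int dc = -d; dc <= d; ++dc)
            {
                int r = row + dr, c = col + dc;
                if (r < 0 || c < 0 || r >= cloud_img.rows || c >= cloud_img.cols)
                    continue;
                if (cloud_img.at<cv::Vec4d>(r, c)[0] > 0)   // channel 0: lidar X
                {
                    out_row = r;
                    out_col = c;
                    return true;
                }
            }
    return false;
}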

To obtain the GPS at a given pixel, the required pose information is published from the point-cloud densification program and subscribed to in this program.
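
That pose travels between the two nodes as an ordinary sensor_msgs/Image: the GPS fix and the transform matrices are packed into a 4x4 CV_64FC3 cv::Mat and shipped through cv_bridge. Below is a minimal sketch of the publishing side of that idea, assuming the matrix has already been filled the way Publishe_RT() does in the densification program later in this post; the topic name CV_GPS_R_IL matches the code, while the helper name publishPose is made up.

#include <ros/ros.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/Image.h>
#include <opencv2/core.hpp>

// Pack the pose matrix into an Image message and publish it (illustrative sketch).
void publishPose(ros::Publisher& pub, const cv::Mat& cv_g_r, const ros::Time& stamp)
{
    cv_bridge::CvImage cvi;
    cvi.header.stamp = stamp;
    cvi.header.frame_id = "cloud_image";
    cvi.encoding = "64FC3";   // 3 double channels: GPS / T_matrix / lidar-IMU extrinsics
    cvi.image = cv_g_r;       // 4x4 CV_64FC3 matrix filled as in Publishe_RT()
    sensor_msgs::Image msg;
    cvi.toImageMsg(msg);
    pub.publish(msg);         // e.g. on the "CV_GPS_R_IL" topic
}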

Lidar coordinates at the mouse position

//*************************************************************************************************
//    Display the image and get the real-world spatial position at the mouse location
//    source ~/catkin_ws/devel/setup.bash &&  rosrun my_lidar_cam_fusion fire_img_show
//
//****************************************************************************************************
#include <ros/ros.h>
#include <boost/thread.hpp>
#include <iostream>
#include <sstream>
#include <iomanip> // setiosflags / setprecision
#include <cv_bridge/cv_bridge.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/time_synchronizer.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>

using namespace std;
using namespace cv;
static const std::string FIRE = "fire_img_show";
static int mouse_x;
static int mouse_y;

class fire_img_show
{
public:
    fire_img_show()
    {
        cout << "show:image" << endl;
        row = 480; // image rows
        col = 640;
        sub1.subscribe(n, "cloud_img", 100);
        sub2.subscribe(n, "image_fire", 300);
        sync.connectInput(sub1, sub2);
        sync.registerCallback(boost::bind(&fire_img_show::chatterCallback, this, _1, _2));
        cv::namedWindow(FIRE);
    }
    ~fire_img_show()
    {
        cv::destroyWindow(FIRE);
    }
    void chatterCallback(const boost::shared_ptr<const sensor_msgs::Image>& cloud_img,
                         const boost::shared_ptr<const sensor_msgs::Image>& image_fire);
    static void on_Mouse(int event, int x, int y, int flags, void* param);
    bool cloud_position(int r, int c);

private:
    ros::NodeHandle n;
    message_filters::Subscriber<sensor_msgs::Image> sub1;
    message_filters::Subscriber<sensor_msgs::Image> sub2;
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> MySyncPolicy;
    message_filters::Synchronizer<MySyncPolicy> sync{MySyncPolicy(2000)};
    ros::Time time;
    int row; // image rows
    int col;
    int mouse_r;
    int mouse_c;
    cv::Mat cloud_img;
    cv::Mat image_fire;
    string str;
};
void fire_img_show::on_Mouse(int event, int x, int y, int flags, void* param)
{
    switch (event)
    {
    case CV_EVENT_MOUSEMOVE: // mouse moved
    {
        mouse_x = x;
        mouse_y = y;
        //cout << x << " " << y << endl;
    }
    break;
    }
}

void fire_img_show::chatterCallback(const boost::shared_ptr<const sensor_msgs::Image>& cloud_img_msg,
                                    const boost::shared_ptr<const sensor_msgs::Image>& image_fire_msg)
{
    time = cloud_img_msg->header.stamp;
    // Extract the OpenCV images from the ROS messages.
    cv_bridge::CvImagePtr cv_ptr1 = cv_bridge::toCvCopy(cloud_img_msg, sensor_msgs::image_encodings::TYPE_64FC4);
    cloud_img = cv_ptr1->image;
    cv_bridge::CvImagePtr cv_ptr2 = cv_bridge::toCvCopy(image_fire_msg, sensor_msgs::image_encodings::BGR8);
    image_fire = cv_ptr2->image;

    setMouseCallback(FIRE, on_Mouse, 0); // mouse callback

    if (20 < mouse_x && mouse_x < 620 && 20 < mouse_y && mouse_y < 460)
    {
        if (cloud_img.at<Vec4d>(mouse_y, mouse_x)[0] > 0)
        {
            // The pixel under the cursor has a lidar point: format its (x, y, z).
            ostringstream inStr;
            inStr << setiosflags(ios::fixed) << setprecision(9);
            inStr << "(" << cloud_img.at<Vec4d>(mouse_y, mouse_x)[0] << ","
                  << cloud_img.at<Vec4d>(mouse_y, mouse_x)[1] << ","
                  << cloud_img.at<Vec4d>(mouse_y, mouse_x)[2] << ")";
            str = inStr.str();
        }
        if (cloud_img.at<Vec4d>(mouse_y, mouse_x)[0] == 0)
        {
            // No lidar point at this pixel: fall back to the nearest neighbouring pixel.
            if (cloud_position(mouse_y, mouse_x))
            {
                ostringstream inStr;
                inStr << setiosflags(ios::fixed) << setprecision(9);
                inStr << "(" << cloud_img.at<Vec4d>(mouse_r, mouse_c)[0] << ","
                      << cloud_img.at<Vec4d>(mouse_r, mouse_c)[1] << ","
                      << cloud_img.at<Vec4d>(mouse_r, mouse_c)[2] << ")";
                str = inStr.str();
            }
        }
        // Draw the coordinate text next to the cursor (legacy OpenCV C API).
        IplImage image = IplImage(image_fire);
        CvArr* s = (CvArr*)&image;
        CvFont font;
        cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 0.4, 0.4, 0, 1, CV_AA);
        if (mouse_x > 300)
            cvPutText(s, str.c_str(), cvPoint(mouse_x - 300 - 15, mouse_y - 5), &font, CV_RGB(255, 0, 0));
        else
            cvPutText(s, str.c_str(), cvPoint(mouse_x + 5, mouse_y - 5), &font, CV_RGB(255, 0, 0));
    }
    Point point;
    point.x = mouse_x;
    point.y = mouse_y;
    circle(image_fire, point, 2, Scalar(0, 0, 255), -1);
    imshow(FIRE, image_fire);
    waitKey(1);
}

bool fire_img_show::cloud_position(int r, int c)
{
    cv::Mat point(4, 3, CV_64F, Scalar());
    // Search left and right along the row (up to 5 pixels each way).
    for (int j = c; j > (c - 5); j--)
    {
        if (cloud_img.at<Vec4d>(r, j)[0] > 0) { point.at<double>(0,0) = r; point.at<double>(0,1) = j; point.at<double>(0,2) = c - j; break; }
    }
    for (int j = c; j < (c + 5); j++)
    {
        if (cloud_img.at<Vec4d>(r, j)[0] > 0) { point.at<double>(1,0) = r; point.at<double>(1,1) = j; point.at<double>(1,2) = j - c; break; }
    }
    if (point.at<double>(0,2) > 0 || point.at<double>(1,2) > 0)
    {
        if (point.at<double>(0,2) > 0 && point.at<double>(1,2) > 0)
        {
            if (point.at<double>(0,2) < point.at<double>(1,2)) { mouse_r = point.at<double>(0,0); mouse_c = point.at<double>(0,1); }
            else                                               { mouse_r = point.at<double>(1,0); mouse_c = point.at<double>(1,1); }
        }
        else
        {
            if (point.at<double>(0,2) > 0) { mouse_r = point.at<double>(0,0); mouse_c = point.at<double>(0,1); }
            else                           { mouse_r = point.at<double>(1,0); mouse_c = point.at<double>(1,1); }
        }
        return true;
    }
    //--------------------------------------------------
    // Otherwise search up and down along the column (up to 10 pixels each way).
    for (int i = r; i > (r - 10); i--)
    {
        if (cloud_img.at<Vec4d>(i, c)[0] > 0) { point.at<double>(2,0) = i; point.at<double>(2,1) = c; point.at<double>(2,2) = r - i; break; }
    }
    for (int i = r; i < (r + 10); i++)
    {
        if (cloud_img.at<Vec4d>(i, c)[0] > 0) { point.at<double>(3,0) = i; point.at<double>(3,1) = c; point.at<double>(3,2) = i - r; break; }
    }
    if (point.at<double>(2,2) > 0 || point.at<double>(3,2) > 0)
    {
        if (point.at<double>(2,2) > 0 && point.at<double>(3,2) > 0)
        {
            if (point.at<double>(2,2) < point.at<double>(3,2)) { mouse_r = point.at<double>(2,0); mouse_c = point.at<double>(2,1); }
            else                                               { mouse_r = point.at<double>(3,0); mouse_c = point.at<double>(3,1); }
        }
        else
        {
            if (point.at<double>(2,2) > 0) { mouse_r = point.at<double>(2,0); mouse_c = point.at<double>(2,1); }
            else                           { mouse_r = point.at<double>(3,0); mouse_c = point.at<double>(3,1); }
        }
        return true;
    }
    return false;
}

int main(int argc, char** argv)
{
    ros::init(argc, argv, "fire_img_show");
    fire_img_show fis;
    //ros::MultiThreadedSpinner spinner(2);
    //spinner.spin();
    ros::spin();
    return 0;
}
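
Note that the text overlay above uses the legacy OpenCV C API (IplImage, cvInitFont, cvPutText, CV_EVENT_MOUSEMOVE), which is no longer available in OpenCV 4. If you build against a newer OpenCV, the same overlay can be drawn with the C++ API; a minimal equivalent sketch (not part of the original program, helper name drawLabel is made up):

#include <string>
#include <opencv2/imgproc.hpp>

// Draw the coordinate string next to the cursor with the OpenCV C++ API.
void drawLabel(cv::Mat& image_fire, const std::string& str, int mouse_x, int mouse_y)
{
    cv::Point org = (mouse_x > 300) ? cv::Point(mouse_x - 315, mouse_y - 5)
                                    : cv::Point(mouse_x + 5, mouse_y - 5);
    cv::putText(image_fire, str, org, cv::FONT_HERSHEY_SIMPLEX, 0.4,
                cv::Scalar(0, 0, 255), 1, cv::LINE_AA);   // red in BGR
    // cv::EVENT_MOUSEMOVE replaces CV_EVENT_MOUSEMOVE in the mouse callback.
}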

GPS at the mouse position

Make a few small modifications to the program above:

//*************************************************************************************************
//    Display the image and get the real-world spatial position at the mouse location
//    source ~/catkin_ws/devel/setup.bash &&  rosrun my_lidar_cam_fusion fire_img_show
//
//****************************************************************************************************
#include <ros/ros.h>
#include <boost/thread.hpp>
#include <iostream>
#include <sstream>
#include <iomanip> // setiosflags / setprecision
#include <cmath>   // cos
#include <cv_bridge/cv_bridge.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/time_synchronizer.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>

#define PI 3.1415926

using namespace std;
using namespace cv;
static const std::string FIRE = "fire_img_show";
static int mouse_x;
static int mouse_y;

class fire_img_show
{
public:
    fire_img_show()
    {
        cout << "show:image" << endl;
        row = 480; // image rows
        col = 640;
        sub1.subscribe(n, "cloud_img", 100);
        sub2.subscribe(n, "image_fire", 300);
        sub3.subscribe(n, "CV_GPS_R_IL", 100);
        sync.connectInput(sub1, sub2, sub3);
        sync.registerCallback(boost::bind(&fire_img_show::chatterCallback, this, _1, _2, _3));
        cv::namedWindow(FIRE);
    }
    ~fire_img_show()
    {
        cv::destroyWindow(FIRE);
    }
    void chatterCallback(const boost::shared_ptr<const sensor_msgs::Image>& cloud_img_msg,
                         const boost::shared_ptr<const sensor_msgs::Image>& image_fire_msg,
                         const boost::shared_ptr<const sensor_msgs::Image>& CV_G_R_ni_msg);
    static void on_Mouse(int event, int x, int y, int flags, void* param);
    bool cloud_position(int r, int c);
    void mi_to_du(cv::Mat I_point);
    void get_CV_G_R_IL();

private:
    ros::NodeHandle n;
    message_filters::Subscriber<sensor_msgs::Image> sub1;
    message_filters::Subscriber<sensor_msgs::Image> sub2;
    message_filters::Subscriber<sensor_msgs::Image> sub3;
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::Image> MySyncPolicy;
    message_filters::Synchronizer<MySyncPolicy> sync{MySyncPolicy(2000)};
    ros::Time time;
    int row; // image rows
    int col;
    int mouse_r;
    int mouse_c;
    cv::Mat cloud_img;
    cv::Mat image_fire;
    cv::Mat CV_G_R_IL;
    cv::Mat GPS; // 3x1
    cv::Mat R;   // 4x4 rotation
    cv::Mat I2L; // 4x4 rotation + translation
    string str;
};
void fire_img_show::on_Mouse(int event, int x, int y, int flags, void* param)
{
    switch (event)
    {
    case CV_EVENT_MOUSEMOVE: // mouse moved
    {
        mouse_x = x;
        mouse_y = y;
        //cout << x << " " << y << endl;
    }
    break;
    }
}

void fire_img_show::chatterCallback(const boost::shared_ptr<const sensor_msgs::Image>& cloud_img_msg,
                                    const boost::shared_ptr<const sensor_msgs::Image>& image_fire_msg,
                                    const boost::shared_ptr<const sensor_msgs::Image>& CV_G_R_ni_msg)
{
    time = cloud_img_msg->header.stamp;
    // Extract the OpenCV images from the ROS messages.
    cv_bridge::CvImagePtr cv_ptr1 = cv_bridge::toCvCopy(cloud_img_msg, sensor_msgs::image_encodings::TYPE_64FC4);
    cloud_img = cv_ptr1->image;
    cv_bridge::CvImagePtr cv_ptr2 = cv_bridge::toCvCopy(image_fire_msg, sensor_msgs::image_encodings::BGR8);
    image_fire = cv_ptr2->image;
    cv_bridge::CvImagePtr cv_ptr3 = cv_bridge::toCvCopy(CV_G_R_ni_msg, sensor_msgs::image_encodings::TYPE_64FC3);
    CV_G_R_IL = cv_ptr3->image;
    get_CV_G_R_IL();

    setMouseCallback(FIRE, on_Mouse, 0); // mouse callback

    if (20 < mouse_x && mouse_x < 620 && 20 < mouse_y && mouse_y < 460)
    {
        if (cloud_img.at<Vec4d>(mouse_y, mouse_x)[0] > 0)
        {
            // Transform the lidar point under the cursor into the initial frame, then to GPS.
            Mat point = (Mat_<double>(4, 1) << cloud_img.at<Vec4d>(mouse_y, mouse_x)[0],
                         cloud_img.at<Vec4d>(mouse_y, mouse_x)[1],
                         cloud_img.at<Vec4d>(mouse_y, mouse_x)[2], 1);
            Mat I_point(4, 1, CV_64F);
            I_point = R * I2L * point;
            mi_to_du(I_point);
            ostringstream inStr;
            inStr << setiosflags(ios::fixed) << setprecision(9);
            inStr << "(" << I_point.at<double>(0) << "," << I_point.at<double>(1) << "," << I_point.at<double>(2) << ")";
            str = inStr.str();
        }
        if (cloud_img.at<Vec4d>(mouse_y, mouse_x)[0] == 0)
        {
            // No lidar point at this pixel: fall back to the nearest neighbouring pixel.
            if (cloud_position(mouse_y, mouse_x))
            {
                Mat point = (Mat_<double>(4, 1) << cloud_img.at<Vec4d>(mouse_r, mouse_c)[0],
                             cloud_img.at<Vec4d>(mouse_r, mouse_c)[1],
                             cloud_img.at<Vec4d>(mouse_r, mouse_c)[2], 1);
                Mat I_point(4, 1, CV_64F);
                I_point = R * I2L * point;
                mi_to_du(I_point);
                ostringstream inStr;
                inStr << setiosflags(ios::fixed) << setprecision(9);
                inStr << "(" << I_point.at<double>(0) << "," << I_point.at<double>(1) << "," << I_point.at<double>(2) << ")";
                str = inStr.str();
            }
        }
        // Draw the GPS text next to the cursor (legacy OpenCV C API).
        IplImage image = IplImage(image_fire);
        CvArr* s = (CvArr*)&image;
        CvFont font;
        cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 0.4, 0.4, 0, 1, CV_AA);
        if (mouse_x > 300)
            cvPutText(s, str.c_str(), cvPoint(mouse_x - 300 - 15, mouse_y - 5), &font, CV_RGB(255, 0, 0));
        else
            cvPutText(s, str.c_str(), cvPoint(mouse_x + 5, mouse_y - 5), &font, CV_RGB(255, 0, 0));
    }
    Point point;
    point.x = mouse_x;
    point.y = mouse_y;
    circle(image_fire, point, 2, Scalar(0, 0, 255), -1);
    imshow(FIRE, image_fire);
    waitKey(1);
}
void fire_img_show::mi_to_du(cv::Mat I_point)
{
    I_point.at<double>(0) = I_point.at<double>(0) / 111000 + GPS.at<double>(1);   // latitude
    double lon = 111000 * cos(GPS.at<double>(1) / 180 * PI);
    I_point.at<double>(1) = I_point.at<double>(1) / lon + GPS.at<double>(0);      // longitude
    I_point.at<double>(2) = -I_point.at<double>(2) + GPS.at<double>(2);           // altitude
}
void fire_img_show::get_CV_G_R_IL()
{
    // get gps
    GPS = (Mat_<double>(3, 1) << CV_G_R_IL.at<Vec3d>(0, 0)[0], CV_G_R_IL.at<Vec3d>(0, 1)[0], CV_G_R_IL.at<Vec3d>(0, 2)[0]);
    //cout << "gps" << GPS << setprecision(9) << endl;
    // Translation w.r.t. the initial frame:  CV_G_R_IL.at<Vec3d>(1,0)[0]  (1,1)[0]  (1,2)[0]
    // Quaternion:                            CV_G_R_IL.at<Vec3d>(2,0)[0]  (2,1)[0]  (2,2)[0]  (2,3)[0]
    // Layer 2: 4x4 rotation, initial frame to IMU frame
    R = (Mat_<double>(4, 4) << CV_G_R_IL.at<Vec3d>(0, 0)[1], CV_G_R_IL.at<Vec3d>(0, 1)[1], CV_G_R_IL.at<Vec3d>(0, 2)[1], 0,
         CV_G_R_IL.at<Vec3d>(1, 0)[1], CV_G_R_IL.at<Vec3d>(1, 1)[1], CV_G_R_IL.at<Vec3d>(1, 2)[1], 0,
         CV_G_R_IL.at<Vec3d>(2, 0)[1], CV_G_R_IL.at<Vec3d>(2, 1)[1], CV_G_R_IL.at<Vec3d>(2, 2)[1], 0,
         0, 0, 0, 1);
    // Layer 3: 4x4 rotation + translation (lidar/IMU extrinsics)
    I2L = (Mat_<double>(4, 4) << CV_G_R_IL.at<Vec3d>(0, 0)[2], CV_G_R_IL.at<Vec3d>(0, 1)[2], CV_G_R_IL.at<Vec3d>(0, 2)[2], CV_G_R_IL.at<Vec3d>(0, 3)[2],
           CV_G_R_IL.at<Vec3d>(1, 0)[2], CV_G_R_IL.at<Vec3d>(1, 1)[2], CV_G_R_IL.at<Vec3d>(1, 2)[2], CV_G_R_IL.at<Vec3d>(1, 3)[2],
           CV_G_R_IL.at<Vec3d>(2, 0)[2], CV_G_R_IL.at<Vec3d>(2, 1)[2], CV_G_R_IL.at<Vec3d>(2, 2)[2], CV_G_R_IL.at<Vec3d>(2, 3)[2],
           0, 0, 0, 1);
}

bool fire_img_show::cloud_position(int r, int c)
{
    cv::Mat point(4, 3, CV_64F, Scalar());
    // Search left and right along the row (up to 5 pixels each way).
    for (int j = c; j > (c - 5); j--)
    {
        if (cloud_img.at<Vec4d>(r, j)[0] > 0) { point.at<double>(0,0) = r; point.at<double>(0,1) = j; point.at<double>(0,2) = c - j; break; }
    }
    for (int j = c; j < (c + 5); j++)
    {
        if (cloud_img.at<Vec4d>(r, j)[0] > 0) { point.at<double>(1,0) = r; point.at<double>(1,1) = j; point.at<double>(1,2) = j - c; break; }
    }
    if (point.at<double>(0,2) > 0 || point.at<double>(1,2) > 0)
    {
        if (point.at<double>(0,2) > 0 && point.at<double>(1,2) > 0)
        {
            if (point.at<double>(0,2) < point.at<double>(1,2)) { mouse_r = point.at<double>(0,0); mouse_c = point.at<double>(0,1); }
            else                                               { mouse_r = point.at<double>(1,0); mouse_c = point.at<double>(1,1); }
        }
        else
        {
            if (point.at<double>(0,2) > 0) { mouse_r = point.at<double>(0,0); mouse_c = point.at<double>(0,1); }
            else                           { mouse_r = point.at<double>(1,0); mouse_c = point.at<double>(1,1); }
        }
        return true;
    }
    //--------------------------------------------------
    // Otherwise search up and down along the column (up to 10 pixels each way).
    for (int i = r; i > (r - 10); i--)
    {
        if (cloud_img.at<Vec4d>(i, c)[0] > 0) { point.at<double>(2,0) = i; point.at<double>(2,1) = c; point.at<double>(2,2) = r - i; break; }
    }
    for (int i = r; i < (r + 10); i++)
    {
        if (cloud_img.at<Vec4d>(i, c)[0] > 0) { point.at<double>(3,0) = i; point.at<double>(3,1) = c; point.at<double>(3,2) = i - r; break; }
    }
    if (point.at<double>(2,2) > 0 || point.at<double>(3,2) > 0)
    {
        if (point.at<double>(2,2) > 0 && point.at<double>(3,2) > 0)
        {
            if (point.at<double>(2,2) < point.at<double>(3,2)) { mouse_r = point.at<double>(2,0); mouse_c = point.at<double>(2,1); }
            else                                               { mouse_r = point.at<double>(3,0); mouse_c = point.at<double>(3,1); }
        }
        else
        {
            if (point.at<double>(2,2) > 0) { mouse_r = point.at<double>(2,0); mouse_c = point.at<double>(2,1); }
            else                           { mouse_r = point.at<double>(3,0); mouse_c = point.at<double>(3,1); }
        }
        return true;
    }
    return false;
}

int main(int argc, char** argv)
{
    ros::init(argc, argv, "fire_img_show");
    fire_img_show fis;
    //ros::MultiThreadedSpinner spinner(2);
    //spinner.spin();
    ros::spin();
    return 0;
}
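
mi_to_du() above converts the metre offsets in the initial frame back to latitude/longitude/altitude with the usual equirectangular approximation (one degree of latitude is roughly 111000 m, one degree of longitude roughly 111000·cos(latitude) m), mirroring GetDirectDistance() in the densification program below. A standalone sketch of that pair of conversions; the function names are illustrative, not part of the original code:

#include <cmath>

static const double PI = 3.1415926;
static const double M_PER_DEG = 111000.0;   // metres per degree of latitude (approximation)

// Offset in metres between two lat/lon fixes (x roughly east, y roughly north).
void degreesToMetres(double lat0, double lon0, double lat1, double lon1, double& x, double& y)
{
    x = (lon1 - lon0) * M_PER_DEG * std::cos(lat0 / 180.0 * PI);
    y = (lat1 - lat0) * M_PER_DEG;
}

// Inverse: add a metre offset to a reference fix to recover lat/lon.
void metresToDegrees(double lat0, double lon0, double x, double y, double& lat1, double& lon1)
{
    lat1 = lat0 + y / M_PER_DEG;
    lon1 = lon0 + x / (M_PER_DEG * std::cos(lat0 / 180.0 * PI));
}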

The modified point-cloud densification program is as follows:

//*************************************************************************************************
//    Accumulate a dense point cloud and publish it iteratively: cloud_cut
//    source ~/catkin_ws/devel/setup.bash &&  rosrun my_lidar_cam_fusion my_cloud_dense
//
//    http://docs.ros.org/en/melodic/api/sensor_msgs/html/image__encodings_8h.html
//
//****************************************************************************************************
#include "ros/ros.h"
#include <math.h>
#include <iostream>
#include <cmath>
#include "std_msgs/String.h"
#include <boost/thread.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>     // ROS image encoding definitions
#include <image_transport/image_transport.h> // publish/subscribe image topics in ROS
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <sbg_driver/SbgImuData.h>
#include <sbg_driver/SbgEkfQuat.h>
#include <sbg_driver/SbgGpsPos.h>
#include <sbg_driver/SbgEkfNav.h>

#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/sync_policies/exact_time.h>
#include <message_filters/time_synchronizer.h>

#include <geometry_msgs/TransformStamped.h>
#include "tf/transform_datatypes.h"

#include <pcl/io/pcd_io.h>
#include <pcl/point_cloud.h>
#include <pcl/common/common.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl_ros/point_cloud.h>
#include <pcl_ros/transforms.h>
#include <pcl/point_types.h>
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>
#include <pcl/sample_consensus/ransac.h>
#include <pcl/sample_consensus/sac_model_plane.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/filters/passthrough.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/statistical_outlier_removal.h>

#include <sensor_msgs/Imu.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/PointCloud.h>

#include <Eigen/Eigen>
#include <Eigen/Dense>
#include <Eigen/Geometry>
#include <Eigen/Eigenvalues>

#define PI 3.1415926
#define rQ 40       // lidar mounting rotation angle (degrees)
#define TX 0.18     // 0.14  translation 0.18
#define TY 0
#define TZ 0.24     // 0.30  // 0.24

using namespace sensor_msgs;
using namespace std;
using namespace cv;

class cloud_dense
{
public:
    cloud_dense()
    {
        cout << "Accumulating dense point cloud, iteratively publishing: cloud_CUT   cloud_img" << endl;
        k = true;
        // Camera intrinsics
        M = (Mat_<double>(3, 3) << 1120.793247366556, 0, 404.6240125739656, 0, 1126.958397947255, 248.9162767468943, 0, 0, 1);
        cout << "M=" << M << endl;
        // Lidar-to-camera extrinsics
        RT = (Mat_<double>(3, 4) << -0.0209971, -0.999327, 0.0300679, 0.01784,
              -0.0238557, -0.0295651, -0.999278, -0.0963614,
              0.999495, -0.0216992, -0.0232189, 0.00614244);
        cout << "RT=" << RT << endl;
        row = 480;
        col = 640;
        cloud_pub = n.advertise<sensor_msgs::PointCloud2>("cloud_cut", 1); // publish the stitched cloud
        imu_pub = n.advertise<sensor_msgs::Imu>("imu_RT", 1);              // publish RT
        //image_pub = n.advertise<sensor_msgs::Image>("image_raw", 1);
        cloud_img_pub = n.advertise<sensor_msgs::Image>("cloud_img", 1);   // publish
        CV_GPS_R_IL = n.advertise<sensor_msgs::Image>("CV_GPS_R_IL", 1);   // publish
        //sub = n.subscribe<sensor_msgs::Image>("/image_raw", 30, &ThreadListener::myCallback5, this, ros::TransportHints().tcpNoDelay());
        sub1.subscribe(n, "points_raw", 10); // subscribe to the lidar
        sub2.subscribe(n, "/gps_raw", 10);
        sub3.subscribe(n, "/quat_raw", 100);
        sync.connectInput(sub1, sub2, sub3);
        sync.registerCallback(boost::bind(&cloud_dense::chatterCallback, this, _1, _2, _3));
    }
    void chatterCallback(const boost::shared_ptr<const sensor_msgs::PointCloud2>& pc2,
                         const boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg,
                         const boost::shared_ptr<const sbg_driver::SbgEkfQuat>& quatmsg);
    void get_Quat_gps(const boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg,
                      const boost::shared_ptr<const sbg_driver::SbgEkfQuat>& quatmsg);
    void Down_sampling(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_tmp, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud);
    void GetDirectDistance(double srcLon, double srcLat, double gz, double destLon, double destLat, double tz, double *x, double *y, double *z);
    void lidar_to_imu(const boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg);
    void cloud_to_Initial_coord(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud);
    void Initial_to_lidar_coord(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud2, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar);
    void Cloud_Cut(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut);
    void Effective_point(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut);
    void Publishe_msg(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut);
    void Publishe_RT();
    void Publishe_cloudImg_msg(cv::Mat cloudImg);

private:
    ros::NodeHandle n;
    message_filters::Subscriber<sensor_msgs::PointCloud2> sub1;
    message_filters::Subscriber<sbg_driver::SbgEkfNav> sub2;
    message_filters::Subscriber<sbg_driver::SbgEkfQuat> sub3;
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::PointCloud2, sbg_driver::SbgEkfNav, sbg_driver::SbgEkfQuat> MySyncPolicy;
    message_filters::Synchronizer<MySyncPolicy> sync{MySyncPolicy(2000)};
    ros::Publisher cloud_pub;
    ros::Publisher imu_pub;
    ros::Publisher cloud_img_pub;
    ros::Publisher CV_GPS_R_IL;
    //ros::Subscriber sub;
    bool k;                        // first-frame flag (debugging)
    double gx, gy, gz;             // translation reference for transforming into the initial frame
    double x, y, z, w, tx, ty, tz; // quaternion and current GPS, used for the translation
    double nx, ny, nz;
    Eigen::Matrix4d lidartoimu;
    Eigen::Matrix4d l2i_ni;
    Eigen::Matrix4d T_matrix;      // 4x4 rotation matrix
    Eigen::Matrix4d T_ni;
    pcl::PointCloud<pcl::PointXYZ> cloud_new1;
    ros::Time time;
    cv::Mat M;
    cv::Mat RT;
    cv::Mat cv_g_r;
    int row; // image rows
    int col;
};

void cloud_dense::Down_sampling(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_tmp, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud)
{
    pcl::VoxelGrid<pcl::PointXYZ> sor;
    sor.setInputCloud(cloud_tmp);
    sor.setLeafSize(0.08f, 0.08f, 0.08f); // voxel filter (leaf size 0.08 m)
    sor.filter(*cloud);
}

// Convert a change in longitude/latitude into metres.
void cloud_dense::GetDirectDistance(double srcLon, double srcLat, double gz, double destLon, double destLat, double tz, double *x, double *y, double *z)
{
    *x = (destLon - srcLon) * 111000 * cos(srcLat / 180 * PI);
    *y = (destLat - srcLat) * 111000;
    *z = tz - gz;
}
void cloud_dense::lidar_to_imu(const  boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg)
{
    //cout << " about to publish dense cloud: cloud_lidar " << endl;
    double RQ = rQ * PI / 180;
    gx = gpsmsg->position.y; // store the first GPS fix here
    gy = gpsmsg->position.x;
    gz = gpsmsg->position.z;
    Eigen::Matrix3d r_y; // rotation about the Y axis
    r_y(0,0) = cos(RQ);  r_y(0,1) = 0; r_y(0,2) = sin(RQ);
    r_y(1,0) = 0;        r_y(1,1) = 1; r_y(1,2) = 0;
    r_y(2,0) = -sin(RQ); r_y(2,1) = 0; r_y(2,2) = cos(RQ);
    Eigen::Matrix3d r_l2I; // lidar-to-INS axis alignment
    r_l2I(0,0) = 0; r_l2I(0,1) = 0;  r_l2I(0,2) = 1;
    r_l2I(1,0) = 0; r_l2I(1,1) = -1; r_l2I(1,2) = 0;
    r_l2I(2,0) = 1; r_l2I(2,1) = 0;  r_l2I(2,2) = 0;
    Eigen::Matrix3d r_l_i = r_y * r_l2I; // lidar-to-INS rotation
    lidartoimu(0,0) = r_l_i(0,0); lidartoimu(0,1) = r_l_i(0,1); lidartoimu(0,2) = r_l_i(0,2); lidartoimu(0,3) = TX;
    lidartoimu(1,0) = r_l_i(1,0); lidartoimu(1,1) = r_l_i(1,1); lidartoimu(1,2) = r_l_i(1,2); lidartoimu(1,3) = TY;
    lidartoimu(2,0) = r_l_i(2,0); lidartoimu(2,1) = r_l_i(2,1); lidartoimu(2,2) = r_l_i(2,2); lidartoimu(2,3) = TZ;
    lidartoimu(3,0) = 0;          lidartoimu(3,1) = 0;          lidartoimu(3,2) = 0;          lidartoimu(3,3) = 1;
    l2i_ni = lidartoimu.inverse();
    k = false;
}

void cloud_dense::cloud_to_Initial_coord(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud)
{
    for (std::size_t i = 0; i < cloud->size(); i++)
    {
        if (3 < cloud->points[i].x || cloud->points[i].x < -3)
        {
            if (-20 < cloud->points[i].y && cloud->points[i].y < 20)
            {
                Eigen::MatrixXd pointxyz(4, 1); // homogeneous point coordinates
                pointxyz(0,0) = cloud->points[i].x;
                pointxyz(1,0) = cloud->points[i].y;
                pointxyz(2,0) = cloud->points[i].z;
                pointxyz(3,0) = 1;
                Eigen::MatrixXd n = T_matrix * lidartoimu * pointxyz; // 4x4
                pcl::PointXYZ point_1(n(0,0), n(1,0), n(2,0));
                cloud_new1.push_back(point_1);
                /*
                //===================================================================================================================
                Mat uv(3, 1, CV_64F);
                Mat point = (Mat_<double>(4, 1) << cloud->points[i].x, cloud->points[i].y, cloud->points[i].z, 1);
                uv = M * RT * point;
                if (uv.at<double>(2) == 0) { cout << "uv.at<double>(2)=0" << endl; break; }
                double u = uv.at<double>(0) / uv.at<double>(2);
                double v = uv.at<double>(1) / uv.at<double>(2);
                int px = int(u + 0.5);
                int py = int(v + 0.5);
                //cout << "(u,v)=" << px << "  " << py << endl;
                // Note: image.at<Vec3b>(row, col), but pixel coordinates are (x, y), i.e. (u, v), i.e. (col, row)
                if (0 <= px && px < col && 0 <= py && py < row)
                {
                    Eigen::MatrixXd n = T_matrix * lidartoimu * pointxyz; // 4x4
                    pcl::PointXYZ point_1(n(0,0), n(1,0), n(2,0));
                    cloud_new1.push_back(point_1);
                }
                */
            } //if
        } //if
    } //for
    // Downsample the accumulated cloud.
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud2(new pcl::PointCloud<pcl::PointXYZ>);
    Down_sampling(cloud_new1.makeShared(), cloud2);
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar(new pcl::PointCloud<pcl::PointXYZ>);
    Initial_to_lidar_coord(cloud2, cloud_lidar);
}

void cloud_dense::Initial_to_lidar_coord(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud2, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar)
{
    for (std::size_t i = 0; i < cloud2->size(); i++)
    {
        Eigen::MatrixXd pointxyz(4, 1); // homogeneous point coordinates
        pointxyz(0,0) = cloud2->points[i].x;
        pointxyz(1,0) = cloud2->points[i].y;
        pointxyz(2,0) = cloud2->points[i].z;
        pointxyz(3,0) = 1;
        Eigen::MatrixXd n = l2i_ni * T_ni * pointxyz;
        pcl::PointXYZ point_1(n(0,0), n(1,0), n(2,0));
        cloud_lidar->push_back(point_1);
    }
    // Crop the cloud.
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut(new pcl::PointCloud<pcl::PointXYZ>);
    Cloud_Cut(cloud_lidar, cloud_cut);
    // Keep the effective points for the next iteration.
    Effective_point(cloud_cut);
    Publishe_msg(cloud_cut);
}

void cloud_dense::Cloud_Cut(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut)
{
    /*
    // Pass-through filter along the x axis (commented out)
    pcl::PassThrough<pcl::PointXYZ> pass2;
    pass2.setInputCloud(cloud_lidar);
    pass2.setFilterFieldName("x");
    if (forget_x < 3) { forget_x = 3; }
    pass2.setFilterLimits(forget_x - 1, forget_x + 30);
    pass2.setFilterLimitsNegative(false);
    pass2.filter(*cloud_cut);
    */
    //==========================================================================================
    for (std::size_t i = 0; i < cloud_lidar->size(); i++)
    {
        Mat uv(3, 1, CV_64F);
        Mat point = (Mat_<double>(4, 1) << cloud_lidar->points[i].x, cloud_lidar->points[i].y, cloud_lidar->points[i].z, 1);
        uv = M * RT * point;
        if (uv.at<double>(2) == 0) { cout << "uv.at<double>(2)=0" << endl; break; }
        double u = uv.at<double>(0) / uv.at<double>(2);
        double v = uv.at<double>(1) / uv.at<double>(2);
        int px = int(u + 0.5);
        int py = int(v + 0.5);
        //cout << "(u,v)=" << px << "  " << py << endl;
        // Note: image.at<Vec3b>(row, col), but pixel coordinates are (x, y), i.e. (u, v), i.e. (col, row)
        if (0 <= px && px < col && 0 <= py && py < (row + 100))
        {
            cloud_cut->push_back(cloud_lidar->points[i]);
        }
    }
    /*
    cv::Mat cloudImg(row, col, CV_64FC3, Scalar());
    for (std::size_t i = 0; i < cloud_lidar->size(); i++)
    {
        Mat uv(3, 1, CV_64F);
        Mat point = (Mat_<double>(4, 1) << cloud_lidar->points[i].x, cloud_lidar->points[i].y, cloud_lidar->points[i].z, 1);
        uv = M * RT * point;
        if (uv.at<double>(2) == 0) { cout << "uv.at<double>(2)=0" << endl; break; }
        double u = uv.at<double>(0) / uv.at<double>(2);
        double v = uv.at<double>(1) / uv.at<double>(2);
        int px = int(u + 0.5);
        int py = int(v + 0.5);
        //cout << "(u,v)=" << px << "  " << py << endl;
        // Note: image.at<Vec3b>(row, col), but pixel coordinates are (x, y), i.e. (u, v), i.e. (col, row)
        if (0 <= px && px < col && 0 <= py && py < row)
        {
            cloudImg.at<Vec3d>(py, px)[0] = point.at<double>(0);
            cloudImg.at<Vec3d>(py, px)[1] = point.at<double>(1);
            cloudImg.at<Vec3d>(py, px)[2] = point.at<double>(2);
            cloud_cut->push_back(cloud_lidar->points[i]);
        }
    }
    Publishe_cloudImg_msg(cloudImg);
    */
}

void cloud_dense::Publishe_cloudImg_msg(cv::Mat cloudImg)
{
    cv_bridge::CvImage cvi;
    cvi.header.stamp = time;
    cvi.header.frame_id = "CV_GPS_R_IL";
    cvi.encoding = "64FC3";
    cvi.image = cloudImg;
    sensor_msgs::Image msg;
    cvi.toImageMsg(msg);
    cloud_img_pub.publish(msg);
}

void cloud_dense::Effective_point(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut)
{
    pcl::PointCloud<pcl::PointXYZ> cloud_new2;
    for (std::size_t i = 0; i < cloud_cut->size(); i++)
    {
        Eigen::MatrixXd pointxyz(4, 1); // homogeneous point coordinates
        pointxyz(0,0) = cloud_cut->points[i].x;
        pointxyz(1,0) = cloud_cut->points[i].y;
        pointxyz(2,0) = cloud_cut->points[i].z;
        pointxyz(3,0) = 1;
        Eigen::MatrixXd n = T_matrix * lidartoimu * pointxyz;
        pcl::PointXYZ point_1(n(0,0), n(1,0), n(2,0));
        cloud_new2.push_back(point_1);
    }
    cloud_new1 = cloud_new2;
}
void cloud_dense::Publishe_msg(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut )
{
    // Publish the point cloud.
    sensor_msgs::PointCloud2 output;
    pcl::toROSMsg(*cloud_cut, output);
    output.header.stamp = time;
    output.header.frame_id = "map";
    cloud_pub.publish(output);
    // Publish the pose.
    sensor_msgs::Imu outimu;
    outimu.header.stamp = time;
    outimu.header.frame_id = "lidar_RT";
    outimu.orientation.x = x;
    outimu.orientation.y = y;
    outimu.orientation.z = z;
    outimu.orientation.w = w;
    outimu.angular_velocity.x = ny;
    outimu.angular_velocity.y = nx;
    outimu.angular_velocity.z = -nz;
    imu_pub.publish(outimu);
    // Publish CV--GPS--T--T_ni.
    cv_bridge::CvImage cvi;
    cvi.header.stamp = time;
    cvi.header.frame_id = "cloud_image";
    cvi.encoding = "64FC3";
    cvi.image = cv_g_r;
    sensor_msgs::Image msg;
    cvi.toImageMsg(msg);
    CV_GPS_R_IL.publish(msg);
}
void cloud_dense::get_Quat_gps(const  boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg,const  boost::shared_ptr<const sbg_driver::SbgEkfQuat>& quatmsg)
{
    x = quatmsg->quaternion.x; // quaternion
    y = quatmsg->quaternion.y;
    z = quatmsg->quaternion.z;
    w = quatmsg->quaternion.w;
    tx = gpsmsg->position.y;   // current GPS fix
    ty = gpsmsg->position.x;
    tz = gpsmsg->position.z;
    GetDirectDistance(gx, gy, gz, tx, ty, tz, &nx, &ny, &nz); // compute the GPS displacement
    // Convert the quaternion to a rotation matrix.
    Eigen::Quaterniond quaternion(w, x, y, z);
    Eigen::Matrix3d rotation_matrix;
    rotation_matrix = quaternion.toRotationMatrix();
    T_matrix(0,0) = rotation_matrix(0,0); T_matrix(1,0) = rotation_matrix(1,0); T_matrix(2,0) = rotation_matrix(2,0); T_matrix(3,0) = 0;
    T_matrix(0,1) = rotation_matrix(0,1); T_matrix(1,1) = rotation_matrix(1,1); T_matrix(2,1) = rotation_matrix(2,1); T_matrix(3,1) = 0;
    T_matrix(0,2) = rotation_matrix(0,2); T_matrix(1,2) = rotation_matrix(1,2); T_matrix(2,2) = rotation_matrix(2,2); T_matrix(3,2) = 0;
    T_matrix(0,3) = ny; // x -> longitude, y -> latitude, z -> -height
    T_matrix(1,3) = nx;
    T_matrix(2,3) = -nz;
    T_matrix(3,3) = 1;
    //cout << T_matrix << endl;
    T_ni = T_matrix.inverse();
    Publishe_RT();
}

void cloud_dense::Publishe_RT()
{
    cv_g_r = Mat(4, 4, CV_64FC3, Scalar());
    // Layer 1 -- GPS -- nx-ny-nz -- x-y-z-w
    cv_g_r.at<Vec3d>(0,0)[0] = tx; // GPS fix
    cv_g_r.at<Vec3d>(0,1)[0] = ty;
    cv_g_r.at<Vec3d>(0,2)[0] = tz;
    cv_g_r.at<Vec3d>(1,0)[0] = nx; // displacement in metres
    cv_g_r.at<Vec3d>(1,1)[0] = ny;
    cv_g_r.at<Vec3d>(1,2)[0] = nz;
    cv_g_r.at<Vec3d>(2,0)[0] = x;  // quaternion
    cv_g_r.at<Vec3d>(2,1)[0] = y;
    cv_g_r.at<Vec3d>(2,2)[0] = z;
    cv_g_r.at<Vec3d>(2,3)[0] = w;
    // Layer 2 -- 4x4 rotation, initial frame to IMU frame
    cv_g_r.at<Vec3d>(0,0)[1] = T_matrix(0,0); cv_g_r.at<Vec3d>(0,1)[1] = T_matrix(0,1); cv_g_r.at<Vec3d>(0,2)[1] = T_matrix(0,2); cv_g_r.at<Vec3d>(0,3)[1] = T_matrix(0,3);
    cv_g_r.at<Vec3d>(1,0)[1] = T_matrix(1,0); cv_g_r.at<Vec3d>(1,1)[1] = T_matrix(1,1); cv_g_r.at<Vec3d>(1,2)[1] = T_matrix(1,2); cv_g_r.at<Vec3d>(1,3)[1] = T_matrix(1,3);
    cv_g_r.at<Vec3d>(2,0)[1] = T_matrix(2,0); cv_g_r.at<Vec3d>(2,1)[1] = T_matrix(2,1); cv_g_r.at<Vec3d>(2,2)[1] = T_matrix(2,2); cv_g_r.at<Vec3d>(2,3)[1] = T_matrix(2,3);
    cv_g_r.at<Vec3d>(3,0)[1] = T_matrix(3,0); cv_g_r.at<Vec3d>(3,1)[1] = T_matrix(3,1); cv_g_r.at<Vec3d>(3,2)[1] = T_matrix(3,2); cv_g_r.at<Vec3d>(3,3)[1] = T_matrix(3,3);
    // Layer 3 -- 4x4 rotation, IMU frame to lidar
    cv_g_r.at<Vec3d>(0,0)[2] = lidartoimu(0,0); cv_g_r.at<Vec3d>(0,1)[2] = lidartoimu(0,1); cv_g_r.at<Vec3d>(0,2)[2] = lidartoimu(0,2); cv_g_r.at<Vec3d>(0,3)[2] = lidartoimu(0,3);
    cv_g_r.at<Vec3d>(1,0)[2] = lidartoimu(1,0); cv_g_r.at<Vec3d>(1,1)[2] = lidartoimu(1,1); cv_g_r.at<Vec3d>(1,2)[2] = lidartoimu(1,2); cv_g_r.at<Vec3d>(1,3)[2] = lidartoimu(1,3);
    cv_g_r.at<Vec3d>(2,0)[2] = lidartoimu(2,0); cv_g_r.at<Vec3d>(2,1)[2] = lidartoimu(2,1); cv_g_r.at<Vec3d>(2,2)[2] = lidartoimu(2,2); cv_g_r.at<Vec3d>(2,3)[2] = lidartoimu(2,3);
    cv_g_r.at<Vec3d>(3,0)[2] = lidartoimu(3,0); cv_g_r.at<Vec3d>(3,1)[2] = lidartoimu(3,1); cv_g_r.at<Vec3d>(3,2)[2] = lidartoimu(3,2); cv_g_r.at<Vec3d>(3,3)[2] = lidartoimu(3,3);
}

// Callback function
void cloud_dense::chatterCallback(const  boost::shared_ptr<const sensor_msgs::PointCloud2>& pc2,const  boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg,const  boost::shared_ptr<const sbg_driver::SbgEkfQuat>& quatmsg)
{
    time = pc2->header.stamp;
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_tmp(new pcl::PointCloud<pcl::PointXYZ>); // cloud object
    pcl::fromROSMsg(*pc2, *cloud_tmp);
    // Downsample.
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    Down_sampling(cloud_tmp, cloud);
    // Rotation/translation between the lidar and the INS (first frame only).
    if (k)
    {
        lidar_to_imu(gpsmsg);
    }
    else
    {
        // Extract the quaternion and the GPS fix.
        get_Quat_gps(gpsmsg, quatmsg);
        // Point-cloud processing: transform into the initial frame.
        cloud_to_Initial_coord(cloud);
    } //else
}

int main(int argc, char **argv)
{
    ros::init(argc, argv, "my_cloud_dense");
    cloud_dense cf;
    ros::MultiThreadedSpinner spinner(3);
    spinner.spin();
    //ros::spin();
    return 0;
}
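
The heart of the densification loop is the chain of homogeneous transforms: every lidar point is pushed into the fixed initial frame with T_matrix * lidartoimu, accumulated there, and pulled back into the current lidar frame with the inverses (l2i_ni * T_ni) before cropping and publishing. A self-contained Eigen sketch of that round trip; the identity matrices are stand-ins for the transforms computed in the program above:

#include <Eigen/Dense>
#include <iostream>

int main()
{
    // Stand-ins for the transforms computed in cloud_dense (identity here, for illustration).
    Eigen::Matrix4d lidartoimu = Eigen::Matrix4d::Identity(); // lidar -> IMU
    Eigen::Matrix4d T_matrix   = Eigen::Matrix4d::Identity(); // IMU -> initial/world frame

    Eigen::Vector4d p_lidar(5.0, 1.0, -0.5, 1.0);             // homogeneous lidar point

    // Forward: accumulate the point in the fixed initial frame (as in cloud_to_Initial_coord).
    Eigen::Vector4d p_world = T_matrix * lidartoimu * p_lidar;

    // Backward: bring the accumulated point into the current lidar frame (as in Initial_to_lidar_coord).
    Eigen::Vector4d p_back = lidartoimu.inverse() * T_matrix.inverse() * p_world;

    std::cout << "round-trip error: " << (p_back - p_lidar).norm() << std::endl;
    return 0;
}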

Results

1. Displaying the lidar coordinates at the mouse position

2. Displaying the GPS at the mouse position
