ros重置后地址_从零开始丨INDEMIND双目惯性模组ROS平台下实时ORB-SLAM记录教程
本文涉及很多代碼及文字,排版、文字錯誤請見諒。
閱讀時間預計30分鐘
本文涉及圖像、數據均由INDEMIND雙目視覺慣性模組采集
為了防止各位同學修改出錯,我們把修改好的代碼及文件上傳至GitHub,各位同學按教程修改后,可根據我們提供的代碼進行對比,確保萬無一失,GitHub連接:
https://github.com/INDEMINDtech/ORB-SLAM2-
一:環境介紹
系統:Ubuntu 16.04 ROS
ORB依賴庫:Pangolin、OpenCV、Eigen3、DBoW2、g2o,ros
二:下載SDK及ORB-SLAM源碼
下載地址:
- SDK:
- ORB-SLAM:
三:修改源碼
1、下載好SDK之后,進入SDK-Linux/demo_ros/src目錄。將下載好的ORB-SLAM源碼放在該目錄下,并對其改名,改為 ORB_SLAM
2. 進入ORB_SLAM/Examples/Stereo目錄下,修改 stereo_euroc.cc 文件。
1)將其橙色區域改成
代碼如下:
#include<iostream> #include<algorithm> #include<fstream> #include<iomanip> #include<chrono> #include<opencv2/core/core.hpp> #include<System.h> #include "ros/ros.h" #include <cv_bridge/cv_bridge.h> #include <sensor_msgs/Image.h> #include "std_msgs/String.h" #include "FileYaml.h" #include <queue> #include <mutex> #include <thread> #include <condition_variable> using namespace std; ros::Subscriber image_l_sub; ros::Subscriber image_r_sub;int image_l_count = 0; queue<cv::Mat> que_image_l; queue<cv::Mat> que_image_r; queue<long double> que_stamp;std::mutex limage_mutex; std::mutex rimage_mutex; std::condition_variable left_data_cond; std::condition_variable right_data_cond; // std::lock_guard<std::mutex>lock(rimage_mutex); // std::thread show_thread{show_image}; //visualization thread // show_thread.detach();cv::Mat imLeft; cv::Mat imRight; ros::Time ros_stamp; long double stamp;修改后如下:
2)在代碼,long double stamp;后添加時間轉換函數
// Convert a ROS nanosecond timestamp to seconds, keeping only the last
// 13 digits of the nanosecond count (the time within a ~10^4 second
// window) so the value fits the precision ORB-SLAM expects.
//
// Fix: the original computed `int b = time/1e13; temp = b*1e13` in
// double arithmetic — nanosecond timestamps (~1.5e18) exceed 2^53, so
// the double round-trip could shift the extracted digits by hundreds of
// nanoseconds. Integer modulo is exact.
long double time_tranform(int64_t time) {
    // Last 13 decimal digits of the nanosecond timestamp (1e13 written
    // as an exact integer literal, not a double).
    const int64_t c = time % 10000000000000LL;
    // The last 9 digits are the fractional (sub-second) part.
    const long double d = c / 1e9L;
    return d;
}
// After modification it looks as follows:
3)刪除LoadImages函數
4)在time_tranform和main函數之間添加左圖回調函數imagelCallback,添加右圖回調函數imagerCallback,以及一些變量的定義
代碼如下:
//image_l回調函數 void imagelCallback(const sensor_msgs::ImageConstPtr&msg) {cv_bridge::CvImagePtr cv_ptr;cv_ptr = cv_bridge::toCvCopy(msg, "mono8");cv_ptr->image.copyTo(imLeft);image_l_count = cv_ptr->header.seq;ros_stamp = cv_ptr->header.stamp; std::lock_guard<std::mutex> lock_l(limage_mutex);stamp = time_tranform(ros_stamp.toNSec()); //cout<<"ros_stamp: "<<ros_stamp<<endl;que_image_l.push(imLeft);que_stamp.push(stamp); } //image_r回調函數 void imagerCallback(const sensor_msgs::ImageConstPtr&msg) {cv_bridge::CvImagePtr cv_ptr;cv_ptr = cv_bridge::toCvCopy(msg, "mono8");cv_ptr->image.copyTo(imRight); std::lock_guard<std::mutex> lock_r(rimage_mutex);que_image_r.push(imRight); } Camera_Other_Parameter vecCamerasParam; cv::Mat M1l,M2l,M1r,M2r; cv::Mat data_left; cv::Mat data_right; long double frame_time; cv::Mat imLeftRect, imRightRect;5)在cv::Mat imLeftRect, imRightRect;之后,添加show_ORB函數,用于獲取圖像數據和時間,以及啟動ORB的功能
代碼如下:
void show_ORB() { //-----------------ORB_SLAM2 Init--------------------------------------------- const string param1_ORBvoc = "Vocabulary/ORBvoc.txt";const string param3_ORByaml = "Examples/Stereo/EuRoC.yaml";ORB_SLAM2::System SLAM(param1_ORBvoc,param3_ORByaml,ORB_SLAM2::System::STEREO,true); std::this_thread::sleep_for(std::chrono::milliseconds(500)); while(true){ std::this_thread::sleep_for(std::chrono::milliseconds(30)); std::unique_lock<std::mutex> lock_l(limage_mutex);data_left = que_image_l.front();frame_time = que_stamp.front();que_image_l.pop();que_stamp.pop();lock_l.unlock(); std::unique_lock<std::mutex> lock_r(rimage_mutex);data_right = que_image_r.front();que_image_r.pop();lock_r.unlock(); // cout.precision(13); // cout<<"frame: "<<frame_time<<endl;cv::resize(data_left,data_left,cv::Size(vecCamerasParam.cols,vecCamerasParam.rows));cv::resize(data_right,data_right,cv::Size(vecCamerasParam.cols,vecCamerasParam.rows));cv::remap(data_left,imLeftRect,M1l,M2l,cv::INTER_LINEAR);cv::remap(data_right,imRightRect,M1r,M2r,cv::INTER_LINEAR); #ifdef COMPILEDWITHC11 std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now(); #else std::chrono::monotonic_clock::time_point t1 = std::chrono::monotonic_clock::now(); #endifSLAM.TrackStereo(imLeftRect,imRightRect,frame_time); #ifdef COMPILEDWITHC11 std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now(); #else std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now(); #endif double ttrack= std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();// Wait to load the next frame double T = 0;T = que_stamp.front() - frame_time;if(ttrack < T)usleep((T-ttrack)*1e6);/* cv::imshow("left",data_left);cv::imshow("right",data_right);cv::waitKey(1);*/} // Stop all threadsSLAM.Shutdown(); // Save camera trajectorySLAM.SaveTrajectoryTUM("CameraTrajectory.txt"); }修改后如下:
6)將main函數內的內容全部刪除,在main函數內添加ros初始化,讀取配置文件,去畸變,獲取參數,校正,ORB_SLAM的啟動
代碼如下:
ros::init(argc,argv,"ORB_SLAM2");ros::NodeHandle n;image_l_sub = n.subscribe("/module/image_left",100,imagelCallback);image_r_sub = n.subscribe("/module/image_right", 100,imagerCallback);const char *param2_SDKyaml = "/home/indemind/u/SDK-Linux-ros/lib/1604/headset.yaml";readConfig(param2_SDKyaml,vecCamerasParam); //-----------------fisheye rectify---------------------------------------------cv::Mat Q;if(vecCamerasParam.K_l.empty() || vecCamerasParam.K_r.empty() || vecCamerasParam.P_l.empty() || vecCamerasParam.P_r.empty() || vecCamerasParam.R_l.empty() ||vecCamerasParam.R_r.empty() || vecCamerasParam.D_l.empty() || vecCamerasParam.D_r.empty() || vecCamerasParam.rows==0 || vecCamerasParam.cols==0){ cerr << "ERROR: Calibration parameters to rectify stereo are missing!" << endl; return -1;}cv::fisheye::initUndistortRectifyMap(vecCamerasParam.K_l,vecCamerasParam.D_l,vecCamerasParam.R_l,vecCamerasParam.P_l.rowRange(0,3).colRange(0,3),cv::Size(vecCamerasParam.cols,vecCamerasParam.rows),CV_32FC1,M1l,M2l);cv::fisheye::initUndistortRectifyMap(vecCamerasParam.K_r,vecCamerasParam.D_r,vecCamerasParam.R_r,vecCamerasParam.P_r.rowRange(0,3).colRange(0,3),cv::Size(vecCamerasParam.cols,vecCamerasParam.rows),CV_32FC1,M1r,M2r); cout << "the P_l of initUndistortRectifyMap after" << endl; for(int i = 0;i < 3;++i) for(int j = 0;j < 3;++j){ double *ptr = vecCamerasParam.P_l.ptr<double>(i,j); cout << *ptr<<endl;}cout << "the P_r of initUndistortRectifyMap after" << endl; for(int i = 0;i < 3;++i) for(int j = 0;j < 3;++j){ double *ptr = vecCamerasParam.P_r.ptr<double>(i,j); cout << *ptr<<endl;}cv::stereoRectify(vecCamerasParam.K_l,vecCamerasParam.D_l,vecCamerasParam.K_r,vecCamerasParam.D_r,cv::Size(vecCamerasParam.cols,vecCamerasParam.rows),vecCamerasParam.R,vecCamerasParam.t,vecCamerasParam.R_l,vecCamerasParam.R_r,vecCamerasParam.P_l,vecCamerasParam.P_r,Q,cv::CALIB_ZERO_DISPARITY,0);//-----------------fisheye end--------------------------------------------- std::thread 
show_thread{show_ORB}; //visualization threadshow_thread.detach();ros::spin();return 0;修改后如下:
7)將LoadImages函數刪除掉
文件修改完成
3. 進入SDK-Linux/demo_ros/src/ORB_SLAM/include目錄下,新建FileYaml.h文件,將下列代碼填入文件
#ifndef _FILEYAML_H #define _FILEYAML_H#include <iostream> #include <fstream> using namespace std;#include <opencv/cv.h> #include <opencv2/calib3d.hpp> #include <opencv2/features2d/features2d.hpp> #include<opencv2/opencv.hpp>#include <Eigen/Core> #include <opencv2/core/eigen.hpp>#include <sophus/so3.h> #include <sophus/se3.h> #include <chrono> using namespace cv; using namespace Eigen; using namespace Sophus;using cv::FileStorage; using cv::FileNode; struct Camera {Eigen::Matrix4d T_SC;Eigen::Vector2d imageDimension;Eigen::VectorXd distortionCoefficients;Eigen::Vector2d focalLength;Eigen::Vector2d principalPoint; std::string distortionType;Camera() :T_SC(Eigen::Matrix4d::Identity()),imageDimension(Eigen::Vector2d(1280, 800)),distortionCoefficients(Eigen::Vector2d(0, 0)),focalLength(Eigen::Vector2d(700, 700)),principalPoint(Eigen::Vector2d(640, 400)),distortionType("equidistant"){}void write(FileStorage& fs) const //Write serialization for this class {fs << "{:";fs << "T_SC";fs << "[:"; for (int i = 0; i < 4; i++){ for (int j = 0; j < 4; j++){fs << T_SC(i, j);}}fs << "]";fs << "image_dimension";fs << "[:";fs << imageDimension(0) << imageDimension(1);fs << "]";fs << "distortion_coefficients";fs << "[:"; for (int i = 0; i < distortionCoefficients.rows(); i++){fs << distortionCoefficients(i);}fs << "]";fs << "distortion_type" << distortionType;fs << "focal_length";fs << "[:";fs << focalLength(0) << focalLength(1);fs << "]";fs << "principal_point";fs << "[:";fs << principalPoint(0) << principalPoint(1);fs << "]";fs << "}";}void read(const FileNode& node) //Read serialization for this class {cv::FileNode T_SC_node = node["T_SC"];cv::FileNode imageDimensionNode = node["image_dimension"];cv::FileNode distortionCoefficientNode = node["distortion_coefficients"];cv::FileNode focalLengthNode = node["focal_length"];cv::FileNode principalPointNode = node["principal_point"];// extrinsicsT_SC << T_SC_node[0], T_SC_node[1], T_SC_node[2], T_SC_node[3], T_SC_node[4], T_SC_node[5], 
T_SC_node[6], T_SC_node[7], T_SC_node[8], T_SC_node[9], T_SC_node[10], T_SC_node[11], T_SC_node[12], T_SC_node[13], T_SC_node[14], T_SC_node[15];imageDimension << imageDimensionNode[0], imageDimensionNode[1];distortionCoefficients.resize(distortionCoefficientNode.size()); for (size_t i = 0; i<distortionCoefficientNode.size(); ++i) {distortionCoefficients[i] = distortionCoefficientNode[i];}focalLength << focalLengthNode[0], focalLengthNode[1];principalPoint << principalPointNode[0], principalPointNode[1];distortionType = (std::string)(node["distortion_type"]);} };struct Camera_Other_Parameter {Mat K_l;Mat K_r;Mat D_l;Mat D_r;Mat R_l;Mat R_r;Mat P_l;Mat P_r;Mat R;Mat t; int cols; //圖像寬 int rows; //圖像高 };void readConfig(const char *yamlPath,Camera_Other_Parameter &vecCamerasOtherParam);#endif注:該文件為讀取文件的頭文件
4. 進入SDK-Linux/demo_ros/src/ORB_SLAM/src目錄下,新建 FileYaml.cc 文件,將下列代碼填入文件:
#include "FileYaml.h"static void write(FileStorage& fs, const std::string&, const Camera& x) {x.write(fs); }static void read(const FileNode& node, Camera& x, const Camera& default_value = Camera()) { if (node.empty())x = default_value; elsex.read(node); }void readConfig(const char *yamlPath,Camera_Other_Parameter &vecCamerasOtherParam) { std::vector<Camera> vecCameras;cv::FileStorage fin(yamlPath, cv::FileStorage::READ); if(!fin.isOpened()){ cerr << endl << "Failed to load readConfig yamlPath " << endl; return ;}cv::FileNode cameras_node = fin["cameras"]; /* cv::FileNode Rl_node = fin["Rl"];cv::FileNode Rr_node = fin["Rr"];cv::FileNode Pl_node = fin["Pl"];cv::FileNode Pr_node = fin["Pr"];cv::FileNode Kl_node = fin["Kl"];cv::FileNode Kr_node = fin["Kr"];cv::FileNode Dl_node = fin["Dl"];cv::FileNode Dr_node = fin["Dr"];*/fin["Rl"] >> vecCamerasOtherParam.R_l;fin["Rr"] >> vecCamerasOtherParam.R_r;fin["Pl"] >> vecCamerasOtherParam.P_l;fin["Pr"] >> vecCamerasOtherParam.P_r;fin["Kl"] >> vecCamerasOtherParam.K_l;fin["Kr"] >> vecCamerasOtherParam.K_r;fin["Dl"] >> vecCamerasOtherParam.D_l;fin["Dr"] >> vecCamerasOtherParam.D_r;/*vecCamerasOtherParam.R_l = Rl_node.mat();vecCamerasOtherParam.R_r = Rr_node.mat();vecCamerasOtherParam.P_l = Pl_node.mat();vecCamerasOtherParam.P_r = Pr_node.mat();vecCamerasOtherParam.K_l = Kl_node.mat();vecCamerasOtherParam.K_r = Kr_node.mat();vecCamerasOtherParam.D_l = Dl_node.mat();vecCamerasOtherParam.D_r = Dr_node.mat();*/for (cv::FileNodeIterator it = cameras_node.begin(); it != cameras_node.end(); it++){Camera camera;(*it) >> camera;vecCameras.push_back(camera);}//obtain col & rowvecCamerasOtherParam.cols = vecCameras[0].imageDimension(0);vecCamerasOtherParam.rows = vecCameras[0].imageDimension(1); //obtain R & tEigen::Matrix4d T_SC_left;Eigen::Matrix4d T_SC_right;T_SC_left = vecCameras[0].T_SC;T_SC_right = vecCameras[1].T_SC; SE3 T_SC_l(T_SC_left.topLeftCorner(3,3),T_SC_left.topRightCorner(3,1)); SE3 
T_SC_r(T_SC_right.topLeftCorner(3,3),T_SC_right.topRightCorner(3,1));SE3 Tcl_cr = T_SC_l.inverse()*T_SC_r;SE3 Tcr_cl = T_SC_r.inverse()*T_SC_l;Matrix3d R = Tcr_cl.rotation_matrix();Vector3d t = Tcr_cl.translation();//Eigen tranfer to array double * R_ptr= new double[R.size()]; double *t_ptr = new double[t.size()];Map<MatrixXd>(R_ptr, R.rows(), R.cols()) = R;Map<MatrixXd>(t_ptr, t.rows(), t.cols()) = t; cout<<"R_matrix"<<endl; double R_matrix[3][3]; for(int i = 0;i < 3;i++) for(int j = 0;j<3;j++){ //transposeR_matrix[j][i] = R_ptr[i+j*3]; cout<<R_matrix[j][i]<<endl;}cout<<"t_matrix"<<endl; double t_matrix[3]; for(int i = 0;i < 3;i++){t_matrix[i] = t_ptr[i]; cout<<t_matrix[i]<<endl;}vecCamerasOtherParam.R = cv::Mat(3,3,CV_64FC1,R_matrix);vecCamerasOtherParam.t = cv::Mat(3,1,CV_64FC1,t_matrix); }注:該文件為讀取文件的源文件
5. 修改CMakeLists.txt
進入SDK-Linux/demo_ros/src目錄下,修改CMakeLists.txt文件
將ORB中的CMakeLists.txt添加到SDK的CMakeLists.txt下,修改如下
1)
在add_compile_options(-std=c++11)后面添加
# --- ORB-SLAM2 build configuration, appended after add_compile_options(-std=c++11) ---

IF(NOT CMAKE_BUILD_TYPE)
  SET(CMAKE_BUILD_TYPE Release)
ENDIF()

MESSAGE("Build type: " ${CMAKE_BUILD_TYPE})

set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -O3 -march=native ")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -O3 -march=native")

# Check C++11 or C++0x support
include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11)
CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X)
if(COMPILER_SUPPORTS_CXX11)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
  add_definitions(-DCOMPILEDWITHC11)
  message(STATUS "Using flag -std=c++11.")
elseif(COMPILER_SUPPORTS_CXX0X)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
  # Fixed typo: upstream ORB-SLAM2 defines COMPILEDWITHC0X (the article
  # had COMPILEDWITHCd0X).
  add_definitions(-DCOMPILEDWITHC0X)
  message(STATUS "Using flag -std=c++0x.")
else()
  message(FATAL_ERROR "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.")
endif()

LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake_modules)

find_package(OpenCV 3.0 QUIET)
if(NOT OpenCV_FOUND)
  find_package(OpenCV 2.4.3 QUIET)
  if(NOT OpenCV_FOUND)
    message(FATAL_ERROR "OpenCV > 2.4.3 not found.")
  endif()
endif()

#find_package(Eigen3 3.1.0 REQUIRED)
find_package(Pangolin REQUIRED)
include_directories(${Pangolin_INCLUDE_DIRS})
# After modification it looks as follows:
2)
在橙色區域后面添加:
# Eigen
include_directories("/usr/include/eigen3")

# Sophus
find_package(Sophus REQUIRED)
include_directories(${Sophus_INCLUDE_DIRS})

include_directories(
  ${PROJECT_SOURCE_DIR}
  ${PROJECT_SOURCE_DIR}/ORB_SLAM/include
  ${PROJECT_SOURCE_DIR}/ORB_SLAM
  #${EIGEN3_INCLUDE_DIR}
  ${Pangolin_INCLUDE_DIRS}
)

set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/lib)
# 3)
對橙色區域后修改:
# Build the ORB-SLAM2 sources (plus FileYaml.cc) into one shared library.
add_library(${PROJECT_NAME} SHARED
  ORB_SLAM/src/FileYaml.cc
  ORB_SLAM/src/System.cc
  ORB_SLAM/src/Tracking.cc
  ORB_SLAM/src/LocalMapping.cc
  ORB_SLAM/src/LoopClosing.cc
  ORB_SLAM/src/ORBextractor.cc
  ORB_SLAM/src/ORBmatcher.cc
  ORB_SLAM/src/FrameDrawer.cc
  ORB_SLAM/src/Converter.cc
  ORB_SLAM/src/MapPoint.cc
  ORB_SLAM/src/KeyFrame.cc
  ORB_SLAM/src/Map.cc
  ORB_SLAM/src/MapDrawer.cc
  ORB_SLAM/src/Optimizer.cc
  ORB_SLAM/src/PnPsolver.cc
  ORB_SLAM/src/Frame.cc
  ORB_SLAM/src/KeyFrameDatabase.cc
  ORB_SLAM/src/Sim3Solver.cc
  ORB_SLAM/src/Initializer.cc
  ORB_SLAM/src/Viewer.cc
)
# 4)
將橙色區域修改為:
# Link the shared library against OpenCV, Pangolin, Sophus and the
# bundled DBoW2/g2o third-party libraries.
target_link_libraries(${PROJECT_NAME}
  ${OpenCV_LIBS}
  #${EIGEN3_LIBS}
  ${Pangolin_LIBRARIES}
  ${Sophus_LIBRARIES}
  ${PROJECT_SOURCE_DIR}/ORB_SLAM/Thirdparty/DBoW2/lib/libDBoW2.so
  ${PROJECT_SOURCE_DIR}/ORB_SLAM/Thirdparty/g2o/lib/libg2o.so
)
# 5)
完成步驟4)后,繼續添加如下代碼:
# RGB-D example
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/ORB_SLAM/Examples/RGB-D)
add_executable(rgbd_tum ORB_SLAM/Examples/RGB-D/rgbd_tum.cc)
target_link_libraries(rgbd_tum ${PROJECT_NAME} ${Sophus_LIBRARIES})

# Stereo examples
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/ORB_SLAM/Examples/Stereo)
add_executable(stereo_kitti ORB_SLAM/Examples/Stereo/stereo_kitti.cc)
target_link_libraries(stereo_kitti ${PROJECT_NAME} ${Sophus_LIBRARIES})

add_executable(stereo_euroc ORB_SLAM/Examples/Stereo/stereo_euroc.cc)
add_dependencies(stereo_euroc ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
target_link_libraries(stereo_euroc ${PROJECT_NAME} ${Sophus_LIBRARIES} ${catkin_LIBRARIES})

# INDEMIND camera driver node
add_executable(module_driver src/camera_driver.cpp)
add_dependencies(module_driver ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
target_link_libraries(module_driver
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libindem.so
  ${catkin_LIBRARIES}
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libboost_filesystem.so.1.58.0
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libboost_system.so.1.58.0
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libg3logger.so.1.3.0-0
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libnanomsg.so.5
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libopencv_core.so.3.4
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libopencv_imgproc.so.3.4
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libopencv_videoio.so.3.4
  # The article's scrape fused the next three into the .so name
  # ("...so.3.4pthreadstdc++fsdl"); they are separate link items.
  pthread
  stdc++fs
  dl
)

# Monocular examples
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/ORB_SLAM/Examples/Monocular)
add_executable(mono_tum ORB_SLAM/Examples/Monocular/mono_tum.cc)
target_link_libraries(mono_tum ${Sophus_LIBRARIES} ${PROJECT_NAME})
add_executable(mono_kitti ORB_SLAM/Examples/Monocular/mono_kitti.cc)
target_link_libraries(mono_kitti ${Sophus_LIBRARIES} ${PROJECT_NAME})
add_executable(mono_euroc ORB_SLAM/Examples/Monocular/mono_euroc.cc)
target_link_libraries(mono_euroc ${Sophus_LIBRARIES} ${PROJECT_NAME})
# CMakeLists.txt modification complete.
6.編譯依賴庫
進入 ORB_SLAM/Thirdparty/DBoW2 目錄,打開終端,輸入如下命令:
mkdir build
cd build
cmake ..
make

進入 ORB_SLAM/Thirdparty/g2o 目錄,打開終端,輸入如下命令:
mkdir build
cd build
cmake ..
make

在 ORB_SLAM/Thirdparty/g2o 目錄下,新建config.h文件,將下列代碼填入文件:
#ifndef G2O_CONFIG_H
#define G2O_CONFIG_H

/* #undef G2O_OPENMP */
/* #undef G2O_SHARED_LIBS */

// give a warning if Eigen defaults to row-major matrices.
// We internally assume column-major matrices throughout the code.
#ifdef EIGEN_DEFAULT_TO_ROW_MAJOR
#  error "g2o requires column major Eigen matrices (see http://eigen.tuxfamily.org/bz/show_bug.cgi?id=422)"
#endif

#endif
// 7. Build
1)安裝SDK,教程請查看:
開講啦丨雙目慣性模組運行ORB-SLAM算法示例?mp.weixin.qq.com注意:以下SDK依賴環境必須安裝!!!!
安裝 cmake
sudo apt-get install cmake

安裝 google-glog + gflags

sudo apt-get install libgoogle-glog-dev

安裝 BLAS & LAPACK

sudo apt-get install libatlas-base-dev

安裝 SuiteSparse and CXSparse

sudo apt-get install libsuitesparse-dev

進入SDK-Linux/demo_ros目錄下,執行catkin_make命令
8. 執行
1)新打開一個shell,執行roscore命令,如下圖
2)進入SDK-Linux/lib/1604目錄下,執行
sudo -s
sudo chmod 777 ./run.sh

之后執行
./run.sh

如下圖
3)進入SDK-Linux/demo_ros/src/ORB_SLAM目錄下,執行./Examples/Stereo/stereo_euroc命令
注意:此時,將獲得去畸變之后的參數,把下列參數寫入到SDK-Linux/demo_ros/src/ORB_SLAM/src/Tracking.cc 文件中,如下圖
float fx = 597.935; float fy = 597.935; float cx = 476.987; float cy = 442.158;

最后,再次編譯,并執行,即可得到實時ORB
教程至此結束,Demo如下:
https://www.zhihu.com/video/1090996629682991104

高校大使推薦同學購買可獲得獎勵金,購買同學福利多多,點擊了解詳情
提交相關信息,即可獲得100元院校優惠券,點擊了解詳情
提交相關信息,即可享受15天免費試用,試用期購買享大額優惠,點擊了解詳情
總結
以上是生活随笔為你收集整理的ros重置后地址_从零开始丨INDEMIND双目惯性模组ROS平台下实时ORB-SLAM记录教程的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: Centos7下怎么安装Zimbra
- 下一篇: 计算机中arrows指的是什么键