Basic tool (1): OpenVSLAM reads gnss.txt, matches the corresponding image paths, and saves the trajectory to a txt file

Published by MKT-porter on 2024-12-07

Note: watch out for data from frames where tracking is lost.

1 Add a build target

CMakeLists.txt

# custom image-sequence (photo) mode
add_executable(run_image_slam_mydata run_image_slam_mydata.cc util/image_util.cc)
list(APPEND EXECUTABLE_TARGETS run_image_slam_mydata)

  

2 Add the dependency file

API_File_IO.h

#ifndef MYHEADER_H // if MYHEADER_H has not been defined yet
#define MYHEADER_H // define MYHEADER_H

#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>

#include <opencv2/opencv.hpp>
#include <yaml-cpp/yaml.h> // required: readConfig below uses YAML::Load and yaml-cpp exceptions
#include <popl.hpp>        // popl command-line parsing library
 

#include <Eigen/Dense>
#include <Eigen/Geometry>


using namespace std;
using namespace cv;
using namespace YAML;


// Read the GNSS data file (one record per line: image name, latitude, longitude, altitude)
vector<tuple<string, double, double, double>> readGNSSData(const string& filePath) {
    vector<tuple<string, double, double, double>> data;
    ifstream file(filePath);
    if (!file.is_open()) {
        cerr << "Failed to open file: " << filePath << endl;
        return data;
    }
    string line;
    while (getline(file, line)) {
        stringstream ss(line);
        string imgName;
        double lat, lon, alt;
        if (ss >> imgName >> lat >> lon >> alt) {
            data.emplace_back(imgName, lat, lon, alt);
        }
    }
    file.close();
    return data;
}
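// For reference, readGNSSData expects each line of gnss.txt to hold an image
// file name followed by latitude, longitude and altitude separated by whitespace.
// A hypothetical example (the values below are illustrative only):
//   DJI_0001.JPG 34.033727 108.764061 415.0
//   DJI_0002.JPG 34.033731 108.764080 415.2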
 
// Read an image given its directory and file name
Mat readImage(const string& imageDir, const string& imageName) {
    string filePath = imageDir + "/" + imageName;
    Mat img = imread(filePath);
    if (img.empty()) {
        cerr << "Failed to read image: " << filePath << endl;
    }
    return img;
}



// Read a string value from the YAML config file
string readConfig(const string filePath, string DataName) {
    ifstream file(filePath);
    if (!file.is_open()) {
        cerr << "Failed to open config file: " << filePath << endl;
        return "-1";
    }
 
    YAML::Node config_yaml = YAML::Load(file);
    try {
        std::string Data_context = config_yaml[DataName].as<string>();
        return Data_context;
       
    } catch (const YAML::Exception& e) { // also catches missing keys, not only bad conversions
        cerr << "Error parsing config file: " << e.what() << endl;
        return "-1";
    }
}



std::string Get_ConfigPath(int argc, char* argv[]) {

    popl::OptionParser op("Allowed options");

    // define the -c/--config option, which takes a path argument
    auto configPath = op.add<popl::Value<std::string>>("c", "config", "Path to the config file");

    // parse the options
    op.parse(argc, argv);

    if(configPath->is_set()){

        std::string configPath_ = configPath->value();
        std::cout << " 配置檔案路徑  " << configPath_ << std::endl;
        return configPath_;

    }
    else{
        
        std::cout << "讀取路徑失敗" << std::endl;
        return "fail";   
    }
}


/*

./run_image_slam \
-c  data/config.yaml  \

*/




void main_test(int argc, char* argv[]) {

    // variable declarations (these are globals in the main file)
    std::string Config_PATH;
    std::string GNSS_PSTH;
    std::string img_dir_path;
    std::string vocab_file_path;
    std::string map_db_path;

    vector<tuple<string, double, double, double>> gnssData;

    std::string config_file_path = Get_ConfigPath(argc, argv);

    // initialize the path variables from the config file
    Config_PATH = readConfig(config_file_path, "Config_PATH");
    GNSS_PSTH = Config_PATH + "/" + readConfig(config_file_path, "GNSS_PSTH");
    img_dir_path = Config_PATH + "/" + readConfig(config_file_path, "image_dir_path");
    vocab_file_path = Config_PATH + "/" + readConfig(config_file_path, "vocab_file_path");
    map_db_path = Config_PATH + "/" + readConfig(config_file_path, "map_db_path");

    
    std::cout<<"===============配置檔案資訊=============== " <<std::endl;
    std::cout<<"config_file_path : " << config_file_path <<std::endl;

    std::cout<<"資料根目錄 : " << Config_PATH <<std::endl;
    std::cout<<"GNSS_PSTH : " << GNSS_PSTH <<std::endl;
    std::cout<<"image_dir_path : " << img_dir_path <<std::endl;
    std::cout<<"vocab_file_path : " << vocab_file_path <<std::endl;
    std::cout<<"map_db_path : " << map_db_path <<std::endl;


    // read the GNSS data
    gnssData = readGNSSData(GNSS_PSTH);

    // iterate over the GNSS records and read the corresponding images
    for (const auto& [imgName, lat, lon, alt] : gnssData) {
        Mat img = readImage(img_dir_path, imgName);
        if (!img.empty()) {
            // the image can be processed here, e.g. displayed or saved
            imshow("Image", img);
            waitKey(0);  // press any key to show the next image
        }
    }
     
    // iteration style 2: index-based access to the tuples (values unused here, shown only as an example)
    for (unsigned int i = 0; i < gnssData.size(); ++i) {
        std::string imgName = std::get<0>(gnssData[i]);
        double lat = std::get<1>(gnssData[i]);
        double lon = std::get<2>(gnssData[i]);
        double alt = std::get<3>(gnssData[i]);
    }

}


/*
camera_rtk.yaml

#==============#
# Camera parameters #
#==============#
  
Camera.name: "RTK"
Camera.setup: "monocular" # monocular "Monocular", stereo "Stereo", depth camera "RGBD"
Camera.model: "perspective" # "perspective" for a normal camera, "fisheye" for a fisheye camera

Config_PATH: "/home/dongdong/2project/0data/RTK/data_1_nwpuUp/data3_1130_13pm/300_map_12pm/"
image_dir_path: "images"
GNSS_PSTH: slam_config/gnss.txt
vocab_file_path: slam_config/orb_vocab.dbow2
map_db_path: slam_config/Map_GNSS.msg


# camera parameters
Camera.fps: 30
Camera.cols: 1805
Camera.rows: 1203
  
# intrinsics
Camera.fx: 1193.7076128098686
Camera.fy: 1193.1735265967602
Camera.cx: 910.8785687265895
Camera.cy: 602.174293145834
  
# distortion coefficients
Camera.k1: 0.0
Camera.k2: 0.0
Camera.p1: 0.0
Camera.p2: 0.0
Camera.k3: 0.0
Camera.k4: 0.0
  
  
Camera.color_order: "RGB" #"Gray", "RGB", "BGR"

Initializer.num_min_triangulated_pts: 20
Initializer.parallax_deg_threshold: 1
Feature.max_num_keypoints: 3000
#===========================#
# Viewer parameters #
#===========================#

PangolinViewer.keyframe_size: 0.07
PangolinViewer.keyframe_line_width: 2
PangolinViewer.graph_line_width: 1
PangolinViewer.point_size: 2
PangolinViewer.camera_size: 0.08
PangolinViewer.camera_line_width: 3
PangolinViewer.viewpoint_x: 0
PangolinViewer.viewpoint_y: -0.65
PangolinViewer.viewpoint_z: -1.9
PangolinViewer.viewpoint_f: 400

 */




void Pose_T_to_t_and_q(Eigen::Matrix4d transform, Eigen::Vector3d &t, Eigen::Quaterniond &q){

    // extract the rotation matrix from the transform
    Eigen::Matrix3d rotation = transform.block<3, 3>(0, 0);

    // convert the rotation matrix to a quaternion
    Eigen::Quaterniond quaternion(rotation);

    // extract the translation vector from the transform
    Eigen::Vector3d translation = transform.block<3, 1>(0, 3);

    t = translation;
    q = quaternion;
}
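
// Hypothetical inverse helper (not in the original): rebuild a 4x4 transform
// from a translation vector and a quaternion, useful when loading poses back.
Eigen::Matrix4d Pose_t_and_q_to_T(const Eigen::Vector3d& t, const Eigen::Quaterniond& q){

    Eigen::Matrix4d transform = Eigen::Matrix4d::Identity();

    // rotation block from the (normalized) quaternion
    transform.block<3, 3>(0, 0) = q.normalized().toRotationMatrix();

    // translation block
    transform.block<3, 1>(0, 3) = t;

    return transform;
}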


#include <filesystem>
#include <string>

void WritePosetoTxtFile(string save_txtname, std::vector<std::tuple<Eigen::Matrix4d, std::string>> poses_names){

    // open the file for writing (overwrites existing content)
    std::ofstream file(save_txtname);
    if (!file.is_open()) {
        std::cerr << "Failed to open file for writing: " << save_txtname << std::endl;
        return; // bail out instead of writing to a closed stream
    }

    for (const auto& pose_name : poses_names) {

        std::string name = std::get<1>(pose_name);
        Eigen::Matrix4d transform = std::get<0>(pose_name);

        // quaternion computed from the rotation block
        Eigen::Quaterniond q;
        // translation extracted from the transform
        Eigen::Vector3d t;
        Pose_T_to_t_and_q(transform, t, q);

        std::cout << "Name: " << name << std::endl;

        // write the name, translation vector and quaternion to the file
        file << name << " ";
        file << t[0] << " ";
        file << t[1] << " ";
        file << t[2] << " ";
        file << q.w() << " " << q.x() << " " << q.y() << " " << q.z() << std::endl;
    }
    // close the file
    file.close();
}
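
// Sketch of a matching reader (hypothetical helper, not in the original):
// parses the line format written above, i.e. "name tx ty tz qw qx qy qz".
std::vector<std::tuple<Eigen::Matrix4d, std::string>> ReadPoseFromTxtFile(const std::string& load_txtname){

    std::vector<std::tuple<Eigen::Matrix4d, std::string>> poses_names;

    std::ifstream file(load_txtname);
    if (!file.is_open()) {
        std::cerr << "Failed to open file for reading: " << load_txtname << std::endl;
        return poses_names;
    }

    std::string line;
    while (std::getline(file, line)) {
        std::stringstream ss(line);
        std::string name;
        double tx, ty, tz, qw, qx, qy, qz;
        if (ss >> name >> tx >> ty >> tz >> qw >> qx >> qy >> qz) {
            // rebuild the 4x4 transform from translation + quaternion
            Eigen::Matrix4d T = Eigen::Matrix4d::Identity();
            T.block<3, 3>(0, 0) = Eigen::Quaterniond(qw, qx, qy, qz).normalized().toRotationMatrix();
            T.block<3, 1>(0, 3) = Eigen::Vector3d(tx, ty, tz);
            poses_names.emplace_back(T, name);
        }
    }
    return poses_names;
}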



#endif // MYHEADER_H (end of include guard)

  

3 Add the new file I/O functions to the main file

Key modifications

1 Add the header include and global variables

// included header
#include "API_File_IO.h"

// global variable declarations
std::string Config_PATH;
std::string GNSS_PSTH;
std::string img_dir_path;
std::string vocab_file_path;
std::string map_db_path;

// GNSS data read from gnss.txt
vector<tuple<string, double, double, double>> gnssData;
// estimated poses to be saved
vector<tuple<Eigen::Matrix4d,std::string>> poses_names;

  

2 Read the parameters from the config path argument

Launch command reference

./run_image_slam \
-c  /home/dongdong/2project/1salm/1openvsalm/openvslam/data/camera_rtk.yaml  \

Code that obtains the path

    //main_test(argc, argv);
    std::string config_file_path = Get_ConfigPath(argc, argv);

    // initialize the global variables
    Config_PATH = readConfig(config_file_path, "Config_PATH");
    GNSS_PSTH = Config_PATH + "/" + readConfig(config_file_path, "GNSS_PSTH");
    img_dir_path = Config_PATH + "/" + readConfig(config_file_path, "image_dir_path");
    vocab_file_path = Config_PATH + "/" + readConfig(config_file_path, "vocab_file_path");
    map_db_path = Config_PATH + "/" + readConfig(config_file_path, "map_db_path");

    std::cout<<"===============配置檔案資訊=============== " <<std::endl;

    std::cout<<"config_file_path : " << config_file_path <<std::endl;

    std::cout<<"資料根目錄 : " << Config_PATH <<std::endl;
    std::cout<<"GNSS_PSTH : " << GNSS_PSTH <<std::endl;
    std::cout<<"Image_PATH : " << img_dir_path <<std::endl;
    std::cout<<"vocab_file_path : " << vocab_file_path <<std::endl;
    std::cout<<"map_db_path : " << map_db_path <<std::endl;

  

3 Loop over the images and feed them into tracking

Timestamps (a synthetic timestamp of i*2.0 seconds per frame is used here)

  for (unsigned int i = 0; i < gnssData.size(); ++i) {


            std::string imgName = std::get<0>(gnssData[i]);
            double lat = std::get<1>(gnssData[i]);
            double lon = std::get<2>(gnssData[i]);
            double alt = std::get<3>(gnssData[i]);


            const auto  img = readImage(img_dir_path, imgName);

            double timestamp_ =  i*2.0;
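
            // Alternative (sketch): derive the timestamp from the configured frame
            // rate instead of the fixed 2 s spacing; assumes cfg->camera_->fps_ is
            // in scope, as it is inside mono_tracking in the full code below.
            // double timestamp_ = static_cast<double>(i) / cfg->camera_->fps_;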

  

4 Append each tracking result to the pose queue

            if (!img.empty() && (i % frame_skip == 0)) {
                // input the current frame and estimate the camera pose
                Eigen::Matrix4d curent_pose = SLAM.feed_monocular_frame(img, timestamp_, mask);
                poses_names.emplace_back(curent_pose, imgName);
   
            }
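
Regarding the note at the top about lost tracking: the block above queues whatever pose feed_monocular_frame returns. A minimal sketch of skipping degenerate poses before queueing them is shown below; it assumes a lost frame comes back as a non-finite or identity matrix, which should be verified against your OpenVSLAM build.

            if (!img.empty() && (i % frame_skip == 0)) {
                Eigen::Matrix4d curent_pose = SLAM.feed_monocular_frame(img, timestamp_, mask);
                // assumption: a lost frame yields a non-finite or identity pose
                if (curent_pose.allFinite() && !curent_pose.isIdentity(1e-9)) {
                    poses_names.emplace_back(curent_pose, imgName);
                }
            }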

  

5 Save the resulting trajectory

        WritePosetoTxtFile("1_1_frame_slam_enu.txt",poses_names);
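
Each line of 1_1_frame_slam_enu.txt follows the format produced by WritePosetoTxtFile: image name, translation, then quaternion (w x y z). A hypothetical example line:

        DJI_0001.JPG 1.23 -0.45 0.07 0.998 0.010 -0.020 0.050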

  

  

Full code

#include "util/image_util.h"

#ifdef USE_PANGOLIN_VIEWER
#include "pangolin_viewer/viewer.h"
#elif USE_SOCKET_PUBLISHER
#include "socket_publisher/publisher.h"
#endif

#include "openvslam/system.h"
#include "openvslam/config.h"

#include <iostream>
#include <chrono>
#include <numeric>

#include <opencv2/core/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <spdlog/spdlog.h>
#include <popl.hpp>

#ifdef USE_STACK_TRACE_LOGGER
#include <glog/logging.h>
#endif

#ifdef USE_GOOGLE_PERFTOOLS
#include <gperftools/profiler.h>
#endif


// included header
#include "API_File_IO.h"

// global variable declarations
std::string Config_PATH;
std::string GNSS_PSTH;
std::string img_dir_path;
std::string vocab_file_path;
std::string map_db_path;

// GNSS data read from gnss.txt
vector<tuple<string, double, double, double>> gnssData;
// estimated poses to be saved
vector<tuple<Eigen::Matrix4d,std::string>> poses_names;




void mono_tracking(const std::shared_ptr<openvslam::config>& cfg,
                   const std::string& vocab_file_path, const std::string& image_dir_path, const std::string& mask_img_path,
                   const unsigned int frame_skip, const bool no_sleep, const bool auto_term,
                   const bool eval_log, const std::string& map_db_path) {
    // load the mask image
    const cv::Mat mask = mask_img_path.empty() ? cv::Mat{} : cv::imread(mask_img_path, cv::IMREAD_GRAYSCALE);

    const image_sequence sequence(image_dir_path, cfg->camera_->fps_);
    const auto frames = sequence.get_frames();

    // build a SLAM system
    openvslam::system SLAM(cfg, vocab_file_path);
    // startup the SLAM process
    SLAM.startup();

#ifdef USE_PANGOLIN_VIEWER
    pangolin_viewer::viewer viewer(cfg, &SLAM, SLAM.get_frame_publisher(), SLAM.get_map_publisher());
#elif USE_SOCKET_PUBLISHER
    socket_publisher::publisher publisher(cfg, &SLAM, SLAM.get_frame_publisher(), SLAM.get_map_publisher());
#endif

    std::vector<double> track_times;
    track_times.reserve(frames.size());

    // run the SLAM in another thread
    std::thread thread([&]() {

        // iterate over the GNSS records (alternative: a range-based for over
        // gnssData, as shown in main_test in API_File_IO.h)
        for (unsigned int i = 0; i < gnssData.size(); ++i) {


            std::string imgName = std::get<0>(gnssData[i]);
            double lat = std::get<1>(gnssData[i]);
            double lon = std::get<2>(gnssData[i]);
            double alt = std::get<3>(gnssData[i]);


            const auto  img = readImage(img_dir_path, imgName);

            double timestamp_ =  i*2.0;

            //const auto img = cv::imread(frame.img_path_, cv::IMREAD_UNCHANGED);

            const auto tp_1 = std::chrono::steady_clock::now();

            if (!img.empty() && (i % frame_skip == 0)) {
                // input the current frame and estimate the camera pose
                Eigen::Matrix4d curent_pose = SLAM.feed_monocular_frame(img, timestamp_, mask);
                poses_names.emplace_back(curent_pose, imgName);
   
            }

            const auto tp_2 = std::chrono::steady_clock::now();

            const auto track_time = std::chrono::duration_cast<std::chrono::duration<double>>(tp_2 - tp_1).count();
            if (i % frame_skip == 0) {
                track_times.push_back(track_time);
            }

            // wait until the timestamp of the next frame
            if (!no_sleep && i < frames.size() - 1) {
                const auto wait_time = frames.at(i + 1).timestamp_ - (timestamp_ + track_time);
                if (0.0 < wait_time) {
                    std::this_thread::sleep_for(std::chrono::microseconds(static_cast<unsigned int>(wait_time * 1e6)));
                }
            }

            // check if the termination of SLAM system is requested or not
            if (SLAM.terminate_is_requested()) {
                break;
            }
        }


        WritePosetoTxtFile("1_1_frame_slam_enu.txt",poses_names);

        // wait until the loop BA is finished
        while (SLAM.loop_BA_is_running()) {
            std::this_thread::sleep_for(std::chrono::microseconds(5000));
        }

        // automatically close the viewer
#ifdef USE_PANGOLIN_VIEWER
        if (auto_term) {
            viewer.request_terminate();
        }
#elif USE_SOCKET_PUBLISHER
        if (auto_term) {
            publisher.request_terminate();
        }
#endif
    });

    // run the viewer in the current thread
#ifdef USE_PANGOLIN_VIEWER
    viewer.run();
#elif USE_SOCKET_PUBLISHER
    publisher.run();
#endif

    thread.join();

    // shutdown the SLAM process
    SLAM.shutdown();

    if (eval_log) {
        // output the trajectories for evaluation
        SLAM.save_frame_trajectory("frame_trajectory.txt", "TUM");
        SLAM.save_keyframe_trajectory("keyframe_trajectory.txt", "TUM");
        // output the tracking times for evaluation
        std::ofstream ofs("track_times.txt", std::ios::out);
        if (ofs.is_open()) {
            for (const auto track_time : track_times) {
                ofs << track_time << std::endl;
            }
            ofs.close();
        }
    }

    if (!map_db_path.empty()) {
        // output the map database
        SLAM.save_map_database(map_db_path);
    }

    std::sort(track_times.begin(), track_times.end());
    const auto total_track_time = std::accumulate(track_times.begin(), track_times.end(), 0.0);
    std::cout << "median tracking time: " << track_times.at(track_times.size() / 2) << "[s]" << std::endl;
    std::cout << "mean tracking time: " << total_track_time / track_times.size() << "[s]" << std::endl;
}


/*

./run_image_slam \
-c  /home/dongdong/2project/1salm/1openvsalm/openvslam/data/camera_rtk.yaml  \


*/



int main(int argc, char* argv[]) {
#ifdef USE_STACK_TRACE_LOGGER
    google::InitGoogleLogging(argv[0]);
    google::InstallFailureSignalHandler();
#endif

    //main_test(argc, argv);
    std::string config_file_path = Get_ConfigPath(argc, argv);

    // initialize the global variables
    Config_PATH = readConfig(config_file_path, "Config_PATH");
    GNSS_PSTH = Config_PATH + "/" + readConfig(config_file_path, "GNSS_PSTH");
    img_dir_path = Config_PATH + "/" + readConfig(config_file_path, "image_dir_path");
    vocab_file_path = Config_PATH + "/" + readConfig(config_file_path, "vocab_file_path");
    map_db_path = Config_PATH + "/" + readConfig(config_file_path, "map_db_path");

    std::cout<<"===============配置檔案資訊=============== " <<std::endl;

    std::cout<<"config_file_path : " << config_file_path <<std::endl;

    std::cout<<"資料根目錄 : " << Config_PATH <<std::endl;
    std::cout<<"GNSS_PSTH : " << GNSS_PSTH <<std::endl;
    std::cout<<"Image_PATH : " << img_dir_path <<std::endl;
    std::cout<<"vocab_file_path : " << vocab_file_path <<std::endl;
    std::cout<<"map_db_path : " << map_db_path <<std::endl;


    // read the GNSS data
    gnssData = readGNSSData(GNSS_PSTH);

    // iterate over the GNSS records and read the corresponding images
    // for (const auto& [imgName, lat, lon, alt] : gnssData) {
    //     Mat img = readImage(Image_PATH, imgName);
    //     if (!img.empty()) {
    //         // the image can be processed here, e.g. displayed or saved
    //         imshow("Image", img);
    //         waitKey(0);  // press any key to show the next image
    //     }
    // }



    // create options
    popl::OptionParser op("Allowed options");
    auto help = op.add<popl::Switch>("h", "help", "produce help message");
    //auto vocab_file_path = op.add<popl::Value<std::string>>("v", "vocab", "vocabulary file path");
    //auto img_dir_path = op.add<popl::Value<std::string>>("i", "img_dir", "directory path which contains images");
    //auto config_file_path = op.add<popl::Value<std::string>>("c", "config", "config file path");
    auto mask_img_path = op.add<popl::Value<std::string>>("", "mask", "mask image path", "");
    auto frame_skip = op.add<popl::Value<unsigned int>>("", "frame-skip", "interval of frame skip", 1);
    auto no_sleep = op.add<popl::Switch>("", "no-sleep", "not wait for next frame in real time");
    auto auto_term = op.add<popl::Switch>("", "auto-term", "automatically terminate the viewer");
    auto debug_mode = op.add<popl::Switch>("", "debug", "debug mode");
    auto eval_log = op.add<popl::Switch>("", "eval-log", "store trajectory and tracking times for evaluation");
    //auto map_db_path = op.add<popl::Value<std::string>>("p", "map-db", "store a map database at this path after SLAM", "");
    try {
        op.parse(argc, argv);
    }
    catch (const std::exception& e) {
        std::cerr << e.what() << std::endl;
        std::cerr << std::endl;
        std::cerr << op << std::endl;
        return EXIT_FAILURE;
    }

    // check validness of options
    if (help->is_set()) {
        std::cerr << op << std::endl;
        return EXIT_FAILURE;
    }
    // if (!vocab_file_path->is_set() || !img_dir_path->is_set() || !config_file_path->is_set()) {
    //     std::cerr << "invalid arguments" << std::endl;
    //     std::cerr << std::endl;
    //     std::cerr << op << std::endl;
    //     return EXIT_FAILURE;
    // }
    if (vocab_file_path=="-1" || img_dir_path=="-1" || config_file_path=="-1") {
        std::cerr << "invalid arguments" << std::endl;
        std::cerr << std::endl;
        std::cerr << op << std::endl;
        return EXIT_FAILURE;
    }
    // setup logger
    spdlog::set_pattern("[%Y-%m-%d %H:%M:%S.%e] %^[%L] %v%$");
    if (debug_mode->is_set()) {
        spdlog::set_level(spdlog::level::debug);
    }
    else {
        spdlog::set_level(spdlog::level::info);
    }

    // load configuration
    std::shared_ptr<openvslam::config> cfg;
    try {
        cfg = std::make_shared<openvslam::config>(config_file_path);
    }
    catch (const std::exception& e) {
        std::cerr << e.what() << std::endl;
        return EXIT_FAILURE;
    }

#ifdef USE_GOOGLE_PERFTOOLS
    ProfilerStart("slam.prof");
#endif

    // run tracking
    // if (cfg->camera_->setup_type_ == openvslam::camera::setup_type_t::Monocular) {
    //     mono_tracking(cfg, vocab_file_path->value(), img_dir_path->value(), mask_img_path->value(),
    //                   frame_skip->value(), no_sleep->is_set(), auto_term->is_set(),
    //                   eval_log->is_set(), map_db_path->value());

    if (cfg->camera_->setup_type_ == openvslam::camera::setup_type_t::Monocular) {
        mono_tracking(cfg, vocab_file_path, img_dir_path, mask_img_path->value(),
                      frame_skip->value(), no_sleep->is_set(), auto_term->is_set(),
                      eval_log->is_set(), map_db_path);
    }
    else {
        throw std::runtime_error("Invalid setup type: " + cfg->camera_->get_setup_type_string());
    }

#ifdef USE_GOOGLE_PERFTOOLS
    ProfilerStop();
#endif

    return EXIT_SUCCESS;
}

  

Configuration file

Camera.color_order: RGB
Camera.cols: 1805
Camera.cx: 910.8785687265895
Camera.cy: 602.174293145834
Camera.fps: 10
Camera.fx: 1193.7076128098686
Camera.fy: 1193.1735265967602
Camera.k1: 0
Camera.k2: 0
Camera.k3: 0
Camera.k4: 0
Camera.model: perspective
Camera.name: NWPU monocular
Camera.p1: 0
Camera.p2: 0
Camera.rows: 1203
Camera.setup: monocular
Config_PATH: /home/dongdong/2project/0data/RTK/data_1_nwpuUp/data3_1130_13pm//280_260/
Feature.max_num_keypoints: 3000
Feature.num_levels: 8
Feature.scale_factor: 1.2
Fixed.altitude: 400.0
Fixed.altitude_flag: 0
GNSS_Have: 1
GNSS_PSTH: slam_config/gnss.txt
GNSS_USE: 1
Initial.alt: 415
Initial.lat: 34.033727638888884
Initial.lon: 108.76406197222222
Loop_Th: 80.0
Map.Ininterval: 10
Map.InsertFps: 20
Map.alt: 715
Marker.Num: 0
PangolinViewer.camera_line_width: 3
PangolinViewer.camera_size: 0.08
PangolinViewer.graph_line_width: 1
PangolinViewer.keyframe_line_width: 2
PangolinViewer.keyframe_size: 0.07
PangolinViewer.point_size: 2
PangolinViewer.viewpoint_f: 400
PangolinViewer.viewpoint_x: 0
PangolinViewer.viewpoint_y: -0.65
PangolinViewer.viewpoint_z: -1.9
Relocalize_KF_Th: 3.0
Relocalize_Th: 80.0
Save.data: 1
Save.newmap: 1
Tracking_CF_Th: 10.0
V_Instant_Th: 200.0
WorkMode: map_onlynewData
WorkMode_1: map_all_keep_allKfms
WorkMode_2: map_all
WorkMode_3: location_only
WorkMode_4: map_onlynewData
hAcc: 1.0
image_dir_path: images
image_dir_type: JPG
map_db_path: slam_out/Map_GNSS.msg
mask_img_path: none.jpg
op.Global_Optimize_Th: 1.0
op.Remove_Kayframe_Th: 6.0
op.Second_Optimize_Th: 0
op.is_Second_Optimize: 0
sRt_colmap2gnss: sparse/srt_colmap2gnss.yaml
sRt_slam2gnss: slam_out/srt_slam2gnss.yaml
vAcc: 1.0
vocab_file_path: slam_config/orb_vocab.dbow2

  
