/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <https://www.gnu.org/licenses/>.
*/
#include <signal.h>
#include <stdlib.h>
#include <iostream>
#include <algorithm>
#include <fstream>
#include <chrono>
#include <ctime>
#include <sstream>
#include <cmath>

#include <condition_variable>

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include <librealsense2/rs.hpp>
#include "librealsense2/rsutil.h"

#include <System.h>
using namespace std;
bool b_continue_session;
void exit_loop_handler(int s){
cout << "Finishing session" << endl;
b_continue_session = false;
}
void interpolateData(const std::vector<double> &vBase_times,
std::vector<rs2_vector> &vInterp_data, std::vector<double> &vInterp_times,
const rs2_vector &prev_data, const double &prev_time);
rs2_vector interpolateMeasure(const double target_time,
const rs2_vector current_data, const double current_time,
const rs2_vector prev_data, const double prev_time);
static rs2_option get_sensor_option(const rs2::sensor& sensor)
{
// Sensors usually have several options to control their properties
// such as Exposure, Brightness etc.
std::cout << "Sensor supports the following options:\n" << std::endl;
// The following loop shows how to iterate over all available options
// Starting from 0 until RS2_OPTION_COUNT (exclusive)
for (int i = 0; i < static_cast<int>(RS2_OPTION_COUNT); i++)
{
rs2_option option_type = static_cast<rs2_option>(i);
//SDK enum types can be streamed to get a string that represents them
std::cout << " " << i << ": " << option_type;
// To control an option, use the following api:
// First, verify that the sensor actually supports this option
if (sensor.supports(option_type))
{
std::cout << std::endl;
// Get a human readable description of the option
const char* description = sensor.get_option_description(option_type);
std::cout << " Description : " << description << std::endl;
// Get the current value of the option
float current_value = sensor.get_option(option_type);
std::cout << " Current Value : " << current_value << std::endl;
// To change the value of an option, see the change_sensor_option() sketch after this function
}
else
{
std::cout << " is not supported" << std::endl;
}
}
// This example only lists the options; return the first option as a placeholder
uint32_t selected_sensor_option = 0;
return static_cast<rs2_option>(selected_sensor_option);
}
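// The comments in get_sensor_option() mention a change_sensor_option() helper that
// is not part of this file. The sketch below shows one way such a helper could look;
// the name and clamping behavior are illustrative only (not a librealsense API), but
// it uses only documented rs2::sensor calls: supports(), get_option_range() and set_option().
static void change_sensor_option(const rs2::sensor& sensor, rs2_option option_type, float requested_value)
{
    if (!sensor.supports(option_type))
    {
        std::cerr << "This option is not supported by this sensor" << std::endl;
        return;
    }
    // Clamp the requested value to the range reported by the sensor
    rs2::option_range range = sensor.get_option_range(option_type);
    float value = std::max(range.min, std::min(range.max, requested_value));
    try
    {
        sensor.set_option(option_type, value);
    }
    catch (const rs2::error& e)
    {
        // Some options cannot be changed while the sensor is streaming
        std::cerr << "Failed to set option " << option_type << ": " << e.what() << std::endl;
    }
}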
int main(int argc, char **argv) {
if (argc < 3 || argc > 4) {
cerr << endl
<< "Usage: ./stereo_inertial_realsense_D435i path_to_vocabulary path_to_settings (trajectory_file_name)"
<< endl;
return 1;
}
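// Example invocation, assuming the usual repository layout for the vocabulary and
// settings files (the trajectory file name is optional):
//   ./Examples/Stereo-Inertial/stereo_inertial_realsense_D435i \
//       Vocabulary/ORBvoc.txt Examples/Stereo-Inertial/RealSense_D435i.yaml my_trajectory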
string file_name;
if (argc == 4) {
file_name = string(argv[argc - 1]);
}
struct sigaction sigIntHandler;
sigIntHandler.sa_handler = exit_loop_handler;
sigemptyset(&sigIntHandler.sa_mask);
sigIntHandler.sa_flags = 0;
sigaction(SIGINT, &sigIntHandler, NULL);
b_continue_session = true;
double offset = 0; // timestamp offset applied to the IMU measurements below, in ms
rs2::context ctx;
rs2::device_list devices = ctx.query_devices();
rs2::device selected_device;
if (devices.size() == 0)
{
std::cerr << "No device connected, please connect a RealSense device" << std::endl;
return 1;
}
else
selected_device = devices[0];
std::vector<rs2::sensor> sensors = selected_device.query_sensors();
int index = 0;
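// On a D435i the sensors typically enumerate as 1: Stereo Module (the IR cameras),
// 2: RGB Camera, 3: Motion Module (IMU); the index-based configuration below
// assumes that ordering.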
// We can now iterate the sensors and print their names
for (rs2::sensor sensor : sensors)
if (sensor.supports(RS2_CAMERA_INFO_NAME)) {
++index;
if (index == 1) {
sensor.set_option(RS2_OPTION_ENABLE_AUTO_EXPOSURE, 1);
sensor.set_option(RS2_OPTION_AUTO_EXPOSURE_LIMIT,5000);
sensor.set_option(RS2_OPTION_EMITTER_ENABLED, 0); // switch off emitter
}
// std::cout << " " << index << " : " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
get_sensor_option(sensor);
if (index == 2){
// RGB camera (not used here...)
sensor.set_option(RS2_OPTION_EXPOSURE,100.f);
}
if (index == 3){
sensor.set_option(RS2_OPTION_ENABLE_MOTION_CORRECTION,0);
}
}
// Declare RealSense pipeline, encapsulating the actual device and sensors
rs2::pipeline pipe;
// Create a configuration for configuring the pipeline with a non default profile
rs2::config cfg;
cfg.enable_stream(RS2_STREAM_INFRARED, 1, 640, 480, RS2_FORMAT_Y8, 30);
cfg.enable_stream(RS2_STREAM_INFRARED, 2, 640, 480, RS2_FORMAT_Y8, 30);
cfg.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F);
cfg.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F);
// IMU callback: state shared between the librealsense callback thread and the main tracking loop
std::mutex imu_mutex;
std::condition_variable cond_image_rec;
vector<double> v_accel_timestamp;
vector<rs2_vector> v_accel_data;
vector<double> v_gyro_timestamp;
vector<rs2_vector> v_gyro_data;
double prev_accel_timestamp = 0;
rs2_vector prev_accel_data;
double current_accel_timestamp = 0;
rs2_vector current_accel_data;
vector<double> v_accel_timestamp_sync;
vector<rs2_vector> v_accel_data_sync;
cv::Mat imCV, imRightCV;
int width_img, height_img;
double timestamp_image = -1.0;
bool image_ready = false;
int count_im_buffer = 0; // count dropped frames
auto imu_callback = [&](const rs2::frame& frame)
{
std::unique_lock<std::mutex> lock(imu_mutex);
if(rs2::frameset fs = frame.as<rs2::frameset>())
{
count_im_buffer++;
double new_timestamp_image = fs.get_timestamp()*1e-3;
if(std::abs(timestamp_image - new_timestamp_image) < 0.001){
// cout << "Two frames with the same timeStamp!!!\n";
count_im_buffer--;
return;
}
rs2::video_frame ir_frameL = fs.get_infrared_frame(1);
rs2::video_frame ir_frameR = fs.get_infrared_frame(2);
imCV = cv::Mat(cv::Size(width_img, height_img), CV_8U, (void*)(ir_frameL.get_data()), cv::Mat::AUTO_STEP);
imRightCV = cv::Mat(cv::Size(width_img, height_img), CV_8U, (void*)(ir_frameR.get_data()), cv::Mat::AUTO_STEP);
timestamp_image = fs.get_timestamp()*1e-3;
image_ready = true;
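// Catch the synchronized accel vector up so there is one accel sample per gyro
// sample; no accel data newer than this frame exists yet, so the latest sample
// is held rather than interpolated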
while(v_gyro_timestamp.size() > v_accel_timestamp_sync.size())
{
int index = v_accel_timestamp_sync.size();
double target_time = v_gyro_timestamp[index];
v_accel_data_sync.push_back(current_accel_data);
v_accel_timestamp_sync.push_back(target_time);
}
lock.unlock();
cond_image_rec.notify_all();
}
else if (rs2::motion_frame m_frame = frame.as<rs2::motion_frame>())
{
if (m_frame.get_profile().stream_name() == "Gyro")
{
// It runs at 200Hz
v_gyro_data.push_back(m_frame.get_motion_data());
v_gyro_timestamp.push_back((m_frame.get_timestamp()+offset)*1e-3);
//rs2_vector gyro_sample = m_frame.get_motion_data();
//std::cout << "Gyro:" << gyro_sample.x << ", " << gyro_sample.y << ", " << gyro_sample.z << std::endl;
}
else if (m_frame.get_profile().stream_name() == "Accel")
{
// It runs at 60Hz
prev_accel_timestamp = current_accel_timestamp;
prev_accel_data = current_accel_data;
current_accel_data = m_frame.get_motion_data();
current_accel_timestamp = (m_frame.get_timestamp()+offset)*1e-3;
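// ORB_SLAM3::IMU::Point carries one accel and one gyro sample per timestamp, so
// the slower accel stream is resampled onto the gyro timestamps with a linear
// interpolation: a(t) = a_prev + (a_curr - a_prev)*(t - t_prev)/(t_curr - t_prev)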
while(v_gyro_timestamp.size() > v_accel_timestamp_sync.size())
{
int index = v_accel_timestamp_sync.size();
double target_time = v_gyro_timestamp[index];
rs2_vector interp_data = interpolateMeasure(target_time, current_accel_data, current_accel_timestamp,
prev_accel_data, prev_accel_timestamp);
v_accel_data_sync.push_back(interp_data);
v_accel_timestamp_sync.push_back(target_time);
}
// std::cout << "Accel:" << current_accel_data.x << ", " << current_accel_data.y << ", " << current_accel_data.z << std::endl;
}
}
};
rs2::pipeline_profile pipe_profile = pipe.start(cfg, imu_callback);
vector<ORB_SLAM3::IMU::Point> vImuMeas;
rs2::stream_profile cam_left = pipe_profile.get_stream(RS2_STREAM_INFRARED, 1);
rs2::stream_profile cam_right = pipe_profile.get_stream(RS2_STREAM_INFRARED, 2);
rs2::stream_profile imu_stream = pipe_profile.get_stream(RS2_STREAM_GYRO);
// Keep the extrinsics struct alive; pointing into the returned temporary would leave dangling pointers
rs2_extrinsics extrinsics_bc = cam_left.get_extrinsics_to(imu_stream);
float* Rbc = extrinsics_bc.rotation;
float* tbc = extrinsics_bc.translation;
std::cout << "Tbc (left) = " << std::endl;
for(int i = 0; i<3; i++){
for(int j = 0; j<3; j++)
std::cout << Rbc[i*3 + j] << ", ";
std::cout << tbc[i] << "\n";
}
rs2_extrinsics extrinsics_lr = cam_right.get_extrinsics_to(cam_left);
float* Rlr = extrinsics_lr.rotation;
float* tlr = extrinsics_lr.translation;
std::cout << "Tlr = " << std::endl;
for(int i = 0; i<3; i++){
for(int j = 0; j<3; j++)
std::cout << Rlr[i*3 + j] << ", ";
std::cout << tlr[i] << "\n";
}
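// These printed extrinsics are what the ORB-SLAM3 settings YAML needs for the
// camera-to-IMU transform; in the current settings format that is a 4x4
// row-major matrix (assuming the usual key name IMU.T_b_c1):
//   data: [R00, R01, R02, t0,
//          R10, R11, R12, t1,
//          R20, R21, R22, t2,
//            0,   0,   0,  1]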
rs2_intrinsics intrinsics_left = cam_left.as<rs2::video_stream_profile>().get_intrinsics();
width_img = intrinsics_left.width;
height_img = intrinsics_left.height;
cout << "Left camera: \n";
std::cout << " fx = " << intrinsics_left.fx << std::endl;
std::cout << " fy = " << intrinsics_left.fy << std::endl;
std::cout << " cx = " << intrinsics_left.ppx << std::endl;
std::cout << " cy = " << intrinsics_left.ppy << std::endl;
std::cout << " height = " << intrinsics_left.height << std::endl;
std::cout << " width = " << intrinsics_left.width << std::endl;
std::cout << " Coeff = " << intrinsics_left.coeffs[0] << ", " << intrinsics_left.coeffs[1] << ", " <<
intrinsics_left.coeffs[2] << ", " << intrinsics_left.coeffs[3] << ", " << intrinsics_left.coeffs[4] << ", " << std::endl;
std::cout << " Model = " << intrinsics_left.model << std::endl;
rs2_intrinsics intrinsics_right = cam_right.as<rs2::video_stream_profile>().get_intrinsics();
width_img = intrinsics_right.width;
height_img = intrinsics_right.height;
cout << "Right camera: \n";
std::cout << " fx = " << intrinsics_right.fx << std::endl;
std::cout << " fy = " << intrinsics_right.fy << std::endl;
std::cout << " cx = " << intrinsics_right.ppx << std::endl;
std::cout << " cy = " << intrinsics_right.ppy << std::endl;
std::cout << " height = " << intrinsics_right.height << std::endl;
std::cout << " width = " << intrinsics_right.width << std::endl;
std::cout << " Coeff = " << intrinsics_right.coeffs[0] << ", " << intrinsics_right.coeffs[1] << ", " <<
intrinsics_right.coeffs[2] << ", " << intrinsics_right.coeffs[3] << ", " << intrinsics_right.coeffs[4] << ", " << std::endl;
std::cout << " Model = " << intrinsics_right.model << std::endl;
// Create SLAM system. It initializes all system threads and gets ready to process frames.
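// Arguments: vocabulary path, settings path, sensor type, use viewer, initial frame, trajectory file name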
ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_STEREO, true, 0, file_name);
float imageScale = SLAM.GetImageScale();
double timestamp;
cv::Mat im, imRight;
// Clear IMU vectors
v_gyro_data.clear();
v_gyro_timestamp.clear();
v_accel_data_sync.clear();
v_accel_timestamp_sync.clear();
double t_resize = 0.0;
double t_track = 0.0;
while (!SLAM.isShutDown())
{
std::vector<rs2_vector> vGyro;
std::vector<double> vGyro_times;
std::vector<rs2_vector> vAccel;
std::vector<double> vAccel_times;
{
std::unique_lock<std::mutex> lk(imu_mutex);
// Loop (rather than a single check) to guard against spurious condition-variable wakeups
while(!image_ready)
    cond_image_rec.wait(lk);
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point time_Start_Process = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point time_Start_Process = std::chrono::monotonic_clock::now();
#endif
if(count_im_buffer>1)
cout << count_im_buffer - 1 << " dropped frames\n";
count_im_buffer = 0;
while(v_gyro_timestamp.size() > v_accel_timestamp_sync.size())
{
int index = v_accel_timestamp_sync.size();
double target_time = v_gyro_timestamp[index];
rs2_vector interp_data = interpolateMeasure(target_time, current_accel_data, current_accel_timestamp, prev_accel_data, prev_accel_timestamp);
v_accel_data_sync.push_back(interp_data);
// v_accel_data_sync.push_back(current_accel_data); // 0 interpolation
v_accel_timestamp_sync.push_back(target_time);
}
// Copy the IMU data
vGyro = v_gyro_data;
vGyro_times = v_gyro_timestamp;
vAccel = v_accel_data_sync;
vAccel_times = v_accel_timestamp_sync;
timestamp = timestamp_image;
im = imCV.clone();
imRight = imRightCV.clone();
// Clear IMU vectors
v_gyro_data.clear();
v_gyro_timestamp.clear();
v_accel_data_sync.clear();
v_accel_timestamp_sync.clear();
image_ready = false;
}
// Pack the synchronized gyro/accel samples into ORB-SLAM3 IMU measurements
for(size_t i=0; i<vGyro.size(); ++i)
{
    ORB_SLAM3::IMU::Point lastPoint(vAccel[i].x, vAccel[i].y, vAccel[i].z,
                                    vGyro[i].x, vGyro[i].y, vGyro[i].z,
                                    vGyro_times[i]);
    vImuMeas.push_back(lastPoint);
}
if(imageScale != 1.f)
{
#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
    std::chrono::steady_clock::time_point t_Start_Resize = std::chrono::steady_clock::now();
#else
    std::chrono::monotonic_clock::time_point t_Start_Resize = std::chrono::monotonic_clock::now();
#endif
#endif
    int width = im.cols * imageScale;
    int height = im.rows * imageScale;
    cv::resize(im, im, cv::Size(width, height));
    cv::resize(imRight, imRight, cv::Size(width, height));
#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
    std::chrono::steady_clock::time_point t_End_Resize = std::chrono::steady_clock::now();
#else
    std::chrono::monotonic_clock::time_point t_End_Resize = std::chrono::monotonic_clock::now();
#endif
    t_resize = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t_End_Resize - t_Start_Resize).count();
    SLAM.InsertResizeTime(t_resize);
#endif
}
#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t_Start_Track = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t_Start_Track = std::chrono::monotonic_clock::now();
#endif
#endif
// Stereo images are already rectified.
SLAM.TrackStereo(im, imRight, timestamp, vImuMeas);
#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t_End_Track = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t_End_Track = std::chrono::monotonic_clock::now();
#endif
t_track = t_resize + std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t_End_Track - t_Start_Track).count();
SLAM.InsertTrackTime(t_track);
#endif
// Clear the previous IMU measurements to load the new ones
vImuMeas.clear();
}
cout << "System shutdown!\n";
}
rs2_vector interpolateMeasure(const double target_time,
const rs2_vector current_data, const double current_time,
const rs2_vector prev_data, const double prev_time)
{
// If there is no previous information, the current data is propagated
if(prev_time == 0)
{
return current_data;
}
rs2_vector increment;
rs2_vector value_interp;
if(target_time > current_time) {
value_interp = current_data;
}
else if(target_time > prev_time)
{
increment.x = current_data.x - prev_data.x;
increment.y = current_data.y - prev_data.y;
increment.z = current_data.z - prev_data.z;
double factor = (target_time - prev_time) / (current_time - prev_time);
value_interp.x = prev_data.x + increment.x * factor;
value_interp.y = prev_data.y + increment.y * factor;
value_interp.z = prev_data.z + increment.z * factor;
// (a zero-order hold would simply return current_data here instead)
}
else {
value_interp = prev_data;
}
return value_interp;
}