!! Draft version !!
Some conventions
- physical units are all SI (International System of Units) by default (second, meter, radian ...); otherwise the name is suffixed with the unit (for example, interEyesDistanceMm if the value is in millimeters, or edgeTimestampUs if the timestamp is in microseconds).
- 3D rotation: we use the right-handed convention; we recommend using rotation matrices whenever possible, or quaternions if lower memory usage is needed. The SDK provides tools to convert between 3D rotation representations (see the functions to convert between 3D rotation formats) and to/from other conventions (left-handed); a standalone sketch of such a conversion is shown after this list.
- device frame coordinates are based on the IMU frame coordinates, with X to the right, Y down and Z forward (the main direction of the cameras).
- world frame coordinates are usually aligned with gravity (using the IMU), with X to the right, Y down and Z forward of the initial pose.
- component frame coordinates (stereo cameras, ToF, color camera, AR/VR displays ...) are usually defined by calibration, relative to the device frame coordinates (IMU).
- timestamps: the API usually uses the two following timestamp fields:
  - double hostTimestamp: host timestamp in seconds of the physical event of the data, based on std::chrono::steady_clock, which is guaranteed to be monotonic by the C++ STL
  - std::int64_t edgeTimestampUs: timestamp in microseconds of the physical event of the data, based on the edge clock
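As an illustration of the conventions above, here is a minimal standalone sketch (not the SDK's own helpers, whose exact names are not listed here): a quaternion-to-rotation-matrix conversion in the right-handed convention, assuming a unit quaternion stored as [x, y, z, w] and a row-major 3x3 matrix, followed by the application of a component-to-device calibration to a 3D point.

#include <array>

// Unit quaternion [x, y, z, w] -> row-major 3x3 rotation matrix (right-handed).
std::array<double, 9> quaternionToRotationMatrix(const std::array<double, 4>& q)
{
    const double x = q[0], y = q[1], z = q[2], w = q[3];
    return {
        1. - 2.*(y*y + z*z), 2.*(x*y - z*w),      2.*(x*z + y*w),
        2.*(x*y + z*w),      1. - 2.*(x*x + z*z), 2.*(y*z - x*w),
        2.*(x*z - y*w),      2.*(y*z + x*w),      1. - 2.*(x*x + y*y)
    };
}

// Component-to-device calibration (rotation R, translation t) applied to a
// 3D point expressed in the component frame: p_device = R * p_component + t.
std::array<double, 3> toDeviceFrame(const std::array<double, 9>& R,
                                    const std::array<double, 3>& t,
                                    const std::array<double, 3>& p)
{
    return { R[0]*p[0] + R[1]*p[1] + R[2]*p[2] + t[0],
             R[3]*p[0] + R[4]*p[1] + R[5]*p[2] + t[1],
             R[6]*p[0] + R[7]*p[1] + R[8]*p[2] + t[2] };
}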
Examples
The main entry point of the API is xv::Device: it gives access to the device and its algorithms. See xv::getDevices() to get an instance corresponding to a device.
A device can have multiple components, accessible through member functions:
- xv::Device::slam() : 6dof tracker running the SLAM algorithm on the host, based on information coming from the device (stereo cameras, IMU sensor, ...)
- xv::Device::imuSensor() : sensor with at least a 3-axis accelerometer and a 3-axis gyroscope
- xv::Device::fisheyeCameras() : typically 2 fisheye cameras used for Visual SLAM
- xv::Device::tofCamera() : a ToF depth camera sensor
- xv::Device::edge() : used to run some algorithms directly on the embedded device (typically Visual SLAM) when it is possible to choose between host and edge processing
- xv::Device::display() : used to handle information about the display (if the device can display, like an HMD)
- xv::Device::objectDetector() : used to run the CNN object detector and get its results
If a device does not support a component or does not have it (for example the ToF camera), the accessor function returns nullptr. The data streams and processing behind a component are activated only when at least one callback is registered on the component; if all callbacks are unregistered, the streams can be deactivated. A minimal sketch of this pattern follows.
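A minimal sketch of this discovery-and-activation pattern (the 10 s detection timeout is an assumption; the calls themselves are the ones used in the full examples below):

#include <xv-sdk.h>
#include <iostream>
#include <cstdlib>

int main()
{
    // Wait up to 10 s for a device to be detected (timeout value is an
    // assumption); xv::getDevices() maps an identifier to a shared_ptr<xv::Device>.
    auto devices = xv::getDevices(10.);
    if (devices.empty())
        return EXIT_FAILURE;
    auto device = devices.begin()->second;
    // Registering a callback is what activates the underlying stream: the
    // fisheye image stream only starts once at least one callback is in place.
    if (device->fisheyeCameras()) {
        device->fisheyeCameras()->registerCallback([](std::shared_ptr<const xv::FisheyeImages>) {
            std::cout << "fisheye images received" << std::endl;
        });
    }
    std::cin.get(); // keep the process alive while the stream runs
    return EXIT_SUCCESS;
}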
Here are some examples of how to use the different components.
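Both examples below include "frequency_counter.hpp", which is not reproduced in this document. A minimal sketch of such a helper, assuming only the three members the examples actually call (tic(), count() and fps()):

// frequency_counter.hpp -- minimal sketch of the helper used by the examples.
#pragma once
#include <chrono>
#include <deque>

class FrequencyCounter {
public:
    // Record one event (e.g. one callback invocation).
    void tic() {
        auto now = std::chrono::steady_clock::now();
        m_tics.push_back(now);
        ++m_count;
        // Keep a sliding one-second window to estimate the frequency.
        while (!m_tics.empty() && now - m_tics.front() > std::chrono::seconds(1))
            m_tics.pop_front();
    }
    // Total number of tic() calls since construction.
    long count() const { return m_count; }
    // Events per second over the last sliding window.
    double fps() const {
        if (m_tics.size() < 2) return 0.;
        double dt = std::chrono::duration<double>(m_tics.back() - m_tics.front()).count();
        return dt > 0. ? double(m_tics.size() - 1) / dt : 0.;
    }
private:
    std::deque<std::chrono::steady_clock::time_point> m_tics;
    long m_count = 0;
};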
6dof tracking with SLAM on the host and fisheye image output
#include <xv-sdk.h>
#include <iostream>
#include <thread>
#include <chrono>
#include <atomic>
#include <cmath>
#include "frequency_counter.hpp"
// Callback called on each new SLAM pose computed on the host.
void onPose(const xv::Pose& pose)
{
static FrequencyCounter fps;
fps.tic();
if (fps.count() % 500 == 1) {
// pitch/yaw/roll from the pose rotation; the helper name is an assumption
// based on the SDK's rotation conversion tools mentioned in the conventions.
auto pitchYawRoll = xv::rotationToPitchYawRoll(pose.rotation());
std::cout << "SLAM pose callback : " << fps.fps() << " Hz [timestamp=" << pose.hostTimestamp()
<< " x=" << pose.x() << " y=" << pose.y() << " z=" << pose.z()
<< " pitch=" << pitchYawRoll[0]*180./M_PI << "°"
<< " yaw=" << pitchYawRoll[1]*180./M_PI << "°"
<< " roll=" << pitchYawRoll[2]*180./M_PI << "°"
<< "]" << std::endl;
}
}
std::shared_ptr<const xv::FisheyeImages> lastStereoImage;
int main( int, char*[] )
{
// Wait up to 10 s for a device to be detected (timeout value is an assumption).
auto devices = xv::getDevices(10.);
if (devices.empty()) {
std::cerr << "Timeout for device detection." << std::endl;
return EXIT_FAILURE;
}
auto device = devices.begin()->second;
if (!device->slam()) {
std::cerr << "Host SLAM algorithm not supported." << std::endl;
return EXIT_FAILURE;
}
device->slam()->registerPoseCallback(onPose);
if (device->fisheyeCameras()) {
device->fisheyeCameras()->registerCallback([](std::shared_ptr<const xv::FisheyeImages> images) {
lastStereoImage = images;
static FrequencyCounter fps;
fps.tic();
if (fps.count() % 30 == 1) {
std::cout << "Fisheyes : " << fps.fps() << " Hz" << std::endl;
}
});
}
std::atomic<bool> stop(false);
std::thread threadLoop60Hz([&stop, &device]{
while (!stop) {
auto now = std::chrono::steady_clock::now();
xv::Pose pose; // current 6dof pose of the device
if (device->slam()->getPose(pose)) {
static FrequencyCounter fps;
fps.tic();
if (fps.count() % 120 == 1) {
// Same assumed rotation conversion helper as in onPose() above.
auto pitchYawRoll = xv::rotationToPitchYawRoll(pose.rotation());
std::cout << "Current SLAM : " << fps.fps() << " Hz [timestamp=" << pose.hostTimestamp()
<< " x=" << pose.x() << " y=" << pose.y() << " z=" << pose.z()
<< " pitch=" << pitchYawRoll[0]*180./M_PI << "°"
<< " yaw=" << pitchYawRoll[1]*180./M_PI << "°"
<< " roll=" << pitchYawRoll[2]*180./M_PI << "°"
<< "]" << std::endl;
}
}
// Pose predicted 30 ms ahead (second argument is the prediction time in seconds).
xv::Pose pose30ms;
device->slam()->getPose(pose30ms, 0.030);
if (lastStereoImage) {
// Pose at the host timestamp of the last received fisheye images.
xv::Pose poseAtStereoImage;
device->slam()->getPoseAt(poseAtStereoImage, lastStereoImage->hostTimestamp);
}
// Use a floating-point duration here: duration_cast to integral seconds would truncate.
double t = std::chrono::duration<double>(std::chrono::steady_clock::now().time_since_epoch()).count();
t += 0.0123;
xv::Pose poseAt; // pose at an arbitrary timestamp (here 12.3 ms in the future)
device->slam()->getPoseAt(poseAt, t);
std::this_thread::sleep_until(now+std::chrono::microseconds(long(1./60.*1e6)));
}
});
std::cout << "Press enter to start SLAM ..." << std::endl;
std::cin.get();
device->slam()->start();
std::cout << "Press enter to stop SLAM ..." << std::endl;
std::cin.get();
device->slam()->stop();
stop = true;
if (threadLoop60Hz.joinable()) {
threadLoop60Hz.join();
}
return EXIT_SUCCESS;
}
Edge SLAM 6dof pose tracking and RGB camera output
#include <xv-sdk.h>
#include <iostream>
#include <thread>
#include <atomic>
#include <cmath>
#include "frequency_counter.hpp"
// Callback called on each new 6dof pose coming from the edge (on-device) SLAM.
void onPose(const xv::Pose& pose)
{
static FrequencyCounter fps;
fps.tic();
if (fps.count() % 120 == 1) {
// Same assumed rotation conversion helper as in the previous example.
auto pitchYawRoll = xv::rotationToPitchYawRoll(pose.rotation());
std::cout << "Current SLAM : " << fps.fps() << " Hz [timestamp=" << pose.hostTimestamp()
<< " x=" << pose.x() << " y=" << pose.y() << " z=" << pose.z()
<< " pitch=" << pitchYawRoll[0]*180./M_PI << "°"
<< " yaw=" << pitchYawRoll[1]*180./M_PI << "°"
<< " roll=" << pitchYawRoll[2]*180./M_PI << "°"
<< "]" << std::endl;
}
}
std::shared_ptr<const xv::ColorImage> lastColorImage;
int main( int, char*[] )
{
// Wait up to 10 s for a device to be detected (timeout value is an assumption).
auto devices = xv::getDevices(10.);
if (devices.empty()) {
std::cerr << "Timeout for device detection." << std::endl;
return EXIT_FAILURE;
}
auto device = devices.begin()->second;
if (!device->edge()) {
std::cerr << "Edge SLAM algorithm not supported." << std::endl;
return EXIT_FAILURE;
}
device->edge()->registerPoseCallback(onPose);
if (device->colorCamera()) {
device->colorCamera()->registerCallback([](std::shared_ptr<const xv::ColorImage> image) {
lastColorImage = image;
static FrequencyCounter fps;
fps.tic();
if (fps.count() % 120 == 1) {
std::cout << "Current color image : " << fps.fps() << " Hz [timestamp=" << image->hostTimestamp << " width=" << image->width << " height=" << image->height
<< "]" << std::endl;
}
});
} else {
std::cerr << "Access to color camera not supported." << std::endl;
return EXIT_FAILURE;
}
std::cout << "Press enter to start SLAM ..." << std::endl;
std::cin.get();
device->edge()->start();
std::cout << "Press enter to stop SLAM ..." << std::endl;
std::cin.get();
device->edge()->stop();
return EXIT_SUCCESS;
}