#include <fstream>
#include <ios>
#include <iostream>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/mbt/vpMbGenericTracker.h>
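// This example runs as a small state machine: wait for an AprilTag detection to get an
// initial pose, then switch to model-based tracking of the cube, and quit on user request.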
typedef enum {
state_detection,
state_tracking,
state_quit
} state_t;
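// Generate a minimal cube.cao model file (8 points, 6 faces) that the model-based tracker loads.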
void createCaoFile(double cubeEdgeSize)
{
std::ofstream fileStream;
fileStream.open("cube.cao", std::ofstream::out | std::ofstream::trunc);
fileStream << "V1\n";
fileStream << "# 3D Points\n";
fileStream << "8 # Number of points\n";
fileStream << cubeEdgeSize / 2 << " " << cubeEdgeSize / 2 << " " << 0 << " # Point 0: (X, Y, Z)\n";
fileStream << cubeEdgeSize / 2 << " " << -cubeEdgeSize / 2 << " " << 0 << " # Point 1\n";
fileStream << -cubeEdgeSize / 2 << " " << -cubeEdgeSize / 2 << " " << 0 << " # Point 2\n";
fileStream << -cubeEdgeSize / 2 << " " << cubeEdgeSize / 2 << " " << 0 << " # Point 3\n";
fileStream << -cubeEdgeSize / 2 << " " << cubeEdgeSize / 2 << " " << -cubeEdgeSize << " # Point 4\n";
fileStream << -cubeEdgeSize / 2 << " " << -cubeEdgeSize / 2 << " " << -cubeEdgeSize << " # Point 5\n";
fileStream << cubeEdgeSize / 2 << " " << -cubeEdgeSize / 2 << " " << -cubeEdgeSize << " # Point 6\n";
fileStream << cubeEdgeSize / 2 << " " << cubeEdgeSize / 2 << " " << -cubeEdgeSize << " # Point 7\n";
fileStream << "# 3D Lines\n";
fileStream << "0 # Number of lines\n";
fileStream << "# Faces from 3D lines\n";
fileStream << "0 # Number of faces\n";
fileStream << "# Faces from 3D points\n";
fileStream << "6 # Number of faces\n";
fileStream << "4 0 3 2 1 # Face 0: [number of points] [index of the 3D points]...\n";
fileStream << "4 1 2 5 6\n";
fileStream << "4 4 7 6 5\n";
fileStream << "4 0 7 4 3\n";
fileStream << "4 5 2 3 4\n";
fileStream << "4 0 1 6 7 # Face 5\n";
fileStream << "# 3D cylinders\n";
fileStream << "0 # Number of cylinders\n";
fileStream << "# 3D circles\n";
fileStream << "0 # Number of circles\n";
fileStream.close();
}
#if defined(VISP_HAVE_APRILTAG)
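// Detect an AprilTag in the gray image; when a tag is found, the pose of the first
// detection initializes cMo and the state machine switches to tracking.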
state_t detectAprilTag(const vpImage<unsigned char> &I, vpDetectorAprilTag &detector, double tagSize,
const vpCameraParameters &cam, vpHomogeneousMatrix &cMo)
{
std::vector<vpHomogeneousMatrix> cMo_vec;
bool ret = detector.detect(I, tagSize, cam, cMo_vec);
for (size_t i = 0; i < cMo_vec.size(); i++) {
vpDisplay::displayFrame(I, cMo_vec[i], cam, tagSize / 2, vpColor::none, 3); // show each detected tag pose
}
if (ret && !cMo_vec.empty()) { // a tag was detected: keep the first pose to initialize tracking
cMo = cMo_vec[0];
return state_tracking;
}
return state_detection;
}
#endif // #if defined(VISP_HAVE_APRILTAG)
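// Track the cube in the color image only (edges, and optionally KLT texture points),
// returning to the detection state when tracking fails or the projection error is too large.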
state_t track(const vpImage<unsigned char> &I, vpMbGenericTracker &tracker, double projection_error_threshold,
vpHomogeneousMatrix &cMo)
{
vpCameraParameters cam;
tracker.getCameraParameters(cam);
try {
tracker.track(I);
}
catch (...) {
return state_detection;
}
tracker.getPose(cMo);
// Detect a tracking failure from the projection error
double projection_error = tracker.computeCurrentProjectionError(I, cMo, cam);
if (projection_error > projection_error_threshold) {
return state_detection;
}
tracker.display(I, cMo, cam, vpColor::red, 2);
return state_tracking;
}
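// Track the cube using both the color image and the depth map registered as a second camera.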
state_t track(std::map<std::string, const vpImage<unsigned char> *> mapOfImages,
#ifdef VISP_HAVE_PCL
std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointclouds,
#else
std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds,
std::map<std::string, unsigned int> mapOfWidths,
std::map<std::string, unsigned int> mapOfHeights,
#endif
const vpImage<unsigned char> &I_gray, const vpImage<unsigned char> &I_depth,
const vpHomogeneousMatrix &depth_M_color, vpMbGenericTracker &tracker,
double projection_error_threshold, vpHomogeneousMatrix &cMo)
{
vpCameraParameters cam_color, cam_depth;
tracker.getCameraParameters(cam_color, cam_depth);
try {
#ifdef VISP_HAVE_PCL
tracker.track(mapOfImages, mapOfPointclouds);
#else
tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
#endif
}
catch (...) {
return state_detection;
}
tracker.getPose(cMo);
// Detect a tracking failure from the projection error computed in the color camera
double projection_error = tracker.computeCurrentProjectionError(I_gray, cMo, cam_color);
if (projection_error > projection_error_threshold) {
return state_detection;
}
tracker.display(I_gray, I_depth, cMo, depth_M_color * cMo, cam_color, cam_depth, vpColor::red, 3);
return state_tracking;
}
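// Parse the command line, configure the RealSense grabber, the AprilTag detector and the
// model-based tracker, then run the detection/tracking loop.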
int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_MODULE_MBT)
vpDetectorAprilTag::vpAprilTagFamily opt_tag_family = vpDetectorAprilTag::TAG_36h11;
double opt_tag_size = 0.08;
float opt_quad_decimate = 1.0;
int opt_nthreads = 1;
double opt_cube_size = 0.125;
#ifdef VISP_HAVE_OPENCV
bool opt_use_texture = false;
#endif
bool opt_use_depth = false;
double opt_projection_error_threshold = 40.;
#if !(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
bool display_off = true;
#else
bool display_off = false;
#endif
for (int i = 1; i < argc; i++) {
if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
opt_tag_size = atof(argv[i + 1]);
} else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
opt_quad_decimate = (float)atof(argv[i + 1]);
} else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
opt_nthreads = atoi(argv[i + 1]);
} else if (std::string(argv[i]) == "--display_off") {
display_off = true;
} else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
} else if (std::string(argv[i]) == "--cube_size" && i + 1 < argc) {
opt_cube_size = atof(argv[i + 1]);
#ifdef VISP_HAVE_OPENCV
} else if (std::string(argv[i]) == "--texture") {
opt_use_texture = true;
#endif
} else if (std::string(argv[i]) == "--depth") {
opt_use_depth = true;
} else if (std::string(argv[i]) == "--projection_error" && i + 1 < argc) {
opt_projection_error_threshold = atof(argv[i + 1]);
} else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
std::cout << "Usage: " << argv[0] << " [--cube_size <size in m>] [--tag_size <size in m>]"
" [--quad_decimate <decimation>] [--nthreads <nb>]"
" [--tag_family <0: TAG_36h11, 1: TAG_36h10, 2: TAG_36ARTOOLKIT, "
" 3: TAG_25h9, 4: TAG_25h7, 5: TAG_16h5>]";
#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
std::cout << " [--display_off]";
#endif
std::cout << " [--texture] [--depth] [--projection_error <30 - 100>] [--help]" << std::endl;
return EXIT_SUCCESS;
}
}
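// Write the cube model with the requested edge size before configuring the tracker.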
createCaoFile(opt_cube_size);
try {
rs2::config config;
int width = 640, height = 480, stream_fps = 30;
config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, stream_fps);
config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, stream_fps);
config.disable_stream(RS2_STREAM_INFRARED);
vpRealSense2 realsense;
realsense.open(config);
// Color intrinsics are always needed; depth intrinsics and the depth-to-color
// extrinsics are only required when the depth feature is enabled
vpCameraParameters cam_color, cam_depth;
vpHomogeneousMatrix depth_M_color;
cam_color = realsense.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);
if (opt_use_depth) {
cam_depth = realsense.getCameraParameters(RS2_STREAM_DEPTH, vpCameraParameters::perspectiveProjWithoutDistortion);
depth_M_color = realsense.getTransformation(RS2_STREAM_COLOR, RS2_STREAM_DEPTH);
}
vpImage<vpRGBa> I_color(height, width);
vpImage<unsigned char> I_gray(height, width);
vpImage<unsigned char> I_depth(height, width);
vpImage<uint16_t> I_depth_raw(height, width);
std::map<std::string, vpHomogeneousMatrix> mapOfCameraTransformations;
std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
#ifdef VISP_HAVE_PCL
std::map<std::string, pcl::PointCloud< pcl::PointXYZ >::ConstPtr> mapOfPointclouds;
pcl::PointCloud<pcl::PointXYZ>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZ>());
#else
std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds;
std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
std::vector<vpColVector> pointcloud;
#endif
std::map<std::string, vpHomogeneousMatrix> mapOfCameraPoses;
std::cout << "Cube size: " << opt_cube_size << std::endl;
std::cout << "AprilTag size: " << opt_tag_size << std::endl;
std::cout << "AprilTag family: " << opt_tag_family << std::endl;
std::cout << "Camera parameters:" << std::endl;
std::cout << " Color:\n" << cam_color << std::endl;
if (opt_use_depth)
std::cout << " Depth:\n" << cam_depth << std::endl;
std::cout << "Detection: " << std::endl;
std::cout << " Quad decimate: " << opt_quad_decimate << std::endl;
std::cout << " Threads number: " << opt_nthreads << std::endl;
std::cout << "Tracker: " << std::endl;
std::cout << " Use edges : 1" << std::endl;
std::cout << " Use texture: "
#ifdef VISP_HAVE_OPENCV
<< opt_use_texture << std::endl;
#else
<< " na" << std::endl;
#endif
std::cout << " Use depth : " << opt_use_depth << std::endl;
std::cout << " Projection error: " << opt_projection_error_threshold << std::endl;
vpDisplay *d_gray = NULL;
vpDisplay *d_depth = NULL;
if (!display_off) {
#ifdef VISP_HAVE_X11
d_gray = new vpDisplayX(I_gray, 50, 50, "Color stream");
if (opt_use_depth)
d_depth = new vpDisplayX(I_depth, 80 + I_gray.getWidth(), 50, "Depth stream");
#elif defined(VISP_HAVE_GDI)
d_gray = new vpDisplayGDI(I_gray, 50, 50, "Color stream");
if (opt_use_depth)
d_depth = new vpDisplayGDI(I_depth, 80 + I_gray.getWidth(), 50, "Depth stream");
#elif defined(VISP_HAVE_OPENCV)
d_gray = new vpDisplayOpenCV(I_gray, 50, 50, "Color stream");
if (opt_use_depth)
d_depth = new vpDisplayOpenCV(I_depth, 80 + I_gray.getWidth(), 50, "Depth stream");
#endif
}
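// Configure the AprilTag detector and select the features used by the generic model-based tracker.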
vpDetectorAprilTag detector(opt_tag_family);
detector.setAprilTagQuadDecimate(opt_quad_decimate);
detector.setAprilTagNbThreads(opt_nthreads);
std::vector<int> trackerTypes;
#ifdef VISP_HAVE_OPENCV
if (opt_use_texture)
trackerTypes.push_back(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
else
#endif
trackerTypes.push_back(vpMbGenericTracker::EDGE_TRACKER);
if (opt_use_depth)
trackerTypes.push_back(vpMbGenericTracker::DEPTH_DENSE_TRACKER);
vpMbGenericTracker tracker(trackerTypes);
#ifdef VISP_HAVE_OPENCV
if (opt_use_texture) {
// KLT settings in the spirit of the ViSP model-based tracking tutorials
vpKltOpencv klt_settings;
klt_settings.setMaxFeatures(300);
klt_settings.setWindowSize(5);
klt_settings.setQuality(0.015);
klt_settings.setMinDistance(8);
klt_settings.setHarrisFreeParameter(0.01);
klt_settings.setBlockSize(3);
klt_settings.setPyramidLevels(3);
tracker.setKltOpencv(klt_settings);
tracker.setKltMaskBorder(5);
}
#endif
if (opt_use_depth) {
// Color camera is "Camera1", depth camera is "Camera2"
tracker.setCameraParameters(cam_color, cam_depth);
tracker.loadModel("cube.cao", "cube.cao");
mapOfCameraTransformations["Camera2"] = depth_M_color;
tracker.setCameraTransformationMatrix(mapOfCameraTransformations);
tracker.setAngleAppear(vpMath::rad(70), vpMath::rad(70));
tracker.setAngleDisappear(vpMath::rad(80), vpMath::rad(80));
}
else {
tracker.setCameraParameters(cam_color);
tracker.loadModel("cube.cao");
tracker.setAngleAppear(vpMath::rad(70));
tracker.setAngleDisappear(vpMath::rad(80));
}
tracker.setDisplayFeatures(true);
vpHomogeneousMatrix cMo;
state_t state = state_detection;
while (state != state_quit) {
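// Acquire a new color frame (and, when enabled, the depth map and point cloud).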
if (opt_use_depth) {
#ifdef VISP_HAVE_PCL
realsense.acquire((unsigned char *)I_color.bitmap, (unsigned char *)I_depth_raw.bitmap, NULL, pointcloud, NULL);
#else
realsense.acquire((unsigned char *)I_color.bitmap, (unsigned char *)I_depth_raw.bitmap, &pointcloud, NULL, NULL);
#endif
mapOfImages["Camera1"] = &I_gray;
mapOfImages["Camera2"] = &I_depth;
#ifdef VISP_HAVE_PCL
mapOfPointclouds["Camera2"] = pointcloud;
#else
mapOfPointclouds["Camera2"] = &pointcloud;
mapOfWidths["Camera2"] = width;
mapOfHeights["Camera2"] = height;
#endif
}
else {
realsense.acquire(I_gray);
vpDisplay::display(I_gray);
}
if (state == state_detection) {
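// Waiting for a tag: run the detector and, on success, initialize the tracker from the tag pose.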
state = detectAprilTag(I_gray, detector, opt_tag_size, cam_color, cMo);
if (state == state_tracking) {
if (opt_use_depth) {
mapOfCameraPoses["Camera1"] = cMo;
mapOfCameraPoses["Camera2"] = depth_M_color * cMo;
tracker.initFromPose(mapOfImages, mapOfCameraPoses);
}
else {
tracker.initFromPose(I_gray, cMo);
}
}
}
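// Tracking: update the pose and fall back to detection if the tracker throws or drifts.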
if (state == state_tracking) {
if (opt_use_depth) {
#ifdef VISP_HAVE_PCL
state = track(mapOfImages, mapOfPointclouds,
I_gray, I_depth, depth_M_color, tracker, opt_projection_error_threshold, cMo);
#else
state = track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights,
I_gray, I_depth, depth_M_color, tracker, opt_projection_error_threshold, cMo);
#endif
}
else {
state = track(I_gray, tracker, opt_projection_error_threshold, cMo);
}
}
vpDisplay::displayText(I_gray, 20, 20, "Click to quit...", vpColor::red);
if (vpDisplay::getClick(I_gray, false)) {
state = state_quit;
}
vpDisplay::flush(I_gray);
if (opt_use_depth) {
vpDisplay::displayText(I_depth, 20, 20, "Click to quit...", vpColor::red);
if (vpDisplay::getClick(I_depth, false)) {
state = state_quit;
}
vpDisplay::flush(I_depth);
}
}
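// Free the display windows allocated on the heap.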
if (!display_off) {
delete d_gray;
if (opt_use_depth)
delete d_depth;
}
}
catch (const vpException &e) {
std::cerr << "Caught an exception: " << e.getMessage() << std::endl;
}
return EXIT_SUCCESS;
#else
(void)argc;
(void)argv;
#ifndef VISP_HAVE_APRILTAG
std::cout << "ViSP is not build with Apriltag support" << std::endl;
#endif
#ifndef VISP_HAVE_REALSENSE2
std::cout << "ViSP is not build with librealsense2 support" << std::endl;
#endif
std::cout << "Install missing 3rd parties, configure and build ViSP to run this tutorial" << std::endl;
#endif
return EXIT_SUCCESS;
}