#include <visp3/core/vpConfig.h>
#ifdef VISP_HAVE_MODULE_SENSOR
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpFlyCaptureGrabber.h>
#include <visp3/sensor/vpRealSense2.h>
#endif
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpTime.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/vision/vpKeyPoint.h>
#include <visp3/mbt/vpMbGenericTracker.h>
int main(int argc, char **argv)
{
#if defined(VISP_HAVE_OPENCV) && \
(defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || (VISP_HAVE_OPENCV_VERSION >= 0x020100) || \
defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2) )
try {
std::string opt_modelname = "model/teabox/teabox.cao";
int opt_tracker = 2;
int opt_device = 0;
double opt_proj_error_threshold = 30.;
bool opt_use_ogre = false;
bool opt_use_scanline = false;
bool opt_display_projection_error = false;
bool opt_learn = false;
bool opt_auto_init = false;
std::string opt_learning_data = "learning/data-learned.bin";
std::string opt_intrinsic_file = "";
std::string opt_camera_name = "";
for (int i = 0; i < argc; i++) {
if (std::string(argv[i]) == "--model") {
opt_modelname = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--tracker") {
opt_tracker = atoi(argv[i + 1]);
}
else if (std::string(argv[i]) == "--camera_device" && i + 1 < argc) {
opt_device = atoi(argv[i + 1]);
}
else if (std::string(argv[i]) == "--max_proj_error") {
opt_proj_error_threshold = atof(argv[i + 1]);
} else if (std::string(argv[i]) == "--use_ogre") {
opt_use_ogre = true;
} else if (std::string(argv[i]) == "--use_scanline") {
opt_use_scanline = true;
} else if (std::string(argv[i]) == "--learn") {
opt_learn = true;
} else if (std::string(argv[i]) == "--learning_data" && i+1 < argc) {
opt_learning_data = argv[i+1];
} else if (std::string(argv[i]) == "--auto_init") {
opt_auto_init = true;
} else if (std::string(argv[i]) == "--display_proj_error") {
opt_display_projection_error = true;
} else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
opt_intrinsic_file = std::string(argv[i + 1]);
} else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
opt_camera_name = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
std::cout << "\nUsage: " << argv[0]
<< " [--camera_device <camera device> (default: 0)]"
<< " [--intrinsic <intrinsic file> (default: empty)]"
<< " [--camera_name <camera name> (default: empty)]"
<< " [--model <model name> (default: teabox)]"
<< " [--tracker <0=egde|1=keypoint|2=hybrid> (default: 2)]"
<< " [--use_ogre] [--use_scanline]"
<< " [--max_proj_error <allowed projection error> (default: 30)]"
<< " [--learn] [--auto_init] [--learning_data <data-learned.bin> (default: learning/data-learned.bin)]"
<< " [--display_proj_error]"
<< " [--help] [-h]\n"
<< std::endl;
return 0;
}
}
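if (opt_learn || opt_auto_init) {
// Make sure the directory that will receive the learning data exists
vpIoTools::makeDirectory(vpIoTools::getParent(opt_learning_data));
}
// Deduce the <object> prefix of the .cao, .init and .xml files from the model path
std::string parentname = vpIoTools::getParent(opt_modelname);
std::string objectname = vpIoTools::getNameWE(opt_modelname);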
if (!parentname.empty())
objectname = parentname + "/" + objectname;
std::cout << "Tracker requested config files: " << objectname << ".[init, cao]" << std::endl;
std::cout << "Tracker optional config files: " << objectname << ".[ppm]" << std::endl;
std::cout << "Tracked features: " << std::endl;
std::cout << " Use edges : " << (opt_tracker == 0 || opt_tracker == 2) << std::endl;
std::cout << " Use klt : " << (opt_tracker == 1 || opt_tracker == 2) << std::endl;
std::cout << "Tracker options: " << std::endl;
std::cout << " Use ogre : " << opt_use_ogre << std::endl;
std::cout << " Use scanline: " << opt_use_scanline << std::endl;
std::cout << " Proj. error : " << opt_proj_error_threshold << std::endl;
std::cout << " Display proj. error: " << opt_display_projection_error << std::endl;
std::cout << "Config files: " << std::endl;
std::cout << " Config file : " << "\"" << objectname + ".xml" << "\"" << std::endl;
std::cout << " Model file : " << "\"" << objectname + ".cao" << "\"" << std::endl;
std::cout << " Init file : " << "\"" << objectname + ".init" << "\"" << std::endl;
std::cout << "Learning options : " << std::endl;
std::cout << " Learn : " << opt_learn << std::endl;
std::cout << " Auto init : " << opt_auto_init << std::endl;
std::cout << " Learning data: " << opt_learning_data << std::endl;
#if VISP_VERSION_INT > VP_VERSION_INT(3, 2, 0)
vpImage<vpRGBa> I; // recent ViSP versions support model-based tracking on color images
#else
vpImage<unsigned char> I; // older versions track gray level images only
#endif
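// Camera parameters: generic example intrinsics for a 640x480 image (replace with your own
// calibration); they are overwritten below from the intrinsic XML file or the RealSense device
vpCameraParameters cam;
cam.initPersProjWithoutDistortion(840, 840, 320, 240);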
#ifdef VISP_HAVE_PUGIXML
vpXmlParserCamera parser;
if (!opt_intrinsic_file.empty() && !opt_camera_name.empty())
parser.parse(cam, opt_intrinsic_file, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
#endif
#if defined(VISP_HAVE_V4L2)
std::ostringstream device;
device << "/dev/video" << opt_device;
std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
#elif defined(VISP_HAVE_DC1394)
(void)opt_device;
std::cout << "Use DC1394 grabber" << std::endl;
#elif defined(VISP_HAVE_CMU1394)
(void)opt_device;
std::cout << "Use CMU1394 grabber" << std::endl;
#elif defined(VISP_HAVE_FLYCAPTURE)
(void)opt_device;
std::cout << "Use FlyCapture grabber" << std::endl;
#elif defined(VISP_HAVE_REALSENSE2)
(void)opt_device;
std::cout << "Use Realsense 2 grabber" << std::endl;
rs2::config config;
config.disable_stream(RS2_STREAM_DEPTH);
config.disable_stream(RS2_STREAM_INFRARED);
config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
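g.open(config);
g.acquire(I);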
std::cout << "Read camera parameters from Realsense device" << std::endl;
#elif defined(VISP_HAVE_OPENCV)
std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
cv::VideoCapture g(opt_device);
if (!g.isOpened()) {
std::cout << "Failed to open the camera" << std::endl;
return -1;
}
cv::Mat frame;
g >> frame;
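vpImageConvert::convert(frame, I); // convert the OpenCV frame into a ViSP image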
#endif
vpDisplay *display = NULL;
#if defined(VISP_HAVE_X11)
display = new vpDisplayX;
#elif defined(VISP_HAVE_GDI)
display = new vpDisplayGDI;
#else
display = new vpDisplayOpenCV;
#endif
display->init(I, 100, 100, "Model-based tracker");
while (true) {
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
g >> frame;
vpImageConvert::convert(frame, I);
#endif
vpDisplay::display(I);
vpDisplay::displayText(I, 20, 20, "Click when ready.", vpColor::red);
vpDisplay::flush(I);
// Wait for a user click before starting the tracker
if (vpDisplay::getClick(I, false)) {
break;
}
}
vpMbGenericTracker tracker;
if (opt_tracker == 0)
tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER);
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV)
else if (opt_tracker == 1)
tracker.setTrackerType(vpMbGenericTracker::KLT_TRACKER);
else
tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
#else
else {
# if !defined(VISP_HAVE_MODULE_KLT)
std::cout << "klt and hybrid model-based tracker are not available since visp_klt module is not available. "
"In CMakeGUI turn visp_klt module ON, configure and build ViSP again."
<< std::endl;
# else
std::cout << "Hybrid tracking is impossible since OpenCV is not enabled. "
<< "Install OpenCV, configure and build ViSP again to run this tutorial."
<< std::endl;
# endif
return EXIT_SUCCESS;
}
#endif
bool usexml = false;
#ifdef VISP_HAVE_PUGIXML
// Prefer the <object>.xml settings file when it exists
if (vpIoTools::checkFilename(objectname + ".xml")) {
tracker.loadConfigFile(objectname + ".xml");
usexml = true;
}
#endif
if (!usexml) {
if (opt_tracker == 0 || opt_tracker == 2) {
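// Moving-edges settings used when no XML file is available; the values below are the typical
// ones from the ViSP model-based tracking tutorials and may need tuning for your scene
vpMe me;
me.setMaskSize(5);
me.setMaskNumber(180);
me.setRange(8);
me.setThreshold(10000);
me.setMu1(0.5);
me.setMu2(0.5);
me.setSampleStep(4);
tracker.setMovingEdge(me);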
}
#ifdef VISP_HAVE_MODULE_KLT
if (opt_tracker == 1 || opt_tracker == 2) {
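// KLT settings used when no XML file is available; again typical tutorial values, adjust as needed
vpKltOpencv klt_settings;
klt_settings.setMaxFeatures(300);
klt_settings.setWindowSize(5);
klt_settings.setQuality(0.015);
klt_settings.setMinDistance(8);
klt_settings.setHarrisFreeParameter(0.01);
klt_settings.setBlockSize(3);
klt_settings.setPyramidLevels(3);
tracker.setKltOpencv(klt_settings);
tracker.setKltMaskBorder(5);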
}
#endif
}
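// Apply the camera parameters, load the CAD model and configure the visibility tests
tracker.setCameraParameters(cam);
tracker.loadModel(objectname + ".cao");
tracker.setOgreVisibilityTest(opt_use_ogre);
tracker.setScanLineVisibilityTest(opt_use_scanline);
tracker.setProjectionErrorComputation(true);
tracker.setProjectionErrorDisplay(opt_display_projection_error);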
#if (defined(VISP_HAVE_OPENCV_NONFREE) || defined(VISP_HAVE_OPENCV_XFEATURES2D))
std::string detectorName = "SIFT";
std::string extractorName = "SIFT";
std::string matcherName = "BruteForce";
#else
std::string detectorName = "FAST";
std::string extractorName = "ORB";
std::string matcherName = "BruteForce-Hamming";
#endif
vpKeyPoint keypoint;
if (opt_learn || opt_auto_init) {
keypoint.setDetector(detectorName);
keypoint.setExtractor(extractorName);
keypoint.setMatcher(matcherName);
#if !(defined(VISP_HAVE_OPENCV_NONFREE) || defined(VISP_HAVE_OPENCV_XFEATURES2D))
# if (VISP_HAVE_OPENCV_VERSION < 0x030000)
keypoint.setDetectorParameter("ORB", "nLevels", 1);
# else
cv::Ptr<cv::ORB> orb_detector = keypoint.getDetector("ORB").dynamicCast<cv::ORB>();
if (orb_detector) {
orb_detector->setNLevels(1);
}
# endif
#endif
}
if (opt_auto_init) {
if (!vpIoTools::checkFilename(opt_learning_data)) {
std::cout << "Cannot enable auto detection. Learning file \"" << opt_learning_data << "\" doesn't exist" << std::endl;
return EXIT_FAILURE;
}
keypoint.loadLearningData(opt_learning_data, true);
}
else {
tracker.initClick(I, objectname + ".init", true);
}
bool learn_position = false;
bool run_auto_init = false;
if (opt_auto_init) {
run_auto_init = true;
}
int learn_id = 1;
unsigned int learn_cpt = 0;
bool quit = false;
bool tracking_failed = false;
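vpHomogeneousMatrix cMo; // pose of the object in the camera frame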
while (!quit) {
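double t_begin = vpTime::measureTimeMs(); // used to display the loop time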
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
g >> frame;
vpImageConvert::convert(frame, I);
#endif
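vpDisplay::display(I);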
if (run_auto_init) {
tracking_failed = false;
// Try to localize the object from the learned keypoints and initialize the tracker from the estimated pose
if (keypoint.matchPoint(I, cam, cMo)) {
std::cout << "Auto init succeeded" << std::endl;
tracker.initFromPose(I, cMo);
} else {
vpDisplay::flush(I);
continue;
}
}
else if (tracking_failed) {
tracking_failed = false;
tracker.initClick(I, objectname + ".init", true);
}
try {
if (run_auto_init) {
run_auto_init = false;
}
// Track the object; a vpException is thrown when tracking fails on the current frame
tracker.track(I);
}
catch (const vpException &e) {
std::cout << "Tracker exception: " << e.getStringMessage() << std::endl;
tracking_failed = true;
if (opt_auto_init) {
std::cout << "Tracker needs to restart (tracking exception)" << std::endl;
run_auto_init = true;
}
}
if (!tracking_failed) {
double proj_error = 0;
if (tracker.getTrackerType() == vpMbGenericTracker::EDGE_TRACKER) {
proj_error = tracker.getProjectionError(); // computed internally by the moving-edges tracker
}
else {
tracker.getPose(cMo);
tracker.getCameraParameters(cam);
proj_error = tracker.computeCurrentProjectionError(I, cMo, cam);
}
if (proj_error > opt_proj_error_threshold) {
std::cout << "Tracker needs to restart (projection error detected: " << proj_error << ")" << std::endl;
if (opt_auto_init) {
run_auto_init = true;
}
tracking_failed = true;
}
}
if (!tracking_failed) {
// Display the model at the estimated pose
tracker.getPose(cMo);
tracker.getCameraParameters(cam);
tracker.display(I, cMo, cam, vpColor::red, 2);
{
vpPoseVector pose(cMo);
std::stringstream ss;
ss << "Translation: " << std::setprecision(5) << pose[0] << " " << pose[1] << " " << pose[2] << " [m]";
vpDisplay::displayText(I, 80, 20, ss.str(), vpColor::green);
ss.str("");
ss << "Rotation tu: " << std::setprecision(4) << vpMath::deg(pose[3]) << " " << vpMath::deg(pose[4]) << " " << vpMath::deg(pose[5]) << " [deg]";
vpDisplay::displayText(I, 100, 20, ss.str(), vpColor::green);
}
}
if (learn_position) {
learn_cpt++;
// Detect keypoints on the current image
std::vector<cv::KeyPoint> trainKeyPoints;
keypoint.detect(I, trainKeyPoints);
std::vector<vpPolygon> polygons;
std::vector<std::vector<vpPoint> > roisPt;
std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > pair = tracker.getPolygonFaces();
polygons = pair.first;
roisPt = pair.second;
// Compute the 3D coordinates of the keypoints lying on the visible object faces
std::vector<cv::Point3f> points3f;
vpKeyPoint::compute3DForPointsInPolygons(cMo, cam, trainKeyPoints, polygons, roisPt, points3f);
// Build the reference used later for keypoint-based (re-)initialization
keypoint.buildReference(I, trainKeyPoints, points3f, true, learn_id++);
// Display the learned keypoints
for (std::vector<cv::KeyPoint>::const_iterator it = trainKeyPoints.begin(); it != trainKeyPoints.end(); ++it) {
vpDisplay::displayCross(I, (int)it->pt.y, (int)it->pt.x, 10, vpColor::yellow, 3);
}
learn_position = false;
std::cout << "Data learned" << std::endl;
}
std::stringstream ss;
ss << "Loop time: " << vpTime::measureTimeMs() - t_begin << " ms";
vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
if (opt_learn)
vpDisplay::displayText(I, 35, 20, "Left click: learn  Right click: quit", vpColor::red);
else if (opt_auto_init)
vpDisplay::displayText(I, 35, 20, "Left click: auto_init  Right click: quit", vpColor::red);
else
vpDisplay::displayText(I, 35, 20, "Right click: quit", vpColor::red);
vpMouseButton::vpMouseButtonType button;
if (vpDisplay::getClick(I, button, false)) {
if (button == vpMouseButton::button3) { // right click: quit
quit = true;
} else if (button == vpMouseButton::button1 && opt_learn) { // left click: learn the current view
learn_position = true;
} else if (button == vpMouseButton::button1 && opt_auto_init && !opt_learn) { // left click: force auto init
run_auto_init = true;
}
}
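vpDisplay::flush(I);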
}
if (opt_learn && learn_cpt) {
std::cout << "Save learning from " << learn_cpt << " images in file: " << opt_learning_data << std::endl;
}
delete display;
}
catch (const vpException &e) {
std::cout << "Catch a ViSP exception: " << e << std::endl;
}
#elif defined(VISP_HAVE_OPENCV)
(void) argc;
(void) argv;
std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, Realsense2), configure and build ViSP again to use this example" << std::endl;
#else
(void) argc;
(void) argv;
std::cout << "Install OpenCV 3rd party, configure and build ViSP again to use this example" << std::endl;
#endif
}