Visual Servoing Platform version 3.2.0 under development (2019-01-22)
tutorial-mb-generic-tracker-apriltag-live-webcam.cpp
#include <fstream>
#include <ios>
#include <iostream>

#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/mbt/vpMbGenericTracker.h>

typedef enum {
  state_detection,
  state_tracking,
  state_quit
} state_t;

// Creates a cube.cao file in your current directory
// cubeEdgeSize : size of cube edges in meters
void createCaoFile(double cubeEdgeSize)
{
  std::ofstream fileStream;
  fileStream.open("cube.cao", std::ofstream::out | std::ofstream::trunc);
  fileStream << "V1\n";
  fileStream << "# 3D Points\n";
  fileStream << "8 # Number of points\n";
  fileStream << cubeEdgeSize / 2 << " " << cubeEdgeSize / 2 << " " << 0 << " # Point 0: (X, Y, Z)\n";
  fileStream << cubeEdgeSize / 2 << " " << -cubeEdgeSize / 2 << " " << 0 << " # Point 1\n";
  fileStream << -cubeEdgeSize / 2 << " " << -cubeEdgeSize / 2 << " " << 0 << " # Point 2\n";
  fileStream << -cubeEdgeSize / 2 << " " << cubeEdgeSize / 2 << " " << 0 << " # Point 3\n";
  fileStream << -cubeEdgeSize / 2 << " " << cubeEdgeSize / 2 << " " << -cubeEdgeSize << " # Point 4\n";
  fileStream << -cubeEdgeSize / 2 << " " << -cubeEdgeSize / 2 << " " << -cubeEdgeSize << " # Point 5\n";
  fileStream << cubeEdgeSize / 2 << " " << -cubeEdgeSize / 2 << " " << -cubeEdgeSize << " # Point 6\n";
  fileStream << cubeEdgeSize / 2 << " " << cubeEdgeSize / 2 << " " << -cubeEdgeSize << " # Point 7\n";
  fileStream << "# 3D Lines\n";
  fileStream << "0 # Number of lines\n";
  fileStream << "# Faces from 3D lines\n";
  fileStream << "0 # Number of faces\n";
  fileStream << "# Faces from 3D points\n";
  fileStream << "6 # Number of faces\n";
  fileStream << "4 0 3 2 1 # Face 0: [number of points] [index of the 3D points]...\n";
  fileStream << "4 1 2 5 6\n";
  fileStream << "4 4 7 6 5\n";
  fileStream << "4 0 7 4 3\n";
  fileStream << "4 5 2 3 4\n";
  fileStream << "4 0 1 6 7 # Face 5\n";
  fileStream << "# 3D cylinders\n";
  fileStream << "0 # Number of cylinders\n";
  fileStream << "# 3D circles\n";
  fileStream << "0 # Number of circles\n";
  fileStream.close();
}
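
// For reference, a sketch of the cube.cao that the function above generates for the
// default 0.125 m cube (abridged; values are exactly what the stream output produces):
//
//   V1
//   # 3D Points
//   8 # Number of points
//   0.0625 0.0625 0 # Point 0: (X, Y, Z)
//   ...
//   # Faces from 3D points
//   6 # Number of faces
//   4 0 3 2 1 # Face 0: [number of points] [index of the 3D points]...
//   ...
//
// Points 0-3 form the Z = 0 face where the AprilTag is expected to lie, and the
// cube extends along negative Z behind that face.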

#if defined(VISP_HAVE_APRILTAG)
state_t detectAprilTag(const vpImage<unsigned char> &I, vpDetectorAprilTag &detector,
                       double tagSize, const vpCameraParameters &cam, vpHomogeneousMatrix &cMo)
{
  std::vector<vpHomogeneousMatrix> cMo_vec;

  // Detection
  bool ret = detector.detect(I, tagSize, cam, cMo_vec);

  // Display camera pose
  for (size_t i = 0; i < cMo_vec.size(); i++) {
    vpDisplay::displayFrame(I, cMo_vec[i], cam, tagSize / 2, vpColor::none, 3);
  }

  vpDisplay::displayText(I, 40, 20, "State: waiting tag detection", vpColor::red);

  if (ret && detector.getNbObjects() > 0) { // if tag detected, we pick the first one
    cMo = cMo_vec[0];
    return state_tracking;
  }

  return state_detection;
}
#endif // #if defined(VISP_HAVE_APRILTAG)

state_t track(const vpImage<unsigned char> &I, vpMbGenericTracker &tracker,
              double projection_error_threshold, vpHomogeneousMatrix &cMo)
{
  vpCameraParameters cam;
  tracker.getCameraParameters(cam);

  // Track the object
  try {
    tracker.track(I);
  }
  catch (...) {
    return state_detection;
  }

  tracker.getPose(cMo);

  // Detect tracking error
  double projection_error = tracker.computeCurrentProjectionError(I, cMo, cam);
  if (projection_error > projection_error_threshold) {
    return state_detection;
  }

  // Display
  tracker.display(I, cMo, cam, vpColor::red, 2);
  vpDisplay::displayFrame(I, cMo, cam, 0.025, vpColor::none, 3);
  vpDisplay::displayText(I, 40, 20, "State: tracking in progress", vpColor::red);

  return state_tracking;
}

int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_OPENCV)) && \
    defined(VISP_HAVE_MODULE_MBT)

  int opt_device = 0;
  vpDetectorAprilTag::vpAprilTagFamily opt_tag_family = vpDetectorAprilTag::TAG_36h11;
  double opt_tag_size = 0.08;
  float opt_quad_decimate = 1.0;
  int opt_nthreads = 1;
  std::string opt_intrinsic_file = "";
  std::string opt_camera_name = "";
  double opt_cube_size = 0.125; // 12.5cm by default
#ifdef VISP_HAVE_OPENCV
  bool opt_use_texture = false;
#endif
  double opt_projection_error_threshold = 40.;

#if !(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
  bool display_off = true;
#else
  bool display_off = false;
#endif

  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
      opt_tag_size = atof(argv[i + 1]);
    } else if (std::string(argv[i]) == "--input" && i + 1 < argc) {
      opt_device = atoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
      opt_quad_decimate = (float)atof(argv[i + 1]);
    } else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
      opt_nthreads = atoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
      opt_intrinsic_file = std::string(argv[i + 1]);
    } else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
      opt_camera_name = std::string(argv[i + 1]);
    } else if (std::string(argv[i]) == "--display_off") {
      display_off = true;
    } else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
      opt_tag_family = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]);
    } else if (std::string(argv[i]) == "--cube_size" && i + 1 < argc) {
      opt_cube_size = atof(argv[i + 1]);
#ifdef VISP_HAVE_OPENCV
    } else if (std::string(argv[i]) == "--texture") {
      opt_use_texture = true;
#endif
    } else if (std::string(argv[i]) == "--projection_error" && i + 1 < argc) {
      opt_projection_error_threshold = atof(argv[i + 1]);
    } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      std::cout << "Usage: " << argv[0] << " [--input <camera id>] [--cube_size <size in m>] [--tag_size <size in m>]"
                   " [--quad_decimate <decimation>] [--nthreads <nb>]"
                   " [--intrinsic <xml intrinsic file>] [--camera_name <camera name in xml file>]"
                   " [--tag_family <0: TAG_36h11, 1: TAG_36h10, 2: TAG_36ARTOOLKIT, "
                   " 3: TAG_25h9, 4: TAG_25h7, 5: TAG_16h5>]";
#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
      std::cout << " [--display_off]";
#endif
      std::cout << " [--texture] [--projection_error <30 - 100>] [--help]" << std::endl;
      return EXIT_SUCCESS;
    }
  }
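
  // For example, assuming the binary keeps the tutorial's file name, a typical run on
  // the first camera with an 8 cm tag stuck on a 12.5 cm cube would be:
  //   ./tutorial-mb-generic-tracker-apriltag-live-webcam --input 0 --tag_size 0.08 --cube_size 0.125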

  createCaoFile(opt_cube_size);

  vpCameraParameters cam;
  bool camIsInit = false;
#ifdef VISP_HAVE_XML2
  vpXmlParserCamera parser;
  if (!opt_intrinsic_file.empty() && !opt_camera_name.empty()) {
    parser.parse(cam, opt_intrinsic_file, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
    camIsInit = true;
  }
#endif

  try {
    vpImage<unsigned char> I;

#if defined(VISP_HAVE_V4L2)
    vpV4l2Grabber g;
    std::ostringstream device;
    device << "/dev/video" << opt_device;
    std::cout << "Use device " << device.str() << " (v4l2 grabber)" << std::endl;
    g.setDevice(device.str());
    g.setScale(1);
    g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
    std::cout << "Use device " << opt_device << " (OpenCV grabber)" << std::endl;
    cv::VideoCapture cap(opt_device); // open the default camera
    if (!cap.isOpened()) { // check if we succeeded
      std::cout << "Failed to open the camera" << std::endl;
      return EXIT_FAILURE;
    }
    cv::Mat frame;
    cap >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#endif
    if (!camIsInit) {
      cam.initPersProjWithoutDistortion(600, 600, I.getWidth() / 2., I.getHeight() / 2.);
    }

    std::cout << "Cube size: " << opt_cube_size << std::endl;
    std::cout << "AprilTag size: " << opt_tag_size << std::endl;
    std::cout << "AprilTag family: " << opt_tag_family << std::endl;
    std::cout << "Camera parameters:\n" << cam << std::endl;
    std::cout << "Detection: " << std::endl;
    std::cout << " Quad decimate: " << opt_quad_decimate << std::endl;
    std::cout << " Threads number: " << opt_nthreads << std::endl;
    std::cout << "Tracker: " << std::endl;
    std::cout << " Use edges : 1" << std::endl;
    std::cout << " Use texture: "
#ifdef VISP_HAVE_OPENCV
              << opt_use_texture << std::endl;
#else
              << " na" << std::endl;
#endif
    std::cout << " Projection error: " << opt_projection_error_threshold << std::endl;

    // Construct display
    vpDisplay *d = NULL;
    if (!display_off) {
#ifdef VISP_HAVE_X11
      d = new vpDisplayX(I);
#elif defined(VISP_HAVE_GDI)
      d = new vpDisplayGDI(I);
#elif defined(VISP_HAVE_OPENCV)
      d = new vpDisplayOpenCV(I);
#endif
    }

    // Initialize AprilTag detector
    vpDetectorAprilTag detector(opt_tag_family);
    detector.setAprilTagQuadDecimate(opt_quad_decimate);
    detector.setAprilTagNbThreads(opt_nthreads);

    // Prepare MBT
    vpMbGenericTracker tracker;
#ifdef VISP_HAVE_OPENCV
    if (opt_use_texture)
      tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
    else
#endif
      tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER);
    // edges
    vpMe me;
    me.setMaskSize(5);
    me.setMaskNumber(180);
    me.setRange(12);
    me.setThreshold(10000);
    me.setMu1(0.5);
    me.setMu2(0.5);
    me.setSampleStep(4);
    tracker.setMovingEdge(me);

#ifdef VISP_HAVE_OPENCV
    if (opt_use_texture) {
      vpKltOpencv klt_settings;
      klt_settings.setMaxFeatures(300);
      klt_settings.setWindowSize(5);
      klt_settings.setQuality(0.015);
      klt_settings.setMinDistance(8);
      klt_settings.setHarrisFreeParameter(0.01);
      klt_settings.setBlockSize(3);
      klt_settings.setPyramidLevels(3);
      tracker.setKltOpencv(klt_settings);
      tracker.setKltMaskBorder(5);
    }
#endif

    // camera calibration params
    tracker.setCameraParameters(cam);
    // model definition
    tracker.loadModel("cube.cao");
    tracker.setDisplayFeatures(true);
    tracker.setAngleAppear(vpMath::rad(70));
    tracker.setAngleDisappear(vpMath::rad(80));

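    // The rest of main() is a small state machine: detect the AprilTag to get an
    // initial pose, hand that pose to the model-based tracker, and fall back to
    // detection whenever tracking throws or the projection error grows too large.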
    vpHomogeneousMatrix cMo;
    state_t state = state_detection;

    // wait for a tag detection
    while (state != state_quit) {

#if defined(VISP_HAVE_V4L2)
      g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
      cap >> frame;
      vpImageConvert::convert(frame, I);
#endif

      vpDisplay::display(I);

      if (state == state_detection) {
        state = detectAprilTag(I, detector, opt_tag_size, cam, cMo);

        // Initialize the tracker with the result of the detection
        if (state == state_tracking) {
          tracker.initFromPose(I, cMo);
        }
      }

      if (state == state_tracking) {
        state = track(I, tracker, opt_projection_error_threshold, cMo);
      }

      vpDisplay::displayText(I, 20, 20, "Click to quit...", vpColor::red);
      if (vpDisplay::getClick(I, false)) { // exit
        state = state_quit;
      }

      vpDisplay::flush(I);
    }

    if (!display_off)
      delete d;
  } catch (const vpException &e) {
    std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
  }

  return EXIT_SUCCESS;
#else
  (void)argc;
  (void)argv;
#ifndef VISP_HAVE_APRILTAG
  std::cout << "ViSP is not build with Apriltag support" << std::endl;
#endif
#if !(defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_OPENCV))
  std::cout << "ViSP is not build with v4l2 or OpenCV support" << std::endl;
#endif
  std::cout << "Install missing 3rd parties, configure and build ViSP to run this tutorial" << std::endl;
#endif
  return EXIT_SUCCESS;
}