Visual Servoing Platform  version 3.3.0 under development (2020-02-17)
mbtGenericTracking2.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Example of Hybrid Tracking of MBT and MBT KLT.
 *
 * Authors:
 * Aurelien Yol
 * Souriya Trinh
 *
 *****************************************************************************/

#include <iostream>
#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_MODULE_MBT) && defined(VISP_HAVE_DISPLAY)

#include <visp3/core/vpDebug.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpMath.h>
#include <visp3/gui/vpDisplayD3D.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/mbt/vpMbGenericTracker.h>

#define GETOPTARGS "x:m:i:n:de:chtfColwvpT:"

#define USE_XML 0
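// Note: USE_XML is set to 0, so the tracker is configured programmatically in main()
// even when pugixml is available; set it to 1 to load the settings from mbt/cube.xml instead.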

void usage(const char *name, const char *badparam)
{
  fprintf(stdout, "\n\
Example of tracking based on the 3D model.\n\
\n\
SYNOPSIS\n\
  %s [-i <test image path>] [-x <config file>]\n\
  [-m <model name>] [-n <initialisation file base name>] [-e <last frame index>]\n\
  [-t] [-c] [-d] [-h] [-f] [-C] [-o] [-w] [-l] [-v] [-p]\n\
  [-T <tracker type>]\n", name);

  fprintf(stdout, "\n\
OPTIONS: \n\
  -i <input image path> \n\
     Set image input path.\n\
     From this path read images \n\
     \"mbt/cube/image%%04d.pgm\". These \n\
     images come from ViSP-images-x.y.z.tar.gz available \n\
     on the ViSP website.\n\
     Setting the VISP_INPUT_IMAGE_PATH environment\n\
     variable produces the same behavior as using\n\
     this option.\n\
\n\
  -x <config file> \n\
     Set the config file (the xml file) to use.\n\
     The config file is used to specify the parameters of the tracker.\n\
\n\
  -m <model name> \n\
     Specify the name of the model file.\n\
     The model can either be a vrml model (.wrl) or a .cao file.\n\
\n\
  -e <last frame index> \n\
     Specify the index of the last frame. Once reached, the tracking is stopped.\n\
\n\
  -f \n\
     Do not use the vrml model, use the .cao one. These two models are \n\
     equivalent and come from ViSP-images-x.y.z.tar.gz available on the ViSP\n\
     website. However, the .cao model allows using the 3D model-based tracker \n\
     without Coin.\n\
\n\
  -C \n\
     Track only the cube (not the cylinder). In this case the model files are\n\
     cube.cao or cube.wrl instead of cube_and_cylinder.cao and \n\
     cube_and_cylinder.wrl.\n\
\n\
  -n <initialisation file base name> \n\
     Base name of the initialisation file. The file will be 'base_name'.init.\n\
     This base name is also used for the optional picture specifying where to \n\
     click (a .ppm picture).\n\
\n\
  -t \n\
     Turn off the display of the moving edges and KLT points. \n\
\n\
  -d \n\
     Turn off the display.\n\
\n\
  -c\n\
     Disable the mouse click. Useful to automate the \n\
     execution of this program without human intervention.\n\
\n\
  -o\n\
     Use Ogre3D for visibility tests.\n\
\n\
  -w\n\
     When Ogre3D is enabled [-o], show the Ogre3D configuration dialog used to select the renderer.\n\
\n\
  -l\n\
     Use scanline rendering for visibility tests.\n\
\n\
  -v\n\
     Compute the covariance matrix.\n\
\n\
  -p\n\
     Compute the gradient projection error.\n\
\n\
  -T <tracker type>\n\
     Set tracker type (<1 (Edge)>, <2 (KLT)>, <3 (EdgeKlt)>).\n\
\n\
  -h \n\
     Print the help.\n\n");

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
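
// Example invocation (the binary name and data location are illustrative; adapt to your build):
//   VISP_INPUT_IMAGE_PATH=/path/to/ViSP-images ./mbtGenericTracking2 -c -d -T 3
// runs the example without display nor mouse interaction, using the hybrid edge + KLT tracker.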

bool getOptions(int argc, const char **argv, std::string &ipath, std::string &configFile, std::string &modelFile,
                std::string &initFile, long &lastFrame, bool &displayFeatures, bool &click_allowed, bool &display,
                bool &cao3DModel, bool &trackCylinder, bool &useOgre, bool &showOgreConfigDialog, bool &useScanline,
                bool &computeCovariance, bool &projectionError, int &trackerType)
{
  const char *optarg_;
  int c;
  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg_)) > 1) {

    switch (c) {
    case 'e':
      lastFrame = atol(optarg_);
      break;
    case 'i':
      ipath = optarg_;
      break;
    case 'x':
      configFile = optarg_;
      break;
    case 'm':
      modelFile = optarg_;
      break;
    case 'n':
      initFile = optarg_;
      break;
    case 't':
      displayFeatures = false;
      break;
    case 'f':
      cao3DModel = true;
      break;
    case 'c':
      click_allowed = false;
      break;
    case 'd':
      display = false;
      break;
    case 'C':
      trackCylinder = false;
      break;
    case 'o':
      useOgre = true;
      break;
    case 'l':
      useScanline = true;
      break;
    case 'w':
      showOgreConfigDialog = true;
      break;
    case 'v':
      computeCovariance = true;
      break;
    case 'p':
      projectionError = true;
      break;
    case 'T':
      trackerType = atoi(optarg_);
      break;
    case 'h':
      usage(argv[0], NULL);
      return false;
      break;

    default:
      usage(argv[0], optarg_);
      return false;
      break;
    }
  }

  if ((c == 1) || (c == -1)) {
    // standalone param or error
    usage(argv[0], NULL);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg_ << std::endl << std::endl;
    return false;
  }

  return true;
}

int main(int argc, const char **argv)
{
  try {
    std::string env_ipath;
    std::string opt_ipath;
    std::string ipath;
    std::string opt_configFile;
    std::string opt_modelFile;
    std::string modelFile;
    std::string opt_initFile;
    std::string initFile;
    long opt_lastFrame = -1;
    bool displayFeatures = true;
    bool opt_click_allowed = true;
    bool opt_display = true;
    bool cao3DModel = false;
    bool trackCylinder = true;
    bool useOgre = false;
    bool showOgreConfigDialog = false;
    bool useScanline = false;
    bool computeCovariance = false;
    bool projectionError = false;
    int trackerType = vpMbGenericTracker::EDGE_TRACKER;
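    // trackerType maps to the -T option: 1 = moving-edge tracker, 2 = KLT tracker,
    // 3 = hybrid edge + KLT tracker (EDGE_TRACKER | KLT_TRACKER).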

    // Get the visp-images-data package path or VISP_INPUT_IMAGE_PATH
    // environment variable value
    env_ipath = vpIoTools::getViSPImagesDataPath();

    // Set the default input path
    if (!env_ipath.empty())
      ipath = env_ipath;

    // Read the command line options
    if (!getOptions(argc, argv, opt_ipath, opt_configFile, opt_modelFile, opt_initFile, opt_lastFrame, displayFeatures,
                    opt_click_allowed, opt_display, cao3DModel, trackCylinder, useOgre, showOgreConfigDialog,
                    useScanline, computeCovariance, projectionError, trackerType)) {
      return EXIT_FAILURE;
    }

    // Test if an input path is set
    if (opt_ipath.empty() && env_ipath.empty()) {
      usage(argv[0], NULL);
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH " << std::endl
                << "  environment variable to specify the location of the " << std::endl
                << "  image path where test images are located." << std::endl
                << std::endl;

      return EXIT_FAILURE;
    }

    // Get the option values
    if (!opt_ipath.empty())
      ipath = vpIoTools::createFilePath(opt_ipath, "mbt/cube/image%04d.pgm");
    else
      ipath = vpIoTools::createFilePath(env_ipath, "mbt/cube/image%04d.pgm");

#if defined(VISP_HAVE_PUGIXML) && USE_XML
    std::string configFile;
    if (!opt_configFile.empty())
      configFile = opt_configFile;
    else if (!opt_ipath.empty())
      configFile = vpIoTools::createFilePath(opt_ipath, "mbt/cube.xml");
    else
      configFile = vpIoTools::createFilePath(env_ipath, "mbt/cube.xml");
#endif

    if (!opt_modelFile.empty()) {
      modelFile = opt_modelFile;
    } else {
      std::string modelFileCao;
      std::string modelFileWrl;
      if (trackCylinder) {
        modelFileCao = "mbt/cube_and_cylinder.cao";
        modelFileWrl = "mbt/cube_and_cylinder.wrl";
      } else {
        modelFileCao = "mbt/cube.cao";
        modelFileWrl = "mbt/cube.wrl";
      }

      if (!opt_ipath.empty()) {
        if (cao3DModel) {
          modelFile = vpIoTools::createFilePath(opt_ipath, modelFileCao);
        } else {
#ifdef VISP_HAVE_COIN3D
          modelFile = vpIoTools::createFilePath(opt_ipath, modelFileWrl);
#else
          std::cerr << "Coin is not detected in ViSP. Use the .cao model instead." << std::endl;
          modelFile = vpIoTools::createFilePath(opt_ipath, modelFileCao);
#endif
        }
      } else {
        if (cao3DModel) {
          modelFile = vpIoTools::createFilePath(env_ipath, modelFileCao);
        } else {
#ifdef VISP_HAVE_COIN3D
          modelFile = vpIoTools::createFilePath(env_ipath, modelFileWrl);
#else
          std::cerr << "Coin is not detected in ViSP. Use the .cao model instead." << std::endl;
          modelFile = vpIoTools::createFilePath(env_ipath, modelFileCao);
#endif
        }
      }
    }

    if (!opt_initFile.empty())
      initFile = opt_initFile;
    else if (!opt_ipath.empty())
      initFile = vpIoTools::createFilePath(opt_ipath, "mbt/cube");
    else
      initFile = vpIoTools::createFilePath(env_ipath, "mbt/cube");

    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
    vpImage<unsigned char> I1, I2, I3;
    vpVideoReader reader;

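    // The input path holds a printf-like pattern ("image%04d.pgm") that the video reader
    // expands to step through the image sequence.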
    reader.setFileName(ipath);
    try {
      reader.open(I1);
      I2 = I1;
      I3 = I1;
    } catch (...) {
      std::cerr << "Cannot open sequence: " << ipath << std::endl;
      return EXIT_FAILURE;
    }

    if (opt_lastFrame > 1 && opt_lastFrame < reader.getLastFrameIndex())
      reader.setLastFrameIndex(opt_lastFrame);

    reader.acquire(I1);
    I2 = I1;
    I3 = I1;

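    // The three "cameras" are fed copies of the same monocular image: the example duplicates
    // the single input view so that the multi-camera interface of vpMbGenericTracker is exercised.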
    mapOfImages["Camera1"] = &I1;
    mapOfImages["Camera2"] = &I2;
    mapOfImages["Camera3"] = &I3;

    // initialise a display
#if defined VISP_HAVE_X11
    vpDisplayX display1, display2, display3;
#elif defined VISP_HAVE_GDI
    vpDisplayGDI display1, display2, display3;
#elif defined VISP_HAVE_OPENCV
    vpDisplayOpenCV display1, display2, display3;
#elif defined VISP_HAVE_D3D9
    vpDisplayD3D display1, display2, display3;
#elif defined VISP_HAVE_GTK
    vpDisplayGTK display1, display2, display3;
#else
    opt_display = false;
#endif

    if (opt_display) {
#if defined(VISP_HAVE_DISPLAY)
      display1.setDownScalingFactor(vpDisplay::SCALE_AUTO);
      display2.setDownScalingFactor(vpDisplay::SCALE_AUTO);
      display3.setDownScalingFactor(vpDisplay::SCALE_AUTO);

      display1.init(I1, 100, 100, "Test tracking (Cam1)");
      display2.init(I2, (int)(I1.getWidth() / vpDisplay::getDownScalingFactor(I1)) + 110, 100, "Test tracking (Cam2)");
      display3.init(I3, 100, (int)(I1.getHeight() / vpDisplay::getDownScalingFactor(I1)) + 110, "Test tracking (Cam3)");
#endif
      vpDisplay::display(I1);
      vpDisplay::display(I2);
      vpDisplay::display(I3);

      vpDisplay::flush(I1);
      vpDisplay::flush(I2);
      vpDisplay::flush(I3);
    }

    // Object pointer to check that inheritance is ok
    vpMbTracker *tracker = new vpMbGenericTracker(3, trackerType);
    std::map<std::string, vpHomogeneousMatrix> mapOfCameraPoses;
    std::map<std::string, vpCameraParameters> mapOfCameraParams;
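    // Poses and intrinsics are exchanged with the tracker through maps indexed by the camera
    // names ("Camera1", "Camera2", "Camera3") used throughout this example.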

    // Initialise the tracker: camera parameters, moving edge and KLT settings
#if defined(VISP_HAVE_PUGIXML) && USE_XML
    // From the xml file
    std::map<std::string, std::string> mapOfConfigFiles;
    mapOfConfigFiles["Camera1"] = configFile;
    mapOfConfigFiles["Camera2"] = configFile;
    mapOfConfigFiles["Camera3"] = configFile;
    dynamic_cast<vpMbGenericTracker *>(tracker)->loadConfigFile(mapOfConfigFiles);
#else
    // By setting the parameters:
    vpCameraParameters cam;
    cam.initPersProjWithoutDistortion(547, 542, 338, 234);
    mapOfCameraParams["Camera1"] = cam;
    mapOfCameraParams["Camera2"] = cam;
    mapOfCameraParams["Camera3"] = cam;

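    // Moving-edge settings: the mask size and number define the convolution masks used to find
    // the edge orientation, the range is the search distance (in pixels) along the edge normal,
    // the threshold rejects low-contrast candidates, mu1/mu2 bound the allowed contrast variation,
    // and the sample step is the spacing (in pixels) between two tracked points on a line.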
    vpMe me;
    me.setMaskSize(5);
    me.setMaskNumber(180);
    me.setRange(7);
    me.setThreshold(5000);
    me.setMu1(0.5);
    me.setMu2(0.5);
    me.setSampleStep(4);
    std::map<std::string, vpMe> mapOfMe;
    mapOfMe["Camera1"] = me;
    mapOfMe["Camera2"] = me;
    mapOfMe["Camera3"] = me;

#if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
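    // KLT settings: up to 10000 features, a 5x5 tracking window, a quality level of 0.01,
    // a minimum distance of 5 pixels between features, a Harris parameter of 0.01,
    // a 3x3 block size and 3 pyramid levels.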
    vpKltOpencv klt;
    klt.setMaxFeatures(10000);
    klt.setWindowSize(5);
    klt.setQuality(0.01);
    klt.setMinDistance(5);
    klt.setHarrisFreeParameter(0.01);
    klt.setBlockSize(3);
    klt.setPyramidLevels(3);
    std::map<std::string, vpKltOpencv> mapOfKlt;
    mapOfKlt["Camera1"] = klt;
    mapOfKlt["Camera2"] = klt;
    mapOfKlt["Camera3"] = klt;

    dynamic_cast<vpMbGenericTracker *>(tracker)->setKltOpencv(mapOfKlt);
    dynamic_cast<vpMbGenericTracker *>(tracker)->setKltMaskBorder(5);
#endif

    dynamic_cast<vpMbGenericTracker *>(tracker)->setCameraParameters(mapOfCameraParams);
    dynamic_cast<vpMbGenericTracker *>(tracker)->setMovingEdge(mapOfMe);
    tracker->setAngleAppear(vpMath::rad(65));
    tracker->setAngleDisappear(vpMath::rad(75));

    // Specify the near and far clipping distances (in meters)
    tracker->setNearClippingDistance(0.01);
    tracker->setFarClippingDistance(0.90);

    std::map<std::string, unsigned int> mapOfClippingFlags;
    dynamic_cast<vpMbGenericTracker *>(tracker)->getClipping(mapOfClippingFlags);
    for (std::map<std::string, unsigned int>::iterator it = mapOfClippingFlags.begin(); it != mapOfClippingFlags.end();
         ++it) {
      it->second = (it->second | vpMbtPolygon::FOV_CLIPPING);
    }

    dynamic_cast<vpMbGenericTracker *>(tracker)->setClipping(mapOfClippingFlags);
    // tracker->setClipping(tracker->getClipping() | vpMbtPolygon::LEFT_CLIPPING
    // | vpMbtPolygon::RIGHT_CLIPPING | vpMbtPolygon::UP_CLIPPING |
    // vpMbtPolygon::DOWN_CLIPPING); // Equivalent to FOV_CLIPPING
#endif

    // Display the moving edges and the KLT points
    tracker->setDisplayFeatures(displayFeatures);

    // Tells if the tracker has to use Ogre3D for visibility tests
    tracker->setOgreVisibilityTest(useOgre);
    if (useOgre)
      tracker->setOgreShowConfigDialog(showOgreConfigDialog);

    // Tells if the tracker has to use the scanline visibility tests
    tracker->setScanLineVisibilityTest(useScanline);

    // Tells if the tracker has to compute the covariance matrix
    tracker->setCovarianceComputation(computeCovariance);

    // Tells if the tracker has to compute the projection error
    tracker->setProjectionErrorComputation(projectionError);

    // Retrieve the camera parameters from the tracker
    dynamic_cast<vpMbGenericTracker *>(tracker)->getCameraParameters(mapOfCameraParams);

    // Loop to position the cube
    if (opt_display && opt_click_allowed) {
      while (!vpDisplay::getClick(I1, false)) {
        vpDisplay::display(I1);
        vpDisplay::displayText(I1, 15, 10, "click after positioning the object", vpColor::red);
        vpDisplay::flush(I1);
      }
    }

    // Load the 3D model (either a vrml file or a .cao file)
    tracker->loadModel(modelFile);

    // Initialise the tracker by clicking on the image
    // This function looks for
    // - a ./cube/cube.init file that defines the 3d coordinates (in meter,
    //   in the object basis) of the points used for the initialisation
    // - a ./cube/cube.ppm file to display where the user has to click
    //   (optional, set by the third parameter)
    if (opt_display && opt_click_allowed) {
      std::map<std::string, std::string> mapOfInitFiles;
      mapOfInitFiles["Camera1"] = initFile;

      dynamic_cast<vpMbGenericTracker *>(tracker)->initClick(mapOfImages, mapOfInitFiles, true);
      dynamic_cast<vpMbGenericTracker *>(tracker)->getPose(mapOfCameraPoses);

      // display the 3D model at the given pose
      dynamic_cast<vpMbGenericTracker *>(tracker)->display(mapOfImages, mapOfCameraPoses, mapOfCameraParams,
                                                           vpColor::red);
    } else {
      vpHomogeneousMatrix c1Moi(0.02044769891, 0.1101505452, 0.5078963719, 2.063603907, 1.110231561, -0.4392789872);
      std::map<std::string, vpHomogeneousMatrix> mapOfInitPoses;
      mapOfInitPoses["Camera1"] = c1Moi;

      dynamic_cast<vpMbGenericTracker *>(tracker)->initFromPose(mapOfImages, mapOfInitPoses);
    }
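    // Only the reference camera ("Camera1") is given an init file (or an initial pose); since the
    // three views share the same image, the resulting pose also applies to the other cameras.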

    // track the model
    dynamic_cast<vpMbGenericTracker *>(tracker)->track(mapOfImages);
    dynamic_cast<vpMbGenericTracker *>(tracker)->getPose(mapOfCameraPoses);

    if (opt_display) {
      vpDisplay::flush(I1);
      vpDisplay::flush(I2);
      vpDisplay::flush(I3);
    }

    bool quit = false, click = false;
    while (!reader.end() && !quit) {
      // acquire a new image
      reader.acquire(I1);
      I2 = I1;
      I3 = I1;
      mapOfImages["Camera1"] = &I1;
      mapOfImages["Camera2"] = &I2;
      mapOfImages["Camera3"] = &I3;

      // display the image
      if (opt_display) {
        vpDisplay::display(I1);
        vpDisplay::display(I2);
        vpDisplay::display(I3);

        std::stringstream ss;
        ss << "Num frame: " << reader.getFrameIndex() << "/" << reader.getLastFrameIndex();
        vpDisplay::displayText(I1, 40, 20, ss.str(), vpColor::red);
      }

      // Test to reset the tracker
      if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 10) {
        std::cout << "----------Test reset tracker----------" << std::endl;
        if (opt_display) {
          vpDisplay::display(I1);
          vpDisplay::display(I2);
          vpDisplay::display(I3);
        }

        tracker->resetTracker();
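        // resetTracker() discards the model and every tracker setting, so the configuration,
        // the model and the pose all have to be set again below before tracking can resume.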
#if defined(VISP_HAVE_PUGIXML) && USE_XML
        dynamic_cast<vpMbGenericTracker *>(tracker)->loadConfigFile(mapOfConfigFiles);
#else
        // By setting the parameters:
        cam.initPersProjWithoutDistortion(547, 542, 338, 234);
        mapOfCameraParams["Camera1"] = cam;
        mapOfCameraParams["Camera2"] = cam;
        mapOfCameraParams["Camera3"] = cam;

        me.setMaskSize(5);
        me.setMaskNumber(180);
        me.setRange(7);
        me.setThreshold(5000);
        me.setMu1(0.5);
        me.setMu2(0.5);
        me.setSampleStep(4);

        mapOfMe["Camera1"] = me;
        mapOfMe["Camera2"] = me;
        mapOfMe["Camera3"] = me;

#if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
        klt.setMaxFeatures(10000);
        klt.setWindowSize(5);
        klt.setQuality(0.01);
        klt.setMinDistance(5);
        klt.setHarrisFreeParameter(0.01);
        klt.setBlockSize(3);
        klt.setPyramidLevels(3);

        mapOfKlt["Camera1"] = klt;
        mapOfKlt["Camera2"] = klt;
        mapOfKlt["Camera3"] = klt;

        dynamic_cast<vpMbGenericTracker *>(tracker)->setKltOpencv(mapOfKlt);
        dynamic_cast<vpMbGenericTracker *>(tracker)->setKltMaskBorder(5);
#endif

        dynamic_cast<vpMbGenericTracker *>(tracker)->setCameraParameters(mapOfCameraParams);
        dynamic_cast<vpMbGenericTracker *>(tracker)->setMovingEdge(mapOfMe);
        tracker->setAngleAppear(vpMath::rad(65));
        tracker->setAngleDisappear(vpMath::rad(75));

        // Specify the near and far clipping distances (in meters)
        tracker->setNearClippingDistance(0.01);
        tracker->setFarClippingDistance(0.90);

        dynamic_cast<vpMbGenericTracker *>(tracker)->getClipping(mapOfClippingFlags);
        for (std::map<std::string, unsigned int>::iterator it = mapOfClippingFlags.begin();
             it != mapOfClippingFlags.end(); ++it) {
          it->second = (it->second | vpMbtPolygon::FOV_CLIPPING);
        }

        dynamic_cast<vpMbGenericTracker *>(tracker)->setClipping(mapOfClippingFlags);
        // tracker->setClipping(tracker->getClipping() | vpMbtPolygon::LEFT_CLIPPING
        // | vpMbtPolygon::RIGHT_CLIPPING | vpMbtPolygon::UP_CLIPPING |
        // vpMbtPolygon::DOWN_CLIPPING); // Equivalent to FOV_CLIPPING
#endif
        tracker->loadModel(modelFile);
        dynamic_cast<vpMbGenericTracker *>(tracker)->setCameraParameters(mapOfCameraParams);
        tracker->setOgreVisibilityTest(useOgre);
        tracker->setScanLineVisibilityTest(useScanline);
        tracker->setCovarianceComputation(computeCovariance);
        tracker->setProjectionErrorComputation(projectionError);
        dynamic_cast<vpMbGenericTracker *>(tracker)->initFromPose(mapOfImages, mapOfCameraPoses);
      }

      // Test to set an initial pose
      if (reader.getFrameIndex() == reader.getFirstFrameIndex() + 50) {
        vpHomogeneousMatrix c1Moi;
        c1Moi.buildFrom(0.0439540832, 0.0845870108, 0.5477322481, 2.179498458, 0.8611798108, -0.3491961946);
        std::map<std::string, vpHomogeneousMatrix> mapOfSetPoses;
        mapOfSetPoses["Camera1"] = c1Moi;

        std::cout << "Test set pose" << std::endl;
        dynamic_cast<vpMbGenericTracker *>(tracker)->setPose(mapOfImages, mapOfSetPoses);
      }

      // track the object: stop tracking from frame 40 to 50
      if (reader.getFrameIndex() - reader.getFirstFrameIndex() < 40 ||
          reader.getFrameIndex() - reader.getFirstFrameIndex() >= 50) {
        dynamic_cast<vpMbGenericTracker *>(tracker)->track(mapOfImages);
        dynamic_cast<vpMbGenericTracker *>(tracker)->getPose(mapOfCameraPoses);
        if (opt_display) {
          // display the 3D model
          if (reader.getFrameIndex() - reader.getFirstFrameIndex() >= 50) {
            std::map<std::string, const vpImage<unsigned char> *> mapOfSubImages;
            mapOfSubImages["Camera1"] = &I1;
            mapOfSubImages["Camera2"] = &I2;

            dynamic_cast<vpMbGenericTracker *>(tracker)->display(mapOfSubImages, mapOfCameraPoses, mapOfCameraParams,
                                                                 vpColor::red, 3);
          } else {
            dynamic_cast<vpMbGenericTracker *>(tracker)->display(mapOfImages, mapOfCameraPoses, mapOfCameraParams,
                                                                 vpColor::red, 3);
          }
          // display the frame
          vpDisplay::displayFrame(I1, mapOfCameraPoses["Camera1"], mapOfCameraParams["Camera1"], 0.05);
          vpDisplay::displayFrame(I2, mapOfCameraPoses["Camera2"], mapOfCameraParams["Camera2"], 0.05);
          vpDisplay::displayFrame(I3, mapOfCameraPoses["Camera3"], mapOfCameraParams["Camera3"], 0.05);
        }
      }

      if (opt_click_allowed && opt_display) {
        vpDisplay::displayText(I1, 10, 10, "Click to quit", vpColor::red);
        vpMouseButton::vpMouseButtonType button;
        if (vpDisplay::getClick(I1, button, click)) {
          switch (button) {
          case vpMouseButton::button1:
            quit = !click;
            break;

          case vpMouseButton::button3:
            click = !click;
            break;

          default:
            break;
          }
        }
      }

      if (computeCovariance) {
        std::cout << "Covariance matrix: \n" << tracker->getCovarianceMatrix() << std::endl << std::endl;
      }

      if (projectionError) {
        std::cout << "Projection error: " << tracker->getProjectionError() << std::endl << std::endl;
      }

      if (opt_display) {
        vpDisplay::flush(I1);
        vpDisplay::flush(I2);
        vpDisplay::flush(I3);
      }
    }

    std::cout << "Reached last frame: " << reader.getFrameIndex() << std::endl;
    std::cout << "\nFinal poses, c1Mo:\n"
              << mapOfCameraPoses["Camera1"] << "\nc2Mo:\n"
              << mapOfCameraPoses["Camera2"] << "\nc3Mo:\n"
              << mapOfCameraPoses["Camera3"] << std::endl;

    if (opt_click_allowed && !quit) {
      vpDisplay::getClick(I1);
    }
    reader.close();

    delete tracker;
    tracker = NULL;

#if defined(VISP_HAVE_COIN3D) && (COIN_MAJOR_VERSION >= 2)
    // Cleanup memory allocated by the Coin library, used to load a vrml model in
    // vpMbGenericTracker::loadModel(). We clean only if Coin was used.
    if (!cao3DModel)
      SoDB::finish();
#endif

    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    std::cout << "Caught an exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
}

#else

int main()
{
  std::cout << "visp_mbt and visp_gui modules are required to run this example." << std::endl;
  return EXIT_SUCCESS;
}

#endif