Visual Servoing Platform version 3.3.0 under development (2020-02-17)
testGenericTracker.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Regression test for MBT.
33  *
34  *****************************************************************************/
35 
42 #include <cstdlib>
43 #include <iostream>
44 #include <visp3/core/vpConfig.h>
45 
46 #if defined(VISP_HAVE_MODULE_MBT)
47 
48 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
49 #include <type_traits>
50 #endif
51 
52 #include <visp3/core/vpIoTools.h>
53 #include <visp3/core/vpImageDraw.h>
54 #include <visp3/core/vpFont.h>
55 #include <visp3/io/vpParseArgv.h>
56 #include <visp3/io/vpImageIo.h>
57 #include <visp3/gui/vpDisplayX.h>
58 #include <visp3/gui/vpDisplayGDI.h>
59 #include <visp3/gui/vpDisplayOpenCV.h>
60 #include <visp3/gui/vpDisplayD3D.h>
61 #include <visp3/gui/vpDisplayGTK.h>
62 #include <visp3/mbt/vpMbGenericTracker.h>
63 
64 #define GETOPTARGS "i:dsclt:e:DmCh"
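// Example invocation (assuming the dataset is reachable through VISP_INPUT_IMAGE_PATH
// or the -i option), for instance:
//   ./testGenericTracker -c -d -t 3 -e 20 -D
// i.e. edge + KLT tracking with depth, limited to the first 20 frames, without
// display nor user interaction.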
65 
66 namespace
67 {
68  void usage(const char *name, const char *badparam)
69  {
70  fprintf(stdout, "\n\
71  Regression test for vpMbGenericTracker.\n\
72  \n\
73  SYNOPSIS\n\
74  %s [-i <test image path>] [-c] [-d] [-s] [-h] [-l] \n\
75  [-t <tracker type>] [-e <last frame index>] [-D] [-m] [-C]\n", name);
76 
77  fprintf(stdout, "\n\
78  OPTIONS: \n\
79  -i <input image path> \n\
80  Set image input path.\n\
81  These images come from ViSP-images-x.y.z.tar.gz available \n\
82  on the ViSP website.\n\
83  Setting the VISP_INPUT_IMAGE_PATH environment\n\
84  variable produces the same behavior as using\n\
85  this option.\n\
86  \n\
87  -d \n\
88  Turn off the display.\n\
89  \n\
90  -s \n\
91  If the display is turned off, tracking results are saved as images in the results folder.\n\
92  \n\
93  -c\n\
94  Disable the mouse click. Useful to automate the \n\
95  execution of this program without human intervention.\n\
96  \n\
97  -t <tracker type>\n\
98  Set tracker type (<1 (Edge)>, <2 (KLT)>, <3 (both)>) for color sensor.\n\
99  \n\
100  -l\n\
101  Use the scanline for visibility tests.\n\
102  \n\
103  -e <last frame index>\n\
104  Specify the index of the last frame. Once reached, the tracking is stopped.\n\
105  \n\
106  -D \n\
107  Use depth.\n\
108  \n\
109  -m \n\
110  Set a tracking mask.\n\
111  \n\
112  -C \n\
113  Use color images.\n\
114  \n\
115  -h \n\
116  Print the help.\n\n");
117 
118  if (badparam)
119  fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
120  }
121 
122  bool getOptions(int argc, const char **argv, std::string &ipath, bool &click_allowed, bool &display, bool &save,
123  bool &useScanline, int &trackerType, int &lastFrame, bool &use_depth, bool &use_mask,
124  bool &use_color_image)
125  {
126  const char *optarg_;
127  int c;
128  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg_)) > 1) {
129 
130  switch (c) {
131  case 'i':
132  ipath = optarg_;
133  break;
134  case 'c':
135  click_allowed = false;
136  break;
137  case 'd':
138  display = false;
139  break;
140  case 's':
141  save = true;
142  break;
143  case 'l':
144  useScanline = true;
145  break;
146  case 't':
147  trackerType = atoi(optarg_);
148  break;
149  case 'e':
150  lastFrame = atoi(optarg_);
151  break;
152  case 'D':
153  use_depth = true;
154  break;
155  case 'm':
156  use_mask = true;
157  break;
158  case 'C':
159  use_color_image = true;
160  break;
161  case 'h':
162  usage(argv[0], NULL);
163  return false;
164  break;
165 
166  default:
167  usage(argv[0], optarg_);
168  return false;
169  break;
170  }
171  }
172 
173  if ((c == 1) || (c == -1)) {
174  // standalone param or error
175  usage(argv[0], NULL);
176  std::cerr << "ERROR: " << std::endl;
177  std::cerr << " Bad argument " << optarg_ << std::endl << std::endl;
178  return false;
179  }
180 
181  return true;
182  }
183 
184  template <typename Type>
185  bool read_data(const std::string &input_directory, int cpt, const vpCameraParameters &cam_depth,
186  vpImage<Type> &I, vpImage<uint16_t> &I_depth,
187  std::vector<vpColVector> &pointcloud, vpHomogeneousMatrix &cMo)
188  {
189 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
190  static_assert(std::is_same<Type, unsigned char>::value || std::is_same<Type, vpRGBa>::value,
191  "Template function supports only unsigned char and vpRGBa images!");
192 #endif
193  char buffer[256];
194  sprintf(buffer, std::string(input_directory + "/Images/Image_%04d.pgm").c_str(), cpt);
195  std::string image_filename = buffer;
196 
197  sprintf(buffer, std::string(input_directory + "/Depth/Depth_%04d.bin").c_str(), cpt);
198  std::string depth_filename = buffer;
199 
200  sprintf(buffer, std::string(input_directory + "/CameraPose/Camera_%03d.txt").c_str(), cpt);
201  std::string pose_filename = buffer;
202 
203  if (!vpIoTools::checkFilename(image_filename) || !vpIoTools::checkFilename(depth_filename)
204  || !vpIoTools::checkFilename(pose_filename))
205  return false;
206 
207  vpImageIo::read(I, image_filename);
208 
209  unsigned int depth_width = 0, depth_height = 0;
210  std::ifstream file_depth(depth_filename.c_str(), std::ios::in | std::ios::binary);
211  if (!file_depth.is_open())
212  return false;
213 
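 // The .bin depth file stores, in little-endian order, the image height and width
 // followed by the height*width uint16_t depth values in row-major order. Each
 // value is scaled to meters and back-projected with the depth camera intrinsics
 // to build an organized point cloud.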
214  vpIoTools::readBinaryValueLE(file_depth, depth_height);
215  vpIoTools::readBinaryValueLE(file_depth, depth_width);
216  I_depth.resize(depth_height, depth_width);
217  pointcloud.resize(depth_height*depth_width);
218 
219  const float depth_scale = 0.000030518f;
220  for (unsigned int i = 0; i < I_depth.getHeight(); i++) {
221  for (unsigned int j = 0; j < I_depth.getWidth(); j++) {
222  vpIoTools::readBinaryValueLE(file_depth, I_depth[i][j]);
223  double x = 0.0, y = 0.0, Z = I_depth[i][j] * depth_scale;
224  vpPixelMeterConversion::convertPoint(cam_depth, j, i, x, y);
225  vpColVector pt3d(4, 1.0);
226  pt3d[0] = x*Z;
227  pt3d[1] = y*Z;
228  pt3d[2] = Z;
229  pointcloud[i*I_depth.getWidth()+j] = pt3d;
230  }
231  }
232 
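 // The ground-truth pose cMo is stored as a plain-text 4x4 homogeneous matrix,
 // read row by row below.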
233  std::ifstream file_pose(pose_filename.c_str());
234  if (!file_pose.is_open()) {
235  return false;
236  }
237 
238  for (unsigned int i = 0; i < 4; i++) {
239  for (unsigned int j = 0; j < 4; j++) {
240  file_pose >> cMo[i][j];
241  }
242  }
243 
244  return true;
245  }
246 
247  void convert(const vpImage<vpRGBa> &src, vpImage<vpRGBa> &dst)
248  {
249  dst = src;
250  }
251 
252  void convert(const vpImage<unsigned char> &src, vpImage<vpRGBa> &dst)
253  {
254  vpImageConvert::convert(src, dst);
255  }
256 
257  template <typename Type>
258  bool run(const std::string &input_directory,
259  bool opt_click_allowed, bool opt_display, bool useScanline, int trackerType_image,
260  int opt_lastFrame, bool use_depth, bool use_mask, bool save) {
261 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
262  static_assert(std::is_same<Type, unsigned char>::value || std::is_same<Type, vpRGBa>::value,
263  "Template function supports only unsigned char and vpRGBa images!");
264 #endif
265  // Initialise a display
266 #if defined VISP_HAVE_X11
267  vpDisplayX display1, display2;
268 #elif defined VISP_HAVE_GDI
269  vpDisplayGDI display1, display2;
270 #elif defined VISP_HAVE_OPENCV
271  vpDisplayOpenCV display1, display2;
272 #elif defined VISP_HAVE_D3D9
273  vpDisplayD3D display1, display2;
274 #elif defined VISP_HAVE_GTK
275  vpDisplayGTK display1, display2;
276 #else
277  opt_display = false;
278 #endif
279 
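 // The generic tracker is built with two cameras: the first one (color) uses
 // moving-edge and/or KLT features depending on trackerType_image, the second
 // one (depth) relies on dense depth features.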
280  std::vector<int> tracker_type(2);
281  tracker_type[0] = trackerType_image;
282  tracker_type[1] = vpMbGenericTracker::DEPTH_DENSE_TRACKER;
283  vpMbGenericTracker tracker(tracker_type);
284 #if defined(VISP_HAVE_PUGIXML)
285  tracker.loadConfigFile(input_directory + "/Config/chateau.xml", input_directory + "/Config/chateau_depth.xml");
286 #else
287  {
288  vpCameraParameters cam_color, cam_depth;
289  cam_color.initPersProjWithoutDistortion(700.0, 700.0, 320.0, 240.0);
290  cam_depth.initPersProjWithoutDistortion(700.0, 700.0, 320.0, 240.0);
291  tracker.setCameraParameters(cam_color, cam_depth);
292  }
293 
294  // Edge
295  vpMe me;
296  me.setMaskSize(5);
297  me.setMaskNumber(180);
298  me.setRange(8);
299  me.setThreshold(10000);
300  me.setMu1(0.5);
301  me.setMu2(0.5);
302  me.setSampleStep(5);
303  tracker.setMovingEdge(me);
304 
305  // Klt
306 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
307  vpKltOpencv klt;
308  tracker.setKltMaskBorder(5);
309  klt.setMaxFeatures(10000);
310  klt.setWindowSize(5);
311  klt.setQuality(0.01);
312  klt.setMinDistance(5);
313  klt.setHarrisFreeParameter(0.02);
314  klt.setBlockSize(3);
315  klt.setPyramidLevels(3);
316 
317  tracker.setKltOpencv(klt);
318 #endif
319 
320  // Depth
321  tracker.setDepthNormalFeatureEstimationMethod(vpMbtFaceDepthNormal::ROBUST_FEATURE_ESTIMATION);
322  tracker.setDepthNormalPclPlaneEstimationMethod(2);
323  tracker.setDepthNormalPclPlaneEstimationRansacMaxIter(200);
324  tracker.setDepthNormalPclPlaneEstimationRansacThreshold(0.001);
325  tracker.setDepthNormalSamplingStep(2, 2);
326 
327  tracker.setDepthDenseSamplingStep(4, 4);
328 
329  tracker.setAngleAppear(vpMath::rad(85.0));
330  tracker.setAngleDisappear(vpMath::rad(89.0));
331  tracker.setNearClippingDistance(0.01);
332  tracker.setFarClippingDistance(2.0);
333  tracker.setClipping(tracker.getClipping() | vpMbtPolygon::FOV_CLIPPING);
334 #endif
335 
336 #ifdef VISP_HAVE_COIN3D
337  tracker.loadModel(input_directory + "/Models/chateau.wrl", input_directory + "/Models/chateau.cao");
338 #else
339  tracker.loadModel(input_directory + "/Models/chateau.cao", input_directory + "/Models/chateau.cao");
340 #endif
341  vpHomogeneousMatrix T;
342  T[0][0] = -1;
343  T[0][3] = -0.2;
344  T[1][1] = 0;
345  T[1][2] = 1;
346  T[1][3] = 0.12;
347  T[2][1] = 1;
348  T[2][2] = 0;
349  T[2][3] = -0.15;
350  tracker.loadModel(input_directory + "/Models/cube.cao", false, T);
351  vpCameraParameters cam_color, cam_depth;
352  tracker.getCameraParameters(cam_color, cam_depth);
353  tracker.setDisplayFeatures(true);
354  tracker.setScanLineVisibilityTest(useScanline);
355 
356  std::map<int, std::pair<double, double> > map_thresh;
357  //Take the highest thresholds between all CI machines
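 // Keys are tracker-type bitmasks (EDGE_TRACKER and/or KLT_TRACKER, possibly
 // combined with DEPTH_DENSE_TRACKER); values are the maximum accepted
 // translation error (in meters) and rotation error (in degrees) with respect
 // to the ground-truth pose.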
358 #ifdef VISP_HAVE_COIN3D
359  map_thresh[vpMbGenericTracker::EDGE_TRACKER]
360  = useScanline ? std::pair<double, double>(0.005, 3.9) : std::pair<double, double>(0.007, 2.9);
361 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
362  map_thresh[vpMbGenericTracker::KLT_TRACKER]
363  = useScanline ? std::pair<double, double>(0.006, 1.9) : std::pair<double, double>(0.005, 1.3);
364  map_thresh[vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER]
365  = useScanline ? std::pair<double, double>(0.005, 3.2) : std::pair<double, double>(0.006, 2.8);
366 #endif
367  map_thresh[vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::DEPTH_DENSE_TRACKER]
368  = useScanline ? std::pair<double, double>(0.003, 1.7) : std::pair<double, double>(0.002, 0.8);
369 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
370  map_thresh[vpMbGenericTracker::KLT_TRACKER | vpMbGenericTracker::DEPTH_DENSE_TRACKER]
371  = std::pair<double, double>(0.002, 0.3);
372  map_thresh[vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER | vpMbGenericTracker::DEPTH_DENSE_TRACKER]
373  = useScanline ? std::pair<double, double>(0.002, 1.8) : std::pair<double, double>(0.002, 0.7);
374 #endif
375 #else
376  map_thresh[vpMbGenericTracker::EDGE_TRACKER]
377  = useScanline ? std::pair<double, double>(0.007, 2.3) : std::pair<double, double>(0.007, 2.1);
378 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
379  map_thresh[vpMbGenericTracker::KLT_TRACKER]
380  = useScanline ? std::pair<double, double>(0.006, 1.7) : std::pair<double, double>(0.005, 1.4);
381  map_thresh[vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER]
382  = useScanline ? std::pair<double, double>(0.004, 1.2) : std::pair<double, double>(0.004, 1.0);
383 #endif
384  map_thresh[vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::DEPTH_DENSE_TRACKER]
385  = useScanline ? std::pair<double, double>(0.002, 0.7) : std::pair<double, double>(0.001, 0.4);
386 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
387  map_thresh[vpMbGenericTracker::KLT_TRACKER | vpMbGenericTracker::DEPTH_DENSE_TRACKER]
388  = std::pair<double, double>(0.002, 0.3);
389  map_thresh[vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER | vpMbGenericTracker::DEPTH_DENSE_TRACKER]
390  = useScanline ? std::pair<double, double>(0.001, 0.5) : std::pair<double, double>(0.001, 0.4);
391 #endif
392 #endif
393 
394  vpImage<Type> I, I_depth;
395  vpImage<uint16_t> I_depth_raw;
396  vpHomogeneousMatrix cMo_truth;
397  std::vector<vpColVector> pointcloud;
398  int cpt_frame = 1;
399  if (!read_data(input_directory, cpt_frame, cam_depth, I, I_depth_raw, pointcloud, cMo_truth)) {
400  std::cerr << "Cannot read first frame!" << std::endl;
401  return EXIT_FAILURE;
402  }
403 
404  vpImage<bool> mask(I.getHeight(), I.getWidth());
405  const double roi_step = 7.0;
406  const double roi_step2 = 6.0;
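 // When -m is passed, tracking is restricted to a rectangular region of interest
 // spanning rows and columns from 1/7 to 6/7 of the image; everything outside is
 // masked out.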
407  if (use_mask) {
408  mask = false;
409  for (unsigned int i = (unsigned int) (I.getRows()/roi_step); i < (unsigned int) (I.getRows()*roi_step2/roi_step); i++) {
410  for (unsigned int j = (unsigned int) (I.getCols()/roi_step); j < (unsigned int) (I.getCols()*roi_step2/roi_step); j++) {
411  mask[i][j] = true;
412  }
413  }
414  tracker.setMask(mask);
415  }
416 
417  vpImageConvert::createDepthHistogram(I_depth_raw, I_depth);
418 
419  vpImage<vpRGBa> results(I.getHeight(), I.getWidth() + I_depth.getWidth());
420  vpImage<vpRGBa> resultsColor(I.getHeight(), I.getWidth());
421  vpImage<vpRGBa> resultsDepth(I_depth.getHeight(), I_depth.getWidth());
422  if (save) {
423  vpIoTools::makeDirectory("results");
424  }
425  if (opt_display) {
426 #ifdef VISP_HAVE_DISPLAY
427  display1.init(I, 0, 0, "Image");
428  display2.init(I_depth, (int) I.getWidth(), 0, "Depth");
429 #endif
430  }
431 
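 // Rigid transformation between the color and the depth camera frames ("Camera2"
 // is the default name of the second camera in vpMbGenericTracker): here it is a
 // pure translation of -5 cm along the x-axis.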
432  vpHomogeneousMatrix depth_M_color;
433  depth_M_color[0][3] = -0.05;
434  tracker.setCameraTransformationMatrix("Camera2", depth_M_color);
435  tracker.initFromPose(I, cMo_truth);
436 
437  vpFont font(24);
438  bool click = false, quit = false;
439  std::vector<double> vec_err_t, vec_err_tu;
440  std::vector<double> time_vec;
441  while (read_data(input_directory, cpt_frame, cam_depth, I, I_depth_raw, pointcloud, cMo_truth) && !quit
442  && (opt_lastFrame > 0 ? (int)cpt_frame <= opt_lastFrame : true)) {
443  vpImageConvert::createDepthHistogram(I_depth_raw, I_depth);
444 
445  if (opt_display) {
446  vpDisplay::display(I);
447  vpDisplay::display(I_depth);
448  } else if (save) {
449  convert(I, resultsColor);
450  convert(I_depth, resultsDepth);
451  }
452 
453  double t = vpTime::measureTimeMs();
454  std::map<std::string, const vpImage<Type> *> mapOfImages;
455  mapOfImages["Camera1"] = &I;
456  std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds;
457  mapOfPointclouds["Camera2"] = &pointcloud;
458  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
459  if (!use_depth) {
460  mapOfWidths["Camera2"] = 0;
461  mapOfHeights["Camera2"] = 0;
462  } else {
463  mapOfWidths["Camera2"] = I_depth.getWidth();
464  mapOfHeights["Camera2"] = I_depth.getHeight();
465  }
466 
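 // Track with both sensors: the color image is given for "Camera1" and the point
 // cloud (with its dimensions) for "Camera2". When depth is disabled, the point
 // cloud size is passed as 0x0 so that no depth feature is used.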
467  tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
468  vpHomogeneousMatrix cMo = tracker.getPose();
469  t = vpTime::measureTimeMs() - t;
470  time_vec.push_back(t);
471 
472  if (opt_display) {
473  tracker.display(I, I_depth, cMo, depth_M_color*cMo, cam_color, cam_depth, vpColor::red, 3);
474  vpDisplay::displayFrame(I, cMo, cam_depth, 0.05, vpColor::none, 3);
475  vpDisplay::displayFrame(I_depth, depth_M_color*cMo, cam_depth, 0.05, vpColor::none, 3);
476 
477  std::stringstream ss;
478  ss << "Frame: " << cpt_frame;
479  vpDisplay::displayText(I_depth, 20, 20, ss.str(), vpColor::red);
480  ss.str("");
481  ss << "Nb features: " << tracker.getError().getRows();
482  vpDisplay::displayText(I_depth, 40, 20, ss.str(), vpColor::red);
483  } else if (save) {
484  //Models
485  std::map<std::string, std::vector<std::vector<double> > > mapOfModels;
486  std::map<std::string, unsigned int> mapOfW;
487  mapOfW["Camera1"] = I.getWidth();
488  mapOfW["Camera2"] = I_depth.getWidth();
489  std::map<std::string, unsigned int> mapOfH;
490  mapOfH["Camera1"] = I.getHeight();
491  mapOfH["Camera2"] = I_depth.getHeight();
492  std::map<std::string, vpHomogeneousMatrix> mapOfcMos;
493  mapOfcMos["Camera1"] = cMo;
494  mapOfcMos["Camera2"] = depth_M_color*cMo;
495  std::map<std::string, vpCameraParameters> mapOfCams;
496  mapOfCams["Camera1"] = cam_color;
497  mapOfCams["Camera2"] = cam_depth;
498  tracker.getModelForDisplay(mapOfModels, mapOfW, mapOfH, mapOfcMos, mapOfCams);
499  for (std::map<std::string, std::vector<std::vector<double> > >::const_iterator it = mapOfModels.begin();
500  it != mapOfModels.end(); ++it) {
501  for (size_t i = 0; i < it->second.size(); i++) {
502  // test if it->second[i][0] = 0
503  if (std::fabs(it->second[i][0]) <= std::numeric_limits<double>::epsilon()) {
504  vpImageDraw::drawLine(it->first == "Camera1" ? resultsColor : resultsDepth, vpImagePoint(it->second[i][1], it->second[i][2]),
505  vpImagePoint(it->second[i][3], it->second[i][4]), vpColor::red, 3);
506  }
507  }
508  }
509 
510  //Features
511  std::map<std::string, std::vector<std::vector<double> > > mapOfFeatures;
512  tracker.getFeaturesForDisplay(mapOfFeatures);
513  for (std::map<std::string, std::vector<std::vector<double> > >::const_iterator it = mapOfFeatures.begin();
514  it != mapOfFeatures.end(); ++it) {
515  for (size_t i = 0; i < it->second.size(); i++) {
516  if (std::fabs(it->second[i][0]) <= std::numeric_limits<double>::epsilon()) { // test it->second[i][0] = 0 for ME
517  vpColor color = vpColor::yellow;
518  if (std::fabs(it->second[i][3]) <= std::numeric_limits<double>::epsilon()) { // test it->second[i][3] = 0
519  color = vpColor::green;
520  } else if (std::fabs(it->second[i][3] - 1) <= std::numeric_limits<double>::epsilon()) { // test it->second[i][3] = 1
521  color = vpColor::blue;
522  } else if (std::fabs(it->second[i][3] - 2) <= std::numeric_limits<double>::epsilon()) { // test it->second[i][3] = 2
523  color = vpColor::purple;
524  } else if (std::fabs(it->second[i][3] - 3) <= std::numeric_limits<double>::epsilon()) { // test it->second[i][3] = 3
525  color = vpColor::red;
526  } else if (std::fabs(it->second[i][3] - 4) <= std::numeric_limits<double>::epsilon()) { // test it->second[i][3] = 4
527  color = vpColor::cyan;
528  }
529  vpImageDraw::drawCross(it->first == "Camera1" ? resultsColor : resultsDepth, vpImagePoint(it->second[i][1], it->second[i][2]),
530  3, color, 1);
531  } else if (std::fabs(it->second[i][0] - 1) <= std::numeric_limits<double>::epsilon()) { // test it->second[i][0] = 1 for KLT
532  vpImageDraw::drawCross(it->first == "Camera1" ? resultsColor : resultsDepth, vpImagePoint(it->second[i][1], it->second[i][2]),
533  10, vpColor::red, 1);
534  }
535  }
536  }
537 
538  //Computation time
539  std::ostringstream oss;
540  oss << "Tracking time: " << t << " ms";
541  font.drawText(resultsColor, oss.str(), vpImagePoint(20,20), vpColor::red);
542  }
543 
544  vpPoseVector pose_est(cMo);
545  vpPoseVector pose_truth(cMo_truth);
546  vpColVector t_est(3), t_truth(3);
547  vpColVector tu_est(3), tu_truth(3);
548  for (unsigned int i = 0; i < 3; i++) {
549  t_est[i] = pose_est[i];
550  t_truth[i] = pose_truth[i];
551  tu_est[i] = pose_est[i+3];
552  tu_truth[i] = pose_truth[i+3];
553  }
554 
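 // Accuracy check: the translation error is the Euclidean norm of t_truth - t_est
 // (in meters); the rotation error is the norm of the difference of the theta-u
 // vectors, converted to degrees.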
555  vpColVector t_err = t_truth-t_est, tu_err = tu_truth-tu_est;
556  const double t_thresh = map_thresh[!use_depth ? trackerType_image : trackerType_image | vpMbGenericTracker::DEPTH_DENSE_TRACKER].first;
557  const double tu_thresh = map_thresh[!use_depth ? trackerType_image : trackerType_image | vpMbGenericTracker::DEPTH_DENSE_TRACKER].second;
558  double t_err2 = sqrt(t_err.sumSquare()), tu_err2 = vpMath::deg(sqrt(tu_err.sumSquare()));
559  vec_err_t.push_back( t_err2 );
560  vec_err_tu.push_back( tu_err2 );
561  if ( !use_mask && (t_err2 > t_thresh || tu_err2 > tu_thresh) ) { //no accuracy test with mask
562  std::cerr << "Estimated pose exceeds the error thresholds (t_thresh = " << t_thresh << " ; tu_thresh = " << tu_thresh << ")!" << std::endl;
563  std::cout << "t_err: " << t_err2 << " ; tu_err: " << tu_err2 << std::endl;
564  //TODO: fix MBT to make tests deterministic
565 // return EXIT_FAILURE;
566  }
567 
568  if (opt_display) {
569  if (use_mask) {
570  vpRect roi(vpImagePoint(I.getRows()/roi_step, I.getCols()/roi_step),
571  vpImagePoint(I.getRows()*roi_step2/roi_step, I.getCols()*roi_step2/roi_step));
572  vpDisplay::displayRectangle(I, roi, vpColor::yellow);
574  }
575 
576  vpDisplay::flush(I);
577  vpDisplay::flush(I_depth);
578  } else if (save) {
579  char buffer[256];
580  std::ostringstream oss;
581  oss << "results/image_%04d.png";
582  sprintf(buffer, oss.str().c_str(), cpt_frame);
583 
584  results.insert(resultsColor, vpImagePoint());
585  results.insert(resultsDepth, vpImagePoint(0, resultsColor.getWidth()));
586 
587  vpImageIo::write(results, buffer);
588  }
589 
590  if (opt_display && opt_click_allowed) {
591  vpMouseButton::vpMouseButtonType button;
592  if (vpDisplay::getClick(I, button, click)) {
593  switch (button) {
594  case vpMouseButton::button3:
595  quit = !click;
596  break;
597 
598  case vpMouseButton::button1:
599  click = !click;
600  break;
601 
602  default:
603  break;
604  }
605  }
606  }
607 
608  cpt_frame++;
609  }
610 
611  if (!time_vec.empty())
612  std::cout << "Computation time, Mean: " << vpMath::getMean(time_vec) << " ms ; Median: " << vpMath::getMedian(time_vec)
613  << " ms ; Std: " << vpMath::getStdev(time_vec) << " ms" << std::endl;
614 
615  if (!vec_err_t.empty())
616  std::cout << "Max translation error: " << *std::max_element(vec_err_t.begin(), vec_err_t.end()) << std::endl;
617 
618  if (!vec_err_tu.empty())
619  std::cout << "Max thetau error: " << *std::max_element(vec_err_tu.begin(), vec_err_tu.end()) << std::endl;
620 
621 #if defined(VISP_HAVE_COIN3D) && (COIN_MAJOR_VERSION >= 2)
622  // Cleanup memory allocated by Coin library used to load a vrml model. We clean only if Coin was used.
623  SoDB::finish();
624 #endif
625 
626  return EXIT_SUCCESS;
627  }
628 }
629 
630 int main(int argc, const char *argv[])
631 {
632  try {
633  std::string env_ipath;
634  std::string opt_ipath = "";
635  bool opt_click_allowed = true;
636  bool opt_display = true;
637  bool opt_save = false;
638  bool useScanline = false;
639  int trackerType_image = vpMbGenericTracker::EDGE_TRACKER;
640 #if defined(__mips__) || defined(__mips) || defined(mips) || defined(__MIPS__)
641  // To avoid Debian test timeout
642  int opt_lastFrame = 5;
643 #else
644  int opt_lastFrame = -1;
645 #endif
646  bool use_depth = false;
647  bool use_mask = false;
648  bool use_color_image = false;
649 
650  // Get the visp-images-data package path or VISP_INPUT_IMAGE_PATH
651  // environment variable value
652  env_ipath = vpIoTools::getViSPImagesDataPath();
653 
654  // Read the command line options
655  if (!getOptions(argc, argv, opt_ipath, opt_click_allowed, opt_display, opt_save,
656  useScanline, trackerType_image, opt_lastFrame, use_depth,
657  use_mask, use_color_image)) {
658  return EXIT_FAILURE;
659  }
660 
661  std::cout << "trackerType_image: " << trackerType_image << std::endl;
662  std::cout << "useScanline: " << useScanline << std::endl;
663  std::cout << "use_depth: " << use_depth << std::endl;
664  std::cout << "use_mask: " << use_mask << std::endl;
665  std::cout << "use_color_image: " << use_color_image << std::endl;
666 #ifdef VISP_HAVE_COIN3D
667  std::cout << "COIN3D available." << std::endl;
668 #endif
669 
670 #if !defined(VISP_HAVE_MODULE_KLT) || (!defined(VISP_HAVE_OPENCV) || (VISP_HAVE_OPENCV_VERSION < 0x020100))
671  if (trackerType_image & 2) {
672  std::cout << "KLT features cannot be used: ViSP is not built with "
673  "KLT module or OpenCV is not available.\nTest is not run."
674  << std::endl;
675  return EXIT_SUCCESS;
676  }
677 #endif
678 
679  // Test if an input path is set
680  if (opt_ipath.empty() && env_ipath.empty()) {
681  usage(argv[0], NULL);
682  std::cerr << std::endl << "ERROR:" << std::endl;
683  std::cerr << " Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH " << std::endl
684  << " environment variable to specify the location of the " << std::endl
685  << " image path where test images are located." << std::endl
686  << std::endl;
687 
688  return EXIT_FAILURE;
689  }
690 
691  std::string input_directory = vpIoTools::createFilePath(!opt_ipath.empty() ? opt_ipath : env_ipath, "mbt-depth/Castle-simu");
692  if (!vpIoTools::checkDirectory(input_directory)) {
693  std::cerr << "ViSP-images does not contain the folder: " << input_directory << "!" << std::endl;
694  return EXIT_SUCCESS;
695  }
696 
697  if (use_color_image) {
698  return run<vpRGBa>(input_directory, opt_click_allowed, opt_display, useScanline,
699  trackerType_image, opt_lastFrame, use_depth, use_mask, opt_save);
700  } else {
701  return run<unsigned char>(input_directory, opt_click_allowed, opt_display, useScanline,
702  trackerType_image, opt_lastFrame, use_depth, use_mask, opt_save);
703  }
704  } catch (const vpException &e) {
705  std::cout << "Catch an exception: " << e << std::endl;
706  return EXIT_FAILURE;
707  }
708 }
709 #else
710 int main() {
711  std::cout << "Enable MBT module (VISP_HAVE_MODULE_MBT) to launch this test." << std::endl;
712  return 0;
713 }
714 #endif