Visual Servoing Platform  version 3.6.1 under development (2024-07-27)
vpMbGenericTracker.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Generic model-based tracker.
33  *
34 *****************************************************************************/
35 
36 #include <visp3/mbt/vpMbGenericTracker.h>
37 
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/core/vpIoTools.h>
42 #include <visp3/mbt/vpMbtXmlGenericParser.h>
43 
44 #ifdef VISP_HAVE_NLOHMANN_JSON
45 #include <nlohmann/json.hpp>
46 using json = nlohmann::json;
47 #endif
48 
51  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
52  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
53  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
54 {
55  m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
56 
57  // Add default camera transformation matrix
59 
60  // Add default ponderation between each feature type
62 
63 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
65 #endif
66 
69 }
70 
71 vpMbGenericTracker::vpMbGenericTracker(unsigned int nbCameras, int trackerType)
72  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
73  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
74  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
75 {
76  if (nbCameras == 0) {
77  throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
78  }
79  else if (nbCameras == 1) {
80  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);
81 
82  // Add default camera transformation matrix
84  }
85  else {
86  for (unsigned int i = 1; i <= nbCameras; i++) {
87  std::stringstream ss;
88  ss << "Camera" << i;
89  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);
90 
91  // Add default camera transformation matrix
93  }
94 
95  // Set by default the reference camera to the first one
96  m_referenceCameraName = m_mapOfTrackers.begin()->first;
97  }
98 
99  // Add default ponderation between each feature type
101 
102 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
104 #endif
105 
108 }
109 
110 vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
111  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
112  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
113  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
114 {
115  if (trackerTypes.empty()) {
116  throw vpException(vpException::badValue, "There is no camera!");
117  }
118 
119  if (trackerTypes.size() == 1) {
120  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);
121 
122  // Add default camera transformation matrix
124  }
125  else {
126  for (size_t i = 1; i <= trackerTypes.size(); i++) {
127  std::stringstream ss;
128  ss << "Camera" << i;
129  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);
130 
131  // Add default camera transformation matrix
133  }
134 
135  // Set by default the reference camera to the first one
136  m_referenceCameraName = m_mapOfTrackers.begin()->first;
137  }
138 
139  // Add default ponderation between each feature type
141 
142 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
144 #endif
145 
148 }
149 
150 vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
151  const std::vector<int> &trackerTypes)
152  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
153  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
154  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
155 {
156  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
158  "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
159  }
160 
161  for (size_t i = 0; i < cameraNames.size(); i++) {
162  m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);
163 
164  // Add default camera transformation matrix
166  }
167 
168  // Set by default the reference camera to the first one
169  m_referenceCameraName = m_mapOfTrackers.begin()->first;
170 
171  // Add default ponderation between each feature type
173 
174 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
176 #endif
177 
180 }
181 
183 {
184  for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
185  ++it) {
186  delete it->second;
187  it->second = nullptr;
188  }
189 }
190 
209  const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam)
210 {
211  double rawTotalProjectionError = 0.0;
212  unsigned int nbTotalFeaturesUsed = 0;
213 
214  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
215  it != m_mapOfTrackers.end(); ++it) {
216  TrackerWrapper *tracker = it->second;
217 
218  unsigned int nbFeaturesUsed = 0;
219  double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
220 
221  if (nbFeaturesUsed > 0) {
222  nbTotalFeaturesUsed += nbFeaturesUsed;
223  rawTotalProjectionError += curProjError;
224  }
225  }
226 
227  if (nbTotalFeaturesUsed > 0) {
228  return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
229  }
230 
231  return 90.0;
232 }
233 
252  const vpHomogeneousMatrix &_cMo,
253  const vpCameraParameters &_cam)
254 {
256  vpImageConvert::convert(I_color, I); // FS: Shoudn't we use here m_I that was converted in track() ?
257 
258  return computeCurrentProjectionError(I, _cMo, _cam);
259 }
260 
262 {
263  if (computeProjError) {
264  double rawTotalProjectionError = 0.0;
265  unsigned int nbTotalFeaturesUsed = 0;
266 
267  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
268  it != m_mapOfTrackers.end(); ++it) {
269  TrackerWrapper *tracker = it->second;
270 
271  double curProjError = tracker->getProjectionError();
272  unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
273 
274  if (nbFeaturesUsed > 0) {
275  nbTotalFeaturesUsed += nbFeaturesUsed;
276  rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
277  }
278  }
279 
280  if (nbTotalFeaturesUsed > 0) {
281  projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
282  }
283  else {
284  projectionError = 90.0;
285  }
286  }
287  else {
288  projectionError = 90.0;
289  }
290 }
291 
292 void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
293 {
294  computeVVSInit(mapOfImages);
295 
296  if (m_error.getRows() < 4) {
297  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
298  }
299 
300  double normRes = 0;
301  double normRes_1 = -1;
302  unsigned int iter = 0;
303 
304  vpMatrix LTL;
305  vpColVector LTR, v;
306  vpColVector error_prev;
307 
308  double mu = m_initialMu;
309  vpHomogeneousMatrix cMo_prev;
310 
311  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
312  if (isoJoIdentity)
313  oJo.eye();
314 
315  // Covariance
316  vpColVector W_true(m_error.getRows());
317  vpMatrix L_true, LVJ_true;
318 
319  // Create the map of VelocityTwistMatrices
320  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
321  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
322  it != m_mapOfCameraTransformationMatrix.end(); ++it) {
324  cVo.build(it->second);
325  mapOfVelocityTwist[it->first] = cVo;
326  }
327 
328  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
329 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
330  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
331 #endif
332  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
333  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];
334 
335  m_nb_feat_edge = 0;
336  m_nb_feat_klt = 0;
339 
340  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
341  computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);
342 
343  bool reStartFromLastIncrement = false;
344  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
345  if (reStartFromLastIncrement) {
346  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
347  it != m_mapOfTrackers.end(); ++it) {
348  TrackerWrapper *tracker = it->second;
349 
350  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;
351 
352 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
353  vpHomogeneousMatrix c_curr_tTc_curr0 =
354  m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
355  tracker->ctTc0 = c_curr_tTc_curr0;
356 #endif
357  }
358  }
359 
360  if (!reStartFromLastIncrement) {
362 
363  if (computeCovariance) {
364  L_true = m_L;
365  if (!isoJoIdentity) {
367  cVo.build(m_cMo);
368  LVJ_true = (m_L * (cVo * oJo));
369  }
370  }
371 
373  if (iter == 0) {
374  // If all the 6 dof should be estimated, we check if the interaction
375  // matrix is full rank. If not we remove automatically the dof that
376  // cannot be estimated. This is particularly useful when considering
377  // circles (rank 5) and cylinders (rank 4)
378  if (isoJoIdentity) {
379  cVo.build(m_cMo);
380 
381  vpMatrix K; // kernel
382  unsigned int rank = (m_L * cVo).kernel(K);
383  if (rank == 0) {
384  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
385  }
386 
387  if (rank != 6) {
388  vpMatrix I; // Identity
389  I.eye(6);
390  oJo = I - K.AtA();
391  isoJoIdentity = false;
392  }
393  }
394  }
395 
396  // Weighting
397  double num = 0;
398  double den = 0;
399 
400  unsigned int start_index = 0;
401  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
402  it != m_mapOfTrackers.end(); ++it) {
403  TrackerWrapper *tracker = it->second;
404 
405  if (tracker->m_trackerType & EDGE_TRACKER) {
406  for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
407  double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
408  W_true[start_index + i] = wi;
409  m_weightedError[start_index + i] = wi * m_error[start_index + i];
410 
411  num += wi * vpMath::sqr(m_error[start_index + i]);
412  den += wi;
413 
414  for (unsigned int j = 0; j < m_L.getCols(); j++) {
415  m_L[start_index + i][j] *= wi;
416  }
417  }
418 
419  start_index += tracker->m_error_edge.getRows();
420  }
421 
422 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
423  if (tracker->m_trackerType & KLT_TRACKER) {
424  for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
425  double wi = tracker->m_w_klt[i] * factorKlt;
426  W_true[start_index + i] = wi;
427  m_weightedError[start_index + i] = wi * m_error[start_index + i];
428 
429  num += wi * vpMath::sqr(m_error[start_index + i]);
430  den += wi;
431 
432  for (unsigned int j = 0; j < m_L.getCols(); j++) {
433  m_L[start_index + i][j] *= wi;
434  }
435  }
436 
437  start_index += tracker->m_error_klt.getRows();
438  }
439 #endif
440 
441  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
442  for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
443  double wi = tracker->m_w_depthNormal[i] * factorDepth;
444  W_true[start_index + i] = wi;
445  m_weightedError[start_index + i] = wi * m_error[start_index + i];
446 
447  num += wi * vpMath::sqr(m_error[start_index + i]);
448  den += wi;
449 
450  for (unsigned int j = 0; j < m_L.getCols(); j++) {
451  m_L[start_index + i][j] *= wi;
452  }
453  }
454 
455  start_index += tracker->m_error_depthNormal.getRows();
456  }
457 
458  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
459  for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
460  double wi = tracker->m_w_depthDense[i] * factorDepthDense;
461  W_true[start_index + i] = wi;
462  m_weightedError[start_index + i] = wi * m_error[start_index + i];
463 
464  num += wi * vpMath::sqr(m_error[start_index + i]);
465  den += wi;
466 
467  for (unsigned int j = 0; j < m_L.getCols(); j++) {
468  m_L[start_index + i][j] *= wi;
469  }
470  }
471 
472  start_index += tracker->m_error_depthDense.getRows();
473  }
474  }
475 
476  normRes_1 = normRes;
477  normRes = sqrt(num / den);
478 
479  computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
480 
481  cMo_prev = m_cMo;
482 
484 
485 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
486  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
487  it != m_mapOfTrackers.end(); ++it) {
488  TrackerWrapper *tracker = it->second;
489 
490  vpHomogeneousMatrix c_curr_tTc_curr0 =
491  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
492  tracker->ctTc0 = c_curr_tTc_curr0;
493  }
494 #endif
495 
496  // Update cMo
497  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
498  it != m_mapOfTrackers.end(); ++it) {
499  TrackerWrapper *tracker = it->second;
500  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
501  }
502  }
503 
504  iter++;
505  }
506 
507  // Update features number
508  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
509  it != m_mapOfTrackers.end(); ++it) {
510  TrackerWrapper *tracker = it->second;
511  if (tracker->m_trackerType & EDGE_TRACKER) {
512  m_nb_feat_edge += tracker->m_error_edge.size();
513  }
514 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
515  if (tracker->m_trackerType & KLT_TRACKER) {
516  m_nb_feat_klt += tracker->m_error_klt.size();
517  }
518 #endif
519  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
520  m_nb_feat_depthNormal += tracker->m_error_depthNormal.size();
521  }
522  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
523  m_nb_feat_depthDense += tracker->m_error_depthDense.size();
524  }
525  }
526 
527  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
528 
529  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
530  it != m_mapOfTrackers.end(); ++it) {
531  TrackerWrapper *tracker = it->second;
532 
533  if (tracker->m_trackerType & EDGE_TRACKER) {
534  tracker->updateMovingEdgeWeights();
535  }
536  }
537 }
538 
540 {
541  throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
542 }
543 
544 void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
545 {
546  unsigned int nbFeatures = 0;
547 
548  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
549  it != m_mapOfTrackers.end(); ++it) {
550  TrackerWrapper *tracker = it->second;
551  tracker->computeVVSInit(mapOfImages[it->first]);
552 
553  nbFeatures += tracker->m_error.getRows();
554  }
555 
556  m_L.resize(nbFeatures, 6, false, false);
557  m_error.resize(nbFeatures, false);
558 
559  m_weightedError.resize(nbFeatures, false);
560  m_w.resize(nbFeatures, false);
561  m_w = 1;
562 }
563 
565 {
566  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
567  "computeVVSInteractionMatrixAndR"
568  "esidu() should not be called");
569 }
570 
572  std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
573  std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
574 {
575  unsigned int start_index = 0;
576 
577  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
578  it != m_mapOfTrackers.end(); ++it) {
579  TrackerWrapper *tracker = it->second;
580 
581  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
582 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
583  vpHomogeneousMatrix c_curr_tTc_curr0 =
584  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
585  tracker->ctTc0 = c_curr_tTc_curr0;
586 #endif
587 
588  tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
589 
590  m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
591  m_error.insert(start_index, tracker->m_error);
592 
593  start_index += tracker->m_error.getRows();
594  }
595 }
596 
598 {
599  unsigned int start_index = 0;
600 
601  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
602  it != m_mapOfTrackers.end(); ++it) {
603  TrackerWrapper *tracker = it->second;
604  tracker->computeVVSWeights();
605 
606  m_w.insert(start_index, tracker->m_w);
607  start_index += tracker->m_w.getRows();
608  }
609 }
610 
625  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
626  bool displayFullModel)
627 {
628  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
629  if (it != m_mapOfTrackers.end()) {
630  TrackerWrapper *tracker = it->second;
631  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
632  }
633  else {
634  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
635  }
636 }
637 
652  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
653  bool displayFullModel)
654 {
655  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
656  if (it != m_mapOfTrackers.end()) {
657  TrackerWrapper *tracker = it->second;
658  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
659  }
660  else {
661  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
662  }
663 }
664 
682  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
683  const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
684  unsigned int thickness, bool displayFullModel)
685 {
686  if (m_mapOfTrackers.size() == 2) {
687  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
688  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
689  ++it;
690 
691  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
692  }
693  else {
694  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
695  << std::endl;
696  }
697 }
698 
716  const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
717  const vpCameraParameters &cam2, const vpColor &color, unsigned int thickness,
718  bool displayFullModel)
719 {
720  if (m_mapOfTrackers.size() == 2) {
721  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
722  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
723  ++it;
724 
725  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
726  }
727  else {
728  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
729  << std::endl;
730  }
731 }
732 
744 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
745  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
746  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
747  const vpColor &col, unsigned int thickness, bool displayFullModel)
748 {
749  // Display only for the given images
750  for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
751  it_img != mapOfImages.end(); ++it_img) {
752  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
753  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
754  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
755 
756  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
757  it_cam != mapOfCameraParameters.end()) {
758  TrackerWrapper *tracker = it_tracker->second;
759  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
760  }
761  else {
762  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
763  }
764  }
765 }
766 
778 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
779  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
780  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
781  const vpColor &col, unsigned int thickness, bool displayFullModel)
782 {
783  // Display only for the given images
784  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
785  it_img != mapOfImages.end(); ++it_img) {
786  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
787  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
788  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
789 
790  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
791  it_cam != mapOfCameraParameters.end()) {
792  TrackerWrapper *tracker = it_tracker->second;
793  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
794  }
795  else {
796  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
797  }
798  }
799 }
800 
806 std::vector<std::string> vpMbGenericTracker::getCameraNames() const
807 {
808  std::vector<std::string> cameraNames;
809 
810  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
811  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
812  cameraNames.push_back(it_tracker->first);
813  }
814 
815  return cameraNames;
816 }
817 
819 {
821 }
822 
832 {
833  if (m_mapOfTrackers.size() == 2) {
834  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
835  it->second->getCameraParameters(cam1);
836  ++it;
837 
838  it->second->getCameraParameters(cam2);
839  }
840  else {
841  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
842  << std::endl;
843  }
844 }
845 
851 void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
852 {
853  // Clear the input map
854  mapOfCameraParameters.clear();
855 
856  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
857  it != m_mapOfTrackers.end(); ++it) {
858  vpCameraParameters cam_;
859  it->second->getCameraParameters(cam_);
860  mapOfCameraParameters[it->first] = cam_;
861  }
862 }
863 
870 std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
871 {
872  std::map<std::string, int> trackingTypes;
873 
874  TrackerWrapper *traker;
875  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
876  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
877  traker = it_tracker->second;
878  trackingTypes[it_tracker->first] = traker->getTrackerType();
879  }
880 
881  return trackingTypes;
882 }
883 
892 void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
893 {
894  if (m_mapOfTrackers.size() == 2) {
895  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
896  clippingFlag1 = it->second->getClipping();
897  ++it;
898 
899  clippingFlag2 = it->second->getClipping();
900  }
901  else {
902  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
903  << std::endl;
904  }
905 }
906 
912 void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
913 {
914  mapOfClippingFlags.clear();
915 
916  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
917  it != m_mapOfTrackers.end(); ++it) {
918  TrackerWrapper *tracker = it->second;
919  mapOfClippingFlags[it->first] = tracker->getClipping();
920  }
921 }
922 
929 {
930  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
931  if (it != m_mapOfTrackers.end()) {
932  return it->second->getFaces();
933  }
934 
935  std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
936  return faces;
937 }
938 
945 {
946  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
947  if (it != m_mapOfTrackers.end()) {
948  return it->second->getFaces();
949  }
950 
951  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
952  return faces;
953 }
954 
955 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
959 std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
960 {
961  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
962  if (it != m_mapOfTrackers.end()) {
963  TrackerWrapper *tracker = it->second;
964  return tracker->getFeaturesCircle();
965  }
966  else {
967  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
968  m_referenceCameraName.c_str());
969  }
970 }
971 
975 std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
976 {
977  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
978  if (it != m_mapOfTrackers.end()) {
979  TrackerWrapper *tracker = it->second;
980  return tracker->getFeaturesKltCylinder();
981  }
982  else {
983  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
984  m_referenceCameraName.c_str());
985  }
986 }
987 
991 std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
992 {
993  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
994  if (it != m_mapOfTrackers.end()) {
995  TrackerWrapper *tracker = it->second;
996  return tracker->getFeaturesKlt();
997  }
998  else {
999  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1000  m_referenceCameraName.c_str());
1001  }
1002 }
1003 #endif
1004 
1031 std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
1032 {
1033  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1034 
1035  if (it != m_mapOfTrackers.end()) {
1036  return it->second->getFeaturesForDisplay();
1037  }
1038  else {
1039  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1040  }
1041 
1042  return std::vector<std::vector<double> >();
1043 }
1044 
1070 void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
1071 {
1072  // Clear the input map
1073  mapOfFeatures.clear();
1074 
1075  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1076  it != m_mapOfTrackers.end(); ++it) {
1077  mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1078  }
1079 }
1080 
1091 
1092 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
1101 std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1102 {
1103  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1104  if (it != m_mapOfTrackers.end()) {
1105  TrackerWrapper *tracker = it->second;
1106  return tracker->getKltImagePoints();
1107  }
1108  else {
1109  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1110  }
1111 
1112  return std::vector<vpImagePoint>();
1113 }
1114 
1123 std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1124 {
1125  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1126  if (it != m_mapOfTrackers.end()) {
1127  TrackerWrapper *tracker = it->second;
1128  return tracker->getKltImagePointsWithId();
1129  }
1130  else {
1131  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1132  }
1133 
1134  return std::map<int, vpImagePoint>();
1135 }
1136 
1143 {
1144  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1145  if (it != m_mapOfTrackers.end()) {
1146  TrackerWrapper *tracker = it->second;
1147  return tracker->getKltMaskBorder();
1148  }
1149  else {
1150  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1151  }
1152 
1153  return 0;
1154 }
1155 
1162 {
1163  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1164  if (it != m_mapOfTrackers.end()) {
1165  TrackerWrapper *tracker = it->second;
1166  return tracker->getKltNbPoints();
1167  }
1168  else {
1169  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1170  }
1171 
1172  return 0;
1173 }
1174 
1181 {
1182  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1183 
1184  if (it_tracker != m_mapOfTrackers.end()) {
1185  TrackerWrapper *tracker;
1186  tracker = it_tracker->second;
1187  return tracker->getKltOpencv();
1188  }
1189  else {
1190  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1191  }
1192 
1193  return vpKltOpencv();
1194 }
1195 
1205 {
1206  if (m_mapOfTrackers.size() == 2) {
1207  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1208  klt1 = it->second->getKltOpencv();
1209  ++it;
1210 
1211  klt2 = it->second->getKltOpencv();
1212  }
1213  else {
1214  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1215  << std::endl;
1216  }
1217 }
1218 
1224 void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1225 {
1226  mapOfKlts.clear();
1227 
1228  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1229  it != m_mapOfTrackers.end(); ++it) {
1230  TrackerWrapper *tracker = it->second;
1231  mapOfKlts[it->first] = tracker->getKltOpencv();
1232  }
1233 }
1234 
1240 std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1241 {
1242  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1243  if (it != m_mapOfTrackers.end()) {
1244  TrackerWrapper *tracker = it->second;
1245  return tracker->getKltPoints();
1246  }
1247  else {
1248  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1249  }
1250 
1251  return std::vector<cv::Point2f>();
1252 }
1253 
1261 #endif
1262 
1275 void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList, unsigned int level) const
1276 {
1277  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1278  if (it != m_mapOfTrackers.end()) {
1279  it->second->getLcircle(circlesList, level);
1280  }
1281  else {
1282  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1283  }
1284 }
1285 
1299 void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1300  unsigned int level) const
1301 {
1302  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1303  if (it != m_mapOfTrackers.end()) {
1304  it->second->getLcircle(circlesList, level);
1305  }
1306  else {
1307  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1308  }
1309 }
1310 
1323 void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList, unsigned int level) const
1324 {
1325  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1326  if (it != m_mapOfTrackers.end()) {
1327  it->second->getLcylinder(cylindersList, level);
1328  }
1329  else {
1330  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1331  }
1332 }
1333 
1347 void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1348  unsigned int level) const
1349 {
1350  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1351  if (it != m_mapOfTrackers.end()) {
1352  it->second->getLcylinder(cylindersList, level);
1353  }
1354  else {
1355  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1356  }
1357 }
1358 
1371 void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList, unsigned int level) const
1372 {
1373  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1374 
1375  if (it != m_mapOfTrackers.end()) {
1376  it->second->getLline(linesList, level);
1377  }
1378  else {
1379  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1380  }
1381 }
1382 
1396 void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1397  unsigned int level) const
1398 {
1399  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1400  if (it != m_mapOfTrackers.end()) {
1401  it->second->getLline(linesList, level);
1402  }
1403  else {
1404  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1405  }
1406 }
1407 
1437 std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1438  const vpHomogeneousMatrix &cMo,
1439  const vpCameraParameters &cam,
1440  bool displayFullModel)
1441 {
1442  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1443 
1444  if (it != m_mapOfTrackers.end()) {
1445  return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1446  }
1447  else {
1448  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1449  }
1450 
1451  return std::vector<std::vector<double> >();
1452 }
1453 
1479 void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1480  const std::map<std::string, unsigned int> &mapOfwidths,
1481  const std::map<std::string, unsigned int> &mapOfheights,
1482  const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1483  const std::map<std::string, vpCameraParameters> &mapOfCams,
1484  bool displayFullModel)
1485 {
1486  // Clear the input map
1487  mapOfModels.clear();
1488 
1489  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1490  it != m_mapOfTrackers.end(); ++it) {
1491  std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1492  std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1493  std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1494  std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1495 
1496  if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
1497  findCam != mapOfCams.end()) {
1498  mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
1499  findCam->second, displayFullModel);
1500  }
1501  }
1502 }
1503 
1510 {
1511  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1512 
1513  if (it != m_mapOfTrackers.end()) {
1514  return it->second->getMovingEdge();
1515  }
1516  else {
1517  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1518  }
1519 
1520  return vpMe();
1521 }
1522 
1532 {
1533  if (m_mapOfTrackers.size() == 2) {
1534  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1535  it->second->getMovingEdge(me1);
1536  ++it;
1537 
1538  it->second->getMovingEdge(me2);
1539  }
1540  else {
1541  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1542  << std::endl;
1543  }
1544 }
1545 
1551 void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1552 {
1553  mapOfMovingEdges.clear();
1554 
1555  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1556  it != m_mapOfTrackers.end(); ++it) {
1557  TrackerWrapper *tracker = it->second;
1558  mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1559  }
1560 }
1561 
1579 unsigned int vpMbGenericTracker::getNbPoints(unsigned int level) const
1580 {
1581  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1582 
1583  unsigned int nbGoodPoints = 0;
1584  if (it != m_mapOfTrackers.end()) {
1585 
1586  nbGoodPoints = it->second->getNbPoints(level);
1587  }
1588  else {
1589  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1590  }
1591 
1592  return nbGoodPoints;
1593 }
1594 
1609 void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, unsigned int level) const
1610 {
1611  mapOfNbPoints.clear();
1612 
1613  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1614  it != m_mapOfTrackers.end(); ++it) {
1615  TrackerWrapper *tracker = it->second;
1616  mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1617  }
1618 }
1619 
1626 {
1627  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1628  if (it != m_mapOfTrackers.end()) {
1629  return it->second->getNbPolygon();
1630  }
1631 
1632  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1633  return 0;
1634 }
1635 
1641 void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1642 {
1643  mapOfNbPolygons.clear();
1644 
1645  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1646  it != m_mapOfTrackers.end(); ++it) {
1647  TrackerWrapper *tracker = it->second;
1648  mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1649  }
1650 }
1651 
1663 {
1664  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1665  if (it != m_mapOfTrackers.end()) {
1666  return it->second->getPolygon(index);
1667  }
1668 
1669  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1670  return nullptr;
1671 }
1672 
1684 vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, unsigned int index)
1685 {
1686  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1687  if (it != m_mapOfTrackers.end()) {
1688  return it->second->getPolygon(index);
1689  }
1690 
1691  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1692  return nullptr;
1693 }
1694 
1710 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1711 vpMbGenericTracker::getPolygonFaces(bool orderPolygons, bool useVisibility, bool clipPolygon)
1712 {
1713  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1714 
1715  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1716  if (it != m_mapOfTrackers.end()) {
1717  TrackerWrapper *tracker = it->second;
1718  polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1719  }
1720  else {
1721  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1722  }
1723 
1724  return polygonFaces;
1725 }
1726 
1744 void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1745  std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1746  bool orderPolygons, bool useVisibility, bool clipPolygon)
1747 {
1748  mapOfPolygons.clear();
1749  mapOfPoints.clear();
1750 
1751  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1752  it != m_mapOfTrackers.end(); ++it) {
1753  TrackerWrapper *tracker = it->second;
1754  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1755  tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1756 
1757  mapOfPolygons[it->first] = polygonFaces.first;
1758  mapOfPoints[it->first] = polygonFaces.second;
1759  }
1760 }
1761 
1763 
1773 {
1774  if (m_mapOfTrackers.size() == 2) {
1775  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1776  it->second->getPose(c1Mo);
1777  ++it;
1778 
1779  it->second->getPose(c2Mo);
1780  }
1781  else {
1782  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1783  << std::endl;
1784  }
1785 }
1786 
1792 void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1793 {
1794  // Clear the map
1795  mapOfCameraPoses.clear();
1796 
1797  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1798  it != m_mapOfTrackers.end(); ++it) {
1799  TrackerWrapper *tracker = it->second;
1800  tracker->getPose(mapOfCameraPoses[it->first]);
1801  }
1802 }
1803 
1808 
1813 {
1814  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1815  if (it != m_mapOfTrackers.end()) {
1816  TrackerWrapper *tracker = it->second;
1817  return tracker->getTrackerType();
1818  }
1819  else {
1820  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1821  m_referenceCameraName.c_str());
1822  }
1823 }
1824 
1826 {
1827  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1828  it != m_mapOfTrackers.end(); ++it) {
1829  TrackerWrapper *tracker = it->second;
1830  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
1831  tracker->init(I);
1832  }
1833 }
1834 
// Stub inherited from vpMbTracker: on the generic (multi-camera) tracker this
// callback is never expected to run — reaching it indicates a programming
// error, hence the fatal exception. All parameters are intentionally unused.
void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
                                    double /*radius*/, int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
}
1840 
1841 #ifdef VISP_HAVE_MODULE_GUI
1842 
1886  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1887  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1888 {
1889  if (m_mapOfTrackers.size() == 2) {
1890  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1891  TrackerWrapper *tracker = it->second;
1892  tracker->initClick(I1, initFile1, displayHelp, T1);
1893 
1894  ++it;
1895 
1896  tracker = it->second;
1897  tracker->initClick(I2, initFile2, displayHelp, T2);
1898 
1900  if (it != m_mapOfTrackers.end()) {
1901  tracker = it->second;
1902 
1903  // Set the reference cMo
1904  tracker->getPose(m_cMo);
1905  }
1906  }
1907  else {
1909  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1910  }
1911 }
1912 
1955 void vpMbGenericTracker::initClick(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
1956  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1957  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1958 {
1959  if (m_mapOfTrackers.size() == 2) {
1960  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1961  TrackerWrapper *tracker = it->second;
1962  tracker->initClick(I_color1, initFile1, displayHelp, T1);
1963 
1964  ++it;
1965 
1966  tracker = it->second;
1967  tracker->initClick(I_color2, initFile2, displayHelp, T2);
1968 
1970  if (it != m_mapOfTrackers.end()) {
1971  tracker = it->second;
1972 
1973  // Set the reference cMo
1974  tracker->getPose(m_cMo);
1975  }
1976  }
1977  else {
1979  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1980  }
1981 }
1982 
2025 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2026  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2027  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2028 {
2029  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2030  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2031  mapOfImages.find(m_referenceCameraName);
2032  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2033 
2034  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2035  TrackerWrapper *tracker = it_tracker->second;
2036  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2037  if (it_T != mapOfT.end())
2038  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2039  else
2040  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2041  tracker->getPose(m_cMo);
2042  }
2043  else {
2044  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2045  }
2046 
2047  // Vector of missing initFile for cameras
2048  std::vector<std::string> vectorOfMissingCameraPoses;
2049 
2050  // Set pose for the specified cameras
2051  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2052  if (it_tracker->first != m_referenceCameraName) {
2053  it_img = mapOfImages.find(it_tracker->first);
2054  it_initFile = mapOfInitFiles.find(it_tracker->first);
2055 
2056  if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2057  // InitClick for the current camera
2058  TrackerWrapper *tracker = it_tracker->second;
2059  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2060  }
2061  else {
2062  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2063  }
2064  }
2065  }
2066 
2067  // Init for cameras that do not have an initFile
2068  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2069  it != vectorOfMissingCameraPoses.end(); ++it) {
2070  it_img = mapOfImages.find(*it);
2071  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2073 
2074  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2075  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2076  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2077  m_mapOfTrackers[*it]->init(*it_img->second);
2078  }
2079  else {
2081  "Missing image or missing camera transformation "
2082  "matrix! Cannot set the pose for camera: %s!",
2083  it->c_str());
2084  }
2085  }
2086 }
2087 
2130 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2131  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2132  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2133 {
2134  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2135  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2136  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2137 
2138  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2139  TrackerWrapper *tracker = it_tracker->second;
2140  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2141  if (it_T != mapOfT.end())
2142  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2143  else
2144  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2145  tracker->getPose(m_cMo);
2146  }
2147  else {
2148  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2149  }
2150 
2151  // Vector of missing initFile for cameras
2152  std::vector<std::string> vectorOfMissingCameraPoses;
2153 
2154  // Set pose for the specified cameras
2155  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2156  if (it_tracker->first != m_referenceCameraName) {
2157  it_img = mapOfColorImages.find(it_tracker->first);
2158  it_initFile = mapOfInitFiles.find(it_tracker->first);
2159 
2160  if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2161  // InitClick for the current camera
2162  TrackerWrapper *tracker = it_tracker->second;
2163  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2164  }
2165  else {
2166  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2167  }
2168  }
2169  }
2170 
2171  // Init for cameras that do not have an initFile
2172  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2173  it != vectorOfMissingCameraPoses.end(); ++it) {
2174  it_img = mapOfColorImages.find(*it);
2175  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2177 
2178  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2179  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2180  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2181  vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
2182  m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
2183  }
2184  else {
2186  "Missing image or missing camera transformation "
2187  "matrix! Cannot set the pose for camera: %s!",
2188  it->c_str());
2189  }
2190  }
2191 }
2192 #endif
2193 
// Stub inherited from vpMbTracker: on the generic (multi-camera) tracker this
// callback is never expected to run — reaching it indicates a programming
// error, hence the fatal exception. All parameters are intentionally unused.
void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
                                      const int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
}
2199 
2201 {
2202  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
2203 }
2204 
2206 {
2207  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
2208 }
2209 
2240  const std::string &initFile1, const std::string &initFile2)
2241 {
2242  if (m_mapOfTrackers.size() == 2) {
2243  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2244  TrackerWrapper *tracker = it->second;
2245  tracker->initFromPoints(I1, initFile1);
2246 
2247  ++it;
2248 
2249  tracker = it->second;
2250  tracker->initFromPoints(I2, initFile2);
2251 
2253  if (it != m_mapOfTrackers.end()) {
2254  tracker = it->second;
2255 
2256  // Set the reference cMo
2257  tracker->getPose(m_cMo);
2258 
2259  // Set the reference camera parameters
2260  tracker->getCameraParameters(m_cam);
2261  }
2262  }
2263  else {
2265  "Cannot initFromPoints()! Require two cameras but "
2266  "there are %d cameras!",
2267  m_mapOfTrackers.size());
2268  }
2269 }
2270 
2301  const std::string &initFile1, const std::string &initFile2)
2302 {
2303  if (m_mapOfTrackers.size() == 2) {
2304  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2305  TrackerWrapper *tracker = it->second;
2306  tracker->initFromPoints(I_color1, initFile1);
2307 
2308  ++it;
2309 
2310  tracker = it->second;
2311  tracker->initFromPoints(I_color2, initFile2);
2312 
2314  if (it != m_mapOfTrackers.end()) {
2315  tracker = it->second;
2316 
2317  // Set the reference cMo
2318  tracker->getPose(m_cMo);
2319 
2320  // Set the reference camera parameters
2321  tracker->getCameraParameters(m_cam);
2322  }
2323  }
2324  else {
2326  "Cannot initFromPoints()! Require two cameras but "
2327  "there are %d cameras!",
2328  m_mapOfTrackers.size());
2329  }
2330 }
2331 
2332 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2333  const std::map<std::string, std::string> &mapOfInitPoints)
2334 {
2335  // Set the reference cMo
2336  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2337  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2338  mapOfImages.find(m_referenceCameraName);
2339  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2340 
2341  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2342  TrackerWrapper *tracker = it_tracker->second;
2343  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2344  tracker->getPose(m_cMo);
2345  }
2346  else {
2347  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2348  }
2349 
2350  // Vector of missing initPoints for cameras
2351  std::vector<std::string> vectorOfMissingCameraPoints;
2352 
2353  // Set pose for the specified cameras
2354  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2355  it_img = mapOfImages.find(it_tracker->first);
2356  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2357 
2358  if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2359  // Set pose
2360  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2361  }
2362  else {
2363  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2364  }
2365  }
2366 
2367  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2368  it != vectorOfMissingCameraPoints.end(); ++it) {
2369  it_img = mapOfImages.find(*it);
2370  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2372 
2373  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2374  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2375  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2376  }
2377  else {
2379  "Missing image or missing camera transformation "
2380  "matrix! Cannot init the pose for camera: %s!",
2381  it->c_str());
2382  }
2383  }
2384 }
2385 
2386 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2387  const std::map<std::string, std::string> &mapOfInitPoints)
2388 {
2389  // Set the reference cMo
2390  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2391  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2392  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2393 
2394  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
2395  it_initPoints != mapOfInitPoints.end()) {
2396  TrackerWrapper *tracker = it_tracker->second;
2397  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2398  tracker->getPose(m_cMo);
2399  }
2400  else {
2401  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2402  }
2403 
2404  // Vector of missing initPoints for cameras
2405  std::vector<std::string> vectorOfMissingCameraPoints;
2406 
2407  // Set pose for the specified cameras
2408  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2409  it_img = mapOfColorImages.find(it_tracker->first);
2410  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2411 
2412  if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2413  // Set pose
2414  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2415  }
2416  else {
2417  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2418  }
2419  }
2420 
2421  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2422  it != vectorOfMissingCameraPoints.end(); ++it) {
2423  it_img = mapOfColorImages.find(*it);
2424  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2426 
2427  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2428  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2429  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2430  }
2431  else {
2433  "Missing image or missing camera transformation "
2434  "matrix! Cannot init the pose for camera: %s!",
2435  it->c_str());
2436  }
2437  }
2438 }
2439 
2441 {
2442  vpMbTracker::initFromPose(I, cMo);
2443 }
2444 
2457  const std::string &initFile1, const std::string &initFile2)
2458 {
2459  if (m_mapOfTrackers.size() == 2) {
2460  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2461  TrackerWrapper *tracker = it->second;
2462  tracker->initFromPose(I1, initFile1);
2463 
2464  ++it;
2465 
2466  tracker = it->second;
2467  tracker->initFromPose(I2, initFile2);
2468 
2470  if (it != m_mapOfTrackers.end()) {
2471  tracker = it->second;
2472 
2473  // Set the reference cMo
2474  tracker->getPose(m_cMo);
2475 
2476  // Set the reference camera parameters
2477  tracker->getCameraParameters(m_cam);
2478  }
2479  }
2480  else {
2482  "Cannot initFromPose()! Require two cameras but there "
2483  "are %d cameras!",
2484  m_mapOfTrackers.size());
2485  }
2486 }
2487 
2500  const std::string &initFile1, const std::string &initFile2)
2501 {
2502  if (m_mapOfTrackers.size() == 2) {
2503  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2504  TrackerWrapper *tracker = it->second;
2505  tracker->initFromPose(I_color1, initFile1);
2506 
2507  ++it;
2508 
2509  tracker = it->second;
2510  tracker->initFromPose(I_color2, initFile2);
2511 
2513  if (it != m_mapOfTrackers.end()) {
2514  tracker = it->second;
2515 
2516  // Set the reference cMo
2517  tracker->getPose(m_cMo);
2518 
2519  // Set the reference camera parameters
2520  tracker->getCameraParameters(m_cam);
2521  }
2522  }
2523  else {
2525  "Cannot initFromPose()! Require two cameras but there "
2526  "are %d cameras!",
2527  m_mapOfTrackers.size());
2528  }
2529 }
2530 
2545 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2546  const std::map<std::string, std::string> &mapOfInitPoses)
2547 {
2548  // Set the reference cMo
2549  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2550  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2551  mapOfImages.find(m_referenceCameraName);
2552  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2553 
2554  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2555  TrackerWrapper *tracker = it_tracker->second;
2556  tracker->initFromPose(*it_img->second, it_initPose->second);
2557  tracker->getPose(m_cMo);
2558  }
2559  else {
2560  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2561  }
2562 
2563  // Vector of missing pose matrices for cameras
2564  std::vector<std::string> vectorOfMissingCameraPoses;
2565 
2566  // Set pose for the specified cameras
2567  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2568  it_img = mapOfImages.find(it_tracker->first);
2569  it_initPose = mapOfInitPoses.find(it_tracker->first);
2570 
2571  if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2572  // Set pose
2573  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2574  }
2575  else {
2576  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2577  }
2578  }
2579 
2580  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2581  it != vectorOfMissingCameraPoses.end(); ++it) {
2582  it_img = mapOfImages.find(*it);
2583  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2585 
2586  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2587  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2588  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2589  }
2590  else {
2592  "Missing image or missing camera transformation "
2593  "matrix! Cannot init the pose for camera: %s!",
2594  it->c_str());
2595  }
2596  }
2597 }
2598 
2613 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2614  const std::map<std::string, std::string> &mapOfInitPoses)
2615 {
2616  // Set the reference cMo
2617  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2618  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2619  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2620 
2621  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2622  TrackerWrapper *tracker = it_tracker->second;
2623  tracker->initFromPose(*it_img->second, it_initPose->second);
2624  tracker->getPose(m_cMo);
2625  }
2626  else {
2627  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2628  }
2629 
2630  // Vector of missing pose matrices for cameras
2631  std::vector<std::string> vectorOfMissingCameraPoses;
2632 
2633  // Set pose for the specified cameras
2634  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2635  it_img = mapOfColorImages.find(it_tracker->first);
2636  it_initPose = mapOfInitPoses.find(it_tracker->first);
2637 
2638  if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2639  // Set pose
2640  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2641  }
2642  else {
2643  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2644  }
2645  }
2646 
2647  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2648  it != vectorOfMissingCameraPoses.end(); ++it) {
2649  it_img = mapOfColorImages.find(*it);
2650  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2652 
2653  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2654  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2655  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2656  }
2657  else {
2659  "Missing image or missing camera transformation "
2660  "matrix! Cannot init the pose for camera: %s!",
2661  it->c_str());
2662  }
2663  }
2664 }
2665 
2677  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2678 {
2679  if (m_mapOfTrackers.size() == 2) {
2680  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2681  it->second->initFromPose(I1, c1Mo);
2682 
2683  ++it;
2684 
2685  it->second->initFromPose(I2, c2Mo);
2686 
2687  m_cMo = c1Mo;
2688  }
2689  else {
2691  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2692  }
2693 }
2694 
2706  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2707 {
2708  if (m_mapOfTrackers.size() == 2) {
2709  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2710  it->second->initFromPose(I_color1, c1Mo);
2711 
2712  ++it;
2713 
2714  it->second->initFromPose(I_color2, c2Mo);
2715 
2716  m_cMo = c1Mo;
2717  }
2718  else {
2720  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2721  }
2722 }
2723 
2737 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2738  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2739 {
2740  // Set the reference cMo
2741  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2742  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2743  mapOfImages.find(m_referenceCameraName);
2744  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2745 
2746  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2747  TrackerWrapper *tracker = it_tracker->second;
2748  tracker->initFromPose(*it_img->second, it_camPose->second);
2749  tracker->getPose(m_cMo);
2750  }
2751  else {
2752  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2753  }
2754 
2755  // Vector of missing pose matrices for cameras
2756  std::vector<std::string> vectorOfMissingCameraPoses;
2757 
2758  // Set pose for the specified cameras
2759  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2760  it_img = mapOfImages.find(it_tracker->first);
2761  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2762 
2763  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2764  // Set pose
2765  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2766  }
2767  else {
2768  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2769  }
2770  }
2771 
2772  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2773  it != vectorOfMissingCameraPoses.end(); ++it) {
2774  it_img = mapOfImages.find(*it);
2775  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2777 
2778  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2779  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2780  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2781  }
2782  else {
2784  "Missing image or missing camera transformation "
2785  "matrix! Cannot set the pose for camera: %s!",
2786  it->c_str());
2787  }
2788  }
2789 }
2790 
2804 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2805  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2806 {
2807  // Set the reference cMo
2808  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2809  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2810  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2811 
2812  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2813  TrackerWrapper *tracker = it_tracker->second;
2814  tracker->initFromPose(*it_img->second, it_camPose->second);
2815  tracker->getPose(m_cMo);
2816  }
2817  else {
2818  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2819  }
2820 
2821  // Vector of missing pose matrices for cameras
2822  std::vector<std::string> vectorOfMissingCameraPoses;
2823 
2824  // Set pose for the specified cameras
2825  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2826  it_img = mapOfColorImages.find(it_tracker->first);
2827  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2828 
2829  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2830  // Set pose
2831  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2832  }
2833  else {
2834  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2835  }
2836  }
2837 
2838  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2839  it != vectorOfMissingCameraPoses.end(); ++it) {
2840  it_img = mapOfColorImages.find(*it);
2841  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2843 
2844  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2845  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2846  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2847  }
2848  else {
2850  "Missing image or missing camera transformation "
2851  "matrix! Cannot set the pose for camera: %s!",
2852  it->c_str());
2853  }
2854  }
2855 }
2856 
2868 void vpMbGenericTracker::loadConfigFile(const std::string &configFile, bool verbose)
2869 {
2870  const std::string extension = vpIoTools::getFileExtension(configFile);
2871  if (extension == ".xml") {
2872  loadConfigFileXML(configFile, verbose);
2873  }
2874 #ifdef VISP_HAVE_NLOHMANN_JSON
2875  else if (extension == ".json") {
2876  loadConfigFileJSON(configFile, verbose);
2877  }
2878 #endif
2879  else {
2880  throw vpException(vpException::ioError, "MBT config parsing: File format " + extension + "for file " + configFile + " is not supported.");
2881  }
2882 }
2883 
2895 void vpMbGenericTracker::loadConfigFileXML(const std::string &configFile, bool verbose)
2896 {
2897  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2898  it != m_mapOfTrackers.end(); ++it) {
2899  TrackerWrapper *tracker = it->second;
2900  tracker->loadConfigFile(configFile, verbose);
2901  }
2902 
2904  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2905  }
2906 
2907  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2908  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2909  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2910  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2911 }
2912 
2913 #ifdef VISP_HAVE_NLOHMANN_JSON
/*!
  Load a JSON configuration file: parses and version-checks the file,
  creates/updates one tracker per camera entry, removes cameras absent
  from the file, then applies global display/visibility/VVS settings and
  optionally loads a 3D model.

  \param settingsFile : Path of the JSON settings file.
  \param verbose : If true, print the per-camera settings being loaded.

  \exception vpException::ioError : If the file cannot be read or parsed.
  \exception vpException::notInitialized : If versioning information is
  missing or a non-reference camera has no transformation to the reference.
  \exception vpException::badValue : If the settings version is outdated or
  the reference camera is not among the declared trackers.
*/
void vpMbGenericTracker::loadConfigFileJSON(const std::string &settingsFile, bool verbose)
{
  // Read the whole settings file
  std::ifstream jsonFile(settingsFile);
  if (!jsonFile.good()) {
    throw vpException(vpException::ioError, "Could not read from settings file " + settingsFile + " to initialise the vpMbGenericTracker");
  }
  json settings;
  try {
    settings = json::parse(jsonFile);
  }
  catch (json::parse_error &e) {
    // Re-throw as a ViSP exception with the parser's message and byte offset.
    std::stringstream msg;
    msg << "Could not parse JSON file : \n";

    msg << e.what() << std::endl;
    msg << "Byte position of error: " << e.byte;
    throw vpException(vpException::ioError, msg.str());
  }
  jsonFile.close();

  // Refuse files without a version tag or with an outdated version.
  if (!settings.contains("version")) {
    throw vpException(vpException::notInitialized, "JSON configuration does not contain versioning information");
  }
  else if (settings["version"].get<std::string>() != MBT_JSON_SETTINGS_VERSION) {
    throw vpException(vpException::badValue, "Trying to load an old configuration file");
  }

  // Load basic settings
  settings.at("referenceCameraName").get_to(m_referenceCameraName);
  json trackersJson;
  trackersJson = settings.at("trackers");

  // Cameras already present in the tracker but not in the config file will be removed at the end.
  std::vector<std::string> unusedCameraNames = getCameraNames();

  bool refCameraFound = false;
  // For each camera entry in the config file
  for (const auto &it : trackersJson.items()) {
    const std::string cameraName = it.key();
    const json trackerJson = it.value();
    refCameraFound = refCameraFound || cameraName == m_referenceCameraName;

    // Load transformation between current camera and reference camera, if it exists
    if (trackerJson.contains("camTref")) {
      m_mapOfCameraTransformationMatrix[cameraName] = trackerJson["camTref"].get<vpHomogeneousMatrix>();
    }
    else if (cameraName != m_referenceCameraName) { // No transformation to reference and it's not the reference itself
      throw vpException(vpException::notInitialized, "Camera " + cameraName + " has no transformation to the reference camera");
    }
    if (verbose) {
      std::cout << "Loading tracker " << cameraName << std::endl << " with settings: " << std::endl << trackerJson.dump(2);
    }
    if (m_mapOfTrackers.count(cameraName)) {
      if (verbose) {
        std::cout << "Updating an already existing tracker with JSON configuration." << std::endl;
      }
      from_json(trackerJson, *(m_mapOfTrackers[cameraName]));
    }
    else {
      if (verbose) {
        std::cout << "Creating a new tracker from JSON configuration." << std::endl;
      }
      TrackerWrapper *tw = new TrackerWrapper(); //vpMBTracker is responsible for deleting trackers
      *tw = trackerJson;
      m_mapOfTrackers[cameraName] = tw;
    }
    const auto unusedCamIt = std::remove(unusedCameraNames.begin(), unusedCameraNames.end(), cameraName); // Mark this camera name as used
    unusedCameraNames.erase(unusedCamIt, unusedCameraNames.end());
  }
  if (!refCameraFound) {
    throw vpException(vpException::badValue, "Reference camera not found in trackers");
  }

  // All cameras that were defined in the tracker but not in the config file are removed
  for (const std::string &oldCameraName : unusedCameraNames) {
    m_mapOfCameraTransformationMatrix.erase(oldCameraName);
    TrackerWrapper *tw = m_mapOfTrackers[oldCameraName];
    m_mapOfTrackers.erase(oldCameraName);
    delete tw;
  }

  // Mirror the reference camera settings into this generic tracker.
  const TrackerWrapper *refTracker = m_mapOfTrackers[m_referenceCameraName];
  refTracker->getCameraParameters(m_cam);
  this->angleAppears = refTracker->getAngleAppear();
  this->angleDisappears = refTracker->getAngleDisappear();
  this->clippingFlag = refTracker->getClipping();
  this->distNearClip = refTracker->getNearClippingDistance();
  this->distFarClip = refTracker->getFarClippingDistance();

  // These settings can be set in each tracker or globally. Global value overrides local ones.
  if (settings.contains("display")) {
    const json displayJson = settings["display"];
    setDisplayFeatures(displayJson.value("features", displayFeatures));
    setProjectionErrorDisplay(displayJson.value("projectionError", m_projectionErrorDisplay));
  }
  if (settings.contains("visibilityTest")) {
    const json visJson = settings["visibilityTest"];
    setOgreVisibilityTest(visJson.value("ogre", useOgre));
    setScanLineVisibilityTest(visJson.value("scanline", useScanLine));
  }

  // VVS global settings
  if (settings.contains("vvs")) {
    const json vvsJson = settings["vvs"];
    setLambda(vvsJson.value("lambda", this->m_lambda));
    setMaxIter(vvsJson.value("maxIter", this->m_maxIter));
    setInitialMu(vvsJson.value("initialMu", this->m_initialMu));
  }

  // If a 3D model is defined, load it
  if (settings.contains("model")) {
    loadModel(settings.at("model").get<std::string>(), verbose);
  }
}
3037 
3045 void vpMbGenericTracker::saveConfigFile(const std::string &settingsFile) const
3046 {
3047  json j;
3048  j["referenceCameraName"] = m_referenceCameraName;
3049  j["version"] = MBT_JSON_SETTINGS_VERSION;
3050  // j["thresholdOutlier"] = m_thresholdOutlier;
3051  json trackers;
3052  for (const auto &kv : m_mapOfTrackers) {
3053  trackers[kv.first] = *(kv.second);
3054  const auto itTransformation = m_mapOfCameraTransformationMatrix.find(kv.first);
3055  if (itTransformation != m_mapOfCameraTransformationMatrix.end()) {
3056  trackers[kv.first]["camTref"] = itTransformation->second;
3057  }
3058  }
3059  j["trackers"] = trackers;
3060  j["vvs"] = json {
3061  {"lambda", m_lambda},
3062  {"maxIter", m_maxIter},
3063  {"initialMu", m_initialMu}
3064  };
3065 
3066  std::ofstream f(settingsFile);
3067  if (f.good()) {
3068  const unsigned indentLevel = 4;
3069  f << j.dump(indentLevel);
3070  f.close();
3071  }
3072  else {
3073  throw vpException(vpException::ioError, "Could not save tracker configuration to JSON file: " + settingsFile);
3074  }
3075 }
3076 
3077 #endif
3078 
3093 void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2, bool verbose)
3094 {
3095  if (m_mapOfTrackers.size() != 2) {
3096  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3097  }
3098 
3099  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3100  TrackerWrapper *tracker = it_tracker->second;
3101  tracker->loadConfigFile(configFile1, verbose);
3102 
3103  ++it_tracker;
3104  tracker = it_tracker->second;
3105  tracker->loadConfigFile(configFile2, verbose);
3106 
3108  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
3109  }
3110 
3111  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
3112  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
3113  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
3114  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
3115 }
3116 
3130 void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles, bool verbose)
3131 {
3132  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3133  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3134  TrackerWrapper *tracker = it_tracker->second;
3135 
3136  std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
3137  if (it_config != mapOfConfigFiles.end()) {
3138  tracker->loadConfigFile(it_config->second, verbose);
3139  }
3140  else {
3141  throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
3142  it_tracker->first.c_str());
3143  }
3144  }
3145 
3146  // Set the reference camera parameters
3147  std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
3148  if (it != m_mapOfTrackers.end()) {
3149  TrackerWrapper *tracker = it->second;
3150  tracker->getCameraParameters(m_cam);
3151 
3152  // Set clipping
3153  this->clippingFlag = tracker->getClipping();
3154  this->angleAppears = tracker->getAngleAppear();
3155  this->angleDisappears = tracker->getAngleDisappear();
3156  }
3157  else {
3158  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
3159  m_referenceCameraName.c_str());
3160  }
3161 }
3162 
3181 void vpMbGenericTracker::loadModel(const std::string &modelFile, bool verbose, const vpHomogeneousMatrix &T)
3182 {
3183  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3184  it != m_mapOfTrackers.end(); ++it) {
3185  TrackerWrapper *tracker = it->second;
3186  tracker->loadModel(modelFile, verbose, T);
3187  }
3188 }
3189 
3212 void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, bool verbose,
3213  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3214 {
3215  if (m_mapOfTrackers.size() != 2) {
3216  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3217  }
3218 
3219  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3220  TrackerWrapper *tracker = it_tracker->second;
3221  tracker->loadModel(modelFile1, verbose, T1);
3222 
3223  ++it_tracker;
3224  tracker = it_tracker->second;
3225  tracker->loadModel(modelFile2, verbose, T2);
3226 }
3227 
3247 void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, bool verbose,
3248  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3249 {
3250  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3251  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3252  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
3253 
3254  if (it_model != mapOfModelFiles.end()) {
3255  TrackerWrapper *tracker = it_tracker->second;
3256  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3257 
3258  if (it_T != mapOfT.end())
3259  tracker->loadModel(it_model->second, verbose, it_T->second);
3260  else
3261  tracker->loadModel(it_model->second, verbose);
3262  }
3263  else {
3264  throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
3265  it_tracker->first.c_str());
3266  }
3267  }
3268 }
3269 
3270 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
3271 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3272  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3273 {
3274  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3275  it != m_mapOfTrackers.end(); ++it) {
3276  TrackerWrapper *tracker = it->second;
3277  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3278  }
3279 }
3280 #endif
3281 
3282 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3283  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3284  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3285  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3286 {
3287  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3288  it != m_mapOfTrackers.end(); ++it) {
3289  TrackerWrapper *tracker = it->second;
3290  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3291  mapOfPointCloudHeights[it->first]);
3292  }
3293 }
3294 
3295 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3296  std::map<std::string, const vpMatrix *> &mapOfPointClouds,
3297  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3298  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3299 {
3300  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3301  it != m_mapOfTrackers.end(); ++it) {
3302  TrackerWrapper *tracker = it->second;
3303  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3304  mapOfPointCloudHeights[it->first]);
3305  }
3306 }
3307 
3319 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
3320  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3321 {
3322  if (m_mapOfTrackers.size() != 1) {
3323  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3324  m_mapOfTrackers.size());
3325  }
3326 
3327  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3328  if (it_tracker != m_mapOfTrackers.end()) {
3329  TrackerWrapper *tracker = it_tracker->second;
3330  tracker->reInitModel(I, cad_name, cMo, verbose, T);
3331 
3332  // Set reference pose
3333  tracker->getPose(m_cMo);
3334  }
3335  else {
3336  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3337  }
3338 
3339  modelInitialised = true;
3340 }
3341 
3353 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
3354  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3355 {
3356  if (m_mapOfTrackers.size() != 1) {
3357  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3358  m_mapOfTrackers.size());
3359  }
3360 
3361  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3362  if (it_tracker != m_mapOfTrackers.end()) {
3363  TrackerWrapper *tracker = it_tracker->second;
3364  tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3365 
3366  // Set reference pose
3367  tracker->getPose(m_cMo);
3368  }
3369  else {
3370  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3371  }
3372 
3373  modelInitialised = true;
3374 }
3375 
3397  const std::string &cad_name1, const std::string &cad_name2,
3398  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3399  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3400 {
3401  if (m_mapOfTrackers.size() == 2) {
3402  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3403 
3404  it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3405 
3406  ++it_tracker;
3407 
3408  it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3409 
3410  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3411  if (it_tracker != m_mapOfTrackers.end()) {
3412  // Set reference pose
3413  it_tracker->second->getPose(m_cMo);
3414  }
3415  }
3416  else {
3417  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3418  }
3419 
3420  modelInitialised = true;
3421 }
3422 
3444  const std::string &cad_name1, const std::string &cad_name2,
3445  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3446  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3447 {
3448  if (m_mapOfTrackers.size() == 2) {
3449  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3450 
3451  it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3452 
3453  ++it_tracker;
3454 
3455  it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3456 
3457  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3458  if (it_tracker != m_mapOfTrackers.end()) {
3459  // Set reference pose
3460  it_tracker->second->getPose(m_cMo);
3461  }
3462  }
3463  else {
3464  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3465  }
3466 
3467  modelInitialised = true;
3468 }
3469 
3484 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3485  const std::map<std::string, std::string> &mapOfModelFiles,
3486  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3487  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3488 {
3489  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3490  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3491  mapOfImages.find(m_referenceCameraName);
3492  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3493  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3494 
3495  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3496  it_camPose != mapOfCameraPoses.end()) {
3497  TrackerWrapper *tracker = it_tracker->second;
3498  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3499  if (it_T != mapOfT.end())
3500  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3501  else
3502  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3503 
3504  // Set reference pose
3505  tracker->getPose(m_cMo);
3506  }
3507  else {
3508  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3509  }
3510 
3511  std::vector<std::string> vectorOfMissingCameras;
3512  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3513  if (it_tracker->first != m_referenceCameraName) {
3514  it_img = mapOfImages.find(it_tracker->first);
3515  it_model = mapOfModelFiles.find(it_tracker->first);
3516  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3517 
3518  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3519  TrackerWrapper *tracker = it_tracker->second;
3520  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3521  }
3522  else {
3523  vectorOfMissingCameras.push_back(it_tracker->first);
3524  }
3525  }
3526  }
3527 
3528  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3529  ++it) {
3530  it_img = mapOfImages.find(*it);
3531  it_model = mapOfModelFiles.find(*it);
3532  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3534 
3535  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3536  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3537  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3538  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3539  }
3540  }
3541 
3542  modelInitialised = true;
3543 }
3544 
3559 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3560  const std::map<std::string, std::string> &mapOfModelFiles,
3561  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3562  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3563 {
3564  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3565  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
3566  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3567  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3568 
3569  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3570  it_camPose != mapOfCameraPoses.end()) {
3571  TrackerWrapper *tracker = it_tracker->second;
3572  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3573  if (it_T != mapOfT.end())
3574  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3575  else
3576  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3577 
3578  // Set reference pose
3579  tracker->getPose(m_cMo);
3580  }
3581  else {
3582  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3583  }
3584 
3585  std::vector<std::string> vectorOfMissingCameras;
3586  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3587  if (it_tracker->first != m_referenceCameraName) {
3588  it_img = mapOfColorImages.find(it_tracker->first);
3589  it_model = mapOfModelFiles.find(it_tracker->first);
3590  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3591 
3592  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3593  it_camPose != mapOfCameraPoses.end()) {
3594  TrackerWrapper *tracker = it_tracker->second;
3595  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3596  }
3597  else {
3598  vectorOfMissingCameras.push_back(it_tracker->first);
3599  }
3600  }
3601  }
3602 
3603  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3604  ++it) {
3605  it_img = mapOfColorImages.find(*it);
3606  it_model = mapOfModelFiles.find(*it);
3607  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3609 
3610  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3611  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3612  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3613  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3614  }
3615  }
3616 
3617  modelInitialised = true;
3618 }
3619 
3625 {
3626  m_cMo.eye();
3627 
3628  useScanLine = false;
3629 
3630 #ifdef VISP_HAVE_OGRE
3631  useOgre = false;
3632 #endif
3633 
3634  m_computeInteraction = true;
3635  m_lambda = 1.0;
3636 
3637  angleAppears = vpMath::rad(89);
3640  distNearClip = 0.001;
3641  distFarClip = 100;
3642 
3644  m_maxIter = 30;
3645  m_stopCriteriaEpsilon = 1e-8;
3646  m_initialMu = 0.01;
3647 
3648  // Only for Edge
3649  m_percentageGdPt = 0.4;
3650 
3651  // Only for KLT
3652  m_thresholdOutlier = 0.5;
3653 
3654  // Reset default ponderation between each feature type
3656 
3657 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
3659 #endif
3660 
3663 
3664  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3665  it != m_mapOfTrackers.end(); ++it) {
3666  TrackerWrapper *tracker = it->second;
3667  tracker->resetTracker();
3668  }
3669 }
3670 
3681 {
3683 
3684  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3685  it != m_mapOfTrackers.end(); ++it) {
3686  TrackerWrapper *tracker = it->second;
3687  tracker->setAngleAppear(a);
3688  }
3689 }
3690 
3703 void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3704 {
3705  if (m_mapOfTrackers.size() == 2) {
3706  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3707  it->second->setAngleAppear(a1);
3708 
3709  ++it;
3710  it->second->setAngleAppear(a2);
3711 
3713  if (it != m_mapOfTrackers.end()) {
3714  angleAppears = it->second->getAngleAppear();
3715  }
3716  else {
3717  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3718  }
3719  }
3720  else {
3721  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3722  m_mapOfTrackers.size());
3723  }
3724 }
3725 
3735 void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3736 {
3737  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3738  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3739 
3740  if (it_tracker != m_mapOfTrackers.end()) {
3741  TrackerWrapper *tracker = it_tracker->second;
3742  tracker->setAngleAppear(it->second);
3743 
3744  if (it->first == m_referenceCameraName) {
3745  angleAppears = it->second;
3746  }
3747  }
3748  }
3749 }
3750 
3761 {
3763 
3764  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3765  it != m_mapOfTrackers.end(); ++it) {
3766  TrackerWrapper *tracker = it->second;
3767  tracker->setAngleDisappear(a);
3768  }
3769 }
3770 
3783 void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3784 {
3785  if (m_mapOfTrackers.size() == 2) {
3786  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3787  it->second->setAngleDisappear(a1);
3788 
3789  ++it;
3790  it->second->setAngleDisappear(a2);
3791 
3793  if (it != m_mapOfTrackers.end()) {
3794  angleDisappears = it->second->getAngleDisappear();
3795  }
3796  else {
3797  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3798  }
3799  }
3800  else {
3801  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3802  m_mapOfTrackers.size());
3803  }
3804 }
3805 
3815 void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3816 {
3817  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3818  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3819 
3820  if (it_tracker != m_mapOfTrackers.end()) {
3821  TrackerWrapper *tracker = it_tracker->second;
3822  tracker->setAngleDisappear(it->second);
3823 
3824  if (it->first == m_referenceCameraName) {
3825  angleDisappears = it->second;
3826  }
3827  }
3828  }
3829 }
3830 
3837 {
3839 
3840  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3841  it != m_mapOfTrackers.end(); ++it) {
3842  TrackerWrapper *tracker = it->second;
3843  tracker->setCameraParameters(camera);
3844  }
3845 }
3846 
3856 {
3857  if (m_mapOfTrackers.size() == 2) {
3858  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3859  it->second->setCameraParameters(camera1);
3860 
3861  ++it;
3862  it->second->setCameraParameters(camera2);
3863 
3865  if (it != m_mapOfTrackers.end()) {
3866  it->second->getCameraParameters(m_cam);
3867  }
3868  else {
3869  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3870  }
3871  }
3872  else {
3873  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3874  m_mapOfTrackers.size());
3875  }
3876 }
3877 
3886 void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3887 {
3888  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3889  it != mapOfCameraParameters.end(); ++it) {
3890  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3891 
3892  if (it_tracker != m_mapOfTrackers.end()) {
3893  TrackerWrapper *tracker = it_tracker->second;
3894  tracker->setCameraParameters(it->second);
3895 
3896  if (it->first == m_referenceCameraName) {
3897  m_cam = it->second;
3898  }
3899  }
3900  }
3901 }
3902 
3911 void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3912  const vpHomogeneousMatrix &cameraTransformationMatrix)
3913 {
3914  std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3915 
3916  if (it != m_mapOfCameraTransformationMatrix.end()) {
3917  it->second = cameraTransformationMatrix;
3918  }
3919  else {
3920  throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3921  }
3922 }
3923 
3932  const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3933 {
3934  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3935  it != mapOfTransformationMatrix.end(); ++it) {
3936  std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3937  m_mapOfCameraTransformationMatrix.find(it->first);
3938 
3939  if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3940  it_camTrans->second = it->second;
3941  }
3942  }
3943 }
3944 
3954 void vpMbGenericTracker::setClipping(const unsigned int &flags)
3955 {
3956  vpMbTracker::setClipping(flags);
3957 
3958  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3959  it != m_mapOfTrackers.end(); ++it) {
3960  TrackerWrapper *tracker = it->second;
3961  tracker->setClipping(flags);
3962  }
3963 }
3964 
3975 void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3976 {
3977  if (m_mapOfTrackers.size() == 2) {
3978  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3979  it->second->setClipping(flags1);
3980 
3981  ++it;
3982  it->second->setClipping(flags2);
3983 
3985  if (it != m_mapOfTrackers.end()) {
3986  clippingFlag = it->second->getClipping();
3987  }
3988  else {
3989  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3990  }
3991  }
3992  else {
3993  std::stringstream ss;
3994  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3996  }
3997 }
3998 
4006 void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
4007 {
4008  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
4009  it != mapOfClippingFlags.end(); ++it) {
4010  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4011 
4012  if (it_tracker != m_mapOfTrackers.end()) {
4013  TrackerWrapper *tracker = it_tracker->second;
4014  tracker->setClipping(it->second);
4015 
4016  if (it->first == m_referenceCameraName) {
4017  clippingFlag = it->second;
4018  }
4019  }
4020  }
4021 }
4022 
4033 {
4034  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4035  it != m_mapOfTrackers.end(); ++it) {
4036  TrackerWrapper *tracker = it->second;
4037  tracker->setDepthDenseFilteringMaxDistance(maxDistance);
4038  }
4039 }
4040 
4050 {
4051  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4052  it != m_mapOfTrackers.end(); ++it) {
4053  TrackerWrapper *tracker = it->second;
4054  tracker->setDepthDenseFilteringMethod(method);
4055  }
4056 }
4057 
4068 {
4069  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4070  it != m_mapOfTrackers.end(); ++it) {
4071  TrackerWrapper *tracker = it->second;
4072  tracker->setDepthDenseFilteringMinDistance(minDistance);
4073  }
4074 }
4075 
4086 {
4087  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4088  it != m_mapOfTrackers.end(); ++it) {
4089  TrackerWrapper *tracker = it->second;
4090  tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
4091  }
4092 }
4093 
4102 void vpMbGenericTracker::setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
4103 {
4104  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4105  it != m_mapOfTrackers.end(); ++it) {
4106  TrackerWrapper *tracker = it->second;
4107  tracker->setDepthDenseSamplingStep(stepX, stepY);
4108  }
4109 }
4110 
4119 {
4120  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4121  it != m_mapOfTrackers.end(); ++it) {
4122  TrackerWrapper *tracker = it->second;
4123  tracker->setDepthNormalFaceCentroidMethod(method);
4124  }
4125 }
4126 
4136 {
4137  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4138  it != m_mapOfTrackers.end(); ++it) {
4139  TrackerWrapper *tracker = it->second;
4140  tracker->setDepthNormalFeatureEstimationMethod(method);
4141  }
4142 }
4143 
4152 {
4153  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4154  it != m_mapOfTrackers.end(); ++it) {
4155  TrackerWrapper *tracker = it->second;
4156  tracker->setDepthNormalPclPlaneEstimationMethod(method);
4157  }
4158 }
4159 
4168 {
4169  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4170  it != m_mapOfTrackers.end(); ++it) {
4171  TrackerWrapper *tracker = it->second;
4172  tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
4173  }
4174 }
4175 
4184 {
4185  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4186  it != m_mapOfTrackers.end(); ++it) {
4187  TrackerWrapper *tracker = it->second;
4188  tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
4189  }
4190 }
4191 
4200 void vpMbGenericTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
4201 {
4202  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4203  it != m_mapOfTrackers.end(); ++it) {
4204  TrackerWrapper *tracker = it->second;
4205  tracker->setDepthNormalSamplingStep(stepX, stepY);
4206  }
4207 }
4208 
4228 {
4230 
4231  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4232  it != m_mapOfTrackers.end(); ++it) {
4233  TrackerWrapper *tracker = it->second;
4234  tracker->setDisplayFeatures(displayF);
4235  }
4236 }
4237 
4246 {
4248 
4249  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4250  it != m_mapOfTrackers.end(); ++it) {
4251  TrackerWrapper *tracker = it->second;
4252  tracker->setFarClippingDistance(dist);
4253  }
4254 }
4255 
4264 void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
4265 {
4266  if (m_mapOfTrackers.size() == 2) {
4267  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4268  it->second->setFarClippingDistance(dist1);
4269 
4270  ++it;
4271  it->second->setFarClippingDistance(dist2);
4272 
4274  if (it != m_mapOfTrackers.end()) {
4275  distFarClip = it->second->getFarClippingDistance();
4276  }
4277  else {
4278  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4279  }
4280  }
4281  else {
4282  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4283  m_mapOfTrackers.size());
4284  }
4285 }
4286 
4292 void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
4293 {
4294  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4295  ++it) {
4296  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4297 
4298  if (it_tracker != m_mapOfTrackers.end()) {
4299  TrackerWrapper *tracker = it_tracker->second;
4300  tracker->setFarClippingDistance(it->second);
4301 
4302  if (it->first == m_referenceCameraName) {
4303  distFarClip = it->second;
4304  }
4305  }
4306  }
4307 }
4308 
4315 void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
4316 {
4317  for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
4318  ++it) {
4319  std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4320  if (it_factor != mapOfFeatureFactors.end()) {
4321  it->second = it_factor->second;
4322  }
4323  }
4324 }
4325 
4342 {
4343  m_percentageGdPt = threshold;
4344 
4345  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4346  it != m_mapOfTrackers.end(); ++it) {
4347  TrackerWrapper *tracker = it->second;
4348  tracker->setGoodMovingEdgesRatioThreshold(threshold);
4349  }
4350 }
4351 
4352 #ifdef VISP_HAVE_OGRE
4365 {
4366  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4367  it != m_mapOfTrackers.end(); ++it) {
4368  TrackerWrapper *tracker = it->second;
4369  tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4370  }
4371 }
4372 
4385 {
4386  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4387  it != m_mapOfTrackers.end(); ++it) {
4388  TrackerWrapper *tracker = it->second;
4389  tracker->setNbRayCastingAttemptsForVisibility(attempts);
4390  }
4391 }
4392 #endif
4393 
4394 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4403 {
4404  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4405  it != m_mapOfTrackers.end(); ++it) {
4406  TrackerWrapper *tracker = it->second;
4407  tracker->setKltOpencv(t);
4408  }
4409 }
4410 
4420 {
4421  if (m_mapOfTrackers.size() == 2) {
4422  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4423  it->second->setKltOpencv(t1);
4424 
4425  ++it;
4426  it->second->setKltOpencv(t2);
4427  }
4428  else {
4429  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4430  m_mapOfTrackers.size());
4431  }
4432 }
4433 
4439 void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4440 {
4441  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4442  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4443 
4444  if (it_tracker != m_mapOfTrackers.end()) {
4445  TrackerWrapper *tracker = it_tracker->second;
4446  tracker->setKltOpencv(it->second);
4447  }
4448  }
4449 }
4450 
4459 {
4460  m_thresholdOutlier = th;
4461 
4462  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4463  it != m_mapOfTrackers.end(); ++it) {
4464  TrackerWrapper *tracker = it->second;
4465  tracker->setKltThresholdAcceptation(th);
4466  }
4467 }
4468 #endif
4469 
4482 void vpMbGenericTracker::setLod(bool useLod, const std::string &name)
4483 {
4484  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4485  it != m_mapOfTrackers.end(); ++it) {
4486  TrackerWrapper *tracker = it->second;
4487  tracker->setLod(useLod, name);
4488  }
4489 }
4490 
4491 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4499 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4500 {
4501  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4502  it != m_mapOfTrackers.end(); ++it) {
4503  TrackerWrapper *tracker = it->second;
4504  tracker->setKltMaskBorder(e);
4505  }
4506 }
4507 
4516 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4517 {
4518  if (m_mapOfTrackers.size() == 2) {
4519  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4520  it->second->setKltMaskBorder(e1);
4521 
4522  ++it;
4523 
4524  it->second->setKltMaskBorder(e2);
4525  }
4526  else {
4527  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4528  m_mapOfTrackers.size());
4529  }
4530 }
4531 
4537 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4538 {
4539  for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4540  ++it) {
4541  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4542 
4543  if (it_tracker != m_mapOfTrackers.end()) {
4544  TrackerWrapper *tracker = it_tracker->second;
4545  tracker->setKltMaskBorder(it->second);
4546  }
4547  }
4548 }
4549 #endif
4550 
4557 {
4558  vpMbTracker::setMask(mask);
4559 
4560  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4561  it != m_mapOfTrackers.end(); ++it) {
4562  TrackerWrapper *tracker = it->second;
4563  tracker->setMask(mask);
4564  }
4565 }
4566 
4578 void vpMbGenericTracker::setMinLineLengthThresh(double minLineLengthThresh, const std::string &name)
4579 {
4580  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4581  it != m_mapOfTrackers.end(); ++it) {
4582  TrackerWrapper *tracker = it->second;
4583  tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4584  }
4585 }
4586 
4597 void vpMbGenericTracker::setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name)
4598 {
4599  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4600  it != m_mapOfTrackers.end(); ++it) {
4601  TrackerWrapper *tracker = it->second;
4602  tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4603  }
4604 }
4605 
4614 {
4615  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4616  it != m_mapOfTrackers.end(); ++it) {
4617  TrackerWrapper *tracker = it->second;
4618  tracker->setMovingEdge(me);
4619  }
4620 }
4621 
4631 void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4632 {
4633  if (m_mapOfTrackers.size() == 2) {
4634  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4635  it->second->setMovingEdge(me1);
4636 
4637  ++it;
4638 
4639  it->second->setMovingEdge(me2);
4640  }
4641  else {
4642  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4643  m_mapOfTrackers.size());
4644  }
4645 }
4646 
4652 void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4653 {
4654  for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4655  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4656 
4657  if (it_tracker != m_mapOfTrackers.end()) {
4658  TrackerWrapper *tracker = it_tracker->second;
4659  tracker->setMovingEdge(it->second);
4660  }
4661  }
4662 }
4663 
4672 {
4674 
4675  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4676  it != m_mapOfTrackers.end(); ++it) {
4677  TrackerWrapper *tracker = it->second;
4678  tracker->setNearClippingDistance(dist);
4679  }
4680 }
4681 
4690 void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4691 {
4692  if (m_mapOfTrackers.size() == 2) {
4693  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4694  it->second->setNearClippingDistance(dist1);
4695 
4696  ++it;
4697 
4698  it->second->setNearClippingDistance(dist2);
4699 
4701  if (it != m_mapOfTrackers.end()) {
4702  distNearClip = it->second->getNearClippingDistance();
4703  }
4704  else {
4705  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4706  }
4707  }
4708  else {
4709  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4710  m_mapOfTrackers.size());
4711  }
4712 }
4713 
4719 void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4720 {
4721  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4722  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4723 
4724  if (it_tracker != m_mapOfTrackers.end()) {
4725  TrackerWrapper *tracker = it_tracker->second;
4726  tracker->setNearClippingDistance(it->second);
4727 
4728  if (it->first == m_referenceCameraName) {
4729  distNearClip = it->second;
4730  }
4731  }
4732  }
4733 }
4734 
4748 {
4749  vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4750 
4751  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4752  it != m_mapOfTrackers.end(); ++it) {
4753  TrackerWrapper *tracker = it->second;
4754  tracker->setOgreShowConfigDialog(showConfigDialog);
4755  }
4756 }
4757 
4769 {
4771 
4772  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4773  it != m_mapOfTrackers.end(); ++it) {
4774  TrackerWrapper *tracker = it->second;
4775  tracker->setOgreVisibilityTest(v);
4776  }
4777 
4778 #ifdef VISP_HAVE_OGRE
4779  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4780  it != m_mapOfTrackers.end(); ++it) {
4781  TrackerWrapper *tracker = it->second;
4782  tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4783  }
4784 #endif
4785 }
4786 
4795 {
4797 
4798  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4799  it != m_mapOfTrackers.end(); ++it) {
4800  TrackerWrapper *tracker = it->second;
4801  tracker->setOptimizationMethod(opt);
4802  }
4803 }
4804 
4818 {
4819  if (m_mapOfTrackers.size() > 1) {
4820  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4821  "to be configured with only one camera!");
4822  }
4823 
4824  m_cMo = cdMo;
4825 
4826  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4827  if (it != m_mapOfTrackers.end()) {
4828  TrackerWrapper *tracker = it->second;
4829  tracker->setPose(I, cdMo);
4830  }
4831  else {
4832  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4833  m_referenceCameraName.c_str());
4834  }
4835 }
4836 
4850 {
4851  if (m_mapOfTrackers.size() > 1) {
4852  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4853  "to be configured with only one camera!");
4854  }
4855 
4856  m_cMo = cdMo;
4857 
4858  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4859  if (it != m_mapOfTrackers.end()) {
4860  TrackerWrapper *tracker = it->second;
4861  vpImageConvert::convert(I_color, m_I);
4862  tracker->setPose(m_I, cdMo);
4863  }
4864  else {
4865  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4866  m_referenceCameraName.c_str());
4867  }
4868 }
4869 
4882  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4883 {
4884  if (m_mapOfTrackers.size() == 2) {
4885  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4886  it->second->setPose(I1, c1Mo);
4887 
4888  ++it;
4889 
4890  it->second->setPose(I2, c2Mo);
4891 
4893  if (it != m_mapOfTrackers.end()) {
4894  // Set reference pose
4895  it->second->getPose(m_cMo);
4896  }
4897  else {
4898  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4899  m_referenceCameraName.c_str());
4900  }
4901  }
4902  else {
4903  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4904  m_mapOfTrackers.size());
4905  }
4906 }
4907 
4919 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
4920  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4921 {
4922  if (m_mapOfTrackers.size() == 2) {
4923  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4924  it->second->setPose(I_color1, c1Mo);
4925 
4926  ++it;
4927 
4928  it->second->setPose(I_color2, c2Mo);
4929 
4931  if (it != m_mapOfTrackers.end()) {
4932  // Set reference pose
4933  it->second->getPose(m_cMo);
4934  }
4935  else {
4936  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4937  m_referenceCameraName.c_str());
4938  }
4939  }
4940  else {
4941  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4942  m_mapOfTrackers.size());
4943  }
4944 }
4945 
4961 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4962  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4963 {
4964  // Set the reference cMo
4965  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4966  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4967  mapOfImages.find(m_referenceCameraName);
4968  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4969 
4970  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4971  TrackerWrapper *tracker = it_tracker->second;
4972  tracker->setPose(*it_img->second, it_camPose->second);
4973  tracker->getPose(m_cMo);
4974  }
4975  else {
4976  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4977  }
4978 
4979  // Vector of missing pose matrices for cameras
4980  std::vector<std::string> vectorOfMissingCameraPoses;
4981 
4982  // Set pose for the specified cameras
4983  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4984  if (it_tracker->first != m_referenceCameraName) {
4985  it_img = mapOfImages.find(it_tracker->first);
4986  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4987 
4988  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4989  // Set pose
4990  TrackerWrapper *tracker = it_tracker->second;
4991  tracker->setPose(*it_img->second, it_camPose->second);
4992  }
4993  else {
4994  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4995  }
4996  }
4997  }
4998 
4999  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5000  it != vectorOfMissingCameraPoses.end(); ++it) {
5001  it_img = mapOfImages.find(*it);
5002  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5004 
5005  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5006  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5007  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5008  }
5009  else {
5011  "Missing image or missing camera transformation "
5012  "matrix! Cannot set pose for camera: %s!",
5013  it->c_str());
5014  }
5015  }
5016 }
5017 
5033 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5034  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
5035 {
5036  // Set the reference cMo
5037  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
5038  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
5039  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
5040 
5041  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5042  TrackerWrapper *tracker = it_tracker->second;
5043  tracker->setPose(*it_img->second, it_camPose->second);
5044  tracker->getPose(m_cMo);
5045  }
5046  else {
5047  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
5048  }
5049 
5050  // Vector of missing pose matrices for cameras
5051  std::vector<std::string> vectorOfMissingCameraPoses;
5052 
5053  // Set pose for the specified cameras
5054  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
5055  if (it_tracker->first != m_referenceCameraName) {
5056  it_img = mapOfColorImages.find(it_tracker->first);
5057  it_camPose = mapOfCameraPoses.find(it_tracker->first);
5058 
5059  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5060  // Set pose
5061  TrackerWrapper *tracker = it_tracker->second;
5062  tracker->setPose(*it_img->second, it_camPose->second);
5063  }
5064  else {
5065  vectorOfMissingCameraPoses.push_back(it_tracker->first);
5066  }
5067  }
5068  }
5069 
5070  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5071  it != vectorOfMissingCameraPoses.end(); ++it) {
5072  it_img = mapOfColorImages.find(*it);
5073  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5075 
5076  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5077  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5078  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5079  }
5080  else {
5082  "Missing image or missing camera transformation "
5083  "matrix! Cannot set pose for camera: %s!",
5084  it->c_str());
5085  }
5086  }
5087 }
5088 
5104 {
5106 
5107  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5108  it != m_mapOfTrackers.end(); ++it) {
5109  TrackerWrapper *tracker = it->second;
5110  tracker->setProjectionErrorComputation(flag);
5111  }
5112 }
5113 
5118 {
5120 
5121  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5122  it != m_mapOfTrackers.end(); ++it) {
5123  TrackerWrapper *tracker = it->second;
5124  tracker->setProjectionErrorDisplay(display);
5125  }
5126 }
5127 
5132 {
5134 
5135  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5136  it != m_mapOfTrackers.end(); ++it) {
5137  TrackerWrapper *tracker = it->second;
5138  tracker->setProjectionErrorDisplayArrowLength(length);
5139  }
5140 }
5141 
5143 {
5145 
5146  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5147  it != m_mapOfTrackers.end(); ++it) {
5148  TrackerWrapper *tracker = it->second;
5149  tracker->setProjectionErrorDisplayArrowThickness(thickness);
5150  }
5151 }
5152 
5158 void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
5159 {
5160  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
5161  if (it != m_mapOfTrackers.end()) {
5162  m_referenceCameraName = referenceCameraName;
5163  }
5164  else {
5165  std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
5166  }
5167 }
5168 
5170 {
5172 
5173  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5174  it != m_mapOfTrackers.end(); ++it) {
5175  TrackerWrapper *tracker = it->second;
5176  tracker->setScanLineVisibilityTest(v);
5177  }
5178 }
5179 
5192 {
5193  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5194  it != m_mapOfTrackers.end(); ++it) {
5195  TrackerWrapper *tracker = it->second;
5196  tracker->setTrackerType(type);
5197  }
5198 }
5199 
5209 void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
5210 {
5211  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
5212  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
5213  if (it_tracker != m_mapOfTrackers.end()) {
5214  TrackerWrapper *tracker = it_tracker->second;
5215  tracker->setTrackerType(it->second);
5216  }
5217  }
5218 }
5219 
5229 void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
5230 {
5231  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5232  it != m_mapOfTrackers.end(); ++it) {
5233  TrackerWrapper *tracker = it->second;
5234  tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
5235  }
5236 }
5237 
5247 void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
5248 {
5249  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5250  it != m_mapOfTrackers.end(); ++it) {
5251  TrackerWrapper *tracker = it->second;
5252  tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
5253  }
5254 }
5255 
5265 void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
5266 {
5267  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5268  it != m_mapOfTrackers.end(); ++it) {
5269  TrackerWrapper *tracker = it->second;
5270  tracker->setUseEdgeTracking(name, useEdgeTracking);
5271  }
5272 }
5273 
5274 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5284 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
5285 {
5286  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5287  it != m_mapOfTrackers.end(); ++it) {
5288  TrackerWrapper *tracker = it->second;
5289  tracker->setUseKltTracking(name, useKltTracking);
5290  }
5291 }
5292 #endif
5293 
5295 {
5296  // Test tracking fails only if all testTracking have failed
5297  bool isOneTestTrackingOk = false;
5298  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5299  it != m_mapOfTrackers.end(); ++it) {
5300  TrackerWrapper *tracker = it->second;
5301  try {
5302  tracker->testTracking();
5303  isOneTestTrackingOk = true;
5304  }
5305  catch (...) {
5306  }
5307  }
5308 
5309  if (!isOneTestTrackingOk) {
5310  std::ostringstream oss;
5311  oss << "Not enough moving edges to track the object. Try to reduce the "
5312  "threshold="
5313  << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
5315  }
5316 }
5317 
5328 {
5329  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5330  mapOfImages[m_referenceCameraName] = &I;
5331 
5332  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5333  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5334 
5335  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5336 }
5337 
5348 {
5349  std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5350  mapOfColorImages[m_referenceCameraName] = &I_color;
5351 
5352  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5353  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5354 
5355  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5356 }
5357 
5369 {
5370  if (m_mapOfTrackers.size() == 2) {
5371  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5372  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5373  mapOfImages[it->first] = &I1;
5374  ++it;
5375 
5376  mapOfImages[it->first] = &I2;
5377 
5378  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5379  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5380 
5381  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5382  }
5383  else {
5384  std::stringstream ss;
5385  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5386  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5387  }
5388 }
5389 
5400 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &_colorI2)
5401 {
5402  if (m_mapOfTrackers.size() == 2) {
5403  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5404  std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5405  mapOfImages[it->first] = &I_color1;
5406  ++it;
5407 
5408  mapOfImages[it->first] = &_colorI2;
5409 
5410  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5411  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5412 
5413  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5414  }
5415  else {
5416  std::stringstream ss;
5417  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5418  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5419  }
5420 }
5421 
5429 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5430 {
5431  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5432  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5433 
5434  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5435 }
5436 
5444 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5445 {
5446  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5447  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5448 
5449  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5450 }
5451 
5452 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
5461 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5462  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5463 {
5464  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5465  it != m_mapOfTrackers.end(); ++it) {
5466  TrackerWrapper *tracker = it->second;
5467 
5468  if ((tracker->m_trackerType & (EDGE_TRACKER |
5469 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5470  KLT_TRACKER |
5471 #endif
5473  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5474  }
5475 
5476  if (tracker->m_trackerType & (EDGE_TRACKER
5477 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5478  | KLT_TRACKER
5479 #endif
5480  ) &&
5481  mapOfImages[it->first] == nullptr) {
5482  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5483  }
5484 
5485  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5486  !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5487  throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
5488  }
5489  }
5490 
5491  preTracking(mapOfImages, mapOfPointClouds);
5492 
5493  try {
5494  computeVVS(mapOfImages);
5495  }
5496  catch (...) {
5497  covarianceMatrix = -1;
5498  throw; // throw the original exception
5499  }
5500 
5501  testTracking();
5502 
5503  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5504  it != m_mapOfTrackers.end(); ++it) {
5505  TrackerWrapper *tracker = it->second;
5506 
5507  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5508  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5509  }
5510 
5511  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5512 
5513  if (displayFeatures) {
5514 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5515  if (tracker->m_trackerType & KLT_TRACKER) {
5516  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5517  }
5518 #endif
5519 
5520  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5521  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5522  }
5523  }
5524  }
5525 
5527 }
5528 
5537 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5538  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5539 {
5540  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5541  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5542  it != m_mapOfTrackers.end(); ++it) {
5543  TrackerWrapper *tracker = it->second;
5544 
5545  if ((tracker->m_trackerType & (EDGE_TRACKER |
5546 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5547  KLT_TRACKER |
5548 #endif
5550  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5551  }
5552 
5553  if (tracker->m_trackerType & (EDGE_TRACKER
5554 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5555  | KLT_TRACKER
5556 #endif
5557  ) &&
5558  mapOfImages[it->first] == nullptr) {
5559  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5560  }
5561  else if (tracker->m_trackerType & (EDGE_TRACKER
5562 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5563  | KLT_TRACKER
5564 #endif
5565  ) &&
5566  mapOfImages[it->first] != nullptr) {
5567  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5568  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5569  }
5570 
5571  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5572  !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5573  throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
5574  }
5575  }
5576 
5577  preTracking(mapOfImages, mapOfPointClouds);
5578 
5579  try {
5580  computeVVS(mapOfImages);
5581  }
5582  catch (...) {
5583  covarianceMatrix = -1;
5584  throw; // throw the original exception
5585  }
5586 
5587  testTracking();
5588 
5589  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5590  it != m_mapOfTrackers.end(); ++it) {
5591  TrackerWrapper *tracker = it->second;
5592 
5593  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5594  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5595  }
5596 
5597  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5598 
5599  if (displayFeatures) {
5600 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5601  if (tracker->m_trackerType & KLT_TRACKER) {
5602  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5603  }
5604 #endif
5605 
5606  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5607  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5608  }
5609  }
5610  }
5611 
5613 }
5614 #endif
5615 
5626 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5627  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5628  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5629  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5630 {
5631  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5632  it != m_mapOfTrackers.end(); ++it) {
5633  TrackerWrapper *tracker = it->second;
5634 
5635  if ((tracker->m_trackerType & (EDGE_TRACKER |
5636 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5637  KLT_TRACKER |
5638 #endif
5640  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5641  }
5642 
5643  if (tracker->m_trackerType & (EDGE_TRACKER
5644 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5645  | KLT_TRACKER
5646 #endif
5647  ) &&
5648  mapOfImages[it->first] == nullptr) {
5649  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5650  }
5651 
5652  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5653  (mapOfPointClouds[it->first] == nullptr)) {
5654  throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5655  }
5656  }
5657 
5658  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5659 
5660  try {
5661  computeVVS(mapOfImages);
5662  }
5663  catch (...) {
5664  covarianceMatrix = -1;
5665  throw; // throw the original exception
5666  }
5667 
5668  testTracking();
5669 
5670  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5671  it != m_mapOfTrackers.end(); ++it) {
5672  TrackerWrapper *tracker = it->second;
5673 
5674  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5675  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5676  }
5677 
5678  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5679 
5680  if (displayFeatures) {
5681 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5682  if (tracker->m_trackerType & KLT_TRACKER) {
5683  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5684  }
5685 #endif
5686 
5687  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5688  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5689  }
5690  }
5691  }
5692 
5694 }
5695 
5706 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5707  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5708  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5709  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5710 {
5711  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5712  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5713  it != m_mapOfTrackers.end(); ++it) {
5714  TrackerWrapper *tracker = it->second;
5715 
5716  if ((tracker->m_trackerType & (EDGE_TRACKER |
5717 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5718  KLT_TRACKER |
5719 #endif
5721  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5722  }
5723 
5724  if (tracker->m_trackerType & (EDGE_TRACKER
5725 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5726  | KLT_TRACKER
5727 #endif
5728  ) &&
5729  mapOfColorImages[it->first] == nullptr) {
5730  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5731  }
5732  else if (tracker->m_trackerType & (EDGE_TRACKER
5733 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5734  | KLT_TRACKER
5735 #endif
5736  ) &&
5737  mapOfColorImages[it->first] != nullptr) {
5738  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5739  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5740  }
5741 
5742  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5743  (mapOfPointClouds[it->first] == nullptr)) {
5744  throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5745  }
5746  }
5747 
5748  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5749 
5750  try {
5751  computeVVS(mapOfImages);
5752  }
5753  catch (...) {
5754  covarianceMatrix = -1;
5755  throw; // throw the original exception
5756  }
5757 
5758  testTracking();
5759 
5760  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5761  it != m_mapOfTrackers.end(); ++it) {
5762  TrackerWrapper *tracker = it->second;
5763 
5764  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5765  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5766  }
5767 
5768  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5769 
5770  if (displayFeatures) {
5771 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5772  if (tracker->m_trackerType & KLT_TRACKER) {
5773  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5774  }
5775 #endif
5776 
5777  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5778  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5779  }
5780  }
5781  }
5782 
5784 }
5785 
5786 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5787  std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5788  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5789  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5790 {
5791  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5792  it != m_mapOfTrackers.end(); ++it) {
5793  TrackerWrapper *tracker = it->second;
5794 
5795  if ((tracker->m_trackerType & (EDGE_TRACKER |
5796 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5797  KLT_TRACKER |
5798 #endif
5800  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5801  }
5802 
5803  if (tracker->m_trackerType & (EDGE_TRACKER
5804 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5805  | KLT_TRACKER
5806 #endif
5807  ) &&
5808  mapOfImages[it->first] == nullptr) {
5809  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5810  }
5811 
5812  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5813  (mapOfPointClouds[it->first] == nullptr)) {
5814  throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5815  }
5816  }
5817 
5818  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5819 
5820  try {
5821  computeVVS(mapOfImages);
5822  }
5823  catch (...) {
5824  covarianceMatrix = -1;
5825  throw; // throw the original exception
5826  }
5827 
5828  testTracking();
5829 
5830  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5831  it != m_mapOfTrackers.end(); ++it) {
5832  TrackerWrapper *tracker = it->second;
5833 
5834  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5835  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5836  }
5837 
5838  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5839 
5840  if (displayFeatures) {
5841 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5842  if (tracker->m_trackerType & KLT_TRACKER) {
5843  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5844  }
5845 #endif
5846 
5847  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5848  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5849  }
5850  }
5851  }
5852 
5854 }
5855 
5866 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5867  std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5868  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5869  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5870 {
5871  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5872  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5873  it != m_mapOfTrackers.end(); ++it) {
5874  TrackerWrapper *tracker = it->second;
5875 
5876  if ((tracker->m_trackerType & (EDGE_TRACKER |
5877 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5878  KLT_TRACKER |
5879 #endif
5881  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5882  }
5883 
5884  if (tracker->m_trackerType & (EDGE_TRACKER
5885 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5886  | KLT_TRACKER
5887 #endif
5888  ) &&
5889  mapOfColorImages[it->first] == nullptr) {
5890  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5891  }
5892  else if (tracker->m_trackerType & (EDGE_TRACKER
5893 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5894  | KLT_TRACKER
5895 #endif
5896  ) &&
5897  mapOfColorImages[it->first] != nullptr) {
5898  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5899  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5900  }
5901 
5902  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5903  (mapOfPointClouds[it->first] == nullptr)) {
5904  throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5905  }
5906  }
5907 
5908  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5909 
5910  try {
5911  computeVVS(mapOfImages);
5912  }
5913  catch (...) {
5914  covarianceMatrix = -1;
5915  throw; // throw the original exception
5916  }
5917 
5918  testTracking();
5919 
5920  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5921  it != m_mapOfTrackers.end(); ++it) {
5922  TrackerWrapper *tracker = it->second;
5923 
5924  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5925  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5926  }
5927 
5928  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5929 
5930  if (displayFeatures) {
5931 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5932  if (tracker->m_trackerType & KLT_TRACKER) {
5933  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5934  }
5935 #endif
5936 
5937  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5938  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5939  }
5940  }
5941  }
5942 
5944 }
5945 
// Default constructor: the wrapper behaves as a moving-edge tracker only.
vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
{
  m_lambda = 1.0; // default VVS gain
  m_maxIter = 30; // default maximum number of VVS iterations

#ifdef VISP_HAVE_OGRE
  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");

  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
#endif
}
5959 
5960 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
5961  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5962 {
5963  if ((m_trackerType & (EDGE_TRACKER |
5964 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5965  KLT_TRACKER |
5966 #endif
5968  throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
5969  }
5970 
5971  m_lambda = 1.0;
5972  m_maxIter = 30;
5973 
5974 #ifdef VISP_HAVE_OGRE
5975  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5976 
5977  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5978 #endif
5979 }
5980 
5981 // Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
5983 {
5984  computeVVSInit(ptr_I);
5985 
5986  if (m_error.getRows() < 4) {
5987  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
5988  }
5989 
5990  double normRes = 0;
5991  double normRes_1 = -1;
5992  unsigned int iter = 0;
5993 
5994  double factorEdge = 1.0;
5995 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5996  double factorKlt = 1.0;
5997 #endif
5998  double factorDepth = 1.0;
5999  double factorDepthDense = 1.0;
6000 
6001  vpMatrix LTL;
6002  vpColVector LTR, v;
6003  vpColVector error_prev;
6004 
6005  double mu = m_initialMu;
6006  vpHomogeneousMatrix cMo_prev;
6007 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6008  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
6009 #endif
6010  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
6011  if (isoJoIdentity)
6012  oJo.eye();
6013 
6014  // Covariance
6015  vpColVector W_true(m_error.getRows());
6016  vpMatrix L_true, LVJ_true;
6017 
6018  unsigned int nb_edge_features = m_error_edge.getRows();
6019 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6020  unsigned int nb_klt_features = m_error_klt.getRows();
6021 #endif
6022  unsigned int nb_depth_features = m_error_depthNormal.getRows();
6023  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
6024 
6025  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
6026  computeVVSInteractionMatrixAndResidu(ptr_I);
6027 
6028  bool reStartFromLastIncrement = false;
6029  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
6030 
6031 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6032  if (reStartFromLastIncrement) {
6033  if (m_trackerType & KLT_TRACKER) {
6034  ctTc0 = ctTc0_Prev;
6035  }
6036  }
6037 #endif
6038 
6039  if (!reStartFromLastIncrement) {
6040  computeVVSWeights();
6041 
6042  if (computeCovariance) {
6043  L_true = m_L;
6044  if (!isoJoIdentity) {
6046  cVo.build(m_cMo);
6047  LVJ_true = (m_L * cVo * oJo);
6048  }
6049  }
6050 
6052  if (iter == 0) {
6053  // If all the 6 dof should be estimated, we check if the interaction
6054  // matrix is full rank. If not we remove automatically the dof that
6055  // cannot be estimated. This is particularly useful when considering
6056  // circles (rank 5) and cylinders (rank 4)
6057  if (isoJoIdentity) {
6058  cVo.build(m_cMo);
6059 
6060  vpMatrix K; // kernel
6061  unsigned int rank = (m_L * cVo).kernel(K);
6062  if (rank == 0) {
6063  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
6064  }
6065 
6066  if (rank != 6) {
6067  vpMatrix I; // Identity
6068  I.eye(6);
6069  oJo = I - K.AtA();
6070 
6071  isoJoIdentity = false;
6072  }
6073  }
6074  }
6075 
6076  // Weighting
6077  double num = 0;
6078  double den = 0;
6079 
6080  unsigned int start_index = 0;
6081  if (m_trackerType & EDGE_TRACKER) {
6082  for (unsigned int i = 0; i < nb_edge_features; i++) {
6083  double wi = m_w_edge[i] * m_factor[i] * factorEdge;
6084  W_true[i] = wi;
6085  m_weightedError[i] = wi * m_error[i];
6086 
6087  num += wi * vpMath::sqr(m_error[i]);
6088  den += wi;
6089 
6090  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6091  m_L[i][j] *= wi;
6092  }
6093  }
6094 
6095  start_index += nb_edge_features;
6096  }
6097 
6098 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6099  if (m_trackerType & KLT_TRACKER) {
6100  for (unsigned int i = 0; i < nb_klt_features; i++) {
6101  double wi = m_w_klt[i] * factorKlt;
6102  W_true[start_index + i] = wi;
6103  m_weightedError[start_index + i] = wi * m_error_klt[i];
6104 
6105  num += wi * vpMath::sqr(m_error[start_index + i]);
6106  den += wi;
6107 
6108  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6109  m_L[start_index + i][j] *= wi;
6110  }
6111  }
6112 
6113  start_index += nb_klt_features;
6114  }
6115 #endif
6116 
6117  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6118  for (unsigned int i = 0; i < nb_depth_features; i++) {
6119  double wi = m_w_depthNormal[i] * factorDepth;
6120  m_w[start_index + i] = m_w_depthNormal[i];
6121  m_weightedError[start_index + i] = wi * m_error[start_index + i];
6122 
6123  num += wi * vpMath::sqr(m_error[start_index + i]);
6124  den += wi;
6125 
6126  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6127  m_L[start_index + i][j] *= wi;
6128  }
6129  }
6130 
6131  start_index += nb_depth_features;
6132  }
6133 
6134  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6135  for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
6136  double wi = m_w_depthDense[i] * factorDepthDense;
6137  m_w[start_index + i] = m_w_depthDense[i];
6138  m_weightedError[start_index + i] = wi * m_error[start_index + i];
6139 
6140  num += wi * vpMath::sqr(m_error[start_index + i]);
6141  den += wi;
6142 
6143  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6144  m_L[start_index + i][j] *= wi;
6145  }
6146  }
6147 
6148  // start_index += nb_depth_dense_features;
6149  }
6150 
6151  computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
6152 
6153  cMo_prev = m_cMo;
6154 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6155  if (m_trackerType & KLT_TRACKER) {
6156  ctTc0_Prev = ctTc0;
6157  }
6158 #endif
6159 
6160  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
6161 
6162 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6163  if (m_trackerType & KLT_TRACKER) {
6164  ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
6165  }
6166 #endif
6167  normRes_1 = normRes;
6168 
6169  normRes = sqrt(num / den);
6170  }
6171 
6172  iter++;
6173  }
6174 
6175  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
6176 
6177  if (m_trackerType & EDGE_TRACKER) {
6179  }
6180 }
6181 
6182 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
6183 {
6184  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6185  "TrackerWrapper::computeVVSInit("
6186  ") should not be called!");
6187 }
6188 
6189 void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
6190 {
6191  initMbtTracking(ptr_I);
6192 
6193  unsigned int nbFeatures = 0;
6194 
6195  if (m_trackerType & EDGE_TRACKER) {
6196  nbFeatures += m_error_edge.getRows();
6197  }
6198  else {
6199  m_error_edge.clear();
6200  m_weightedError_edge.clear();
6201  m_L_edge.clear();
6202  m_w_edge.clear();
6203  }
6204 
6205 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6206  if (m_trackerType & KLT_TRACKER) {
6208  nbFeatures += m_error_klt.getRows();
6209  }
6210  else {
6211  m_error_klt.clear();
6212  m_weightedError_klt.clear();
6213  m_L_klt.clear();
6214  m_w_klt.clear();
6215  }
6216 #endif
6217 
6218  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6220  nbFeatures += m_error_depthNormal.getRows();
6221  }
6222  else {
6223  m_error_depthNormal.clear();
6224  m_weightedError_depthNormal.clear();
6225  m_L_depthNormal.clear();
6226  m_w_depthNormal.clear();
6227  }
6228 
6229  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6231  nbFeatures += m_error_depthDense.getRows();
6232  }
6233  else {
6234  m_error_depthDense.clear();
6235  m_weightedError_depthDense.clear();
6236  m_L_depthDense.clear();
6237  m_w_depthDense.clear();
6238  }
6239 
6240  m_L.resize(nbFeatures, 6, false, false);
6241  m_error.resize(nbFeatures, false);
6242 
6243  m_weightedError.resize(nbFeatures, false);
6244  m_w.resize(nbFeatures, false);
6245  m_w = 1;
6246 }
6247 
6249 {
6250  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6251  "TrackerWrapper::"
6252  "computeVVSInteractionMatrixAndR"
6253  "esidu() should not be called!");
6254 }
6255 
6257 {
6258  if (m_trackerType & EDGE_TRACKER) {
6260  }
6261 
6262 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6263  if (m_trackerType & KLT_TRACKER) {
6265  }
6266 #endif
6267 
6268  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6270  }
6271 
6272  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6274  }
6275 
6276  unsigned int start_index = 0;
6277  if (m_trackerType & EDGE_TRACKER) {
6278  m_L.insert(m_L_edge, start_index, 0);
6279  m_error.insert(start_index, m_error_edge);
6280 
6281  start_index += m_error_edge.getRows();
6282  }
6283 
6284 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6285  if (m_trackerType & KLT_TRACKER) {
6286  m_L.insert(m_L_klt, start_index, 0);
6287  m_error.insert(start_index, m_error_klt);
6288 
6289  start_index += m_error_klt.getRows();
6290  }
6291 #endif
6292 
6293  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6294  m_L.insert(m_L_depthNormal, start_index, 0);
6295  m_error.insert(start_index, m_error_depthNormal);
6296 
6297  start_index += m_error_depthNormal.getRows();
6298  }
6299 
6300  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6301  m_L.insert(m_L_depthDense, start_index, 0);
6302  m_error.insert(start_index, m_error_depthDense);
6303 
6304  // start_index += m_error_depthDense.getRows();
6305  }
6306 }
6307 
6309 {
6310  unsigned int start_index = 0;
6311 
6312  if (m_trackerType & EDGE_TRACKER) {
6314  m_w.insert(start_index, m_w_edge);
6315 
6316  start_index += m_w_edge.getRows();
6317  }
6318 
6319 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6320  if (m_trackerType & KLT_TRACKER) {
6321  vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
6322  m_w.insert(start_index, m_w_klt);
6323 
6324  start_index += m_w_klt.getRows();
6325  }
6326 #endif
6327 
6328  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6329  if (m_depthNormalUseRobust) {
6330  vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
6331  m_w.insert(start_index, m_w_depthNormal);
6332  }
6333 
6334  start_index += m_w_depthNormal.getRows();
6335  }
6336 
6337  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6339  m_w.insert(start_index, m_w_depthDense);
6340 
6341  // start_index += m_w_depthDense.getRows();
6342  }
6343 }
6344 
6345 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
6346  const vpCameraParameters &cam, const vpColor &col,
6347  unsigned int thickness, bool displayFullModel)
6348 {
6349  if (displayFeatures) {
6350  std::vector<std::vector<double> > features = getFeaturesForDisplay();
6351  for (size_t i = 0; i < features.size(); i++) {
6352  if (vpMath::equal(features[i][0], 0)) {
6353  vpImagePoint ip(features[i][1], features[i][2]);
6354  int state = static_cast<int>(features[i][3]);
6355 
6356  switch (state) {
6359  break;
6360 
6361  case vpMeSite::CONTRAST:
6362  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
6363  break;
6364 
6365  case vpMeSite::THRESHOLD:
6367  break;
6368 
6369  case vpMeSite::M_ESTIMATOR:
6370  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
6371  break;
6372 
6373  case vpMeSite::TOO_NEAR:
6374  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
6375  break;
6376 
6377  default:
6379  }
6380  }
6381  else if (vpMath::equal(features[i][0], 1)) {
6382  vpImagePoint ip1(features[i][1], features[i][2]);
6384 
6385  vpImagePoint ip2(features[i][3], features[i][4]);
6386  double id = features[i][5];
6387  std::stringstream ss;
6388  ss << id;
6389  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
6390  }
6391  else if (vpMath::equal(features[i][0], 2)) {
6392  vpImagePoint im_centroid(features[i][1], features[i][2]);
6393  vpImagePoint im_extremity(features[i][3], features[i][4]);
6394  bool desired = vpMath::equal(features[i][0], 2);
6395  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
6396  }
6397  }
6398  }
6399 
6400  std::vector<std::vector<double> > models =
6401  getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
6402  for (size_t i = 0; i < models.size(); i++) {
6403  if (vpMath::equal(models[i][0], 0)) {
6404  vpImagePoint ip1(models[i][1], models[i][2]);
6405  vpImagePoint ip2(models[i][3], models[i][4]);
6406  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
6407  }
6408  else if (vpMath::equal(models[i][0], 1)) {
6409  vpImagePoint center(models[i][1], models[i][2]);
6410  double n20 = models[i][3];
6411  double n11 = models[i][4];
6412  double n02 = models[i][5];
6413  vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
6414  }
6415  }
6416 
6417 #ifdef VISP_HAVE_OGRE
6418  if ((m_trackerType & EDGE_TRACKER)
6419 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6420  || (m_trackerType & KLT_TRACKER)
6421 #endif
6422  ) {
6423  if (useOgre)
6424  faces.displayOgre(cMo);
6425  }
6426 #endif
6427 }
6428 
6429 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
6430  const vpCameraParameters &cam, const vpColor &col,
6431  unsigned int thickness, bool displayFullModel)
6432 {
6433  if (displayFeatures) {
6434  std::vector<std::vector<double> > features = getFeaturesForDisplay();
6435  for (size_t i = 0; i < features.size(); i++) {
6436  if (vpMath::equal(features[i][0], 0)) {
6437  vpImagePoint ip(features[i][1], features[i][2]);
6438  int state = static_cast<int>(features[i][3]);
6439 
6440  switch (state) {
6443  break;
6444 
6445  case vpMeSite::CONTRAST:
6446  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
6447  break;
6448 
6449  case vpMeSite::THRESHOLD:
6451  break;
6452 
6453  case vpMeSite::M_ESTIMATOR:
6454  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
6455  break;
6456 
6457  case vpMeSite::TOO_NEAR:
6458  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
6459  break;
6460 
6461  default:
6463  }
6464  }
6465  else if (vpMath::equal(features[i][0], 1)) {
6466  vpImagePoint ip1(features[i][1], features[i][2]);
6468 
6469  vpImagePoint ip2(features[i][3], features[i][4]);
6470  double id = features[i][5];
6471  std::stringstream ss;
6472  ss << id;
6473  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
6474  }
6475  else if (vpMath::equal(features[i][0], 2)) {
6476  vpImagePoint im_centroid(features[i][1], features[i][2]);
6477  vpImagePoint im_extremity(features[i][3], features[i][4]);
6478  bool desired = vpMath::equal(features[i][0], 2);
6479  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
6480  }
6481  }
6482  }
6483 
6484  std::vector<std::vector<double> > models =
6485  getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
6486  for (size_t i = 0; i < models.size(); i++) {
6487  if (vpMath::equal(models[i][0], 0)) {
6488  vpImagePoint ip1(models[i][1], models[i][2]);
6489  vpImagePoint ip2(models[i][3], models[i][4]);
6490  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
6491  }
6492  else if (vpMath::equal(models[i][0], 1)) {
6493  vpImagePoint center(models[i][1], models[i][2]);
6494  double n20 = models[i][3];
6495  double n11 = models[i][4];
6496  double n02 = models[i][5];
6497  vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
6498  }
6499  }
6500 
6501 #ifdef VISP_HAVE_OGRE
6502  if ((m_trackerType & EDGE_TRACKER)
6503 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6504  || (m_trackerType & KLT_TRACKER)
6505 #endif
6506  ) {
6507  if (useOgre)
6508  faces.displayOgre(cMo);
6509  }
6510 #endif
6511 }
6512 
6513 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6514 {
6515  std::vector<std::vector<double> > features;
6516 
6517  if (m_trackerType & EDGE_TRACKER) {
6518  // m_featuresToBeDisplayedEdge updated after computeVVS()
6519  features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6520  }
6521 
6522 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6523  if (m_trackerType & KLT_TRACKER) {
6524  // m_featuresToBeDisplayedKlt updated after postTracking()
6525  features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6526  }
6527 #endif
6528 
6529  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6530  // m_featuresToBeDisplayedDepthNormal updated after postTracking()
6531  features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6532  m_featuresToBeDisplayedDepthNormal.end());
6533  }
6534 
6535  return features;
6536 }
6537 
6538 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width,
6539  unsigned int height,
6540  const vpHomogeneousMatrix &cMo,
6541  const vpCameraParameters &cam,
6542  bool displayFullModel)
6543 {
6544  std::vector<std::vector<double> > models;
6545 
6546  // Do not add multiple times the same models
6547  if (m_trackerType == EDGE_TRACKER) {
6548  models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6549  }
6550 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6551  else if (m_trackerType == KLT_TRACKER) {
6552  models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6553  }
6554 #endif
6555  else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6556  models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6557  }
6558  else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6559  models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6560  }
6561  else {
6562  // Edge and KLT trackers use the same primitives
6563  if (m_trackerType & EDGE_TRACKER) {
6564  std::vector<std::vector<double> > edgeModels =
6565  vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6566  models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6567  }
6568 
6569  // Depth dense and depth normal trackers use the same primitives
6570  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6571  std::vector<std::vector<double> > depthDenseModels =
6572  vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6573  models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6574  }
6575  }
6576 
6577  return models;
6578 }
6579 
6580 void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
6581 {
6582  if (!modelInitialised) {
6583  throw vpException(vpException::fatalError, "model not initialized");
6584  }
6585 
6586  if (useScanLine || clippingFlag > 3)
6587  m_cam.computeFov(I.getWidth(), I.getHeight());
6588 
6589  bool reInitialisation = false;
6590  if (!useOgre) {
6591  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6592  }
6593  else {
6594 #ifdef VISP_HAVE_OGRE
6595  if (!faces.isOgreInitialised()) {
6596  faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());
6597 
6598  faces.setOgreShowConfigDialog(ogreShowConfigDialog);
6599  faces.initOgre(m_cam);
6600  // Turn off Ogre config dialog display for the next call to this
6601  // function since settings are saved in the ogre.cfg file and used
6602  // during the next call
6603  ogreShowConfigDialog = false;
6604  }
6605 
6606  faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6607 #else
6608  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6609 #endif
6610  }
6611 
6612  if (useScanLine) {
6613  faces.computeClippedPolygons(m_cMo, m_cam);
6614  faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight());
6615  }
6616 
6617 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6618  if (m_trackerType & KLT_TRACKER)
6620 #endif
6621 
6622  if (m_trackerType & EDGE_TRACKER) {
6624 
6625  bool a = false;
6626  vpMbEdgeTracker::visibleFace(I, m_cMo, a); // should be useless, but keep it for nbvisiblepolygone
6627 
6629  }
6630 
6631  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6632  vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);
6633 
6634  if (m_trackerType & DEPTH_DENSE_TRACKER)
6635  vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
6636 }
6637 
6638 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6639  double radius, int idFace, const std::string &name)
6640 {
6641  if (m_trackerType & EDGE_TRACKER)
6642  vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6643 
6644 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6645  if (m_trackerType & KLT_TRACKER)
6646  vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6647 #endif
6648 }
6649 
6650 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace,
6651  const std::string &name)
6652 {
6653  if (m_trackerType & EDGE_TRACKER)
6654  vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6655 
6656 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6657  if (m_trackerType & KLT_TRACKER)
6658  vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6659 #endif
6660 }
6661 
6662 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
6663 {
6664  if (m_trackerType & EDGE_TRACKER)
6666 
6667 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6668  if (m_trackerType & KLT_TRACKER)
6670 #endif
6671 
6672  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6674 
6675  if (m_trackerType & DEPTH_DENSE_TRACKER)
6677 }
6678 
6679 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
6680 {
6681  if (m_trackerType & EDGE_TRACKER)
6683 
6684 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6685  if (m_trackerType & KLT_TRACKER)
6687 #endif
6688 
6689  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6691 
6692  if (m_trackerType & DEPTH_DENSE_TRACKER)
6694 }
6695 
6697 {
6698  if (m_trackerType & EDGE_TRACKER) {
6701  }
6702 }
6703 
6704 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile, bool verbose)
6705 {
6706 #if defined(VISP_HAVE_PUGIXML)
6707  // Load projection error config
6708  vpMbTracker::loadConfigFile(configFile, verbose);
6709 
6711  xmlp.setVerbose(verbose);
6712  xmlp.setCameraParameters(m_cam);
6713  xmlp.setAngleAppear(vpMath::deg(angleAppears));
6714  xmlp.setAngleDisappear(vpMath::deg(angleDisappears));
6715 
6716  // Edge
6717  xmlp.setEdgeMe(me);
6718 
6719  // KLT
6720  xmlp.setKltMaxFeatures(10000);
6721  xmlp.setKltWindowSize(5);
6722  xmlp.setKltQuality(0.01);
6723  xmlp.setKltMinDistance(5);
6724  xmlp.setKltHarrisParam(0.01);
6725  xmlp.setKltBlockSize(3);
6726  xmlp.setKltPyramidLevels(3);
6727 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6728  xmlp.setKltMaskBorder(maskBorder);
6729 #endif
6730 
6731  // Depth normal
6732  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6733  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6734  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6735  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6736  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6737  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6738 
6739  // Depth dense
6740  xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6741  xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6742 
6743  try {
6744  if (verbose) {
6745  std::cout << " *********** Parsing XML for";
6746  }
6747 
6748  std::vector<std::string> tracker_names;
6749  if (m_trackerType & EDGE_TRACKER)
6750  tracker_names.push_back("Edge");
6751 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6752  if (m_trackerType & KLT_TRACKER)
6753  tracker_names.push_back("Klt");
6754 #endif
6755  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6756  tracker_names.push_back("Depth Normal");
6757  if (m_trackerType & DEPTH_DENSE_TRACKER)
6758  tracker_names.push_back("Depth Dense");
6759 
6760  if (verbose) {
6761  for (size_t i = 0; i < tracker_names.size(); i++) {
6762  std::cout << " " << tracker_names[i];
6763  if (i == tracker_names.size() - 1) {
6764  std::cout << " ";
6765  }
6766  }
6767 
6768  std::cout << "Model-Based Tracker ************ " << std::endl;
6769  }
6770 
6771  xmlp.parse(configFile);
6772  }
6773  catch (...) {
6774  throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
6775  }
6776 
6777  vpCameraParameters camera;
6778  xmlp.getCameraParameters(camera);
6779  setCameraParameters(camera);
6780 
6781  angleAppears = vpMath::rad(xmlp.getAngleAppear());
6782  angleDisappears = vpMath::rad(xmlp.getAngleDisappear());
6783 
6784  if (xmlp.hasNearClippingDistance())
6785  setNearClippingDistance(xmlp.getNearClippingDistance());
6786 
6787  if (xmlp.hasFarClippingDistance())
6788  setFarClippingDistance(xmlp.getFarClippingDistance());
6789 
6790  if (xmlp.getFovClipping()) {
6792  }
6793 
6794  useLodGeneral = xmlp.getLodState();
6795  minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
6796  minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();
6797 
6798  applyLodSettingInConfig = false;
6799  if (this->getNbPolygon() > 0) {
6800  applyLodSettingInConfig = true;
6801  setLod(useLodGeneral);
6802  setMinLineLengthThresh(minLineLengthThresholdGeneral);
6803  setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
6804  }
6805 
6806  // Edge
6807  vpMe meParser;
6808  xmlp.getEdgeMe(meParser);
6810 
6811  // KLT
6812 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6813  tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
6814  tracker.setWindowSize((int)xmlp.getKltWindowSize());
6815  tracker.setQuality(xmlp.getKltQuality());
6816  tracker.setMinDistance(xmlp.getKltMinDistance());
6817  tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6818  tracker.setBlockSize((int)xmlp.getKltBlockSize());
6819  tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
6820  maskBorder = xmlp.getKltMaskBorder();
6821 
6822  // if(useScanLine)
6823  faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6824 #endif
6825 
6826  // Depth normal
6827  setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
6828  setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
6829  setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
6830  setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
6831  setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
6832 
6833  // Depth dense
6834  setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
6835 #else
6836  (void)configFile;
6837  (void)verbose;
6838  throw(vpException(vpException::ioError, "vpMbGenericTracker::TrackerWrapper::loadConfigFile() needs pugixml built-in 3rdparty"));
6839 #endif
6840 }
6841 
6842 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
6844  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6845 {
6846 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6847  // KLT
6848  if (m_trackerType & KLT_TRACKER) {
6849  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6850  vpMbKltTracker::reinit(*ptr_I);
6851  }
6852  }
6853 #endif
6854 
6855  // Looking for new visible face
6856  if (m_trackerType & EDGE_TRACKER) {
6857  bool newvisibleface = false;
6858  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6859 
6860  if (useScanLine) {
6861  faces.computeClippedPolygons(m_cMo, m_cam);
6862  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6863  }
6864  }
6865 
6866  // Depth normal
6867  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6868  vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);
6869 
6870  // Depth dense
6871  if (m_trackerType & DEPTH_DENSE_TRACKER)
6872  vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);
6873 
6874  // Edge
6875  if (m_trackerType & EDGE_TRACKER) {
6877 
6878  vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6879  // Reinit the moving edge for the lines which need it.
6880  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6881 
6882  if (computeProjError) {
6884  }
6885  }
6886 }
6887 
6889  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6890 {
6891  if (m_trackerType & EDGE_TRACKER) {
6892  try {
6894  }
6895  catch (...) {
6896  std::cerr << "Error in moving edge tracking" << std::endl;
6897  throw;
6898  }
6899  }
6900 
6901 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6902  if (m_trackerType & KLT_TRACKER) {
6903  try {
6905  }
6906  catch (const vpException &e) {
6907  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6908  throw;
6909  }
6910  }
6911 #endif
6912 
6913  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6914  try {
6916  }
6917  catch (...) {
6918  std::cerr << "Error in Depth normal tracking" << std::endl;
6919  throw;
6920  }
6921  }
6922 
6923  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6924  try {
6926  }
6927  catch (...) {
6928  std::cerr << "Error in Depth dense tracking" << std::endl;
6929  throw;
6930  }
6931  }
6932 }
6933 #endif
6934 
6936  const unsigned int pointcloud_width,
6937  const unsigned int pointcloud_height)
6938 {
6939 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6940  // KLT
6941  if (m_trackerType & KLT_TRACKER) {
6942  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6943  vpMbKltTracker::reinit(*ptr_I);
6944  }
6945  }
6946 #endif
6947 
6948  // Looking for new visible face
6949  if (m_trackerType & EDGE_TRACKER) {
6950  bool newvisibleface = false;
6951  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6952 
6953  if (useScanLine) {
6954  faces.computeClippedPolygons(m_cMo, m_cam);
6955  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6956  }
6957  }
6958 
6959  // Depth normal
6960  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6961  vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);
6962 
6963  // Depth dense
6964  if (m_trackerType & DEPTH_DENSE_TRACKER)
6965  vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);
6966 
6967  // Edge
6968  if (m_trackerType & EDGE_TRACKER) {
6970 
6971  vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6972  // Reinit the moving edge for the lines which need it.
6973  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6974 
6975  if (computeProjError) {
6977  }
6978  }
6979 }
6980 
6982  const std::vector<vpColVector> *const point_cloud,
6983  const unsigned int pointcloud_width,
6984  const unsigned int pointcloud_height)
6985 {
6986  if (m_trackerType & EDGE_TRACKER) {
6987  try {
6989  }
6990  catch (...) {
6991  std::cerr << "Error in moving edge tracking" << std::endl;
6992  throw;
6993  }
6994  }
6995 
6996 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6997  if (m_trackerType & KLT_TRACKER) {
6998  try {
7000  }
7001  catch (const vpException &e) {
7002  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
7003  throw;
7004  }
7005  }
7006 #endif
7007 
7008  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
7009  try {
7010  vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
7011  }
7012  catch (...) {
7013  std::cerr << "Error in Depth tracking" << std::endl;
7014  throw;
7015  }
7016  }
7017 
7018  if (m_trackerType & DEPTH_DENSE_TRACKER) {
7019  try {
7020  vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
7021  }
7022  catch (...) {
7023  std::cerr << "Error in Depth dense tracking" << std::endl;
7024  throw;
7025  }
7026  }
7027 }
7028 
7030  const vpMatrix *const point_cloud,
7031  const unsigned int pointcloud_width,
7032  const unsigned int pointcloud_height)
7033 {
7034  if (m_trackerType & EDGE_TRACKER) {
7035  try {
7037  }
7038  catch (...) {
7039  std::cerr << "Error in moving edge tracking" << std::endl;
7040  throw;
7041  }
7042  }
7043 
7044 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7045  if (m_trackerType & KLT_TRACKER) {
7046  try {
7048  }
7049  catch (const vpException &e) {
7050  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
7051  throw;
7052  }
7053  }
7054 #endif
7055 
7056  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
7057  try {
7058  vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
7059  }
7060  catch (...) {
7061  std::cerr << "Error in Depth tracking" << std::endl;
7062  throw;
7063  }
7064  }
7065 
7066  if (m_trackerType & DEPTH_DENSE_TRACKER) {
7067  try {
7068  vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
7069  }
7070  catch (...) {
7071  std::cerr << "Error in Depth dense tracking" << std::endl;
7072  throw;
7073  }
7074  }
7075 }
7076 
7078  const vpImage<vpRGBa> *const I_color, const std::string &cad_name,
7079  const vpHomogeneousMatrix &cMo, bool verbose,
7080  const vpHomogeneousMatrix &T)
7081 {
7082  m_cMo.eye();
7083 
7084  // Edge
7085  vpMbtDistanceLine *l;
7087  vpMbtDistanceCircle *ci;
7088 
7089  for (unsigned int i = 0; i < scales.size(); i++) {
7090  if (scales[i]) {
7091  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
7092  l = *it;
7093  if (l != nullptr)
7094  delete l;
7095  l = nullptr;
7096  }
7097 
7098  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
7099  ++it) {
7100  cy = *it;
7101  if (cy != nullptr)
7102  delete cy;
7103  cy = nullptr;
7104  }
7105 
7106  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
7107  ci = *it;
7108  if (ci != nullptr)
7109  delete ci;
7110  ci = nullptr;
7111  }
7112 
7113  lines[i].clear();
7114  cylinders[i].clear();
7115  circles[i].clear();
7116  }
7117  }
7118 
7119  nline = 0;
7120  ncylinder = 0;
7121  ncircle = 0;
7122  nbvisiblepolygone = 0;
7123 
7124  // KLT
7125 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7126  // delete the Klt Polygon features
7127  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
7128  vpMbtDistanceKltPoints *kltpoly = *it;
7129  if (kltpoly != nullptr) {
7130  delete kltpoly;
7131  }
7132  kltpoly = nullptr;
7133  }
7134  kltPolygons.clear();
7135 
7136  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
7137  ++it) {
7138  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
7139  if (kltPolyCylinder != nullptr) {
7140  delete kltPolyCylinder;
7141  }
7142  kltPolyCylinder = nullptr;
7143  }
7144  kltCylinders.clear();
7145 
7146  // delete the structures used to display circles
7147  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
7148  ci = *it;
7149  if (ci != nullptr) {
7150  delete ci;
7151  }
7152  ci = nullptr;
7153  }
7154  circles_disp.clear();
7155 
7156  firstInitialisation = true;
7157 
7158 #endif
7159 
7160  // Depth normal
7161  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
7162  delete m_depthNormalFaces[i];
7163  m_depthNormalFaces[i] = nullptr;
7164  }
7165  m_depthNormalFaces.clear();
7166 
7167  // Depth dense
7168  for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
7169  delete m_depthDenseFaces[i];
7170  m_depthDenseFaces[i] = nullptr;
7171  }
7172  m_depthDenseFaces.clear();
7173 
7174  faces.reset();
7175 
7176  loadModel(cad_name, verbose, T);
7177  if (I) {
7178  initFromPose(*I, cMo);
7179  }
7180  else {
7181  initFromPose(*I_color, cMo);
7182  }
7183 }
7184 
7185 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
7186  const vpHomogeneousMatrix &cMo, bool verbose,
7187  const vpHomogeneousMatrix &T)
7188 {
7189  reInitModel(&I, nullptr, cad_name, cMo, verbose, T);
7190 }
7191 
7192 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
7193  const vpHomogeneousMatrix &cMo, bool verbose,
7194  const vpHomogeneousMatrix &T)
7195 {
7196  reInitModel(nullptr, &I_color, cad_name, cMo, verbose, T);
7197 }
7198 
7199 void vpMbGenericTracker::TrackerWrapper::resetTracker()
7200 {
7202 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7204 #endif
7207 }
7208 
7209 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
7210 {
7211  m_cam = cam;
7212 
7214 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7216 #endif
7219 }
7220 
7221 void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags) { vpMbEdgeTracker::setClipping(flags); }
7222 
7223 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
7224 {
7226 }
7227 
7228 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
7229 {
7231 }
7232 
7233 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
7234 {
7236 #ifdef VISP_HAVE_OGRE
7237  faces.getOgreContext()->setWindowName("TrackerWrapper");
7238 #endif
7239 }
7240 
7242  const vpImage<vpRGBa> *const I_color, const vpHomogeneousMatrix &cdMo)
7243 {
7244  bool performKltSetPose = false;
7245  if (I_color) {
7246  vpImageConvert::convert(*I_color, m_I);
7247  }
7248 
7249 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7250  if (m_trackerType & KLT_TRACKER) {
7251  performKltSetPose = true;
7252 
7253  if (useScanLine || clippingFlag > 3) {
7254  m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7255  }
7256 
7257  vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
7258  }
7259 #endif
7260 
7261  if (!performKltSetPose) {
7262  m_cMo = cdMo;
7263  init(I ? *I : m_I);
7264  return;
7265  }
7266 
7267  if (m_trackerType & EDGE_TRACKER) {
7269  }
7270 
7271  if (useScanLine) {
7272  faces.computeClippedPolygons(m_cMo, m_cam);
7273  faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7274  }
7275 
7276 #if 0
7277  if (m_trackerType & EDGE_TRACKER) {
7278  initPyramid(I, Ipyramid);
7279 
7280  unsigned int i = (unsigned int)scales.size();
7281  do {
7282  i--;
7283  if (scales[i]) {
7284  downScale(i);
7285  vpMbEdgeTracker::initMovingEdge(*Ipyramid[i], cMo);
7286  upScale(i);
7287  }
7288  } while (i != 0);
7289 
7290  cleanPyramid(Ipyramid);
7291  }
7292 #else
7293  if (m_trackerType & EDGE_TRACKER) {
7294  vpMbEdgeTracker::initMovingEdge(I ? *I : m_I, m_cMo);
7295  }
7296 #endif
7297 
7298  // Depth normal
7299  vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7300 
7301  // Depth dense
7302  vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7303 }
7304 
7306 {
7307  setPose(&I, nullptr, cdMo);
7308 }
7309 
7311 {
7312  setPose(nullptr, &I_color, cdMo);
7313 }
7314 
7315 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
7316 {
7318 }
7319 
7320 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
7321 {
7323 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7325 #endif
7328 }
7329 
7330 void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
7331 {
7332  if ((type & (EDGE_TRACKER |
7333 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7334  KLT_TRACKER |
7335 #endif
7336  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7337  throw vpException(vpTrackingException::badValue, "bad value for tracker type: !", type);
7338  }
7339 
7340  m_trackerType = type;
7341 }
7342 
7343 void vpMbGenericTracker::TrackerWrapper::testTracking()
7344 {
7345  if (m_trackerType & EDGE_TRACKER) {
7347  }
7348 }
7349 
7351 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7352  I
7353 #endif
7354 )
7355 {
7356  if ((m_trackerType & (EDGE_TRACKER
7357 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7358  | KLT_TRACKER
7359 #endif
7360  )) == 0) {
7361  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7362  return;
7363  }
7364 
7365 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7366  track(&I, nullptr);
7367 #endif
7368 }
7369 
7371 {
7372  // not exposed to the public API, only for debug
7373 }
7374 
#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
/*!
  Track from an optional grayscale image and an optional PCL point cloud,
  dispatching to the enabled sub-trackers and running the VVS optimization.

  \param ptr_I : Grayscale image; required when edge or KLT tracking is enabled.
  \param point_cloud : Point cloud; required when a depth tracker is enabled.
  \exception vpException::fatalError : If a required input is nullptr.
*/
void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
                                               const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  // At least one supported tracker must be enabled.
  if ((m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                        KLT_TRACKER |
#endif
                        DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
    std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
    return;
  }

  // Image-based trackers need the grayscale image.
  if (m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                       | KLT_TRACKER
#endif
                       ) &&
      ptr_I == nullptr) {
    throw vpException(vpException::fatalError, "Image pointer is nullptr!");
  }

  // Depth-based trackers need the point cloud.
  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) { // point_cloud == nullptr
    throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
  }

  // Back-up cMo in case of exception
  vpHomogeneousMatrix cMo_1 = m_cMo;
  try {
    preTracking(ptr_I, point_cloud);

    try {
      computeVVS(ptr_I);
    }
    catch (...) {
      covarianceMatrix = -1;
      throw; // throw the original exception
    }

    if (m_trackerType == EDGE_TRACKER)
      testTracking();

    postTracking(ptr_I, point_cloud);

  }
  catch (const vpException &e) {
    // Restore the previous pose before rethrowing so the tracker state stays usable.
    std::cerr << "Exception: " << e.what() << std::endl;
    m_cMo = cMo_1;
    throw; // rethrowing the original exception
  }
}
#endif
7427 END_VISP_NAMESPACE
void setWindowName(const Ogre::String &n)
Definition: vpAROgre.h:265
unsigned int getCols() const
Definition: vpArray2D.h:337
void resize(unsigned int nrows, unsigned int ncols, bool flagNullify=true, bool recopy_=true)
Definition: vpArray2D.h:362
unsigned int getRows() const
Definition: vpArray2D.h:347
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
Definition: vpColVector.h:191
void insert(unsigned int i, const vpColVector &v)
void resize(unsigned int i, bool flagNullify=true)
Definition: vpColVector.h:1143
Class to define RGB colors available for display functionalities.
Definition: vpColor.h:157
static const vpColor red
Definition: vpColor.h:217
static const vpColor cyan
Definition: vpColor.h:226
static const vpColor blue
Definition: vpColor.h:223
static const vpColor purple
Definition: vpColor.h:228
static const vpColor yellow
Definition: vpColor.h:225
static const vpColor green
Definition: vpColor.h:220
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayEllipse(const vpImage< unsigned char > &I, const vpImagePoint &center, const double &coef1, const double &coef2, const double &coef3, bool use_normalized_centered_moments, const vpColor &color, unsigned int thickness=1, bool display_center=false, bool display_arc=false)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void displayArrow(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color=vpColor::white, unsigned int w=4, unsigned int h=2, unsigned int thickness=1)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition: vpException.h:60
@ ioError
I/O error.
Definition: vpException.h:67
@ badValue
Used to indicate that a value is not in the allowed range.
Definition: vpException.h:73
@ notInitialized
Used to indicate that a parameter is not initialized.
Definition: vpException.h:74
@ fatalError
Fatal error.
Definition: vpException.h:72
const char * what() const
Definition: vpException.cpp:71
static vpHomogeneousMatrix direct(const vpColVector &v)
Implementation of an homogeneous matrix and operations on such kind of matrices.
VP_DEPRECATED void init()
vpHomogeneousMatrix inverse() const
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:82
unsigned int getWidth() const
Definition: vpImage.h:242
unsigned int getHeight() const
Definition: vpImage.h:181
void init(unsigned int h, unsigned int w, Type value)
Definition: vpImage.h:372
static std::string getFileExtension(const std::string &pathname, bool checkFile=false)
Definition: vpIoTools.cpp:1124
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition: vpKltOpencv.h:74
static double rad(double deg)
Definition: vpMath.h:129
static double sqr(double x)
Definition: vpMath.h:203
static bool equal(double x, double y, double threshold=0.001)
Definition: vpMath.h:458
static double deg(double rad)
Definition: vpMath.h:119
Implementation of a matrix and operations on matrices.
Definition: vpMatrix.h:169
vpMatrix AtA() const
void insert(const vpMatrix &A, unsigned int r, unsigned int c)
Definition: vpMatrix.cpp:1133
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
void computeVisibility(unsigned int width, unsigned int height)
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
virtual void computeVVSInit() VP_OVERRIDE
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void track(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void setPose(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud, const vpHomogeneousMatrix &cdMo)
virtual void computeVVSInit() VP_OVERRIDE
void computeVisibility(unsigned int width, unsigned int height)
void computeVVS(const vpImage< unsigned char > &_I, unsigned int lvl)
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
void computeProjectionError(const vpImage< unsigned char > &_I)
virtual void testTracking() VP_OVERRIDE
virtual void setNearClippingDistance(const double &dist) VP_OVERRIDE
virtual void computeVVSWeights()
virtual void computeVVSInit() VP_OVERRIDE
void initMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
void trackMovingEdge(const vpImage< unsigned char > &I)
virtual void setFarClippingDistance(const double &dist) VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
void computeVVSFirstPhaseFactor(const vpImage< unsigned char > &I, unsigned int lvl=0)
void updateMovingEdge(const vpImage< unsigned char > &I)
unsigned int initMbtTracking(unsigned int &nberrors_lines, unsigned int &nberrors_cylinders, unsigned int &nberrors_circles)
virtual void setClipping(const unsigned int &flags) VP_OVERRIDE
void reinitMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
void resetTracker() VP_OVERRIDE
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
void setMovingEdge(const vpMe &me)
virtual void computeVVSInteractionMatrixAndResidu(const vpImage< unsigned char > &I)
virtual void setCameraParameters(const vpCameraParameters &cam) VP_OVERRIDE
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
void visibleFace(const vpImage< unsigned char > &_I, const vpHomogeneousMatrix &_cMo, bool &newvisibleline)
virtual void setUseKltTracking(const std::string &name, const bool &useKltTracking)
virtual std::vector< std::vector< double > > getFeaturesForDisplay()
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual double getGoodMovingEdgesRatioThreshold() const
virtual int getTrackerType() const
virtual void setOgreVisibilityTest(const bool &v) VP_OVERRIDE
std::map< std::string, TrackerWrapper * > m_mapOfTrackers
virtual std::pair< std::vector< vpPolygon >, std::vector< std::vector< vpPoint > > > getPolygonFaces(bool orderPolygons=true, bool useVisibility=true, bool clipPolygon=false) VP_OVERRIDE
virtual void setKltMaskBorder(const unsigned int &e)
virtual void setProjectionErrorComputation(const bool &flag) VP_OVERRIDE
vpMbGenericTracker()
json namespace shortcut
virtual void setDisplayFeatures(bool displayF) VP_OVERRIDE
unsigned int m_nb_feat_edge
Number of moving-edges features.
virtual void setKltThresholdAcceptation(double th)
std::map< std::string, vpHomogeneousMatrix > m_mapOfCameraTransformationMatrix
virtual double getKltThresholdAcceptation() const
virtual double computeCurrentProjectionError(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo, const vpCameraParameters &_cam) VP_OVERRIDE
virtual void getLcircle(std::list< vpMbtDistanceCircle * > &circlesList, unsigned int level=0) const
virtual void getCameraParameters(vpCameraParameters &cam) const
Definition: vpMbTracker.h:250
virtual void setDepthDenseFilteringMaxDistance(double maxDistance)
virtual std::list< vpMbtDistanceKltCylinder * > & getFeaturesKltCylinder()
virtual void setProjectionErrorDisplayArrowLength(unsigned int length) VP_OVERRIDE
virtual void initFromPoints(const vpImage< unsigned char > &I, const std::string &initFile)
virtual std::vector< cv::Point2f > getKltPoints() const
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
virtual void setGoodNbRayCastingAttemptsRatio(const double &ratio)
virtual void getPose(vpHomogeneousMatrix &cMo) const
Definition: vpMbTracker.h:416
virtual void setGoodMovingEdgesRatioThreshold(double threshold)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual vpMbtPolygon * getPolygon(unsigned int index) VP_OVERRIDE
virtual void setDepthDenseFilteringMinDistance(double minDistance)
virtual void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
virtual void setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name="") VP_OVERRIDE
virtual void setLod(bool useLod, const std::string &name="") VP_OVERRIDE
virtual void setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
vpColVector m_w
Robust weights.
virtual void saveConfigFile(const std::string &settingsFile) const
virtual std::string getReferenceCameraName() const
virtual std::map< std::string, int > getCameraTrackerTypes() const
virtual void loadConfigFileJSON(const std::string &configFile, bool verbose=true)
virtual void testTracking() VP_OVERRIDE
unsigned int m_nb_feat_depthDense
Number of depth dense features.
virtual void setDepthNormalPclPlaneEstimationMethod(int method)
virtual void computeProjectionError()
virtual void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual void setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
virtual void setMovingEdge(const vpMe &me)
virtual void setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
virtual void setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
virtual void setAngleDisappear(const double &a) VP_OVERRIDE
virtual void setFeatureFactors(const std::map< vpTrackerType, double > &mapOfFeatureFactors)
virtual void computeVVSInit() VP_OVERRIDE
virtual void init(const vpImage< unsigned char > &I) VP_OVERRIDE
virtual void initClick(const vpImage< unsigned char > &I, const std::string &initFile, bool displayHelp=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual void getLcylinder(std::list< vpMbtDistanceCylinder * > &cylindersList, unsigned int level=0) const
virtual std::vector< vpImagePoint > getKltImagePoints() const
virtual void setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
virtual std::vector< std::string > getCameraNames() const
virtual void setDepthDenseFilteringMethod(int method)
virtual void track(const vpImage< unsigned char > &I) VP_OVERRIDE
vpColVector m_weightedError
Weighted error.
virtual std::list< vpMbtDistanceKltPoints * > & getFeaturesKlt()
virtual void loadModel(const std::string &modelFile, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix()) VP_OVERRIDE
vpMatrix m_L
Interaction matrix.
virtual void setKltOpencv(const vpKltOpencv &t)
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt) VP_OVERRIDE
virtual void setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
virtual void computeVVSWeights()
virtual void loadConfigFileXML(const std::string &configFile, bool verbose=true)
virtual void setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
virtual void setMask(const vpImage< bool > &mask) VP_OVERRIDE
virtual void setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
virtual unsigned int getKltMaskBorder() const
vpColVector m_error
(s - s*)
virtual void setReferenceCameraName(const std::string &referenceCameraName)
virtual void setMinLineLengthThresh(double minLineLengthThresh, const std::string &name="") VP_OVERRIDE
virtual void setClipping(const unsigned int &flags) VP_OVERRIDE
std::map< vpTrackerType, double > m_mapOfFeatureFactors
Ponderation between each feature type in the VVS stage.
virtual void computeVVS(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages)
virtual std::list< vpMbtDistanceCircle * > & getFeaturesCircle()
virtual void getLline(std::list< vpMbtDistanceLine * > &linesList, unsigned int level=0) const
virtual void setTrackerType(int type)
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
unsigned int m_nb_feat_klt
Number of klt features.
virtual unsigned int getNbPoints(unsigned int level=0) const
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
virtual vpKltOpencv getKltOpencv() const
virtual int getKltNbPoints() const
unsigned int m_nb_feat_depthNormal
Number of depth normal features.
virtual void setOgreShowConfigDialog(bool showConfigDialog) VP_OVERRIDE
virtual vpMe getMovingEdge() const
virtual unsigned int getNbPolygon() const VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo) VP_OVERRIDE
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void preTracking(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages, std::map< std::string, pcl::PointCloud< pcl::PointXYZ >::ConstPtr > &mapOfPointClouds)
virtual void setNearClippingDistance(const double &dist) VP_OVERRIDE
virtual ~vpMbGenericTracker() VP_OVERRIDE
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setCameraTransformationMatrix(const std::string &cameraName, const vpHomogeneousMatrix &cameraTransformationMatrix)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness) VP_OVERRIDE
virtual void setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
std::string m_referenceCameraName
Name of the reference camera.
virtual vpMbHiddenFaces< vpMbtPolygon > & getFaces() VP_OVERRIDE
virtual void setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
virtual void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false) VP_OVERRIDE
friend void from_json(const nlohmann::json &j, TrackerWrapper &t)
Load configuration settings from a JSON object for a tracker wrapper.
virtual void loadConfigFile(const std::string &configFile, bool verbose=true) VP_OVERRIDE
virtual void setProjectionErrorDisplay(bool display) VP_OVERRIDE
virtual unsigned int getClipping() const
Definition: vpMbTracker.h:258
virtual std::map< int, vpImagePoint > getKltImagePointsWithId() const
virtual void setAngleAppear(const double &a) VP_OVERRIDE
virtual void setFarClippingDistance(const double &dist) VP_OVERRIDE
vpAROgre * getOgreContext()
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
virtual void initCircle(const vpPoint &, const vpPoint &, const vpPoint &, double, int, const std::string &name="") VP_OVERRIDE
void resetTracker() VP_OVERRIDE
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
void setCameraParameters(const vpCameraParameters &cam) VP_OVERRIDE
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
void preTracking(const vpImage< unsigned char > &I)
virtual void initCylinder(const vpPoint &, const vpPoint &, double, int, const std::string &name="") VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void computeVVSInit() VP_OVERRIDE
bool postTracking(const vpImage< unsigned char > &I, vpColVector &w)
virtual void reinit(const vpImage< unsigned char > &I)
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo) VP_OVERRIDE
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void setProjectionErrorDisplayArrowLength(unsigned int length)
Definition: vpMbTracker.h:597
double m_lambda
Gain of the virtual visual servoing stage.
Definition: vpMbTracker.h:189
virtual void setMaxIter(unsigned int max)
Definition: vpMbTracker.h:547
bool modelInitialised
Definition: vpMbTracker.h:125
bool m_projectionErrorDisplay
Display gradient and model orientation for projection error computation.
Definition: vpMbTracker.h:215
virtual void setOgreShowConfigDialog(bool showConfigDialog)
Definition: vpMbTracker.h:652
virtual void setMask(const vpImage< bool > &mask)
Definition: vpMbTracker.h:566
virtual void getCameraParameters(vpCameraParameters &cam) const
Definition: vpMbTracker.h:250
vpImage< unsigned char > m_I
Grayscale image buffer, used when passing color images.
Definition: vpMbTracker.h:225
virtual void setDisplayFeatures(bool displayF)
Definition: vpMbTracker.h:520
virtual vpHomogeneousMatrix getPose() const
Definition: vpMbTracker.h:425
bool m_computeInteraction
Definition: vpMbTracker.h:187
vpMatrix oJo
The Degrees of Freedom to estimate.
Definition: vpMbTracker.h:117
vpMatrix covarianceMatrix
Covariance matrix.
Definition: vpMbTracker.h:132
double m_initialMu
Initial Mu for Levenberg Marquardt optimization loop.
Definition: vpMbTracker.h:195
bool computeProjError
Definition: vpMbTracker.h:135
vpHomogeneousMatrix m_cMo
The current pose.
Definition: vpMbTracker.h:115
virtual void computeVVSCheckLevenbergMarquardt(unsigned int iter, vpColVector &error, const vpColVector &m_error_prev, const vpHomogeneousMatrix &cMoPrev, double &mu, bool &reStartFromLastIncrement, vpColVector *const w=nullptr, const vpColVector *const m_w_prev=nullptr)
vpCameraParameters m_cam
The camera parameters.
Definition: vpMbTracker.h:113
double projectionError
Definition: vpMbTracker.h:138
double m_stopCriteriaEpsilon
Epsilon threshold to stop the VVS optimization loop.
Definition: vpMbTracker.h:193
bool useOgre
Use Ogre3d for visibility tests.
Definition: vpMbTracker.h:157
virtual void computeVVSWeights(vpRobust &robust, const vpColVector &error, vpColVector &w)
vpMbHiddenFaces< vpMbtPolygon > faces
Set of faces describing the object.
Definition: vpMbTracker.h:145
virtual void setCameraParameters(const vpCameraParameters &cam)
Definition: vpMbTracker.h:490
virtual void setAngleDisappear(const double &a)
Definition: vpMbTracker.h:483
virtual void setInitialMu(double mu)
Definition: vpMbTracker.h:531
virtual void setScanLineVisibilityTest(const bool &v)
Definition: vpMbTracker.h:610
virtual void setOgreVisibilityTest(const bool &v)
virtual void computeCovarianceMatrixVVS(const bool isoJoIdentity, const vpColVector &w_true, const vpHomogeneousMatrix &cMoPrev, const vpMatrix &L_true, const vpMatrix &LVJ_true, const vpColVector &error)
vpMbtOptimizationMethod m_optimizationMethod
Optimization method used.
Definition: vpMbTracker.h:142
virtual void computeVVSPoseEstimation(const bool isoJoIdentity, unsigned int iter, vpMatrix &L, vpMatrix &LTL, vpColVector &R, const vpColVector &error, vpColVector &error_prev, vpColVector &LTR, double &mu, vpColVector &v, const vpColVector *const w=nullptr, vpColVector *const m_w_prev=nullptr)
bool displayFeatures
If true, the features are displayed.
Definition: vpMbTracker.h:140
virtual void setProjectionErrorDisplay(bool display)
Definition: vpMbTracker.h:592
double angleDisappears
Angle used to detect a face disappearance.
Definition: vpMbTracker.h:149
virtual void setLambda(double gain)
Definition: vpMbTracker.h:538
virtual void setNearClippingDistance(const double &dist)
virtual void setFarClippingDistance(const double &dist)
double distFarClip
Distance for far clipping.
Definition: vpMbTracker.h:153
bool m_isoJoIdentity
Boolean to know if oJo is identity (for fast computation)
Definition: vpMbTracker.h:119
bool useScanLine
Use Scanline for visibility tests.
Definition: vpMbTracker.h:160
virtual void setProjectionErrorComputation(const bool &flag)
Definition: vpMbTracker.h:587
virtual void setClipping(const unsigned int &flags)
double angleAppears
Angle used to detect a face appearance.
Definition: vpMbTracker.h:147
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt)
Definition: vpMbTracker.h:560
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness)
Definition: vpMbTracker.h:605
virtual void setAngleAppear(const double &a)
Definition: vpMbTracker.h:472
bool computeCovariance
Flag used to specify if the covariance matrix has to be computed or not.
Definition: vpMbTracker.h:130
double distNearClip
Distance for near clipping.
Definition: vpMbTracker.h:151
unsigned int m_maxIter
Maximum number of iterations of the virtual visual servoing stage.
Definition: vpMbTracker.h:191
unsigned int clippingFlag
Flags specifying which clipping to used.
Definition: vpMbTracker.h:155
vpMbHiddenFaces< vpMbtPolygon > m_projectionErrorFaces
Set of faces describing the object, used for projection error.
Definition: vpMbTracker.h:204
virtual void loadConfigFile(const std::string &configFile, bool verbose=true)
Manage a circle used in the model-based tracker.
Manage a cylinder used in the model-based tracker.
Implementation of a polygon of the model containing points of interest. It is used by the model-based...
Implementation of a polygon of the model containing points of interest. It is used by the model-based...
Manage the line of a polygon used in the model-based tracker.
Implementation of a polygon of the model used by the model-based tracker.
Definition: vpMbtPolygon.h:60
Parse an Xml file to extract configuration parameters of a mbtConfig object.
@ TOO_NEAR
Point not tracked anymore, since too near from its neighbor.
Definition: vpMeSite.h:93
@ THRESHOLD
Point not tracked due to the likelihood that is below the threshold, but retained in the ME list.
Definition: vpMeSite.h:91
@ CONTRAST
Point not tracked due to a contrast problem, but retained in the ME list.
Definition: vpMeSite.h:87
@ M_ESTIMATOR
Point detected as an outlier during virtual visual-servoing.
Definition: vpMeSite.h:92
@ NO_SUPPRESSION
Point successfully tracked.
Definition: vpMeSite.h:86
Definition: vpMe.h:134
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition: vpPoint.h:79
Error that can be emitted by the vpTracker class and its derivatives.
@ notEnoughPointError
Not enough point to track.
@ initializationError
Tracker initialization error.
@ fatalError
Tracker fatal error.
vpVelocityTwistMatrix & build(const vpTranslationVector &t, const vpRotationMatrix &R)