// Visual Servoing Platform  version 3.6.1 under development (2025-02-19)
// vpMbGenericTracker.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Generic model-based tracker.
33  *
34 *****************************************************************************/
35 
36 #include <visp3/mbt/vpMbGenericTracker.h>
37 
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/core/vpIoTools.h>
42 #include <visp3/mbt/vpMbtXmlGenericParser.h>
43 
44 #ifdef VISP_HAVE_NLOHMANN_JSON
45 #include VISP_NLOHMANN_JSON(json.hpp)
46 using json = nlohmann::json;
47 #endif
48 
49 BEGIN_VISP_NAMESPACE
51  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
52  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
53  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
54 {
55  m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
56 
57  // Add default camera transformation matrix
59 
60  // Add default ponderation between each feature type
62 
63 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
64  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
65 #endif
66 
69 }
70 
71 vpMbGenericTracker::vpMbGenericTracker(unsigned int nbCameras, int trackerType)
72  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
73  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
74  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
75 {
76  if (nbCameras == 0) {
77  throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
78  }
79  else if (nbCameras == 1) {
80  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);
81 
82  // Add default camera transformation matrix
84  }
85  else {
86  for (unsigned int i = 1; i <= nbCameras; i++) {
87  std::stringstream ss;
88  ss << "Camera" << i;
89  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);
90 
91  // Add default camera transformation matrix
93  }
94 
95  // Set by default the reference camera to the first one
96  m_referenceCameraName = m_mapOfTrackers.begin()->first;
97  }
98 
99  // Add default ponderation between each feature type
101 
102 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
103  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
104 #endif
105 
108 }
109 
110 vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
111  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
112  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
113  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
114 {
115  if (trackerTypes.empty()) {
116  throw vpException(vpException::badValue, "There is no camera!");
117  }
118 
119  if (trackerTypes.size() == 1) {
120  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);
121 
122  // Add default camera transformation matrix
124  }
125  else {
126  for (size_t i = 1; i <= trackerTypes.size(); i++) {
127  std::stringstream ss;
128  ss << "Camera" << i;
129  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);
130 
131  // Add default camera transformation matrix
133  }
134 
135  // Set by default the reference camera to the first one
136  m_referenceCameraName = m_mapOfTrackers.begin()->first;
137  }
138 
139  // Add default ponderation between each feature type
141 
142 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
143  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
144 #endif
145 
148 }
149 
150 vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
151  const std::vector<int> &trackerTypes)
152  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
153  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
154  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
155 {
156  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
158  "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
159  }
160 
161  for (size_t i = 0; i < cameraNames.size(); i++) {
162  m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);
163 
164  // Add default camera transformation matrix
166  }
167 
168  // Set by default the reference camera to the first one
169  m_referenceCameraName = m_mapOfTrackers.begin()->first;
170 
171  // Add default ponderation between each feature type
173 
174 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
175  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
176 #endif
177 
180 }
181 
183 {
184  for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
185  ++it) {
186  delete it->second;
187  it->second = nullptr;
188  }
189 }
190 
209  const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam)
210 {
211  double rawTotalProjectionError = 0.0;
212  unsigned int nbTotalFeaturesUsed = 0;
213 
214  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
215  it != m_mapOfTrackers.end(); ++it) {
216  TrackerWrapper *tracker = it->second;
217 
218  unsigned int nbFeaturesUsed = 0;
219  double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
220 
221  if (nbFeaturesUsed > 0) {
222  nbTotalFeaturesUsed += nbFeaturesUsed;
223  rawTotalProjectionError += curProjError;
224  }
225  }
226 
227  if (nbTotalFeaturesUsed > 0) {
228  return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
229  }
230 
231  return 90.0;
232 }
233 
252  const vpHomogeneousMatrix &_cMo,
253  const vpCameraParameters &_cam)
254 {
256  vpImageConvert::convert(I_color, I); // FS: Shouldn't we use here m_I that was converted in track() ?
257 
258  return computeCurrentProjectionError(I, _cMo, _cam);
259 }
260 
262 {
263  if (computeProjError) {
264  double rawTotalProjectionError = 0.0;
265  unsigned int nbTotalFeaturesUsed = 0;
266 
267  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
268  it != m_mapOfTrackers.end(); ++it) {
269  TrackerWrapper *tracker = it->second;
270 
271  double curProjError = tracker->getProjectionError();
272  unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
273 
274  if (nbFeaturesUsed > 0) {
275  nbTotalFeaturesUsed += nbFeaturesUsed;
276  rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
277  }
278  }
279 
280  if (nbTotalFeaturesUsed > 0) {
281  projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
282  }
283  else {
284  projectionError = 90.0;
285  }
286  }
287  else {
288  projectionError = 90.0;
289  }
290 }
291 
292 void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
293 {
294  computeVVSInit(mapOfImages);
295 
296  if (m_error.getRows() < 4) {
297  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
298  }
299 
300  double normRes = 0;
301  double normRes_1 = -1;
302  unsigned int iter = 0;
303 
304  vpMatrix LTL;
305  vpColVector LTR, v;
306  vpColVector error_prev;
307 
308  double mu = m_initialMu;
309  vpHomogeneousMatrix cMo_prev;
310 
311  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
312  if (isoJoIdentity)
313  oJo.eye();
314 
315  // Covariance
316  vpColVector W_true(m_error.getRows());
317  vpMatrix L_true, LVJ_true;
318 
319  // Create the map of VelocityTwistMatrices
320  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
321  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
322  it != m_mapOfCameraTransformationMatrix.end(); ++it) {
324  cVo.buildFrom(it->second);
325  mapOfVelocityTwist[it->first] = cVo;
326  }
327 
328  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
329 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
330  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
331 #endif
332  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
333  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];
334 
335  m_nb_feat_edge = 0;
336  m_nb_feat_klt = 0;
339 
340  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
341  computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);
342 
343  bool reStartFromLastIncrement = false;
344  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
345  if (reStartFromLastIncrement) {
346  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
347  it != m_mapOfTrackers.end(); ++it) {
348  TrackerWrapper *tracker = it->second;
349 
350  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;
351 
352 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
353  vpHomogeneousMatrix c_curr_tTc_curr0 =
354  m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
355  tracker->ctTc0 = c_curr_tTc_curr0;
356 #endif
357  }
358  }
359 
360  if (!reStartFromLastIncrement) {
362 
363  if (computeCovariance) {
364  L_true = m_L;
365  if (!isoJoIdentity) {
367  cVo.buildFrom(m_cMo);
368  LVJ_true = (m_L * (cVo * oJo));
369  }
370  }
371 
373  if (iter == 0) {
374  // If all the 6 dof should be estimated, we check if the interaction
375  // matrix is full rank. If not we remove automatically the dof that
376  // cannot be estimated. This is particularly useful when considering
377  // circles (rank 5) and cylinders (rank 4)
378  if (isoJoIdentity) {
379  cVo.buildFrom(m_cMo);
380 
381  vpMatrix K; // kernel
382  unsigned int rank = (m_L * cVo).kernel(K);
383  if (rank == 0) {
384  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
385  }
386 
387  if (rank != 6) {
388  vpMatrix I; // Identity
389  I.eye(6);
390  oJo = I - K.AtA();
391  isoJoIdentity = false;
392  }
393  }
394  }
395 
396  // Weighting
397  double num = 0;
398  double den = 0;
399 
400  unsigned int start_index = 0;
401  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
402  it != m_mapOfTrackers.end(); ++it) {
403  TrackerWrapper *tracker = it->second;
404 
405  if (tracker->m_trackerType & EDGE_TRACKER) {
406  for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
407  double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
408  W_true[start_index + i] = wi;
409  m_weightedError[start_index + i] = wi * m_error[start_index + i];
410 
411  num += wi * vpMath::sqr(m_error[start_index + i]);
412  den += wi;
413 
414  for (unsigned int j = 0; j < m_L.getCols(); j++) {
415  m_L[start_index + i][j] *= wi;
416  }
417  }
418 
419  start_index += tracker->m_error_edge.getRows();
420  }
421 
422 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
423  if (tracker->m_trackerType & KLT_TRACKER) {
424  for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
425  double wi = tracker->m_w_klt[i] * factorKlt;
426  W_true[start_index + i] = wi;
427  m_weightedError[start_index + i] = wi * m_error[start_index + i];
428 
429  num += wi * vpMath::sqr(m_error[start_index + i]);
430  den += wi;
431 
432  for (unsigned int j = 0; j < m_L.getCols(); j++) {
433  m_L[start_index + i][j] *= wi;
434  }
435  }
436 
437  start_index += tracker->m_error_klt.getRows();
438  }
439 #endif
440 
441  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
442  for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
443  double wi = tracker->m_w_depthNormal[i] * factorDepth;
444  W_true[start_index + i] = wi;
445  m_weightedError[start_index + i] = wi * m_error[start_index + i];
446 
447  num += wi * vpMath::sqr(m_error[start_index + i]);
448  den += wi;
449 
450  for (unsigned int j = 0; j < m_L.getCols(); j++) {
451  m_L[start_index + i][j] *= wi;
452  }
453  }
454 
455  start_index += tracker->m_error_depthNormal.getRows();
456  }
457 
458  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
459  for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
460  double wi = tracker->m_w_depthDense[i] * factorDepthDense;
461  W_true[start_index + i] = wi;
462  m_weightedError[start_index + i] = wi * m_error[start_index + i];
463 
464  num += wi * vpMath::sqr(m_error[start_index + i]);
465  den += wi;
466 
467  for (unsigned int j = 0; j < m_L.getCols(); j++) {
468  m_L[start_index + i][j] *= wi;
469  }
470  }
471 
472  start_index += tracker->m_error_depthDense.getRows();
473  }
474  }
475 
476  normRes_1 = normRes;
477  normRes = sqrt(num / den);
478 
479  computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
480 
481  cMo_prev = m_cMo;
482 
484 
485 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
486  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
487  it != m_mapOfTrackers.end(); ++it) {
488  TrackerWrapper *tracker = it->second;
489 
490  vpHomogeneousMatrix c_curr_tTc_curr0 =
491  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
492  tracker->ctTc0 = c_curr_tTc_curr0;
493  }
494 #endif
495 
496  // Update cMo
497  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
498  it != m_mapOfTrackers.end(); ++it) {
499  TrackerWrapper *tracker = it->second;
500  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
501  }
502  }
503 
504  iter++;
505  }
506 
507  // Update features number
508  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
509  it != m_mapOfTrackers.end(); ++it) {
510  TrackerWrapper *tracker = it->second;
511  if (tracker->m_trackerType & EDGE_TRACKER) {
512  m_nb_feat_edge += tracker->m_error_edge.size();
513  }
514 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
515  if (tracker->m_trackerType & KLT_TRACKER) {
516  m_nb_feat_klt += tracker->m_error_klt.size();
517  }
518 #endif
519  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
520  m_nb_feat_depthNormal += tracker->m_error_depthNormal.size();
521  }
522  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
523  m_nb_feat_depthDense += tracker->m_error_depthDense.size();
524  }
525  }
526 
527  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
528 
529  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
530  it != m_mapOfTrackers.end(); ++it) {
531  TrackerWrapper *tracker = it->second;
532 
533  if (tracker->m_trackerType & EDGE_TRACKER) {
534  tracker->updateMovingEdgeWeights();
535  }
536  }
537 }
538 
540 {
541  throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
542 }
543 
544 void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
545 {
546  unsigned int nbFeatures = 0;
547 
548  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
549  it != m_mapOfTrackers.end(); ++it) {
550  TrackerWrapper *tracker = it->second;
551  tracker->computeVVSInit(mapOfImages[it->first]);
552 
553  nbFeatures += tracker->m_error.getRows();
554  }
555 
556  m_L.resize(nbFeatures, 6, false, false);
557  m_error.resize(nbFeatures, false);
558 
559  m_weightedError.resize(nbFeatures, false);
560  m_w.resize(nbFeatures, false);
561  m_w = 1;
562 }
563 
565 {
566  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
567  "computeVVSInteractionMatrixAndR"
568  "esidu() should not be called");
569 }
570 
572  std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
573  std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
574 {
575  unsigned int start_index = 0;
576 
577  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
578  it != m_mapOfTrackers.end(); ++it) {
579  TrackerWrapper *tracker = it->second;
580 
581  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
582 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
583  vpHomogeneousMatrix c_curr_tTc_curr0 =
584  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
585  tracker->ctTc0 = c_curr_tTc_curr0;
586 #endif
587 
588  tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
589 
590  m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
591  m_error.insert(start_index, tracker->m_error);
592 
593  start_index += tracker->m_error.getRows();
594  }
595 }
596 
598 {
599  unsigned int start_index = 0;
600 
601  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
602  it != m_mapOfTrackers.end(); ++it) {
603  TrackerWrapper *tracker = it->second;
604  tracker->computeVVSWeights();
605 
606  m_w.insert(start_index, tracker->m_w);
607  start_index += tracker->m_w.getRows();
608  }
609 }
610 
625  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
626  bool displayFullModel)
627 {
628  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
629  if (it != m_mapOfTrackers.end()) {
630  TrackerWrapper *tracker = it->second;
631  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
632  }
633  else {
634  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
635  }
636 }
637 
652  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
653  bool displayFullModel)
654 {
655  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
656  if (it != m_mapOfTrackers.end()) {
657  TrackerWrapper *tracker = it->second;
658  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
659  }
660  else {
661  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
662  }
663 }
664 
682  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
683  const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
684  unsigned int thickness, bool displayFullModel)
685 {
686  if (m_mapOfTrackers.size() == 2) {
687  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
688  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
689  ++it;
690 
691  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
692  }
693  else {
694  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
695  << std::endl;
696  }
697 }
698 
716  const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
717  const vpCameraParameters &cam2, const vpColor &color, unsigned int thickness,
718  bool displayFullModel)
719 {
720  if (m_mapOfTrackers.size() == 2) {
721  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
722  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
723  ++it;
724 
725  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
726  }
727  else {
728  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
729  << std::endl;
730  }
731 }
732 
744 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
745  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
746  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
747  const vpColor &col, unsigned int thickness, bool displayFullModel)
748 {
749  // Display only for the given images
750  for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
751  it_img != mapOfImages.end(); ++it_img) {
752  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
753  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
754  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
755 
756  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
757  it_cam != mapOfCameraParameters.end()) {
758  TrackerWrapper *tracker = it_tracker->second;
759  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
760  }
761  else {
762  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
763  }
764  }
765 }
766 
778 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
779  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
780  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
781  const vpColor &col, unsigned int thickness, bool displayFullModel)
782 {
783  // Display only for the given images
784  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
785  it_img != mapOfImages.end(); ++it_img) {
786  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
787  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
788  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
789 
790  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
791  it_cam != mapOfCameraParameters.end()) {
792  TrackerWrapper *tracker = it_tracker->second;
793  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
794  }
795  else {
796  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
797  }
798  }
799 }
800 
806 std::vector<std::string> vpMbGenericTracker::getCameraNames() const
807 {
808  std::vector<std::string> cameraNames;
809 
810  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
811  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
812  cameraNames.push_back(it_tracker->first);
813  }
814 
815  return cameraNames;
816 }
817 
819 {
821 }
822 
832 {
833  if (m_mapOfTrackers.size() == 2) {
834  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
835  it->second->getCameraParameters(cam1);
836  ++it;
837 
838  it->second->getCameraParameters(cam2);
839  }
840  else {
841  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
842  << std::endl;
843  }
844 }
845 
851 void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
852 {
853  // Clear the input map
854  mapOfCameraParameters.clear();
855 
856  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
857  it != m_mapOfTrackers.end(); ++it) {
858  vpCameraParameters cam_;
859  it->second->getCameraParameters(cam_);
860  mapOfCameraParameters[it->first] = cam_;
861  }
862 }
863 
870 std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
871 {
872  std::map<std::string, int> trackingTypes;
873 
874  TrackerWrapper *traker;
875  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
876  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
877  traker = it_tracker->second;
878  trackingTypes[it_tracker->first] = traker->getTrackerType();
879  }
880 
881  return trackingTypes;
882 }
883 
892 void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
893 {
894  if (m_mapOfTrackers.size() == 2) {
895  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
896  clippingFlag1 = it->second->getClipping();
897  ++it;
898 
899  clippingFlag2 = it->second->getClipping();
900  }
901  else {
902  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
903  << std::endl;
904  }
905 }
906 
912 void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
913 {
914  mapOfClippingFlags.clear();
915 
916  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
917  it != m_mapOfTrackers.end(); ++it) {
918  TrackerWrapper *tracker = it->second;
919  mapOfClippingFlags[it->first] = tracker->getClipping();
920  }
921 }
922 
929 {
930  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
931  if (it != m_mapOfTrackers.end()) {
932  return it->second->getFaces();
933  }
934 
935  std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
936  return faces;
937 }
938 
945 {
946  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
947  if (it != m_mapOfTrackers.end()) {
948  return it->second->getFaces();
949  }
950 
951  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
952  return faces;
953 }
954 
955 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
959 std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
960 {
961  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
962  if (it != m_mapOfTrackers.end()) {
963  TrackerWrapper *tracker = it->second;
964  return tracker->getFeaturesCircle();
965  }
966  else {
967  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
968  m_referenceCameraName.c_str());
969  }
970 }
971 
975 std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
976 {
977  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
978  if (it != m_mapOfTrackers.end()) {
979  TrackerWrapper *tracker = it->second;
980  return tracker->getFeaturesKltCylinder();
981  }
982  else {
983  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
984  m_referenceCameraName.c_str());
985  }
986 }
987 
991 std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
992 {
993  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
994  if (it != m_mapOfTrackers.end()) {
995  TrackerWrapper *tracker = it->second;
996  return tracker->getFeaturesKlt();
997  }
998  else {
999  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1000  m_referenceCameraName.c_str());
1001  }
1002 }
1003 #endif
1004 
1031 std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
1032 {
1033  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1034 
1035  if (it != m_mapOfTrackers.end()) {
1036  return it->second->getFeaturesForDisplay();
1037  }
1038  else {
1039  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1040  }
1041 
1042  return std::vector<std::vector<double> >();
1043 }
1044 
1070 void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
1071 {
1072  // Clear the input map
1073  mapOfFeatures.clear();
1074 
1075  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1076  it != m_mapOfTrackers.end(); ++it) {
1077  mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1078  }
1079 }
1080 
1091 
1092 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
1101 std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1102 {
1103  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1104  if (it != m_mapOfTrackers.end()) {
1105  TrackerWrapper *tracker = it->second;
1106  return tracker->getKltImagePoints();
1107  }
1108  else {
1109  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1110  }
1111 
1112  return std::vector<vpImagePoint>();
1113 }
1114 
1123 std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1124 {
1125  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1126  if (it != m_mapOfTrackers.end()) {
1127  TrackerWrapper *tracker = it->second;
1128  return tracker->getKltImagePointsWithId();
1129  }
1130  else {
1131  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1132  }
1133 
1134  return std::map<int, vpImagePoint>();
1135 }
1136 
1142 unsigned int vpMbGenericTracker::getKltMaskBorder() const
1143 {
1144  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1145  if (it != m_mapOfTrackers.end()) {
1146  TrackerWrapper *tracker = it->second;
1147  return tracker->getKltMaskBorder();
1148  }
1149  else {
1150  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1151  }
1152 
1153  return 0;
1154 }
1155 
1161 int vpMbGenericTracker::getKltNbPoints() const
1162 {
1163  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1164  if (it != m_mapOfTrackers.end()) {
1165  TrackerWrapper *tracker = it->second;
1166  return tracker->getKltNbPoints();
1167  }
1168  else {
1169  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1170  }
1171 
1172  return 0;
1173 }
1174 
1180 vpKltOpencv vpMbGenericTracker::getKltOpencv() const
1181 {
1182  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1183 
1184  if (it_tracker != m_mapOfTrackers.end()) {
1185  TrackerWrapper *tracker;
1186  tracker = it_tracker->second;
1187  return tracker->getKltOpencv();
1188  }
1189  else {
1190  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1191  }
1192 
1193  return vpKltOpencv();
1194 }
1195 
1204 void vpMbGenericTracker::getKltOpencv(vpKltOpencv &klt1, vpKltOpencv &klt2) const
1205 {
1206  if (m_mapOfTrackers.size() == 2) {
1207  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1208  klt1 = it->second->getKltOpencv();
1209  ++it;
1210 
1211  klt2 = it->second->getKltOpencv();
1212  }
1213  else {
1214  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1215  << std::endl;
1216  }
1217 }
1218 
1224 void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1225 {
1226  mapOfKlts.clear();
1227 
1228  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1229  it != m_mapOfTrackers.end(); ++it) {
1230  TrackerWrapper *tracker = it->second;
1231  mapOfKlts[it->first] = tracker->getKltOpencv();
1232  }
1233 }
1234 
1240 std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1241 {
1242  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1243  if (it != m_mapOfTrackers.end()) {
1244  TrackerWrapper *tracker = it->second;
1245  return tracker->getKltPoints();
1246  }
1247  else {
1248  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1249  }
1250 
1251  return std::vector<cv::Point2f>();
1252 }
1253 
1260 double vpMbGenericTracker::getKltThresholdAcceptation() const { return m_thresholdOutlier; }
1261 #endif
1262 
1275 void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList, unsigned int level) const
1276 {
1277  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1278  if (it != m_mapOfTrackers.end()) {
1279  it->second->getLcircle(circlesList, level);
1280  }
1281  else {
1282  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1283  }
1284 }
1285 
1299 void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1300  unsigned int level) const
1301 {
1302  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1303  if (it != m_mapOfTrackers.end()) {
1304  it->second->getLcircle(circlesList, level);
1305  }
1306  else {
1307  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1308  }
1309 }
1310 
1323 void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList, unsigned int level) const
1324 {
1325  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1326  if (it != m_mapOfTrackers.end()) {
1327  it->second->getLcylinder(cylindersList, level);
1328  }
1329  else {
1330  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1331  }
1332 }
1333 
1347 void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1348  unsigned int level) const
1349 {
1350  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1351  if (it != m_mapOfTrackers.end()) {
1352  it->second->getLcylinder(cylindersList, level);
1353  }
1354  else {
1355  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1356  }
1357 }
1358 
1371 void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList, unsigned int level) const
1372 {
1373  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1374 
1375  if (it != m_mapOfTrackers.end()) {
1376  it->second->getLline(linesList, level);
1377  }
1378  else {
1379  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1380  }
1381 }
1382 
1396 void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1397  unsigned int level) const
1398 {
1399  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1400  if (it != m_mapOfTrackers.end()) {
1401  it->second->getLline(linesList, level);
1402  }
1403  else {
1404  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1405  }
1406 }
1407 
1437 std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1438  const vpHomogeneousMatrix &cMo,
1439  const vpCameraParameters &cam,
1440  bool displayFullModel)
1441 {
1442  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1443 
1444  if (it != m_mapOfTrackers.end()) {
1445  return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1446  }
1447  else {
1448  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1449  }
1450 
1451  return std::vector<std::vector<double> >();
1452 }
1453 
1479 void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1480  const std::map<std::string, unsigned int> &mapOfwidths,
1481  const std::map<std::string, unsigned int> &mapOfheights,
1482  const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1483  const std::map<std::string, vpCameraParameters> &mapOfCams,
1484  bool displayFullModel)
1485 {
1486  // Clear the input map
1487  mapOfModels.clear();
1488 
1489  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1490  it != m_mapOfTrackers.end(); ++it) {
1491  std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1492  std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1493  std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1494  std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1495 
1496  if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
1497  findCam != mapOfCams.end()) {
1498  mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
1499  findCam->second, displayFullModel);
1500  }
1501  }
1502 }
1503 
1510 {
1511  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1512 
1513  if (it != m_mapOfTrackers.end()) {
1514  return it->second->getMovingEdge();
1515  }
1516  else {
1517  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1518  }
1519 
1520  return vpMe();
1521 }
1522 
1532 {
1533  if (m_mapOfTrackers.size() == 2) {
1534  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1535  it->second->getMovingEdge(me1);
1536  ++it;
1537 
1538  it->second->getMovingEdge(me2);
1539  }
1540  else {
1541  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1542  << std::endl;
1543  }
1544 }
1545 
1551 void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1552 {
1553  mapOfMovingEdges.clear();
1554 
1555  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1556  it != m_mapOfTrackers.end(); ++it) {
1557  TrackerWrapper *tracker = it->second;
1558  mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1559  }
1560 }
1561 
1579 unsigned int vpMbGenericTracker::getNbPoints(unsigned int level) const
1580 {
1581  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1582 
1583  unsigned int nbGoodPoints = 0;
1584  if (it != m_mapOfTrackers.end()) {
1585 
1586  nbGoodPoints = it->second->getNbPoints(level);
1587  }
1588  else {
1589  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1590  }
1591 
1592  return nbGoodPoints;
1593 }
1594 
1609 void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, unsigned int level) const
1610 {
1611  mapOfNbPoints.clear();
1612 
1613  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1614  it != m_mapOfTrackers.end(); ++it) {
1615  TrackerWrapper *tracker = it->second;
1616  mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1617  }
1618 }
1619 
1626 {
1627  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1628  if (it != m_mapOfTrackers.end()) {
1629  return it->second->getNbPolygon();
1630  }
1631 
1632  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1633  return 0;
1634 }
1635 
1641 void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1642 {
1643  mapOfNbPolygons.clear();
1644 
1645  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1646  it != m_mapOfTrackers.end(); ++it) {
1647  TrackerWrapper *tracker = it->second;
1648  mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1649  }
1650 }
1651 
1663 {
1664  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1665  if (it != m_mapOfTrackers.end()) {
1666  return it->second->getPolygon(index);
1667  }
1668 
1669  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1670  return nullptr;
1671 }
1672 
1684 vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, unsigned int index)
1685 {
1686  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1687  if (it != m_mapOfTrackers.end()) {
1688  return it->second->getPolygon(index);
1689  }
1690 
1691  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1692  return nullptr;
1693 }
1694 
1710 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1711 vpMbGenericTracker::getPolygonFaces(bool orderPolygons, bool useVisibility, bool clipPolygon)
1712 {
1713  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1714 
1715  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1716  if (it != m_mapOfTrackers.end()) {
1717  TrackerWrapper *tracker = it->second;
1718  polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1719  }
1720  else {
1721  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1722  }
1723 
1724  return polygonFaces;
1725 }
1726 
1744 void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1745  std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1746  bool orderPolygons, bool useVisibility, bool clipPolygon)
1747 {
1748  mapOfPolygons.clear();
1749  mapOfPoints.clear();
1750 
1751  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1752  it != m_mapOfTrackers.end(); ++it) {
1753  TrackerWrapper *tracker = it->second;
1754  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1755  tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1756 
1757  mapOfPolygons[it->first] = polygonFaces.first;
1758  mapOfPoints[it->first] = polygonFaces.second;
1759  }
1760 }
1761 
1763 
1773 {
1774  if (m_mapOfTrackers.size() == 2) {
1775  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1776  it->second->getPose(c1Mo);
1777  ++it;
1778 
1779  it->second->getPose(c2Mo);
1780  }
1781  else {
1782  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1783  << std::endl;
1784  }
1785 }
1786 
1792 void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1793 {
1794  // Clear the map
1795  mapOfCameraPoses.clear();
1796 
1797  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1798  it != m_mapOfTrackers.end(); ++it) {
1799  TrackerWrapper *tracker = it->second;
1800  tracker->getPose(mapOfCameraPoses[it->first]);
1801  }
1802 }
1803 
1808 
1813 {
1814  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1815  if (it != m_mapOfTrackers.end()) {
1816  TrackerWrapper *tracker = it->second;
1817  return tracker->getTrackerType();
1818  }
1819  else {
1820  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1821  m_referenceCameraName.c_str());
1822  }
1823 }
1824 
1826 {
1827  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1828  it != m_mapOfTrackers.end(); ++it) {
1829  TrackerWrapper *tracker = it->second;
1830  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
1831  tracker->init(I);
1832  }
1833 }
1834 
1835 void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
1836  double /*radius*/, int /*idFace*/, const std::string & /*name*/)
1837 {
1838  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
1839 }
1840 
1841 #ifdef VISP_HAVE_MODULE_GUI
1842 
1885 void vpMbGenericTracker::initClick(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1886  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1887  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1888 {
1889  if (m_mapOfTrackers.size() == 2) {
1890  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1891  TrackerWrapper *tracker = it->second;
1892  tracker->initClick(I1, initFile1, displayHelp, T1);
1893 
1894  ++it;
1895 
1896  tracker = it->second;
1897  tracker->initClick(I2, initFile2, displayHelp, T2);
1898 
1900  if (it != m_mapOfTrackers.end()) {
1901  tracker = it->second;
1902 
1903  // Set the reference cMo
1904  tracker->getPose(m_cMo);
1905  }
1906  }
1907  else {
1909  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1910  }
1911 }
1912 
1955 void vpMbGenericTracker::initClick(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
1956  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1957  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1958 {
1959  if (m_mapOfTrackers.size() == 2) {
1960  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1961  TrackerWrapper *tracker = it->second;
1962  tracker->initClick(I_color1, initFile1, displayHelp, T1);
1963 
1964  ++it;
1965 
1966  tracker = it->second;
1967  tracker->initClick(I_color2, initFile2, displayHelp, T2);
1968 
1970  if (it != m_mapOfTrackers.end()) {
1971  tracker = it->second;
1972 
1973  // Set the reference cMo
1974  tracker->getPose(m_cMo);
1975  }
1976  }
1977  else {
1979  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1980  }
1981 }
1982 
2025 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2026  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2027  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2028 {
2029  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2030  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2031  mapOfImages.find(m_referenceCameraName);
2032  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2033 
2034  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2035  TrackerWrapper *tracker = it_tracker->second;
2036  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2037  if (it_T != mapOfT.end())
2038  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2039  else
2040  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2041  tracker->getPose(m_cMo);
2042  }
2043  else {
2044  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2045  }
2046 
2047  // Vector of missing initFile for cameras
2048  std::vector<std::string> vectorOfMissingCameraPoses;
2049 
2050  // Set pose for the specified cameras
2051  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2052  if (it_tracker->first != m_referenceCameraName) {
2053  it_img = mapOfImages.find(it_tracker->first);
2054  it_initFile = mapOfInitFiles.find(it_tracker->first);
2055 
2056  if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2057  // InitClick for the current camera
2058  TrackerWrapper *tracker = it_tracker->second;
2059  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2060  }
2061  else {
2062  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2063  }
2064  }
2065  }
2066 
2067  // Init for cameras that do not have an initFile
2068  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2069  it != vectorOfMissingCameraPoses.end(); ++it) {
2070  it_img = mapOfImages.find(*it);
2071  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2073 
2074  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2075  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2076  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2077  m_mapOfTrackers[*it]->init(*it_img->second);
2078  }
2079  else {
2081  "Missing image or missing camera transformation "
2082  "matrix! Cannot set the pose for camera: %s!",
2083  it->c_str());
2084  }
2085  }
2086 }
2087 
2130 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2131  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2132  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2133 {
2134  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2135  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2136  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2137 
2138  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2139  TrackerWrapper *tracker = it_tracker->second;
2140  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2141  if (it_T != mapOfT.end())
2142  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2143  else
2144  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2145  tracker->getPose(m_cMo);
2146  }
2147  else {
2148  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2149  }
2150 
2151  // Vector of missing initFile for cameras
2152  std::vector<std::string> vectorOfMissingCameraPoses;
2153 
2154  // Set pose for the specified cameras
2155  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2156  if (it_tracker->first != m_referenceCameraName) {
2157  it_img = mapOfColorImages.find(it_tracker->first);
2158  it_initFile = mapOfInitFiles.find(it_tracker->first);
2159 
2160  if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2161  // InitClick for the current camera
2162  TrackerWrapper *tracker = it_tracker->second;
2163  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2164  }
2165  else {
2166  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2167  }
2168  }
2169  }
2170 
2171  // Init for cameras that do not have an initFile
2172  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2173  it != vectorOfMissingCameraPoses.end(); ++it) {
2174  it_img = mapOfColorImages.find(*it);
2175  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2177 
2178  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2179  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2180  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2181  vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
2182  m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
2183  }
2184  else {
2186  "Missing image or missing camera transformation "
2187  "matrix! Cannot set the pose for camera: %s!",
2188  it->c_str());
2189  }
2190  }
2191 }
2192 #endif
2193 
2194 void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
2195  const int /*idFace*/, const std::string & /*name*/)
2196 {
2197  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
2198 }
2199 
2201 {
2202  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
2203 }
2204 
2206 {
2207  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
2208 }
2209 
2240  const std::string &initFile1, const std::string &initFile2)
2241 {
2242  if (m_mapOfTrackers.size() == 2) {
2243  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2244  TrackerWrapper *tracker = it->second;
2245  tracker->initFromPoints(I1, initFile1);
2246 
2247  ++it;
2248 
2249  tracker = it->second;
2250  tracker->initFromPoints(I2, initFile2);
2251 
2253  if (it != m_mapOfTrackers.end()) {
2254  tracker = it->second;
2255 
2256  // Set the reference cMo
2257  tracker->getPose(m_cMo);
2258 
2259  // Set the reference camera parameters
2260  tracker->getCameraParameters(m_cam);
2261  }
2262  }
2263  else {
2265  "Cannot initFromPoints()! Require two cameras but "
2266  "there are %d cameras!",
2267  m_mapOfTrackers.size());
2268  }
2269 }
2270 
2301  const std::string &initFile1, const std::string &initFile2)
2302 {
2303  if (m_mapOfTrackers.size() == 2) {
2304  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2305  TrackerWrapper *tracker = it->second;
2306  tracker->initFromPoints(I_color1, initFile1);
2307 
2308  ++it;
2309 
2310  tracker = it->second;
2311  tracker->initFromPoints(I_color2, initFile2);
2312 
2314  if (it != m_mapOfTrackers.end()) {
2315  tracker = it->second;
2316 
2317  // Set the reference cMo
2318  tracker->getPose(m_cMo);
2319 
2320  // Set the reference camera parameters
2321  tracker->getCameraParameters(m_cam);
2322  }
2323  }
2324  else {
2326  "Cannot initFromPoints()! Require two cameras but "
2327  "there are %d cameras!",
2328  m_mapOfTrackers.size());
2329  }
2330 }
2331 
2332 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2333  const std::map<std::string, std::string> &mapOfInitPoints)
2334 {
2335  // Set the reference cMo
2336  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2337  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2338  mapOfImages.find(m_referenceCameraName);
2339  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2340 
2341  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2342  TrackerWrapper *tracker = it_tracker->second;
2343  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2344  tracker->getPose(m_cMo);
2345  }
2346  else {
2347  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2348  }
2349 
2350  // Vector of missing initPoints for cameras
2351  std::vector<std::string> vectorOfMissingCameraPoints;
2352 
2353  // Set pose for the specified cameras
2354  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2355  it_img = mapOfImages.find(it_tracker->first);
2356  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2357 
2358  if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2359  // Set pose
2360  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2361  }
2362  else {
2363  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2364  }
2365  }
2366 
2367  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2368  it != vectorOfMissingCameraPoints.end(); ++it) {
2369  it_img = mapOfImages.find(*it);
2370  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2372 
2373  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2374  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2375  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2376  }
2377  else {
2379  "Missing image or missing camera transformation "
2380  "matrix! Cannot init the pose for camera: %s!",
2381  it->c_str());
2382  }
2383  }
2384 }
2385 
2386 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2387  const std::map<std::string, std::string> &mapOfInitPoints)
2388 {
2389  // Set the reference cMo
2390  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2391  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2392  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2393 
2394  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
2395  it_initPoints != mapOfInitPoints.end()) {
2396  TrackerWrapper *tracker = it_tracker->second;
2397  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2398  tracker->getPose(m_cMo);
2399  }
2400  else {
2401  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2402  }
2403 
2404  // Vector of missing initPoints for cameras
2405  std::vector<std::string> vectorOfMissingCameraPoints;
2406 
2407  // Set pose for the specified cameras
2408  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2409  it_img = mapOfColorImages.find(it_tracker->first);
2410  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2411 
2412  if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2413  // Set pose
2414  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2415  }
2416  else {
2417  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2418  }
2419  }
2420 
2421  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2422  it != vectorOfMissingCameraPoints.end(); ++it) {
2423  it_img = mapOfColorImages.find(*it);
2424  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2426 
2427  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2428  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2429  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2430  }
2431  else {
2433  "Missing image or missing camera transformation "
2434  "matrix! Cannot init the pose for camera: %s!",
2435  it->c_str());
2436  }
2437  }
2438 }
2439 
2441 {
2442  vpMbTracker::initFromPose(I, cMo);
2443 }
2444 
2457  const std::string &initFile1, const std::string &initFile2)
2458 {
2459  if (m_mapOfTrackers.size() == 2) {
2460  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2461  TrackerWrapper *tracker = it->second;
2462  tracker->initFromPose(I1, initFile1);
2463 
2464  ++it;
2465 
2466  tracker = it->second;
2467  tracker->initFromPose(I2, initFile2);
2468 
2470  if (it != m_mapOfTrackers.end()) {
2471  tracker = it->second;
2472 
2473  // Set the reference cMo
2474  tracker->getPose(m_cMo);
2475 
2476  // Set the reference camera parameters
2477  tracker->getCameraParameters(m_cam);
2478  }
2479  }
2480  else {
2482  "Cannot initFromPose()! Require two cameras but there "
2483  "are %d cameras!",
2484  m_mapOfTrackers.size());
2485  }
2486 }
2487 
2500  const std::string &initFile1, const std::string &initFile2)
2501 {
2502  if (m_mapOfTrackers.size() == 2) {
2503  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2504  TrackerWrapper *tracker = it->second;
2505  tracker->initFromPose(I_color1, initFile1);
2506 
2507  ++it;
2508 
2509  tracker = it->second;
2510  tracker->initFromPose(I_color2, initFile2);
2511 
2513  if (it != m_mapOfTrackers.end()) {
2514  tracker = it->second;
2515 
2516  // Set the reference cMo
2517  tracker->getPose(m_cMo);
2518 
2519  // Set the reference camera parameters
2520  tracker->getCameraParameters(m_cam);
2521  }
2522  }
2523  else {
2525  "Cannot initFromPose()! Require two cameras but there "
2526  "are %d cameras!",
2527  m_mapOfTrackers.size());
2528  }
2529 }
2530 
2545 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2546  const std::map<std::string, std::string> &mapOfInitPoses)
2547 {
2548  // Set the reference cMo
2549  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2550  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2551  mapOfImages.find(m_referenceCameraName);
2552  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2553 
2554  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2555  TrackerWrapper *tracker = it_tracker->second;
2556  tracker->initFromPose(*it_img->second, it_initPose->second);
2557  tracker->getPose(m_cMo);
2558  }
2559  else {
2560  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2561  }
2562 
2563  // Vector of missing pose matrices for cameras
2564  std::vector<std::string> vectorOfMissingCameraPoses;
2565 
2566  // Set pose for the specified cameras
2567  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2568  it_img = mapOfImages.find(it_tracker->first);
2569  it_initPose = mapOfInitPoses.find(it_tracker->first);
2570 
2571  if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2572  // Set pose
2573  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2574  }
2575  else {
2576  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2577  }
2578  }
2579 
2580  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2581  it != vectorOfMissingCameraPoses.end(); ++it) {
2582  it_img = mapOfImages.find(*it);
2583  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2585 
2586  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2587  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2588  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2589  }
2590  else {
2592  "Missing image or missing camera transformation "
2593  "matrix! Cannot init the pose for camera: %s!",
2594  it->c_str());
2595  }
2596  }
2597 }
2598 
2613 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2614  const std::map<std::string, std::string> &mapOfInitPoses)
2615 {
2616  // Set the reference cMo
2617  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2618  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2619  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2620 
2621  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2622  TrackerWrapper *tracker = it_tracker->second;
2623  tracker->initFromPose(*it_img->second, it_initPose->second);
2624  tracker->getPose(m_cMo);
2625  }
2626  else {
2627  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2628  }
2629 
2630  // Vector of missing pose matrices for cameras
2631  std::vector<std::string> vectorOfMissingCameraPoses;
2632 
2633  // Set pose for the specified cameras
2634  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2635  it_img = mapOfColorImages.find(it_tracker->first);
2636  it_initPose = mapOfInitPoses.find(it_tracker->first);
2637 
2638  if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2639  // Set pose
2640  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2641  }
2642  else {
2643  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2644  }
2645  }
2646 
2647  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2648  it != vectorOfMissingCameraPoses.end(); ++it) {
2649  it_img = mapOfColorImages.find(*it);
2650  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2652 
2653  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2654  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2655  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2656  }
2657  else {
2659  "Missing image or missing camera transformation "
2660  "matrix! Cannot init the pose for camera: %s!",
2661  it->c_str());
2662  }
2663  }
2664 }
2665 
2677  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2678 {
2679  if (m_mapOfTrackers.size() == 2) {
2680  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2681  it->second->initFromPose(I1, c1Mo);
2682 
2683  ++it;
2684 
2685  it->second->initFromPose(I2, c2Mo);
2686 
2687  m_cMo = c1Mo;
2688  }
2689  else {
2691  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2692  }
2693 }
2694 
2706  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2707 {
2708  if (m_mapOfTrackers.size() == 2) {
2709  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2710  it->second->initFromPose(I_color1, c1Mo);
2711 
2712  ++it;
2713 
2714  it->second->initFromPose(I_color2, c2Mo);
2715 
2716  m_cMo = c1Mo;
2717  }
2718  else {
2720  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2721  }
2722 }
2723 
2737 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2738  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2739 {
2740  // Set the reference cMo
2741  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2742  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2743  mapOfImages.find(m_referenceCameraName);
2744  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2745 
2746  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2747  TrackerWrapper *tracker = it_tracker->second;
2748  tracker->initFromPose(*it_img->second, it_camPose->second);
2749  tracker->getPose(m_cMo);
2750  }
2751  else {
2752  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2753  }
2754 
2755  // Vector of missing pose matrices for cameras
2756  std::vector<std::string> vectorOfMissingCameraPoses;
2757 
2758  // Set pose for the specified cameras
2759  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2760  it_img = mapOfImages.find(it_tracker->first);
2761  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2762 
2763  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2764  // Set pose
2765  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2766  }
2767  else {
2768  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2769  }
2770  }
2771 
2772  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2773  it != vectorOfMissingCameraPoses.end(); ++it) {
2774  it_img = mapOfImages.find(*it);
2775  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2777 
2778  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2779  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2780  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2781  }
2782  else {
2784  "Missing image or missing camera transformation "
2785  "matrix! Cannot set the pose for camera: %s!",
2786  it->c_str());
2787  }
2788  }
2789 }
2790 
2804 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2805  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2806 {
2807  // Set the reference cMo
2808  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2809  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2810  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2811 
2812  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2813  TrackerWrapper *tracker = it_tracker->second;
2814  tracker->initFromPose(*it_img->second, it_camPose->second);
2815  tracker->getPose(m_cMo);
2816  }
2817  else {
2818  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2819  }
2820 
2821  // Vector of missing pose matrices for cameras
2822  std::vector<std::string> vectorOfMissingCameraPoses;
2823 
2824  // Set pose for the specified cameras
2825  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2826  it_img = mapOfColorImages.find(it_tracker->first);
2827  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2828 
2829  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2830  // Set pose
2831  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2832  }
2833  else {
2834  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2835  }
2836  }
2837 
2838  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2839  it != vectorOfMissingCameraPoses.end(); ++it) {
2840  it_img = mapOfColorImages.find(*it);
2841  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2843 
2844  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2845  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2846  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2847  }
2848  else {
2850  "Missing image or missing camera transformation "
2851  "matrix! Cannot set the pose for camera: %s!",
2852  it->c_str());
2853  }
2854  }
2855 }
2856 
2868 void vpMbGenericTracker::loadConfigFile(const std::string &configFile, bool verbose)
2869 {
2870  const std::string extension = vpIoTools::getFileExtension(configFile);
2871  if (extension == ".xml") {
2872  loadConfigFileXML(configFile, verbose);
2873  }
2874 #ifdef VISP_HAVE_NLOHMANN_JSON
2875  else if (extension == ".json") {
2876  loadConfigFileJSON(configFile, verbose);
2877  }
2878 #endif
2879  else {
2880  throw vpException(vpException::ioError, "MBT config parsing: File format " + extension + "for file " + configFile + " is not supported.");
2881  }
2882 }
2883 
2895 void vpMbGenericTracker::loadConfigFileXML(const std::string &configFile, bool verbose)
2896 {
2897  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2898  it != m_mapOfTrackers.end(); ++it) {
2899  TrackerWrapper *tracker = it->second;
2900  tracker->loadConfigFile(configFile, verbose);
2901  }
2902 
2904  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2905  }
2906 
2907  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2908  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2909  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2910  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2911 }
2912 
2913 #ifdef VISP_HAVE_NLOHMANN_JSON
/*!
  Load the tracker settings from a JSON configuration file.

  The file must contain a "version" field matching MBT_JSON_SETTINGS_VERSION,
  a "referenceCameraName" field and a "trackers" object mapping camera names
  to per-camera tracker settings. Trackers present in the current object but
  absent from the file are deleted; trackers present in the file but not in
  the object are created. Optional "display", "visibilityTest", "vvs" and
  "model" sections override the per-camera values globally.

  \param settingsFile : Path to the JSON settings file.
  \param verbose : If true, print the settings as they are loaded.

  \exception vpException::ioError if the file cannot be read or parsed.
  \exception vpException::notInitialized if versioning info is missing or a
  non-reference camera has no transformation to the reference camera.
  \exception vpException::badValue if the version is outdated or the
  reference camera is not among the loaded trackers.
*/
void vpMbGenericTracker::loadConfigFileJSON(const std::string &settingsFile, bool verbose)
{
  // Read file
  std::ifstream jsonFile(settingsFile);
  if (!jsonFile.good()) {
    throw vpException(vpException::ioError, "Could not read from settings file " + settingsFile + " to initialise the vpMbGenericTracker");
  }
  json settings;
  try {
    settings = json::parse(jsonFile);
  }
  catch (json::parse_error &e) {
    std::stringstream msg;
    msg << "Could not parse JSON file : \n";

    msg << e.what() << std::endl;
    msg << "Byte position of error: " << e.byte;
    throw vpException(vpException::ioError, msg.str());
  }
  jsonFile.close();

  // Refuse configurations without version info or with an outdated version
  if (!settings.contains("version")) {
    throw vpException(vpException::notInitialized, "JSON configuration does not contain versioning information");
  }
  else if (settings["version"].get<std::string>() != MBT_JSON_SETTINGS_VERSION) {
    throw vpException(vpException::badValue, "Trying to load an old configuration file");
  }

  // Load basic settings
  settings.at("referenceCameraName").get_to(m_referenceCameraName);
  json trackersJson;
  trackersJson = settings.at("trackers");

  // Find cameras that are already present in the tracker but not in the config file: they will be removed
  std::vector<std::string> unusedCameraNames = getCameraNames();

  bool refCameraFound = false;
  // For each camera declared in the config file
  for (const auto &it : trackersJson.items()) {
    const std::string cameraName = it.key();
    const json trackerJson = it.value();
    refCameraFound = refCameraFound || cameraName == m_referenceCameraName;

    // Load transformation between current camera and reference camera, if it exists
    if (trackerJson.contains("camTref")) {
      m_mapOfCameraTransformationMatrix[cameraName] = trackerJson["camTref"].get<vpHomogeneousMatrix>();
    }
    else if (cameraName != m_referenceCameraName) { // No transformation to reference and it's not the reference itself
      throw vpException(vpException::notInitialized, "Camera " + cameraName + " has no transformation to the reference camera");
    }
    if (verbose) {
      std::cout << "Loading tracker " << cameraName << std::endl << " with settings: " << std::endl << trackerJson.dump(2);
    }
    if (m_mapOfTrackers.count(cameraName)) {
      // Camera already exists: update its settings in place
      if (verbose) {
        std::cout << "Updating an already existing tracker with JSON configuration." << std::endl;
      }
      from_json(trackerJson, *(m_mapOfTrackers[cameraName]));
    }
    else {
      // New camera: create and configure a fresh tracker wrapper
      if (verbose) {
        std::cout << "Creating a new tracker from JSON configuration." << std::endl;
      }
      TrackerWrapper *tw = new TrackerWrapper(); //vpMBTracker is responsible for deleting trackers
      *tw = trackerJson;
      m_mapOfTrackers[cameraName] = tw;
    }
    const auto unusedCamIt = std::remove(unusedCameraNames.begin(), unusedCameraNames.end(), cameraName); // Mark this camera name as used
    unusedCameraNames.erase(unusedCamIt, unusedCameraNames.end());
  }
  if (!refCameraFound) {
    throw vpException(vpException::badValue, "Reference camera not found in trackers");
  }

  // All cameras that were defined in the tracker but not in the config file are removed
  for (const std::string &oldCameraName : unusedCameraNames) {
    m_mapOfCameraTransformationMatrix.erase(oldCameraName);
    TrackerWrapper *tw = m_mapOfTrackers[oldCameraName];
    m_mapOfTrackers.erase(oldCameraName);
    delete tw;
  }

  // Mirror the reference camera settings at the generic tracker level
  const TrackerWrapper *refTracker = m_mapOfTrackers[m_referenceCameraName];
  refTracker->getCameraParameters(m_cam);
  this->angleAppears = refTracker->getAngleAppear();
  this->angleDisappears = refTracker->getAngleDisappear();
  this->clippingFlag = refTracker->getClipping();
  this->distNearClip = refTracker->getNearClippingDistance();
  this->distFarClip = refTracker->getFarClippingDistance();

  // These settings can be set in each tracker or globally. Global value overrides local ones.
  if (settings.contains("display")) {
    const json displayJson = settings["display"];
    setDisplayFeatures(displayJson.value("features", displayFeatures));
    setProjectionErrorDisplay(displayJson.value("projectionError", m_projectionErrorDisplay));
  }
  if (settings.contains("visibilityTest")) {
    const json visJson = settings["visibilityTest"];
    setOgreVisibilityTest(visJson.value("ogre", useOgre));
    setScanLineVisibilityTest(visJson.value("scanline", useScanLine));
  }

  // VVS global settings
  if (settings.contains("vvs")) {
    const json vvsJson = settings["vvs"];
    setLambda(vvsJson.value("lambda", this->m_lambda));
    setMaxIter(vvsJson.value("maxIter", this->m_maxIter));
    setInitialMu(vvsJson.value("initialMu", this->m_initialMu));
  }

  // If a 3D model is defined, load it
  if (settings.contains("model")) {
    loadModel(settings.at("model").get<std::string>(), verbose);
  }
}
3037 
3045 void vpMbGenericTracker::saveConfigFile(const std::string &settingsFile) const
3046 {
3047  json j;
3048  j["referenceCameraName"] = m_referenceCameraName;
3049  j["version"] = MBT_JSON_SETTINGS_VERSION;
3050  // j["thresholdOutlier"] = m_thresholdOutlier;
3051  json trackers;
3052  for (const auto &kv : m_mapOfTrackers) {
3053  trackers[kv.first] = *(kv.second);
3054  const auto itTransformation = m_mapOfCameraTransformationMatrix.find(kv.first);
3055  if (itTransformation != m_mapOfCameraTransformationMatrix.end()) {
3056  trackers[kv.first]["camTref"] = itTransformation->second;
3057  }
3058  }
3059  j["trackers"] = trackers;
3060  j["vvs"] = json {
3061  {"lambda", m_lambda},
3062  {"maxIter", m_maxIter},
3063  {"initialMu", m_initialMu}
3064  };
3065 
3066  std::ofstream f(settingsFile);
3067  if (f.good()) {
3068  const unsigned indentLevel = 4;
3069  f << j.dump(indentLevel);
3070  f.close();
3071  }
3072  else {
3073  throw vpException(vpException::ioError, "Could not save tracker configuration to JSON file: " + settingsFile);
3074  }
3075 }
3076 
3077 #endif
3078 
3093 void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2, bool verbose)
3094 {
3095  if (m_mapOfTrackers.size() != 2) {
3096  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3097  }
3098 
3099  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3100  TrackerWrapper *tracker = it_tracker->second;
3101  tracker->loadConfigFile(configFile1, verbose);
3102 
3103  ++it_tracker;
3104  tracker = it_tracker->second;
3105  tracker->loadConfigFile(configFile2, verbose);
3106 
3108  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
3109  }
3110 
3111  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
3112  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
3113  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
3114  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
3115 }
3116 
3130 void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles, bool verbose)
3131 {
3132  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3133  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3134  TrackerWrapper *tracker = it_tracker->second;
3135 
3136  std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
3137  if (it_config != mapOfConfigFiles.end()) {
3138  tracker->loadConfigFile(it_config->second, verbose);
3139  }
3140  else {
3141  throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
3142  it_tracker->first.c_str());
3143  }
3144  }
3145 
3146  // Set the reference camera parameters
3147  std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
3148  if (it != m_mapOfTrackers.end()) {
3149  TrackerWrapper *tracker = it->second;
3150  tracker->getCameraParameters(m_cam);
3151 
3152  // Set clipping
3153  this->clippingFlag = tracker->getClipping();
3154  this->angleAppears = tracker->getAngleAppear();
3155  this->angleDisappears = tracker->getAngleDisappear();
3156  }
3157  else {
3158  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
3159  m_referenceCameraName.c_str());
3160  }
3161 }
3162 
3181 void vpMbGenericTracker::loadModel(const std::string &modelFile, bool verbose, const vpHomogeneousMatrix &T)
3182 {
3183  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3184  it != m_mapOfTrackers.end(); ++it) {
3185  TrackerWrapper *tracker = it->second;
3186  tracker->loadModel(modelFile, verbose, T);
3187  }
3188 }
3189 
3212 void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, bool verbose,
3213  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3214 {
3215  if (m_mapOfTrackers.size() != 2) {
3216  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3217  }
3218 
3219  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3220  TrackerWrapper *tracker = it_tracker->second;
3221  tracker->loadModel(modelFile1, verbose, T1);
3222 
3223  ++it_tracker;
3224  tracker = it_tracker->second;
3225  tracker->loadModel(modelFile2, verbose, T2);
3226 }
3227 
3247 void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, bool verbose,
3248  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3249 {
3250  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3251  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3252  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
3253 
3254  if (it_model != mapOfModelFiles.end()) {
3255  TrackerWrapper *tracker = it_tracker->second;
3256  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3257 
3258  if (it_T != mapOfT.end())
3259  tracker->loadModel(it_model->second, verbose, it_T->second);
3260  else
3261  tracker->loadModel(it_model->second, verbose);
3262  }
3263  else {
3264  throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
3265  it_tracker->first.c_str());
3266  }
3267  }
3268 }
3269 
3270 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
3271 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3272  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3273 {
3274  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3275  it != m_mapOfTrackers.end(); ++it) {
3276  TrackerWrapper *tracker = it->second;
3277  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3278  }
3279 }
3280 #endif
3281 
3282 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3283  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3284  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3285  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3286 {
3287  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3288  it != m_mapOfTrackers.end(); ++it) {
3289  TrackerWrapper *tracker = it->second;
3290  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3291  mapOfPointCloudHeights[it->first]);
3292  }
3293 }
3294 
3295 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3296  std::map<std::string, const vpMatrix *> &mapOfPointClouds,
3297  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3298  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3299 {
3300  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3301  it != m_mapOfTrackers.end(); ++it) {
3302  TrackerWrapper *tracker = it->second;
3303  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3304  mapOfPointCloudHeights[it->first]);
3305  }
3306 }
3307 
3319 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
3320  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3321 {
3322  if (m_mapOfTrackers.size() != 1) {
3323  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3324  m_mapOfTrackers.size());
3325  }
3326 
3327  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3328  if (it_tracker != m_mapOfTrackers.end()) {
3329  TrackerWrapper *tracker = it_tracker->second;
3330  tracker->reInitModel(I, cad_name, cMo, verbose, T);
3331 
3332  // Set reference pose
3333  tracker->getPose(m_cMo);
3334  }
3335  else {
3336  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3337  }
3338 
3339  modelInitialised = true;
3340 }
3341 
3353 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
3354  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3355 {
3356  if (m_mapOfTrackers.size() != 1) {
3357  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3358  m_mapOfTrackers.size());
3359  }
3360 
3361  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3362  if (it_tracker != m_mapOfTrackers.end()) {
3363  TrackerWrapper *tracker = it_tracker->second;
3364  tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3365 
3366  // Set reference pose
3367  tracker->getPose(m_cMo);
3368  }
3369  else {
3370  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3371  }
3372 
3373  modelInitialised = true;
3374 }
3375 
3397  const std::string &cad_name1, const std::string &cad_name2,
3398  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3399  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3400 {
3401  if (m_mapOfTrackers.size() == 2) {
3402  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3403 
3404  it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3405 
3406  ++it_tracker;
3407 
3408  it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3409 
3410  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3411  if (it_tracker != m_mapOfTrackers.end()) {
3412  // Set reference pose
3413  it_tracker->second->getPose(m_cMo);
3414  }
3415  }
3416  else {
3417  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3418  }
3419 
3420  modelInitialised = true;
3421 }
3422 
3444  const std::string &cad_name1, const std::string &cad_name2,
3445  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3446  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3447 {
3448  if (m_mapOfTrackers.size() == 2) {
3449  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3450 
3451  it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3452 
3453  ++it_tracker;
3454 
3455  it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3456 
3457  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3458  if (it_tracker != m_mapOfTrackers.end()) {
3459  // Set reference pose
3460  it_tracker->second->getPose(m_cMo);
3461  }
3462  }
3463  else {
3464  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3465  }
3466 
3467  modelInitialised = true;
3468 }
3469 
3484 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3485  const std::map<std::string, std::string> &mapOfModelFiles,
3486  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3487  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3488 {
3489  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3490  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3491  mapOfImages.find(m_referenceCameraName);
3492  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3493  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3494 
3495  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3496  it_camPose != mapOfCameraPoses.end()) {
3497  TrackerWrapper *tracker = it_tracker->second;
3498  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3499  if (it_T != mapOfT.end())
3500  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3501  else
3502  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3503 
3504  // Set reference pose
3505  tracker->getPose(m_cMo);
3506  }
3507  else {
3508  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3509  }
3510 
3511  std::vector<std::string> vectorOfMissingCameras;
3512  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3513  if (it_tracker->first != m_referenceCameraName) {
3514  it_img = mapOfImages.find(it_tracker->first);
3515  it_model = mapOfModelFiles.find(it_tracker->first);
3516  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3517 
3518  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3519  TrackerWrapper *tracker = it_tracker->second;
3520  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3521  }
3522  else {
3523  vectorOfMissingCameras.push_back(it_tracker->first);
3524  }
3525  }
3526  }
3527 
3528  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3529  ++it) {
3530  it_img = mapOfImages.find(*it);
3531  it_model = mapOfModelFiles.find(*it);
3532  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3534 
3535  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3536  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3537  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3538  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3539  }
3540  }
3541 
3542  modelInitialised = true;
3543 }
3544 
3559 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3560  const std::map<std::string, std::string> &mapOfModelFiles,
3561  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3562  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3563 {
3564  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3565  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
3566  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3567  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3568 
3569  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3570  it_camPose != mapOfCameraPoses.end()) {
3571  TrackerWrapper *tracker = it_tracker->second;
3572  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3573  if (it_T != mapOfT.end())
3574  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3575  else
3576  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3577 
3578  // Set reference pose
3579  tracker->getPose(m_cMo);
3580  }
3581  else {
3582  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3583  }
3584 
3585  std::vector<std::string> vectorOfMissingCameras;
3586  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3587  if (it_tracker->first != m_referenceCameraName) {
3588  it_img = mapOfColorImages.find(it_tracker->first);
3589  it_model = mapOfModelFiles.find(it_tracker->first);
3590  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3591 
3592  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3593  it_camPose != mapOfCameraPoses.end()) {
3594  TrackerWrapper *tracker = it_tracker->second;
3595  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3596  }
3597  else {
3598  vectorOfMissingCameras.push_back(it_tracker->first);
3599  }
3600  }
3601  }
3602 
3603  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3604  ++it) {
3605  it_img = mapOfColorImages.find(*it);
3606  it_model = mapOfModelFiles.find(*it);
3607  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3609 
3610  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3611  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3612  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3613  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3614  }
3615  }
3616 
3617  modelInitialised = true;
3618 }
3619 
3625 {
3626  m_cMo.eye();
3627 
3628  useScanLine = false;
3629 
3630 #ifdef VISP_HAVE_OGRE
3631  useOgre = false;
3632 #endif
3633 
3634  m_computeInteraction = true;
3635  m_lambda = 1.0;
3636 
3637  angleAppears = vpMath::rad(89);
3640  distNearClip = 0.001;
3641  distFarClip = 100;
3642 
3644  m_maxIter = 30;
3645  m_stopCriteriaEpsilon = 1e-8;
3646  m_initialMu = 0.01;
3647 
3648  // Only for Edge
3649  m_percentageGdPt = 0.4;
3650 
3651  // Only for KLT
3652  m_thresholdOutlier = 0.5;
3653 
3654  // Reset default ponderation between each feature type
3656 
3657 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
3658  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
3659 #endif
3660 
3663 
3664  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3665  it != m_mapOfTrackers.end(); ++it) {
3666  TrackerWrapper *tracker = it->second;
3667  tracker->resetTracker();
3668  }
3669 }
3670 
3681 {
3683 
3684  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3685  it != m_mapOfTrackers.end(); ++it) {
3686  TrackerWrapper *tracker = it->second;
3687  tracker->setAngleAppear(a);
3688  }
3689 }
3690 
3703 void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3704 {
3705  if (m_mapOfTrackers.size() == 2) {
3706  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3707  it->second->setAngleAppear(a1);
3708 
3709  ++it;
3710  it->second->setAngleAppear(a2);
3711 
3713  if (it != m_mapOfTrackers.end()) {
3714  angleAppears = it->second->getAngleAppear();
3715  }
3716  else {
3717  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3718  }
3719  }
3720  else {
3721  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3722  m_mapOfTrackers.size());
3723  }
3724 }
3725 
3735 void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3736 {
3737  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3738  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3739 
3740  if (it_tracker != m_mapOfTrackers.end()) {
3741  TrackerWrapper *tracker = it_tracker->second;
3742  tracker->setAngleAppear(it->second);
3743 
3744  if (it->first == m_referenceCameraName) {
3745  angleAppears = it->second;
3746  }
3747  }
3748  }
3749 }
3750 
3761 {
3763 
3764  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3765  it != m_mapOfTrackers.end(); ++it) {
3766  TrackerWrapper *tracker = it->second;
3767  tracker->setAngleDisappear(a);
3768  }
3769 }
3770 
3783 void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3784 {
3785  if (m_mapOfTrackers.size() == 2) {
3786  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3787  it->second->setAngleDisappear(a1);
3788 
3789  ++it;
3790  it->second->setAngleDisappear(a2);
3791 
3793  if (it != m_mapOfTrackers.end()) {
3794  angleDisappears = it->second->getAngleDisappear();
3795  }
3796  else {
3797  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3798  }
3799  }
3800  else {
3801  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3802  m_mapOfTrackers.size());
3803  }
3804 }
3805 
3815 void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3816 {
3817  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3818  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3819 
3820  if (it_tracker != m_mapOfTrackers.end()) {
3821  TrackerWrapper *tracker = it_tracker->second;
3822  tracker->setAngleDisappear(it->second);
3823 
3824  if (it->first == m_referenceCameraName) {
3825  angleDisappears = it->second;
3826  }
3827  }
3828  }
3829 }
3830 
3837 {
3839 
3840  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3841  it != m_mapOfTrackers.end(); ++it) {
3842  TrackerWrapper *tracker = it->second;
3843  tracker->setCameraParameters(camera);
3844  }
3845 }
3846 
3856 {
3857  if (m_mapOfTrackers.size() == 2) {
3858  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3859  it->second->setCameraParameters(camera1);
3860 
3861  ++it;
3862  it->second->setCameraParameters(camera2);
3863 
3865  if (it != m_mapOfTrackers.end()) {
3866  it->second->getCameraParameters(m_cam);
3867  }
3868  else {
3869  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3870  }
3871  }
3872  else {
3873  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3874  m_mapOfTrackers.size());
3875  }
3876 }
3877 
3886 void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3887 {
3888  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3889  it != mapOfCameraParameters.end(); ++it) {
3890  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3891 
3892  if (it_tracker != m_mapOfTrackers.end()) {
3893  TrackerWrapper *tracker = it_tracker->second;
3894  tracker->setCameraParameters(it->second);
3895 
3896  if (it->first == m_referenceCameraName) {
3897  m_cam = it->second;
3898  }
3899  }
3900  }
3901 }
3902 
3911 void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3912  const vpHomogeneousMatrix &cameraTransformationMatrix)
3913 {
3914  std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3915 
3916  if (it != m_mapOfCameraTransformationMatrix.end()) {
3917  it->second = cameraTransformationMatrix;
3918  }
3919  else {
3920  throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3921  }
3922 }
3923 
3932  const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3933 {
3934  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3935  it != mapOfTransformationMatrix.end(); ++it) {
3936  std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3937  m_mapOfCameraTransformationMatrix.find(it->first);
3938 
3939  if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3940  it_camTrans->second = it->second;
3941  }
3942  }
3943 }
3944 
3954 void vpMbGenericTracker::setClipping(const unsigned int &flags)
3955 {
3956  vpMbTracker::setClipping(flags);
3957 
3958  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3959  it != m_mapOfTrackers.end(); ++it) {
3960  TrackerWrapper *tracker = it->second;
3961  tracker->setClipping(flags);
3962  }
3963 }
3964 
3975 void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3976 {
3977  if (m_mapOfTrackers.size() == 2) {
3978  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3979  it->second->setClipping(flags1);
3980 
3981  ++it;
3982  it->second->setClipping(flags2);
3983 
3985  if (it != m_mapOfTrackers.end()) {
3986  clippingFlag = it->second->getClipping();
3987  }
3988  else {
3989  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3990  }
3991  }
3992  else {
3993  std::stringstream ss;
3994  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3996  }
3997 }
3998 
4006 void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
4007 {
4008  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
4009  it != mapOfClippingFlags.end(); ++it) {
4010  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4011 
4012  if (it_tracker != m_mapOfTrackers.end()) {
4013  TrackerWrapper *tracker = it_tracker->second;
4014  tracker->setClipping(it->second);
4015 
4016  if (it->first == m_referenceCameraName) {
4017  clippingFlag = it->second;
4018  }
4019  }
4020  }
4021 }
4022 
4033 {
4034  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4035  it != m_mapOfTrackers.end(); ++it) {
4036  TrackerWrapper *tracker = it->second;
4037  tracker->setDepthDenseFilteringMaxDistance(maxDistance);
4038  }
4039 }
4040 
4050 {
4051  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4052  it != m_mapOfTrackers.end(); ++it) {
4053  TrackerWrapper *tracker = it->second;
4054  tracker->setDepthDenseFilteringMethod(method);
4055  }
4056 }
4057 
4068 {
4069  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4070  it != m_mapOfTrackers.end(); ++it) {
4071  TrackerWrapper *tracker = it->second;
4072  tracker->setDepthDenseFilteringMinDistance(minDistance);
4073  }
4074 }
4075 
4086 {
4087  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4088  it != m_mapOfTrackers.end(); ++it) {
4089  TrackerWrapper *tracker = it->second;
4090  tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
4091  }
4092 }
4093 
4102 void vpMbGenericTracker::setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
4103 {
4104  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4105  it != m_mapOfTrackers.end(); ++it) {
4106  TrackerWrapper *tracker = it->second;
4107  tracker->setDepthDenseSamplingStep(stepX, stepY);
4108  }
4109 }
4110 
4119 {
4120  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4121  it != m_mapOfTrackers.end(); ++it) {
4122  TrackerWrapper *tracker = it->second;
4123  tracker->setDepthNormalFaceCentroidMethod(method);
4124  }
4125 }
4126 
4136 {
4137  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4138  it != m_mapOfTrackers.end(); ++it) {
4139  TrackerWrapper *tracker = it->second;
4140  tracker->setDepthNormalFeatureEstimationMethod(method);
4141  }
4142 }
4143 
4152 {
4153  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4154  it != m_mapOfTrackers.end(); ++it) {
4155  TrackerWrapper *tracker = it->second;
4156  tracker->setDepthNormalPclPlaneEstimationMethod(method);
4157  }
4158 }
4159 
4168 {
4169  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4170  it != m_mapOfTrackers.end(); ++it) {
4171  TrackerWrapper *tracker = it->second;
4172  tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
4173  }
4174 }
4175 
4184 {
4185  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4186  it != m_mapOfTrackers.end(); ++it) {
4187  TrackerWrapper *tracker = it->second;
4188  tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
4189  }
4190 }
4191 
4200 void vpMbGenericTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
4201 {
4202  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4203  it != m_mapOfTrackers.end(); ++it) {
4204  TrackerWrapper *tracker = it->second;
4205  tracker->setDepthNormalSamplingStep(stepX, stepY);
4206  }
4207 }
4208 
4228 {
4230 
4231  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4232  it != m_mapOfTrackers.end(); ++it) {
4233  TrackerWrapper *tracker = it->second;
4234  tracker->setDisplayFeatures(displayF);
4235  }
4236 }
4237 
4246 {
4248 
4249  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4250  it != m_mapOfTrackers.end(); ++it) {
4251  TrackerWrapper *tracker = it->second;
4252  tracker->setFarClippingDistance(dist);
4253  }
4254 }
4255 
4264 void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
4265 {
4266  if (m_mapOfTrackers.size() == 2) {
4267  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4268  it->second->setFarClippingDistance(dist1);
4269 
4270  ++it;
4271  it->second->setFarClippingDistance(dist2);
4272 
4274  if (it != m_mapOfTrackers.end()) {
4275  distFarClip = it->second->getFarClippingDistance();
4276  }
4277  else {
4278  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4279  }
4280  }
4281  else {
4282  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4283  m_mapOfTrackers.size());
4284  }
4285 }
4286 
4292 void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
4293 {
4294  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4295  ++it) {
4296  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4297 
4298  if (it_tracker != m_mapOfTrackers.end()) {
4299  TrackerWrapper *tracker = it_tracker->second;
4300  tracker->setFarClippingDistance(it->second);
4301 
4302  if (it->first == m_referenceCameraName) {
4303  distFarClip = it->second;
4304  }
4305  }
4306  }
4307 }
4308 
4315 void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
4316 {
4317  for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
4318  ++it) {
4319  std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4320  if (it_factor != mapOfFeatureFactors.end()) {
4321  it->second = it_factor->second;
4322  }
4323  }
4324 }
4325 
4342 {
4343  m_percentageGdPt = threshold;
4344 
4345  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4346  it != m_mapOfTrackers.end(); ++it) {
4347  TrackerWrapper *tracker = it->second;
4348  tracker->setGoodMovingEdgesRatioThreshold(threshold);
4349  }
4350 }
4351 
4352 #ifdef VISP_HAVE_OGRE
4365 {
4366  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4367  it != m_mapOfTrackers.end(); ++it) {
4368  TrackerWrapper *tracker = it->second;
4369  tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4370  }
4371 }
4372 
4385 {
4386  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4387  it != m_mapOfTrackers.end(); ++it) {
4388  TrackerWrapper *tracker = it->second;
4389  tracker->setNbRayCastingAttemptsForVisibility(attempts);
4390  }
4391 }
4392 #endif
4393 
4394 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4402 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t)
4403 {
4404  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4405  it != m_mapOfTrackers.end(); ++it) {
4406  TrackerWrapper *tracker = it->second;
4407  tracker->setKltOpencv(t);
4408  }
4409 }
4410 
4419 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t1, const vpKltOpencv &t2)
4420 {
4421  if (m_mapOfTrackers.size() == 2) {
4422  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4423  it->second->setKltOpencv(t1);
4424 
4425  ++it;
4426  it->second->setKltOpencv(t2);
4427  }
4428  else {
4429  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4430  m_mapOfTrackers.size());
4431  }
4432 }
4433 
4439 void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4440 {
4441  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4442  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4443 
4444  if (it_tracker != m_mapOfTrackers.end()) {
4445  TrackerWrapper *tracker = it_tracker->second;
4446  tracker->setKltOpencv(it->second);
4447  }
4448  }
4449 }
4450 
4458 void vpMbGenericTracker::setKltThresholdAcceptation(double th)
4459 {
4460  m_thresholdOutlier = th;
4461 
4462  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4463  it != m_mapOfTrackers.end(); ++it) {
4464  TrackerWrapper *tracker = it->second;
4465  tracker->setKltThresholdAcceptation(th);
4466  }
4467 }
4468 #endif
4469 
4482 void vpMbGenericTracker::setLod(bool useLod, const std::string &name)
4483 {
4484  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4485  it != m_mapOfTrackers.end(); ++it) {
4486  TrackerWrapper *tracker = it->second;
4487  tracker->setLod(useLod, name);
4488  }
4489 }
4490 
4491 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4499 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4500 {
4501  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4502  it != m_mapOfTrackers.end(); ++it) {
4503  TrackerWrapper *tracker = it->second;
4504  tracker->setKltMaskBorder(e);
4505  }
4506 }
4507 
4516 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4517 {
4518  if (m_mapOfTrackers.size() == 2) {
4519  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4520  it->second->setKltMaskBorder(e1);
4521 
4522  ++it;
4523 
4524  it->second->setKltMaskBorder(e2);
4525  }
4526  else {
4527  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4528  m_mapOfTrackers.size());
4529  }
4530 }
4531 
4537 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4538 {
4539  for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4540  ++it) {
4541  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4542 
4543  if (it_tracker != m_mapOfTrackers.end()) {
4544  TrackerWrapper *tracker = it_tracker->second;
4545  tracker->setKltMaskBorder(it->second);
4546  }
4547  }
4548 }
4549 #endif
4550 
4557 {
4558  vpMbTracker::setMask(mask);
4559 
4560  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4561  it != m_mapOfTrackers.end(); ++it) {
4562  TrackerWrapper *tracker = it->second;
4563  tracker->setMask(mask);
4564  }
4565 }
4566 
4578 void vpMbGenericTracker::setMinLineLengthThresh(double minLineLengthThresh, const std::string &name)
4579 {
4580  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4581  it != m_mapOfTrackers.end(); ++it) {
4582  TrackerWrapper *tracker = it->second;
4583  tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4584  }
4585 }
4586 
4597 void vpMbGenericTracker::setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name)
4598 {
4599  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4600  it != m_mapOfTrackers.end(); ++it) {
4601  TrackerWrapper *tracker = it->second;
4602  tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4603  }
4604 }
4605 
4614 {
4615  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4616  it != m_mapOfTrackers.end(); ++it) {
4617  TrackerWrapper *tracker = it->second;
4618  tracker->setMovingEdge(me);
4619  }
4620 }
4621 
4631 void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4632 {
4633  if (m_mapOfTrackers.size() == 2) {
4634  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4635  it->second->setMovingEdge(me1);
4636 
4637  ++it;
4638 
4639  it->second->setMovingEdge(me2);
4640  }
4641  else {
4642  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4643  m_mapOfTrackers.size());
4644  }
4645 }
4646 
4652 void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4653 {
4654  for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4655  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4656 
4657  if (it_tracker != m_mapOfTrackers.end()) {
4658  TrackerWrapper *tracker = it_tracker->second;
4659  tracker->setMovingEdge(it->second);
4660  }
4661  }
4662 }
4663 
4672 {
4674 
4675  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4676  it != m_mapOfTrackers.end(); ++it) {
4677  TrackerWrapper *tracker = it->second;
4678  tracker->setNearClippingDistance(dist);
4679  }
4680 }
4681 
4690 void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4691 {
4692  if (m_mapOfTrackers.size() == 2) {
4693  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4694  it->second->setNearClippingDistance(dist1);
4695 
4696  ++it;
4697 
4698  it->second->setNearClippingDistance(dist2);
4699 
4701  if (it != m_mapOfTrackers.end()) {
4702  distNearClip = it->second->getNearClippingDistance();
4703  }
4704  else {
4705  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4706  }
4707  }
4708  else {
4709  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4710  m_mapOfTrackers.size());
4711  }
4712 }
4713 
4719 void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4720 {
4721  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4722  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4723 
4724  if (it_tracker != m_mapOfTrackers.end()) {
4725  TrackerWrapper *tracker = it_tracker->second;
4726  tracker->setNearClippingDistance(it->second);
4727 
4728  if (it->first == m_referenceCameraName) {
4729  distNearClip = it->second;
4730  }
4731  }
4732  }
4733 }
4734 
4748 {
4749  vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4750 
4751  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4752  it != m_mapOfTrackers.end(); ++it) {
4753  TrackerWrapper *tracker = it->second;
4754  tracker->setOgreShowConfigDialog(showConfigDialog);
4755  }
4756 }
4757 
4769 {
4771 
4772  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4773  it != m_mapOfTrackers.end(); ++it) {
4774  TrackerWrapper *tracker = it->second;
4775  tracker->setOgreVisibilityTest(v);
4776  }
4777 
4778 #ifdef VISP_HAVE_OGRE
4779  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4780  it != m_mapOfTrackers.end(); ++it) {
4781  TrackerWrapper *tracker = it->second;
4782  tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4783  }
4784 #endif
4785 }
4786 
4795 {
4797 
4798  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4799  it != m_mapOfTrackers.end(); ++it) {
4800  TrackerWrapper *tracker = it->second;
4801  tracker->setOptimizationMethod(opt);
4802  }
4803 }
4804 
4818 {
4819  if (m_mapOfTrackers.size() > 1) {
4820  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4821  "to be configured with only one camera!");
4822  }
4823 
4824  m_cMo = cdMo;
4825 
4826  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4827  if (it != m_mapOfTrackers.end()) {
4828  TrackerWrapper *tracker = it->second;
4829  tracker->setPose(I, cdMo);
4830  }
4831  else {
4832  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4833  m_referenceCameraName.c_str());
4834  }
4835 }
4836 
4850 {
4851  if (m_mapOfTrackers.size() > 1) {
4852  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4853  "to be configured with only one camera!");
4854  }
4855 
4856  m_cMo = cdMo;
4857 
4858  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4859  if (it != m_mapOfTrackers.end()) {
4860  TrackerWrapper *tracker = it->second;
4861  vpImageConvert::convert(I_color, m_I);
4862  tracker->setPose(m_I, cdMo);
4863  }
4864  else {
4865  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4866  m_referenceCameraName.c_str());
4867  }
4868 }
4869 
4882  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4883 {
4884  if (m_mapOfTrackers.size() == 2) {
4885  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4886  it->second->setPose(I1, c1Mo);
4887 
4888  ++it;
4889 
4890  it->second->setPose(I2, c2Mo);
4891 
4893  if (it != m_mapOfTrackers.end()) {
4894  // Set reference pose
4895  it->second->getPose(m_cMo);
4896  }
4897  else {
4898  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4899  m_referenceCameraName.c_str());
4900  }
4901  }
4902  else {
4903  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4904  m_mapOfTrackers.size());
4905  }
4906 }
4907 
4919 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
4920  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4921 {
4922  if (m_mapOfTrackers.size() == 2) {
4923  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4924  it->second->setPose(I_color1, c1Mo);
4925 
4926  ++it;
4927 
4928  it->second->setPose(I_color2, c2Mo);
4929 
4931  if (it != m_mapOfTrackers.end()) {
4932  // Set reference pose
4933  it->second->getPose(m_cMo);
4934  }
4935  else {
4936  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4937  m_referenceCameraName.c_str());
4938  }
4939  }
4940  else {
4941  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4942  m_mapOfTrackers.size());
4943  }
4944 }
4945 
4961 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4962  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4963 {
4964  // Set the reference cMo
4965  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4966  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4967  mapOfImages.find(m_referenceCameraName);
4968  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4969 
4970  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4971  TrackerWrapper *tracker = it_tracker->second;
4972  tracker->setPose(*it_img->second, it_camPose->second);
4973  tracker->getPose(m_cMo);
4974  }
4975  else {
4976  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4977  }
4978 
4979  // Vector of missing pose matrices for cameras
4980  std::vector<std::string> vectorOfMissingCameraPoses;
4981 
4982  // Set pose for the specified cameras
4983  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4984  if (it_tracker->first != m_referenceCameraName) {
4985  it_img = mapOfImages.find(it_tracker->first);
4986  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4987 
4988  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4989  // Set pose
4990  TrackerWrapper *tracker = it_tracker->second;
4991  tracker->setPose(*it_img->second, it_camPose->second);
4992  }
4993  else {
4994  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4995  }
4996  }
4997  }
4998 
4999  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5000  it != vectorOfMissingCameraPoses.end(); ++it) {
5001  it_img = mapOfImages.find(*it);
5002  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5004 
5005  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5006  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5007  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5008  }
5009  else {
5011  "Missing image or missing camera transformation "
5012  "matrix! Cannot set pose for camera: %s!",
5013  it->c_str());
5014  }
5015  }
5016 }
5017 
5033 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5034  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
5035 {
5036  // Set the reference cMo
5037  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
5038  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
5039  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
5040 
5041  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5042  TrackerWrapper *tracker = it_tracker->second;
5043  tracker->setPose(*it_img->second, it_camPose->second);
5044  tracker->getPose(m_cMo);
5045  }
5046  else {
5047  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
5048  }
5049 
5050  // Vector of missing pose matrices for cameras
5051  std::vector<std::string> vectorOfMissingCameraPoses;
5052 
5053  // Set pose for the specified cameras
5054  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
5055  if (it_tracker->first != m_referenceCameraName) {
5056  it_img = mapOfColorImages.find(it_tracker->first);
5057  it_camPose = mapOfCameraPoses.find(it_tracker->first);
5058 
5059  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5060  // Set pose
5061  TrackerWrapper *tracker = it_tracker->second;
5062  tracker->setPose(*it_img->second, it_camPose->second);
5063  }
5064  else {
5065  vectorOfMissingCameraPoses.push_back(it_tracker->first);
5066  }
5067  }
5068  }
5069 
5070  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5071  it != vectorOfMissingCameraPoses.end(); ++it) {
5072  it_img = mapOfColorImages.find(*it);
5073  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5075 
5076  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5077  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5078  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5079  }
5080  else {
5082  "Missing image or missing camera transformation "
5083  "matrix! Cannot set pose for camera: %s!",
5084  it->c_str());
5085  }
5086  }
5087 }
5088 
5104 {
5106 
5107  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5108  it != m_mapOfTrackers.end(); ++it) {
5109  TrackerWrapper *tracker = it->second;
5110  tracker->setProjectionErrorComputation(flag);
5111  }
5112 }
5113 
5118 {
5120 
5121  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5122  it != m_mapOfTrackers.end(); ++it) {
5123  TrackerWrapper *tracker = it->second;
5124  tracker->setProjectionErrorDisplay(display);
5125  }
5126 }
5127 
5132 {
5134 
5135  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5136  it != m_mapOfTrackers.end(); ++it) {
5137  TrackerWrapper *tracker = it->second;
5138  tracker->setProjectionErrorDisplayArrowLength(length);
5139  }
5140 }
5141 
5143 {
5145 
5146  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5147  it != m_mapOfTrackers.end(); ++it) {
5148  TrackerWrapper *tracker = it->second;
5149  tracker->setProjectionErrorDisplayArrowThickness(thickness);
5150  }
5151 }
5152 
5158 void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
5159 {
5160  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
5161  if (it != m_mapOfTrackers.end()) {
5162  m_referenceCameraName = referenceCameraName;
5163  }
5164  else {
5165  std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
5166  }
5167 }
5168 
5170 {
5172 
5173  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5174  it != m_mapOfTrackers.end(); ++it) {
5175  TrackerWrapper *tracker = it->second;
5176  tracker->setScanLineVisibilityTest(v);
5177  }
5178 }
5179 
5192 {
5193  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5194  it != m_mapOfTrackers.end(); ++it) {
5195  TrackerWrapper *tracker = it->second;
5196  tracker->setTrackerType(type);
5197  }
5198 }
5199 
5209 void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
5210 {
5211  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
5212  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
5213  if (it_tracker != m_mapOfTrackers.end()) {
5214  TrackerWrapper *tracker = it_tracker->second;
5215  tracker->setTrackerType(it->second);
5216  }
5217  }
5218 }
5219 
5229 void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
5230 {
5231  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5232  it != m_mapOfTrackers.end(); ++it) {
5233  TrackerWrapper *tracker = it->second;
5234  tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
5235  }
5236 }
5237 
5247 void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
5248 {
5249  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5250  it != m_mapOfTrackers.end(); ++it) {
5251  TrackerWrapper *tracker = it->second;
5252  tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
5253  }
5254 }
5255 
5265 void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
5266 {
5267  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5268  it != m_mapOfTrackers.end(); ++it) {
5269  TrackerWrapper *tracker = it->second;
5270  tracker->setUseEdgeTracking(name, useEdgeTracking);
5271  }
5272 }
5273 
5274 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5284 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
5285 {
5286  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5287  it != m_mapOfTrackers.end(); ++it) {
5288  TrackerWrapper *tracker = it->second;
5289  tracker->setUseKltTracking(name, useKltTracking);
5290  }
5291 }
5292 #endif
5293 
5295 {
5296  // Test tracking fails only if all testTracking have failed
5297  bool isOneTestTrackingOk = false;
5298  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5299  it != m_mapOfTrackers.end(); ++it) {
5300  TrackerWrapper *tracker = it->second;
5301  try {
5302  tracker->testTracking();
5303  isOneTestTrackingOk = true;
5304  }
5305  catch (const vpException &e) {
5306  std::cerr << "[" << it->first << "] " << e.what() << std::endl;
5307  }
5308  }
5309 
5310  if (!isOneTestTrackingOk) {
5311  std::ostringstream oss;
5312  oss << "Not enough moving edges to track the object. Try to reduce the "
5313  "threshold="
5314  << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
5316  }
5317 }
5318 
5329 {
5330  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5331  mapOfImages[m_referenceCameraName] = &I;
5332 
5333  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5334  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5335 
5336  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5337 }
5338 
5349 {
5350  std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5351  mapOfColorImages[m_referenceCameraName] = &I_color;
5352 
5353  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5354  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5355 
5356  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5357 }
5358 
5370 {
5371  if (m_mapOfTrackers.size() == 2) {
5372  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5373  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5374  mapOfImages[it->first] = &I1;
5375  ++it;
5376 
5377  mapOfImages[it->first] = &I2;
5378 
5379  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5380  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5381 
5382  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5383  }
5384  else {
5385  std::stringstream ss;
5386  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5387  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5388  }
5389 }
5390 
5401 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &_colorI2)
5402 {
5403  if (m_mapOfTrackers.size() == 2) {
5404  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5405  std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5406  mapOfImages[it->first] = &I_color1;
5407  ++it;
5408 
5409  mapOfImages[it->first] = &_colorI2;
5410 
5411  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5412  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5413 
5414  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5415  }
5416  else {
5417  std::stringstream ss;
5418  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5419  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5420  }
5421 }
5422 
5430 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5431 {
5432  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5433  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5434 
5435  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5436 }
5437 
5445 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5446 {
5447  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5448  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5449 
5450  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5451 }
5452 
5453 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
5462 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5463  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5464 {
5465  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5466  it != m_mapOfTrackers.end(); ++it) {
5467  TrackerWrapper *tracker = it->second;
5468 
5469  if ((tracker->m_trackerType & (EDGE_TRACKER |
5470 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5471  KLT_TRACKER |
5472 #endif
5474  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5475  }
5476 
5477  if (tracker->m_trackerType & (EDGE_TRACKER
5478 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5479  | KLT_TRACKER
5480 #endif
5481  ) &&
5482  mapOfImages[it->first] == nullptr) {
5483  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5484  }
5485 
5486  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5487  !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5488  throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
5489  }
5490  }
5491 
5492  preTracking(mapOfImages, mapOfPointClouds);
5493 
5494  try {
5495  computeVVS(mapOfImages);
5496  }
5497  catch (...) {
5498  covarianceMatrix = -1;
5499  throw; // throw the original exception
5500  }
5501 
5502  testTracking();
5503 
5504  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5505  it != m_mapOfTrackers.end(); ++it) {
5506  TrackerWrapper *tracker = it->second;
5507 
5508  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5509  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5510  }
5511 
5512  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5513 
5514  if (displayFeatures) {
5515 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5516  if (tracker->m_trackerType & KLT_TRACKER) {
5517  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5518  }
5519 #endif
5520 
5521  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5522  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5523  }
5524  }
5525  }
5526 
5528 }
5529 
5538 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5539  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5540 {
5541  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5542  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5543  it != m_mapOfTrackers.end(); ++it) {
5544  TrackerWrapper *tracker = it->second;
5545 
5546  if ((tracker->m_trackerType & (EDGE_TRACKER |
5547 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5548  KLT_TRACKER |
5549 #endif
5551  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5552  }
5553 
5554  if (tracker->m_trackerType & (EDGE_TRACKER
5555 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5556  | KLT_TRACKER
5557 #endif
5558  ) &&
5559  mapOfImages[it->first] == nullptr) {
5560  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5561  }
5562  else if (tracker->m_trackerType & (EDGE_TRACKER
5563 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5564  | KLT_TRACKER
5565 #endif
5566  ) &&
5567  mapOfImages[it->first] != nullptr) {
5568  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5569  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5570  }
5571 
5572  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5573  !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5574  throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
5575  }
5576  }
5577 
5578  preTracking(mapOfImages, mapOfPointClouds);
5579 
5580  try {
5581  computeVVS(mapOfImages);
5582  }
5583  catch (...) {
5584  covarianceMatrix = -1;
5585  throw; // throw the original exception
5586  }
5587 
5588  testTracking();
5589 
5590  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5591  it != m_mapOfTrackers.end(); ++it) {
5592  TrackerWrapper *tracker = it->second;
5593 
5594  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5595  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5596  }
5597 
5598  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5599 
5600  if (displayFeatures) {
5601 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5602  if (tracker->m_trackerType & KLT_TRACKER) {
5603  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5604  }
5605 #endif
5606 
5607  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5608  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5609  }
5610  }
5611  }
5612 
5614 }
5615 #endif
5616 
5627 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5628  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5629  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5630  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5631 {
5632  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5633  it != m_mapOfTrackers.end(); ++it) {
5634  TrackerWrapper *tracker = it->second;
5635 
5636  if ((tracker->m_trackerType & (EDGE_TRACKER |
5637 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5638  KLT_TRACKER |
5639 #endif
5641  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5642  }
5643 
5644  if (tracker->m_trackerType & (EDGE_TRACKER
5645 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5646  | KLT_TRACKER
5647 #endif
5648  ) &&
5649  mapOfImages[it->first] == nullptr) {
5650  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5651  }
5652 
5653  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5654  (mapOfPointClouds[it->first] == nullptr)) {
5655  throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5656  }
5657  }
5658 
5659  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5660 
5661  try {
5662  computeVVS(mapOfImages);
5663  }
5664  catch (...) {
5665  covarianceMatrix = -1;
5666  throw; // throw the original exception
5667  }
5668 
5669  testTracking();
5670 
5671  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5672  it != m_mapOfTrackers.end(); ++it) {
5673  TrackerWrapper *tracker = it->second;
5674 
5675  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5676  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5677  }
5678 
5679  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5680 
5681  if (displayFeatures) {
5682 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5683  if (tracker->m_trackerType & KLT_TRACKER) {
5684  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5685  }
5686 #endif
5687 
5688  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5689  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5690  }
5691  }
5692  }
5693 
5695 }
5696 
5707 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5708  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5709  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5710  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5711 {
5712  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5713  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5714  it != m_mapOfTrackers.end(); ++it) {
5715  TrackerWrapper *tracker = it->second;
5716 
5717  if ((tracker->m_trackerType & (EDGE_TRACKER |
5718 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5719  KLT_TRACKER |
5720 #endif
5722  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5723  }
5724 
5725  if (tracker->m_trackerType & (EDGE_TRACKER
5726 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5727  | KLT_TRACKER
5728 #endif
5729  ) &&
5730  mapOfColorImages[it->first] == nullptr) {
5731  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5732  }
5733  else if (tracker->m_trackerType & (EDGE_TRACKER
5734 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5735  | KLT_TRACKER
5736 #endif
5737  ) &&
5738  mapOfColorImages[it->first] != nullptr) {
5739  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5740  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5741  }
5742 
5743  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5744  (mapOfPointClouds[it->first] == nullptr)) {
5745  throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5746  }
5747  }
5748 
5749  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5750 
5751  try {
5752  computeVVS(mapOfImages);
5753  }
5754  catch (...) {
5755  covarianceMatrix = -1;
5756  throw; // throw the original exception
5757  }
5758 
5759  testTracking();
5760 
5761  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5762  it != m_mapOfTrackers.end(); ++it) {
5763  TrackerWrapper *tracker = it->second;
5764 
5765  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5766  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5767  }
5768 
5769  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5770 
5771  if (displayFeatures) {
5772 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5773  if (tracker->m_trackerType & KLT_TRACKER) {
5774  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5775  }
5776 #endif
5777 
5778  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5779  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5780  }
5781  }
5782  }
5783 
5785 }
5786 
5787 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5788  std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5789  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5790  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5791 {
5792  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5793  it != m_mapOfTrackers.end(); ++it) {
5794  TrackerWrapper *tracker = it->second;
5795 
5796  if ((tracker->m_trackerType & (EDGE_TRACKER |
5797 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5798  KLT_TRACKER |
5799 #endif
5801  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5802  }
5803 
5804  if (tracker->m_trackerType & (EDGE_TRACKER
5805 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5806  | KLT_TRACKER
5807 #endif
5808  ) &&
5809  mapOfImages[it->first] == nullptr) {
5810  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5811  }
5812 
5813  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5814  (mapOfPointClouds[it->first] == nullptr)) {
5815  throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5816  }
5817  }
5818 
5819  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5820 
5821  try {
5822  computeVVS(mapOfImages);
5823  }
5824  catch (...) {
5825  covarianceMatrix = -1;
5826  throw; // throw the original exception
5827  }
5828 
5829  testTracking();
5830 
5831  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5832  it != m_mapOfTrackers.end(); ++it) {
5833  TrackerWrapper *tracker = it->second;
5834 
5835  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5836  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5837  }
5838 
5839  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5840 
5841  if (displayFeatures) {
5842 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5843  if (tracker->m_trackerType & KLT_TRACKER) {
5844  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5845  }
5846 #endif
5847 
5848  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5849  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5850  }
5851  }
5852  }
5853 
5855 }
5856 
5867 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5868  std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5869  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5870  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5871 {
5872  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5873  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5874  it != m_mapOfTrackers.end(); ++it) {
5875  TrackerWrapper *tracker = it->second;
5876 
5877  if ((tracker->m_trackerType & (EDGE_TRACKER |
5878 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5879  KLT_TRACKER |
5880 #endif
5882  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5883  }
5884 
5885  if (tracker->m_trackerType & (EDGE_TRACKER
5886 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5887  | KLT_TRACKER
5888 #endif
5889  ) &&
5890  mapOfColorImages[it->first] == nullptr) {
5891  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5892  }
5893  else if (tracker->m_trackerType & (EDGE_TRACKER
5894 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5895  | KLT_TRACKER
5896 #endif
5897  ) &&
5898  mapOfColorImages[it->first] != nullptr) {
5899  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5900  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5901  }
5902 
5903  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5904  (mapOfPointClouds[it->first] == nullptr)) {
5905  throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5906  }
5907  }
5908 
5909  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5910 
5911  try {
5912  computeVVS(mapOfImages);
5913  }
5914  catch (...) {
5915  covarianceMatrix = -1;
5916  throw; // throw the original exception
5917  }
5918 
5919  testTracking();
5920 
5921  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5922  it != m_mapOfTrackers.end(); ++it) {
5923  TrackerWrapper *tracker = it->second;
5924 
5925  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5926  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5927  }
5928 
5929  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5930 
5931  if (displayFeatures) {
5932 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5933  if (tracker->m_trackerType & KLT_TRACKER) {
5934  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5935  }
5936 #endif
5937 
5938  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5939  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5940  }
5941  }
5942  }
5943 
5945 }
5946 
//! Default constructor: the wrapper behaves as an edge tracker (EDGE_TRACKER).
vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
{
  m_lambda = 1.0;  // default VVS gain — inherited member, cannot go in the init list
  m_maxIter = 30;  // upper bound on the VVS iterations (see the computeVVS loop)

#ifdef VISP_HAVE_OGRE
  // Name the Ogre windows used for the (optional) Ogre-based visibility tests
  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");

  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
#endif
}
5960 
5961 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
5962  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5963 {
5964  if ((m_trackerType & (EDGE_TRACKER |
5965 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5966  KLT_TRACKER |
5967 #endif
5969  throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
5970  }
5971 
5972  m_lambda = 1.0;
5973  m_maxIter = 30;
5974 
5975 #ifdef VISP_HAVE_OGRE
5976  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5977 
5978  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5979 #endif
5980 }
5981 
5982 // Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
5984 {
5985  computeVVSInit(ptr_I);
5986 
5987  if (m_error.getRows() < 4) {
5988  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
5989  }
5990 
5991  double normRes = 0;
5992  double normRes_1 = -1;
5993  unsigned int iter = 0;
5994 
5995  double factorEdge = 1.0;
5996 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5997  double factorKlt = 1.0;
5998 #endif
5999  double factorDepth = 1.0;
6000  double factorDepthDense = 1.0;
6001 
6002  vpMatrix LTL;
6003  vpColVector LTR, v;
6004  vpColVector error_prev;
6005 
6006  double mu = m_initialMu;
6007  vpHomogeneousMatrix cMo_prev;
6008 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6009  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
6010 #endif
6011  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
6012  if (isoJoIdentity)
6013  oJo.eye();
6014 
6015  // Covariance
6016  vpColVector W_true(m_error.getRows());
6017  vpMatrix L_true, LVJ_true;
6018 
6019  unsigned int nb_edge_features = m_error_edge.getRows();
6020 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6021  unsigned int nb_klt_features = m_error_klt.getRows();
6022 #endif
6023  unsigned int nb_depth_features = m_error_depthNormal.getRows();
6024  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
6025 
6026  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
6027  computeVVSInteractionMatrixAndResidu(ptr_I);
6028 
6029  bool reStartFromLastIncrement = false;
6030  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
6031 
6032 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6033  if (reStartFromLastIncrement) {
6034  if (m_trackerType & KLT_TRACKER) {
6035  ctTc0 = ctTc0_Prev;
6036  }
6037  }
6038 #endif
6039 
6040  if (!reStartFromLastIncrement) {
6041  computeVVSWeights();
6042 
6043  if (computeCovariance) {
6044  L_true = m_L;
6045  if (!isoJoIdentity) {
6047  cVo.buildFrom(m_cMo);
6048  LVJ_true = (m_L * cVo * oJo);
6049  }
6050  }
6051 
6053  if (iter == 0) {
6054  // If all the 6 dof should be estimated, we check if the interaction
6055  // matrix is full rank. If not we remove automatically the dof that
6056  // cannot be estimated. This is particularly useful when considering
6057  // circles (rank 5) and cylinders (rank 4)
6058  if (isoJoIdentity) {
6059  cVo.buildFrom(m_cMo);
6060 
6061  vpMatrix K; // kernel
6062  unsigned int rank = (m_L * cVo).kernel(K);
6063  if (rank == 0) {
6064  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
6065  }
6066 
6067  if (rank != 6) {
6068  vpMatrix I; // Identity
6069  I.eye(6);
6070  oJo = I - K.AtA();
6071 
6072  isoJoIdentity = false;
6073  }
6074  }
6075  }
6076 
6077  // Weighting
6078  double num = 0;
6079  double den = 0;
6080 
6081  unsigned int start_index = 0;
6082  if (m_trackerType & EDGE_TRACKER) {
6083  for (unsigned int i = 0; i < nb_edge_features; i++) {
6084  double wi = m_w_edge[i] * m_factor[i] * factorEdge;
6085  W_true[i] = wi;
6086  m_weightedError[i] = wi * m_error[i];
6087 
6088  num += wi * vpMath::sqr(m_error[i]);
6089  den += wi;
6090 
6091  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6092  m_L[i][j] *= wi;
6093  }
6094  }
6095 
6096  start_index += nb_edge_features;
6097  }
6098 
6099 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6100  if (m_trackerType & KLT_TRACKER) {
6101  for (unsigned int i = 0; i < nb_klt_features; i++) {
6102  double wi = m_w_klt[i] * factorKlt;
6103  W_true[start_index + i] = wi;
6104  m_weightedError[start_index + i] = wi * m_error_klt[i];
6105 
6106  num += wi * vpMath::sqr(m_error[start_index + i]);
6107  den += wi;
6108 
6109  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6110  m_L[start_index + i][j] *= wi;
6111  }
6112  }
6113 
6114  start_index += nb_klt_features;
6115  }
6116 #endif
6117 
6118  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6119  for (unsigned int i = 0; i < nb_depth_features; i++) {
6120  double wi = m_w_depthNormal[i] * factorDepth;
6121  m_w[start_index + i] = m_w_depthNormal[i];
6122  m_weightedError[start_index + i] = wi * m_error[start_index + i];
6123 
6124  num += wi * vpMath::sqr(m_error[start_index + i]);
6125  den += wi;
6126 
6127  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6128  m_L[start_index + i][j] *= wi;
6129  }
6130  }
6131 
6132  start_index += nb_depth_features;
6133  }
6134 
6135  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6136  for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
6137  double wi = m_w_depthDense[i] * factorDepthDense;
6138  m_w[start_index + i] = m_w_depthDense[i];
6139  m_weightedError[start_index + i] = wi * m_error[start_index + i];
6140 
6141  num += wi * vpMath::sqr(m_error[start_index + i]);
6142  den += wi;
6143 
6144  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6145  m_L[start_index + i][j] *= wi;
6146  }
6147  }
6148 
6149  // start_index += nb_depth_dense_features;
6150  }
6151 
6152  computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
6153 
6154  cMo_prev = m_cMo;
6155 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6156  if (m_trackerType & KLT_TRACKER) {
6157  ctTc0_Prev = ctTc0;
6158  }
6159 #endif
6160 
6161  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
6162 
6163 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6164  if (m_trackerType & KLT_TRACKER) {
6165  ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
6166  }
6167 #endif
6168  normRes_1 = normRes;
6169 
6170  normRes = sqrt(num / den);
6171  }
6172 
6173  iter++;
6174  }
6175 
6176  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
6177 
6178  if (m_trackerType & EDGE_TRACKER) {
6180  }
6181 }
6182 
6183 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
6184 {
6185  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6186  "TrackerWrapper::computeVVSInit("
6187  ") should not be called!");
6188 }
6189 
/*
 * Size and reset the aggregated VVS containers (interaction matrix, error,
 * weights) according to which tracker types are enabled, after refreshing
 * the moving-edge data for the current image.
 *
 * \param ptr_I : Current grayscale image used by the edge/KLT trackers.
 */
void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
{
  // Refresh the moving-edge related data before counting feature rows.
  initMbtTracking(ptr_I);

  // Total number of feature rows stacked over every enabled tracker type.
  unsigned int nbFeatures = 0;

  if (m_trackerType & EDGE_TRACKER) {
    nbFeatures += m_error_edge.getRows();
  }
  else {
    // Edge tracking disabled: empty the per-feature containers so stale
    // values cannot be stacked into the aggregated matrices below.
    m_error_edge.clear();
    m_weightedError_edge.clear();
    m_L_edge.clear();
    m_w_edge.clear();
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    vpMbKltTracker::computeVVSInit();
    nbFeatures += m_error_klt.getRows();
  }
  else {
    m_error_klt.clear();
    m_weightedError_klt.clear();
    m_L_klt.clear();
    m_w_klt.clear();
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    // NOTE(review): unlike the KLT branch above, no
    // vpMbDepthNormalTracker::computeVVSInit() call is visible here —
    // confirm m_error_depthNormal is sized before its rows are counted.
    nbFeatures += m_error_depthNormal.getRows();
  }
  else {
    m_error_depthNormal.clear();
    m_weightedError_depthNormal.clear();
    m_L_depthNormal.clear();
    m_w_depthNormal.clear();
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    // NOTE(review): no vpMbDepthDenseTracker::computeVVSInit() call is
    // visible here — confirm m_error_depthDense is sized beforehand.
    nbFeatures += m_error_depthDense.getRows();
  }
  else {
    m_error_depthDense.clear();
    m_weightedError_depthDense.clear();
    m_L_depthDense.clear();
    m_w_depthDense.clear();
  }

  // Stacked interaction matrix (6 pose dof) and error/weight vectors;
  // weights default to 1 (no robust down-weighting yet).
  m_L.resize(nbFeatures, 6, false, false);
  m_error.resize(nbFeatures, false);

  m_weightedError.resize(nbFeatures, false);
  m_w.resize(nbFeatures, false);
  m_w = 1;
}
6248 
{
  // Guard stub: the parameter-less overload must never be reached; the
  // generic tracker always dispatches to the image-aware overload.
  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
                                             "TrackerWrapper::"
                                             "computeVVSInteractionMatrixAndR"
                                             "esidu() should not be called!");
}
6256 
{
  // Compute each enabled tracker's interaction matrix / residual, then stack
  // them into the aggregated m_L / m_error in a fixed order (edge, KLT,
  // depth normal, depth dense — same order as computeVVSInit()).
  if (m_trackerType & EDGE_TRACKER) {
    // NOTE(review): the edge-tracker residual computation is elided in this
    // view — verify m_L_edge / m_error_edge are refreshed before stacking.
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    vpMbKltTracker::computeVVSInteractionMatrixAndResidu();
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    // NOTE(review): depth-normal residual computation elided in this view.
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    // NOTE(review): depth-dense residual computation elided in this view.
  }

  // Row offset of the current tracker's segment in the stacked containers.
  unsigned int start_index = 0;
  if (m_trackerType & EDGE_TRACKER) {
    m_L.insert(m_L_edge, start_index, 0);
    m_error.insert(start_index, m_error_edge);

    start_index += m_error_edge.getRows();
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    m_L.insert(m_L_klt, start_index, 0);
    m_error.insert(start_index, m_error_klt);

    start_index += m_error_klt.getRows();
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    m_L.insert(m_L_depthNormal, start_index, 0);
    m_error.insert(start_index, m_error_depthNormal);

    start_index += m_error_depthNormal.getRows();
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    m_L.insert(m_L_depthDense, start_index, 0);
    m_error.insert(start_index, m_error_depthDense);

    // start_index += m_error_depthDense.getRows();
  }
}
6308 
{
  // Update per-tracker robust weights and stack them into m_w, segment by
  // segment, in the same order as the stacked error vector.
  unsigned int start_index = 0;

  if (m_trackerType & EDGE_TRACKER) {
    // NOTE(review): the edge weight computation is elided in this view —
    // only the insertion into the stacked vector is visible.
    m_w.insert(start_index, m_w_edge);

    start_index += m_w_edge.getRows();
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    // Robust M-estimator weights on the KLT residuals.
    vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
    m_w.insert(start_index, m_w_klt);

    start_index += m_w_klt.getRows();
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    // Depth-normal weights are only refreshed when robust estimation is
    // enabled; otherwise the previous values of this m_w segment are kept.
    if (m_depthNormalUseRobust) {
      vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
      m_w.insert(start_index, m_w_depthNormal);
    }

    start_index += m_w_depthNormal.getRows();
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    // NOTE(review): the depth-dense weight computation is elided in this
    // view — only the insertion is visible.
    m_w.insert(start_index, m_w_depthDense);

    // start_index += m_w_depthDense.getRows();
  }
}
6345 
/*
 * Draw the tracked features (if enabled), then the model primitives, and
 * finally the Ogre visibility overlay when available.
 *
 * \param I : Grayscale image to draw into.
 * \param cMo : Pose used to project the model.
 * \param cam : Camera intrinsics.
 * \param col : Color of the model primitives.
 * \param thickness : Drawing thickness.
 * \param displayFullModel : When true, also draw non-visible parts.
 */
void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
                                                 const vpCameraParameters &cam, const vpColor &col,
                                                 unsigned int thickness, bool displayFullModel)
{
  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplay();
    for (size_t i = 0; i < features.size(); i++) {
      if (vpMath::equal(features[i][0], 0)) {
        // Row type 0: moving-edge site; the cross color encodes its state.
        vpImagePoint ip(features[i][1], features[i][2]);
        int state = static_cast<int>(features[i][3]);

        switch (state) {
          // NOTE(review): the first case label(s) are elided in this view.
          break;

        case vpMeSite::CONTRAST:
          vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
          break;

        case vpMeSite::THRESHOLD:
          // NOTE(review): drawing statement elided in this view.
          break;

        case vpMeSite::M_ESTIMATOR:
          vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
          break;

        case vpMeSite::TOO_NEAR:
          vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
          break;

        default:
          // NOTE(review): default drawing statement elided in this view.
        }
      }
      else if (vpMath::equal(features[i][0], 1)) {
        // Row type 1: KLT point, with its id displayed next to it.
        vpImagePoint ip1(features[i][1], features[i][2]);
        // NOTE(review): drawing of ip1 elided in this view.

        vpImagePoint ip2(features[i][3], features[i][4]);
        double id = features[i][5];
        std::stringstream ss;
        ss << id;
        vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
      }
      else if (vpMath::equal(features[i][0], 2)) {
        // Row type 2: depth-normal feature drawn as an arrow from the face
        // centroid toward the normal extremity.
        vpImagePoint im_centroid(features[i][1], features[i][2]);
        vpImagePoint im_extremity(features[i][3], features[i][4]);
        bool desired = vpMath::equal(features[i][0], 2);
        vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
      }
    }
  }

  // Model primitives: rows starting with 0 are line segments, rows starting
  // with 1 are ellipses described by their normalized centered moments.
  std::vector<std::vector<double> > models =
    getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
    else if (vpMath::equal(models[i][0], 1)) {
      vpImagePoint center(models[i][1], models[i][2]);
      double n20 = models[i][3];
      double n11 = models[i][4];
      double n02 = models[i][5];
      vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
    }
  }

#ifdef VISP_HAVE_OGRE
  // The Ogre overlay only applies to the appearance-based trackers.
  if ((m_trackerType & EDGE_TRACKER)
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      || (m_trackerType & KLT_TRACKER)
#endif
      ) {
    if (useOgre)
      faces.displayOgre(cMo);
  }
#endif
}
6429 
/*
 * Color-image overload of display(): draw the tracked features (if enabled),
 * then the model primitives, and finally the Ogre overlay when available.
 *
 * \param I : Color image to draw into.
 * \param cMo : Pose used to project the model.
 * \param cam : Camera intrinsics.
 * \param col : Color of the model primitives.
 * \param thickness : Drawing thickness.
 * \param displayFullModel : When true, also draw non-visible parts.
 */
void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
                                                 const vpCameraParameters &cam, const vpColor &col,
                                                 unsigned int thickness, bool displayFullModel)
{
  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplay();
    for (size_t i = 0; i < features.size(); i++) {
      if (vpMath::equal(features[i][0], 0)) {
        // Row type 0: moving-edge site; the cross color encodes its state.
        vpImagePoint ip(features[i][1], features[i][2]);
        int state = static_cast<int>(features[i][3]);

        switch (state) {
          // NOTE(review): the first case label(s) are elided in this view.
          break;

        case vpMeSite::CONTRAST:
          vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
          break;

        case vpMeSite::THRESHOLD:
          // NOTE(review): drawing statement elided in this view.
          break;

        case vpMeSite::M_ESTIMATOR:
          vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
          break;

        case vpMeSite::TOO_NEAR:
          vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
          break;

        default:
          // NOTE(review): default drawing statement elided in this view.
        }
      }
      else if (vpMath::equal(features[i][0], 1)) {
        // Row type 1: KLT point, with its id displayed next to it.
        vpImagePoint ip1(features[i][1], features[i][2]);
        // NOTE(review): drawing of ip1 elided in this view.

        vpImagePoint ip2(features[i][3], features[i][4]);
        double id = features[i][5];
        std::stringstream ss;
        ss << id;
        vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
      }
      else if (vpMath::equal(features[i][0], 2)) {
        // Row type 2: depth-normal feature drawn as an arrow from the face
        // centroid toward the normal extremity.
        vpImagePoint im_centroid(features[i][1], features[i][2]);
        vpImagePoint im_extremity(features[i][3], features[i][4]);
        bool desired = vpMath::equal(features[i][0], 2);
        vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
      }
    }
  }

  // Model primitives: rows starting with 0 are line segments, rows starting
  // with 1 are ellipses described by their normalized centered moments.
  std::vector<std::vector<double> > models =
    getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
    else if (vpMath::equal(models[i][0], 1)) {
      vpImagePoint center(models[i][1], models[i][2]);
      double n20 = models[i][3];
      double n11 = models[i][4];
      double n02 = models[i][5];
      vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
    }
  }

#ifdef VISP_HAVE_OGRE
  // The Ogre overlay only applies to the appearance-based trackers.
  if ((m_trackerType & EDGE_TRACKER)
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      || (m_trackerType & KLT_TRACKER)
#endif
      ) {
    if (useOgre)
      faces.displayOgre(cMo);
  }
#endif
}
6513 
6514 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6515 {
6516  std::vector<std::vector<double> > features;
6517 
6518  if (m_trackerType & EDGE_TRACKER) {
6519  // m_featuresToBeDisplayedEdge updated after computeVVS()
6520  features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6521  }
6522 
6523 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6524  if (m_trackerType & KLT_TRACKER) {
6525  // m_featuresToBeDisplayedKlt updated after postTracking()
6526  features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6527  }
6528 #endif
6529 
6530  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6531  // m_featuresToBeDisplayedDepthNormal updated after postTracking()
6532  features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6533  m_featuresToBeDisplayedDepthNormal.end());
6534  }
6535 
6536  return features;
6537 }
6538 
6539 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width,
6540  unsigned int height,
6541  const vpHomogeneousMatrix &cMo,
6542  const vpCameraParameters &cam,
6543  bool displayFullModel)
6544 {
6545  std::vector<std::vector<double> > models;
6546 
6547  // Do not add multiple times the same models
6548  if (m_trackerType == EDGE_TRACKER) {
6549  models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6550  }
6551 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6552  else if (m_trackerType == KLT_TRACKER) {
6553  models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6554  }
6555 #endif
6556  else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6557  models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6558  }
6559  else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6560  models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6561  }
6562  else {
6563  // Edge and KLT trackers use the same primitives
6564  if (m_trackerType & EDGE_TRACKER) {
6565  std::vector<std::vector<double> > edgeModels =
6566  vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6567  models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6568  }
6569 
6570  // Depth dense and depth normal trackers use the same primitives
6571  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6572  std::vector<std::vector<double> > depthDenseModels =
6573  vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6574  models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6575  }
6576  }
6577 
6578  return models;
6579 }
6580 
/*
 * (Re-)initialize face visibility and per-tracker structures for the current
 * pose m_cMo. The CAD model must have been loaded beforehand.
 *
 * \param I : Current grayscale image (used for sizes and edge/KLT reinit).
 * \throws vpException::fatalError : If no model has been loaded.
 */
void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
{
  if (!modelInitialised) {
    throw vpException(vpException::fatalError, "model not initialized");
  }

  // FOV computation needs the image size; clippingFlag > 3 means at least
  // one FOV-related clipping flag is set.
  if (useScanLine || clippingFlag > 3)
    m_cam.computeFov(I.getWidth(), I.getHeight());

  bool reInitialisation = false;
  if (!useOgre) {
    faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
  }
  else {
#ifdef VISP_HAVE_OGRE
    // Lazy Ogre setup on first use.
    if (!faces.isOgreInitialised()) {
      faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());

      faces.setOgreShowConfigDialog(ogreShowConfigDialog);
      faces.initOgre(m_cam);
      // Turn off Ogre config dialog display for the next call to this
      // function since settings are saved in the ogre.cfg file and used
      // during the next call
      ogreShowConfigDialog = false;
    }

    faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
#else
    faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
#endif
  }

  if (useScanLine) {
    faces.computeClippedPolygons(m_cMo, m_cam);
    faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight());
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER)
    vpMbKltTracker::reinit(I);
#endif

  if (m_trackerType & EDGE_TRACKER) {
    // NOTE(review): an edge-tracker statement is elided in this view here.

    bool a = false;
    vpMbEdgeTracker::visibleFace(I, m_cMo, a); // should be useless, but keep it for nbvisiblepolygone

    // NOTE(review): another edge-tracker statement is elided in this view.
  }

  if (m_trackerType & DEPTH_NORMAL_TRACKER)
    vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);

  if (m_trackerType & DEPTH_DENSE_TRACKER)
    vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
}
6638 
6639 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6640  double radius, int idFace, const std::string &name)
6641 {
6642  if (m_trackerType & EDGE_TRACKER)
6643  vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6644 
6645 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6646  if (m_trackerType & KLT_TRACKER)
6647  vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6648 #endif
6649 }
6650 
6651 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace,
6652  const std::string &name)
6653 {
6654  if (m_trackerType & EDGE_TRACKER)
6655  vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6656 
6657 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6658  if (m_trackerType & KLT_TRACKER)
6659  vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6660 #endif
6661 }
6662 
/*
 * Forward a polygonal face (defined by its corners) to each enabled tracker.
 * NOTE(review): the statements under the EDGE_TRACKER, DEPTH_NORMAL_TRACKER
 * and DEPTH_DENSE_TRACKER tests are elided in this view; as written, each
 * bare `if` would bind to the next statement — verify against the reference
 * implementation before relying on this body.
 */
void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
{
  if (m_trackerType & EDGE_TRACKER)

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER)
    vpMbKltTracker::initFaceFromCorners(polygon);
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER)

  if (m_trackerType & DEPTH_DENSE_TRACKER)
}
6679 
/*
 * Forward a polygonal face (defined by its lines) to each enabled tracker.
 * NOTE(review): the statements under the EDGE_TRACKER, DEPTH_NORMAL_TRACKER
 * and DEPTH_DENSE_TRACKER tests are elided in this view; as written, each
 * bare `if` would bind to the next statement — verify against the reference
 * implementation before relying on this body.
 */
void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
{
  if (m_trackerType & EDGE_TRACKER)

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER)
    vpMbKltTracker::initFaceFromLines(polygon);
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER)

  if (m_trackerType & DEPTH_DENSE_TRACKER)
}
6696 
{
  // NOTE(review): the enclosing signature and the edge-tracker statement(s)
  // are elided in this view — behavior cannot be documented beyond the
  // visible EDGE_TRACKER test; confirm against the reference implementation.
  if (m_trackerType & EDGE_TRACKER) {
  }
}
6704 
/*
 * Load the tracker settings (camera, visibility angles, clipping, edge, KLT,
 * depth) from an XML configuration file and apply them to this wrapper.
 *
 * \param configFile : Path to the XML configuration file.
 * \param verbose : When true, print a banner listing the enabled trackers.
 * \throws vpException::ioError : If the file cannot be parsed, or when ViSP
 *         was built without the pugixml 3rd party.
 */
void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile, bool verbose)
{
#if defined(VISP_HAVE_PUGIXML)
  // Load projection error config
  vpMbTracker::loadConfigFile(configFile, verbose);

  // NOTE(review): the declaration of the XML parser `xmlp` is elided in this
  // view (presumably a vpMbtXmlGenericParser) — verify.
  xmlp.setVerbose(verbose);
  // Seed the parser with the current values so missing XML entries keep them.
  xmlp.setCameraParameters(m_cam);
  xmlp.setAngleAppear(vpMath::deg(angleAppears));
  xmlp.setAngleDisappear(vpMath::deg(angleDisappears));

  // Edge
  xmlp.setEdgeMe(me);

  // KLT: default values used when the XML file does not define them.
  xmlp.setKltMaxFeatures(10000);
  xmlp.setKltWindowSize(5);
  xmlp.setKltQuality(0.01);
  xmlp.setKltMinDistance(5);
  xmlp.setKltHarrisParam(0.01);
  xmlp.setKltBlockSize(3);
  xmlp.setKltPyramidLevels(3);
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  xmlp.setKltMaskBorder(maskBorder);
#endif

  // Depth normal
  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);

  // Depth dense
  xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
  xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);

  try {
    if (verbose) {
      std::cout << " *********** Parsing XML for";
    }

    // Names of the enabled trackers, used only for the verbose banner.
    std::vector<std::string> tracker_names;
    if (m_trackerType & EDGE_TRACKER)
      tracker_names.push_back("Edge");
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    if (m_trackerType & KLT_TRACKER)
      tracker_names.push_back("Klt");
#endif
    if (m_trackerType & DEPTH_NORMAL_TRACKER)
      tracker_names.push_back("Depth Normal");
    if (m_trackerType & DEPTH_DENSE_TRACKER)
      tracker_names.push_back("Depth Dense");

    if (verbose) {
      for (size_t i = 0; i < tracker_names.size(); i++) {
        std::cout << " " << tracker_names[i];
        if (i == tracker_names.size() - 1) {
          std::cout << " ";
        }
      }

      std::cout << "Model-Based Tracker ************ " << std::endl;
    }

    xmlp.parse(configFile);
  }
  catch (...) {
    // Re-wrap any parser failure into an ioError carrying the bad path.
    throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
  }

  // Apply the parsed values to this tracker.
  vpCameraParameters camera;
  xmlp.getCameraParameters(camera);
  setCameraParameters(camera);

  angleAppears = vpMath::rad(xmlp.getAngleAppear());
  angleDisappears = vpMath::rad(xmlp.getAngleDisappear());

  if (xmlp.hasNearClippingDistance())
    setNearClippingDistance(xmlp.getNearClippingDistance());

  if (xmlp.hasFarClippingDistance())
    setFarClippingDistance(xmlp.getFarClippingDistance());

  if (xmlp.getFovClipping()) {
    // NOTE(review): the statement enabling FOV clipping is elided in this
    // view — verify.
  }

  // Level-of-detail settings: only applied now if a model is already loaded;
  // otherwise they are stored and applied at model-loading time.
  useLodGeneral = xmlp.getLodState();
  minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
  minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();

  applyLodSettingInConfig = false;
  if (this->getNbPolygon() > 0) {
    applyLodSettingInConfig = true;
    setLod(useLodGeneral);
    setMinLineLengthThresh(minLineLengthThresholdGeneral);
    setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
  }

  // Edge
  vpMe meParser;
  xmlp.getEdgeMe(meParser);
  // NOTE(review): the statement applying meParser (presumably a
  // setMovingEdge(meParser) call) is elided in this view — verify.

  // KLT
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
  tracker.setWindowSize((int)xmlp.getKltWindowSize());
  tracker.setQuality(xmlp.getKltQuality());
  tracker.setMinDistance(xmlp.getKltMinDistance());
  tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
  tracker.setBlockSize((int)xmlp.getKltBlockSize());
  tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
  maskBorder = xmlp.getKltMaskBorder();

  // if(useScanLine)
  faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
#endif

  // Depth normal
  setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
  setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
  setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
  setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
  setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());

  // Depth dense
  setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
#else
  (void)configFile;
  (void)verbose;
  throw(vpException(vpException::ioError, "vpMbGenericTracker::TrackerWrapper::loadConfigFile() needs pugixml built-in 3rdparty"));
#endif
}
6842 
6843 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
/*
 * Post-processing after pose estimation (PCL point-cloud variant): update
 * the KLT features, refresh face visibility for every enabled tracker and
 * re-seed the moving edges for the next frame.
 *
 * \param ptr_I : Current grayscale image.
 * \param point_cloud : Organized point cloud (its width/height drive the
 *        depth-tracker visibility computation).
 */
void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
                                                      const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  // KLT: postTracking() returns true when the features must be re-extracted
  // from the current image.
  if (m_trackerType & KLT_TRACKER) {
    if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
      vpMbKltTracker::reinit(*ptr_I);
    }
  }
#endif

  // Looking for new visible face
  if (m_trackerType & EDGE_TRACKER) {
    bool newvisibleface = false;
    vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);

    if (useScanLine) {
      faces.computeClippedPolygons(m_cMo, m_cam);
      faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
    }
  }

  // Depth normal
  if (m_trackerType & DEPTH_NORMAL_TRACKER)
    vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);

  // Depth dense
  if (m_trackerType & DEPTH_DENSE_TRACKER)
    vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);

  // Edge
  if (m_trackerType & EDGE_TRACKER) {
    // NOTE(review): an edge-tracker statement is elided in this view here.

    vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
    // Reinit the moving edge for the lines which need it.
    vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);

    if (computeProjError) {
      // NOTE(review): projection-error computation elided in this view.
    }
  }
}
6888 
/*
 * Per-frame feature tracking stage (PCL point-cloud variant): run each
 * enabled tracker's low-level tracking. Every failure is logged to stderr
 * and re-thrown so the caller can attempt recovery.
 *
 * \param ptr_I : Current grayscale image.
 * \param point_cloud : Organized point cloud for the depth trackers.
 */
void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
                                                     const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  if (m_trackerType & EDGE_TRACKER) {
    try {
      // NOTE(review): the edge tracking call is elided in this view.
    }
    catch (...) {
      std::cerr << "Error in moving edge tracking" << std::endl;
      throw;
    }
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    try {
      vpMbKltTracker::preTracking(*ptr_I);
    }
    catch (const vpException &e) {
      std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
      throw;
    }
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    try {
      // NOTE(review): the depth-normal point-cloud segmentation call is
      // elided in this view.
    }
    catch (...) {
      std::cerr << "Error in Depth normal tracking" << std::endl;
      throw;
    }
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    try {
      // NOTE(review): the depth-dense point-cloud segmentation call is
      // elided in this view.
    }
    catch (...) {
      std::cerr << "Error in Depth dense tracking" << std::endl;
      throw;
    }
  }
}
6934 #endif
6935 
/*
 * Post-processing after pose estimation (raw point-cloud variant): update
 * the KLT features, refresh face visibility for every enabled tracker and
 * re-seed the moving edges for the next frame.
 *
 * \param ptr_I : Current grayscale image.
 * \param pointcloud_width, pointcloud_height : Resolution of the organized
 *        point cloud, used for the depth-tracker visibility computation.
 */
void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
                                                      const unsigned int pointcloud_width,
                                                      const unsigned int pointcloud_height)
{
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  // KLT: postTracking() returns true when the features must be re-extracted
  // from the current image.
  if (m_trackerType & KLT_TRACKER) {
    if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
      vpMbKltTracker::reinit(*ptr_I);
    }
  }
#endif

  // Looking for new visible face
  if (m_trackerType & EDGE_TRACKER) {
    bool newvisibleface = false;
    vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);

    if (useScanLine) {
      faces.computeClippedPolygons(m_cMo, m_cam);
      faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
    }
  }

  // Depth normal
  if (m_trackerType & DEPTH_NORMAL_TRACKER)
    vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);

  // Depth dense
  if (m_trackerType & DEPTH_DENSE_TRACKER)
    vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);

  // Edge
  if (m_trackerType & EDGE_TRACKER) {
    // NOTE(review): an edge-tracker statement is elided in this view here.

    vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
    // Reinit the moving edge for the lines which need it.
    vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);

    if (computeProjError) {
      // NOTE(review): projection-error computation elided in this view.
    }
  }
}
6981 
/*
 * Per-frame feature tracking stage (vpColVector point-cloud variant): run
 * each enabled tracker's low-level tracking. Failures are logged to stderr
 * and re-thrown.
 *
 * \param ptr_I : Current grayscale image.
 * \param point_cloud : 3D points, one vpColVector per pixel of the
 *        organized cloud.
 * \param pointcloud_width, pointcloud_height : Cloud resolution.
 */
void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
                                                     const std::vector<vpColVector> *const point_cloud,
                                                     const unsigned int pointcloud_width,
                                                     const unsigned int pointcloud_height)
{
  if (m_trackerType & EDGE_TRACKER) {
    try {
      // NOTE(review): the edge tracking call is elided in this view.
    }
    catch (...) {
      std::cerr << "Error in moving edge tracking" << std::endl;
      throw;
    }
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    try {
      vpMbKltTracker::preTracking(*ptr_I);
    }
    catch (const vpException &e) {
      std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
      throw;
    }
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    try {
      vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
    }
    catch (...) {
      std::cerr << "Error in Depth tracking" << std::endl;
      throw;
    }
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    try {
      vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
    }
    catch (...) {
      std::cerr << "Error in Depth dense tracking" << std::endl;
      throw;
    }
  }
}
7029 
/*
 * Per-frame feature tracking stage (vpMatrix point-cloud variant): run each
 * enabled tracker's low-level tracking. Failures are logged to stderr and
 * re-thrown.
 *
 * \param ptr_I : Current grayscale image.
 * \param point_cloud : 3D points stored in a matrix (organized cloud).
 * \param pointcloud_width, pointcloud_height : Cloud resolution.
 */
void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
                                                     const vpMatrix *const point_cloud,
                                                     const unsigned int pointcloud_width,
                                                     const unsigned int pointcloud_height)
{
  if (m_trackerType & EDGE_TRACKER) {
    try {
      // NOTE(review): the edge tracking call is elided in this view.
    }
    catch (...) {
      std::cerr << "Error in moving edge tracking" << std::endl;
      throw;
    }
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    try {
      vpMbKltTracker::preTracking(*ptr_I);
    }
    catch (const vpException &e) {
      std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
      throw;
    }
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    try {
      vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
    }
    catch (...) {
      std::cerr << "Error in Depth tracking" << std::endl;
      throw;
    }
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    try {
      vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
    }
    catch (...) {
      std::cerr << "Error in Depth dense tracking" << std::endl;
      throw;
    }
  }
}
7077 
// Re-initialize the tracker with a new CAD model and pose. Frees every feature
// container of every sub-tracker (edge, KLT, depth normal, depth dense), then
// reloads the model and re-initializes from the given pose, using whichever of
// the two image pointers (gray I or color I_color) is non-null.
// NOTE(review): original line 7078 (first line of the signature) and line 7087
// (presumably `vpMbtDistanceCylinder *cy;`) are missing from this extraction —
// confirm against the upstream ViSP sources.
7079  const vpImage<vpRGBa> *const I_color, const std::string &cad_name,
7080  const vpHomogeneousMatrix &cMo, bool verbose,
7081  const vpHomogeneousMatrix &T)
7082 {
7083  m_cMo.eye();
7084 
7085  // Edge
7086  vpMbtDistanceLine *l;
7088  vpMbtDistanceCircle *ci;
7089 
// Delete the edge features (lines, cylinders, circles) at every pyramid scale.
7090  for (unsigned int i = 0; i < scales.size(); i++) {
7091  if (scales[i]) {
7092  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
7093  l = *it;
7094  if (l != nullptr)
7095  delete l;
7096  l = nullptr;
7097  }
7098 
7099  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
7100  ++it) {
7101  cy = *it;
7102  if (cy != nullptr)
7103  delete cy;
7104  cy = nullptr;
7105  }
7106 
7107  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
7108  ci = *it;
7109  if (ci != nullptr)
7110  delete ci;
7111  ci = nullptr;
7112  }
7113 
7114  lines[i].clear();
7115  cylinders[i].clear();
7116  circles[i].clear();
7117  }
7118  }
7119 
// Reset the edge-feature counters.
7120  nline = 0;
7121  ncylinder = 0;
7122  ncircle = 0;
7123  nbvisiblepolygone = 0;
7124 
7125  // KLT
7126 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7127  // delete the Klt Polygon features
7128  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
7129  vpMbtDistanceKltPoints *kltpoly = *it;
7130  if (kltpoly != nullptr) {
7131  delete kltpoly;
7132  }
7133  kltpoly = nullptr;
7134  }
7135  kltPolygons.clear();
7136 
7137  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
7138  ++it) {
7139  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
7140  if (kltPolyCylinder != nullptr) {
7141  delete kltPolyCylinder;
7142  }
7143  kltPolyCylinder = nullptr;
7144  }
7145  kltCylinders.clear();
7146 
7147  // delete the structures used to display circles
7148  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
7149  ci = *it;
7150  if (ci != nullptr) {
7151  delete ci;
7152  }
7153  ci = nullptr;
7154  }
7155  circles_disp.clear();
7156 
7157  firstInitialisation = true;
7158 
7159 #endif
7160 
7161  // Depth normal
7162  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
7163  delete m_depthNormalFaces[i];
7164  m_depthNormalFaces[i] = nullptr;
7165  }
7166  m_depthNormalFaces.clear();
7167 
7168  // Depth dense
7169  for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
7170  delete m_depthDenseFaces[i];
7171  m_depthDenseFaces[i] = nullptr;
7172  }
7173  m_depthDenseFaces.clear();
7174 
7175  faces.reset();
7176 
// Reload the CAD model (optionally transformed by T) and re-init from the pose,
// picking the grayscale or color initialization path depending on which image
// pointer the caller supplied.
7177  loadModel(cad_name, verbose, T);
7178  if (I) {
7179  initFromPose(*I, cMo);
7180  }
7181  else {
7182  initFromPose(*I_color, cMo);
7183  }
7184 }
7185 
7186 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
7187  const vpHomogeneousMatrix &cMo, bool verbose,
7188  const vpHomogeneousMatrix &T)
7189 {
7190  reInitModel(&I, nullptr, cad_name, cMo, verbose, T);
7191 }
7192 
7193 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
7194  const vpHomogeneousMatrix &cMo, bool verbose,
7195  const vpHomogeneousMatrix &T)
7196 {
7197  reInitModel(nullptr, &I_color, cad_name, cMo, verbose, T);
7198 }
7199 
// Reset every sub-tracker wrapped by this TrackerWrapper.
// NOTE(review): original lines 7202 and 7206-7207 are missing from this
// extraction — presumably the edge and depth sub-tracker resets; confirm
// against the upstream ViSP sources.
7200 void vpMbGenericTracker::TrackerWrapper::resetTracker()
7201 {
7203 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7204  vpMbKltTracker::resetTracker();
7205 #endif
7208 }
7209 
// Propagate the intrinsic camera parameters to the wrapper and its sub-trackers.
// NOTE(review): original lines 7214 and 7218-7219 are missing from this
// extraction — presumably the edge and depth sub-tracker calls; confirm
// against the upstream ViSP sources.
7210 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
7211 {
7212  m_cam = cam;
7213 
7215 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7216  vpMbKltTracker::setCameraParameters(m_cam);
7217 #endif
7220 }
7221 
7222 void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags) { vpMbEdgeTracker::setClipping(flags); }
7223 
// Set the far clipping distance used for polygon visibility.
// NOTE(review): original line 7226 (the body, presumably delegating to
// vpMbEdgeTracker::setFarClippingDistance) is missing from this extraction —
// confirm against the upstream ViSP sources.
7224 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
7225 {
7227 }
7228 
// Set the near clipping distance used for polygon visibility.
// NOTE(review): original line 7231 (the body, presumably delegating to
// vpMbEdgeTracker::setNearClippingDistance) is missing from this extraction —
// confirm against the upstream ViSP sources.
7229 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
7230 {
7232 }
7233 
// Enable/disable the Ogre-based visibility test and name the Ogre window.
// NOTE(review): original line 7236 (presumably the base-class call enabling the
// test) is missing from this extraction — confirm against upstream ViSP sources.
7234 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
7235 {
7237 #ifdef VISP_HAVE_OGRE
7238  faces.getOgreContext()->setWindowName("TrackerWrapper");
7239 #endif
7240 }
7241 
// Force the pose of all sub-trackers to cdMo, using whichever image pointer
// (gray I or color I_color) is non-null; a color image is converted into the
// internal grayscale buffer m_I first.
// NOTE(review): original line 7242 (first line of the signature) and line 7269
// (the edge-tracker call inside the EDGE_TRACKER branch) are missing from this
// extraction — confirm against the upstream ViSP sources.
7243  const vpImage<vpRGBa> *const I_color, const vpHomogeneousMatrix &cdMo)
7244 {
7245  bool performKltSetPose = false;
7246  if (I_color) {
7247  vpImageConvert::convert(*I_color, m_I);
7248  }
7249 
7250 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
// When KLT is enabled, its setPose() already re-initializes the visible faces,
// so the generic init() path below is skipped.
7251  if (m_trackerType & KLT_TRACKER) {
7252  performKltSetPose = true;
7253 
// FOV computation is only needed when scanline rendering or FOV clipping is on.
7254  if (useScanLine || clippingFlag > 3) {
7255  m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7256  }
7257 
7258  vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
7259  }
7260 #endif
7261 
// Without the KLT path: store the pose and run the full re-initialization.
7262  if (!performKltSetPose) {
7263  m_cMo = cdMo;
7264  init(I ? *I : m_I);
7265  return;
7266  }
7267 
7268  if (m_trackerType & EDGE_TRACKER) {
7270  }
7271 
7272  if (useScanLine) {
7273  faces.computeClippedPolygons(m_cMo, m_cam);
7274  faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7275  }
7276 
// The pyramidal variant is kept for reference but disabled (#if 0).
7277 #if 0
7278  if (m_trackerType & EDGE_TRACKER) {
7279  initPyramid(I, Ipyramid);
7280 
7281  unsigned int i = (unsigned int)scales.size();
7282  do {
7283  i--;
7284  if (scales[i]) {
7285  downScale(i);
7286  vpMbEdgeTracker::initMovingEdge(*Ipyramid[i], cMo);
7287  upScale(i);
7288  }
7289  } while (i != 0);
7290 
7291  cleanPyramid(Ipyramid);
7292  }
7293 #else
7294  if (m_trackerType & EDGE_TRACKER) {
7295  vpMbEdgeTracker::initMovingEdge(I ? *I : m_I, m_cMo);
7296  }
7297 #endif
7298 
7299  // Depth normal
7300  vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7301 
7302  // Depth dense
7303  vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7304 }
7305 
// Grayscale setPose overload: forwards to the generic (gray/color) setPose.
// NOTE(review): original line 7306 (the signature) is missing from this
// extraction — confirm against the upstream ViSP sources.
7307 {
7308  setPose(&I, nullptr, cdMo);
7309 }
7310 
// Color setPose overload: forwards to the generic (gray/color) setPose.
// NOTE(review): original line 7311 (the signature) is missing from this
// extraction — confirm against the upstream ViSP sources.
7312 {
7313  setPose(nullptr, &I_color, cdMo);
7314 }
7315 
// Enable/disable the projection-error computation.
// NOTE(review): original line 7318 (the body, presumably delegating to the
// edge-tracker base) is missing from this extraction — confirm against the
// upstream ViSP sources.
7316 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
7317 {
7319 }
7320 
// Enable/disable the scanline visibility test on every sub-tracker.
// NOTE(review): original lines 7323 and 7327-7328 are missing from this
// extraction — presumably the edge and depth sub-tracker calls; confirm
// against the upstream ViSP sources.
7321 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
7322 {
7324 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7325  vpMbKltTracker::setScanLineVisibilityTest(v);
7326 #endif
7329 }
7330 
7331 void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
7332 {
7333  if ((type & (EDGE_TRACKER |
7334 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7335  KLT_TRACKER |
7336 #endif
7337  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7338  throw vpException(vpTrackingException::badValue, "bad value for tracker type: !", type);
7339  }
7340 
7341  m_trackerType = type;
7342 }
7343 
// Run the tracking-quality test when the edge feature is enabled.
// NOTE(review): original line 7347 (the call inside the branch, presumably
// vpMbEdgeTracker::testTracking) is missing from this extraction — confirm
// against the upstream ViSP sources.
7344 void vpMbGenericTracker::TrackerWrapper::testTracking()
7345 {
7346  if (m_trackerType & EDGE_TRACKER) {
7348  }
7349 }
7350 
// Monocular track() overload: only valid when an image-based feature (edge or
// KLT) is enabled; forwards to the image+point-cloud overload with a null
// cloud. The image parameter is referenced only in the PCL build.
// NOTE(review): original line 7351 (first line of the signature) is missing
// from this extraction — confirm against the upstream ViSP sources.
7352 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7353  I
7354 #endif
7355 )
7356 {
7357  if ((m_trackerType & (EDGE_TRACKER
7358 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7359  | KLT_TRACKER
7360 #endif
7361  )) == 0) {
7362  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7363  return;
7364  }
7365 
7366 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7367  track(&I, nullptr);
7368 #endif
7369 }
7370 
// Intentionally empty overload kept for debugging only (see comment below).
// NOTE(review): original line 7371 (the signature) is missing from this
// extraction; presumably a color-image track() overload — confirm against the
// upstream ViSP sources.
7372 {
7373  // not exposed to the public API, only for debug
7374 }
7375 
// Main PCL tracking entry point: validates the enabled features against the
// provided inputs, runs pre-tracking, the virtual visual servoing (VVS)
// optimization, the edge quality test, and post-tracking. On any vpException
// the previous pose is restored and the exception is rethrown.
// NOTE(review): original line 7377 (first line of the signature, presumably
// `void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,`)
// is missing from this extraction — confirm against the upstream ViSP sources.
7376 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7378  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
7379 {
// At least one supported feature must be enabled, otherwise tracking is a no-op.
7380  if ((m_trackerType & (EDGE_TRACKER |
7381 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7382  KLT_TRACKER |
7383 #endif
7384  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7385  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7386  return;
7387  }
7388 
// Image-based features (edge/KLT) require a valid image pointer.
7389  if (m_trackerType & (EDGE_TRACKER
7390 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7391  | KLT_TRACKER
7392 #endif
7393  ) &&
7394  ptr_I == nullptr) {
7395  throw vpException(vpException::fatalError, "Image pointer is nullptr!");
7396  }
7397 
// Depth-based features require a valid point cloud.
7398  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) { // point_cloud == nullptr
7399  throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
7400  }
7401 
7402  // Back-up cMo in case of exception
7403  vpHomogeneousMatrix cMo_1 = m_cMo;
7404  try {
7405  preTracking(ptr_I, point_cloud);
7406 
7407  try {
7408  computeVVS(ptr_I);
7409  }
7410  catch (...) {
// Mark the covariance as invalid before propagating the VVS failure.
7411  covarianceMatrix = -1;
7412  throw; // throw the original exception
7413  }
7414 
// The quality test only applies when edge is the sole enabled feature.
7415  if (m_trackerType == EDGE_TRACKER)
7416  testTracking();
7417 
7418  postTracking(ptr_I, point_cloud);
7419 
7420  }
7421  catch (const vpException &e) {
7422  std::cerr << "Exception: " << e.what() << std::endl;
// Restore the pre-tracking pose so the caller can retry from a sane state.
7423  m_cMo = cMo_1;
7424  throw; // rethrowing the original exception
7425  }
7426 }
7427 #endif
7428 END_VISP_NAMESPACE
void setWindowName(const Ogre::String &n)
Definition: vpAROgre.h:285
unsigned int getCols() const
Definition: vpArray2D.h:417
void resize(unsigned int nrows, unsigned int ncols, bool flagNullify=true, bool recopy_=true)
Definition: vpArray2D.h:442
unsigned int getRows() const
Definition: vpArray2D.h:427
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
Definition: vpColVector.h:191
void insert(unsigned int i, const vpColVector &v)
void resize(unsigned int i, bool flagNullify=true)
Definition: vpColVector.h:1148
Class to define RGB colors available for display functionalities.
Definition: vpColor.h:157
static const vpColor red
Definition: vpColor.h:198
static const vpColor cyan
Definition: vpColor.h:207
static const vpColor blue
Definition: vpColor.h:204
static const vpColor purple
Definition: vpColor.h:209
static const vpColor yellow
Definition: vpColor.h:206
static const vpColor green
Definition: vpColor.h:201
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayEllipse(const vpImage< unsigned char > &I, const vpImagePoint &center, const double &coef1, const double &coef2, const double &coef3, bool use_normalized_centered_moments, const vpColor &color, unsigned int thickness=1, bool display_center=false, bool display_arc=false)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void displayArrow(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color=vpColor::white, unsigned int w=4, unsigned int h=2, unsigned int thickness=1)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition: vpException.h:60
@ ioError
I/O error.
Definition: vpException.h:67
@ badValue
Used to indicate that a value is not in the allowed range.
Definition: vpException.h:73
@ notInitialized
Used to indicate that a parameter is not initialized.
Definition: vpException.h:74
@ fatalError
Fatal error.
Definition: vpException.h:72
const char * what() const
Definition: vpException.cpp:71
static vpHomogeneousMatrix direct(const vpColVector &v)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:82
unsigned int getWidth() const
Definition: vpImage.h:242
unsigned int getHeight() const
Definition: vpImage.h:181
void init(unsigned int h, unsigned int w, Type value)
Definition: vpImage.h:378
static std::string getFileExtension(const std::string &pathname, bool checkFile=false)
Definition: vpIoTools.cpp:1124
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition: vpKltOpencv.h:79
static double rad(double deg)
Definition: vpMath.h:129
static double sqr(double x)
Definition: vpMath.h:203
static bool equal(double x, double y, double threshold=0.001)
Definition: vpMath.h:459
static double deg(double rad)
Definition: vpMath.h:119
Implementation of a matrix and operations on matrices.
Definition: vpMatrix.h:169
vpMatrix AtA() const
void insert(const vpMatrix &A, unsigned int r, unsigned int c)
Definition: vpMatrix.cpp:1139
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
void computeVisibility(unsigned int width, unsigned int height)
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
virtual void computeVVSInit() VP_OVERRIDE
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void track(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void setPose(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud, const vpHomogeneousMatrix &cdMo)
virtual void computeVVSInit() VP_OVERRIDE
void computeVisibility(unsigned int width, unsigned int height)
void computeVVS(const vpImage< unsigned char > &_I, unsigned int lvl)
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
void computeProjectionError(const vpImage< unsigned char > &_I)
virtual void testTracking() VP_OVERRIDE
virtual void setNearClippingDistance(const double &dist) VP_OVERRIDE
virtual void computeVVSWeights()
virtual void computeVVSInit() VP_OVERRIDE
void initMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
void trackMovingEdge(const vpImage< unsigned char > &I)
virtual void setFarClippingDistance(const double &dist) VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
void computeVVSFirstPhaseFactor(const vpImage< unsigned char > &I, unsigned int lvl=0)
void updateMovingEdge(const vpImage< unsigned char > &I)
unsigned int initMbtTracking(unsigned int &nberrors_lines, unsigned int &nberrors_cylinders, unsigned int &nberrors_circles)
virtual void setClipping(const unsigned int &flags) VP_OVERRIDE
void reinitMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
void resetTracker() VP_OVERRIDE
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
void setMovingEdge(const vpMe &me)
virtual void computeVVSInteractionMatrixAndResidu(const vpImage< unsigned char > &I)
virtual void setCameraParameters(const vpCameraParameters &cam) VP_OVERRIDE
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
void visibleFace(const vpImage< unsigned char > &_I, const vpHomogeneousMatrix &_cMo, bool &newvisibleline)
virtual std::vector< std::vector< double > > getFeaturesForDisplay()
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual double getGoodMovingEdgesRatioThreshold() const
virtual int getTrackerType() const
virtual void setOgreVisibilityTest(const bool &v) VP_OVERRIDE
std::map< std::string, TrackerWrapper * > m_mapOfTrackers
virtual std::pair< std::vector< vpPolygon >, std::vector< std::vector< vpPoint > > > getPolygonFaces(bool orderPolygons=true, bool useVisibility=true, bool clipPolygon=false) VP_OVERRIDE
virtual void setProjectionErrorComputation(const bool &flag) VP_OVERRIDE
vpMbGenericTracker()
json namespace shortcut
virtual void setDisplayFeatures(bool displayF) VP_OVERRIDE
unsigned int m_nb_feat_edge
Number of moving-edges features.
std::map< std::string, vpHomogeneousMatrix > m_mapOfCameraTransformationMatrix
virtual double computeCurrentProjectionError(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo, const vpCameraParameters &_cam) VP_OVERRIDE
virtual void getLcircle(std::list< vpMbtDistanceCircle * > &circlesList, unsigned int level=0) const
virtual void getCameraParameters(vpCameraParameters &cam) const
Definition: vpMbTracker.h:250
virtual void setDepthDenseFilteringMaxDistance(double maxDistance)
virtual void setProjectionErrorDisplayArrowLength(unsigned int length) VP_OVERRIDE
virtual void initFromPoints(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
virtual void setGoodNbRayCastingAttemptsRatio(const double &ratio)
virtual void getPose(vpHomogeneousMatrix &cMo) const
Definition: vpMbTracker.h:416
virtual void setGoodMovingEdgesRatioThreshold(double threshold)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual vpMbtPolygon * getPolygon(unsigned int index) VP_OVERRIDE
virtual void setDepthDenseFilteringMinDistance(double minDistance)
virtual void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
virtual void setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name="") VP_OVERRIDE
virtual void setLod(bool useLod, const std::string &name="") VP_OVERRIDE
virtual void setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
vpColVector m_w
Robust weights.
virtual void saveConfigFile(const std::string &settingsFile) const
virtual std::string getReferenceCameraName() const
virtual std::map< std::string, int > getCameraTrackerTypes() const
virtual void loadConfigFileJSON(const std::string &configFile, bool verbose=true)
virtual void testTracking() VP_OVERRIDE
unsigned int m_nb_feat_depthDense
Number of depth dense features.
virtual void setDepthNormalPclPlaneEstimationMethod(int method)
virtual void computeProjectionError()
virtual void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual void setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
virtual void setMovingEdge(const vpMe &me)
virtual void setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
virtual void setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
virtual void setAngleDisappear(const double &a) VP_OVERRIDE
virtual void setFeatureFactors(const std::map< vpTrackerType, double > &mapOfFeatureFactors)
virtual void computeVVSInit() VP_OVERRIDE
virtual void init(const vpImage< unsigned char > &I) VP_OVERRIDE
virtual void getLcylinder(std::list< vpMbtDistanceCylinder * > &cylindersList, unsigned int level=0) const
virtual void setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
virtual std::vector< std::string > getCameraNames() const
virtual void setDepthDenseFilteringMethod(int method)
virtual void track(const vpImage< unsigned char > &I) VP_OVERRIDE
vpColVector m_weightedError
Weighted error.
virtual void loadModel(const std::string &modelFile, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix()) VP_OVERRIDE
vpMatrix m_L
Interaction matrix.
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt) VP_OVERRIDE
virtual void setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
virtual void computeVVSWeights()
virtual void loadConfigFileXML(const std::string &configFile, bool verbose=true)
virtual void setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
virtual void setMask(const vpImage< bool > &mask) VP_OVERRIDE
virtual void setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
vpColVector m_error
(s - s*)
virtual void setReferenceCameraName(const std::string &referenceCameraName)
virtual void setMinLineLengthThresh(double minLineLengthThresh, const std::string &name="") VP_OVERRIDE
virtual void setClipping(const unsigned int &flags) VP_OVERRIDE
std::map< vpTrackerType, double > m_mapOfFeatureFactors
Ponderation between each feature type in the VVS stage.
Definition: