Visual Servoing Platform  version 3.6.1 under development (2024-04-19)
vpMbGenericTracker.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Generic model-based tracker.
33  *
34 *****************************************************************************/
35 
36 #include <visp3/mbt/vpMbGenericTracker.h>
37 
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/core/vpIoTools.h>
42 #include <visp3/mbt/vpMbtXmlGenericParser.h>
43 
44 #ifdef VISP_HAVE_NLOHMANN_JSON
45 #include <nlohmann/json.hpp>
46 using json = nlohmann::json;
47 #endif
48 
50  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
51  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
52  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
53 {
54  m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
55 
56  // Add default camera transformation matrix
58 
59  // Add default ponderation between each feature type
61 
62 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
64 #endif
65 
68 }
69 
/*!
  Construct a generic model-based tracker made of \e nbCameras cameras that
  all use the same tracker type.

  With a single camera, the tracker is registered under the name "Camera".
  With several cameras, they are named "Camera1", "Camera2", ... and the
  first one (in map order) becomes the reference camera.

  \param nbCameras : Number of cameras. Must be greater than 0, otherwise a
  vpTrackingException::fatalError is thrown.
  \param trackerType : Type of the underlying tracker used for every camera.
*/
vpMbGenericTracker::vpMbGenericTracker(unsigned int nbCameras, int trackerType)
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
{
  if (nbCameras == 0) {
    throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
  }
  else if (nbCameras == 1) {
    // Single-camera case: canonical camera name "Camera"
    m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);

    // Add default camera transformation matrix
    // NOTE(review): the statement initializing the camera transformation
    // matrix appears elided in this excerpt -- verify against upstream source.
  }
  else {
    // Multi-camera case: cameras are named "Camera1" ... "CameraN"
    for (unsigned int i = 1; i <= nbCameras; i++) {
      std::stringstream ss;
      ss << "Camera" << i;
      m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);

      // Add default camera transformation matrix
    }

    // Set by default the reference camera to the first one
    m_referenceCameraName = m_mapOfTrackers.begin()->first;
  }

  // Add default ponderation between each feature type

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
#endif

}
108 
/*!
  Construct a generic model-based tracker with one camera per entry of
  \e trackerTypes.

  With a single entry, the camera is named "Camera". With several entries,
  the cameras are named "Camera1", "Camera2", ... (camera i uses
  trackerTypes[i-1]) and the first one becomes the reference camera.

  \param trackerTypes : One tracker type per camera. Must not be empty,
  otherwise a vpException::badValue is thrown.
*/
vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
{
  if (trackerTypes.empty()) {
    throw vpException(vpException::badValue, "There is no camera!");
  }

  if (trackerTypes.size() == 1) {
    m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);

    // Add default camera transformation matrix
  }
  else {
    // i starts at 1 so that camera names begin at "Camera1";
    // trackerTypes is therefore indexed with i - 1.
    for (size_t i = 1; i <= trackerTypes.size(); i++) {
      std::stringstream ss;
      ss << "Camera" << i;
      m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);

      // Add default camera transformation matrix
    }

    // Set by default the reference camera to the first one
    m_referenceCameraName = m_mapOfTrackers.begin()->first;
  }

  // Add default ponderation between each feature type

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
#endif

}
148 
/*!
  Construct a generic model-based tracker with explicitly named cameras:
  camera \e cameraNames[i] uses tracker type \e trackerTypes[i]. The first
  camera (in map order) becomes the reference camera.

  \param cameraNames : One name per camera; must not be empty.
  \param trackerTypes : One tracker type per camera; must have the same size
  as \e cameraNames.
*/
vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
                                       const std::vector<int> &trackerTypes)
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
{
  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
    // NOTE(review): the beginning of the throw statement appears elided in
    // this excerpt (only the message continuation below remains) -- verify
    // against the upstream source.
                      "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
  }

  for (size_t i = 0; i < cameraNames.size(); i++) {
    m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);

    // Add default camera transformation matrix
  }

  // Set by default the reference camera to the first one
  m_referenceCameraName = m_mapOfTrackers.begin()->first;

  // Add default ponderation between each feature type

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
#endif

}
180 
182 {
183  for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
184  ++it) {
185  delete it->second;
186  it->second = nullptr;
187  }
188 }
189 
208  const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam)
209 {
210  double rawTotalProjectionError = 0.0;
211  unsigned int nbTotalFeaturesUsed = 0;
212 
213  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
214  it != m_mapOfTrackers.end(); ++it) {
215  TrackerWrapper *tracker = it->second;
216 
217  unsigned int nbFeaturesUsed = 0;
218  double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
219 
220  if (nbFeaturesUsed > 0) {
221  nbTotalFeaturesUsed += nbFeaturesUsed;
222  rawTotalProjectionError += curProjError;
223  }
224  }
225 
226  if (nbTotalFeaturesUsed > 0) {
227  return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
228  }
229 
230  return 90.0;
231 }
232 
251  const vpHomogeneousMatrix &_cMo,
252  const vpCameraParameters &_cam)
253 {
255  vpImageConvert::convert(I_color, I); // FS: Shoudn't we use here m_I that was converted in track() ?
256 
257  return computeCurrentProjectionError(I, _cMo, _cam);
258 }
259 
261 {
262  if (computeProjError) {
263  double rawTotalProjectionError = 0.0;
264  unsigned int nbTotalFeaturesUsed = 0;
265 
266  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
267  it != m_mapOfTrackers.end(); ++it) {
268  TrackerWrapper *tracker = it->second;
269 
270  double curProjError = tracker->getProjectionError();
271  unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
272 
273  if (nbFeaturesUsed > 0) {
274  nbTotalFeaturesUsed += nbFeaturesUsed;
275  rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
276  }
277  }
278 
279  if (nbTotalFeaturesUsed > 0) {
280  projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
281  }
282  else {
283  projectionError = 90.0;
284  }
285  }
286  else {
287  projectionError = 90.0;
288  }
289 }
290 
/*!
  Run the virtual visual servoing (VVS) pose estimation loop over all the
  cameras. Each sub-tracker's stacked interaction matrix and residual are
  expressed in the reference frame through per-camera velocity-twist
  matrices, re-weighted per feature type, and used to iteratively refine the
  pose until the residual norm stabilizes or the iteration limit is reached.

  \param mapOfImages : Map of grayscale images, indexed by camera name.

  \throws vpTrackingException::notEnoughPointError if fewer than 4 feature
  rows are available after initialization.
*/
void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
{
  computeVVSInit(mapOfImages);

  // At least 4 feature rows are required to estimate the pose
  if (m_error.getRows() < 4) {
    throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
  }

  double normRes = 0;
  double normRes_1 = -1;
  unsigned int iter = 0;

  vpMatrix LTL;
  vpColVector LTR, v;
  vpColVector error_prev;

  double mu = m_initialMu;
  vpHomogeneousMatrix cMo_prev;

  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
  if (isoJoIdentity)
    oJo.eye();

  // Covariance
  vpColVector W_true(m_error.getRows());
  vpMatrix L_true, LVJ_true;

  // Create the map of VelocityTwistMatrices
  std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
       it != m_mapOfCameraTransformationMatrix.end(); ++it) {
    // NOTE(review): cVo is used without a visible declaration in this
    // excerpt (its declaration line appears elided) -- verify upstream.
    cVo.buildFrom(it->second);
    mapOfVelocityTwist[it->first] = cVo;
  }

  // Per-feature-type weighting factors
  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
#endif
  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];

  // Reset feature counters
  // NOTE(review): the resets of m_nb_feat_depthNormal / m_nb_feat_depthDense
  // appear elided in this excerpt -- verify upstream.
  m_nb_feat_edge = 0;
  m_nb_feat_klt = 0;

  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
    computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);

    bool reStartFromLastIncrement = false;
    computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
    if (reStartFromLastIncrement) {
      // The LM check rejected the last increment: restore the previous pose
      // in every sub-tracker (expressed in its own camera frame).
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;

        tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        vpHomogeneousMatrix c_curr_tTc_curr0 =
          m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
        tracker->ctTc0 = c_curr_tTc_curr0;
#endif
      }
    }

    if (!reStartFromLastIncrement) {

      if (computeCovariance) {
        L_true = m_L;
        if (!isoJoIdentity) {
          // NOTE(review): cVo declaration appears elided here -- verify.
          cVo.buildFrom(m_cMo);
          LVJ_true = (m_L * (cVo * oJo));
        }
      }

      if (iter == 0) {
        // If all the 6 dof should be estimated, we check if the interaction
        // matrix is full rank. If not we remove automatically the dof that
        // cannot be estimated. This is particularly useful when considering
        // circles (rank 5) and cylinders (rank 4)
        if (isoJoIdentity) {
          cVo.buildFrom(m_cMo);

          vpMatrix K; // kernel
          unsigned int rank = (m_L * cVo).kernel(K);
          if (rank == 0) {
            throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
          }

          if (rank != 6) {
            vpMatrix I; // Identity
            I.eye(6);
            oJo = I - K.AtA();
            isoJoIdentity = false;
          }
        }
      }

      // Weighting
      double num = 0;
      double den = 0;

      // Apply robust weights feature-block by feature-block; start_index
      // tracks the row offset of each sub-tracker's stacked features.
      unsigned int start_index = 0;
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;

        if (tracker->m_trackerType & EDGE_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
            double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_edge.getRows();
        }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        if (tracker->m_trackerType & KLT_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
            double wi = tracker->m_w_klt[i] * factorKlt;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_klt.getRows();
        }
#endif

        if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
            double wi = tracker->m_w_depthNormal[i] * factorDepth;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_depthNormal.getRows();
        }

        if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
            double wi = tracker->m_w_depthDense[i] * factorDepthDense;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_depthDense.getRows();
        }
      }

      normRes_1 = normRes;
      normRes = sqrt(num / den);

      computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);

      cMo_prev = m_cMo;

      // NOTE(review): the statement applying the computed velocity v to
      // update m_cMo appears elided in this excerpt -- verify upstream.

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;

        vpHomogeneousMatrix c_curr_tTc_curr0 =
          m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
        tracker->ctTc0 = c_curr_tTc_curr0;
      }
#endif

      // Update cMo
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;
        tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
      }
    }

    iter++;
  }

  // Update features number
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    if (tracker->m_trackerType & EDGE_TRACKER) {
      m_nb_feat_edge += tracker->m_error_edge.size();
    }
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    if (tracker->m_trackerType & KLT_TRACKER) {
      m_nb_feat_klt += tracker->m_error_klt.size();
    }
#endif
    if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
      m_nb_feat_depthNormal += tracker->m_error_depthNormal.size();
    }
    if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
      m_nb_feat_depthDense += tracker->m_error_depthDense.size();
    }
  }

  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER) {
      tracker->updateMovingEdgeWeights();
    }
  }
}
537 
539 {
540  throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
541 }
542 
543 void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
544 {
545  unsigned int nbFeatures = 0;
546 
547  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
548  it != m_mapOfTrackers.end(); ++it) {
549  TrackerWrapper *tracker = it->second;
550  tracker->computeVVSInit(mapOfImages[it->first]);
551 
552  nbFeatures += tracker->m_error.getRows();
553  }
554 
555  m_L.resize(nbFeatures, 6, false, false);
556  m_error.resize(nbFeatures, false);
557 
558  m_weightedError.resize(nbFeatures, false);
559  m_w.resize(nbFeatures, false);
560  m_w = 1;
561 }
562 
564 {
565  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
566  "computeVVSInteractionMatrixAndR"
567  "esidu() should not be called");
568 }
569 
571  std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
572  std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
573 {
574  unsigned int start_index = 0;
575 
576  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
577  it != m_mapOfTrackers.end(); ++it) {
578  TrackerWrapper *tracker = it->second;
579 
580  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
581 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
582  vpHomogeneousMatrix c_curr_tTc_curr0 =
583  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
584  tracker->ctTc0 = c_curr_tTc_curr0;
585 #endif
586 
587  tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
588 
589  m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
590  m_error.insert(start_index, tracker->m_error);
591 
592  start_index += tracker->m_error.getRows();
593  }
594 }
595 
597 {
598  unsigned int start_index = 0;
599 
600  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
601  it != m_mapOfTrackers.end(); ++it) {
602  TrackerWrapper *tracker = it->second;
603  tracker->computeVVSWeights();
604 
605  m_w.insert(start_index, tracker->m_w);
606  start_index += tracker->m_w.getRows();
607  }
608 }
609 
624  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
625  bool displayFullModel)
626 {
627  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
628  if (it != m_mapOfTrackers.end()) {
629  TrackerWrapper *tracker = it->second;
630  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
631  }
632  else {
633  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
634  }
635 }
636 
651  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
652  bool displayFullModel)
653 {
654  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
655  if (it != m_mapOfTrackers.end()) {
656  TrackerWrapper *tracker = it->second;
657  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
658  }
659  else {
660  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
661  }
662 }
663 
681  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
682  const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
683  unsigned int thickness, bool displayFullModel)
684 {
685  if (m_mapOfTrackers.size() == 2) {
686  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
687  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
688  ++it;
689 
690  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
691  }
692  else {
693  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
694  << std::endl;
695  }
696 }
697 
715  const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
716  const vpCameraParameters &cam2, const vpColor &color, unsigned int thickness,
717  bool displayFullModel)
718 {
719  if (m_mapOfTrackers.size() == 2) {
720  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
721  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
722  ++it;
723 
724  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
725  }
726  else {
727  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
728  << std::endl;
729  }
730 }
731 
743 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
744  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
745  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
746  const vpColor &col, unsigned int thickness, bool displayFullModel)
747 {
748  // Display only for the given images
749  for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
750  it_img != mapOfImages.end(); ++it_img) {
751  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
752  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
753  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
754 
755  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
756  it_cam != mapOfCameraParameters.end()) {
757  TrackerWrapper *tracker = it_tracker->second;
758  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
759  }
760  else {
761  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
762  }
763  }
764 }
765 
777 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
778  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
779  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
780  const vpColor &col, unsigned int thickness, bool displayFullModel)
781 {
782  // Display only for the given images
783  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
784  it_img != mapOfImages.end(); ++it_img) {
785  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
786  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
787  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
788 
789  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
790  it_cam != mapOfCameraParameters.end()) {
791  TrackerWrapper *tracker = it_tracker->second;
792  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
793  }
794  else {
795  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
796  }
797  }
798 }
799 
805 std::vector<std::string> vpMbGenericTracker::getCameraNames() const
806 {
807  std::vector<std::string> cameraNames;
808 
809  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
810  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
811  cameraNames.push_back(it_tracker->first);
812  }
813 
814  return cameraNames;
815 }
816 
818 {
820 }
821 
831 {
832  if (m_mapOfTrackers.size() == 2) {
833  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
834  it->second->getCameraParameters(cam1);
835  ++it;
836 
837  it->second->getCameraParameters(cam2);
838  }
839  else {
840  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
841  << std::endl;
842  }
843 }
844 
850 void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
851 {
852  // Clear the input map
853  mapOfCameraParameters.clear();
854 
855  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
856  it != m_mapOfTrackers.end(); ++it) {
857  vpCameraParameters cam_;
858  it->second->getCameraParameters(cam_);
859  mapOfCameraParameters[it->first] = cam_;
860  }
861 }
862 
869 std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
870 {
871  std::map<std::string, int> trackingTypes;
872 
873  TrackerWrapper *traker;
874  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
875  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
876  traker = it_tracker->second;
877  trackingTypes[it_tracker->first] = traker->getTrackerType();
878  }
879 
880  return trackingTypes;
881 }
882 
891 void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
892 {
893  if (m_mapOfTrackers.size() == 2) {
894  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
895  clippingFlag1 = it->second->getClipping();
896  ++it;
897 
898  clippingFlag2 = it->second->getClipping();
899  }
900  else {
901  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
902  << std::endl;
903  }
904 }
905 
911 void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
912 {
913  mapOfClippingFlags.clear();
914 
915  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
916  it != m_mapOfTrackers.end(); ++it) {
917  TrackerWrapper *tracker = it->second;
918  mapOfClippingFlags[it->first] = tracker->getClipping();
919  }
920 }
921 
928 {
929  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
930  if (it != m_mapOfTrackers.end()) {
931  return it->second->getFaces();
932  }
933 
934  std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
935  return faces;
936 }
937 
944 {
945  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
946  if (it != m_mapOfTrackers.end()) {
947  return it->second->getFaces();
948  }
949 
950  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
951  return faces;
952 }
953 
954 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
958 std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
959 {
960  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
961  if (it != m_mapOfTrackers.end()) {
962  TrackerWrapper *tracker = it->second;
963  return tracker->getFeaturesCircle();
964  }
965  else {
966  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
967  m_referenceCameraName.c_str());
968  }
969 }
970 
974 std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
975 {
976  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
977  if (it != m_mapOfTrackers.end()) {
978  TrackerWrapper *tracker = it->second;
979  return tracker->getFeaturesKltCylinder();
980  }
981  else {
982  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
983  m_referenceCameraName.c_str());
984  }
985 }
986 
990 std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
991 {
992  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
993  if (it != m_mapOfTrackers.end()) {
994  TrackerWrapper *tracker = it->second;
995  return tracker->getFeaturesKlt();
996  }
997  else {
998  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
999  m_referenceCameraName.c_str());
1000  }
1001 }
1002 #endif
1003 
1030 std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
1031 {
1032  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1033 
1034  if (it != m_mapOfTrackers.end()) {
1035  return it->second->getFeaturesForDisplay();
1036  }
1037  else {
1038  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1039  }
1040 
1041  return std::vector<std::vector<double> >();
1042 }
1043 
1069 void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
1070 {
1071  // Clear the input map
1072  mapOfFeatures.clear();
1073 
1074  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1075  it != m_mapOfTrackers.end(); ++it) {
1076  mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1077  }
1078 }
1079 
1090 
1091 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
1100 std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1101 {
1102  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1103  if (it != m_mapOfTrackers.end()) {
1104  TrackerWrapper *tracker = it->second;
1105  return tracker->getKltImagePoints();
1106  }
1107  else {
1108  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1109  }
1110 
1111  return std::vector<vpImagePoint>();
1112 }
1113 
1122 std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1123 {
1124  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1125  if (it != m_mapOfTrackers.end()) {
1126  TrackerWrapper *tracker = it->second;
1127  return tracker->getKltImagePointsWithId();
1128  }
1129  else {
1130  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1131  }
1132 
1133  return std::map<int, vpImagePoint>();
1134 }
1135 
1142 {
1143  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1144  if (it != m_mapOfTrackers.end()) {
1145  TrackerWrapper *tracker = it->second;
1146  return tracker->getKltMaskBorder();
1147  }
1148  else {
1149  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1150  }
1151 
1152  return 0;
1153 }
1154 
1161 {
1162  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1163  if (it != m_mapOfTrackers.end()) {
1164  TrackerWrapper *tracker = it->second;
1165  return tracker->getKltNbPoints();
1166  }
1167  else {
1168  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1169  }
1170 
1171  return 0;
1172 }
1173 
1180 {
1181  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1182 
1183  if (it_tracker != m_mapOfTrackers.end()) {
1184  TrackerWrapper *tracker;
1185  tracker = it_tracker->second;
1186  return tracker->getKltOpencv();
1187  }
1188  else {
1189  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1190  }
1191 
1192  return vpKltOpencv();
1193 }
1194 
1204 {
1205  if (m_mapOfTrackers.size() == 2) {
1206  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1207  klt1 = it->second->getKltOpencv();
1208  ++it;
1209 
1210  klt2 = it->second->getKltOpencv();
1211  }
1212  else {
1213  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1214  << std::endl;
1215  }
1216 }
1217 
1223 void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1224 {
1225  mapOfKlts.clear();
1226 
1227  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1228  it != m_mapOfTrackers.end(); ++it) {
1229  TrackerWrapper *tracker = it->second;
1230  mapOfKlts[it->first] = tracker->getKltOpencv();
1231  }
1232 }
1233 
1239 std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1240 {
1241  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1242  if (it != m_mapOfTrackers.end()) {
1243  TrackerWrapper *tracker = it->second;
1244  return tracker->getKltPoints();
1245  }
1246  else {
1247  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1248  }
1249 
1250  return std::vector<cv::Point2f>();
1251 }
1252 
1260 #endif
1261 
1274 void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList, unsigned int level) const
1275 {
1276  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1277  if (it != m_mapOfTrackers.end()) {
1278  it->second->getLcircle(circlesList, level);
1279  }
1280  else {
1281  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1282  }
1283 }
1284 
1298 void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1299  unsigned int level) const
1300 {
1301  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1302  if (it != m_mapOfTrackers.end()) {
1303  it->second->getLcircle(circlesList, level);
1304  }
1305  else {
1306  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1307  }
1308 }
1309 
1322 void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList, unsigned int level) const
1323 {
1324  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1325  if (it != m_mapOfTrackers.end()) {
1326  it->second->getLcylinder(cylindersList, level);
1327  }
1328  else {
1329  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1330  }
1331 }
1332 
1346 void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1347  unsigned int level) const
1348 {
1349  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1350  if (it != m_mapOfTrackers.end()) {
1351  it->second->getLcylinder(cylindersList, level);
1352  }
1353  else {
1354  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1355  }
1356 }
1357 
1370 void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList, unsigned int level) const
1371 {
1372  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1373 
1374  if (it != m_mapOfTrackers.end()) {
1375  it->second->getLline(linesList, level);
1376  }
1377  else {
1378  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1379  }
1380 }
1381 
1395 void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1396  unsigned int level) const
1397 {
1398  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1399  if (it != m_mapOfTrackers.end()) {
1400  it->second->getLline(linesList, level);
1401  }
1402  else {
1403  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1404  }
1405 }
1406 
1436 std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1437  const vpHomogeneousMatrix &cMo,
1438  const vpCameraParameters &cam,
1439  bool displayFullModel)
1440 {
1441  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1442 
1443  if (it != m_mapOfTrackers.end()) {
1444  return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1445  }
1446  else {
1447  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1448  }
1449 
1450  return std::vector<std::vector<double> >();
1451 }
1452 
1478 void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1479  const std::map<std::string, unsigned int> &mapOfwidths,
1480  const std::map<std::string, unsigned int> &mapOfheights,
1481  const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1482  const std::map<std::string, vpCameraParameters> &mapOfCams,
1483  bool displayFullModel)
1484 {
1485  // Clear the input map
1486  mapOfModels.clear();
1487 
1488  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1489  it != m_mapOfTrackers.end(); ++it) {
1490  std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1491  std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1492  std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1493  std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1494 
1495  if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
1496  findCam != mapOfCams.end()) {
1497  mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
1498  findCam->second, displayFullModel);
1499  }
1500  }
1501 }
1502 
1509 {
1510  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1511 
1512  if (it != m_mapOfTrackers.end()) {
1513  return it->second->getMovingEdge();
1514  }
1515  else {
1516  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1517  }
1518 
1519  return vpMe();
1520 }
1521 
1531 {
1532  if (m_mapOfTrackers.size() == 2) {
1533  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1534  it->second->getMovingEdge(me1);
1535  ++it;
1536 
1537  it->second->getMovingEdge(me2);
1538  }
1539  else {
1540  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1541  << std::endl;
1542  }
1543 }
1544 
1550 void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1551 {
1552  mapOfMovingEdges.clear();
1553 
1554  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1555  it != m_mapOfTrackers.end(); ++it) {
1556  TrackerWrapper *tracker = it->second;
1557  mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1558  }
1559 }
1560 
1578 unsigned int vpMbGenericTracker::getNbPoints(unsigned int level) const
1579 {
1580  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1581 
1582  unsigned int nbGoodPoints = 0;
1583  if (it != m_mapOfTrackers.end()) {
1584 
1585  nbGoodPoints = it->second->getNbPoints(level);
1586  }
1587  else {
1588  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1589  }
1590 
1591  return nbGoodPoints;
1592 }
1593 
1608 void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, unsigned int level) const
1609 {
1610  mapOfNbPoints.clear();
1611 
1612  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1613  it != m_mapOfTrackers.end(); ++it) {
1614  TrackerWrapper *tracker = it->second;
1615  mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1616  }
1617 }
1618 
1625 {
1626  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1627  if (it != m_mapOfTrackers.end()) {
1628  return it->second->getNbPolygon();
1629  }
1630 
1631  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1632  return 0;
1633 }
1634 
1640 void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1641 {
1642  mapOfNbPolygons.clear();
1643 
1644  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1645  it != m_mapOfTrackers.end(); ++it) {
1646  TrackerWrapper *tracker = it->second;
1647  mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1648  }
1649 }
1650 
1662 {
1663  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1664  if (it != m_mapOfTrackers.end()) {
1665  return it->second->getPolygon(index);
1666  }
1667 
1668  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1669  return nullptr;
1670 }
1671 
1683 vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, unsigned int index)
1684 {
1685  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1686  if (it != m_mapOfTrackers.end()) {
1687  return it->second->getPolygon(index);
1688  }
1689 
1690  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1691  return nullptr;
1692 }
1693 
1709 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1710 vpMbGenericTracker::getPolygonFaces(bool orderPolygons, bool useVisibility, bool clipPolygon)
1711 {
1712  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1713 
1714  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1715  if (it != m_mapOfTrackers.end()) {
1716  TrackerWrapper *tracker = it->second;
1717  polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1718  }
1719  else {
1720  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1721  }
1722 
1723  return polygonFaces;
1724 }
1725 
1743 void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1744  std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1745  bool orderPolygons, bool useVisibility, bool clipPolygon)
1746 {
1747  mapOfPolygons.clear();
1748  mapOfPoints.clear();
1749 
1750  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1751  it != m_mapOfTrackers.end(); ++it) {
1752  TrackerWrapper *tracker = it->second;
1753  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1754  tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1755 
1756  mapOfPolygons[it->first] = polygonFaces.first;
1757  mapOfPoints[it->first] = polygonFaces.second;
1758  }
1759 }
1760 
1762 
1772 {
1773  if (m_mapOfTrackers.size() == 2) {
1774  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1775  it->second->getPose(c1Mo);
1776  ++it;
1777 
1778  it->second->getPose(c2Mo);
1779  }
1780  else {
1781  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1782  << std::endl;
1783  }
1784 }
1785 
1791 void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1792 {
1793  // Clear the map
1794  mapOfCameraPoses.clear();
1795 
1796  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1797  it != m_mapOfTrackers.end(); ++it) {
1798  TrackerWrapper *tracker = it->second;
1799  tracker->getPose(mapOfCameraPoses[it->first]);
1800  }
1801 }
1802 
1807 
1812 {
1813  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1814  if (it != m_mapOfTrackers.end()) {
1815  TrackerWrapper *tracker = it->second;
1816  return tracker->getTrackerType();
1817  }
1818  else {
1819  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1820  m_referenceCameraName.c_str());
1821  }
1822 }
1823 
1825 {
1826  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1827  it != m_mapOfTrackers.end(); ++it) {
1828  TrackerWrapper *tracker = it->second;
1829  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
1830  tracker->init(I);
1831  }
1832 }
1833 
void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
                                    double /*radius*/, int /*idFace*/, const std::string & /*name*/)
{
  // The generic tracker delegates model loading to its per-camera TrackerWrapper
  // instances, so this inherited hook must never be reached on this class.
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
}
1839 
1840 #ifdef VISP_HAVE_MODULE_GUI
1841 
1885  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1886  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1887 {
1888  if (m_mapOfTrackers.size() == 2) {
1889  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1890  TrackerWrapper *tracker = it->second;
1891  tracker->initClick(I1, initFile1, displayHelp, T1);
1892 
1893  ++it;
1894 
1895  tracker = it->second;
1896  tracker->initClick(I2, initFile2, displayHelp, T2);
1897 
1899  if (it != m_mapOfTrackers.end()) {
1900  tracker = it->second;
1901 
1902  // Set the reference cMo
1903  tracker->getPose(m_cMo);
1904  }
1905  }
1906  else {
1908  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1909  }
1910 }
1911 
1954 void vpMbGenericTracker::initClick(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
1955  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1956  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1957 {
1958  if (m_mapOfTrackers.size() == 2) {
1959  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1960  TrackerWrapper *tracker = it->second;
1961  tracker->initClick(I_color1, initFile1, displayHelp, T1);
1962 
1963  ++it;
1964 
1965  tracker = it->second;
1966  tracker->initClick(I_color2, initFile2, displayHelp, T2);
1967 
1969  if (it != m_mapOfTrackers.end()) {
1970  tracker = it->second;
1971 
1972  // Set the reference cMo
1973  tracker->getPose(m_cMo);
1974  }
1975  }
1976  else {
1978  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1979  }
1980 }
1981 
2024 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2025  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2026  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2027 {
2028  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2029  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2030  mapOfImages.find(m_referenceCameraName);
2031  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2032 
2033  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2034  TrackerWrapper *tracker = it_tracker->second;
2035  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2036  if (it_T != mapOfT.end())
2037  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2038  else
2039  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2040  tracker->getPose(m_cMo);
2041  }
2042  else {
2043  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2044  }
2045 
2046  // Vector of missing initFile for cameras
2047  std::vector<std::string> vectorOfMissingCameraPoses;
2048 
2049  // Set pose for the specified cameras
2050  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2051  if (it_tracker->first != m_referenceCameraName) {
2052  it_img = mapOfImages.find(it_tracker->first);
2053  it_initFile = mapOfInitFiles.find(it_tracker->first);
2054 
2055  if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2056  // InitClick for the current camera
2057  TrackerWrapper *tracker = it_tracker->second;
2058  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2059  }
2060  else {
2061  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2062  }
2063  }
2064  }
2065 
2066  // Init for cameras that do not have an initFile
2067  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2068  it != vectorOfMissingCameraPoses.end(); ++it) {
2069  it_img = mapOfImages.find(*it);
2070  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2072 
2073  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2074  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2075  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2076  m_mapOfTrackers[*it]->init(*it_img->second);
2077  }
2078  else {
2080  "Missing image or missing camera transformation "
2081  "matrix! Cannot set the pose for camera: %s!",
2082  it->c_str());
2083  }
2084  }
2085 }
2086 
2129 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2130  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2131  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2132 {
2133  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2134  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2135  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2136 
2137  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2138  TrackerWrapper *tracker = it_tracker->second;
2139  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2140  if (it_T != mapOfT.end())
2141  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2142  else
2143  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2144  tracker->getPose(m_cMo);
2145  }
2146  else {
2147  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2148  }
2149 
2150  // Vector of missing initFile for cameras
2151  std::vector<std::string> vectorOfMissingCameraPoses;
2152 
2153  // Set pose for the specified cameras
2154  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2155  if (it_tracker->first != m_referenceCameraName) {
2156  it_img = mapOfColorImages.find(it_tracker->first);
2157  it_initFile = mapOfInitFiles.find(it_tracker->first);
2158 
2159  if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2160  // InitClick for the current camera
2161  TrackerWrapper *tracker = it_tracker->second;
2162  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2163  }
2164  else {
2165  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2166  }
2167  }
2168  }
2169 
2170  // Init for cameras that do not have an initFile
2171  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2172  it != vectorOfMissingCameraPoses.end(); ++it) {
2173  it_img = mapOfColorImages.find(*it);
2174  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2176 
2177  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2178  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2179  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2180  vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
2181  m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
2182  }
2183  else {
2185  "Missing image or missing camera transformation "
2186  "matrix! Cannot set the pose for camera: %s!",
2187  it->c_str());
2188  }
2189  }
2190 }
2191 #endif
2192 
void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
                                      const int /*idFace*/, const std::string & /*name*/)
{
  // The generic tracker delegates model loading to its per-camera TrackerWrapper
  // instances, so this inherited hook must never be reached on this class.
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
}
2198 
2200 {
2201  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
2202 }
2203 
2205 {
2206  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
2207 }
2208 
2239  const std::string &initFile1, const std::string &initFile2)
2240 {
2241  if (m_mapOfTrackers.size() == 2) {
2242  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2243  TrackerWrapper *tracker = it->second;
2244  tracker->initFromPoints(I1, initFile1);
2245 
2246  ++it;
2247 
2248  tracker = it->second;
2249  tracker->initFromPoints(I2, initFile2);
2250 
2252  if (it != m_mapOfTrackers.end()) {
2253  tracker = it->second;
2254 
2255  // Set the reference cMo
2256  tracker->getPose(m_cMo);
2257 
2258  // Set the reference camera parameters
2259  tracker->getCameraParameters(m_cam);
2260  }
2261  }
2262  else {
2264  "Cannot initFromPoints()! Require two cameras but "
2265  "there are %d cameras!",
2266  m_mapOfTrackers.size());
2267  }
2268 }
2269 
2300  const std::string &initFile1, const std::string &initFile2)
2301 {
2302  if (m_mapOfTrackers.size() == 2) {
2303  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2304  TrackerWrapper *tracker = it->second;
2305  tracker->initFromPoints(I_color1, initFile1);
2306 
2307  ++it;
2308 
2309  tracker = it->second;
2310  tracker->initFromPoints(I_color2, initFile2);
2311 
2313  if (it != m_mapOfTrackers.end()) {
2314  tracker = it->second;
2315 
2316  // Set the reference cMo
2317  tracker->getPose(m_cMo);
2318 
2319  // Set the reference camera parameters
2320  tracker->getCameraParameters(m_cam);
2321  }
2322  }
2323  else {
2325  "Cannot initFromPoints()! Require two cameras but "
2326  "there are %d cameras!",
2327  m_mapOfTrackers.size());
2328  }
2329 }
2330 
2331 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2332  const std::map<std::string, std::string> &mapOfInitPoints)
2333 {
2334  // Set the reference cMo
2335  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2336  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2337  mapOfImages.find(m_referenceCameraName);
2338  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2339 
2340  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2341  TrackerWrapper *tracker = it_tracker->second;
2342  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2343  tracker->getPose(m_cMo);
2344  }
2345  else {
2346  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2347  }
2348 
2349  // Vector of missing initPoints for cameras
2350  std::vector<std::string> vectorOfMissingCameraPoints;
2351 
2352  // Set pose for the specified cameras
2353  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2354  it_img = mapOfImages.find(it_tracker->first);
2355  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2356 
2357  if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2358  // Set pose
2359  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2360  }
2361  else {
2362  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2363  }
2364  }
2365 
2366  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2367  it != vectorOfMissingCameraPoints.end(); ++it) {
2368  it_img = mapOfImages.find(*it);
2369  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2371 
2372  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2373  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2374  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2375  }
2376  else {
2378  "Missing image or missing camera transformation "
2379  "matrix! Cannot init the pose for camera: %s!",
2380  it->c_str());
2381  }
2382  }
2383 }
2384 
2385 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2386  const std::map<std::string, std::string> &mapOfInitPoints)
2387 {
2388  // Set the reference cMo
2389  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2390  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2391  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2392 
2393  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
2394  it_initPoints != mapOfInitPoints.end()) {
2395  TrackerWrapper *tracker = it_tracker->second;
2396  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2397  tracker->getPose(m_cMo);
2398  }
2399  else {
2400  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2401  }
2402 
2403  // Vector of missing initPoints for cameras
2404  std::vector<std::string> vectorOfMissingCameraPoints;
2405 
2406  // Set pose for the specified cameras
2407  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2408  it_img = mapOfColorImages.find(it_tracker->first);
2409  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2410 
2411  if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2412  // Set pose
2413  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2414  }
2415  else {
2416  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2417  }
2418  }
2419 
2420  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2421  it != vectorOfMissingCameraPoints.end(); ++it) {
2422  it_img = mapOfColorImages.find(*it);
2423  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2425 
2426  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2427  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2428  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2429  }
2430  else {
2432  "Missing image or missing camera transformation "
2433  "matrix! Cannot init the pose for camera: %s!",
2434  it->c_str());
2435  }
2436  }
2437 }
2438 
2440 {
2441  vpMbTracker::initFromPose(I, cMo);
2442 }
2443 
2456  const std::string &initFile1, const std::string &initFile2)
2457 {
2458  if (m_mapOfTrackers.size() == 2) {
2459  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2460  TrackerWrapper *tracker = it->second;
2461  tracker->initFromPose(I1, initFile1);
2462 
2463  ++it;
2464 
2465  tracker = it->second;
2466  tracker->initFromPose(I2, initFile2);
2467 
2469  if (it != m_mapOfTrackers.end()) {
2470  tracker = it->second;
2471 
2472  // Set the reference cMo
2473  tracker->getPose(m_cMo);
2474 
2475  // Set the reference camera parameters
2476  tracker->getCameraParameters(m_cam);
2477  }
2478  }
2479  else {
2481  "Cannot initFromPose()! Require two cameras but there "
2482  "are %d cameras!",
2483  m_mapOfTrackers.size());
2484  }
2485 }
2486 
2499  const std::string &initFile1, const std::string &initFile2)
2500 {
2501  if (m_mapOfTrackers.size() == 2) {
2502  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2503  TrackerWrapper *tracker = it->second;
2504  tracker->initFromPose(I_color1, initFile1);
2505 
2506  ++it;
2507 
2508  tracker = it->second;
2509  tracker->initFromPose(I_color2, initFile2);
2510 
2512  if (it != m_mapOfTrackers.end()) {
2513  tracker = it->second;
2514 
2515  // Set the reference cMo
2516  tracker->getPose(m_cMo);
2517 
2518  // Set the reference camera parameters
2519  tracker->getCameraParameters(m_cam);
2520  }
2521  }
2522  else {
2524  "Cannot initFromPose()! Require two cameras but there "
2525  "are %d cameras!",
2526  m_mapOfTrackers.size());
2527  }
2528 }
2529 
2544 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2545  const std::map<std::string, std::string> &mapOfInitPoses)
2546 {
2547  // Set the reference cMo
2548  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2549  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2550  mapOfImages.find(m_referenceCameraName);
2551  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2552 
2553  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2554  TrackerWrapper *tracker = it_tracker->second;
2555  tracker->initFromPose(*it_img->second, it_initPose->second);
2556  tracker->getPose(m_cMo);
2557  }
2558  else {
2559  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2560  }
2561 
2562  // Vector of missing pose matrices for cameras
2563  std::vector<std::string> vectorOfMissingCameraPoses;
2564 
2565  // Set pose for the specified cameras
2566  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2567  it_img = mapOfImages.find(it_tracker->first);
2568  it_initPose = mapOfInitPoses.find(it_tracker->first);
2569 
2570  if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2571  // Set pose
2572  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2573  }
2574  else {
2575  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2576  }
2577  }
2578 
2579  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2580  it != vectorOfMissingCameraPoses.end(); ++it) {
2581  it_img = mapOfImages.find(*it);
2582  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2584 
2585  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2586  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2587  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2588  }
2589  else {
2591  "Missing image or missing camera transformation "
2592  "matrix! Cannot init the pose for camera: %s!",
2593  it->c_str());
2594  }
2595  }
2596 }
2597 
2612 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2613  const std::map<std::string, std::string> &mapOfInitPoses)
2614 {
2615  // Set the reference cMo
2616  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2617  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2618  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2619 
2620  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2621  TrackerWrapper *tracker = it_tracker->second;
2622  tracker->initFromPose(*it_img->second, it_initPose->second);
2623  tracker->getPose(m_cMo);
2624  }
2625  else {
2626  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2627  }
2628 
2629  // Vector of missing pose matrices for cameras
2630  std::vector<std::string> vectorOfMissingCameraPoses;
2631 
2632  // Set pose for the specified cameras
2633  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2634  it_img = mapOfColorImages.find(it_tracker->first);
2635  it_initPose = mapOfInitPoses.find(it_tracker->first);
2636 
2637  if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2638  // Set pose
2639  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2640  }
2641  else {
2642  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2643  }
2644  }
2645 
2646  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2647  it != vectorOfMissingCameraPoses.end(); ++it) {
2648  it_img = mapOfColorImages.find(*it);
2649  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2651 
2652  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2653  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2654  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2655  }
2656  else {
2658  "Missing image or missing camera transformation "
2659  "matrix! Cannot init the pose for camera: %s!",
2660  it->c_str());
2661  }
2662  }
2663 }
2664 
2676  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2677 {
2678  if (m_mapOfTrackers.size() == 2) {
2679  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2680  it->second->initFromPose(I1, c1Mo);
2681 
2682  ++it;
2683 
2684  it->second->initFromPose(I2, c2Mo);
2685 
2686  m_cMo = c1Mo;
2687  }
2688  else {
2690  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2691  }
2692 }
2693 
2705  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2706 {
2707  if (m_mapOfTrackers.size() == 2) {
2708  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2709  it->second->initFromPose(I_color1, c1Mo);
2710 
2711  ++it;
2712 
2713  it->second->initFromPose(I_color2, c2Mo);
2714 
2715  m_cMo = c1Mo;
2716  }
2717  else {
2719  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2720  }
2721 }
2722 
2736 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2737  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2738 {
2739  // Set the reference cMo
2740  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2741  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2742  mapOfImages.find(m_referenceCameraName);
2743  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2744 
2745  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2746  TrackerWrapper *tracker = it_tracker->second;
2747  tracker->initFromPose(*it_img->second, it_camPose->second);
2748  tracker->getPose(m_cMo);
2749  }
2750  else {
2751  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2752  }
2753 
2754  // Vector of missing pose matrices for cameras
2755  std::vector<std::string> vectorOfMissingCameraPoses;
2756 
2757  // Set pose for the specified cameras
2758  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2759  it_img = mapOfImages.find(it_tracker->first);
2760  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2761 
2762  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2763  // Set pose
2764  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2765  }
2766  else {
2767  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2768  }
2769  }
2770 
2771  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2772  it != vectorOfMissingCameraPoses.end(); ++it) {
2773  it_img = mapOfImages.find(*it);
2774  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2776 
2777  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2778  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2779  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2780  }
2781  else {
2783  "Missing image or missing camera transformation "
2784  "matrix! Cannot set the pose for camera: %s!",
2785  it->c_str());
2786  }
2787  }
2788 }
2789 
2803 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2804  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2805 {
2806  // Set the reference cMo
2807  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2808  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2809  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2810 
2811  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2812  TrackerWrapper *tracker = it_tracker->second;
2813  tracker->initFromPose(*it_img->second, it_camPose->second);
2814  tracker->getPose(m_cMo);
2815  }
2816  else {
2817  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2818  }
2819 
2820  // Vector of missing pose matrices for cameras
2821  std::vector<std::string> vectorOfMissingCameraPoses;
2822 
2823  // Set pose for the specified cameras
2824  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2825  it_img = mapOfColorImages.find(it_tracker->first);
2826  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2827 
2828  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2829  // Set pose
2830  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2831  }
2832  else {
2833  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2834  }
2835  }
2836 
2837  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2838  it != vectorOfMissingCameraPoses.end(); ++it) {
2839  it_img = mapOfColorImages.find(*it);
2840  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2842 
2843  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2844  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2845  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2846  }
2847  else {
2849  "Missing image or missing camera transformation "
2850  "matrix! Cannot set the pose for camera: %s!",
2851  it->c_str());
2852  }
2853  }
2854 }
2855 
2867 void vpMbGenericTracker::loadConfigFile(const std::string &configFile, bool verbose)
2868 {
2869  const std::string extension = vpIoTools::getFileExtension(configFile);
2870  if (extension == ".xml") {
2871  loadConfigFileXML(configFile, verbose);
2872  }
2873 #ifdef VISP_HAVE_NLOHMANN_JSON
2874  else if (extension == ".json") {
2875  loadConfigFileJSON(configFile, verbose);
2876  }
2877 #endif
2878  else {
2879  throw vpException(vpException::ioError, "MBT config parsing: File format " + extension + "for file " + configFile + " is not supported.");
2880  }
2881 }
2882 
2894 void vpMbGenericTracker::loadConfigFileXML(const std::string &configFile, bool verbose)
2895 {
2896  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2897  it != m_mapOfTrackers.end(); ++it) {
2898  TrackerWrapper *tracker = it->second;
2899  tracker->loadConfigFile(configFile, verbose);
2900  }
2901 
2903  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2904  }
2905 
2906  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2907  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2908  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2909  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2910 }
2911 
2912 #ifdef VISP_HAVE_NLOHMANN_JSON
/*!
  Load a JSON configuration file: parses the settings, checks the settings
  version, (re)creates one TrackerWrapper per camera described in the file,
  removes cameras no longer listed, mirrors the reference camera settings on
  this tracker, and applies the optional global display/visibility/VVS
  settings. If a "model" entry is present, the 3D model is loaded as well.

  \param settingsFile : Path to the JSON settings file.
  \param verbose : If true, print the per-camera settings while loading.

  \exception vpException::ioError : If the file cannot be read or parsed.
  \exception vpException::notInitialized : If version info is missing or a
  non-reference camera has no transformation to the reference camera.
  \exception vpException::badValue : If the settings version is outdated or
  the reference camera is not among the trackers.
*/
void vpMbGenericTracker::loadConfigFileJSON(const std::string &settingsFile, bool verbose)
{
  //Read file
  std::ifstream jsonFile(settingsFile);
  if (!jsonFile.good()) {
    throw vpException(vpException::ioError, "Could not read from settings file " + settingsFile + " to initialise the vpMbGenericTracker");
  }
  json settings;
  try {
    settings = json::parse(jsonFile);
  }
  catch (json::parse_error &e) {
    std::stringstream msg;
    msg << "Could not parse JSON file : \n";

    msg << e.what() << std::endl;
    msg << "Byte position of error: " << e.byte;
    throw vpException(vpException::ioError, msg.str());
  }
  jsonFile.close();

  // Refuse configurations whose versioning scheme does not match the one
  // compiled into this library
  if (!settings.contains("version")) {
    throw vpException(vpException::notInitialized, "JSON configuration does not contain versioning information");
  }
  else if (settings["version"].get<std::string>() != MBT_JSON_SETTINGS_VERSION) {
    throw vpException(vpException::badValue, "Trying to load an old configuration file");
  }

  //Load Basic settings
  settings.at("referenceCameraName").get_to(m_referenceCameraName);
  json trackersJson;
  trackersJson = settings.at("trackers");

  //Find camera that are already present in the tracker but not in the config file: they will be removed
  std::vector<std::string> unusedCameraNames = getCameraNames();

  bool refCameraFound = false;
  //Foreach camera
  for (const auto &it : trackersJson.items()) {
    const std::string cameraName = it.key();
    const json trackerJson = it.value();
    refCameraFound = refCameraFound || cameraName == m_referenceCameraName;

    //Load transformation between current camera and reference camera, if it exists
    if (trackerJson.contains("camTref")) {
      m_mapOfCameraTransformationMatrix[cameraName] = trackerJson["camTref"].get<vpHomogeneousMatrix>();
    }
    else if (cameraName != m_referenceCameraName) { // No transformation to reference and its not the reference itself
      throw vpException(vpException::notInitialized, "Camera " + cameraName + " has no transformation to the reference camera");
    }
    if (verbose) {
      std::cout << "Loading tracker " << cameraName << std::endl << " with settings: " << std::endl << trackerJson.dump(2);
    }
    if (m_mapOfTrackers.count(cameraName)) {
      if (verbose) {
        std::cout << "Updating an already existing tracker with JSON configuration." << std::endl;
      }
      from_json(trackerJson, *(m_mapOfTrackers[cameraName]));
    }
    else {
      if (verbose) {
        std::cout << "Creating a new tracker from JSON configuration." << std::endl;
      }
      TrackerWrapper *tw = new TrackerWrapper(); //vpMBTracker is responsible for deleting trackers
      *tw = trackerJson;
      m_mapOfTrackers[cameraName] = tw;
    }
    const auto unusedCamIt = std::remove(unusedCameraNames.begin(), unusedCameraNames.end(), cameraName); // Mark this camera name as used
    unusedCameraNames.erase(unusedCamIt, unusedCameraNames.end());
  }
  if (!refCameraFound) {
    throw vpException(vpException::badValue, "Reference camera not found in trackers");
  }

  // All cameras that were defined in the tracker but not in the config file are removed
  for (const std::string &oldCameraName : unusedCameraNames) {
    m_mapOfCameraTransformationMatrix.erase(oldCameraName);
    TrackerWrapper *tw = m_mapOfTrackers[oldCameraName];
    m_mapOfTrackers.erase(oldCameraName);
    delete tw;
  }

  // Mirror the reference camera settings on the generic tracker itself
  const TrackerWrapper *refTracker = m_mapOfTrackers[m_referenceCameraName];
  refTracker->getCameraParameters(m_cam);
  this->angleAppears = refTracker->getAngleAppear();
  this->angleDisappears = refTracker->getAngleDisappear();
  this->clippingFlag = refTracker->getClipping();
  this->distNearClip = refTracker->getNearClippingDistance();
  this->distFarClip = refTracker->getFarClippingDistance();

  // These settings can be set in each tracker or globally. Global value overrides local ones.
  if (settings.contains("display")) {
    const json displayJson = settings["display"];
    setDisplayFeatures(displayJson.value("features", displayFeatures));
    setProjectionErrorDisplay(displayJson.value("projectionError", m_projectionErrorDisplay));
  }
  if (settings.contains("visibilityTest")) {
    const json visJson = settings["visibilityTest"];
    setOgreVisibilityTest(visJson.value("ogre", useOgre));
    setScanLineVisibilityTest(visJson.value("scanline", useScanLine));
  }

  // VVS global settings
  if (settings.contains("vvs")) {
    const json vvsJson = settings["vvs"];
    setLambda(vvsJson.value("lambda", this->m_lambda));
    setMaxIter(vvsJson.value("maxIter", this->m_maxIter));
    setInitialMu(vvsJson.value("initialMu", this->m_initialMu));
  }

  //If a 3D model is defined, load it
  if (settings.contains("model")) {
    loadModel(settings.at("model").get<std::string>(), verbose);
  }
}
3036 
3044 void vpMbGenericTracker::saveConfigFile(const std::string &settingsFile) const
3045 {
3046  json j;
3047  j["referenceCameraName"] = m_referenceCameraName;
3048  j["version"] = MBT_JSON_SETTINGS_VERSION;
3049  // j["thresholdOutlier"] = m_thresholdOutlier;
3050  json trackers;
3051  for (const auto &kv : m_mapOfTrackers) {
3052  trackers[kv.first] = *(kv.second);
3053  const auto itTransformation = m_mapOfCameraTransformationMatrix.find(kv.first);
3054  if (itTransformation != m_mapOfCameraTransformationMatrix.end()) {
3055  trackers[kv.first]["camTref"] = itTransformation->second;
3056  }
3057  }
3058  j["trackers"] = trackers;
3059  j["vvs"] = json {
3060  {"lambda", m_lambda},
3061  {"maxIter", m_maxIter},
3062  {"initialMu", m_initialMu}
3063  };
3064 
3065  std::ofstream f(settingsFile);
3066  if (f.good()) {
3067  const unsigned indentLevel = 4;
3068  f << j.dump(indentLevel);
3069  f.close();
3070  }
3071  else {
3072  throw vpException(vpException::ioError, "Could not save tracker configuration to JSON file: " + settingsFile);
3073  }
3074 }
3075 
3076 #endif
3077 
3092 void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2, bool verbose)
3093 {
3094  if (m_mapOfTrackers.size() != 2) {
3095  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3096  }
3097 
3098  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3099  TrackerWrapper *tracker = it_tracker->second;
3100  tracker->loadConfigFile(configFile1, verbose);
3101 
3102  ++it_tracker;
3103  tracker = it_tracker->second;
3104  tracker->loadConfigFile(configFile2, verbose);
3105 
3107  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
3108  }
3109 
3110  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
3111  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
3112  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
3113  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
3114 }
3115 
3129 void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles, bool verbose)
3130 {
3131  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3132  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3133  TrackerWrapper *tracker = it_tracker->second;
3134 
3135  std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
3136  if (it_config != mapOfConfigFiles.end()) {
3137  tracker->loadConfigFile(it_config->second, verbose);
3138  }
3139  else {
3140  throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
3141  it_tracker->first.c_str());
3142  }
3143  }
3144 
3145  // Set the reference camera parameters
3146  std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
3147  if (it != m_mapOfTrackers.end()) {
3148  TrackerWrapper *tracker = it->second;
3149  tracker->getCameraParameters(m_cam);
3150 
3151  // Set clipping
3152  this->clippingFlag = tracker->getClipping();
3153  this->angleAppears = tracker->getAngleAppear();
3154  this->angleDisappears = tracker->getAngleDisappear();
3155  }
3156  else {
3157  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
3158  m_referenceCameraName.c_str());
3159  }
3160 }
3161 
3180 void vpMbGenericTracker::loadModel(const std::string &modelFile, bool verbose, const vpHomogeneousMatrix &T)
3181 {
3182  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3183  it != m_mapOfTrackers.end(); ++it) {
3184  TrackerWrapper *tracker = it->second;
3185  tracker->loadModel(modelFile, verbose, T);
3186  }
3187 }
3188 
3211 void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, bool verbose,
3212  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3213 {
3214  if (m_mapOfTrackers.size() != 2) {
3215  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3216  }
3217 
3218  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3219  TrackerWrapper *tracker = it_tracker->second;
3220  tracker->loadModel(modelFile1, verbose, T1);
3221 
3222  ++it_tracker;
3223  tracker = it_tracker->second;
3224  tracker->loadModel(modelFile2, verbose, T2);
3225 }
3226 
3246 void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, bool verbose,
3247  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3248 {
3249  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3250  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3251  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
3252 
3253  if (it_model != mapOfModelFiles.end()) {
3254  TrackerWrapper *tracker = it_tracker->second;
3255  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3256 
3257  if (it_T != mapOfT.end())
3258  tracker->loadModel(it_model->second, verbose, it_T->second);
3259  else
3260  tracker->loadModel(it_model->second, verbose);
3261  }
3262  else {
3263  throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
3264  it_tracker->first.c_str());
3265  }
3266  }
3267 }
3268 
3269 #ifdef VISP_HAVE_PCL
3270 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3271  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3272 {
3273  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3274  it != m_mapOfTrackers.end(); ++it) {
3275  TrackerWrapper *tracker = it->second;
3276  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3277  }
3278 }
3279 #endif
3280 
3281 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3282  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3283  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3284  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3285 {
3286  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3287  it != m_mapOfTrackers.end(); ++it) {
3288  TrackerWrapper *tracker = it->second;
3289  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3290  mapOfPointCloudHeights[it->first]);
3291  }
3292 }
3293 
3294 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3295  std::map<std::string, const vpMatrix *> &mapOfPointClouds,
3296  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3297  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3298 {
3299  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3300  it != m_mapOfTrackers.end(); ++it) {
3301  TrackerWrapper *tracker = it->second;
3302  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3303  mapOfPointCloudHeights[it->first]);
3304  }
3305 }
3306 
3318 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
3319  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3320 {
3321  if (m_mapOfTrackers.size() != 1) {
3322  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3323  m_mapOfTrackers.size());
3324  }
3325 
3326  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3327  if (it_tracker != m_mapOfTrackers.end()) {
3328  TrackerWrapper *tracker = it_tracker->second;
3329  tracker->reInitModel(I, cad_name, cMo, verbose, T);
3330 
3331  // Set reference pose
3332  tracker->getPose(m_cMo);
3333  }
3334  else {
3335  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3336  }
3337 
3338  modelInitialised = true;
3339 }
3340 
3352 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
3353  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3354 {
3355  if (m_mapOfTrackers.size() != 1) {
3356  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3357  m_mapOfTrackers.size());
3358  }
3359 
3360  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3361  if (it_tracker != m_mapOfTrackers.end()) {
3362  TrackerWrapper *tracker = it_tracker->second;
3363  tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3364 
3365  // Set reference pose
3366  tracker->getPose(m_cMo);
3367  }
3368  else {
3369  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3370  }
3371 
3372  modelInitialised = true;
3373 }
3374 
3396  const std::string &cad_name1, const std::string &cad_name2,
3397  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3398  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3399 {
3400  if (m_mapOfTrackers.size() == 2) {
3401  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3402 
3403  it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3404 
3405  ++it_tracker;
3406 
3407  it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3408 
3409  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3410  if (it_tracker != m_mapOfTrackers.end()) {
3411  // Set reference pose
3412  it_tracker->second->getPose(m_cMo);
3413  }
3414  }
3415  else {
3416  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3417  }
3418 
3419  modelInitialised = true;
3420 }
3421 
3443  const std::string &cad_name1, const std::string &cad_name2,
3444  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3445  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3446 {
3447  if (m_mapOfTrackers.size() == 2) {
3448  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3449 
3450  it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3451 
3452  ++it_tracker;
3453 
3454  it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3455 
3456  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3457  if (it_tracker != m_mapOfTrackers.end()) {
3458  // Set reference pose
3459  it_tracker->second->getPose(m_cMo);
3460  }
3461  }
3462  else {
3463  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3464  }
3465 
3466  modelInitialised = true;
3467 }
3468 
3483 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3484  const std::map<std::string, std::string> &mapOfModelFiles,
3485  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3486  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3487 {
3488  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3489  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3490  mapOfImages.find(m_referenceCameraName);
3491  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3492  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3493 
3494  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3495  it_camPose != mapOfCameraPoses.end()) {
3496  TrackerWrapper *tracker = it_tracker->second;
3497  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3498  if (it_T != mapOfT.end())
3499  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3500  else
3501  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3502 
3503  // Set reference pose
3504  tracker->getPose(m_cMo);
3505  }
3506  else {
3507  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3508  }
3509 
3510  std::vector<std::string> vectorOfMissingCameras;
3511  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3512  if (it_tracker->first != m_referenceCameraName) {
3513  it_img = mapOfImages.find(it_tracker->first);
3514  it_model = mapOfModelFiles.find(it_tracker->first);
3515  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3516 
3517  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3518  TrackerWrapper *tracker = it_tracker->second;
3519  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3520  }
3521  else {
3522  vectorOfMissingCameras.push_back(it_tracker->first);
3523  }
3524  }
3525  }
3526 
3527  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3528  ++it) {
3529  it_img = mapOfImages.find(*it);
3530  it_model = mapOfModelFiles.find(*it);
3531  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3533 
3534  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3535  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3536  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3537  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3538  }
3539  }
3540 
3541  modelInitialised = true;
3542 }
3543 
3558 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3559  const std::map<std::string, std::string> &mapOfModelFiles,
3560  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3561  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3562 {
3563  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3564  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
3565  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3566  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3567 
3568  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3569  it_camPose != mapOfCameraPoses.end()) {
3570  TrackerWrapper *tracker = it_tracker->second;
3571  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3572  if (it_T != mapOfT.end())
3573  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3574  else
3575  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3576 
3577  // Set reference pose
3578  tracker->getPose(m_cMo);
3579  }
3580  else {
3581  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3582  }
3583 
3584  std::vector<std::string> vectorOfMissingCameras;
3585  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3586  if (it_tracker->first != m_referenceCameraName) {
3587  it_img = mapOfColorImages.find(it_tracker->first);
3588  it_model = mapOfModelFiles.find(it_tracker->first);
3589  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3590 
3591  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3592  it_camPose != mapOfCameraPoses.end()) {
3593  TrackerWrapper *tracker = it_tracker->second;
3594  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3595  }
3596  else {
3597  vectorOfMissingCameras.push_back(it_tracker->first);
3598  }
3599  }
3600  }
3601 
3602  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3603  ++it) {
3604  it_img = mapOfColorImages.find(*it);
3605  it_model = mapOfModelFiles.find(*it);
3606  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3608 
3609  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3610  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3611  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3612  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3613  }
3614  }
3615 
3616  modelInitialised = true;
3617 }
3618 
3624 {
3625  m_cMo.eye();
3626 
3627  useScanLine = false;
3628 
3629 #ifdef VISP_HAVE_OGRE
3630  useOgre = false;
3631 #endif
3632 
3633  m_computeInteraction = true;
3634  m_lambda = 1.0;
3635 
3636  angleAppears = vpMath::rad(89);
3639  distNearClip = 0.001;
3640  distFarClip = 100;
3641 
3643  m_maxIter = 30;
3644  m_stopCriteriaEpsilon = 1e-8;
3645  m_initialMu = 0.01;
3646 
3647  // Only for Edge
3648  m_percentageGdPt = 0.4;
3649 
3650  // Only for KLT
3651  m_thresholdOutlier = 0.5;
3652 
3653  // Reset default ponderation between each feature type
3655 
3656 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
3658 #endif
3659 
3662 
3663  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3664  it != m_mapOfTrackers.end(); ++it) {
3665  TrackerWrapper *tracker = it->second;
3666  tracker->resetTracker();
3667  }
3668 }
3669 
3680 {
3682 
3683  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3684  it != m_mapOfTrackers.end(); ++it) {
3685  TrackerWrapper *tracker = it->second;
3686  tracker->setAngleAppear(a);
3687  }
3688 }
3689 
3702 void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3703 {
3704  if (m_mapOfTrackers.size() == 2) {
3705  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3706  it->second->setAngleAppear(a1);
3707 
3708  ++it;
3709  it->second->setAngleAppear(a2);
3710 
3712  if (it != m_mapOfTrackers.end()) {
3713  angleAppears = it->second->getAngleAppear();
3714  }
3715  else {
3716  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3717  }
3718  }
3719  else {
3720  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3721  m_mapOfTrackers.size());
3722  }
3723 }
3724 
3734 void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3735 {
3736  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3737  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3738 
3739  if (it_tracker != m_mapOfTrackers.end()) {
3740  TrackerWrapper *tracker = it_tracker->second;
3741  tracker->setAngleAppear(it->second);
3742 
3743  if (it->first == m_referenceCameraName) {
3744  angleAppears = it->second;
3745  }
3746  }
3747  }
3748 }
3749 
3760 {
3762 
3763  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3764  it != m_mapOfTrackers.end(); ++it) {
3765  TrackerWrapper *tracker = it->second;
3766  tracker->setAngleDisappear(a);
3767  }
3768 }
3769 
3782 void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3783 {
3784  if (m_mapOfTrackers.size() == 2) {
3785  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3786  it->second->setAngleDisappear(a1);
3787 
3788  ++it;
3789  it->second->setAngleDisappear(a2);
3790 
3792  if (it != m_mapOfTrackers.end()) {
3793  angleDisappears = it->second->getAngleDisappear();
3794  }
3795  else {
3796  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3797  }
3798  }
3799  else {
3800  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3801  m_mapOfTrackers.size());
3802  }
3803 }
3804 
3814 void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3815 {
3816  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3817  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3818 
3819  if (it_tracker != m_mapOfTrackers.end()) {
3820  TrackerWrapper *tracker = it_tracker->second;
3821  tracker->setAngleDisappear(it->second);
3822 
3823  if (it->first == m_referenceCameraName) {
3824  angleDisappears = it->second;
3825  }
3826  }
3827  }
3828 }
3829 
3836 {
3838 
3839  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3840  it != m_mapOfTrackers.end(); ++it) {
3841  TrackerWrapper *tracker = it->second;
3842  tracker->setCameraParameters(camera);
3843  }
3844 }
3845 
3855 {
3856  if (m_mapOfTrackers.size() == 2) {
3857  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3858  it->second->setCameraParameters(camera1);
3859 
3860  ++it;
3861  it->second->setCameraParameters(camera2);
3862 
3864  if (it != m_mapOfTrackers.end()) {
3865  it->second->getCameraParameters(m_cam);
3866  }
3867  else {
3868  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3869  }
3870  }
3871  else {
3872  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3873  m_mapOfTrackers.size());
3874  }
3875 }
3876 
3885 void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3886 {
3887  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3888  it != mapOfCameraParameters.end(); ++it) {
3889  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3890 
3891  if (it_tracker != m_mapOfTrackers.end()) {
3892  TrackerWrapper *tracker = it_tracker->second;
3893  tracker->setCameraParameters(it->second);
3894 
3895  if (it->first == m_referenceCameraName) {
3896  m_cam = it->second;
3897  }
3898  }
3899  }
3900 }
3901 
3910 void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3911  const vpHomogeneousMatrix &cameraTransformationMatrix)
3912 {
3913  std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3914 
3915  if (it != m_mapOfCameraTransformationMatrix.end()) {
3916  it->second = cameraTransformationMatrix;
3917  }
3918  else {
3919  throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3920  }
3921 }
3922 
3931  const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3932 {
3933  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3934  it != mapOfTransformationMatrix.end(); ++it) {
3935  std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3936  m_mapOfCameraTransformationMatrix.find(it->first);
3937 
3938  if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3939  it_camTrans->second = it->second;
3940  }
3941  }
3942 }
3943 
3953 void vpMbGenericTracker::setClipping(const unsigned int &flags)
3954 {
3955  vpMbTracker::setClipping(flags);
3956 
3957  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3958  it != m_mapOfTrackers.end(); ++it) {
3959  TrackerWrapper *tracker = it->second;
3960  tracker->setClipping(flags);
3961  }
3962 }
3963 
3974 void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3975 {
3976  if (m_mapOfTrackers.size() == 2) {
3977  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3978  it->second->setClipping(flags1);
3979 
3980  ++it;
3981  it->second->setClipping(flags2);
3982 
3984  if (it != m_mapOfTrackers.end()) {
3985  clippingFlag = it->second->getClipping();
3986  }
3987  else {
3988  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3989  }
3990  }
3991  else {
3992  std::stringstream ss;
3993  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3995  }
3996 }
3997 
4005 void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
4006 {
4007  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
4008  it != mapOfClippingFlags.end(); ++it) {
4009  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4010 
4011  if (it_tracker != m_mapOfTrackers.end()) {
4012  TrackerWrapper *tracker = it_tracker->second;
4013  tracker->setClipping(it->second);
4014 
4015  if (it->first == m_referenceCameraName) {
4016  clippingFlag = it->second;
4017  }
4018  }
4019  }
4020 }
4021 
4032 {
4033  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4034  it != m_mapOfTrackers.end(); ++it) {
4035  TrackerWrapper *tracker = it->second;
4036  tracker->setDepthDenseFilteringMaxDistance(maxDistance);
4037  }
4038 }
4039 
4049 {
4050  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4051  it != m_mapOfTrackers.end(); ++it) {
4052  TrackerWrapper *tracker = it->second;
4053  tracker->setDepthDenseFilteringMethod(method);
4054  }
4055 }
4056 
4067 {
4068  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4069  it != m_mapOfTrackers.end(); ++it) {
4070  TrackerWrapper *tracker = it->second;
4071  tracker->setDepthDenseFilteringMinDistance(minDistance);
4072  }
4073 }
4074 
4085 {
4086  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4087  it != m_mapOfTrackers.end(); ++it) {
4088  TrackerWrapper *tracker = it->second;
4089  tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
4090  }
4091 }
4092 
4101 void vpMbGenericTracker::setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
4102 {
4103  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4104  it != m_mapOfTrackers.end(); ++it) {
4105  TrackerWrapper *tracker = it->second;
4106  tracker->setDepthDenseSamplingStep(stepX, stepY);
4107  }
4108 }
4109 
4118 {
4119  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4120  it != m_mapOfTrackers.end(); ++it) {
4121  TrackerWrapper *tracker = it->second;
4122  tracker->setDepthNormalFaceCentroidMethod(method);
4123  }
4124 }
4125 
4135 {
4136  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4137  it != m_mapOfTrackers.end(); ++it) {
4138  TrackerWrapper *tracker = it->second;
4139  tracker->setDepthNormalFeatureEstimationMethod(method);
4140  }
4141 }
4142 
4151 {
4152  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4153  it != m_mapOfTrackers.end(); ++it) {
4154  TrackerWrapper *tracker = it->second;
4155  tracker->setDepthNormalPclPlaneEstimationMethod(method);
4156  }
4157 }
4158 
4167 {
4168  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4169  it != m_mapOfTrackers.end(); ++it) {
4170  TrackerWrapper *tracker = it->second;
4171  tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
4172  }
4173 }
4174 
4183 {
4184  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4185  it != m_mapOfTrackers.end(); ++it) {
4186  TrackerWrapper *tracker = it->second;
4187  tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
4188  }
4189 }
4190 
4199 void vpMbGenericTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
4200 {
4201  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4202  it != m_mapOfTrackers.end(); ++it) {
4203  TrackerWrapper *tracker = it->second;
4204  tracker->setDepthNormalSamplingStep(stepX, stepY);
4205  }
4206 }
4207 
4227 {
4229 
4230  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4231  it != m_mapOfTrackers.end(); ++it) {
4232  TrackerWrapper *tracker = it->second;
4233  tracker->setDisplayFeatures(displayF);
4234  }
4235 }
4236 
4245 {
4247 
4248  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4249  it != m_mapOfTrackers.end(); ++it) {
4250  TrackerWrapper *tracker = it->second;
4251  tracker->setFarClippingDistance(dist);
4252  }
4253 }
4254 
4263 void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
4264 {
4265  if (m_mapOfTrackers.size() == 2) {
4266  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4267  it->second->setFarClippingDistance(dist1);
4268 
4269  ++it;
4270  it->second->setFarClippingDistance(dist2);
4271 
4273  if (it != m_mapOfTrackers.end()) {
4274  distFarClip = it->second->getFarClippingDistance();
4275  }
4276  else {
4277  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4278  }
4279  }
4280  else {
4281  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4282  m_mapOfTrackers.size());
4283  }
4284 }
4285 
4291 void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
4292 {
4293  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4294  ++it) {
4295  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4296 
4297  if (it_tracker != m_mapOfTrackers.end()) {
4298  TrackerWrapper *tracker = it_tracker->second;
4299  tracker->setFarClippingDistance(it->second);
4300 
4301  if (it->first == m_referenceCameraName) {
4302  distFarClip = it->second;
4303  }
4304  }
4305  }
4306 }
4307 
4314 void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
4315 {
4316  for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
4317  ++it) {
4318  std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4319  if (it_factor != mapOfFeatureFactors.end()) {
4320  it->second = it_factor->second;
4321  }
4322  }
4323 }
4324 
4341 {
4342  m_percentageGdPt = threshold;
4343 
4344  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4345  it != m_mapOfTrackers.end(); ++it) {
4346  TrackerWrapper *tracker = it->second;
4347  tracker->setGoodMovingEdgesRatioThreshold(threshold);
4348  }
4349 }
4350 
4351 #ifdef VISP_HAVE_OGRE
4364 {
4365  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4366  it != m_mapOfTrackers.end(); ++it) {
4367  TrackerWrapper *tracker = it->second;
4368  tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4369  }
4370 }
4371 
4384 {
4385  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4386  it != m_mapOfTrackers.end(); ++it) {
4387  TrackerWrapper *tracker = it->second;
4388  tracker->setNbRayCastingAttemptsForVisibility(attempts);
4389  }
4390 }
4391 #endif
4392 
4393 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4402 {
4403  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4404  it != m_mapOfTrackers.end(); ++it) {
4405  TrackerWrapper *tracker = it->second;
4406  tracker->setKltOpencv(t);
4407  }
4408 }
4409 
4419 {
4420  if (m_mapOfTrackers.size() == 2) {
4421  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4422  it->second->setKltOpencv(t1);
4423 
4424  ++it;
4425  it->second->setKltOpencv(t2);
4426  }
4427  else {
4428  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4429  m_mapOfTrackers.size());
4430  }
4431 }
4432 
4438 void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4439 {
4440  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4441  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4442 
4443  if (it_tracker != m_mapOfTrackers.end()) {
4444  TrackerWrapper *tracker = it_tracker->second;
4445  tracker->setKltOpencv(it->second);
4446  }
4447  }
4448 }
4449 
4458 {
4459  m_thresholdOutlier = th;
4460 
4461  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4462  it != m_mapOfTrackers.end(); ++it) {
4463  TrackerWrapper *tracker = it->second;
4464  tracker->setKltThresholdAcceptation(th);
4465  }
4466 }
4467 #endif
4468 
4481 void vpMbGenericTracker::setLod(bool useLod, const std::string &name)
4482 {
4483  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4484  it != m_mapOfTrackers.end(); ++it) {
4485  TrackerWrapper *tracker = it->second;
4486  tracker->setLod(useLod, name);
4487  }
4488 }
4489 
4490 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4498 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4499 {
4500  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4501  it != m_mapOfTrackers.end(); ++it) {
4502  TrackerWrapper *tracker = it->second;
4503  tracker->setKltMaskBorder(e);
4504  }
4505 }
4506 
4515 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4516 {
4517  if (m_mapOfTrackers.size() == 2) {
4518  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4519  it->second->setKltMaskBorder(e1);
4520 
4521  ++it;
4522 
4523  it->second->setKltMaskBorder(e2);
4524  }
4525  else {
4526  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4527  m_mapOfTrackers.size());
4528  }
4529 }
4530 
4536 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4537 {
4538  for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4539  ++it) {
4540  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4541 
4542  if (it_tracker != m_mapOfTrackers.end()) {
4543  TrackerWrapper *tracker = it_tracker->second;
4544  tracker->setKltMaskBorder(it->second);
4545  }
4546  }
4547 }
4548 #endif
4549 
4556 {
4557  vpMbTracker::setMask(mask);
4558 
4559  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4560  it != m_mapOfTrackers.end(); ++it) {
4561  TrackerWrapper *tracker = it->second;
4562  tracker->setMask(mask);
4563  }
4564 }
4565 
4577 void vpMbGenericTracker::setMinLineLengthThresh(double minLineLengthThresh, const std::string &name)
4578 {
4579  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4580  it != m_mapOfTrackers.end(); ++it) {
4581  TrackerWrapper *tracker = it->second;
4582  tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4583  }
4584 }
4585 
4596 void vpMbGenericTracker::setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name)
4597 {
4598  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4599  it != m_mapOfTrackers.end(); ++it) {
4600  TrackerWrapper *tracker = it->second;
4601  tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4602  }
4603 }
4604 
4613 {
4614  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4615  it != m_mapOfTrackers.end(); ++it) {
4616  TrackerWrapper *tracker = it->second;
4617  tracker->setMovingEdge(me);
4618  }
4619 }
4620 
4630 void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4631 {
4632  if (m_mapOfTrackers.size() == 2) {
4633  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4634  it->second->setMovingEdge(me1);
4635 
4636  ++it;
4637 
4638  it->second->setMovingEdge(me2);
4639  }
4640  else {
4641  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4642  m_mapOfTrackers.size());
4643  }
4644 }
4645 
4651 void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4652 {
4653  for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4654  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4655 
4656  if (it_tracker != m_mapOfTrackers.end()) {
4657  TrackerWrapper *tracker = it_tracker->second;
4658  tracker->setMovingEdge(it->second);
4659  }
4660  }
4661 }
4662 
4671 {
4673 
4674  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4675  it != m_mapOfTrackers.end(); ++it) {
4676  TrackerWrapper *tracker = it->second;
4677  tracker->setNearClippingDistance(dist);
4678  }
4679 }
4680 
4689 void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4690 {
4691  if (m_mapOfTrackers.size() == 2) {
4692  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4693  it->second->setNearClippingDistance(dist1);
4694 
4695  ++it;
4696 
4697  it->second->setNearClippingDistance(dist2);
4698 
4700  if (it != m_mapOfTrackers.end()) {
4701  distNearClip = it->second->getNearClippingDistance();
4702  }
4703  else {
4704  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4705  }
4706  }
4707  else {
4708  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4709  m_mapOfTrackers.size());
4710  }
4711 }
4712 
4718 void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4719 {
4720  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4721  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4722 
4723  if (it_tracker != m_mapOfTrackers.end()) {
4724  TrackerWrapper *tracker = it_tracker->second;
4725  tracker->setNearClippingDistance(it->second);
4726 
4727  if (it->first == m_referenceCameraName) {
4728  distNearClip = it->second;
4729  }
4730  }
4731  }
4732 }
4733 
4747 {
4748  vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4749 
4750  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4751  it != m_mapOfTrackers.end(); ++it) {
4752  TrackerWrapper *tracker = it->second;
4753  tracker->setOgreShowConfigDialog(showConfigDialog);
4754  }
4755 }
4756 
4768 {
4770 
4771  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4772  it != m_mapOfTrackers.end(); ++it) {
4773  TrackerWrapper *tracker = it->second;
4774  tracker->setOgreVisibilityTest(v);
4775  }
4776 
4777 #ifdef VISP_HAVE_OGRE
4778  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4779  it != m_mapOfTrackers.end(); ++it) {
4780  TrackerWrapper *tracker = it->second;
4781  tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4782  }
4783 #endif
4784 }
4785 
4794 {
4796 
4797  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4798  it != m_mapOfTrackers.end(); ++it) {
4799  TrackerWrapper *tracker = it->second;
4800  tracker->setOptimizationMethod(opt);
4801  }
4802 }
4803 
4817 {
4818  if (m_mapOfTrackers.size() > 1) {
4819  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4820  "to be configured with only one camera!");
4821  }
4822 
4823  m_cMo = cdMo;
4824 
4825  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4826  if (it != m_mapOfTrackers.end()) {
4827  TrackerWrapper *tracker = it->second;
4828  tracker->setPose(I, cdMo);
4829  }
4830  else {
4831  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4832  m_referenceCameraName.c_str());
4833  }
4834 }
4835 
4849 {
4850  if (m_mapOfTrackers.size() > 1) {
4851  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4852  "to be configured with only one camera!");
4853  }
4854 
4855  m_cMo = cdMo;
4856 
4857  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4858  if (it != m_mapOfTrackers.end()) {
4859  TrackerWrapper *tracker = it->second;
4860  vpImageConvert::convert(I_color, m_I);
4861  tracker->setPose(m_I, cdMo);
4862  }
4863  else {
4864  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4865  m_referenceCameraName.c_str());
4866  }
4867 }
4868 
4881  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4882 {
4883  if (m_mapOfTrackers.size() == 2) {
4884  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4885  it->second->setPose(I1, c1Mo);
4886 
4887  ++it;
4888 
4889  it->second->setPose(I2, c2Mo);
4890 
4892  if (it != m_mapOfTrackers.end()) {
4893  // Set reference pose
4894  it->second->getPose(m_cMo);
4895  }
4896  else {
4897  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4898  m_referenceCameraName.c_str());
4899  }
4900  }
4901  else {
4902  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4903  m_mapOfTrackers.size());
4904  }
4905 }
4906 
4918 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
4919  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4920 {
4921  if (m_mapOfTrackers.size() == 2) {
4922  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4923  it->second->setPose(I_color1, c1Mo);
4924 
4925  ++it;
4926 
4927  it->second->setPose(I_color2, c2Mo);
4928 
4930  if (it != m_mapOfTrackers.end()) {
4931  // Set reference pose
4932  it->second->getPose(m_cMo);
4933  }
4934  else {
4935  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4936  m_referenceCameraName.c_str());
4937  }
4938  }
4939  else {
4940  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4941  m_mapOfTrackers.size());
4942  }
4943 }
4944 
4960 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4961  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4962 {
4963  // Set the reference cMo
4964  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4965  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4966  mapOfImages.find(m_referenceCameraName);
4967  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4968 
4969  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4970  TrackerWrapper *tracker = it_tracker->second;
4971  tracker->setPose(*it_img->second, it_camPose->second);
4972  tracker->getPose(m_cMo);
4973  }
4974  else {
4975  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4976  }
4977 
4978  // Vector of missing pose matrices for cameras
4979  std::vector<std::string> vectorOfMissingCameraPoses;
4980 
4981  // Set pose for the specified cameras
4982  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4983  if (it_tracker->first != m_referenceCameraName) {
4984  it_img = mapOfImages.find(it_tracker->first);
4985  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4986 
4987  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4988  // Set pose
4989  TrackerWrapper *tracker = it_tracker->second;
4990  tracker->setPose(*it_img->second, it_camPose->second);
4991  }
4992  else {
4993  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4994  }
4995  }
4996  }
4997 
4998  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4999  it != vectorOfMissingCameraPoses.end(); ++it) {
5000  it_img = mapOfImages.find(*it);
5001  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5003 
5004  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5005  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5006  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5007  }
5008  else {
5010  "Missing image or missing camera transformation "
5011  "matrix! Cannot set pose for camera: %s!",
5012  it->c_str());
5013  }
5014  }
5015 }
5016 
5032 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5033  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
5034 {
5035  // Set the reference cMo
5036  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
5037  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
5038  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
5039 
5040  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5041  TrackerWrapper *tracker = it_tracker->second;
5042  tracker->setPose(*it_img->second, it_camPose->second);
5043  tracker->getPose(m_cMo);
5044  }
5045  else {
5046  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
5047  }
5048 
5049  // Vector of missing pose matrices for cameras
5050  std::vector<std::string> vectorOfMissingCameraPoses;
5051 
5052  // Set pose for the specified cameras
5053  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
5054  if (it_tracker->first != m_referenceCameraName) {
5055  it_img = mapOfColorImages.find(it_tracker->first);
5056  it_camPose = mapOfCameraPoses.find(it_tracker->first);
5057 
5058  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5059  // Set pose
5060  TrackerWrapper *tracker = it_tracker->second;
5061  tracker->setPose(*it_img->second, it_camPose->second);
5062  }
5063  else {
5064  vectorOfMissingCameraPoses.push_back(it_tracker->first);
5065  }
5066  }
5067  }
5068 
5069  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5070  it != vectorOfMissingCameraPoses.end(); ++it) {
5071  it_img = mapOfColorImages.find(*it);
5072  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5074 
5075  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5076  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5077  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5078  }
5079  else {
5081  "Missing image or missing camera transformation "
5082  "matrix! Cannot set pose for camera: %s!",
5083  it->c_str());
5084  }
5085  }
5086 }
5087 
5103 {
5105 
5106  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5107  it != m_mapOfTrackers.end(); ++it) {
5108  TrackerWrapper *tracker = it->second;
5109  tracker->setProjectionErrorComputation(flag);
5110  }
5111 }
5112 
5117 {
5119 
5120  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5121  it != m_mapOfTrackers.end(); ++it) {
5122  TrackerWrapper *tracker = it->second;
5123  tracker->setProjectionErrorDisplay(display);
5124  }
5125 }
5126 
5131 {
5133 
5134  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5135  it != m_mapOfTrackers.end(); ++it) {
5136  TrackerWrapper *tracker = it->second;
5137  tracker->setProjectionErrorDisplayArrowLength(length);
5138  }
5139 }
5140 
5142 {
5144 
5145  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5146  it != m_mapOfTrackers.end(); ++it) {
5147  TrackerWrapper *tracker = it->second;
5148  tracker->setProjectionErrorDisplayArrowThickness(thickness);
5149  }
5150 }
5151 
5157 void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
5158 {
5159  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
5160  if (it != m_mapOfTrackers.end()) {
5161  m_referenceCameraName = referenceCameraName;
5162  }
5163  else {
5164  std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
5165  }
5166 }
5167 
5169 {
5171 
5172  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5173  it != m_mapOfTrackers.end(); ++it) {
5174  TrackerWrapper *tracker = it->second;
5175  tracker->setScanLineVisibilityTest(v);
5176  }
5177 }
5178 
5191 {
5192  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5193  it != m_mapOfTrackers.end(); ++it) {
5194  TrackerWrapper *tracker = it->second;
5195  tracker->setTrackerType(type);
5196  }
5197 }
5198 
5208 void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
5209 {
5210  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
5211  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
5212  if (it_tracker != m_mapOfTrackers.end()) {
5213  TrackerWrapper *tracker = it_tracker->second;
5214  tracker->setTrackerType(it->second);
5215  }
5216  }
5217 }
5218 
5228 void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
5229 {
5230  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5231  it != m_mapOfTrackers.end(); ++it) {
5232  TrackerWrapper *tracker = it->second;
5233  tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
5234  }
5235 }
5236 
5246 void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
5247 {
5248  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5249  it != m_mapOfTrackers.end(); ++it) {
5250  TrackerWrapper *tracker = it->second;
5251  tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
5252  }
5253 }
5254 
5264 void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
5265 {
5266  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5267  it != m_mapOfTrackers.end(); ++it) {
5268  TrackerWrapper *tracker = it->second;
5269  tracker->setUseEdgeTracking(name, useEdgeTracking);
5270  }
5271 }
5272 
5273 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5283 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
5284 {
5285  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5286  it != m_mapOfTrackers.end(); ++it) {
5287  TrackerWrapper *tracker = it->second;
5288  tracker->setUseKltTracking(name, useKltTracking);
5289  }
5290 }
5291 #endif
5292 
5294 {
5295  // Test tracking fails only if all testTracking have failed
5296  bool isOneTestTrackingOk = false;
5297  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5298  it != m_mapOfTrackers.end(); ++it) {
5299  TrackerWrapper *tracker = it->second;
5300  try {
5301  tracker->testTracking();
5302  isOneTestTrackingOk = true;
5303  }
5304  catch (...) {
5305  }
5306  }
5307 
5308  if (!isOneTestTrackingOk) {
5309  std::ostringstream oss;
5310  oss << "Not enough moving edges to track the object. Try to reduce the "
5311  "threshold="
5312  << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
5314  }
5315 }
5316 
5327 {
5328  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5329  mapOfImages[m_referenceCameraName] = &I;
5330 
5331  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5332  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5333 
5334  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5335 }
5336 
5347 {
5348  std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5349  mapOfColorImages[m_referenceCameraName] = &I_color;
5350 
5351  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5352  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5353 
5354  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5355 }
5356 
5368 {
5369  if (m_mapOfTrackers.size() == 2) {
5370  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5371  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5372  mapOfImages[it->first] = &I1;
5373  ++it;
5374 
5375  mapOfImages[it->first] = &I2;
5376 
5377  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5378  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5379 
5380  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5381  }
5382  else {
5383  std::stringstream ss;
5384  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5385  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5386  }
5387 }
5388 
5399 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &_colorI2)
5400 {
5401  if (m_mapOfTrackers.size() == 2) {
5402  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5403  std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5404  mapOfImages[it->first] = &I_color1;
5405  ++it;
5406 
5407  mapOfImages[it->first] = &_colorI2;
5408 
5409  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5410  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5411 
5412  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5413  }
5414  else {
5415  std::stringstream ss;
5416  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5417  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5418  }
5419 }
5420 
5428 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5429 {
5430  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5431  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5432 
5433  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5434 }
5435 
5443 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5444 {
5445  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5446  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5447 
5448  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5449 }
5450 
5451 #ifdef VISP_HAVE_PCL
/*!
  Track the object using grayscale images and PCL point clouds.

  \param mapOfImages : Map of grayscale images, one entry per camera name.
  \param mapOfPointClouds : Map of PCL point-cloud smart pointers, one entry
  per camera name.

  \throw vpException : If a tracker has an unsupported type, if a required
  image pointer is nullptr, or if a required point cloud is nullptr.
*/
void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                               std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
{
  // Input validation: check every camera before starting the optimization.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    // NOTE(review): the tail of this condition (depth-tracker flags and the
    // closing `== 0` comparison) appears to be missing here — confirm
    // against the reference implementation.
    if ((tracker->m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                   KLT_TRACKER |
#endif
      throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
    }

    // Edge/KLT features need a valid grayscale image for this camera.
    if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                  | KLT_TRACKER
#endif
                                  ) &&
        mapOfImages[it->first] == nullptr) {
      throw vpException(vpException::fatalError, "Image pointer is nullptr!");
    }

    // Depth features need a valid point cloud for this camera.
    if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
        !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
      throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
    }
  }

  // Extract/update the visual features for the current frame.
  preTracking(mapOfImages, mapOfPointClouds);

  try {
    // Virtual visual servoing pose optimization.
    computeVVS(mapOfImages);
  }
  catch (...) {
    // Invalidate the covariance before propagating the failure.
    covarianceMatrix = -1;
    throw; // throw the original exception
  }

  testTracking();

  // Per-camera post-processing and optional feature caching for display.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
      tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
    }

    tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);

    if (displayFeatures) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (tracker->m_trackerType & KLT_TRACKER) {
        tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
      }
#endif

      if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
        tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
      }
    }
  }

  // NOTE(review): a trailing statement appears to be missing before this
  // closing brace — confirm against the reference implementation.
}
5527 
/*!
  Track the object using color images and PCL point clouds. Color inputs are
  converted to grayscale into each wrapper's internal buffer before tracking.

  \param mapOfColorImages : Map of color images, one entry per camera name.
  \param mapOfPointClouds : Map of PCL point-cloud smart pointers, one entry
  per camera name.

  \throw vpException : If a tracker has an unsupported type, if a required
  image is missing, or if a required point cloud is nullptr.
*/
void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
                               std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
{
  // Grayscale versions of the color inputs, filled camera by camera below.
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    // NOTE(review): the tail of this condition (depth-tracker flags and the
    // closing `== 0` comparison) appears to be missing here — confirm
    // against the reference implementation.
    if ((tracker->m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                   KLT_TRACKER |
#endif
      throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
    }

    // NOTE(review): this tests mapOfImages, which is still empty at this
    // point (operator[] default-inserts nullptr, so this branch would always
    // throw for edge/KLT trackers). The sibling vpMatrix overload tests
    // mapOfColorImages instead — likely a bug; confirm before fixing.
    if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                  | KLT_TRACKER
#endif
                                  ) &&
        mapOfImages[it->first] == nullptr) {
      throw vpException(vpException::fatalError, "Image pointer is nullptr!");
    }
    else if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                       | KLT_TRACKER
#endif
                                       ) &&
             mapOfImages[it->first] != nullptr) {
      // Convert the color input to grayscale into the wrapper's own buffer.
      vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
      mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
    }

    // Depth features need a valid point cloud for this camera.
    if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
        !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
      throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
    }
  }

  // Extract/update the visual features for the current frame.
  preTracking(mapOfImages, mapOfPointClouds);

  try {
    // Virtual visual servoing pose optimization.
    computeVVS(mapOfImages);
  }
  catch (...) {
    // Invalidate the covariance before propagating the failure.
    covarianceMatrix = -1;
    throw; // throw the original exception
  }

  testTracking();

  // Per-camera post-processing and optional feature caching for display.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
      tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
    }

    tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);

    if (displayFeatures) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (tracker->m_trackerType & KLT_TRACKER) {
        tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
      }
#endif

      if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
        tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
      }
    }
  }

  // NOTE(review): a trailing statement appears to be missing before this
  // closing brace — confirm against the reference implementation.
}
5613 #endif
5614 
/*!
  Track the object using grayscale images and point clouds given as vectors
  of 3D points.

  \param mapOfImages : Map of grayscale images, one entry per camera name.
  \param mapOfPointClouds : Map of point clouds (vector of vpColVector), one
  entry per camera name.
  \param mapOfPointCloudWidths : Map of point-cloud widths.
  \param mapOfPointCloudHeights : Map of point-cloud heights.

  \throw vpException : If a tracker has an unsupported type, if a required
  image pointer is nullptr, or if a required point cloud is nullptr.
*/
void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                               std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
                               std::map<std::string, unsigned int> &mapOfPointCloudWidths,
                               std::map<std::string, unsigned int> &mapOfPointCloudHeights)
{
  // Input validation: check every camera before starting the optimization.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    // NOTE(review): the tail of this condition (depth-tracker flags and the
    // closing `== 0` comparison) appears to be missing here — confirm
    // against the reference implementation.
    if ((tracker->m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                   KLT_TRACKER |
#endif
      throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
    }

    // Edge/KLT features need a valid grayscale image for this camera.
    if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                  | KLT_TRACKER
#endif
                                  ) &&
        mapOfImages[it->first] == nullptr) {
      throw vpException(vpException::fatalError, "Image pointer is nullptr!");
    }

    // Depth features need a valid point cloud for this camera.
    if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
        (mapOfPointClouds[it->first] == nullptr)) {
      throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
    }
  }

  // Extract/update the visual features for the current frame.
  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);

  try {
    // Virtual visual servoing pose optimization.
    computeVVS(mapOfImages);
  }
  catch (...) {
    // Invalidate the covariance before propagating the failure.
    covarianceMatrix = -1;
    throw; // throw the original exception
  }

  testTracking();

  // Per-camera post-processing and optional feature caching for display.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
      tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
    }

    tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);

    if (displayFeatures) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (tracker->m_trackerType & KLT_TRACKER) {
        tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
      }
#endif

      if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
        tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
      }
    }
  }

  // NOTE(review): a trailing statement appears to be missing before this
  // closing brace — confirm against the reference implementation.
}
5694 
/*!
  Track the object using color images and point clouds given as vectors of
  3D points. Color inputs are converted to grayscale into each wrapper's
  internal buffer before tracking.

  \param mapOfColorImages : Map of color images, one entry per camera name.
  \param mapOfPointClouds : Map of point clouds (vector of vpColVector), one
  entry per camera name.
  \param mapOfPointCloudWidths : Map of point-cloud widths.
  \param mapOfPointCloudHeights : Map of point-cloud heights.

  \throw vpException : If a tracker has an unsupported type, if a required
  color image is nullptr, or if a required point cloud is nullptr.
*/
void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
                               std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
                               std::map<std::string, unsigned int> &mapOfPointCloudWidths,
                               std::map<std::string, unsigned int> &mapOfPointCloudHeights)
{
  // Grayscale versions of the color inputs, filled camera by camera below.
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    // NOTE(review): the tail of this condition (depth-tracker flags and the
    // closing `== 0` comparison) appears to be missing here — confirm
    // against the reference implementation.
    if ((tracker->m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                   KLT_TRACKER |
#endif
      throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
    }

    // Edge/KLT features need a valid color image for this camera.
    if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                  | KLT_TRACKER
#endif
                                  ) &&
        mapOfColorImages[it->first] == nullptr) {
      throw vpException(vpException::fatalError, "Image pointer is nullptr!");
    }
    else if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                       | KLT_TRACKER
#endif
                                       ) &&
             mapOfColorImages[it->first] != nullptr) {
      // Convert the color input to grayscale into the wrapper's own buffer.
      vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
      mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
    }

    // Depth features need a valid point cloud for this camera.
    if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
        (mapOfPointClouds[it->first] == nullptr)) {
      throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
    }
  }

  // Extract/update the visual features for the current frame.
  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);

  try {
    // Virtual visual servoing pose optimization.
    computeVVS(mapOfImages);
  }
  catch (...) {
    // Invalidate the covariance before propagating the failure.
    covarianceMatrix = -1;
    throw; // throw the original exception
  }

  testTracking();

  // Per-camera post-processing and optional feature caching for display.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
      tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
    }

    tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);

    if (displayFeatures) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (tracker->m_trackerType & KLT_TRACKER) {
        tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
      }
#endif

      if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
        tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
      }
    }
  }

  // NOTE(review): a trailing statement appears to be missing before this
  // closing brace — confirm against the reference implementation.
}
5784 
/*!
  Track the object using grayscale images and point clouds given as matrices.

  \param mapOfImages : Map of grayscale images, one entry per camera name.
  \param mapOfPointClouds : Map of point clouds (vpMatrix), one entry per
  camera name.
  \param mapOfPointCloudWidths : Map of point-cloud widths.
  \param mapOfPointCloudHeights : Map of point-cloud heights.

  \throw vpException : If a tracker has an unsupported type, if a required
  image pointer is nullptr, or if a required point cloud is nullptr.
*/
void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                               std::map<std::string, const vpMatrix *> &mapOfPointClouds,
                               std::map<std::string, unsigned int> &mapOfPointCloudWidths,
                               std::map<std::string, unsigned int> &mapOfPointCloudHeights)
{
  // Input validation: check every camera before starting the optimization.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    // NOTE(review): the tail of this condition (depth-tracker flags and the
    // closing `== 0` comparison) appears to be missing here — confirm
    // against the reference implementation.
    if ((tracker->m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                   KLT_TRACKER |
#endif
      throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
    }

    // Edge/KLT features need a valid grayscale image for this camera.
    if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                  | KLT_TRACKER
#endif
                                  ) &&
        mapOfImages[it->first] == nullptr) {
      throw vpException(vpException::fatalError, "Image pointer is nullptr!");
    }

    // Depth features need a valid point cloud for this camera.
    if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
        (mapOfPointClouds[it->first] == nullptr)) {
      throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
    }
  }

  // Extract/update the visual features for the current frame.
  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);

  try {
    // Virtual visual servoing pose optimization.
    computeVVS(mapOfImages);
  }
  catch (...) {
    // Invalidate the covariance before propagating the failure.
    covarianceMatrix = -1;
    throw; // throw the original exception
  }

  testTracking();

  // Per-camera post-processing and optional feature caching for display.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
      tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
    }

    tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);

    if (displayFeatures) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (tracker->m_trackerType & KLT_TRACKER) {
        tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
      }
#endif

      if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
        tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
      }
    }
  }

  // NOTE(review): a trailing statement appears to be missing before this
  // closing brace — confirm against the reference implementation.
}
5854 
/*!
  Track the object using color images and point clouds given as matrices.
  Color inputs are converted to grayscale into each wrapper's internal
  buffer before tracking.

  \param mapOfColorImages : Map of color images, one entry per camera name.
  \param mapOfPointClouds : Map of point clouds (vpMatrix), one entry per
  camera name.
  \param mapOfPointCloudWidths : Map of point-cloud widths.
  \param mapOfPointCloudHeights : Map of point-cloud heights.

  \throw vpException : If a tracker has an unsupported type, if a required
  color image is nullptr, or if a required point cloud is nullptr.
*/
void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
                               std::map<std::string, const vpMatrix *> &mapOfPointClouds,
                               std::map<std::string, unsigned int> &mapOfPointCloudWidths,
                               std::map<std::string, unsigned int> &mapOfPointCloudHeights)
{
  // Grayscale versions of the color inputs, filled camera by camera below.
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    // NOTE(review): the tail of this condition (depth-tracker flags and the
    // closing `== 0` comparison) appears to be missing here — confirm
    // against the reference implementation.
    if ((tracker->m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                   KLT_TRACKER |
#endif
      throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
    }

    // Edge/KLT features need a valid color image for this camera.
    if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                  | KLT_TRACKER
#endif
                                  ) &&
        mapOfColorImages[it->first] == nullptr) {
      throw vpException(vpException::fatalError, "Image pointer is nullptr!");
    }
    else if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                       | KLT_TRACKER
#endif
                                       ) &&
             mapOfColorImages[it->first] != nullptr) {
      // Convert the color input to grayscale into the wrapper's own buffer.
      vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
      mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
    }

    // Depth features need a valid point cloud for this camera.
    if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
        (mapOfPointClouds[it->first] == nullptr)) {
      throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
    }
  }

  // Extract/update the visual features for the current frame.
  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);

  try {
    // Virtual visual servoing pose optimization.
    computeVVS(mapOfImages);
  }
  catch (...) {
    // Invalidate the covariance before propagating the failure.
    covarianceMatrix = -1;
    throw; // throw the original exception
  }

  testTracking();

  // Per-camera post-processing and optional feature caching for display.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
      tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
    }

    tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);

    if (displayFeatures) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (tracker->m_trackerType & KLT_TRACKER) {
        tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
      }
#endif

      if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
        tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
      }
    }
  }

  // NOTE(review): a trailing statement appears to be missing before this
  // closing brace — confirm against the reference implementation.
}
5944 
5946 vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5947  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5948 {
5949  m_lambda = 1.0;
5950  m_maxIter = 30;
5951 
5952 #ifdef VISP_HAVE_OGRE
5953  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5954 
5955  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5956 #endif
5957 }
5958 
/*!
  Construct a wrapper with the requested feature types enabled.

  \param trackerType : Bitmask of tracker feature types (see vpTrackerType).

  \throw vpException : If the bitmask does not contain any supported feature
  type.
*/
vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
{
  // NOTE(review): the tail of this condition (depth-tracker flags and the
  // closing `== 0` comparison) appears to be missing here — confirm against
  // the reference implementation.
  if ((m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                        KLT_TRACKER |
#endif
    throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
  }

  // Optimization defaults (used by the VVS minimization loop).
  m_lambda = 1.0;
  m_maxIter = 30;

#ifdef VISP_HAVE_OGRE
  // Name the Ogre windows used for visibility / projection-error rendering.
  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");

  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
#endif
}
5979 
5980 // Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
5982 {
5983  computeVVSInit(ptr_I);
5984 
5985  if (m_error.getRows() < 4) {
5986  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
5987  }
5988 
5989  double normRes = 0;
5990  double normRes_1 = -1;
5991  unsigned int iter = 0;
5992 
5993  double factorEdge = 1.0;
5994 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5995  double factorKlt = 1.0;
5996 #endif
5997  double factorDepth = 1.0;
5998  double factorDepthDense = 1.0;
5999 
6000  vpMatrix LTL;
6001  vpColVector LTR, v;
6002  vpColVector error_prev;
6003 
6004  double mu = m_initialMu;
6005  vpHomogeneousMatrix cMo_prev;
6006 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6007  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
6008 #endif
6009  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
6010  if (isoJoIdentity)
6011  oJo.eye();
6012 
6013  // Covariance
6014  vpColVector W_true(m_error.getRows());
6015  vpMatrix L_true, LVJ_true;
6016 
6017  unsigned int nb_edge_features = m_error_edge.getRows();
6018 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6019  unsigned int nb_klt_features = m_error_klt.getRows();
6020 #endif
6021  unsigned int nb_depth_features = m_error_depthNormal.getRows();
6022  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
6023 
6024  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
6025  computeVVSInteractionMatrixAndResidu(ptr_I);
6026 
6027  bool reStartFromLastIncrement = false;
6028  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
6029 
6030 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6031  if (reStartFromLastIncrement) {
6032  if (m_trackerType & KLT_TRACKER) {
6033  ctTc0 = ctTc0_Prev;
6034  }
6035  }
6036 #endif
6037 
6038  if (!reStartFromLastIncrement) {
6039  computeVVSWeights();
6040 
6041  if (computeCovariance) {
6042  L_true = m_L;
6043  if (!isoJoIdentity) {
6045  cVo.buildFrom(m_cMo);
6046  LVJ_true = (m_L * cVo * oJo);
6047  }
6048  }
6049 
6051  if (iter == 0) {
6052  // If all the 6 dof should be estimated, we check if the interaction
6053  // matrix is full rank. If not we remove automatically the dof that
6054  // cannot be estimated. This is particularly useful when considering
6055  // circles (rank 5) and cylinders (rank 4)
6056  if (isoJoIdentity) {
6057  cVo.buildFrom(m_cMo);
6058 
6059  vpMatrix K; // kernel
6060  unsigned int rank = (m_L * cVo).kernel(K);
6061  if (rank == 0) {
6062  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
6063  }
6064 
6065  if (rank != 6) {
6066  vpMatrix I; // Identity
6067  I.eye(6);
6068  oJo = I - K.AtA();
6069 
6070  isoJoIdentity = false;
6071  }
6072  }
6073  }
6074 
6075  // Weighting
6076  double num = 0;
6077  double den = 0;
6078 
6079  unsigned int start_index = 0;
6080  if (m_trackerType & EDGE_TRACKER) {
6081  for (unsigned int i = 0; i < nb_edge_features; i++) {
6082  double wi = m_w_edge[i] * m_factor[i] * factorEdge;
6083  W_true[i] = wi;
6084  m_weightedError[i] = wi * m_error[i];
6085 
6086  num += wi * vpMath::sqr(m_error[i]);
6087  den += wi;
6088 
6089  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6090  m_L[i][j] *= wi;
6091  }
6092  }
6093 
6094  start_index += nb_edge_features;
6095  }
6096 
6097 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6098  if (m_trackerType & KLT_TRACKER) {
6099  for (unsigned int i = 0; i < nb_klt_features; i++) {
6100  double wi = m_w_klt[i] * factorKlt;
6101  W_true[start_index + i] = wi;
6102  m_weightedError[start_index + i] = wi * m_error_klt[i];
6103 
6104  num += wi * vpMath::sqr(m_error[start_index + i]);
6105  den += wi;
6106 
6107  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6108  m_L[start_index + i][j] *= wi;
6109  }
6110  }
6111 
6112  start_index += nb_klt_features;
6113  }
6114 #endif
6115 
6116  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6117  for (unsigned int i = 0; i < nb_depth_features; i++) {
6118  double wi = m_w_depthNormal[i] * factorDepth;
6119  m_w[start_index + i] = m_w_depthNormal[i];
6120  m_weightedError[start_index + i] = wi * m_error[start_index + i];
6121 
6122  num += wi * vpMath::sqr(m_error[start_index + i]);
6123  den += wi;
6124 
6125  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6126  m_L[start_index + i][j] *= wi;
6127  }
6128  }
6129 
6130  start_index += nb_depth_features;
6131  }
6132 
6133  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6134  for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
6135  double wi = m_w_depthDense[i] * factorDepthDense;
6136  m_w[start_index + i] = m_w_depthDense[i];
6137  m_weightedError[start_index + i] = wi * m_error[start_index + i];
6138 
6139  num += wi * vpMath::sqr(m_error[start_index + i]);
6140  den += wi;
6141 
6142  for (unsigned int j = 0; j < m_L.getCols(); j++) {
6143  m_L[start_index + i][j] *= wi;
6144  }
6145  }
6146 
6147  // start_index += nb_depth_dense_features;
6148  }
6149 
6150  computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
6151 
6152  cMo_prev = m_cMo;
6153 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6154  if (m_trackerType & KLT_TRACKER) {
6155  ctTc0_Prev = ctTc0;
6156  }
6157 #endif
6158 
6159  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
6160 
6161 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6162  if (m_trackerType & KLT_TRACKER) {
6163  ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
6164  }
6165 #endif
6166  normRes_1 = normRes;
6167 
6168  normRes = sqrt(num / den);
6169  }
6170 
6171  iter++;
6172  }
6173 
6174  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
6175 
6176  if (m_trackerType & EDGE_TRACKER) {
6178  }
6179 }
6180 
6181 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
6182 {
6183  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6184  "TrackerWrapper::computeVVSInit("
6185  ") should not be called!");
6186 }
6187 
/*!
 * Prepare the stacked VVS (virtual visual servoing) containers for the
 * trackers enabled in \c m_trackerType: each enabled tracker contributes its
 * residual rows to the total, while each disabled tracker has its per-type
 * error / weighted-error / Jacobian / weight containers cleared so no stale
 * data can be stacked later.
 *
 * \param ptr_I : Pointer to the grayscale image used by the moving-edge part.
 */
void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
{
  initMbtTracking(ptr_I);

  // Total number of stacked feature rows over all enabled trackers.
  unsigned int nbFeatures = 0;

  if (m_trackerType & EDGE_TRACKER) {
    nbFeatures += m_error_edge.getRows();
  }
  else {
    m_error_edge.clear();
    m_weightedError_edge.clear();
    m_L_edge.clear();
    m_w_edge.clear();
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    nbFeatures += m_error_klt.getRows();
  }
  else {
    m_error_klt.clear();
    m_weightedError_klt.clear();
    m_L_klt.clear();
    m_w_klt.clear();
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    nbFeatures += m_error_depthNormal.getRows();
  }
  else {
    m_error_depthNormal.clear();
    m_weightedError_depthNormal.clear();
    m_L_depthNormal.clear();
    m_w_depthNormal.clear();
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    nbFeatures += m_error_depthDense.getRows();
  }
  else {
    m_error_depthDense.clear();
    m_weightedError_depthDense.clear();
    m_L_depthDense.clear();
    m_w_depthDense.clear();
  }

  // Allocate the stacked interaction matrix (6 pose columns) and the stacked
  // error / weight vectors; weights start at 1 (no outlier rejection yet).
  m_L.resize(nbFeatures, 6, false, false);
  m_error.resize(nbFeatures, false);

  m_weightedError.resize(nbFeatures, false);
  m_w.resize(nbFeatures, false);
  m_w = 1;
}
6246 
6248 {
6249  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6250  "TrackerWrapper::"
6251  "computeVVSInteractionMatrixAndR"
6252  "esidu() should not be called!");
6253 }
6254 
6256 {
6257  if (m_trackerType & EDGE_TRACKER) {
6259  }
6260 
6261 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6262  if (m_trackerType & KLT_TRACKER) {
6264  }
6265 #endif
6266 
6267  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6269  }
6270 
6271  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6273  }
6274 
6275  unsigned int start_index = 0;
6276  if (m_trackerType & EDGE_TRACKER) {
6277  m_L.insert(m_L_edge, start_index, 0);
6278  m_error.insert(start_index, m_error_edge);
6279 
6280  start_index += m_error_edge.getRows();
6281  }
6282 
6283 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6284  if (m_trackerType & KLT_TRACKER) {
6285  m_L.insert(m_L_klt, start_index, 0);
6286  m_error.insert(start_index, m_error_klt);
6287 
6288  start_index += m_error_klt.getRows();
6289  }
6290 #endif
6291 
6292  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6293  m_L.insert(m_L_depthNormal, start_index, 0);
6294  m_error.insert(start_index, m_error_depthNormal);
6295 
6296  start_index += m_error_depthNormal.getRows();
6297  }
6298 
6299  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6300  m_L.insert(m_L_depthDense, start_index, 0);
6301  m_error.insert(start_index, m_error_depthDense);
6302 
6303  // start_index += m_error_depthDense.getRows();
6304  }
6305 }
6306 
6308 {
6309  unsigned int start_index = 0;
6310 
6311  if (m_trackerType & EDGE_TRACKER) {
6313  m_w.insert(start_index, m_w_edge);
6314 
6315  start_index += m_w_edge.getRows();
6316  }
6317 
6318 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6319  if (m_trackerType & KLT_TRACKER) {
6320  vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
6321  m_w.insert(start_index, m_w_klt);
6322 
6323  start_index += m_w_klt.getRows();
6324  }
6325 #endif
6326 
6327  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6328  if (m_depthNormalUseRobust) {
6329  vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
6330  m_w.insert(start_index, m_w_depthNormal);
6331  }
6332 
6333  start_index += m_w_depthNormal.getRows();
6334  }
6335 
6336  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6338  m_w.insert(start_index, m_w_depthDense);
6339 
6340  // start_index += m_w_depthDense.getRows();
6341  }
6342 }
6343 
/*!
 * Draw the tracked features and the projected model into a grayscale image.
 *
 * \param I : Image on which to draw.
 * \param cMo : Pose used to project the model.
 * \param cam : Camera intrinsics used for the projection.
 * \param col : Color of the model lines / ellipses.
 * \param thickness : Drawing thickness.
 * \param displayFullModel : When true, hidden parts of the model are drawn too.
 *
 * NOTE(review): several switch-case display statements are elided in this
 * view of the file (some case labels have no visible drawing call).
 */
void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
                                                 const vpCameraParameters &cam, const vpColor &col,
                                                 unsigned int thickness, bool displayFullModel)
{
  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplay();
    for (size_t i = 0; i < features.size(); i++) {
      if (vpMath::equal(features[i][0], 0)) {
        // Type 0: moving-edge site; features[i][3] encodes its vpMeSite state.
        vpImagePoint ip(features[i][1], features[i][2]);
        int state = static_cast<int>(features[i][3]);

        switch (state) {
        break;

        case vpMeSite::CONTRAST:
          vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
          break;

        case vpMeSite::THRESHOLD:
          break;

        case vpMeSite::M_ESTIMATOR:
          vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
          break;

        case vpMeSite::TOO_NEAR:
          vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
          break;

        default:
        }
      }
      else if (vpMath::equal(features[i][0], 1)) {
        // Type 1: KLT point; drawn with its id as a text label.
        vpImagePoint ip1(features[i][1], features[i][2]);

        vpImagePoint ip2(features[i][3], features[i][4]);
        double id = features[i][5];
        std::stringstream ss;
        ss << id;
        vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
      }
      else if (vpMath::equal(features[i][0], 2)) {
        // Type 2: depth-normal arrow from face centroid to normal extremity.
        vpImagePoint im_centroid(features[i][1], features[i][2]);
        vpImagePoint im_extremity(features[i][3], features[i][4]);
        // NOTE(review): this re-tests the branch condition, so `desired` is
        // always true here and the arrow is always blue — looks like it was
        // meant to test a different field; confirm against upstream sources.
        bool desired = vpMath::equal(features[i][0], 2);
        vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
      }
    }
  }

  // Draw the projected CAD model: type 0 rows are line segments, type 1 rows
  // are ellipses given by their centered normalized moments.
  std::vector<std::vector<double> > models =
    getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
    else if (vpMath::equal(models[i][0], 1)) {
      vpImagePoint center(models[i][1], models[i][2]);
      double n20 = models[i][3];
      double n11 = models[i][4];
      double n02 = models[i][5];
      vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
    }
  }

#ifdef VISP_HAVE_OGRE
  // Ogre visibility overlay only makes sense for image-based trackers.
  if ((m_trackerType & EDGE_TRACKER)
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      || (m_trackerType & KLT_TRACKER)
#endif
      ) {
    if (useOgre)
      faces.displayOgre(cMo);
  }
#endif
}
6427 
/*!
 * Draw the tracked features and the projected model into a color image.
 * Same behavior as the grayscale overload.
 *
 * \param I : Color image on which to draw.
 * \param cMo : Pose used to project the model.
 * \param cam : Camera intrinsics used for the projection.
 * \param col : Color of the model lines / ellipses.
 * \param thickness : Drawing thickness.
 * \param displayFullModel : When true, hidden parts of the model are drawn too.
 *
 * NOTE(review): several switch-case display statements are elided in this
 * view of the file (some case labels have no visible drawing call).
 */
void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
                                                 const vpCameraParameters &cam, const vpColor &col,
                                                 unsigned int thickness, bool displayFullModel)
{
  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplay();
    for (size_t i = 0; i < features.size(); i++) {
      if (vpMath::equal(features[i][0], 0)) {
        // Type 0: moving-edge site; features[i][3] encodes its vpMeSite state.
        vpImagePoint ip(features[i][1], features[i][2]);
        int state = static_cast<int>(features[i][3]);

        switch (state) {
        break;

        case vpMeSite::CONTRAST:
          vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
          break;

        case vpMeSite::THRESHOLD:
          break;

        case vpMeSite::M_ESTIMATOR:
          vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
          break;

        case vpMeSite::TOO_NEAR:
          vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
          break;

        default:
        }
      }
      else if (vpMath::equal(features[i][0], 1)) {
        // Type 1: KLT point; drawn with its id as a text label.
        vpImagePoint ip1(features[i][1], features[i][2]);

        vpImagePoint ip2(features[i][3], features[i][4]);
        double id = features[i][5];
        std::stringstream ss;
        ss << id;
        vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
      }
      else if (vpMath::equal(features[i][0], 2)) {
        // Type 2: depth-normal arrow from face centroid to normal extremity.
        vpImagePoint im_centroid(features[i][1], features[i][2]);
        vpImagePoint im_extremity(features[i][3], features[i][4]);
        // NOTE(review): this re-tests the branch condition, so `desired` is
        // always true here and the arrow is always blue — looks like it was
        // meant to test a different field; confirm against upstream sources.
        bool desired = vpMath::equal(features[i][0], 2);
        vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
      }
    }
  }

  // Draw the projected CAD model: type 0 rows are line segments, type 1 rows
  // are ellipses given by their centered normalized moments.
  std::vector<std::vector<double> > models =
    getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
    else if (vpMath::equal(models[i][0], 1)) {
      vpImagePoint center(models[i][1], models[i][2]);
      double n20 = models[i][3];
      double n11 = models[i][4];
      double n02 = models[i][5];
      vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
    }
  }

#ifdef VISP_HAVE_OGRE
  // Ogre visibility overlay only makes sense for image-based trackers.
  if ((m_trackerType & EDGE_TRACKER)
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      || (m_trackerType & KLT_TRACKER)
#endif
      ) {
    if (useOgre)
      faces.displayOgre(cMo);
  }
#endif
}
6511 
6512 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6513 {
6514  std::vector<std::vector<double> > features;
6515 
6516  if (m_trackerType & EDGE_TRACKER) {
6517  // m_featuresToBeDisplayedEdge updated after computeVVS()
6518  features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6519  }
6520 
6521 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6522  if (m_trackerType & KLT_TRACKER) {
6523  // m_featuresToBeDisplayedKlt updated after postTracking()
6524  features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6525  }
6526 #endif
6527 
6528  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6529  // m_featuresToBeDisplayedDepthNormal updated after postTracking()
6530  features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6531  m_featuresToBeDisplayedDepthNormal.end());
6532  }
6533 
6534  return features;
6535 }
6536 
6537 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width,
6538  unsigned int height,
6539  const vpHomogeneousMatrix &cMo,
6540  const vpCameraParameters &cam,
6541  bool displayFullModel)
6542 {
6543  std::vector<std::vector<double> > models;
6544 
6545  // Do not add multiple times the same models
6546  if (m_trackerType == EDGE_TRACKER) {
6547  models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6548  }
6549 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6550  else if (m_trackerType == KLT_TRACKER) {
6551  models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6552  }
6553 #endif
6554  else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6555  models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6556  }
6557  else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6558  models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6559  }
6560  else {
6561  // Edge and KLT trackers use the same primitives
6562  if (m_trackerType & EDGE_TRACKER) {
6563  std::vector<std::vector<double> > edgeModels =
6564  vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6565  models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6566  }
6567 
6568  // Depth dense and depth normal trackers use the same primitives
6569  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6570  std::vector<std::vector<double> > depthDenseModels =
6571  vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6572  models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6573  }
6574  }
6575 
6576  return models;
6577 }
6578 
/*!
 * (Re)initialize the wrapper from the current pose: recompute the field of
 * view, face visibility (CPU or Ogre based), scanline rendering, and the
 * per-tracker visibility structures.
 *
 * \param I : Grayscale image used to size the visibility computations.
 * \exception vpException::fatalError If the model has not been loaded first.
 *
 * NOTE(review): the statements guarded by the KLT and EDGE conditions below
 * are partially elided in this view of the file.
 */
void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
{
  if (!modelInitialised) {
    throw vpException(vpException::fatalError, "model not initialized");
  }

  // FOV clipping needs the image size; recompute when scanline rendering is
  // on or any FOV-related clipping flag is set.
  if (useScanLine || clippingFlag > 3)
    m_cam.computeFov(I.getWidth(), I.getHeight());

  bool reInitialisation = false;
  if (!useOgre) {
    faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
  }
  else {
#ifdef VISP_HAVE_OGRE
    if (!faces.isOgreInitialised()) {
      faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());

      faces.setOgreShowConfigDialog(ogreShowConfigDialog);
      faces.initOgre(m_cam);
      // Turn off Ogre config dialog display for the next call to this
      // function since settings are saved in the ogre.cfg file and used
      // during the next call
      ogreShowConfigDialog = false;
    }

    faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
#else
    faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
#endif
  }

  if (useScanLine) {
    faces.computeClippedPolygons(m_cMo, m_cam);
    faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight());
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER)
  // NOTE(review): the KLT re-initialization statement is elided in this view.
#endif

  if (m_trackerType & EDGE_TRACKER) {

    bool a = false;
    vpMbEdgeTracker::visibleFace(I, m_cMo, a); // should be useless, but keep it for nbvisiblepolygone

  }

  if (m_trackerType & DEPTH_NORMAL_TRACKER)
    vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);

  if (m_trackerType & DEPTH_DENSE_TRACKER)
    vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
}
6636 
6637 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6638  double radius, int idFace, const std::string &name)
6639 {
6640  if (m_trackerType & EDGE_TRACKER)
6641  vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6642 
6643 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6644  if (m_trackerType & KLT_TRACKER)
6645  vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6646 #endif
6647 }
6648 
6649 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace,
6650  const std::string &name)
6651 {
6652  if (m_trackerType & EDGE_TRACKER)
6653  vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6654 
6655 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6656  if (m_trackerType & KLT_TRACKER)
6657  vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6658 #endif
6659 }
6660 
/*!
 * Register a polygonal face, defined by its corners, with every enabled
 * tracker.
 *
 * NOTE(review): the per-tracker initFaceFromCorners() call statements guarded
 * by the conditions below are elided in this view of the file.
 *
 * \param polygon : Polygon describing the face to add.
 */
void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
{
  if (m_trackerType & EDGE_TRACKER)

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER)
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER)

  if (m_trackerType & DEPTH_DENSE_TRACKER)
}
6677 
/*!
 * Register a polygonal face, defined by its lines, with every enabled
 * tracker.
 *
 * NOTE(review): the per-tracker initFaceFromLines() call statements guarded
 * by the conditions below are elided in this view of the file.
 *
 * \param polygon : Polygon describing the face to add.
 */
void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
{
  if (m_trackerType & EDGE_TRACKER)

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER)
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER)

  if (m_trackerType & DEPTH_DENSE_TRACKER)
}
6694 
6696 {
6697  if (m_trackerType & EDGE_TRACKER) {
6700  }
6701 }
6702 
/*!
 * Load the tracker configuration from an XML file: camera parameters,
 * visibility angles, clipping distances, LOD settings, and the per-modality
 * (edge / KLT / depth normal / depth dense) parameters.
 *
 * \param configFile : Path to the XML configuration file.
 * \param verbose : When true, print progress information while parsing.
 * \exception vpException::ioError If the XML file cannot be parsed, or if
 * ViSP was built without the pugixml 3rd party.
 *
 * NOTE(review): the declaration of the `xmlp` parser object and a few other
 * statements are elided in this view of the file.
 */
void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile, bool verbose)
{
#if defined(VISP_HAVE_PUGIXML)
  // Load projection error config
  vpMbTracker::loadConfigFile(configFile, verbose);

  // Seed the parser with the current values so settings missing from the XML
  // file keep their present value.
  xmlp.setVerbose(verbose);
  xmlp.setCameraParameters(m_cam);
  xmlp.setAngleAppear(vpMath::deg(angleAppears));
  xmlp.setAngleDisappear(vpMath::deg(angleDisappears));

  // Edge
  xmlp.setEdgeMe(me);

  // KLT
  xmlp.setKltMaxFeatures(10000);
  xmlp.setKltWindowSize(5);
  xmlp.setKltQuality(0.01);
  xmlp.setKltMinDistance(5);
  xmlp.setKltHarrisParam(0.01);
  xmlp.setKltBlockSize(3);
  xmlp.setKltPyramidLevels(3);
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  xmlp.setKltMaskBorder(maskBorder);
#endif

  // Depth normal
  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);

  // Depth dense
  xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
  xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);

  try {
    if (verbose) {
      std::cout << " *********** Parsing XML for";
    }

    // Build the list of enabled tracker names for the verbose banner.
    std::vector<std::string> tracker_names;
    if (m_trackerType & EDGE_TRACKER)
      tracker_names.push_back("Edge");
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    if (m_trackerType & KLT_TRACKER)
      tracker_names.push_back("Klt");
#endif
    if (m_trackerType & DEPTH_NORMAL_TRACKER)
      tracker_names.push_back("Depth Normal");
    if (m_trackerType & DEPTH_DENSE_TRACKER)
      tracker_names.push_back("Depth Dense");

    if (verbose) {
      for (size_t i = 0; i < tracker_names.size(); i++) {
        std::cout << " " << tracker_names[i];
        if (i == tracker_names.size() - 1) {
          std::cout << " ";
        }
      }

      std::cout << "Model-Based Tracker ************ " << std::endl;
    }

    xmlp.parse(configFile);
  }
  catch (...) {
    throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
  }

  // Apply the parsed values back to the tracker.
  vpCameraParameters camera;
  xmlp.getCameraParameters(camera);
  setCameraParameters(camera);

  angleAppears = vpMath::rad(xmlp.getAngleAppear());
  angleDisappears = vpMath::rad(xmlp.getAngleDisappear());

  if (xmlp.hasNearClippingDistance())
    setNearClippingDistance(xmlp.getNearClippingDistance());

  if (xmlp.hasFarClippingDistance())
    setFarClippingDistance(xmlp.getFarClippingDistance());

  // NOTE(review): the FOV-clipping statement inside this guard is elided in
  // this view of the file.
  if (xmlp.getFovClipping()) {
  }

  useLodGeneral = xmlp.getLodState();
  minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
  minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();

  // Only propagate LOD settings to existing polygons; otherwise they are
  // applied later, when the model is loaded.
  applyLodSettingInConfig = false;
  if (this->getNbPolygon() > 0) {
    applyLodSettingInConfig = true;
    setLod(useLodGeneral);
    setMinLineLengthThresh(minLineLengthThresholdGeneral);
    setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
  }

  // Edge
  vpMe meParser;
  xmlp.getEdgeMe(meParser);

  // KLT
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
  tracker.setWindowSize((int)xmlp.getKltWindowSize());
  tracker.setQuality(xmlp.getKltQuality());
  tracker.setMinDistance(xmlp.getKltMinDistance());
  tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
  tracker.setBlockSize((int)xmlp.getKltBlockSize());
  tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
  maskBorder = xmlp.getKltMaskBorder();

  // if(useScanLine)
  faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
#endif

  // Depth normal
  setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
  setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
  setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
  setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
  setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());

  // Depth dense
  setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
#else
  (void)configFile;
  (void)verbose;
  throw(vpException(vpException::ioError, "vpMbGenericTracker::TrackerWrapper::loadConfigFile() needs pugixml built-in 3rdparty"));
#endif
}
6840 
6841 #ifdef VISP_HAVE_PCL
                                                      const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  // NOTE(review): the first line of this signature is outside the visible
  // chunk; from the body this is the PCL overload of postTracking(), run
  // after the VVS optimization to refresh per-tracker state and visibility.
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  // KLT: prune bad points; reinit the KLT tracker if too many were lost.
  if (m_trackerType & KLT_TRACKER) {
    if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
      vpMbKltTracker::reinit(*ptr_I);
    }
  }
#endif

  // Looking for new visible face
  if (m_trackerType & EDGE_TRACKER) {
    bool newvisibleface = false;
    vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);

    if (useScanLine) {
      faces.computeClippedPolygons(m_cMo, m_cam);
      faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
    }
  }

  // Depth normal
  if (m_trackerType & DEPTH_NORMAL_TRACKER)
    vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);

  // Depth dense
  if (m_trackerType & DEPTH_DENSE_TRACKER)
    vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);

  // Edge
  if (m_trackerType & EDGE_TRACKER) {

    vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
    // Reinit the moving edge for the lines which need it.
    vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);

    // NOTE(review): the projection-error computation inside this guard is
    // elided in this view of the file.
    if (computeProjError) {
    }
  }
}
6886 
                                                     const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  // NOTE(review): the first line of this signature is outside the visible
  // chunk; from the body this is the PCL overload of preTracking(). The
  // tracking calls inside each try block are elided in this view of the file;
  // only the per-modality error reporting remains visible.
  if (m_trackerType & EDGE_TRACKER) {
    try {
    }
    catch (...) {
      std::cerr << "Error in moving edge tracking" << std::endl;
      throw;
    }
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    try {
    }
    catch (const vpException &e) {
      std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
      throw;
    }
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    try {
    }
    catch (...) {
      std::cerr << "Error in Depth normal tracking" << std::endl;
      throw;
    }
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    try {
    }
    catch (...) {
      std::cerr << "Error in Depth dense tracking" << std::endl;
      throw;
    }
  }
}
6932 #endif
6933 
                                                      const unsigned int pointcloud_width,
                                                      const unsigned int pointcloud_height)
{
  // NOTE(review): the first line of this signature is outside the visible
  // chunk; from the body this is the matrix/vector point-cloud overload of
  // postTracking(), run after VVS to refresh per-tracker state.
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  // KLT: prune bad points; reinit the KLT tracker if too many were lost.
  if (m_trackerType & KLT_TRACKER) {
    if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
      vpMbKltTracker::reinit(*ptr_I);
    }
  }
#endif

  // Looking for new visible face
  if (m_trackerType & EDGE_TRACKER) {
    bool newvisibleface = false;
    vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);

    if (useScanLine) {
      faces.computeClippedPolygons(m_cMo, m_cam);
      faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
    }
  }

  // Depth normal
  if (m_trackerType & DEPTH_NORMAL_TRACKER)
    vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);

  // Depth dense
  if (m_trackerType & DEPTH_DENSE_TRACKER)
    vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);

  // Edge
  if (m_trackerType & EDGE_TRACKER) {

    vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
    // Reinit the moving edge for the lines which need it.
    vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);

    // NOTE(review): the projection-error computation inside this guard is
    // elided in this view of the file.
    if (computeProjError) {
    }
  }
}
6979 
                                                     const std::vector<vpColVector> *const point_cloud,
                                                     const unsigned int pointcloud_width,
                                                     const unsigned int pointcloud_height)
{
  // NOTE(review): the first line of this signature is outside the visible
  // chunk; from the body this is the std::vector<vpColVector> point-cloud
  // overload of preTracking(). The edge and KLT tracking calls inside their
  // try blocks are elided in this view of the file.
  if (m_trackerType & EDGE_TRACKER) {
    try {
    }
    catch (...) {
      std::cerr << "Error in moving edge tracking" << std::endl;
      throw;
    }
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    try {
    }
    catch (const vpException &e) {
      std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
      throw;
    }
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    try {
      // Extract depth-normal features from the organized point cloud.
      vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
    }
    catch (...) {
      std::cerr << "Error in Depth tracking" << std::endl;
      throw;
    }
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    try {
      // Extract dense depth features from the organized point cloud.
      vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
    }
    catch (...) {
      std::cerr << "Error in Depth dense tracking" << std::endl;
      throw;
    }
  }
}
7027 
                                                     const vpMatrix *const point_cloud,
                                                     const unsigned int pointcloud_width,
                                                     const unsigned int pointcloud_height)
{
  // NOTE(review): the first line of this signature is outside the visible
  // chunk; from the body this is the vpMatrix point-cloud overload of
  // preTracking(). The edge and KLT tracking calls inside their try blocks
  // are elided in this view of the file.
  if (m_trackerType & EDGE_TRACKER) {
    try {
    }
    catch (...) {
      std::cerr << "Error in moving edge tracking" << std::endl;
      throw;
    }
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    try {
    }
    catch (const vpException &e) {
      std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
      throw;
    }
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    try {
      // Extract depth-normal features from the organized point cloud.
      vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
    }
    catch (...) {
      std::cerr << "Error in Depth tracking" << std::endl;
      throw;
    }
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    try {
      // Extract dense depth features from the organized point cloud.
      vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
    }
    catch (...) {
      std::cerr << "Error in Depth dense tracking" << std::endl;
      throw;
    }
  }
}
7075 
                                                     const vpImage<vpRGBa> *const I_color, const std::string &cad_name,
                                                     const vpHomogeneousMatrix &cMo, bool verbose,
                                                     const vpHomogeneousMatrix &T)
{
  // NOTE(review): the first line of this signature is outside the visible
  // chunk; from the body this is the pointer-based reInitModel() that frees
  // every per-modality model structure, reloads the CAD model, and
  // re-initializes from the given pose. The declaration of the `cy` cylinder
  // pointer used below is also elided in this view.
  m_cMo.eye();

  // Edge
  vpMbtDistanceLine *l;
  vpMbtDistanceCircle *ci;

  // Free all edge primitives at every scale level.
  for (unsigned int i = 0; i < scales.size(); i++) {
    if (scales[i]) {
      for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
        l = *it;
        if (l != nullptr)
          delete l;
        l = nullptr;
      }

      for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
           ++it) {
        cy = *it;
        if (cy != nullptr)
          delete cy;
        cy = nullptr;
      }

      for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
        ci = *it;
        if (ci != nullptr)
          delete ci;
        ci = nullptr;
      }

      lines[i].clear();
      cylinders[i].clear();
      circles[i].clear();
    }
  }

  // Reset the edge primitive counters.
  nline = 0;
  ncylinder = 0;
  ncircle = 0;
  nbvisiblepolygone = 0;

  // KLT
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  // delete the Klt Polygon features
  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
    vpMbtDistanceKltPoints *kltpoly = *it;
    if (kltpoly != nullptr) {
      delete kltpoly;
    }
    kltpoly = nullptr;
  }
  kltPolygons.clear();

  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
       ++it) {
    vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
    if (kltPolyCylinder != nullptr) {
      delete kltPolyCylinder;
    }
    kltPolyCylinder = nullptr;
  }
  kltCylinders.clear();

  // delete the structures used to display circles
  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
    ci = *it;
    if (ci != nullptr) {
      delete ci;
    }
    ci = nullptr;
  }
  circles_disp.clear();

  firstInitialisation = true;

#endif

  // Depth normal
  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
    delete m_depthNormalFaces[i];
    m_depthNormalFaces[i] = nullptr;
  }
  m_depthNormalFaces.clear();

  // Depth dense
  for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
    delete m_depthDenseFaces[i];
    m_depthDenseFaces[i] = nullptr;
  }
  m_depthDenseFaces.clear();

  faces.reset();

  // Reload the CAD model and re-initialize from whichever image was provided.
  loadModel(cad_name, verbose, T);
  if (I) {
    initFromPose(*I, cMo);
  }
  else {
    initFromPose(*I_color, cMo);
  }
}
7183 
7184 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
7185  const vpHomogeneousMatrix &cMo, bool verbose,
7186  const vpHomogeneousMatrix &T)
7187 {
7188  reInitModel(&I, nullptr, cad_name, cMo, verbose, T);
7189 }
7190 
7191 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
7192  const vpHomogeneousMatrix &cMo, bool verbose,
7193  const vpHomogeneousMatrix &T)
7194 {
7195  reInitModel(nullptr, &I_color, cad_name, cMo, verbose, T);
7196 }
7197 
7198 void vpMbGenericTracker::TrackerWrapper::resetTracker()
7199 {
7201 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7203 #endif
7206 }
7207 
7208 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
7209 {
7210  m_cam = cam;
7211 
7213 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7215 #endif
7218 }
7219 
7220 void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags) { vpMbEdgeTracker::setClipping(flags); }
7221 
7222 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
7223 {
7225 }
7226 
7227 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
7228 {
7230 }
7231 
7232 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
7233 {
7235 #ifdef VISP_HAVE_OGRE
7236  faces.getOgreContext()->setWindowName("TrackerWrapper");
7237 #endif
7238 }
7239 
7241  const vpImage<vpRGBa> *const I_color, const vpHomogeneousMatrix &cdMo)
7242 {
7243  bool performKltSetPose = false;
7244  if (I_color) {
7245  vpImageConvert::convert(*I_color, m_I);
7246  }
7247 
7248 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7249  if (m_trackerType & KLT_TRACKER) {
7250  performKltSetPose = true;
7251 
7252  if (useScanLine || clippingFlag > 3) {
7253  m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7254  }
7255 
7256  vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
7257  }
7258 #endif
7259 
7260  if (!performKltSetPose) {
7261  m_cMo = cdMo;
7262  init(I ? *I : m_I);
7263  return;
7264  }
7265 
7266  if (m_trackerType & EDGE_TRACKER) {
7268  }
7269 
7270  if (useScanLine) {
7271  faces.computeClippedPolygons(m_cMo, m_cam);
7272  faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7273  }
7274 
7275 #if 0
7276  if (m_trackerType & EDGE_TRACKER) {
7277  initPyramid(I, Ipyramid);
7278 
7279  unsigned int i = (unsigned int)scales.size();
7280  do {
7281  i--;
7282  if (scales[i]) {
7283  downScale(i);
7284  vpMbEdgeTracker::initMovingEdge(*Ipyramid[i], cMo);
7285  upScale(i);
7286  }
7287  } while (i != 0);
7288 
7289  cleanPyramid(Ipyramid);
7290  }
7291 #else
7292  if (m_trackerType & EDGE_TRACKER) {
7293  vpMbEdgeTracker::initMovingEdge(I ? *I : m_I, m_cMo);
7294  }
7295 #endif
7296 
7297  // Depth normal
7298  vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7299 
7300  // Depth dense
7301  vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7302 }
7303 
7305 {
7306  setPose(&I, nullptr, cdMo);
7307 }
7308 
7310 {
7311  setPose(nullptr, &I_color, cdMo);
7312 }
7313 
7314 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
7315 {
7317 }
7318 
7319 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
7320 {
7322 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7324 #endif
7327 }
7328 
7329 void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
7330 {
7331  if ((type & (EDGE_TRACKER |
7332 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7333  KLT_TRACKER |
7334 #endif
7335  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7336  throw vpException(vpTrackingException::badValue, "bad value for tracker type: !", type);
7337  }
7338 
7339  m_trackerType = type;
7340 }
7341 
7342 void vpMbGenericTracker::TrackerWrapper::testTracking()
7343 {
7344  if (m_trackerType & EDGE_TRACKER) {
7346  }
7347 }
7348 
7350 #if defined(VISP_HAVE_PCL)
7351  I
7352 #endif
7353 )
7354 {
7355  if ((m_trackerType & (EDGE_TRACKER
7356 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7357  | KLT_TRACKER
7358 #endif
7359  )) == 0) {
7360  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7361  return;
7362  }
7363 
7364 #if defined(VISP_HAVE_PCL)
7365  track(&I, nullptr);
7366 #endif
7367 }
7368 
7370 {
7371  // not exposed to the public API, only for debug
7372 }
7373 
#ifdef VISP_HAVE_PCL
/*!
  Track from an optional grayscale image (edge/KLT features) and a PCL point
  cloud (depth features).

  \param ptr_I : Grayscale image pointer; must be non-null when edge or KLT
  tracking is enabled.
  \param point_cloud : Point cloud; must be non-null when a depth tracker is
  enabled.

  \exception vpException::fatalError : If a required input is null.

  On any tracking exception, the previous pose is restored before rethrowing.

  NOTE(review): the extraction dropped the leading signature line (7375);
  restored per the upstream ViSP sources — confirm against the repository.
*/
void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
                                               const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  if ((m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                        KLT_TRACKER |
#endif
                        DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
    std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
    return;
  }

  if (m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                       | KLT_TRACKER
#endif
                       ) &&
      ptr_I == nullptr) {
    throw vpException(vpException::fatalError, "Image pointer is nullptr!");
  }

  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) { // point_cloud == nullptr
    throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
  }

  // Back-up cMo in case of exception
  vpHomogeneousMatrix cMo_1 = m_cMo;
  try {
    preTracking(ptr_I, point_cloud);

    try {
      computeVVS(ptr_I);
    }
    catch (...) {
      // Signal that the covariance is invalid before propagating the error.
      covarianceMatrix = -1;
      throw; // throw the original exception
    }

    if (m_trackerType == EDGE_TRACKER)
      testTracking();

    postTracking(ptr_I, point_cloud);
  }
  catch (const vpException &e) {
    std::cerr << "Exception: " << e.what() << std::endl;
    m_cMo = cMo_1;
    throw; // rethrowing the original exception
  }
}
#endif
void setWindowName(const Ogre::String &n)
Definition: vpAROgre.h:269
unsigned int getCols() const
Definition: vpArray2D.h:327
void resize(unsigned int nrows, unsigned int ncols, bool flagNullify=true, bool recopy_=true)
Definition: vpArray2D.h:352
unsigned int getRows() const
Definition: vpArray2D.h:337
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
Definition: vpColVector.h:163
void insert(unsigned int i, const vpColVector &v)
void resize(unsigned int i, bool flagNullify=true)
Definition: vpColVector.h:1056
Class to define RGB colors available for display functionalities.
Definition: vpColor.h:152
static const vpColor red
Definition: vpColor.h:211
static const vpColor cyan
Definition: vpColor.h:220
static const vpColor blue
Definition: vpColor.h:217
static const vpColor purple
Definition: vpColor.h:222
static const vpColor yellow
Definition: vpColor.h:219
static const vpColor green
Definition: vpColor.h:214
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayEllipse(const vpImage< unsigned char > &I, const vpImagePoint &center, const double &coef1, const double &coef2, const double &coef3, bool use_normalized_centered_moments, const vpColor &color, unsigned int thickness=1, bool display_center=false, bool display_arc=false)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void displayArrow(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color=vpColor::white, unsigned int w=4, unsigned int h=2, unsigned int thickness=1)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition: vpException.h:59
@ ioError
I/O error.
Definition: vpException.h:79
@ badValue
Used to indicate that a value is not in the allowed range.
Definition: vpException.h:85
@ notInitialized
Used to indicate that a parameter is not initialized.
Definition: vpException.h:86
@ fatalError
Fatal error.
Definition: vpException.h:84
const char * what() const
Definition: vpException.cpp:70
static vpHomogeneousMatrix direct(const vpColVector &v)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
vp_deprecated void init()
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:82
unsigned int getWidth() const
Definition: vpImage.h:245
unsigned int getHeight() const
Definition: vpImage.h:184
void init(unsigned int h, unsigned int w, Type value)
Definition: vpImage.h:619
static std::string getFileExtension(const std::string &pathname, bool checkFile=false)
Definition: vpIoTools.cpp:1900
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition: vpKltOpencv.h:73
static double rad(double deg)
Definition: vpMath.h:127
static double sqr(double x)
Definition: vpMath.h:201
static bool equal(double x, double y, double threshold=0.001)
Definition: vpMath.h:449
static double deg(double rad)
Definition: vpMath.h:117
Implementation of a matrix and operations on matrices.
Definition: vpMatrix.h:146
void eye()
Definition: vpMatrix.cpp:448
vpMatrix AtA() const
Definition: vpMatrix.cpp:645
void insert(const vpMatrix &A, unsigned int r, unsigned int c)
Definition: vpMatrix.cpp:5781
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
void computeVisibility(unsigned int width, unsigned int height)
virtual void computeVVSInteractionMatrixAndResidu() vp_override
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
virtual void setCameraParameters(const vpCameraParameters &camera) vp_override
virtual void resetTracker() vp_override
virtual void computeVVSInit() vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual void resetTracker() vp_override
virtual void computeVVSInteractionMatrixAndResidu() vp_override
virtual void track(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false)
virtual void computeVVSInit() vp_override
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
virtual void setPose(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud, const vpHomogeneousMatrix &cdMo)
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
virtual void setCameraParameters(const vpCameraParameters &camera) vp_override
void computeVisibility(unsigned int width, unsigned int height)
void computeVVS(const vpImage< unsigned char > &_I, unsigned int lvl)
void computeProjectionError(const vpImage< unsigned char > &_I)
virtual void computeVVSWeights()
virtual void setNearClippingDistance(const double &dist) vp_override
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") vp_override
virtual void setFarClippingDistance(const double &dist) vp_override
void resetTracker() vp_override
virtual void computeVVSInteractionMatrixAndResidu() vp_override
void initMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
void trackMovingEdge(const vpImage< unsigned char > &I)
virtual void setClipping(const unsigned int &flags) vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") vp_override
virtual void setCameraParameters(const vpCameraParameters &cam) vp_override
void computeVVSFirstPhaseFactor(const vpImage< unsigned char > &I, unsigned int lvl=0)
void updateMovingEdge(const vpImage< unsigned char > &I)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
unsigned int initMbtTracking(unsigned int &nberrors_lines, unsigned int &nberrors_cylinders, unsigned int &nberrors_circles)
void reinitMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual void computeVVSInit() vp_override
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
void setMovingEdge(const vpMe &me)
virtual void computeVVSInteractionMatrixAndResidu(const vpImage< unsigned char > &I)
virtual void testTracking() vp_override
void visibleFace(const vpImage< unsigned char > &_I, const vpHomogeneousMatrix &_cMo, bool &newvisibleline)
virtual void setUseKltTracking(const std::string &name, const bool &useKltTracking)
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo) vp_override
virtual std::vector< std::vector< double > > getFeaturesForDisplay()
virtual void setMask(const vpImage< bool > &mask) vp_override
virtual double getGoodMovingEdgesRatioThreshold() const
virtual int getTrackerType() const
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt) vp_override
std::map< std::string, TrackerWrapper * > m_mapOfTrackers
virtual void setKltMaskBorder(const unsigned int &e)
virtual void loadConfigFile(const std::string &configFile, bool verbose=true) vp_override
virtual ~vpMbGenericTracker() vp_override
virtual vpMbHiddenFaces< vpMbtPolygon > & getFaces() vp_override
unsigned int m_nb_feat_edge
Number of moving-edges features.
virtual void setKltThresholdAcceptation(double th)
std::map< std::string, vpHomogeneousMatrix > m_mapOfCameraTransformationMatrix
virtual double getKltThresholdAcceptation() const
virtual void getLcircle(std::list< vpMbtDistanceCircle * > &circlesList, unsigned int level=0) const
virtual void setOgreShowConfigDialog(bool showConfigDialog) vp_override
virtual void getCameraParameters(vpCameraParameters &cam) const
Definition: vpMbTracker.h:248
virtual void setDepthDenseFilteringMaxDistance(double maxDistance)
virtual std::list< vpMbtDistanceKltCylinder * > & getFeaturesKltCylinder()
virtual void initFromPoints(const vpImage< unsigned char > &I, const std::string &initFile)
virtual std::vector< cv::Point2f > getKltPoints() const
virtual void setAngleDisappear(const double &a) vp_override
virtual unsigned int getNbPolygon() const vp_override
virtual void setGoodNbRayCastingAttemptsRatio(const double &ratio)
virtual void getPose(vpHomogeneousMatrix &cMo) const
Definition: vpMbTracker.h:414
virtual void setGoodMovingEdgesRatioThreshold(double threshold)
virtual void setDepthDenseFilteringMinDistance(double minDistance)
virtual void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false) vp_override
virtual void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
virtual void setMinLineLengthThresh(double minLineLengthThresh, const std::string &name="") vp_override
virtual void setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
vpColVector m_w
Robust weights.
virtual void setProjectionErrorDisplay(bool display) vp_override
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
virtual void saveConfigFile(const std::string &settingsFile) const
virtual std::string getReferenceCameraName() const
virtual std::map< std::string, int > getCameraTrackerTypes() const
vpMbGenericTracker()
json namespace shortcut
virtual vpMbtPolygon * getPolygon(unsigned int index) vp_override
virtual void loadConfigFileJSON(const std::string &configFile, bool verbose=true)
virtual void setProjectionErrorDisplayArrowLength(unsigned int length) vp_override
unsigned int m_nb_feat_depthDense
Number of depth dense features.
virtual void setOgreVisibilityTest(const bool &v) vp_override
virtual void setDepthNormalPclPlaneEstimationMethod(int method)
virtual void computeProjectionError()
virtual void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual void setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
virtual void setMovingEdge(const vpMe &me)
virtual void setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
virtual void setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
virtual void setAngleAppear(const double &a) vp_override
virtual void setFeatureFactors(const std::map< vpTrackerType, double > &mapOfFeatureFactors)
virtual void computeVVSInteractionMatrixAndResidu() vp_override
virtual std::pair< std::vector< vpPolygon >, std::vector< std::vector< vpPoint > > > getPolygonFaces(bool orderPolygons=true, bool useVisibility=true, bool clipPolygon=false) vp_override
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
virtual void initClick(const vpImage< unsigned char > &I, const std::string &initFile, bool displayHelp=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual void getLcylinder(std::list< vpMbtDistanceCylinder * > &cylindersList, unsigned int level=0) const
virtual std::vector< vpImagePoint > getKltImagePoints() const
virtual void setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
virtual std::vector< std::string > getCameraNames() const
virtual void setDepthDenseFilteringMethod(int method)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
virtual void computeVVSInit() vp_override
vpColVector m_weightedError
Weighted error.
virtual std::list< vpMbtDistanceKltPoints * > & getFeaturesKlt()
virtual void init(const vpImage< unsigned char > &I) vp_override
vpMatrix m_L
Interaction matrix.
virtual void loadModel(const std::string &modelFile, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix()) vp_override
virtual void setKltOpencv(const vpKltOpencv &t)
virtual void setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
virtual void setFarClippingDistance(const double &dist) vp_override
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") vp_override
virtual void computeVVSWeights()
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") vp_override
virtual void loadConfigFileXML(const std::string &configFile, bool verbose=true)
virtual void setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
virtual void testTracking() vp_override
virtual void setNearClippingDistance(const double &dist) vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
virtual void setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
virtual unsigned int getKltMaskBorder() const
vpColVector m_error
(s - s*)
virtual void setReferenceCameraName(const std::string &referenceCameraName)
std::map< vpTrackerType, double > m_mapOfFeatureFactors
Ponderation between each feature type in the VVS stage.
virtual void computeVVS(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages)
virtual std::list< vpMbtDistanceCircle * > & getFeaturesCircle()
virtual void getLline(std::list< vpMbtDistanceLine * > &linesList, unsigned int level=0) const
virtual void resetTracker() vp_override
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness) vp_override
virtual void setCameraParameters(const vpCameraParameters &camera) vp_override
virtual void setTrackerType(int type)
unsigned int m_nb_feat_klt
Number of klt features.
virtual unsigned int getNbPoints(unsigned int level=0) const
virtual vpKltOpencv getKltOpencv() const
virtual int getKltNbPoints() const
unsigned int m_nb_feat_depthNormal
Number of depth normal features.
virtual void setClipping(const unsigned int &flags) vp_override
virtual double computeCurrentProjectionError(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo, const vpCameraParameters &_cam) vp_override
virtual vpMe getMovingEdge() const
virtual void setDisplayFeatures(bool displayF) vp_override
virtual void preTracking(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages, std::map< std::string, pcl::PointCloud< pcl::PointXYZ >::ConstPtr > &mapOfPointClouds)
virtual void setProjectionErrorComputation(const bool &flag) vp_override
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name="") vp_override
virtual void setCameraTransformationMatrix(const std::string &cameraName, const vpHomogeneousMatrix &cameraTransformationMatrix)
virtual void setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
virtual void track(const vpImage< unsigned char > &I) vp_override
std::string m_referenceCameraName
Name of the reference camera.
virtual void setLod(bool useLod, const std::string &name="") vp_override
virtual void setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
friend void from_json(const nlohmann::json &j, TrackerWrapper &t)
Load configuration settings from a JSON object for a tracker wrapper.
virtual unsigned int getClipping() const
Definition: vpMbTracker.h:256
virtual std::map< int, vpImagePoint > getKltImagePointsWithId() const
vpAROgre * getOgreContext()
virtual void computeVVSInit() vp_override
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo) vp_override
virtual void computeVVSInteractionMatrixAndResidu() vp_override
void preTracking(const vpImage< unsigned char > &I)
virtual void initCircle(const vpPoint &, const vpPoint &, const vpPoint &, double, int, const std::string &name="") vp_override
bool postTracking(const vpImage< unsigned char > &I, vpColVector &w)
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
virtual void reinit(const vpImage< unsigned char > &I)
void setCameraParameters(const vpCameraParameters &cam) vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
virtual void initCylinder(const vpPoint &, const vpPoint &, double, int, const std::string &name="") vp_override
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
void resetTracker() vp_override
virtual void setProjectionErrorDisplayArrowLength(unsigned int length)
Definition: vpMbTracker.h:595
double m_lambda
Gain of the virtual visual servoing stage.
Definition: vpMbTracker.h:187
virtual void setMaxIter(unsigned int max)
Definition: vpMbTracker.h:545
bool modelInitialised
Definition: vpMbTracker.h:123
bool m_projectionErrorDisplay
Display gradient and model orientation for projection error computation.
Definition: vpMbTracker.h:213
virtual void setOgreShowConfigDialog(bool showConfigDialog)
Definition: vpMbTracker.h:650
virtual void setMask(const vpImage< bool > &mask)
Definition: vpMbTracker.h:564
virtual void getCameraParameters(vpCameraParameters &cam) const
Definition: vpMbTracker.h:248
vpImage< unsigned char > m_I
Grayscale image buffer, used when passing color images.
Definition: vpMbTracker.h:223
virtual void setDisplayFeatures(bool displayF)
Definition: vpMbTracker.h:518
virtual vpHomogeneousMatrix getPose() const
Definition: vpMbTracker.h:423
bool m_computeInteraction
Definition: vpMbTracker.h:185
vpMatrix oJo
The Degrees of Freedom to estimate.
Definition: vpMbTracker.h:115
vpMatrix covarianceMatrix
Covariance matrix.
Definition: vpMbTracker.h:130
double m_initialMu
Initial Mu for Levenberg Marquardt optimization loop.
Definition: vpMbTracker.h:193
bool computeProjError
Definition: vpMbTracker.h:133
vpHomogeneousMatrix m_cMo
The current pose.
Definition: vpMbTracker.h:113
virtual void computeVVSCheckLevenbergMarquardt(unsigned int iter, vpColVector &error, const vpColVector &m_error_prev, const vpHomogeneousMatrix &cMoPrev, double &mu, bool &reStartFromLastIncrement, vpColVector *const w=nullptr, const vpColVector *const m_w_prev=nullptr)
vpCameraParameters m_cam
The camera parameters.
Definition: vpMbTracker.h:111
double projectionError
Definition: vpMbTracker.h:136
double m_stopCriteriaEpsilon
Epsilon threshold to stop the VVS optimization loop.
Definition: vpMbTracker.h:191
bool useOgre
Use Ogre3d for visibility tests.
Definition: vpMbTracker.h:155
virtual void computeVVSWeights(vpRobust &robust, const vpColVector &error, vpColVector &w)
vpMbHiddenFaces< vpMbtPolygon > faces
Set of faces describing the object.
Definition: vpMbTracker.h:143
virtual void setCameraParameters(const vpCameraParameters &cam)
Definition: vpMbTracker.h:488
virtual void setAngleDisappear(const double &a)
Definition: vpMbTracker.h:481
virtual void setInitialMu(double mu)
Definition: vpMbTracker.h:529
virtual void setScanLineVisibilityTest(const bool &v)
Definition: vpMbTracker.h:608
virtual void setOgreVisibilityTest(const bool &v)
virtual void computeCovarianceMatrixVVS(const bool isoJoIdentity, const vpColVector &w_true, const vpHomogeneousMatrix &cMoPrev, const vpMatrix &L_true, const vpMatrix &LVJ_true, const vpColVector &error)
vpMbtOptimizationMethod m_optimizationMethod
Optimization method used.
Definition: vpMbTracker.h:140
virtual void computeVVSPoseEstimation(const bool isoJoIdentity, unsigned int iter, vpMatrix &L, vpMatrix &LTL, vpColVector &R, const vpColVector &error, vpColVector &error_prev, vpColVector &LTR, double &mu, vpColVector &v, const vpColVector *const w=nullptr, vpColVector *const m_w_prev=nullptr)
bool displayFeatures
If true, the features are displayed.
Definition: vpMbTracker.h:138
virtual void setProjectionErrorDisplay(bool display)
Definition: vpMbTracker.h:590
double angleDisappears
Angle used to detect a face disappearance.
Definition: vpMbTracker.h:147
virtual void setLambda(double gain)
Definition: vpMbTracker.h:536
virtual void setNearClippingDistance(const double &dist)
virtual void setFarClippingDistance(const double &dist)
double distFarClip
Distance for far clipping.
Definition: vpMbTracker.h:151
bool m_isoJoIdentity
Boolean to know if oJo is identity (for fast computation)
Definition: vpMbTracker.h:117
bool useScanLine
Use Scanline for visibility tests.
Definition: vpMbTracker.h:158
virtual void setProjectionErrorComputation(const bool &flag)
Definition: vpMbTracker.h:585
virtual void setClipping(const unsigned int &flags)
double angleAppears
Angle used to detect a face appearance.
Definition: vpMbTracker.h:145
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt)
Definition: vpMbTracker.h:558
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness)
Definition: vpMbTracker.h:603
virtual void setAngleAppear(const double &a)
Definition: vpMbTracker.h:470
bool computeCovariance
Flag used to specify if the covariance matrix has to be computed or not.
Definition: vpMbTracker.h:128
double distNearClip
Distance for near clipping.
Definition: vpMbTracker.h:149
unsigned int m_maxIter
Maximum number of iterations of the virtual visual servoing stage.
Definition: vpMbTracker.h:189
unsigned int clippingFlag
Flags specifying which clipping to used.
Definition: vpMbTracker.h:153
vpMbHiddenFaces< vpMbtPolygon > m_projectionErrorFaces
Set of faces describing the object, used for projection error.
Definition: vpMbTracker.h:202
virtual void loadConfigFile(const std::string &configFile, bool verbose=true)
Manage a circle used in the model-based tracker.
Manage a cylinder used in the model-based tracker.
Implementation of a polygon of the model containing points of interest. It is used by the model-based...
Implementation of a polygon of the model containing points of interest. It is used by the model-based...
Manage the line of a polygon used in the model-based tracker.
Implementation of a polygon of the model used by the model-based tracker.
Definition: vpMbtPolygon.h:58
Parse an Xml file to extract configuration parameters of a mbtConfig object.
@ TOO_NEAR
Point not tracked anymore, since too near from its neighbor.
Definition: vpMeSite.h:90
@ THRESHOLD
Point not tracked due to the likelihood that is below the threshold, but retained in the ME list.
Definition: vpMeSite.h:88
@ CONTRAST
Point not tracked due to a contrast problem, but retained in the ME list.
Definition: vpMeSite.h:84
@ M_ESTIMATOR
Point detected as an outlier during virtual visual-servoing.
Definition: vpMeSite.h:89
@ NO_SUPPRESSION
Point successfully tracked.
Definition: vpMeSite.h:83
Definition: vpMe.h:124
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition: vpPoint.h:77
Error that can be emitted by the vpTracker class and its derivatives.
@ notEnoughPointError
Not enough point to track.
@ initializationError
Tracker initialization error.
@ fatalError
Tracker fatal error.
vpVelocityTwistMatrix buildFrom(const vpTranslationVector &t, const vpRotationMatrix &R)