vpMbGenericTracker.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Generic model-based tracker.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/mbt/vpMbGenericTracker.h>
37 
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/mbt/vpMbtXmlGenericParser.h>
42 
43 vpMbGenericTracker::vpMbGenericTracker()
44  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
45  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
46  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
47 {
48  m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
49 
50  // Add default camera transformation matrix
51  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
52 
53  // Add default weighting for each feature type
54  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
55 
56 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
57  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
58 #endif
59 
60  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
61  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
62 }
63 
64 vpMbGenericTracker::vpMbGenericTracker(unsigned int nbCameras, int trackerType)
65  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
66  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
67  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
68 {
69  if (nbCameras == 0) {
70  throw vpException(vpTrackingException::fatalError, "At least one camera is required!");
71  } else if (nbCameras == 1) {
72  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);
73 
74  // Add default camera transformation matrix
75  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
76  } else {
77  for (unsigned int i = 1; i <= nbCameras; i++) {
78  std::stringstream ss;
79  ss << "Camera" << i;
80  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);
81 
82  // Add default camera transformation matrix
83  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
84  }
85 
86  // Set by default the reference camera to the first one
87  m_referenceCameraName = m_mapOfTrackers.begin()->first;
88  }
89 
90  // Add default weighting for each feature type
91  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
92 
93 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
94  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
95 #endif
96 
97  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
98  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
99 }
100 
101 vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
102  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
103  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
104  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
105 {
106  if (trackerTypes.empty()) {
107  throw vpException(vpException::badValue, "There is no camera!");
108  }
109 
110  if (trackerTypes.size() == 1) {
111  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);
112 
113  // Add default camera transformation matrix
114  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
115  } else {
116  for (size_t i = 1; i <= trackerTypes.size(); i++) {
117  std::stringstream ss;
118  ss << "Camera" << i;
119  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);
120 
121  // Add default camera transformation matrix
122  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
123  }
124 
125  // Set by default the reference camera to the first one
126  m_referenceCameraName = m_mapOfTrackers.begin()->first;
127  }
128 
129  // Add default weighting for each feature type
130  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
131 
132 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
133  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
134 #endif
135 
136  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
137  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
138 }
139 
140 vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
141  const std::vector<int> &trackerTypes)
142  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
143  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
144  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
145 {
146  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
147  throw vpException(vpTrackingException::badValue,
148  "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
149  }
150 
151  for (size_t i = 0; i < cameraNames.size(); i++) {
152  m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);
153 
154  // Add default camera transformation matrix
155  m_mapOfCameraTransformationMatrix[cameraNames[i]] = vpHomogeneousMatrix();
156  }
157 
158  // Set by default the reference camera to the first one
159  m_referenceCameraName = m_mapOfTrackers.begin()->first;
160 
161  // Add default weighting for each feature type
162  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
163 
164 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
165  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
166 #endif
167 
168  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
169  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
170 }
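// Illustrative usage sketch: the constructor above can build a stereo tracker that
// mixes feature types. The camera names and tracker-type combination below are
// assumptions chosen for the example.
//
//   std::vector<std::string> cameraNames;
//   cameraNames.push_back("Camera1");
//   cameraNames.push_back("Camera2");
//   std::vector<int> trackerTypes;
//   trackerTypes.push_back(vpMbGenericTracker::EDGE_TRACKER);
//   trackerTypes.push_back(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::DEPTH_DENSE_TRACKER);
//   vpMbGenericTracker tracker(cameraNames, trackerTypes); // "Camera1" becomes the reference camera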
171 
172 vpMbGenericTracker::~vpMbGenericTracker()
173 {
174  for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
175  ++it) {
176  delete it->second;
177  it->second = NULL;
178  }
179 }
180 
198 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<unsigned char> &I,
199  const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam)
200 {
201  double rawTotalProjectionError = 0.0;
202  unsigned int nbTotalFeaturesUsed = 0;
203 
204  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
205  it != m_mapOfTrackers.end(); ++it) {
206  TrackerWrapper *tracker = it->second;
207 
208  unsigned int nbFeaturesUsed = 0;
209  double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
210 
211  if (nbFeaturesUsed > 0) {
212  nbTotalFeaturesUsed += nbFeaturesUsed;
213  rawTotalProjectionError += curProjError;
214  }
215  }
216 
217  if (nbTotalFeaturesUsed > 0) {
218  return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
219  }
220 
221  return 90.0;
222 }
223 
241 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<vpRGBa> &I_color,
242  const vpHomogeneousMatrix &_cMo,
243  const vpCameraParameters &_cam)
244 {
245  vpImage<unsigned char> I;
246  vpImageConvert::convert(I_color, I); // FS: Shouldn't we use m_I here, which was converted in track()?
247 
248  return computeCurrentProjectionError(I, _cMo, _cam);
249 }
250 
251 void vpMbGenericTracker::computeProjectionError()
252 {
253  if (computeProjError) {
254  double rawTotalProjectionError = 0.0;
255  unsigned int nbTotalFeaturesUsed = 0;
256 
257  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
258  it != m_mapOfTrackers.end(); ++it) {
259  TrackerWrapper *tracker = it->second;
260 
261  double curProjError = tracker->getProjectionError();
262  unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
263 
264  if (nbFeaturesUsed > 0) {
265  nbTotalFeaturesUsed += nbFeaturesUsed;
266  rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
267  }
268  }
269 
270  if (nbTotalFeaturesUsed > 0) {
271  projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
272  } else {
273  projectionError = 90.0;
274  }
275  } else {
276  projectionError = 90.0;
277  }
278 }
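// Note (descriptive comment): the projection error aggregated above is the
// feature-count weighted mean of the per-camera errors, converted back to degrees:
//   projectionError = deg( sum_c rad(e_c) * n_c / sum_c n_c )
// where e_c is the error already stored by camera c and n_c the number of features
// it used; 90 degrees is reported when no feature contributed.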
279 
280 void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
281 {
282  computeVVSInit(mapOfImages);
283 
284  if (m_error.getRows() < 4) {
285  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
286  }
287 
288  double normRes = 0;
289  double normRes_1 = -1;
290  unsigned int iter = 0;
291 
292  vpMatrix LTL;
293  vpColVector LTR, v;
294  vpColVector error_prev;
295 
296  double mu = m_initialMu;
297  vpHomogeneousMatrix cMo_prev;
298 
299  bool isoJoIdentity_ = true;
300 
301  // Covariance
302  vpColVector W_true(m_error.getRows());
303  vpMatrix L_true, LVJ_true;
304 
305  // Create the map of VelocityTwistMatrices
306  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
307  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
308  it != m_mapOfCameraTransformationMatrix.end(); ++it) {
309  vpVelocityTwistMatrix cVo;
310  cVo.buildFrom(it->second);
311  mapOfVelocityTwist[it->first] = cVo;
312  }
313 
314  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
315 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
316  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
317 #endif
318  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
319  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];
320 
321  m_nb_feat_edge = 0;
322  m_nb_feat_klt = 0;
323  m_nb_feat_depthNormal = 0;
324  m_nb_feat_depthDense = 0;
325 
326  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
327  computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);
328 
329  bool reStartFromLastIncrement = false;
330  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
331  if (reStartFromLastIncrement) {
332  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
333  it != m_mapOfTrackers.end(); ++it) {
334  TrackerWrapper *tracker = it->second;
335 
336  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;
337 
338 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
339  vpHomogeneousMatrix c_curr_tTc_curr0 =
340  m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
341  tracker->ctTc0 = c_curr_tTc_curr0;
342 #endif
343  }
344  }
345 
346  if (!reStartFromLastIncrement) {
347  computeVVSWeights();
348 
349  if (computeCovariance) {
350  L_true = m_L;
351  if (!isoJoIdentity_) {
352  vpVelocityTwistMatrix cVo;
353  cVo.buildFrom(m_cMo);
354  LVJ_true = (m_L * (cVo * oJo));
355  }
356  }
357 
358  vpVelocityTwistMatrix cVo;
359  if (iter == 0) {
360  isoJoIdentity_ = true;
361  oJo.eye();
362 
363  // If all the 6 dof should be estimated, we check if the interaction
364  // matrix is full rank. If not, we automatically remove the dof that
365  // cannot be estimated. This is particularly useful when considering
366  // circles (rank 5) and cylinders (rank 4).
367  if (isoJoIdentity_) {
368  cVo.buildFrom(m_cMo);
369 
370  vpMatrix K; // kernel
371  unsigned int rank = (m_L * cVo).kernel(K);
372  if (rank == 0) {
373  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
374  }
375 
376  if (rank != 6) {
377  vpMatrix I; // Identity
378  I.eye(6);
379  oJo = I - K.AtA();
380 
381  isoJoIdentity_ = false;
382  }
383  }
384  }
385 
386  // Weighting
387  double num = 0;
388  double den = 0;
389 
390  unsigned int start_index = 0;
391  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
392  it != m_mapOfTrackers.end(); ++it) {
393  TrackerWrapper *tracker = it->second;
394 
395  if (tracker->m_trackerType & EDGE_TRACKER) {
396  for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
397  double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
398  W_true[start_index + i] = wi;
399  m_weightedError[start_index + i] = wi * m_error[start_index + i];
400 
401  num += wi * vpMath::sqr(m_error[start_index + i]);
402  den += wi;
403 
404  for (unsigned int j = 0; j < m_L.getCols(); j++) {
405  m_L[start_index + i][j] *= wi;
406  }
407  }
408 
409  start_index += tracker->m_error_edge.getRows();
410  }
411 
412 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
413  if (tracker->m_trackerType & KLT_TRACKER) {
414  for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
415  double wi = tracker->m_w_klt[i] * factorKlt;
416  W_true[start_index + i] = wi;
417  m_weightedError[start_index + i] = wi * m_error[start_index + i];
418 
419  num += wi * vpMath::sqr(m_error[start_index + i]);
420  den += wi;
421 
422  for (unsigned int j = 0; j < m_L.getCols(); j++) {
423  m_L[start_index + i][j] *= wi;
424  }
425  }
426 
427  start_index += tracker->m_error_klt.getRows();
428  }
429 #endif
430 
431  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
432  for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
433  double wi = tracker->m_w_depthNormal[i] * factorDepth;
434  W_true[start_index + i] = wi;
435  m_weightedError[start_index + i] = wi * m_error[start_index + i];
436 
437  num += wi * vpMath::sqr(m_error[start_index + i]);
438  den += wi;
439 
440  for (unsigned int j = 0; j < m_L.getCols(); j++) {
441  m_L[start_index + i][j] *= wi;
442  }
443  }
444 
445  start_index += tracker->m_error_depthNormal.getRows();
446  }
447 
448  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
449  for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
450  double wi = tracker->m_w_depthDense[i] * factorDepthDense;
451  W_true[start_index + i] = wi;
452  m_weightedError[start_index + i] = wi * m_error[start_index + i];
453 
454  num += wi * vpMath::sqr(m_error[start_index + i]);
455  den += wi;
456 
457  for (unsigned int j = 0; j < m_L.getCols(); j++) {
458  m_L[start_index + i][j] *= wi;
459  }
460  }
461 
462  start_index += tracker->m_error_depthDense.getRows();
463  }
464  }
465 
466  normRes_1 = normRes;
467  normRes = sqrt(num / den);
468 
469  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
470 
471  cMo_prev = m_cMo;
472 
473  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
474 
475 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
476  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
477  it != m_mapOfTrackers.end(); ++it) {
478  TrackerWrapper *tracker = it->second;
479 
480  vpHomogeneousMatrix c_curr_tTc_curr0 =
481  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
482  tracker->ctTc0 = c_curr_tTc_curr0;
483  }
484 #endif
485 
486  // Update cMo
487  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
488  it != m_mapOfTrackers.end(); ++it) {
489  TrackerWrapper *tracker = it->second;
490  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
491  }
492  }
493 
494  iter++;
495  }
496 
497  // Update features number
498  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
499  it != m_mapOfTrackers.end(); ++it) {
500  TrackerWrapper *tracker = it->second;
501  if (tracker->m_trackerType & EDGE_TRACKER) {
502  m_nb_feat_edge += tracker->m_error_edge.size();
503  }
504 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
505  if (tracker->m_trackerType & KLT_TRACKER) {
506  m_nb_feat_klt += tracker->m_error_klt.size();
507  }
508 #endif
509  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
510  m_nb_feat_depthNormal += tracker->m_error_depthNormal.size();
511  }
512  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
513  m_nb_feat_depthDense += tracker->m_error_depthDense.size();
514  }
515  }
516 
517  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
518 
519  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
520  it != m_mapOfTrackers.end(); ++it) {
521  TrackerWrapper *tracker = it->second;
522 
523  if (tracker->m_trackerType & EDGE_TRACKER) {
524  tracker->updateMovingEdgeWeights();
525  }
526  }
527 }
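// Summary (descriptive comment, a sketch of what the loop above does): each
// computeVVS() iteration stacks the per-camera interaction matrices and residuals,
// applies the robust and per-feature-type weights, and solves a damped Gauss-Newton
// (Levenberg-Marquardt) step for the velocity screw v, roughly
//   v = argmin_v || W L v + W e ||^2   (with damping mu on the normal equations),
// then updates the reference pose through the exponential map, cMo <- exp(v)^-1 * cMo,
// and re-derives every camera pose from the fixed camera transformation matrices.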
528 
529 void vpMbGenericTracker::computeVVSInit()
530 {
531  throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
532 }
533 
534 void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
535 {
536  unsigned int nbFeatures = 0;
537 
538  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
539  it != m_mapOfTrackers.end(); ++it) {
540  TrackerWrapper *tracker = it->second;
541  tracker->computeVVSInit(mapOfImages[it->first]);
542 
543  nbFeatures += tracker->m_error.getRows();
544  }
545 
546  m_L.resize(nbFeatures, 6, false, false);
547  m_error.resize(nbFeatures, false);
548 
549  m_weightedError.resize(nbFeatures, false);
550  m_w.resize(nbFeatures, false);
551  m_w = 1;
552 }
553 
554 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu()
555 {
556  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
557  "computeVVSInteractionMatrixAndR"
558  "esidu() should not be called");
559 }
560 
561 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu(
562  std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
563  std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
564 {
565  unsigned int start_index = 0;
566 
567  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
568  it != m_mapOfTrackers.end(); ++it) {
569  TrackerWrapper *tracker = it->second;
570 
571  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
572 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
573  vpHomogeneousMatrix c_curr_tTc_curr0 =
574  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
575  tracker->ctTc0 = c_curr_tTc_curr0;
576 #endif
577 
578  tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
579 
580  m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
581  m_error.insert(start_index, tracker->m_error);
582 
583  start_index += tracker->m_error.getRows();
584  }
585 }
586 
587 void vpMbGenericTracker::computeVVSWeights()
588 {
589  unsigned int start_index = 0;
590 
591  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
592  it != m_mapOfTrackers.end(); ++it) {
593  TrackerWrapper *tracker = it->second;
594  tracker->computeVVSWeights();
595 
596  m_w.insert(start_index, tracker->m_w);
597  start_index += tracker->m_w.getRows();
598  }
599 }
600 
615  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
616  bool displayFullModel)
617 {
618  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
619  if (it != m_mapOfTrackers.end()) {
620  TrackerWrapper *tracker = it->second;
621  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
622  } else {
623  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
624  }
625 }
626 
640 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
641  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
642  bool displayFullModel)
643 {
644  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
645  if (it != m_mapOfTrackers.end()) {
646  TrackerWrapper *tracker = it->second;
647  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
648  } else {
649  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
650  }
651 }
652 
669 void vpMbGenericTracker::display(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
670  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
671  const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
672  unsigned int thickness, bool displayFullModel)
673 {
674  if (m_mapOfTrackers.size() == 2) {
675  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
676  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
677  ++it;
678 
679  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
680  } else {
681  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
682  << std::endl;
683  }
684 }
685 
702 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I1, const vpImage<vpRGBa> &I2, const vpHomogeneousMatrix &c1Mo,
703  const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
704  const vpCameraParameters &cam2, const vpColor &color, unsigned int thickness,
705  bool displayFullModel)
706 {
707  if (m_mapOfTrackers.size() == 2) {
708  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
709  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
710  ++it;
711 
712  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
713  } else {
714  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
715  << std::endl;
716  }
717 }
718 
730 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
731  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
732  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
733  const vpColor &col, unsigned int thickness, bool displayFullModel)
734 {
735  // Display only for the given images
736  for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
737  it_img != mapOfImages.end(); ++it_img) {
738  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
739  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
740  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
741 
742  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
743  it_cam != mapOfCameraParameters.end()) {
744  TrackerWrapper *tracker = it_tracker->second;
745  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
746  } else {
747  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
748  }
749  }
750 }
751 
763 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
764  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
765  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
766  const vpColor &col, unsigned int thickness, bool displayFullModel)
767 {
768  // Display only for the given images
769  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
770  it_img != mapOfImages.end(); ++it_img) {
771  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
772  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
773  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
774 
775  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
776  it_cam != mapOfCameraParameters.end()) {
777  TrackerWrapper *tracker = it_tracker->second;
778  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
779  } else {
780  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
781  }
782  }
783 }
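// Illustrative usage sketch (assumed variable names such as mapOfImages): displaying
// the model for all cameras after a tracking step.
//
//   std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
//   std::map<std::string, vpCameraParameters> mapOfCams;
//   tracker.getPose(mapOfPoses);
//   tracker.getCameraParameters(mapOfCams);
//   tracker.display(mapOfImages, mapOfPoses, mapOfCams, vpColor::red, 2);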
784 
790 std::vector<std::string> vpMbGenericTracker::getCameraNames() const
791 {
792  std::vector<std::string> cameraNames;
793 
794  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
795  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
796  cameraNames.push_back(it_tracker->first);
797  }
798 
799  return cameraNames;
800 }
801 
802 void vpMbGenericTracker::getCameraParameters(vpCameraParameters &camera) const
803 {
804  vpMbTracker::getCameraParameters(camera);
805 }
806 
815 void vpMbGenericTracker::getCameraParameters(vpCameraParameters &cam1, vpCameraParameters &cam2) const
816 {
817  if (m_mapOfTrackers.size() == 2) {
818  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
819  it->second->getCameraParameters(cam1);
820  ++it;
821 
822  it->second->getCameraParameters(cam2);
823  } else {
824  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
825  << std::endl;
826  }
827 }
828 
834 void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
835 {
836  // Clear the input map
837  mapOfCameraParameters.clear();
838 
839  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
840  it != m_mapOfTrackers.end(); ++it) {
841  vpCameraParameters cam_;
842  it->second->getCameraParameters(cam_);
843  mapOfCameraParameters[it->first] = cam_;
844  }
845 }
846 
853 std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
854 {
855  std::map<std::string, int> trackingTypes;
856 
857  TrackerWrapper *tracker;
858  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
859  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
860  tracker = it_tracker->second;
861  trackingTypes[it_tracker->first] = tracker->getTrackerType();
862  }
863 
864  return trackingTypes;
865 }
866 
875 void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
876 {
877  if (m_mapOfTrackers.size() == 2) {
878  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
879  clippingFlag1 = it->second->getClipping();
880  ++it;
881 
882  clippingFlag2 = it->second->getClipping();
883  } else {
884  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
885  << std::endl;
886  }
887 }
888 
894 void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
895 {
896  mapOfClippingFlags.clear();
897 
898  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
899  it != m_mapOfTrackers.end(); ++it) {
900  TrackerWrapper *tracker = it->second;
901  mapOfClippingFlags[it->first] = tracker->getClipping();
902  }
903 }
904 
911 {
912  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
913  if (it != m_mapOfTrackers.end()) {
914  return it->second->getFaces();
915  }
916 
917  std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
918  return faces;
919 }
920 
927 {
928  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
929  if (it != m_mapOfTrackers.end()) {
930  return it->second->getFaces();
931  }
932 
933  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
934  return faces;
935 }
936 
937 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
941 std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
942 {
943  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
944  if (it != m_mapOfTrackers.end()) {
945  TrackerWrapper *tracker = it->second;
946  return tracker->getFeaturesCircle();
947  } else {
948  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
949  m_referenceCameraName.c_str());
950  }
951 }
952 
956 std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
957 {
958  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
959  if (it != m_mapOfTrackers.end()) {
960  TrackerWrapper *tracker = it->second;
961  return tracker->getFeaturesKltCylinder();
962  } else {
963  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
964  m_referenceCameraName.c_str());
965  }
966 }
967 
971 std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
972 {
973  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
974  if (it != m_mapOfTrackers.end()) {
975  TrackerWrapper *tracker = it->second;
976  return tracker->getFeaturesKlt();
977  } else {
978  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
979  m_referenceCameraName.c_str());
980  }
981 }
982 #endif
983 
1010 std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
1011 {
1012  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1013 
1014  if (it != m_mapOfTrackers.end()) {
1015  return it->second->getFeaturesForDisplay();
1016  } else {
1017  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1018  }
1019 
1020  return std::vector<std::vector<double> >();
1021 }
1022 
1048 void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
1049 {
1050  // Clear the input map
1051  mapOfFeatures.clear();
1052 
1053  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1054  it != m_mapOfTrackers.end(); ++it) {
1055  mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1056  }
1057 }
1058 
1069 
1070 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
1079 std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1080 {
1081  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1082  if (it != m_mapOfTrackers.end()) {
1083  TrackerWrapper *tracker = it->second;
1084  return tracker->getKltImagePoints();
1085  } else {
1086  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1087  }
1088 
1089  return std::vector<vpImagePoint>();
1090 }
1091 
1100 std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1101 {
1102  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1103  if (it != m_mapOfTrackers.end()) {
1104  TrackerWrapper *tracker = it->second;
1105  return tracker->getKltImagePointsWithId();
1106  } else {
1107  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1108  }
1109 
1110  return std::map<int, vpImagePoint>();
1111 }
1112 
1118 unsigned int vpMbGenericTracker::getKltMaskBorder() const
1119 {
1120  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1121  if (it != m_mapOfTrackers.end()) {
1122  TrackerWrapper *tracker = it->second;
1123  return tracker->getKltMaskBorder();
1124  } else {
1125  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1126  }
1127 
1128  return 0;
1129 }
1130 
1137 {
1138  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1139  if (it != m_mapOfTrackers.end()) {
1140  TrackerWrapper *tracker = it->second;
1141  return tracker->getKltNbPoints();
1142  } else {
1143  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1144  }
1145 
1146  return 0;
1147 }
1148 
1154 vpKltOpencv vpMbGenericTracker::getKltOpencv() const
1155 {
1156  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1157 
1158  if (it_tracker != m_mapOfTrackers.end()) {
1159  TrackerWrapper *tracker;
1160  tracker = it_tracker->second;
1161  return tracker->getKltOpencv();
1162  } else {
1163  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1164  }
1165 
1166  return vpKltOpencv();
1167 }
1168 
1177 void vpMbGenericTracker::getKltOpencv(vpKltOpencv &klt1, vpKltOpencv &klt2) const
1178 {
1179  if (m_mapOfTrackers.size() == 2) {
1180  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1181  klt1 = it->second->getKltOpencv();
1182  ++it;
1183 
1184  klt2 = it->second->getKltOpencv();
1185  } else {
1186  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1187  << std::endl;
1188  }
1189 }
1190 
1196 void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1197 {
1198  mapOfKlts.clear();
1199 
1200  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1201  it != m_mapOfTrackers.end(); ++it) {
1202  TrackerWrapper *tracker = it->second;
1203  mapOfKlts[it->first] = tracker->getKltOpencv();
1204  }
1205 }
1206 
1207 #if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
1213 std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1214 {
1215  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1216  if (it != m_mapOfTrackers.end()) {
1217  TrackerWrapper *tracker = it->second;
1218  return tracker->getKltPoints();
1219  } else {
1220  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1221  }
1222 
1223  return std::vector<cv::Point2f>();
1224 }
1225 #endif
1226 
1234 #endif
1235 
1248 void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList, unsigned int level) const
1249 {
1250  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1251  if (it != m_mapOfTrackers.end()) {
1252  it->second->getLcircle(circlesList, level);
1253  } else {
1254  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1255  }
1256 }
1257 
1271 void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1272  unsigned int level) const
1273 {
1274  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1275  if (it != m_mapOfTrackers.end()) {
1276  it->second->getLcircle(circlesList, level);
1277  } else {
1278  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1279  }
1280 }
1281 
1294 void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList, unsigned int level) const
1295 {
1296  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1297  if (it != m_mapOfTrackers.end()) {
1298  it->second->getLcylinder(cylindersList, level);
1299  } else {
1300  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1301  }
1302 }
1303 
1317 void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1318  unsigned int level) const
1319 {
1320  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1321  if (it != m_mapOfTrackers.end()) {
1322  it->second->getLcylinder(cylindersList, level);
1323  } else {
1324  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1325  }
1326 }
1327 
1340 void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList, unsigned int level) const
1341 {
1342  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1343 
1344  if (it != m_mapOfTrackers.end()) {
1345  it->second->getLline(linesList, level);
1346  } else {
1347  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1348  }
1349 }
1350 
1364 void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1365  unsigned int level) const
1366 {
1367  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1368  if (it != m_mapOfTrackers.end()) {
1369  it->second->getLline(linesList, level);
1370  } else {
1371  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1372  }
1373 }
1374 
1404 std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1405  const vpHomogeneousMatrix &cMo,
1406  const vpCameraParameters &cam,
1407  bool displayFullModel)
1408 {
1409  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1410 
1411  if (it != m_mapOfTrackers.end()) {
1412  return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1413  } else {
1414  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1415  }
1416 
1417  return std::vector<std::vector<double> >();
1418 }
1419 
1445 void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1446  const std::map<std::string, unsigned int> &mapOfwidths,
1447  const std::map<std::string, unsigned int> &mapOfheights,
1448  const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1449  const std::map<std::string, vpCameraParameters> &mapOfCams,
1450  bool displayFullModel)
1451 {
1452  // Clear the input map
1453  mapOfModels.clear();
1454 
1455  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1456  it != m_mapOfTrackers.end(); ++it) {
1457  std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1458  std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1459  std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1460  std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1461 
1462  if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
1463  findCam != mapOfCams.end()) {
1464  mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
1465  findCam->second, displayFullModel);
1466  }
1467  }
1468 }
1469 
1475 vpMe vpMbGenericTracker::getMovingEdge() const
1476 {
1477  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1478 
1479  if (it != m_mapOfTrackers.end()) {
1480  return it->second->getMovingEdge();
1481  } else {
1482  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1483  }
1484 
1485  return vpMe();
1486 }
1487 
1496 void vpMbGenericTracker::getMovingEdge(vpMe &me1, vpMe &me2) const
1497 {
1498  if (m_mapOfTrackers.size() == 2) {
1499  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1500  it->second->getMovingEdge(me1);
1501  ++it;
1502 
1503  it->second->getMovingEdge(me2);
1504  } else {
1505  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1506  << std::endl;
1507  }
1508 }
1509 
1515 void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1516 {
1517  mapOfMovingEdges.clear();
1518 
1519  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1520  it != m_mapOfTrackers.end(); ++it) {
1521  TrackerWrapper *tracker = it->second;
1522  mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1523  }
1524 }
1525 
1543 unsigned int vpMbGenericTracker::getNbPoints(unsigned int level) const
1544 {
1545  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1546 
1547  unsigned int nbGoodPoints = 0;
1548  if (it != m_mapOfTrackers.end()) {
1549 
1550  nbGoodPoints = it->second->getNbPoints(level);
1551  } else {
1552  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1553  }
1554 
1555  return nbGoodPoints;
1556 }
1557 
1572 void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, unsigned int level) const
1573 {
1574  mapOfNbPoints.clear();
1575 
1576  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1577  it != m_mapOfTrackers.end(); ++it) {
1578  TrackerWrapper *tracker = it->second;
1579  mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1580  }
1581 }
1582 
1588 unsigned int vpMbGenericTracker::getNbPolygon() const
1589 {
1590  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1591  if (it != m_mapOfTrackers.end()) {
1592  return it->second->getNbPolygon();
1593  }
1594 
1595  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1596  return 0;
1597 }
1598 
1604 void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1605 {
1606  mapOfNbPolygons.clear();
1607 
1608  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1609  it != m_mapOfTrackers.end(); ++it) {
1610  TrackerWrapper *tracker = it->second;
1611  mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1612  }
1613 }
1614 
1625 vpMbtPolygon *vpMbGenericTracker::getPolygon(unsigned int index)
1626 {
1627  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1628  if (it != m_mapOfTrackers.end()) {
1629  return it->second->getPolygon(index);
1630  }
1631 
1632  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1633  return NULL;
1634 }
1635 
1647 vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, unsigned int index)
1648 {
1649  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1650  if (it != m_mapOfTrackers.end()) {
1651  return it->second->getPolygon(index);
1652  }
1653 
1654  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1655  return NULL;
1656 }
1657 
1673 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1674 vpMbGenericTracker::getPolygonFaces(bool orderPolygons, bool useVisibility, bool clipPolygon)
1675 {
1676  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1677 
1678  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1679  if (it != m_mapOfTrackers.end()) {
1680  TrackerWrapper *tracker = it->second;
1681  polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1682  } else {
1683  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1684  }
1685 
1686  return polygonFaces;
1687 }
1688 
1706 void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1707  std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1708  bool orderPolygons, bool useVisibility, bool clipPolygon)
1709 {
1710  mapOfPolygons.clear();
1711  mapOfPoints.clear();
1712 
1713  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1714  it != m_mapOfTrackers.end(); ++it) {
1715  TrackerWrapper *tracker = it->second;
1716  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1717  tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1718 
1719  mapOfPolygons[it->first] = polygonFaces.first;
1720  mapOfPoints[it->first] = polygonFaces.second;
1721  }
1722 }
1723 
1725 
1734 void vpMbGenericTracker::getPose(vpHomogeneousMatrix &c1Mo, vpHomogeneousMatrix &c2Mo) const
1735 {
1736  if (m_mapOfTrackers.size() == 2) {
1737  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1738  it->second->getPose(c1Mo);
1739  ++it;
1740 
1741  it->second->getPose(c2Mo);
1742  } else {
1743  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1744  << std::endl;
1745  }
1746 }
1747 
1753 void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1754 {
1755  // Clear the map
1756  mapOfCameraPoses.clear();
1757 
1758  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1759  it != m_mapOfTrackers.end(); ++it) {
1760  TrackerWrapper *tracker = it->second;
1761  tracker->getPose(mapOfCameraPoses[it->first]);
1762  }
1763 }
1764 
1769 
1773 int vpMbGenericTracker::getTrackerType() const
1774 {
1775  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1776  if (it != m_mapOfTrackers.end()) {
1777  TrackerWrapper *tracker = it->second;
1778  return tracker->getTrackerType();
1779  } else {
1780  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1781  m_referenceCameraName.c_str());
1782  }
1783 }
1784 
1785 void vpMbGenericTracker::init(const vpImage<unsigned char> &I)
1786 {
1787  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1788  it != m_mapOfTrackers.end(); ++it) {
1789  TrackerWrapper *tracker = it->second;
1790  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
1791  tracker->init(I);
1792  }
1793 }
1794 
1795 void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
1796  double /*radius*/, int /*idFace*/, const std::string & /*name*/)
1797 {
1798  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
1799 }
1800 
1801 #ifdef VISP_HAVE_MODULE_GUI
1802 
1845 void vpMbGenericTracker::initClick(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1846  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1847  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1848 {
1849  if (m_mapOfTrackers.size() == 2) {
1850  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1851  TrackerWrapper *tracker = it->second;
1852  tracker->initClick(I1, initFile1, displayHelp, T1);
1853 
1854  ++it;
1855 
1856  tracker = it->second;
1857  tracker->initClick(I2, initFile2, displayHelp, T2);
1858 
1859  it = m_mapOfTrackers.find(m_referenceCameraName);
1860  if (it != m_mapOfTrackers.end()) {
1861  tracker = it->second;
1862 
1863  // Set the reference cMo
1864  tracker->getPose(m_cMo);
1865  }
1866  } else {
1867  throw vpException(vpTrackingException::initializationError,
1868  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1869  }
1870 }
1871 
1914 void vpMbGenericTracker::initClick(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
1915  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1916  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1917 {
1918  if (m_mapOfTrackers.size() == 2) {
1919  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1920  TrackerWrapper *tracker = it->second;
1921  tracker->initClick(I_color1, initFile1, displayHelp, T1);
1922 
1923  ++it;
1924 
1925  tracker = it->second;
1926  tracker->initClick(I_color2, initFile2, displayHelp, T2);
1927 
1928  it = m_mapOfTrackers.find(m_referenceCameraName);
1929  if (it != m_mapOfTrackers.end()) {
1930  tracker = it->second;
1931 
1932  // Set the reference cMo
1933  tracker->getPose(m_cMo);
1934  }
1935  } else {
1936  throw vpException(vpTrackingException::initializationError,
1937  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1938  }
1939 }
1940 
1983 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
1984  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
1985  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
1986 {
1987  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1988  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
1989  mapOfImages.find(m_referenceCameraName);
1990  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
1991 
1992  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
1993  TrackerWrapper *tracker = it_tracker->second;
1994  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
1995  if (it_T != mapOfT.end())
1996  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
1997  else
1998  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
1999  tracker->getPose(m_cMo);
2000  } else {
2001  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2002  }
2003 
2004  // Vector of missing initFile for cameras
2005  std::vector<std::string> vectorOfMissingCameraPoses;
2006 
2007  // Set pose for the specified cameras
2008  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2009  if (it_tracker->first != m_referenceCameraName) {
2010  it_img = mapOfImages.find(it_tracker->first);
2011  it_initFile = mapOfInitFiles.find(it_tracker->first);
2012 
2013  if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2014  // InitClick for the current camera
2015  TrackerWrapper *tracker = it_tracker->second;
2016  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2017  } else {
2018  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2019  }
2020  }
2021  }
2022 
2023  // Init for cameras that do not have an initFile
2024  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2025  it != vectorOfMissingCameraPoses.end(); ++it) {
2026  it_img = mapOfImages.find(*it);
2027  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2028  m_mapOfCameraTransformationMatrix.find(*it);
2029 
2030  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2031  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2032  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2033  m_mapOfTrackers[*it]->init(*it_img->second);
2034  } else {
2035  throw vpException(vpTrackingException::initializationError,
2036  "Missing image or missing camera transformation "
2037  "matrix! Cannot set the pose for camera: %s!",
2038  it->c_str());
2039  }
2040  }
2041 }
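// Illustrative usage sketch (camera names, image variables and the init file path are
// assumptions chosen for the example): click-based initialization where only the
// reference camera has an init file; the pose of the second camera is deduced from
// its camera transformation matrix.
//
//   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
//   mapOfImages["Camera1"] = &I1;
//   mapOfImages["Camera2"] = &I2;
//   std::map<std::string, std::string> mapOfInitFiles;
//   mapOfInitFiles["Camera1"] = "cube.init"; // reference camera only
//   tracker.initClick(mapOfImages, mapOfInitFiles, true);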
2042 
2085 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2086  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2087  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2088 {
2089  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2090  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2091  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2092 
2093  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2094  TrackerWrapper *tracker = it_tracker->second;
2095  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2096  if (it_T != mapOfT.end())
2097  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2098  else
2099  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2100  tracker->getPose(m_cMo);
2101  } else {
2102  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2103  }
2104 
2105  // Vector of missing initFile for cameras
2106  std::vector<std::string> vectorOfMissingCameraPoses;
2107 
2108  // Set pose for the specified cameras
2109  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2110  if (it_tracker->first != m_referenceCameraName) {
2111  it_img = mapOfColorImages.find(it_tracker->first);
2112  it_initFile = mapOfInitFiles.find(it_tracker->first);
2113 
2114  if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2115  // InitClick for the current camera
2116  TrackerWrapper *tracker = it_tracker->second;
2117  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2118  } else {
2119  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2120  }
2121  }
2122  }
2123 
2124  // Init for cameras that do not have an initFile
2125  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2126  it != vectorOfMissingCameraPoses.end(); ++it) {
2127  it_img = mapOfColorImages.find(*it);
2128  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2129  m_mapOfCameraTransformationMatrix.find(*it);
2130 
2131  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2132  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2133  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2134  vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
2135  m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
2136  } else {
2137  throw vpException(vpTrackingException::initializationError,
2138  "Missing image or missing camera transformation "
2139  "matrix! Cannot set the pose for camera: %s!",
2140  it->c_str());
2141  }
2142  }
2143 }
2144 #endif
2145 
2146 void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
2147  const int /*idFace*/, const std::string & /*name*/)
2148 {
2149  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
2150 }
2151 
2152 void vpMbGenericTracker::initFaceFromCorners(vpMbtPolygon & /*polygon*/)
2153 {
2154  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
2155 }
2156 
2157 void vpMbGenericTracker::initFaceFromLines(vpMbtPolygon & /*polygon*/)
2158 {
2159  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
2160 }
2161 
2191 void vpMbGenericTracker::initFromPoints(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2192  const std::string &initFile1, const std::string &initFile2)
2193 {
2194  if (m_mapOfTrackers.size() == 2) {
2195  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2196  TrackerWrapper *tracker = it->second;
2197  tracker->initFromPoints(I1, initFile1);
2198 
2199  ++it;
2200 
2201  tracker = it->second;
2202  tracker->initFromPoints(I2, initFile2);
2203 
2204  it = m_mapOfTrackers.find(m_referenceCameraName);
2205  if (it != m_mapOfTrackers.end()) {
2206  tracker = it->second;
2207 
2208  // Set the reference cMo
2209  tracker->getPose(m_cMo);
2210 
2211  // Set the reference camera parameters
2212  tracker->getCameraParameters(m_cam);
2213  }
2214  } else {
2215  throw vpException(vpTrackingException::initializationError,
2216  "Cannot initFromPoints()! Require two cameras but "
2217  "there are %d cameras!",
2218  m_mapOfTrackers.size());
2219  }
2220 }
2221 
2251 void vpMbGenericTracker::initFromPoints(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2252  const std::string &initFile1, const std::string &initFile2)
2253 {
2254  if (m_mapOfTrackers.size() == 2) {
2255  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2256  TrackerWrapper *tracker = it->second;
2257  tracker->initFromPoints(I_color1, initFile1);
2258 
2259  ++it;
2260 
2261  tracker = it->second;
2262  tracker->initFromPoints(I_color2, initFile2);
2263 
2264  it = m_mapOfTrackers.find(m_referenceCameraName);
2265  if (it != m_mapOfTrackers.end()) {
2266  tracker = it->second;
2267 
2268  // Set the reference cMo
2269  tracker->getPose(m_cMo);
2270 
2271  // Set the reference camera parameters
2272  tracker->getCameraParameters(m_cam);
2273  }
2274  } else {
2275  throw vpException(vpTrackingException::initializationError,
2276  "Cannot initFromPoints()! Require two cameras but "
2277  "there are %d cameras!",
2278  m_mapOfTrackers.size());
2279  }
2280 }
2281 
2282 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2283  const std::map<std::string, std::string> &mapOfInitPoints)
2284 {
2285  // Set the reference cMo
2286  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2287  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2288  mapOfImages.find(m_referenceCameraName);
2289  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2290 
2291  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2292  TrackerWrapper *tracker = it_tracker->second;
2293  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2294  tracker->getPose(m_cMo);
2295  } else {
2296  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2297  }
2298 
2299  // Vector of missing initPoints for cameras
2300  std::vector<std::string> vectorOfMissingCameraPoints;
2301 
2302  // Set pose for the specified cameras
2303  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2304  it_img = mapOfImages.find(it_tracker->first);
2305  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2306 
2307  if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2308  // Set pose
2309  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2310  } else {
2311  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2312  }
2313  }
2314 
2315  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2316  it != vectorOfMissingCameraPoints.end(); ++it) {
2317  it_img = mapOfImages.find(*it);
2318  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2319  m_mapOfCameraTransformationMatrix.find(*it);
2320 
2321  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2322  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2323  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2324  } else {
2325  throw vpException(vpTrackingException::initializationError,
2326  "Missing image or missing camera transformation "
2327  "matrix! Cannot init the pose for camera: %s!",
2328  it->c_str());
2329  }
2330  }
2331 }
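/*
 * Usage sketch (added example, not part of the original source): initializing a
 * two-camera tracker from one .init point file per camera. It assumes a
 * vpMbGenericTracker named tracker built beforehand, grayscale images I1/I2, and
 * illustrative camera names "Camera1"/"Camera2" and file names.
 *
 * \code
 * std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
 * mapOfImages["Camera1"] = &I1;
 * mapOfImages["Camera2"] = &I2;
 * std::map<std::string, std::string> mapOfInitPoints;
 * mapOfInitPoints["Camera1"] = "cube1.init";
 * mapOfInitPoints["Camera2"] = "cube2.init";
 * tracker.initFromPoints(mapOfImages, mapOfInitPoints);
 * \endcode
 */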
2332 
2333 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2334  const std::map<std::string, std::string> &mapOfInitPoints)
2335 {
2336  // Set the reference cMo
2337  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2338  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2339  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2340 
2341  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
2342  it_initPoints != mapOfInitPoints.end()) {
2343  TrackerWrapper *tracker = it_tracker->second;
2344  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2345  tracker->getPose(m_cMo);
2346  } else {
2347  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2348  }
2349 
2350  // Vector of missing initPoints for cameras
2351  std::vector<std::string> vectorOfMissingCameraPoints;
2352 
2353  // Set pose for the specified cameras
2354  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2355  it_img = mapOfColorImages.find(it_tracker->first);
2356  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2357 
2358  if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2359  // Set pose
2360  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2361  } else {
2362  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2363  }
2364  }
2365 
2366  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2367  it != vectorOfMissingCameraPoints.end(); ++it) {
2368  it_img = mapOfColorImages.find(*it);
2369  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2370  m_mapOfCameraTransformationMatrix.find(*it);
2371 
2372  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2373  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2374  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2375  } else {
2376  throw vpException(vpTrackingException::initializationError,
2377  "Missing image or missing camera transformation "
2378  "matrix! Cannot init the pose for camera: %s!",
2379  it->c_str());
2380  }
2381  }
2382 }
2383 
2384 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo)
2385 {
2386  vpMbTracker::initFromPose(I, cMo);
2387 }
2388 
2400 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2401  const std::string &initFile1, const std::string &initFile2)
2402 {
2403  if (m_mapOfTrackers.size() == 2) {
2404  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2405  TrackerWrapper *tracker = it->second;
2406  tracker->initFromPose(I1, initFile1);
2407 
2408  ++it;
2409 
2410  tracker = it->second;
2411  tracker->initFromPose(I2, initFile2);
2412 
2413  it = m_mapOfTrackers.find(m_referenceCameraName);
2414  if (it != m_mapOfTrackers.end()) {
2415  tracker = it->second;
2416 
2417  // Set the reference cMo
2418  tracker->getPose(m_cMo);
2419 
2420  // Set the reference camera parameters
2421  tracker->getCameraParameters(m_cam);
2422  }
2423  } else {
2424  throw vpException(vpTrackingException::initializationError,
2425  "Cannot initFromPose()! Require two cameras but there "
2426  "are %d cameras!",
2427  m_mapOfTrackers.size());
2428  }
2429 }
2430 
2442 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2443  const std::string &initFile1, const std::string &initFile2)
2444 {
2445  if (m_mapOfTrackers.size() == 2) {
2446  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2447  TrackerWrapper *tracker = it->second;
2448  tracker->initFromPose(I_color1, initFile1);
2449 
2450  ++it;
2451 
2452  tracker = it->second;
2453  tracker->initFromPose(I_color2, initFile2);
2454 
2455  it = m_mapOfTrackers.find(m_referenceCameraName);
2456  if (it != m_mapOfTrackers.end()) {
2457  tracker = it->second;
2458 
2459  // Set the reference cMo
2460  tracker->getPose(m_cMo);
2461 
2462  // Set the reference camera parameters
2463  tracker->getCameraParameters(m_cam);
2464  }
2465  } else {
2466  throw vpException(vpTrackingException::initializationError,
2467  "Cannot initFromPose()! Require two cameras but there "
2468  "are %d cameras!",
2469  m_mapOfTrackers.size());
2470  }
2471 }
2472 
2487 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2488  const std::map<std::string, std::string> &mapOfInitPoses)
2489 {
2490  // Set the reference cMo
2491  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2492  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2493  mapOfImages.find(m_referenceCameraName);
2494  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2495 
2496  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2497  TrackerWrapper *tracker = it_tracker->second;
2498  tracker->initFromPose(*it_img->second, it_initPose->second);
2499  tracker->getPose(m_cMo);
2500  } else {
2501  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2502  }
2503 
2504  // Vector of missing pose matrices for cameras
2505  std::vector<std::string> vectorOfMissingCameraPoses;
2506 
2507  // Set pose for the specified cameras
2508  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2509  it_img = mapOfImages.find(it_tracker->first);
2510  it_initPose = mapOfInitPoses.find(it_tracker->first);
2511 
2512  if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2513  // Set pose
2514  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2515  } else {
2516  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2517  }
2518  }
2519 
2520  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2521  it != vectorOfMissingCameraPoses.end(); ++it) {
2522  it_img = mapOfImages.find(*it);
2523  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2524  m_mapOfCameraTransformationMatrix.find(*it);
2525 
2526  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2527  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2528  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2529  } else {
2530  throw vpException(vpTrackingException::initializationError,
2531  "Missing image or missing camera transformation "
2532  "matrix! Cannot init the pose for camera: %s!",
2533  it->c_str());
2534  }
2535  }
2536 }
2537 
2552 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2553  const std::map<std::string, std::string> &mapOfInitPoses)
2554 {
2555  // Set the reference cMo
2556  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2557  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2558  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2559 
2560  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2561  TrackerWrapper *tracker = it_tracker->second;
2562  tracker->initFromPose(*it_img->second, it_initPose->second);
2563  tracker->getPose(m_cMo);
2564  } else {
2565  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2566  }
2567 
2568  // Vector of missing pose matrices for cameras
2569  std::vector<std::string> vectorOfMissingCameraPoses;
2570 
2571  // Set pose for the specified cameras
2572  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2573  it_img = mapOfColorImages.find(it_tracker->first);
2574  it_initPose = mapOfInitPoses.find(it_tracker->first);
2575 
2576  if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2577  // Set pose
2578  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2579  } else {
2580  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2581  }
2582  }
2583 
2584  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2585  it != vectorOfMissingCameraPoses.end(); ++it) {
2586  it_img = mapOfColorImages.find(*it);
2587  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2588  m_mapOfCameraTransformationMatrix.find(*it);
2589 
2590  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2591  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2592  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2593  } else {
2594  throw vpException(vpTrackingException::initializationError,
2595  "Missing image or missing camera transformation "
2596  "matrix! Cannot init the pose for camera: %s!",
2597  it->c_str());
2598  }
2599  }
2600 }
2601 
2612 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2613  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2614 {
2615  if (m_mapOfTrackers.size() == 2) {
2616  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2617  it->second->initFromPose(I1, c1Mo);
2618 
2619  ++it;
2620 
2621  it->second->initFromPose(I2, c2Mo);
2622 
2623  m_cMo = c1Mo;
2624  } else {
2625  throw vpException(vpTrackingException::fatalError,
2626  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2627  }
2628 }
2629 
2640 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2641  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2642 {
2643  if (m_mapOfTrackers.size() == 2) {
2644  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2645  it->second->initFromPose(I_color1, c1Mo);
2646 
2647  ++it;
2648 
2649  it->second->initFromPose(I_color2, c2Mo);
2650 
2651  m_cMo = c1Mo;
2652  } else {
2653  throw vpException(vpTrackingException::fatalError,
2654  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2655  }
2656 }
2657 
2671 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2672  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2673 {
2674  // Set the reference cMo
2675  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2676  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2677  mapOfImages.find(m_referenceCameraName);
2678  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2679 
2680  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2681  TrackerWrapper *tracker = it_tracker->second;
2682  tracker->initFromPose(*it_img->second, it_camPose->second);
2683  tracker->getPose(m_cMo);
2684  } else {
2685  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2686  }
2687 
2688  // Vector of missing pose matrices for cameras
2689  std::vector<std::string> vectorOfMissingCameraPoses;
2690 
2691  // Set pose for the specified cameras
2692  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2693  it_img = mapOfImages.find(it_tracker->first);
2694  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2695 
2696  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2697  // Set pose
2698  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2699  } else {
2700  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2701  }
2702  }
2703 
2704  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2705  it != vectorOfMissingCameraPoses.end(); ++it) {
2706  it_img = mapOfImages.find(*it);
2707  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2708  m_mapOfCameraTransformationMatrix.find(*it);
2709 
2710  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2711  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2712  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2713  } else {
2714  throw vpException(vpTrackingException::initializationError,
2715  "Missing image or missing camera transformation "
2716  "matrix! Cannot set the pose for camera: %s!",
2717  it->c_str());
2718  }
2719  }
2720 }
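/*
 * Usage sketch (added example, not part of the original source): initializing
 * every camera from a known pose. Only the reference camera entry is mandatory;
 * as the method above shows, missing cameras are recovered through the stored
 * camera transformation matrices. tracker, mapOfImages, c1Mo and c2Mo are
 * assumed to exist; the camera names are illustrative.
 *
 * \code
 * std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
 * mapOfPoses["Camera1"] = c1Mo;
 * mapOfPoses["Camera2"] = c2Mo;
 * tracker.initFromPose(mapOfImages, mapOfPoses);
 * \endcode
 */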
2721 
2735 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2736  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2737 {
2738  // Set the reference cMo
2739  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2740  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2741  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2742 
2743  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2744  TrackerWrapper *tracker = it_tracker->second;
2745  tracker->initFromPose(*it_img->second, it_camPose->second);
2746  tracker->getPose(m_cMo);
2747  } else {
2748  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2749  }
2750 
2751  // Vector of missing pose matrices for cameras
2752  std::vector<std::string> vectorOfMissingCameraPoses;
2753 
2754  // Set pose for the specified cameras
2755  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2756  it_img = mapOfColorImages.find(it_tracker->first);
2757  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2758 
2759  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2760  // Set pose
2761  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2762  } else {
2763  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2764  }
2765  }
2766 
2767  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2768  it != vectorOfMissingCameraPoses.end(); ++it) {
2769  it_img = mapOfColorImages.find(*it);
2770  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2771  m_mapOfCameraTransformationMatrix.find(*it);
2772 
2773  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2774  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2775  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2776  } else {
2777  throw vpException(vpTrackingException::initializationError,
2778  "Missing image or missing camera transformation "
2779  "matrix! Cannot set the pose for camera: %s!",
2780  it->c_str());
2781  }
2782  }
2783 }
2784 
2796 void vpMbGenericTracker::loadConfigFile(const std::string &configFile, bool verbose)
2797 {
2798  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2799  it != m_mapOfTrackers.end(); ++it) {
2800  TrackerWrapper *tracker = it->second;
2801  tracker->loadConfigFile(configFile, verbose);
2802  }
2803 
2804  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
2805  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2806  }
2807 
2808  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2809  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2810  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2811  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2812 }
2813 
2828 void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2, bool verbose)
2829 {
2830  if (m_mapOfTrackers.size() != 2) {
2831  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
2832  }
2833 
2834  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2835  TrackerWrapper *tracker = it_tracker->second;
2836  tracker->loadConfigFile(configFile1, verbose);
2837 
2838  ++it_tracker;
2839  tracker = it_tracker->second;
2840  tracker->loadConfigFile(configFile2, verbose);
2841 
2842  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
2843  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2844  }
2845 
2846  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2847  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2848  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2849  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2850 }
2851 
2865 void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles, bool verbose)
2866 {
2867  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2868  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2869  TrackerWrapper *tracker = it_tracker->second;
2870 
2871  std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
2872  if (it_config != mapOfConfigFiles.end()) {
2873  tracker->loadConfigFile(it_config->second, verbose);
2874  } else {
2875  throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
2876  it_tracker->first.c_str());
2877  }
2878  }
2879 
2880  // Set the reference camera parameters
2881  std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
2882  if (it != m_mapOfTrackers.end()) {
2883  TrackerWrapper *tracker = it->second;
2884  tracker->getCameraParameters(m_cam);
2885 
2886  // Set clipping
2887  this->clippingFlag = tracker->getClipping();
2888  this->angleAppears = tracker->getAngleAppear();
2889  this->angleDisappears = tracker->getAngleDisappear();
2890  } else {
2891  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
2892  m_referenceCameraName.c_str());
2893  }
2894 }
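/*
 * Usage sketch (added example, not part of the original source): one XML
 * settings file per camera. Every camera present in the tracker must have an
 * entry, otherwise the method above throws. File and camera names are
 * assumptions for illustration.
 *
 * \code
 * std::map<std::string, std::string> mapOfConfigFiles;
 * mapOfConfigFiles["Camera1"] = "camera1.xml";
 * mapOfConfigFiles["Camera2"] = "camera2.xml";
 * tracker.loadConfigFile(mapOfConfigFiles);
 * \endcode
 */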
2895 
2914 void vpMbGenericTracker::loadModel(const std::string &modelFile, bool verbose, const vpHomogeneousMatrix &T)
2915 {
2916  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2917  it != m_mapOfTrackers.end(); ++it) {
2918  TrackerWrapper *tracker = it->second;
2919  tracker->loadModel(modelFile, verbose, T);
2920  }
2921 }
2922 
2945 void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, bool verbose,
2946  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
2947 {
2948  if (m_mapOfTrackers.size() != 2) {
2949  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
2950  }
2951 
2952  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2953  TrackerWrapper *tracker = it_tracker->second;
2954  tracker->loadModel(modelFile1, verbose, T1);
2955 
2956  ++it_tracker;
2957  tracker = it_tracker->second;
2958  tracker->loadModel(modelFile2, verbose, T2);
2959 }
2960 
2980 void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, bool verbose,
2981  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2982 {
2983  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2984  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2985  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
2986 
2987  if (it_model != mapOfModelFiles.end()) {
2988  TrackerWrapper *tracker = it_tracker->second;
2989  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2990 
2991  if (it_T != mapOfT.end())
2992  tracker->loadModel(it_model->second, verbose, it_T->second);
2993  else
2994  tracker->loadModel(it_model->second, verbose);
2995  } else {
2996  throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
2997  it_tracker->first.c_str());
2998  }
2999  }
3000 }
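/*
 * Usage sketch (added example, not part of the original source): loading the
 * same CAD model for both cameras, with an extra transformation applied only to
 * the second one, as the per-camera mapOfT lookup above allows. The file name
 * "cube.cao" and the matrix oTo2 are assumptions.
 *
 * \code
 * std::map<std::string, std::string> mapOfModels;
 * mapOfModels["Camera1"] = "cube.cao";
 * mapOfModels["Camera2"] = "cube.cao";
 * std::map<std::string, vpHomogeneousMatrix> mapOfT;
 * mapOfT["Camera2"] = oTo2; // only the second camera model is transformed
 * tracker.loadModel(mapOfModels, false, mapOfT);
 * \endcode
 */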
3001 
3002 #ifdef VISP_HAVE_PCL
3003 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3004  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3005 {
3006  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3007  it != m_mapOfTrackers.end(); ++it) {
3008  TrackerWrapper *tracker = it->second;
3009  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3010  }
3011 }
3012 #endif
3013 
3014 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3015  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3016  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3017  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3018 {
3019  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3020  it != m_mapOfTrackers.end(); ++it) {
3021  TrackerWrapper *tracker = it->second;
3022  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3023  mapOfPointCloudHeights[it->first]);
3024  }
3025 }
3026 
3038 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
3039  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3040 {
3041  if (m_mapOfTrackers.size() != 1) {
3042  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3043  m_mapOfTrackers.size());
3044  }
3045 
3046  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3047  if (it_tracker != m_mapOfTrackers.end()) {
3048  TrackerWrapper *tracker = it_tracker->second;
3049  tracker->reInitModel(I, cad_name, cMo, verbose, T);
3050 
3051  // Set reference pose
3052  tracker->getPose(m_cMo);
3053  } else {
3054  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3055  }
3056 
3057  modelInitialised = true;
3058 }
3059 
3071 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
3072  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3073 {
3074  if (m_mapOfTrackers.size() != 1) {
3075  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3076  m_mapOfTrackers.size());
3077  }
3078 
3079  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3080  if (it_tracker != m_mapOfTrackers.end()) {
3081  TrackerWrapper *tracker = it_tracker->second;
3082  tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3083 
3084  // Set reference pose
3085  tracker->getPose(m_cMo);
3086  } else {
3087  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3088  }
3089 
3090  modelInitialised = true;
3091 }
3092 
3113 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
3114  const std::string &cad_name1, const std::string &cad_name2,
3115  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3116  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3117 {
3118  if (m_mapOfTrackers.size() == 2) {
3119  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3120 
3121  it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3122 
3123  ++it_tracker;
3124 
3125  it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3126 
3127  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3128  if (it_tracker != m_mapOfTrackers.end()) {
3129  // Set reference pose
3130  it_tracker->second->getPose(m_cMo);
3131  }
3132  } else {
3133  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3134  }
3135 
3136  modelInitialised = true;
3137 }
3138 
3159 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
3160  const std::string &cad_name1, const std::string &cad_name2,
3161  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3162  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3163 {
3164  if (m_mapOfTrackers.size() == 2) {
3165  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3166 
3167  it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3168 
3169  ++it_tracker;
3170 
3171  it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3172 
3173  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3174  if (it_tracker != m_mapOfTrackers.end()) {
3175  // Set reference pose
3176  it_tracker->second->getPose(m_cMo);
3177  }
3178  } else {
3179  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3180  }
3181 
3182  modelInitialised = true;
3183 }
3184 
3199 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3200  const std::map<std::string, std::string> &mapOfModelFiles,
3201  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3202  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3203 {
3204  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3205  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3206  mapOfImages.find(m_referenceCameraName);
3207  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3208  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3209 
3210  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3211  it_camPose != mapOfCameraPoses.end()) {
3212  TrackerWrapper *tracker = it_tracker->second;
3213  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3214  if (it_T != mapOfT.end())
3215  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3216  else
3217  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3218 
3219  // Set reference pose
3220  tracker->getPose(m_cMo);
3221  } else {
3222  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3223  }
3224 
3225  std::vector<std::string> vectorOfMissingCameras;
3226  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3227  if (it_tracker->first != m_referenceCameraName) {
3228  it_img = mapOfImages.find(it_tracker->first);
3229  it_model = mapOfModelFiles.find(it_tracker->first);
3230  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3231 
3232  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3233  TrackerWrapper *tracker = it_tracker->second;
3234  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3235  } else {
3236  vectorOfMissingCameras.push_back(it_tracker->first);
3237  }
3238  }
3239  }
3240 
3241  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3242  ++it) {
3243  it_img = mapOfImages.find(*it);
3244  it_model = mapOfModelFiles.find(*it);
3245  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3246  m_mapOfCameraTransformationMatrix.find(*it);
3247 
3248  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3249  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3250  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3251  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3252  }
3253  }
3254 
3255  modelInitialised = true;
3256 }
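/*
 * Usage sketch (added example, not part of the original source): re-initializing
 * the tracker with a new CAD model and new per-camera poses, typically after the
 * tracked object has been swapped. tracker, mapOfImages, c1Mo and c2Mo are
 * assumed to exist; the model file name is illustrative.
 *
 * \code
 * std::map<std::string, std::string> mapOfModels;
 * mapOfModels["Camera1"] = "object2.cao";
 * mapOfModels["Camera2"] = "object2.cao";
 * std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
 * mapOfPoses["Camera1"] = c1Mo;
 * mapOfPoses["Camera2"] = c2Mo;
 * tracker.reInitModel(mapOfImages, mapOfModels, mapOfPoses, false);
 * \endcode
 */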
3257 
3272 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3273  const std::map<std::string, std::string> &mapOfModelFiles,
3274  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3275  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3276 {
3277  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3278  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
3279  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3280  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3281 
3282  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3283  it_camPose != mapOfCameraPoses.end()) {
3284  TrackerWrapper *tracker = it_tracker->second;
3285  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3286  if (it_T != mapOfT.end())
3287  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3288  else
3289  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3290 
3291  // Set reference pose
3292  tracker->getPose(m_cMo);
3293  } else {
3294  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3295  }
3296 
3297  std::vector<std::string> vectorOfMissingCameras;
3298  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3299  if (it_tracker->first != m_referenceCameraName) {
3300  it_img = mapOfColorImages.find(it_tracker->first);
3301  it_model = mapOfModelFiles.find(it_tracker->first);
3302  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3303 
3304  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3305  it_camPose != mapOfCameraPoses.end()) {
3306  TrackerWrapper *tracker = it_tracker->second;
3307  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3308  } else {
3309  vectorOfMissingCameras.push_back(it_tracker->first);
3310  }
3311  }
3312  }
3313 
3314  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3315  ++it) {
3316  it_img = mapOfColorImages.find(*it);
3317  it_model = mapOfModelFiles.find(*it);
3318  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3319  m_mapOfCameraTransformationMatrix.find(*it);
3320 
3321  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3322  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3323  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3324  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3325  }
3326  }
3327 
3328  modelInitialised = true;
3329 }
3330 
3335 void vpMbGenericTracker::resetTracker()
3336 {
3337  m_cMo.eye();
3338 
3339  useScanLine = false;
3340 
3341 #ifdef VISP_HAVE_OGRE
3342  useOgre = false;
3343 #endif
3344 
3345  m_computeInteraction = true;
3346  m_lambda = 1.0;
3347 
3348  angleAppears = vpMath::rad(89);
3349  angleDisappears = vpMath::rad(89);
3350  clippingFlag = vpPolygon3D::NO_CLIPPING;
3351  distNearClip = 0.001;
3352  distFarClip = 100;
3353 
3355  m_maxIter = 30;
3356  m_stopCriteriaEpsilon = 1e-8;
3357  m_initialMu = 0.01;
3358 
3359  // Only for Edge
3360  m_percentageGdPt = 0.4;
3361 
3362  // Only for KLT
3363  m_thresholdOutlier = 0.5;
3364 
3365  // Reset default ponderation between each feature type
3366  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
3367 
3368 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3369  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
3370 #endif
3371 
3372  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
3373  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
3374 
3375  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3376  it != m_mapOfTrackers.end(); ++it) {
3377  TrackerWrapper *tracker = it->second;
3378  tracker->resetTracker();
3379  }
3380 }
3381 
3391 void vpMbGenericTracker::setAngleAppear(const double &a)
3392 {
3393  vpMbTracker::setAngleAppear(a);
3394 
3395  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3396  it != m_mapOfTrackers.end(); ++it) {
3397  TrackerWrapper *tracker = it->second;
3398  tracker->setAngleAppear(a);
3399  }
3400 }
3401 
3414 void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3415 {
3416  if (m_mapOfTrackers.size() == 2) {
3417  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3418  it->second->setAngleAppear(a1);
3419 
3420  ++it;
3421  it->second->setAngleAppear(a2);
3422 
3423  it = m_mapOfTrackers.find(m_referenceCameraName);
3424  if (it != m_mapOfTrackers.end()) {
3425  angleAppears = it->second->getAngleAppear();
3426  } else {
3427  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3428  }
3429  } else {
3430  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3431  m_mapOfTrackers.size());
3432  }
3433 }
3434 
3444 void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3445 {
3446  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3447  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3448 
3449  if (it_tracker != m_mapOfTrackers.end()) {
3450  TrackerWrapper *tracker = it_tracker->second;
3451  tracker->setAngleAppear(it->second);
3452 
3453  if (it->first == m_referenceCameraName) {
3454  angleAppears = it->second;
3455  }
3456  }
3457  }
3458 }
3459 
3469 void vpMbGenericTracker::setAngleDisappear(const double &a)
3470 {
3471  vpMbTracker::setAngleDisappear(a);
3472 
3473  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3474  it != m_mapOfTrackers.end(); ++it) {
3475  TrackerWrapper *tracker = it->second;
3476  tracker->setAngleDisappear(a);
3477  }
3478 }
3479 
3492 void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3493 {
3494  if (m_mapOfTrackers.size() == 2) {
3495  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3496  it->second->setAngleDisappear(a1);
3497 
3498  ++it;
3499  it->second->setAngleDisappear(a2);
3500 
3501  it = m_mapOfTrackers.find(m_referenceCameraName);
3502  if (it != m_mapOfTrackers.end()) {
3503  angleDisappears = it->second->getAngleDisappear();
3504  } else {
3505  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3506  }
3507  } else {
3508  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3509  m_mapOfTrackers.size());
3510  }
3511 }
3512 
3522 void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3523 {
3524  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3525  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3526 
3527  if (it_tracker != m_mapOfTrackers.end()) {
3528  TrackerWrapper *tracker = it_tracker->second;
3529  tracker->setAngleDisappear(it->second);
3530 
3531  if (it->first == m_referenceCameraName) {
3532  angleDisappears = it->second;
3533  }
3534  }
3535  }
3536 }
3537 
3543 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera)
3544 {
3545  vpMbTracker::setCameraParameters(camera);
3546 
3547  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3548  it != m_mapOfTrackers.end(); ++it) {
3549  TrackerWrapper *tracker = it->second;
3550  tracker->setCameraParameters(camera);
3551  }
3552 }
3553 
3562 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera1, const vpCameraParameters &camera2)
3563 {
3564  if (m_mapOfTrackers.size() == 2) {
3565  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3566  it->second->setCameraParameters(camera1);
3567 
3568  ++it;
3569  it->second->setCameraParameters(camera2);
3570 
3571  it = m_mapOfTrackers.find(m_referenceCameraName);
3572  if (it != m_mapOfTrackers.end()) {
3573  it->second->getCameraParameters(m_cam);
3574  } else {
3575  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3576  }
3577  } else {
3578  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3579  m_mapOfTrackers.size());
3580  }
3581 }
3582 
3591 void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3592 {
3593  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3594  it != mapOfCameraParameters.end(); ++it) {
3595  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3596 
3597  if (it_tracker != m_mapOfTrackers.end()) {
3598  TrackerWrapper *tracker = it_tracker->second;
3599  tracker->setCameraParameters(it->second);
3600 
3601  if (it->first == m_referenceCameraName) {
3602  m_cam = it->second;
3603  }
3604  }
3605  }
3606 }
3607 
3616 void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3617  const vpHomogeneousMatrix &cameraTransformationMatrix)
3618 {
3619  std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3620 
3621  if (it != m_mapOfCameraTransformationMatrix.end()) {
3622  it->second = cameraTransformationMatrix;
3623  } else {
3624  throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3625  }
3626 }
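/*
 * Usage sketch (added example, not part of the original source): registering the
 * rigid transformation from the reference camera frame to a second camera frame,
 * which is what the propagation code above multiplies with m_cMo for cameras that
 * miss an image or a pose. tracker and the matrix c2Mc1 (typically from an
 * extrinsic calibration) are assumptions.
 *
 * \code
 * vpHomogeneousMatrix c2Mc1;
 * tracker.setCameraTransformationMatrix("Camera2", c2Mc1);
 * \endcode
 */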
3627 
3635 void vpMbGenericTracker::setCameraTransformationMatrix(
3636  const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3637 {
3638  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3639  it != mapOfTransformationMatrix.end(); ++it) {
3640  std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3641  m_mapOfCameraTransformationMatrix.find(it->first);
3642 
3643  if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3644  it_camTrans->second = it->second;
3645  }
3646  }
3647 }
3648 
3658 void vpMbGenericTracker::setClipping(const unsigned int &flags)
3659 {
3660  vpMbTracker::setClipping(flags);
3661 
3662  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3663  it != m_mapOfTrackers.end(); ++it) {
3664  TrackerWrapper *tracker = it->second;
3665  tracker->setClipping(flags);
3666  }
3667 }
3668 
3679 void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3680 {
3681  if (m_mapOfTrackers.size() == 2) {
3682  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3683  it->second->setClipping(flags1);
3684 
3685  ++it;
3686  it->second->setClipping(flags2);
3687 
3688  it = m_mapOfTrackers.find(m_referenceCameraName);
3689  if (it != m_mapOfTrackers.end()) {
3690  clippingFlag = it->second->getClipping();
3691  } else {
3692  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3693  }
3694  } else {
3695  std::stringstream ss;
3696  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3697  throw vpException(vpTrackingException::fatalError, ss.str());
3698  }
3699 }
3700 
3708 void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
3709 {
3710  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
3711  it != mapOfClippingFlags.end(); ++it) {
3712  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3713 
3714  if (it_tracker != m_mapOfTrackers.end()) {
3715  TrackerWrapper *tracker = it_tracker->second;
3716  tracker->setClipping(it->second);
3717 
3718  if (it->first == m_referenceCameraName) {
3719  clippingFlag = it->second;
3720  }
3721  }
3722  }
3723 }
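/*
 * Usage sketch (added example, not part of the original source): enabling
 * different clipping flags per camera through the flag map. tracker and the
 * camera names are assumptions; the vpPolygon3D flags shown are ViSP's standard
 * clipping flags.
 *
 * \code
 * std::map<std::string, unsigned int> mapOfClippingFlags;
 * mapOfClippingFlags["Camera1"] =
 *     vpPolygon3D::NEAR_CLIPPING | vpPolygon3D::FAR_CLIPPING | vpPolygon3D::FOV_CLIPPING;
 * mapOfClippingFlags["Camera2"] = vpPolygon3D::FOV_CLIPPING;
 * tracker.setClipping(mapOfClippingFlags);
 * \endcode
 */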
3724 
3734 void vpMbGenericTracker::setDepthDenseFilteringMaxDistance(double maxDistance)
3735 {
3736  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3737  it != m_mapOfTrackers.end(); ++it) {
3738  TrackerWrapper *tracker = it->second;
3739  tracker->setDepthDenseFilteringMaxDistance(maxDistance);
3740  }
3741 }
3742 
3751 void vpMbGenericTracker::setDepthDenseFilteringMethod(int method)
3752 {
3753  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3754  it != m_mapOfTrackers.end(); ++it) {
3755  TrackerWrapper *tracker = it->second;
3756  tracker->setDepthDenseFilteringMethod(method);
3757  }
3758 }
3759 
3769 void vpMbGenericTracker::setDepthDenseFilteringMinDistance(double minDistance)
3770 {
3771  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3772  it != m_mapOfTrackers.end(); ++it) {
3773  TrackerWrapper *tracker = it->second;
3774  tracker->setDepthDenseFilteringMinDistance(minDistance);
3775  }
3776 }
3777 
3787 void vpMbGenericTracker::setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
3788 {
3789  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3790  it != m_mapOfTrackers.end(); ++it) {
3791  TrackerWrapper *tracker = it->second;
3792  tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
3793  }
3794 }
3795 
3804 void vpMbGenericTracker::setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
3805 {
3806  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3807  it != m_mapOfTrackers.end(); ++it) {
3808  TrackerWrapper *tracker = it->second;
3809  tracker->setDepthDenseSamplingStep(stepX, stepY);
3810  }
3811 }
3812 
3820 void vpMbGenericTracker::setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
3821 {
3822  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3823  it != m_mapOfTrackers.end(); ++it) {
3824  TrackerWrapper *tracker = it->second;
3825  tracker->setDepthNormalFaceCentroidMethod(method);
3826  }
3827 }
3828 
3837 void vpMbGenericTracker::setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
3838 {
3839  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3840  it != m_mapOfTrackers.end(); ++it) {
3841  TrackerWrapper *tracker = it->second;
3842  tracker->setDepthNormalFeatureEstimationMethod(method);
3843  }
3844 }
3845 
3853 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationMethod(int method)
3854 {
3855  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3856  it != m_mapOfTrackers.end(); ++it) {
3857  TrackerWrapper *tracker = it->second;
3858  tracker->setDepthNormalPclPlaneEstimationMethod(method);
3859  }
3860 }
3861 
3869 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
3870 {
3871  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3872  it != m_mapOfTrackers.end(); ++it) {
3873  TrackerWrapper *tracker = it->second;
3874  tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
3875  }
3876 }
3877 
3885 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacThreshold(double thresold)
3886 {
3887  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3888  it != m_mapOfTrackers.end(); ++it) {
3889  TrackerWrapper *tracker = it->second;
3890  tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
3891  }
3892 }
3893 
3902 void vpMbGenericTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
3903 {
3904  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3905  it != m_mapOfTrackers.end(); ++it) {
3906  TrackerWrapper *tracker = it->second;
3907  tracker->setDepthNormalSamplingStep(stepX, stepY);
3908  }
3909 }
3910 
3929 void vpMbGenericTracker::setDisplayFeatures(bool displayF)
3930 {
3931  vpMbTracker::setDisplayFeatures(displayF);
3932 
3933  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3934  it != m_mapOfTrackers.end(); ++it) {
3935  TrackerWrapper *tracker = it->second;
3936  tracker->setDisplayFeatures(displayF);
3937  }
3938 }
3939 
3947 void vpMbGenericTracker::setFarClippingDistance(const double &dist)
3948 {
3949  vpMbTracker::setFarClippingDistance(dist);
3950 
3951  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3952  it != m_mapOfTrackers.end(); ++it) {
3953  TrackerWrapper *tracker = it->second;
3954  tracker->setFarClippingDistance(dist);
3955  }
3956 }
3957 
3966 void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
3967 {
3968  if (m_mapOfTrackers.size() == 2) {
3969  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3970  it->second->setFarClippingDistance(dist1);
3971 
3972  ++it;
3973  it->second->setFarClippingDistance(dist2);
3974 
3975  it = m_mapOfTrackers.find(m_referenceCameraName);
3976  if (it != m_mapOfTrackers.end()) {
3977  distFarClip = it->second->getFarClippingDistance();
3978  } else {
3979  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3980  }
3981  } else {
3982  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3983  m_mapOfTrackers.size());
3984  }
3985 }
3986 
3992 void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
3993 {
3994  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
3995  ++it) {
3996  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3997 
3998  if (it_tracker != m_mapOfTrackers.end()) {
3999  TrackerWrapper *tracker = it_tracker->second;
4000  tracker->setFarClippingDistance(it->second);
4001 
4002  if (it->first == m_referenceCameraName) {
4003  distFarClip = it->second;
4004  }
4005  }
4006  }
4007 }
4008 
4015 void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
4016 {
4017  for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
4018  ++it) {
4019  std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4020  if (it_factor != mapOfFeatureFactors.end()) {
4021  it->second = it_factor->second;
4022  }
4023  }
4024 }
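/*
 * Usage sketch (added example, not part of the original source): weighting the
 * contribution of each feature type in the optimization, for instance trusting
 * edge features more than dense depth features. tracker is an assumption; the
 * enum values are vpMbGenericTracker's tracker types.
 *
 * \code
 * std::map<vpMbGenericTracker::vpTrackerType, double> mapOfFactors;
 * mapOfFactors[vpMbGenericTracker::EDGE_TRACKER] = 1.0;
 * mapOfFactors[vpMbGenericTracker::DEPTH_DENSE_TRACKER] = 0.5;
 * tracker.setFeatureFactors(mapOfFactors);
 * \endcode
 */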
4025 
4041 void vpMbGenericTracker::setGoodMovingEdgesRatioThreshold(double threshold)
4042 {
4043  m_percentageGdPt = threshold;
4044 
4045  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4046  it != m_mapOfTrackers.end(); ++it) {
4047  TrackerWrapper *tracker = it->second;
4048  tracker->setGoodMovingEdgesRatioThreshold(threshold);
4049  }
4050 }
4051 
4052 #ifdef VISP_HAVE_OGRE
4064 void vpMbGenericTracker::setGoodNbRayCastingAttemptsRatio(const double &ratio)
4065 {
4066  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4067  it != m_mapOfTrackers.end(); ++it) {
4068  TrackerWrapper *tracker = it->second;
4069  tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4070  }
4071 }
4072 
4084 void vpMbGenericTracker::setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
4085 {
4086  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4087  it != m_mapOfTrackers.end(); ++it) {
4088  TrackerWrapper *tracker = it->second;
4089  tracker->setNbRayCastingAttemptsForVisibility(attempts);
4090  }
4091 }
4092 #endif
4093 
4094 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4102 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t)
4103 {
4104  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4105  it != m_mapOfTrackers.end(); ++it) {
4106  TrackerWrapper *tracker = it->second;
4107  tracker->setKltOpencv(t);
4108  }
4109 }
4110 
4119 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t1, const vpKltOpencv &t2)
4120 {
4121  if (m_mapOfTrackers.size() == 2) {
4122  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4123  it->second->setKltOpencv(t1);
4124 
4125  ++it;
4126  it->second->setKltOpencv(t2);
4127  } else {
4128  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4129  m_mapOfTrackers.size());
4130  }
4131 }
4132 
4138 void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4139 {
4140  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4141  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4142 
4143  if (it_tracker != m_mapOfTrackers.end()) {
4144  TrackerWrapper *tracker = it_tracker->second;
4145  tracker->setKltOpencv(it->second);
4146  }
4147  }
4148 }
4149 
4157 void vpMbGenericTracker::setKltThresholdAcceptation(double th)
4158 {
4159  m_thresholdOutlier = th;
4160 
4161  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4162  it != m_mapOfTrackers.end(); ++it) {
4163  TrackerWrapper *tracker = it->second;
4164  tracker->setKltThresholdAcceptation(th);
4165  }
4166 }
4167 #endif
4168 
4181 void vpMbGenericTracker::setLod(bool useLod, const std::string &name)
4182 {
4183  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4184  it != m_mapOfTrackers.end(); ++it) {
4185  TrackerWrapper *tracker = it->second;
4186  tracker->setLod(useLod, name);
4187  }
4188 }
4189 
4190 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4198 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4199 {
4200  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4201  it != m_mapOfTrackers.end(); ++it) {
4202  TrackerWrapper *tracker = it->second;
4203  tracker->setKltMaskBorder(e);
4204  }
4205 }
4206 
4215 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4216 {
4217  if (m_mapOfTrackers.size() == 2) {
4218  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4219  it->second->setKltMaskBorder(e1);
4220 
4221  ++it;
4222 
4223  it->second->setKltMaskBorder(e2);
4224  } else {
4225  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4226  m_mapOfTrackers.size());
4227  }
4228 }
4229 
4235 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4236 {
4237  for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4238  ++it) {
4239  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4240 
4241  if (it_tracker != m_mapOfTrackers.end()) {
4242  TrackerWrapper *tracker = it_tracker->second;
4243  tracker->setKltMaskBorder(it->second);
4244  }
4245  }
4246 }
4247 #endif
4248 
4254 void vpMbGenericTracker::setMask(const vpImage<bool> &mask)
4255 {
4256  vpMbTracker::setMask(mask);
4257 
4258  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4259  it != m_mapOfTrackers.end(); ++it) {
4260  TrackerWrapper *tracker = it->second;
4261  tracker->setMask(mask);
4262  }
4263 }
4264 
4276 void vpMbGenericTracker::setMinLineLengthThresh(double minLineLengthThresh, const std::string &name)
4277 {
4278  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4279  it != m_mapOfTrackers.end(); ++it) {
4280  TrackerWrapper *tracker = it->second;
4281  tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4282  }
4283 }
4284 
4295 void vpMbGenericTracker::setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name)
4296 {
4297  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4298  it != m_mapOfTrackers.end(); ++it) {
4299  TrackerWrapper *tracker = it->second;
4300  tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4301  }
4302 }
4303 
4311 void vpMbGenericTracker::setMovingEdge(const vpMe &me)
4312 {
4313  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4314  it != m_mapOfTrackers.end(); ++it) {
4315  TrackerWrapper *tracker = it->second;
4316  tracker->setMovingEdge(me);
4317  }
4318 }
4319 
4329 void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4330 {
4331  if (m_mapOfTrackers.size() == 2) {
4332  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4333  it->second->setMovingEdge(me1);
4334 
4335  ++it;
4336 
4337  it->second->setMovingEdge(me2);
4338  } else {
4339  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4340  m_mapOfTrackers.size());
4341  }
4342 }
4343 
4349 void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4350 {
4351  for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4352  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4353 
4354  if (it_tracker != m_mapOfTrackers.end()) {
4355  TrackerWrapper *tracker = it_tracker->second;
4356  tracker->setMovingEdge(it->second);
4357  }
4358  }
4359 }
4360 
4368 void vpMbGenericTracker::setNearClippingDistance(const double &dist)
4369 {
4370  vpMbTracker::setNearClippingDistance(dist);
4371 
4372  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4373  it != m_mapOfTrackers.end(); ++it) {
4374  TrackerWrapper *tracker = it->second;
4375  tracker->setNearClippingDistance(dist);
4376  }
4377 }
4378 
4387 void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4388 {
4389  if (m_mapOfTrackers.size() == 2) {
4390  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4391  it->second->setNearClippingDistance(dist1);
4392 
4393  ++it;
4394 
4395  it->second->setNearClippingDistance(dist2);
4396 
4397  it = m_mapOfTrackers.find(m_referenceCameraName);
4398  if (it != m_mapOfTrackers.end()) {
4399  distNearClip = it->second->getNearClippingDistance();
4400  } else {
4401  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4402  }
4403  } else {
4404  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4405  m_mapOfTrackers.size());
4406  }
4407 }
4408 
4414 void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4415 {
4416  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4417  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4418 
4419  if (it_tracker != m_mapOfTrackers.end()) {
4420  TrackerWrapper *tracker = it_tracker->second;
4421  tracker->setNearClippingDistance(it->second);
4422 
4423  if (it->first == m_referenceCameraName) {
4424  distNearClip = it->second;
4425  }
4426  }
4427  }
4428 }
4429 
4442 void vpMbGenericTracker::setOgreShowConfigDialog(bool showConfigDialog)
4443 {
4444  vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4445 
4446  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4447  it != m_mapOfTrackers.end(); ++it) {
4448  TrackerWrapper *tracker = it->second;
4449  tracker->setOgreShowConfigDialog(showConfigDialog);
4450  }
4451 }
4452 
4463 void vpMbGenericTracker::setOgreVisibilityTest(const bool &v)
4464 {
4465  vpMbTracker::setOgreVisibilityTest(v);
4466 
4467  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4468  it != m_mapOfTrackers.end(); ++it) {
4469  TrackerWrapper *tracker = it->second;
4470  tracker->setOgreVisibilityTest(v);
4471  }
4472 
4473 #ifdef VISP_HAVE_OGRE
4474  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4475  it != m_mapOfTrackers.end(); ++it) {
4476  TrackerWrapper *tracker = it->second;
4477  tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4478  }
4479 #endif
4480 }
4481 
4489 void vpMbGenericTracker::setOptimizationMethod(const vpMbtOptimizationMethod &opt)
4490 {
4491  vpMbTracker::setOptimizationMethod(opt);
4492 
4493  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4494  it != m_mapOfTrackers.end(); ++it) {
4495  TrackerWrapper *tracker = it->second;
4496  tracker->setOptimizationMethod(opt);
4497  }
4498 }
4499 
4512 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
4513 {
4514  if (m_mapOfTrackers.size() > 1) {
4515  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4516  "to be configured with only one camera!");
4517  }
4518 
4519  m_cMo = cdMo;
4520 
4521  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4522  if (it != m_mapOfTrackers.end()) {
4523  TrackerWrapper *tracker = it->second;
4524  tracker->setPose(I, cdMo);
4525  } else {
4526  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4527  m_referenceCameraName.c_str());
4528  }
4529 }
4530 
4543 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
4544 {
4545  if (m_mapOfTrackers.size() > 1) {
4546  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4547  "to be configured with only one camera!");
4548  }
4549 
4550  m_cMo = cdMo;
4551 
4552  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4553  if (it != m_mapOfTrackers.end()) {
4554  TrackerWrapper *tracker = it->second;
4555  vpImageConvert::convert(I_color, m_I);
4556  tracker->setPose(m_I, cdMo);
4557  } else {
4558  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4559  m_referenceCameraName.c_str());
4560  }
4561 }
4562 
4574 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
4575  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4576 {
4577  if (m_mapOfTrackers.size() == 2) {
4578  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4579  it->second->setPose(I1, c1Mo);
4580 
4581  ++it;
4582 
4583  it->second->setPose(I2, c2Mo);
4584 
4585  it = m_mapOfTrackers.find(m_referenceCameraName);
4586  if (it != m_mapOfTrackers.end()) {
4587  // Set reference pose
4588  it->second->getPose(m_cMo);
4589  } else {
4590  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4591  m_referenceCameraName.c_str());
4592  }
4593  } else {
4594  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4595  m_mapOfTrackers.size());
4596  }
4597 }
4598 
4610 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
4611  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4612 {
4613  if (m_mapOfTrackers.size() == 2) {
4614  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4615  it->second->setPose(I_color1, c1Mo);
4616 
4617  ++it;
4618 
4619  it->second->setPose(I_color2, c2Mo);
4620 
4621  it = m_mapOfTrackers.find(m_referenceCameraName);
4622  if (it != m_mapOfTrackers.end()) {
4623  // Set reference pose
4624  it->second->getPose(m_cMo);
4625  } else {
4626  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4627  m_referenceCameraName.c_str());
4628  }
4629  } else {
4630  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4631  m_mapOfTrackers.size());
4632  }
4633 }
4634 
4650 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4651  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4652 {
4653  // Set the reference cMo
4654  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4655  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4656  mapOfImages.find(m_referenceCameraName);
4657  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4658 
4659  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4660  TrackerWrapper *tracker = it_tracker->second;
4661  tracker->setPose(*it_img->second, it_camPose->second);
4662  tracker->getPose(m_cMo);
4663  } else {
4664  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4665  }
4666 
4667  // Vector of missing pose matrices for cameras
4668  std::vector<std::string> vectorOfMissingCameraPoses;
4669 
4670  // Set pose for the specified cameras
4671  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4672  if (it_tracker->first != m_referenceCameraName) {
4673  it_img = mapOfImages.find(it_tracker->first);
4674  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4675 
4676  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4677  // Set pose
4678  TrackerWrapper *tracker = it_tracker->second;
4679  tracker->setPose(*it_img->second, it_camPose->second);
4680  } else {
4681  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4682  }
4683  }
4684  }
4685 
4686  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4687  it != vectorOfMissingCameraPoses.end(); ++it) {
4688  it_img = mapOfImages.find(*it);
4689  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4690  m_mapOfCameraTransformationMatrix.find(*it);
4691 
4692  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4693  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
4694  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4695  } else {
4696  throw vpException(vpTrackingException::fatalError,
4697  "Missing image or missing camera transformation "
4698  "matrix! Cannot set pose for camera: %s!",
4699  it->c_str());
4700  }
4701  }
4702 }
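// Usage sketch (illustration only): resetting the pose of a two-camera tracker with the map
// overload above. The reference camera must be present in both maps; a camera missing from the
// pose map is deduced from the reference pose and the stored camera transformation matrix.
// Camera names are the defaults created by the constructor.
//
//   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
//   mapOfImages["Camera1"] = &I1;
//   mapOfImages["Camera2"] = &I2;
//   std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
//   mapOfPoses["Camera1"] = c1Mo; // pose given for the reference camera only
//   tracker.setPose(mapOfImages, mapOfPoses); // c2Mo is deduced as c2Mc1 * c1Mo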
4703 
4719 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
4720  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4721 {
4722  // Set the reference cMo
4723  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4724  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
4725  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4726 
4727  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
4728  TrackerWrapper *tracker = it_tracker->second;
4729  tracker->setPose(*it_img->second, it_camPose->second);
4730  tracker->getPose(m_cMo);
4731  } else {
4732  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4733  }
4734 
4735  // Vector of missing pose matrices for cameras
4736  std::vector<std::string> vectorOfMissingCameraPoses;
4737 
4738  // Set pose for the specified cameras
4739  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4740  if (it_tracker->first != m_referenceCameraName) {
4741  it_img = mapOfColorImages.find(it_tracker->first);
4742  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4743 
4744  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
4745  // Set pose
4746  TrackerWrapper *tracker = it_tracker->second;
4747  tracker->setPose(*it_img->second, it_camPose->second);
4748  } else {
4749  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4750  }
4751  }
4752  }
4753 
4754  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4755  it != vectorOfMissingCameraPoses.end(); ++it) {
4756  it_img = mapOfColorImages.find(*it);
4757  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4758  m_mapOfCameraTransformationMatrix.find(*it);
4759 
4760  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4761  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
4762  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4763  } else {
4764  throw vpException(vpTrackingException::fatalError,
4765  "Missing image or missing camera transformation "
4766  "matrix! Cannot set pose for camera: %s!",
4767  it->c_str());
4768  }
4769  }
4770 }
4771 
4786 void vpMbGenericTracker::setProjectionErrorComputation(const bool &flag)
4787 {
4788  vpMbTracker::setProjectionErrorComputation(flag);
4789 
4790  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4791  it != m_mapOfTrackers.end(); ++it) {
4792  TrackerWrapper *tracker = it->second;
4793  tracker->setProjectionErrorComputation(flag);
4794  }
4795 }
4796 
4800 void vpMbGenericTracker::setProjectionErrorDisplay(bool display)
4801 {
4802  vpMbTracker::setProjectionErrorDisplay(display);
4803 
4804  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4805  it != m_mapOfTrackers.end(); ++it) {
4806  TrackerWrapper *tracker = it->second;
4807  tracker->setProjectionErrorDisplay(display);
4808  }
4809 }
4810 
4814 void vpMbGenericTracker::setProjectionErrorDisplayArrowLength(unsigned int length)
4815 {
4816  vpMbTracker::setProjectionErrorDisplayArrowLength(length);
4817 
4818  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4819  it != m_mapOfTrackers.end(); ++it) {
4820  TrackerWrapper *tracker = it->second;
4821  tracker->setProjectionErrorDisplayArrowLength(length);
4822  }
4823 }
4824 
4825 void vpMbGenericTracker::setProjectionErrorDisplayArrowThickness(unsigned int thickness)
4826 {
4827  vpMbTracker::setProjectionErrorDisplayArrowThickness(thickness);
4828 
4829  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4830  it != m_mapOfTrackers.end(); ++it) {
4831  TrackerWrapper *tracker = it->second;
4832  tracker->setProjectionErrorDisplayArrowThickness(thickness);
4833  }
4834 }
4835 
4841 void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
4842 {
4843  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
4844  if (it != m_mapOfTrackers.end()) {
4845  m_referenceCameraName = referenceCameraName;
4846  } else {
4847  std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
4848  }
4849 }
4850 
4851 void vpMbGenericTracker::setScanLineVisibilityTest(const bool &v)
4852 {
4853  vpMbTracker::setScanLineVisibilityTest(v);
4854 
4855  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4856  it != m_mapOfTrackers.end(); ++it) {
4857  TrackerWrapper *tracker = it->second;
4858  tracker->setScanLineVisibilityTest(v);
4859  }
4860 }
4861 
4873 void vpMbGenericTracker::setTrackerType(int type)
4874 {
4875  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4876  it != m_mapOfTrackers.end(); ++it) {
4877  TrackerWrapper *tracker = it->second;
4878  tracker->setTrackerType(type);
4879  }
4880 }
4881 
4891 void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
4892 {
4893  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
4894  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4895  if (it_tracker != m_mapOfTrackers.end()) {
4896  TrackerWrapper *tracker = it_tracker->second;
4897  tracker->setTrackerType(it->second);
4898  }
4899  }
4900 }
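// Usage sketch (illustration only): selecting a different feature type per camera, e.g. edges
// (plus KLT points when the KLT module is available) on a grayscale camera and dense depth on a
// depth sensor. Camera names are the defaults created by the constructor.
//
//   std::map<std::string, int> mapOfTrackerTypes;
//   mapOfTrackerTypes["Camera1"] = vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER;
//   mapOfTrackerTypes["Camera2"] = vpMbGenericTracker::DEPTH_DENSE_TRACKER;
//   tracker.setTrackerType(mapOfTrackerTypes);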
4901 
4911 void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
4912 {
4913  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4914  it != m_mapOfTrackers.end(); ++it) {
4915  TrackerWrapper *tracker = it->second;
4916  tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
4917  }
4918 }
4919 
4929 void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
4930 {
4931  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4932  it != m_mapOfTrackers.end(); ++it) {
4933  TrackerWrapper *tracker = it->second;
4934  tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
4935  }
4936 }
4937 
4947 void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
4948 {
4949  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4950  it != m_mapOfTrackers.end(); ++it) {
4951  TrackerWrapper *tracker = it->second;
4952  tracker->setUseEdgeTracking(name, useEdgeTracking);
4953  }
4954 }
4955 
4956 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4966 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
4967 {
4968  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4969  it != m_mapOfTrackers.end(); ++it) {
4970  TrackerWrapper *tracker = it->second;
4971  tracker->setUseKltTracking(name, useKltTracking);
4972  }
4973 }
4974 #endif
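// Usage sketch (illustration only): the setUse*Tracking() helpers above enable or disable one
// feature type on the model faces whose name matches. The face name "occluded_face" is
// hypothetical and has to exist in the loaded CAD model.
//
//   tracker.setUseEdgeTracking("occluded_face", false); // stop using moving edges on that face
// #if defined(VISP_HAVE_MODULE_KLT)
//   tracker.setUseKltTracking("occluded_face", false);  // stop using KLT points on that face
// #endif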
4975 
4976 void vpMbGenericTracker::testTracking()
4977 {
4978  // Tracking is considered to have failed only if testTracking() fails for every camera
4979  bool isOneTestTrackingOk = false;
4980  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4981  it != m_mapOfTrackers.end(); ++it) {
4982  TrackerWrapper *tracker = it->second;
4983  try {
4984  tracker->testTracking();
4985  isOneTestTrackingOk = true;
4986  } catch (...) {
4987  }
4988  }
4989 
4990  if (!isOneTestTrackingOk) {
4991  std::ostringstream oss;
4992  oss << "Not enough moving edges to track the object. Try to reduce the "
4993  "threshold="
4994  << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
4995  throw vpException(vpTrackingException::fatalError, oss.str());
4996  }
4997 }
4998 
5008 void vpMbGenericTracker::track(const vpImage<unsigned char> &I)
5009 {
5010  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5011  mapOfImages[m_referenceCameraName] = &I;
5012 
5013  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5014  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5015 
5016  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5017 }
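// Usage sketch (illustration only): minimal monocular tracking loop built on the overload above.
// The acquisition object "grabber", the model files and the display setup are assumptions of
// this example.
//
//   tracker.loadModel("model.cao");
//   tracker.initClick(I, "model.init");  // or initFromPose() with a known pose
//   while (grabber.acquire(I)) {         // hypothetical acquisition loop
//     tracker.track(I);
//     vpHomogeneousMatrix cMo;
//     tracker.getPose(cMo);
//     tracker.display(I, cMo, cam, vpColor::red, 2);
//   }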
5018 
5028 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color)
5029 {
5030  std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5031  mapOfColorImages[m_referenceCameraName] = &I_color;
5032 
5033  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5034  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5035 
5036  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5037 }
5038 
5049 void vpMbGenericTracker::track(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2)
5050 {
5051  if (m_mapOfTrackers.size() == 2) {
5052  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5053  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5054  mapOfImages[it->first] = &I1;
5055  ++it;
5056 
5057  mapOfImages[it->first] = &I2;
5058 
5059  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5060  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5061 
5062  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5063  } else {
5064  std::stringstream ss;
5065  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5066  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5067  }
5068 }
5069 
5080 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &_colorI2)
5081 {
5082  if (m_mapOfTrackers.size() == 2) {
5083  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5084  std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5085  mapOfImages[it->first] = &I_color1;
5086  ++it;
5087 
5088  mapOfImages[it->first] = &_colorI2;
5089 
5090  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5091  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5092 
5093  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5094  } else {
5095  std::stringstream ss;
5096  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5097  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5098  }
5099 }
5100 
5108 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5109 {
5110  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5111  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5112 
5113  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5114 }
5115 
5123 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5124 {
5125  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5126  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5127 
5128  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5129 }
5130 
5131 #ifdef VISP_HAVE_PCL
5140 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5141  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5142 {
5143  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5144  it != m_mapOfTrackers.end(); ++it) {
5145  TrackerWrapper *tracker = it->second;
5146 
5147  if ((tracker->m_trackerType & (EDGE_TRACKER |
5148 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5149  KLT_TRACKER |
5150 #endif
5151  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5152  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5153  }
5154 
5155  if (tracker->m_trackerType & (EDGE_TRACKER
5156 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5157  | KLT_TRACKER
5158 #endif
5159  ) &&
5160  mapOfImages[it->first] == NULL) {
5161  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5162  }
5163 
5164  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5165  !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5166  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5167  }
5168  }
5169 
5170  preTracking(mapOfImages, mapOfPointClouds);
5171 
5172  try {
5173  computeVVS(mapOfImages);
5174  } catch (...) {
5175  covarianceMatrix = -1;
5176  throw; // throw the original exception
5177  }
5178 
5179  testTracking();
5180 
5181  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5182  it != m_mapOfTrackers.end(); ++it) {
5183  TrackerWrapper *tracker = it->second;
5184 
5185  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5186  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5187  }
5188 
5189  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5190 
5191  if (displayFeatures) {
5192 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5193  if (tracker->m_trackerType & KLT_TRACKER) {
5194  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5195  }
5196 #endif
5197 
5198  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5199  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5200  }
5201  }
5202  }
5203 
5204  computeProjectionError();
5205 }
5206 
5215 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5216  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5217 {
5218  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5219  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5220  it != m_mapOfTrackers.end(); ++it) {
5221  TrackerWrapper *tracker = it->second;
5222 
5223  if ((tracker->m_trackerType & (EDGE_TRACKER |
5224 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5225  KLT_TRACKER |
5226 #endif
5227  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5228  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5229  }
5230 
5231  if (tracker->m_trackerType & (EDGE_TRACKER
5232 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5233  | KLT_TRACKER
5234 #endif
5235  ) &&
5236  mapOfImages[it->first] == NULL) {
5237  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5238  } else if (tracker->m_trackerType & (EDGE_TRACKER
5239 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5240  | KLT_TRACKER
5241 #endif
5242  ) &&
5243  mapOfImages[it->first] != NULL) {
5244  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5245  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5246  }
5247 
5248  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5249  !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5250  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5251  }
5252  }
5253 
5254  preTracking(mapOfImages, mapOfPointClouds);
5255 
5256  try {
5257  computeVVS(mapOfImages);
5258  } catch (...) {
5259  covarianceMatrix = -1;
5260  throw; // throw the original exception
5261  }
5262 
5263  testTracking();
5264 
5265  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5266  it != m_mapOfTrackers.end(); ++it) {
5267  TrackerWrapper *tracker = it->second;
5268 
5269  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5270  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5271  }
5272 
5273  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5274 
5275  if (displayFeatures) {
5276 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5277  if (tracker->m_trackerType & KLT_TRACKER) {
5278  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5279  }
5280 #endif
5281 
5282  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5283  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5284  }
5285  }
5286  }
5287 
5288  computeProjectionError();
5289 }
5290 #endif
5291 
5302 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5303  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5304  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5305  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5306 {
5307  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5308  it != m_mapOfTrackers.end(); ++it) {
5309  TrackerWrapper *tracker = it->second;
5310 
5311  if ((tracker->m_trackerType & (EDGE_TRACKER |
5312 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5313  KLT_TRACKER |
5314 #endif
5315  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5316  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5317  }
5318 
5319  if (tracker->m_trackerType & (EDGE_TRACKER
5320 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5321  | KLT_TRACKER
5322 #endif
5323  ) &&
5324  mapOfImages[it->first] == NULL) {
5325  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5326  }
5327 
5328  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5329  (mapOfPointClouds[it->first] == NULL)) {
5330  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5331  }
5332  }
5333 
5334  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5335 
5336  try {
5337  computeVVS(mapOfImages);
5338  } catch (...) {
5339  covarianceMatrix = -1;
5340  throw; // throw the original exception
5341  }
5342 
5343  testTracking();
5344 
5345  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5346  it != m_mapOfTrackers.end(); ++it) {
5347  TrackerWrapper *tracker = it->second;
5348 
5349  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5350  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5351  }
5352 
5353  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5354 
5355  if (displayFeatures) {
5356 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5357  if (tracker->m_trackerType & KLT_TRACKER) {
5358  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5359  }
5360 #endif
5361 
5362  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5363  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5364  }
5365  }
5366  }
5367 
5368  computeProjectionError();
5369 }
5370 
5381 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5382  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5383  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5384  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5385 {
5386  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5387  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5388  it != m_mapOfTrackers.end(); ++it) {
5389  TrackerWrapper *tracker = it->second;
5390 
5391  if ((tracker->m_trackerType & (EDGE_TRACKER |
5392 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5393  KLT_TRACKER |
5394 #endif
5395  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5396  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5397  }
5398 
5399  if (tracker->m_trackerType & (EDGE_TRACKER
5400 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5401  | KLT_TRACKER
5402 #endif
5403  ) &&
5404  mapOfColorImages[it->first] == NULL) {
5405  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5406  } else if (tracker->m_trackerType & (EDGE_TRACKER
5407 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5408  | KLT_TRACKER
5409 #endif
5410  ) &&
5411  mapOfColorImages[it->first] != NULL) {
5412  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5413  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5414  }
5415 
5416  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5417  (mapOfPointClouds[it->first] == NULL)) {
5418  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5419  }
5420  }
5421 
5422  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5423 
5424  try {
5425  computeVVS(mapOfImages);
5426  } catch (...) {
5427  covarianceMatrix = -1;
5428  throw; // throw the original exception
5429  }
5430 
5431  testTracking();
5432 
5433  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5434  it != m_mapOfTrackers.end(); ++it) {
5435  TrackerWrapper *tracker = it->second;
5436 
5437  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5438  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5439  }
5440 
5441  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5442 
5443  if (displayFeatures) {
5444 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5445  if (tracker->m_trackerType & KLT_TRACKER) {
5446  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5447  }
5448 #endif
5449 
5450  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5451  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5452  }
5453  }
5454  }
5455 
5456  computeProjectionError();
5457 }
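// Usage sketch (illustration only): feeding the overload above with a depth frame converted into
// a std::vector<vpColVector> (one 3D point per pixel) plus its width/height. The conversion step
// and the camera names are assumptions of this example.
//
//   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
//   std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
//   std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
//   mapOfImages["Camera1"] = &I;               // grayscale image for the edge/KLT features
//   mapOfPointClouds["Camera2"] = &pointcloud; // std::vector<vpColVector> built from the depth map
//   mapOfWidths["Camera2"] = depth_width;
//   mapOfHeights["Camera2"] = depth_height;
//   tracker.track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);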
5458 
5460 vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5461  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5462 {
5463  m_lambda = 1.0;
5464  m_maxIter = 30;
5465 
5466 #ifdef VISP_HAVE_OGRE
5467  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5468 
5469  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5470 #endif
5471 }
5472 
5473 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
5474  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5475 {
5476  if ((m_trackerType & (EDGE_TRACKER |
5477 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5478  KLT_TRACKER |
5479 #endif
5480  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5481  throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
5482  }
5483 
5484  m_lambda = 1.0;
5485  m_maxIter = 30;
5486 
5487 #ifdef VISP_HAVE_OGRE
5488  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5489 
5490  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5491 #endif
5492 }
5493 
5494 vpMbGenericTracker::TrackerWrapper::~TrackerWrapper() {}
5495 
5496 // Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
5497 void vpMbGenericTracker::TrackerWrapper::computeVVS(const vpImage<unsigned char> *const ptr_I)
5498 {
5499  computeVVSInit(ptr_I);
5500 
5501  if (m_error.getRows() < 4) {
5502  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
5503  }
5504 
5505  double normRes = 0;
5506  double normRes_1 = -1;
5507  unsigned int iter = 0;
5508 
5509  double factorEdge = 1.0;
5510 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5511  double factorKlt = 1.0;
5512 #endif
5513  double factorDepth = 1.0;
5514  double factorDepthDense = 1.0;
5515 
5516  vpMatrix LTL;
5517  vpColVector LTR, v;
5518  vpColVector error_prev;
5519 
5520  double mu = m_initialMu;
5521  vpHomogeneousMatrix cMo_prev;
5522 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5523  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
5524 #endif
5525  bool isoJoIdentity_ = true;
5526 
5527  // Covariance
5528  vpColVector W_true(m_error.getRows());
5529  vpMatrix L_true, LVJ_true;
5530 
5531  unsigned int nb_edge_features = m_error_edge.getRows();
5532 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5533  unsigned int nb_klt_features = m_error_klt.getRows();
5534 #endif
5535  unsigned int nb_depth_features = m_error_depthNormal.getRows();
5536  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
5537 
5538  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
5539  computeVVSInteractionMatrixAndResidu(ptr_I);
5540 
5541  bool reStartFromLastIncrement = false;
5542  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
5543 
5544 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5545  if (reStartFromLastIncrement) {
5546  if (m_trackerType & KLT_TRACKER) {
5547  ctTc0 = ctTc0_Prev;
5548  }
5549  }
5550 #endif
5551 
5552  if (!reStartFromLastIncrement) {
5553  computeVVSWeights();
5554 
5555  if (computeCovariance) {
5556  L_true = m_L;
5557  if (!isoJoIdentity_) {
5558  vpVelocityTwistMatrix cVo;
5559  cVo.buildFrom(m_cMo);
5560  LVJ_true = (m_L * cVo * oJo);
5561  }
5562  }
5563 
5564  vpVelocityTwistMatrix cVo;
5565  if (iter == 0) {
5566  isoJoIdentity_ = true;
5567  oJo.eye();
5568 
5569  // If all the 6 dof should be estimated, we check if the interaction
5570  // matrix is full rank. If not we remove automatically the dof that
5571  // cannot be estimated. This is particularly useful when considering
5572  // circles (rank 5) and cylinders (rank 4)
5573  if (isoJoIdentity_) {
5574  cVo.buildFrom(m_cMo);
5575 
5576  vpMatrix K; // kernel
5577  unsigned int rank = (m_L * cVo).kernel(K);
5578  if (rank == 0) {
5579  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
5580  }
5581 
5582  if (rank != 6) {
5583  vpMatrix I; // Identity
5584  I.eye(6);
5585  oJo = I - K.AtA();
5586 
5587  isoJoIdentity_ = false;
5588  }
5589  }
5590  }
5591 
5592  // Weighting
5593  double num = 0;
5594  double den = 0;
5595 
5596  unsigned int start_index = 0;
5597  if (m_trackerType & EDGE_TRACKER) {
5598  for (unsigned int i = 0; i < nb_edge_features; i++) {
5599  double wi = m_w_edge[i] * m_factor[i] * factorEdge;
5600  W_true[i] = wi;
5601  m_weightedError[i] = wi * m_error[i];
5602 
5603  num += wi * vpMath::sqr(m_error[i]);
5604  den += wi;
5605 
5606  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5607  m_L[i][j] *= wi;
5608  }
5609  }
5610 
5611  start_index += nb_edge_features;
5612  }
5613 
5614 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5615  if (m_trackerType & KLT_TRACKER) {
5616  for (unsigned int i = 0; i < nb_klt_features; i++) {
5617  double wi = m_w_klt[i] * factorKlt;
5618  W_true[start_index + i] = wi;
5619  m_weightedError[start_index + i] = wi * m_error_klt[i];
5620 
5621  num += wi * vpMath::sqr(m_error[start_index + i]);
5622  den += wi;
5623 
5624  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5625  m_L[start_index + i][j] *= wi;
5626  }
5627  }
5628 
5629  start_index += nb_klt_features;
5630  }
5631 #endif
5632 
5633  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5634  for (unsigned int i = 0; i < nb_depth_features; i++) {
5635  double wi = m_w_depthNormal[i] * factorDepth;
5636  m_w[start_index + i] = m_w_depthNormal[i];
5637  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5638 
5639  num += wi * vpMath::sqr(m_error[start_index + i]);
5640  den += wi;
5641 
5642  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5643  m_L[start_index + i][j] *= wi;
5644  }
5645  }
5646 
5647  start_index += nb_depth_features;
5648  }
5649 
5650  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5651  for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
5652  double wi = m_w_depthDense[i] * factorDepthDense;
5653  m_w[start_index + i] = m_w_depthDense[i];
5654  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5655 
5656  num += wi * vpMath::sqr(m_error[start_index + i]);
5657  den += wi;
5658 
5659  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5660  m_L[start_index + i][j] *= wi;
5661  }
5662  }
5663 
5664  // start_index += nb_depth_dense_features;
5665  }
5666 
5667  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
5668 
5669  cMo_prev = m_cMo;
5670 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5671  if (m_trackerType & KLT_TRACKER) {
5672  ctTc0_Prev = ctTc0;
5673  }
5674 #endif
5675 
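  // Pose update of the virtual visual servoing loop: v is the 6-dof velocity twist solved from
  // the stacked weighted system; the pose is left-composed with the inverse of its exponential
  // map, i.e. cMo <- exp(v)^-1 * cMo (ctTc0 is updated the same way for the KLT part below).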
5676  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
5677 
5678 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5679  if (m_trackerType & KLT_TRACKER) {
5680  ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
5681  }
5682 #endif
5683  normRes_1 = normRes;
5684 
5685  normRes = sqrt(num / den);
5686  }
5687 
5688  iter++;
5689  }
5690 
5691  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
5692 
5693  if (m_trackerType & EDGE_TRACKER) {
5694  vpMbEdgeTracker::updateMovingEdgeWeights();
5695  }
5696 }
5697 
5698 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
5699 {
5700  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
5701  "TrackerWrapper::computeVVSInit("
5702  ") should not be called!");
5703 }
5704 
5705 void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
5706 {
5707  initMbtTracking(ptr_I);
5708 
5709  unsigned int nbFeatures = 0;
5710 
5711  if (m_trackerType & EDGE_TRACKER) {
5712  nbFeatures += m_error_edge.getRows();
5713  } else {
5714  m_error_edge.clear();
5715  m_weightedError_edge.clear();
5716  m_L_edge.clear();
5717  m_w_edge.clear();
5718  }
5719 
5720 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5721  if (m_trackerType & KLT_TRACKER) {
5722  vpMbKltTracker::computeVVSInit();
5723  nbFeatures += m_error_klt.getRows();
5724  } else {
5725  m_error_klt.clear();
5726  m_weightedError_klt.clear();
5727  m_L_klt.clear();
5728  m_w_klt.clear();
5729  }
5730 #endif
5731 
5732  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5733  vpMbDepthNormalTracker::computeVVSInit();
5734  nbFeatures += m_error_depthNormal.getRows();
5735  } else {
5736  m_error_depthNormal.clear();
5737  m_weightedError_depthNormal.clear();
5738  m_L_depthNormal.clear();
5739  m_w_depthNormal.clear();
5740  }
5741 
5742  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5743  vpMbDepthDenseTracker::computeVVSInit();
5744  nbFeatures += m_error_depthDense.getRows();
5745  } else {
5746  m_error_depthDense.clear();
5747  m_weightedError_depthDense.clear();
5748  m_L_depthDense.clear();
5749  m_w_depthDense.clear();
5750  }
5751 
5752  m_L.resize(nbFeatures, 6, false, false);
5753  m_error.resize(nbFeatures, false);
5754 
5755  m_weightedError.resize(nbFeatures, false);
5756  m_w.resize(nbFeatures, false);
5757  m_w = 1;
5758 }
5759 
5760 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu()
5761 {
5762  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
5763  "TrackerWrapper::"
5764  "computeVVSInteractionMatrixAndR"
5765  "esidu() should not be called!");
5766 }
5767 
5768 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu(const vpImage<unsigned char> *const ptr_I)
5769 {
5770  if (m_trackerType & EDGE_TRACKER) {
5771  vpMbEdgeTracker::computeVVSInteractionMatrixAndResidu(*ptr_I);
5772  }
5773 
5774 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5775  if (m_trackerType & KLT_TRACKER) {
5776  vpMbKltTracker::computeVVSInteractionMatrixAndResidu();
5777  }
5778 #endif
5779 
5780  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5781  vpMbDepthNormalTracker::computeVVSInteractionMatrixAndResidu();
5782  }
5783 
5784  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5785  vpMbDepthDenseTracker::computeVVSInteractionMatrixAndResidu();
5786  }
5787 
5788  unsigned int start_index = 0;
5789  if (m_trackerType & EDGE_TRACKER) {
5790  m_L.insert(m_L_edge, start_index, 0);
5791  m_error.insert(start_index, m_error_edge);
5792 
5793  start_index += m_error_edge.getRows();
5794  }
5795 
5796 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5797  if (m_trackerType & KLT_TRACKER) {
5798  m_L.insert(m_L_klt, start_index, 0);
5799  m_error.insert(start_index, m_error_klt);
5800 
5801  start_index += m_error_klt.getRows();
5802  }
5803 #endif
5804 
5805  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5806  m_L.insert(m_L_depthNormal, start_index, 0);
5807  m_error.insert(start_index, m_error_depthNormal);
5808 
5809  start_index += m_error_depthNormal.getRows();
5810  }
5811 
5812  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5813  m_L.insert(m_L_depthDense, start_index, 0);
5814  m_error.insert(start_index, m_error_depthDense);
5815 
5816  // start_index += m_error_depthDense.getRows();
5817  }
5818 }
5819 
5820 void vpMbGenericTracker::TrackerWrapper::computeVVSWeights()
5821 {
5822  unsigned int start_index = 0;
5823 
5824  if (m_trackerType & EDGE_TRACKER) {
5825  vpMbEdgeTracker::computeVVSWeights();
5826  m_w.insert(start_index, m_w_edge);
5827 
5828  start_index += m_w_edge.getRows();
5829  }
5830 
5831 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5832  if (m_trackerType & KLT_TRACKER) {
5833  vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
5834  m_w.insert(start_index, m_w_klt);
5835 
5836  start_index += m_w_klt.getRows();
5837  }
5838 #endif
5839 
5840  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5841  if (m_depthNormalUseRobust) {
5842  vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
5843  m_w.insert(start_index, m_w_depthNormal);
5844  }
5845 
5846  start_index += m_w_depthNormal.getRows();
5847  }
5848 
5849  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5850  vpMbDepthDenseTracker::computeVVSWeights();
5851  m_w.insert(start_index, m_w_depthDense);
5852 
5853  // start_index += m_w_depthDense.getRows();
5854  }
5855 }
5856 
5857 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
5858  const vpCameraParameters &cam, const vpColor &col,
5859  unsigned int thickness, bool displayFullModel)
5860 {
5861  if (displayFeatures) {
5862  std::vector<std::vector<double> > features = getFeaturesForDisplay();
5863  for (size_t i = 0; i < features.size(); i++) {
5864  if (vpMath::equal(features[i][0], 0)) {
5865  vpImagePoint ip(features[i][1], features[i][2]);
5866  int state = static_cast<int>(features[i][3]);
5867 
5868  switch (state) {
5869  case vpMeSite::NO_SUPPRESSION:
5870  vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
5871  break;
5872 
5873  case vpMeSite::CONSTRAST:
5874  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
5875  break;
5876 
5877  case vpMeSite::THRESHOLD:
5878  vpDisplay::displayCross(I, ip, 3, vpColor::purple, 1);
5879  break;
5880 
5881  case vpMeSite::M_ESTIMATOR:
5882  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
5883  break;
5884 
5885  case vpMeSite::TOO_NEAR:
5886  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
5887  break;
5888 
5889  default:
5890  vpDisplay::displayCross(I, ip, 3, vpColor::yellow, 1);
5891  }
5892  } else if (vpMath::equal(features[i][0], 1)) {
5893  vpImagePoint ip1(features[i][1], features[i][2]);
5894  vpDisplay::displayCross(I, ip1, 10, vpColor::red);
5895 
5896  vpImagePoint ip2(features[i][3], features[i][4]);
5897  double id = features[i][5];
5898  std::stringstream ss;
5899  ss << id;
5900  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
5901  } else if (vpMath::equal(features[i][0], 2)) {
5902  vpImagePoint im_centroid(features[i][1], features[i][2]);
5903  vpImagePoint im_extremity(features[i][3], features[i][4]);
5904  bool desired = vpMath::equal(features[i][0], 2);
5905  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
5906  }
5907  }
5908  }
5909 
5910  std::vector<std::vector<double> > models =
5911  getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
5912  for (size_t i = 0; i < models.size(); i++) {
5913  if (vpMath::equal(models[i][0], 0)) {
5914  vpImagePoint ip1(models[i][1], models[i][2]);
5915  vpImagePoint ip2(models[i][3], models[i][4]);
5916  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
5917  } else if (vpMath::equal(models[i][0], 1)) {
5918  vpImagePoint center(models[i][1], models[i][2]);
5919  double n20 = models[i][3];
5920  double n11 = models[i][4];
5921  double n02 = models[i][5];
5922  vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
5923  }
5924  }
5925 
5926 #ifdef VISP_HAVE_OGRE
5927  if ((m_trackerType & EDGE_TRACKER)
5928 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5929  || (m_trackerType & KLT_TRACKER)
5930 #endif
5931  ) {
5932  if (useOgre)
5933  faces.displayOgre(cMo);
5934  }
5935 #endif
5936 }
5937 
5938 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
5939  const vpCameraParameters &cam, const vpColor &col,
5940  unsigned int thickness, bool displayFullModel)
5941 {
5942  if (displayFeatures) {
5943  std::vector<std::vector<double> > features = getFeaturesForDisplay();
5944  for (size_t i = 0; i < features.size(); i++) {
5945  if (vpMath::equal(features[i][0], 0)) {
5946  vpImagePoint ip(features[i][1], features[i][2]);
5947  int state = static_cast<int>(features[i][3]);
5948 
5949  switch (state) {
5950  case vpMeSite::NO_SUPPRESSION:
5951  vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
5952  break;
5953 
5954  case vpMeSite::CONSTRAST:
5955  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
5956  break;
5957 
5958  case vpMeSite::THRESHOLD:
5959  vpDisplay::displayCross(I, ip, 3, vpColor::purple, 1);
5960  break;
5961 
5962  case vpMeSite::M_ESTIMATOR:
5963  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
5964  break;
5965 
5966  case vpMeSite::TOO_NEAR:
5967  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
5968  break;
5969 
5970  default:
5971  vpDisplay::displayCross(I, ip, 3, vpColor::yellow, 1);
5972  }
5973  } else if (vpMath::equal(features[i][0], 1)) {
5974  vpImagePoint ip1(features[i][1], features[i][2]);
5975  vpDisplay::displayCross(I, ip1, 10, vpColor::red);
5976 
5977  vpImagePoint ip2(features[i][3], features[i][4]);
5978  double id = features[i][5];
5979  std::stringstream ss;
5980  ss << id;
5981  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
5982  } else if (vpMath::equal(features[i][0], 2)) {
5983  vpImagePoint im_centroid(features[i][1], features[i][2]);
5984  vpImagePoint im_extremity(features[i][3], features[i][4]);
5985  bool desired = vpMath::equal(features[i][0], 2);
5986  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
5987  }
5988  }
5989  }
5990 
5991  std::vector<std::vector<double> > models =
5992  getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
5993  for (size_t i = 0; i < models.size(); i++) {
5994  if (vpMath::equal(models[i][0], 0)) {
5995  vpImagePoint ip1(models[i][1], models[i][2]);
5996  vpImagePoint ip2(models[i][3], models[i][4]);
5997  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
5998  } else if (vpMath::equal(models[i][0], 1)) {
5999  vpImagePoint center(models[i][1], models[i][2]);
6000  double n20 = models[i][3];
6001  double n11 = models[i][4];
6002  double n02 = models[i][5];
6003  vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
6004  }
6005  }
6006 
6007 #ifdef VISP_HAVE_OGRE
6008  if ((m_trackerType & EDGE_TRACKER)
6009 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6010  || (m_trackerType & KLT_TRACKER)
6011 #endif
6012  ) {
6013  if (useOgre)
6014  faces.displayOgre(cMo);
6015  }
6016 #endif
6017 }
6018 
6019 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6020 {
6021  std::vector<std::vector<double> > features;
6022 
6023  if (m_trackerType & EDGE_TRACKER) {
6024  // m_featuresToBeDisplayedEdge updated after computeVVS()
6025  features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6026  }
6027 
6028 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6029  if (m_trackerType & KLT_TRACKER) {
6030  // m_featuresToBeDisplayedKlt updated after postTracking()
6031  features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6032  }
6033 #endif
6034 
6035  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6036  // m_featuresToBeDisplayedDepthNormal updated after postTracking()
6037  features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6038  m_featuresToBeDisplayedDepthNormal.end());
6039  }
6040 
6041  return features;
6042 }
6043 
6044 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width,
6045  unsigned int height,
6046  const vpHomogeneousMatrix &cMo,
6047  const vpCameraParameters &cam,
6048  bool displayFullModel)
6049 {
6050  std::vector<std::vector<double> > models;
6051 
6052  // Do not add the same models multiple times
6053  if (m_trackerType == EDGE_TRACKER) {
6054  models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6055  }
6056 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6057  else if (m_trackerType == KLT_TRACKER) {
6058  models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6059  }
6060 #endif
6061  else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6062  models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6063  } else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6064  models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6065  } else {
6066  // Edge and KLT trackers use the same primitives
6067  if (m_trackerType & EDGE_TRACKER) {
6068  std::vector<std::vector<double> > edgeModels =
6069  vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6070  models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6071  }
6072 
6073  // Depth dense and depth normal trackers use the same primitives
6074  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6075  std::vector<std::vector<double> > depthDenseModels =
6076  vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6077  models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6078  }
6079  }
6080 
6081  return models;
6082 }
6083 
6084 void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
6085 {
6086  if (!modelInitialised) {
6087  throw vpException(vpException::fatalError, "model not initialized");
6088  }
6089 
6090  if (useScanLine || clippingFlag > 3)
6091  m_cam.computeFov(I.getWidth(), I.getHeight());
6092 
6093  bool reInitialisation = false;
6094  if (!useOgre) {
6095  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6096  } else {
6097 #ifdef VISP_HAVE_OGRE
6098  if (!faces.isOgreInitialised()) {
6099  faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());
6100 
6101  faces.setOgreShowConfigDialog(ogreShowConfigDialog);
6102  faces.initOgre(m_cam);
6103  // Turn off Ogre config dialog display for the next call to this
6104  // function since settings are saved in the ogre.cfg file and used
6105  // during the next call
6106  ogreShowConfigDialog = false;
6107  }
6108 
6109  faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6110 #else
6111  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6112 #endif
6113  }
6114 
6115  if (useScanLine) {
6116  faces.computeClippedPolygons(m_cMo, m_cam);
6117  faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight());
6118  }
6119 
6120 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6121  if (m_trackerType & KLT_TRACKER)
6122  vpMbKltTracker::reinit(I);
6123 #endif
6124 
6125  if (m_trackerType & EDGE_TRACKER) {
6126  vpMbEdgeTracker::resetMovingEdge();
6127 
6128  bool a = false;
6129  vpMbEdgeTracker::visibleFace(I, m_cMo, a); // should be useless, but keep it for nbvisiblepolygone
6130 
6132  }
6133 
6134  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6135  vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);
6136 
6137  if (m_trackerType & DEPTH_DENSE_TRACKER)
6138  vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
6139 }
6140 
6141 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6142  double radius, int idFace, const std::string &name)
6143 {
6144  if (m_trackerType & EDGE_TRACKER)
6145  vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6146 
6147 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6148  if (m_trackerType & KLT_TRACKER)
6149  vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6150 #endif
6151 }
6152 
6153 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace,
6154  const std::string &name)
6155 {
6156  if (m_trackerType & EDGE_TRACKER)
6157  vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6158 
6159 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6160  if (m_trackerType & KLT_TRACKER)
6161  vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6162 #endif
6163 }
6164 
6165 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
6166 {
6167  if (m_trackerType & EDGE_TRACKER)
6168  vpMbEdgeTracker::initFaceFromCorners(polygon);
6169 
6170 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6171  if (m_trackerType & KLT_TRACKER)
6172  vpMbKltTracker::initFaceFromCorners(polygon);
6173 #endif
6174 
6175  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6176  vpMbDepthNormalTracker::initFaceFromCorners(polygon);
6177 
6178  if (m_trackerType & DEPTH_DENSE_TRACKER)
6179  vpMbDepthDenseTracker::initFaceFromCorners(polygon);
6180 }
6181 
6182 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
6183 {
6184  if (m_trackerType & EDGE_TRACKER)
6185  vpMbEdgeTracker::initFaceFromLines(polygon);
6186 
6187 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6188  if (m_trackerType & KLT_TRACKER)
6189  vpMbKltTracker::initFaceFromLines(polygon);
6190 #endif
6191 
6192  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6193  vpMbDepthNormalTracker::initFaceFromLines(polygon);
6194 
6195  if (m_trackerType & DEPTH_DENSE_TRACKER)
6196  vpMbDepthDenseTracker::initFaceFromLines(polygon);
6197 }
6198 
6199 void vpMbGenericTracker::TrackerWrapper::initMbtTracking(const vpImage<unsigned char> *const ptr_I)
6200 {
6201  if (m_trackerType & EDGE_TRACKER) {
6202  vpMbEdgeTracker::computeVVSInit();
6203  vpMbEdgeTracker::computeVVSFirstPhaseFactor(*ptr_I, 0);
6204  }
6205 }
6206 
6207 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile, bool verbose)
6208 {
6209  // Load projection error config
6210  vpMbTracker::loadConfigFile(configFile, verbose);
6211 
6212  vpMbtXmlGenericParser xmlp((vpMbtXmlGenericParser::vpParserType)m_trackerType);
6213  xmlp.setVerbose(verbose);
6214  xmlp.setCameraParameters(m_cam);
6215  xmlp.setAngleAppear(vpMath::deg(angleAppears));
6216  xmlp.setAngleDisappear(vpMath::deg(angleDisappears));
6217 
6218  // Edge
6219  xmlp.setEdgeMe(me);
6220 
6221  // KLT
6222  xmlp.setKltMaxFeatures(10000);
6223  xmlp.setKltWindowSize(5);
6224  xmlp.setKltQuality(0.01);
6225  xmlp.setKltMinDistance(5);
6226  xmlp.setKltHarrisParam(0.01);
6227  xmlp.setKltBlockSize(3);
6228  xmlp.setKltPyramidLevels(3);
6229 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6230  xmlp.setKltMaskBorder(maskBorder);
6231 #endif
6232 
6233  // Depth normal
6234  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6235  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6236  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6237  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6238  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6239  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6240 
6241  // Depth dense
6242  xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6243  xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6244 
6245  try {
6246  if (verbose) {
6247  std::cout << " *********** Parsing XML for";
6248  }
6249 
6250  std::vector<std::string> tracker_names;
6251  if (m_trackerType & EDGE_TRACKER)
6252  tracker_names.push_back("Edge");
6253 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6254  if (m_trackerType & KLT_TRACKER)
6255  tracker_names.push_back("Klt");
6256 #endif
6257  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6258  tracker_names.push_back("Depth Normal");
6259  if (m_trackerType & DEPTH_DENSE_TRACKER)
6260  tracker_names.push_back("Depth Dense");
6261 
6262  if (verbose) {
6263  for (size_t i = 0; i < tracker_names.size(); i++) {
6264  std::cout << " " << tracker_names[i];
6265  if (i == tracker_names.size() - 1) {
6266  std::cout << " ";
6267  }
6268  }
6269 
6270  std::cout << "Model-Based Tracker ************ " << std::endl;
6271  }
6272 
6273  xmlp.parse(configFile);
6274  } catch (...) {
6275  throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
6276  }
6277 
6278  vpCameraParameters camera;
6279  xmlp.getCameraParameters(camera);
6280  setCameraParameters(camera);
6281 
6282  angleAppears = vpMath::rad(xmlp.getAngleAppear());
6283  angleDisappears = vpMath::rad(xmlp.getAngleDisappear());
6284 
6285  if (xmlp.hasNearClippingDistance())
6286  setNearClippingDistance(xmlp.getNearClippingDistance());
6287 
6288  if (xmlp.hasFarClippingDistance())
6289  setFarClippingDistance(xmlp.getFarClippingDistance());
6290 
6291  if (xmlp.getFovClipping()) {
6292  setClipping(clippingFlag | vpPolygon3D::FOV_CLIPPING);
6293  }
6294 
6295  useLodGeneral = xmlp.getLodState();
6296  minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
6297  minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();
6298 
6299  applyLodSettingInConfig = false;
6300  if (this->getNbPolygon() > 0) {
6301  applyLodSettingInConfig = true;
6302  setLod(useLodGeneral);
6303  setMinLineLengthThresh(minLineLengthThresholdGeneral);
6304  setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
6305  }
6306 
6307  // Edge
6308  vpMe meParser;
6309  xmlp.getEdgeMe(meParser);
6310  vpMbEdgeTracker::setMovingEdge(meParser);
6311 
6312 // KLT
6313 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6314  tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
6315  tracker.setWindowSize((int)xmlp.getKltWindowSize());
6316  tracker.setQuality(xmlp.getKltQuality());
6317  tracker.setMinDistance(xmlp.getKltMinDistance());
6318  tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6319  tracker.setBlockSize((int)xmlp.getKltBlockSize());
6320  tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
6321  maskBorder = xmlp.getKltMaskBorder();
6322 
6323  // if(useScanLine)
6324  faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6325 #endif
6326 
6327  // Depth normal
6328  setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
6329  setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
6330  setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
6331  setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
6332  setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
6333 
6334  // Depth dense
6335  setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
6336 }
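// Usage sketch (illustration only): the wrapper above loads the per-camera settings (camera
// parameters, moving-edge, KLT, depth-normal, depth-dense and clipping parameters) from a ViSP
// XML configuration file. The file names are hypothetical.
//
//   tracker.loadConfigFile("object.xml");            // single-camera configuration
//   tracker.loadConfigFile("cam1.xml", "cam2.xml");  // stereo configuration, one file per camera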
6337 
6338 #ifdef VISP_HAVE_PCL
6339 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6340  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6341 {
6342 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6343  // KLT
6344  if (m_trackerType & KLT_TRACKER) {
6345  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6346  vpMbKltTracker::reinit(*ptr_I);
6347  }
6348  }
6349 #endif
6350 
6351  // Looking for new visible face
6352  if (m_trackerType & EDGE_TRACKER) {
6353  bool newvisibleface = false;
6354  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6355 
6356  if (useScanLine) {
6357  faces.computeClippedPolygons(m_cMo, m_cam);
6358  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6359  }
6360  }
6361 
6362  // Depth normal
6363  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6364  vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);
6365 
6366  // Depth dense
6367  if (m_trackerType & DEPTH_DENSE_TRACKER)
6368  vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);
6369 
6370  // Edge
6371  if (m_trackerType & EDGE_TRACKER) {
6372  vpMbEdgeTracker::updateMovingEdge(*ptr_I);
6373 
6374  vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6375  // Reinit the moving edge for the lines which need it.
6376  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6377 
6378  if (computeProjError) {
6379  vpMbEdgeTracker::computeProjectionError(*ptr_I);
6380  }
6381  }
6382 }
6383 
6384 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
6385  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6386 {
6387  if (m_trackerType & EDGE_TRACKER) {
6388  try {
6389  vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6390  } catch (...) {
6391  std::cerr << "Error in moving edge tracking" << std::endl;
6392  throw;
6393  }
6394  }
6395 
6396 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6397  if (m_trackerType & KLT_TRACKER) {
6398  try {
6399  vpMbKltTracker::preTracking(*ptr_I);
6400  } catch (const vpException &e) {
6401  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6402  throw;
6403  }
6404  }
6405 #endif
6406 
6407  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6408  try {
6409  vpMbDepthNormalTracker::segmentPointCloud(point_cloud);
6410  } catch (...) {
6411  std::cerr << "Error in Depth normal tracking" << std::endl;
6412  throw;
6413  }
6414  }
6415 
6416  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6417  try {
6418  vpMbDepthDenseTracker::segmentPointCloud(point_cloud);
6419  } catch (...) {
6420  std::cerr << "Error in Depth dense tracking" << std::endl;
6421  throw;
6422  }
6423  }
6424 }
6425 #endif
6426 
6427 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6428  const unsigned int pointcloud_width,
6429  const unsigned int pointcloud_height)
6430 {
6431 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6432  // KLT
6433  if (m_trackerType & KLT_TRACKER) {
6434  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6435  vpMbKltTracker::reinit(*ptr_I);
6436  }
6437  }
6438 #endif
6439 
6440  // Looking for new visible face
6441  if (m_trackerType & EDGE_TRACKER) {
6442  bool newvisibleface = false;
6443  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6444 
6445  if (useScanLine) {
6446  faces.computeClippedPolygons(m_cMo, m_cam);
6447  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6448  }
6449  }
6450 
6451  // Depth normal
6452  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6453  vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);
6454 
6455  // Depth dense
6456  if (m_trackerType & DEPTH_DENSE_TRACKER)
6457  vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);
6458 
6459  // Edge
6460  if (m_trackerType & EDGE_TRACKER) {
6461  vpMbEdgeTracker::updateMovingEdge(*ptr_I);
6462 
6463  vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6464  // Reinit the moving edge for the lines which need it.
6465  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6466 
6467  if (computeProjError) {
6468  vpMbEdgeTracker::computeProjectionError(*ptr_I);
6469  }
6470  }
6471 }
6472 
6473 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
6474  const std::vector<vpColVector> *const point_cloud,
6475  const unsigned int pointcloud_width,
6476  const unsigned int pointcloud_height)
6477 {
6478  if (m_trackerType & EDGE_TRACKER) {
6479  try {
6480  vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6481  } catch (...) {
6482  std::cerr << "Error in moving edge tracking" << std::endl;
6483  throw;
6484  }
6485  }
6486 
6487 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6488  if (m_trackerType & KLT_TRACKER) {
6489  try {
6490  vpMbKltTracker::preTracking(*ptr_I);
6491  } catch (const vpException &e) {
6492  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6493  throw;
6494  }
6495  }
6496 #endif
6497 
6498  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6499  try {
6500  vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6501  } catch (...) {
6502  std::cerr << "Error in Depth normal tracking" << std::endl;
6503  throw;
6504  }
6505  }
6506 
6507  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6508  try {
6509  vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6510  } catch (...) {
6511  std::cerr << "Error in Depth dense tracking" << std::endl;
6512  throw;
6513  }
6514  }
6515 }
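// Note (illustrative, not part of the original source): when PCL is not used, the depth data reach
// preTracking()/segmentPointCloud() as a std::vector<vpColVector>. Each element is assumed to be a
// 3-vector (X, Y, Z) expressed in the depth camera frame, with pointcloud_width * pointcloud_height
// entries stored row by row (index = v * width + u). A minimal sketch of filling such a buffer from
// a hypothetical depth map, using the calibrated depth intrinsics "cam":
//
//   std::vector<vpColVector> pointcloud(height * width, vpColVector(3));
//   for (unsigned int v = 0; v < height; v++) {
//     for (unsigned int u = 0; u < width; u++) {
//       double Z = depth_map[v][u];                             // hypothetical depth image (m)
//       double x = 0, y = 0;
//       vpPixelMeterConversion::convertPoint(cam, u, v, x, y);  // normalized image coordinates
//       pointcloud[v * width + u][0] = x * Z;
//       pointcloud[v * width + u][1] = y * Z;
//       pointcloud[v * width + u][2] = Z;
//     }
//   }
//   // The buffer is then passed, together with its width and height, to the map-based
//   // vpMbGenericTracker::track() overload described in the ViSP RGB-D tutorials.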
6516 
6517 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> *const I,
6518  const vpImage<vpRGBa> *const I_color, const std::string &cad_name,
6519  const vpHomogeneousMatrix &cMo, bool verbose,
6520  const vpHomogeneousMatrix &T)
6521 {
6522  m_cMo.eye();
6523 
6524  // Edge
6525  vpMbtDistanceLine *l;
6526  vpMbtDistanceCylinder *cy;
6527  vpMbtDistanceCircle *ci;
6528 
6529  for (unsigned int i = 0; i < scales.size(); i++) {
6530  if (scales[i]) {
6531  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
6532  l = *it;
6533  if (l != NULL)
6534  delete l;
6535  l = NULL;
6536  }
6537 
6538  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
6539  ++it) {
6540  cy = *it;
6541  if (cy != NULL)
6542  delete cy;
6543  cy = NULL;
6544  }
6545 
6546  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
6547  ci = *it;
6548  if (ci != NULL)
6549  delete ci;
6550  ci = NULL;
6551  }
6552 
6553  lines[i].clear();
6554  cylinders[i].clear();
6555  circles[i].clear();
6556  }
6557  }
6558 
6559  nline = 0;
6560  ncylinder = 0;
6561  ncircle = 0;
6562  nbvisiblepolygone = 0;
6563 
6564 // KLT
6565 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6566 #if (VISP_HAVE_OPENCV_VERSION < 0x020408)
6567  if (cur != NULL) {
6568  cvReleaseImage(&cur);
6569  cur = NULL;
6570  }
6571 #endif
6572 
6573  // delete the Klt Polygon features
6574  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
6575  vpMbtDistanceKltPoints *kltpoly = *it;
6576  if (kltpoly != NULL) {
6577  delete kltpoly;
6578  }
6579  kltpoly = NULL;
6580  }
6581  kltPolygons.clear();
6582 
6583  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
6584  ++it) {
6585  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
6586  if (kltPolyCylinder != NULL) {
6587  delete kltPolyCylinder;
6588  }
6589  kltPolyCylinder = NULL;
6590  }
6591  kltCylinders.clear();
6592 
6593  // delete the structures used to display circles
6594  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
6595  ci = *it;
6596  if (ci != NULL) {
6597  delete ci;
6598  }
6599  ci = NULL;
6600  }
6601  circles_disp.clear();
6602 
6603  firstInitialisation = true;
6604 
6605 #endif
6606 
6607  // Depth normal
6608  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
6609  delete m_depthNormalFaces[i];
6610  m_depthNormalFaces[i] = NULL;
6611  }
6612  m_depthNormalFaces.clear();
6613 
6614  // Depth dense
6615  for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
6616  delete m_depthDenseFaces[i];
6617  m_depthDenseFaces[i] = NULL;
6618  }
6619  m_depthDenseFaces.clear();
6620 
6621  faces.reset();
6622 
6623  loadModel(cad_name, verbose, T);
6624  if (I) {
6625  initFromPose(*I, cMo);
6626  } else {
6627  initFromPose(*I_color, cMo);
6628  }
6629 }
6630 
6631 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
6632  const vpHomogeneousMatrix &cMo, bool verbose,
6633  const vpHomogeneousMatrix &T)
6634 {
6635  reInitModel(&I, NULL, cad_name, cMo, verbose, T);
6636 }
6637 
6638 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
6639  const vpHomogeneousMatrix &cMo, bool verbose,
6640  const vpHomogeneousMatrix &T)
6641 {
6642  reInitModel(NULL, &I_color, cad_name, cMo, verbose, T);
6643 }
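// Note (illustrative, not part of the original source): reInitModel() lets a user swap the CAD
// model at run time while keeping (or re-injecting) a pose. A minimal sketch on the public
// vpMbGenericTracker interface, with a hypothetical model file name:
//
//   vpHomogeneousMatrix cMo;
//   tracker.getPose(cMo);                                 // current estimate
//   tracker.reInitModel(I, "new_model.cao", cMo, false);  // reload the model, restart from cMo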
6644 
6645 void vpMbGenericTracker::TrackerWrapper::resetTracker()
6646 {
6647  vpMbEdgeTracker::resetTracker();
6648 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6649  vpMbKltTracker::resetTracker();
6650 #endif
6651  vpMbDepthNormalTracker::resetTracker();
6652  vpMbDepthDenseTracker::resetTracker();
6653 }
6654 
6655 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
6656 {
6657  m_cam = cam;
6658 
6659  vpMbEdgeTracker::setCameraParameters(m_cam);
6660 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6661  vpMbKltTracker::setCameraParameters(m_cam);
6662 #endif
6663  vpMbDepthNormalTracker::setCameraParameters(m_cam);
6664  vpMbDepthDenseTracker::setCameraParameters(m_cam);
6665 }
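// Note (illustrative, not part of the original source): the wrapper forwards the intrinsics to every
// enabled sub-tracker, so a single call on the public tracker is enough. A minimal sketch with
// hypothetical intrinsics:
//
//   vpCameraParameters cam;
//   cam.initPersProjWithoutDistortion(600.0, 600.0, 320.0, 240.0); // px, py, u0, v0 (hypothetical)
//   tracker.setCameraParameters(cam);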
6666 
6667 void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags) { vpMbEdgeTracker::setClipping(flags); }
6668 
6669 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
6670 {
6671  vpMbEdgeTracker::setFarClippingDistance(dist);
6672 }
6673 
6674 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
6675 {
6676  vpMbEdgeTracker::setNearClippingDistance(dist);
6677 }
6678 
6679 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
6680 {
6681  vpMbTracker::setOgreVisibilityTest(v);
6682 #ifdef VISP_HAVE_OGRE
6683  faces.getOgreContext()->setWindowName("TrackerWrapper");
6684 #endif
6685 }
6686 
6687 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> *const I,
6688  const vpImage<vpRGBa> *const I_color, const vpHomogeneousMatrix &cdMo)
6689 {
6690  bool performKltSetPose = false;
6691  if (I_color) {
6692  vpImageConvert::convert(*I_color, m_I);
6693  }
6694 
6695 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6696  if (m_trackerType & KLT_TRACKER) {
6697  performKltSetPose = true;
6698 
6699  if (useScanLine || clippingFlag > 3) {
6700  m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6701  }
6702 
6703  vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
6704  }
6705 #endif
6706 
6707  if (!performKltSetPose) {
6708  m_cMo = cdMo;
6709  init(I ? *I : m_I);
6710  return;
6711  }
6712 
6713  if (m_trackerType & EDGE_TRACKER) {
6714  resetMovingEdge();
6715  }
6716 
6717  if (useScanLine) {
6718  faces.computeClippedPolygons(m_cMo, m_cam);
6719  faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6720  }
6721 
6722 #if 0
6723  if (m_trackerType & EDGE_TRACKER) {
6724  initPyramid(I, Ipyramid);
6725 
6726  unsigned int i = (unsigned int) scales.size();
6727  do {
6728  i--;
6729  if(scales[i]){
6730  downScale(i);
6731  vpMbEdgeTracker::initMovingEdge(*Ipyramid[i], cMo);
6732  upScale(i);
6733  }
6734  } while(i != 0);
6735 
6736  cleanPyramid(Ipyramid);
6737  }
6738 #else
6739  if (m_trackerType & EDGE_TRACKER) {
6740  vpMbEdgeTracker::initMovingEdge(I ? *I : m_I, m_cMo);
6741  }
6742 #endif
6743 
6744  // Depth normal
6745  vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6746 
6747  // Depth dense
6748  vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6749 }
6750 
6751 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
6752 {
6753  setPose(&I, NULL, cdMo);
6754 }
6755 
6756 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
6757 {
6758  setPose(NULL, &I_color, cdMo);
6759 }
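// Note (illustrative, not part of the original source): setPose() can be used to re-seed the tracker
// from an external estimate (e.g. an object detector) and then resume tracking. A minimal sketch with
// a hypothetical pose:
//
//   vpHomogeneousMatrix cMo_detected(0.1, 0.0, 0.5, 0.0, 0.0, 0.0); // hypothetical tx, ty, tz, tux, tuy, tuz
//   tracker.setPose(I, cMo_detected);
//   tracker.track(I); // tracking continues from the injected pose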
6760 
6761 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
6762 {
6763  vpMbEdgeTracker::setProjectionErrorComputation(flag);
6764 }
6765 
6766 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
6767 {
6768  vpMbEdgeTracker::setScanLineVisibilityTest(v);
6769 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6770  vpMbKltTracker::setScanLineVisibilityTest(v);
6771 #endif
6772  vpMbDepthNormalTracker::setScanLineVisibilityTest(v);
6773  vpMbDepthDenseTracker::setScanLineVisibilityTest(v);
6774 }
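// Note (illustrative, not part of the original source): scanline and Ogre3D rendering are two
// alternative ways to decide which faces are visible; both switches are exposed on the public
// tracker. A minimal sketch:
//
//   tracker.setScanLineVisibilityTest(true); // CPU scanline-based visibility
//   // or, if ViSP was built with Ogre3D support:
//   // tracker.setOgreVisibilityTest(true);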
6775 
6776 void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
6777 {
6778  if ((type & (EDGE_TRACKER |
6779 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6780  KLT_TRACKER |
6781 #endif
6782  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
6783  throw vpException(vpTrackingException::badValue, "bad value for tracker type: %d", type);
6784  }
6785 
6786  m_trackerType = type;
6787 }
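// Note (illustrative, not part of the original source): the tracker type is a bitmask, so several
// feature types can be combined for the same camera, provided the corresponding modules are
// available in the ViSP build. A minimal sketch:
//
//   tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);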
6788 
6789 void vpMbGenericTracker::TrackerWrapper::testTracking()
6790 {
6791  if (m_trackerType & EDGE_TRACKER) {
6792  vpMbEdgeTracker::testTracking();
6793  }
6794 }
6795 
6796 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> &
6797 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
6798  I
6799 #endif
6800 )
6801 {
6802  if ((m_trackerType & (EDGE_TRACKER
6803 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6804  | KLT_TRACKER
6805 #endif
6806  )) == 0) {
6807  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
6808  return;
6809  }
6810 
6811 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
6812  track(&I, nullptr);
6813 #endif
6814 }
6815 
6816 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<vpRGBa> &)
6817 {
6818  // not exposed to the public API, only for debug
6819 }
6820 
6821 #ifdef VISP_HAVE_PCL
6822 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
6823  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6824 {
6825  if ((m_trackerType & (EDGE_TRACKER |
6826 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6827  KLT_TRACKER |
6828 #endif
6829  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
6830  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
6831  return;
6832  }
6833 
6834  if (m_trackerType & (EDGE_TRACKER
6835 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6836  | KLT_TRACKER
6837 #endif
6838  ) &&
6839  ptr_I == NULL) {
6840  throw vpException(vpException::fatalError, "Image pointer is NULL!");
6841  }
6842 
6843  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) { // point_cloud == nullptr
6844  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
6845  }
6846 
6847  // Back-up cMo in case of exception
6848  vpHomogeneousMatrix cMo_1 = m_cMo;
6849  try {
6850  preTracking(ptr_I, point_cloud);
6851 
6852  try {
6853  computeVVS(ptr_I);
6854  } catch (...) {
6855  covarianceMatrix = -1;
6856  throw; // throw the original exception
6857  }
6858 
6859  if (m_trackerType == EDGE_TRACKER)
6860  testTracking();
6861 
6862  postTracking(ptr_I, point_cloud);
6863 
6864  } catch (const vpException &e) {
6865  std::cerr << "Exception: " << e.what() << std::endl;
6866  m_cMo = cMo_1;
6867  throw; // rethrowing the original exception
6868  }
6869 }
6870 #endif
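// Note (illustrative, not part of the original source): a minimal RGB-D tracking loop on the public
// vpMbGenericTracker interface, assuming ViSP was built with PCL and that images, point clouds,
// intrinsics and extrinsics are supplied by the user (file names and values below are hypothetical):
//
//   #include <visp3/mbt/vpMbGenericTracker.h>
//
//   vpImage<unsigned char> I;                                 // grayscale image of camera 1
//   pcl::PointCloud<pcl::PointXYZ>::ConstPtr pointcloud;      // point cloud of camera 2
//   vpHomogeneousMatrix c2Mc1;                                // depth-to-color extrinsics, user supplied
//
//   std::vector<int> types;
//   types.push_back(vpMbGenericTracker::EDGE_TRACKER);        // camera 1: moving edges
//   types.push_back(vpMbGenericTracker::DEPTH_DENSE_TRACKER); // camera 2: dense depth
//   vpMbGenericTracker tracker(types);
//
//   tracker.loadConfigFile("camera1.xml", "camera2.xml");     // hypothetical config files
//   tracker.loadModel("model.cao", "model.cao");              // hypothetical CAD model
//   tracker.setCameraTransformationMatrix("Camera2", c2Mc1);
//   tracker.initClick(I, "model.init", true);                 // hypothetical init file
//
//   while (acquire(I, pointcloud)) {                          // hypothetical acquisition function
//     std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
//     std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
//     mapOfImages["Camera1"] = &I;
//     mapOfPointClouds["Camera2"] = pointcloud;
//     tracker.track(mapOfImages, mapOfPointClouds);
//     vpHomogeneousMatrix cMo = tracker.getPose();
//     (void)cMo; // use the pose (display, servoing, ...)
//   }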