vpMbGenericTracker.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Generic model-based tracker.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/mbt/vpMbGenericTracker.h>
37 
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/mbt/vpMbtXmlGenericParser.h>
42 
43 vpMbGenericTracker::vpMbGenericTracker()
44  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
45  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
46  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
47 {
48  m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
49 
50  // Add default camera transformation matrix
51  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
52 
53  // Add default weighting between each feature type
54  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
55 
56 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
57  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
58 #endif
59 
60  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
61  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
62 }
63 
64 vpMbGenericTracker::vpMbGenericTracker(unsigned int nbCameras, int trackerType)
65  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
66  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
67  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
68 {
69  if (nbCameras == 0) {
70  throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
71  } else if (nbCameras == 1) {
72  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);
73 
74  // Add default camera transformation matrix
75  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
76  } else {
77  for (unsigned int i = 1; i <= nbCameras; i++) {
78  std::stringstream ss;
79  ss << "Camera" << i;
80  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);
81 
82  // Add default camera transformation matrix
83  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
84  }
85 
86  // Set by default the reference camera to the first one
87  m_referenceCameraName = m_mapOfTrackers.begin()->first;
88  }
89 
90  // Add default weighting between each feature type
91  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
92 
93 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
94  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
95 #endif
96 
97  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
98  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
99 }
100 
101 vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
102  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
103  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
104  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
105 {
106  if (trackerTypes.empty()) {
107  throw vpException(vpException::badValue, "There is no camera!");
108  }
109 
110  if (trackerTypes.size() == 1) {
111  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);
112 
113  // Add default camera transformation matrix
114  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
115  } else {
116  for (size_t i = 1; i <= trackerTypes.size(); i++) {
117  std::stringstream ss;
118  ss << "Camera" << i;
119  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);
120 
121  // Add default camera transformation matrix
122  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
123  }
124 
125  // Set by default the reference camera to the first one
126  m_referenceCameraName = m_mapOfTrackers.begin()->first;
127  }
128 
129  // Add default weighting between each feature type
130  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
131 
132 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
133  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
134 #endif
135 
136  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
137  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
138 }
139 
140 vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
141  const std::vector<int> &trackerTypes)
142  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
143  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
144  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
145 {
146  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
147  throw vpException(vpTrackingException::badValue,
148  "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
149  }
150 
151  for (size_t i = 0; i < cameraNames.size(); i++) {
152  m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);
153 
154  // Add default camera transformation matrix
155  m_mapOfCameraTransformationMatrix[cameraNames[i]] = vpHomogeneousMatrix();
156  }
157 
158  // Set by default the reference camera to the first one
159  m_referenceCameraName = m_mapOfTrackers.begin()->first;
160 
161  // Add default weighting between each feature type
162  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
163 
164 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
165  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
166 #endif
167 
168  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
169  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
170 }
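/*
  Illustrative usage sketch (added comment, not part of the original source; the camera
  names, feature combination and variable names are hypothetical): building a stereo
  tracker with the per-camera constructor above, mixing feature types per camera.
  KLT_TRACKER requires the visp_klt module built with OpenCV.

  \code
  #include <visp3/mbt/vpMbGenericTracker.h>

  std::vector<std::string> cameraNames;
  cameraNames.push_back("Camera1");
  cameraNames.push_back("Camera2");

  std::vector<int> trackerTypes;
  trackerTypes.push_back(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
  trackerTypes.push_back(vpMbGenericTracker::DEPTH_DENSE_TRACKER);

  vpMbGenericTracker tracker(cameraNames, trackerTypes);
  // The reference camera defaults to the first entry of the internal map ("Camera1" here).
  \endcode
*/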
171 
172 vpMbGenericTracker::~vpMbGenericTracker()
173 {
174  for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
175  ++it) {
176  delete it->second;
177  it->second = NULL;
178  }
179 }
180 
198 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
199  const vpCameraParameters &cam)
200 {
201  double rawTotalProjectionError = 0.0;
202  unsigned int nbTotalFeaturesUsed = 0;
203 
204  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
205  it != m_mapOfTrackers.end(); ++it) {
206  TrackerWrapper *tracker = it->second;
207 
208  unsigned int nbFeaturesUsed = 0;
209  double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
210 
211  if (nbFeaturesUsed > 0) {
212  nbTotalFeaturesUsed += nbFeaturesUsed;
213  rawTotalProjectionError += curProjError;
214  }
215  }
216 
217  if (nbTotalFeaturesUsed > 0) {
218  return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
219  }
220 
221  return 90.0;
222 }
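/*
  Illustrative sketch (added comment, not part of the original source; the tracker,
  image I and the 30 degree threshold are hypothetical): the value returned above is a
  mean angular deviation in degrees (90 when no feature was usable), so it can serve as
  a cheap tracking-quality check after track().

  \code
  tracker.track(I);

  vpHomogeneousMatrix cMo;
  vpCameraParameters cam;
  tracker.getPose(cMo);
  tracker.getCameraParameters(cam);

  double proj_error = tracker.computeCurrentProjectionError(I, cMo, cam);
  if (proj_error > 30.0) {
    std::cout << "Tracking quality is poor (projection error: " << proj_error << " deg)" << std::endl;
  }
  \endcode
*/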
223 
241 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &_cMo,
242  const vpCameraParameters &_cam)
243 {
244  vpImage<unsigned char> I;
245  vpImageConvert::convert(I_color, I); // FS: Shouldn't we use here m_I that was converted in track()?
246 
247  return computeCurrentProjectionError(I, _cMo, _cam);
248 }
249 
250 void vpMbGenericTracker::computeProjectionError()
251 {
252  if (computeProjError) {
253  double rawTotalProjectionError = 0.0;
254  unsigned int nbTotalFeaturesUsed = 0;
255 
256  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
257  it != m_mapOfTrackers.end(); ++it) {
258  TrackerWrapper *tracker = it->second;
259 
260  double curProjError = tracker->getProjectionError();
261  unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
262 
263  if (nbFeaturesUsed > 0) {
264  nbTotalFeaturesUsed += nbFeaturesUsed;
265  rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
266  }
267  }
268 
269  if (nbTotalFeaturesUsed > 0) {
270  projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
271  } else {
272  projectionError = 90.0;
273  }
274  } else {
275  projectionError = 90.0;
276  }
277 }
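/*
  Note added for clarity (not part of the original source): the aggregation above is a
  feature-count weighted mean over the per-camera trackers,

      projectionError = deg( sum_i rad(e_i) * n_i / sum_i n_i )

  where e_i is the projection error (in degrees) reported by camera i and n_i the number
  of features it used; 90 degrees is returned when no feature contributed.
*/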
278 
279 void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
280 {
281  computeVVSInit(mapOfImages);
282 
283  if (m_error.getRows() < 4) {
284  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
285  }
286 
287  double normRes = 0;
288  double normRes_1 = -1;
289  unsigned int iter = 0;
290 
291  vpMatrix LTL;
292  vpColVector LTR, v;
293  vpColVector error_prev;
294 
295  double mu = m_initialMu;
296  vpHomogeneousMatrix cMo_prev;
297 
298  bool isoJoIdentity_ = true;
299 
300  // Covariance
301  vpColVector W_true(m_error.getRows());
302  vpMatrix L_true, LVJ_true;
303 
304  // Create the map of VelocityTwistMatrices
305  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
306  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
307  it != m_mapOfCameraTransformationMatrix.end(); ++it) {
308  vpVelocityTwistMatrix cVo;
309  cVo.buildFrom(it->second);
310  mapOfVelocityTwist[it->first] = cVo;
311  }
312 
313  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
314 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
315  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
316 #endif
317  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
318  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];
319 
320  m_nb_feat_edge = 0;
321  m_nb_feat_klt = 0;
322  m_nb_feat_depthNormal = 0;
323  m_nb_feat_depthDense = 0;
324 
325  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
326  computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);
327 
328  bool reStartFromLastIncrement = false;
329  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
330  if (reStartFromLastIncrement) {
331  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
332  it != m_mapOfTrackers.end(); ++it) {
333  TrackerWrapper *tracker = it->second;
334 
335  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;
336 
337 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
338  vpHomogeneousMatrix c_curr_tTc_curr0 =
339  m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
340  tracker->ctTc0 = c_curr_tTc_curr0;
341 #endif
342  }
343  }
344 
345  if (!reStartFromLastIncrement) {
346  computeVVSWeights();
347 
348  if (computeCovariance) {
349  L_true = m_L;
350  if (!isoJoIdentity_) {
351  vpVelocityTwistMatrix cVo;
352  cVo.buildFrom(m_cMo);
353  LVJ_true = (m_L * (cVo * oJo));
354  }
355  }
356 
357  vpVelocityTwistMatrix cVo;
358  if (iter == 0) {
359  isoJoIdentity_ = true;
360  oJo.eye();
361 
362  // If all the 6 dof should be estimated, we check if the interaction
363  // matrix is full rank. If not we remove automatically the dof that
364  // cannot be estimated. This is particularly useful when considering
365  // circles (rank 5) and cylinders (rank 4)
366  if (isoJoIdentity_) {
367  cVo.buildFrom(m_cMo);
368 
369  vpMatrix K; // kernel
370  unsigned int rank = (m_L * cVo).kernel(K);
371  if (rank == 0) {
372  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
373  }
374 
375  if (rank != 6) {
376  vpMatrix I; // Identity
377  I.eye(6);
378  oJo = I - K.AtA();
379 
380  isoJoIdentity_ = false;
381  }
382  }
383  }
384 
385  // Weighting
386  double num = 0;
387  double den = 0;
388 
389  unsigned int start_index = 0;
390  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
391  it != m_mapOfTrackers.end(); ++it) {
392  TrackerWrapper *tracker = it->second;
393 
394  if (tracker->m_trackerType & EDGE_TRACKER) {
395  for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
396  double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
397  W_true[start_index + i] = wi;
398  m_weightedError[start_index + i] = wi * m_error[start_index + i];
399 
400  num += wi * vpMath::sqr(m_error[start_index + i]);
401  den += wi;
402 
403  for (unsigned int j = 0; j < m_L.getCols(); j++) {
404  m_L[start_index + i][j] *= wi;
405  }
406  }
407 
408  start_index += tracker->m_error_edge.getRows();
409  }
410 
411 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
412  if (tracker->m_trackerType & KLT_TRACKER) {
413  for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
414  double wi = tracker->m_w_klt[i] * factorKlt;
415  W_true[start_index + i] = wi;
416  m_weightedError[start_index + i] = wi * m_error[start_index + i];
417 
418  num += wi * vpMath::sqr(m_error[start_index + i]);
419  den += wi;
420 
421  for (unsigned int j = 0; j < m_L.getCols(); j++) {
422  m_L[start_index + i][j] *= wi;
423  }
424  }
425 
426  start_index += tracker->m_error_klt.getRows();
427  }
428 #endif
429 
430  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
431  for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
432  double wi = tracker->m_w_depthNormal[i] * factorDepth;
433  W_true[start_index + i] = wi;
434  m_weightedError[start_index + i] = wi * m_error[start_index + i];
435 
436  num += wi * vpMath::sqr(m_error[start_index + i]);
437  den += wi;
438 
439  for (unsigned int j = 0; j < m_L.getCols(); j++) {
440  m_L[start_index + i][j] *= wi;
441  }
442  }
443 
444  start_index += tracker->m_error_depthNormal.getRows();
445  }
446 
447  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
448  for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
449  double wi = tracker->m_w_depthDense[i] * factorDepthDense;
450  W_true[start_index + i] = wi;
451  m_weightedError[start_index + i] = wi * m_error[start_index + i];
452 
453  num += wi * vpMath::sqr(m_error[start_index + i]);
454  den += wi;
455 
456  for (unsigned int j = 0; j < m_L.getCols(); j++) {
457  m_L[start_index + i][j] *= wi;
458  }
459  }
460 
461  start_index += tracker->m_error_depthDense.getRows();
462  }
463  }
464 
465  normRes_1 = normRes;
466  normRes = sqrt(num / den);
467 
468  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
469 
470  cMo_prev = m_cMo;
471 
472  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
473 
474 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
475  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
476  it != m_mapOfTrackers.end(); ++it) {
477  TrackerWrapper *tracker = it->second;
478 
479  vpHomogeneousMatrix c_curr_tTc_curr0 =
480  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
481  tracker->ctTc0 = c_curr_tTc_curr0;
482  }
483 #endif
484 
485  // Update cMo
486  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
487  it != m_mapOfTrackers.end(); ++it) {
488  TrackerWrapper *tracker = it->second;
489  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
490  }
491  }
492 
493  iter++;
494  }
495 
496  // Update features number
497  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
498  it != m_mapOfTrackers.end(); ++it) {
499  TrackerWrapper *tracker = it->second;
500  if (tracker->m_trackerType & EDGE_TRACKER) {
501  m_nb_feat_edge += tracker->m_error_edge.size();
502  }
503 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
504  if (tracker->m_trackerType & KLT_TRACKER) {
505  m_nb_feat_klt += tracker->m_error_klt.size();
506  }
507 #endif
508  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
509  m_nb_feat_depthNormal += tracker->m_error_depthNormal.size();
510  }
511  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
512  m_nb_feat_depthDense += tracker->m_error_depthDense.size();
513  }
514  }
515 
516  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
517 
518  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
519  it != m_mapOfTrackers.end(); ++it) {
520  TrackerWrapper *tracker = it->second;
521 
522  if (tracker->m_trackerType & EDGE_TRACKER) {
523  tracker->updateMovingEdgeWeights();
524  }
525  }
526 }
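/*
  Note added for clarity (not part of the original source): each iteration above performs
  one robustly weighted, Levenberg-Marquardt damped Gauss-Newton step. With the weights
  already folded into m_L and m_weightedError, computeVVSPoseEstimation() solves
  approximately

      v = -lambda * (L^T L + mu I)^(-1) * L^T e

  and the pose is then updated through the exponential map, cMo <- exp(v)^(-1) * cMo,
  before being propagated to every camera with its fixed camera transformation matrix.
*/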
527 
528 void vpMbGenericTracker::computeVVSInit()
529 {
530  throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
531 }
532 
533 void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
534 {
535  unsigned int nbFeatures = 0;
536 
537  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
538  it != m_mapOfTrackers.end(); ++it) {
539  TrackerWrapper *tracker = it->second;
540  tracker->computeVVSInit(mapOfImages[it->first]);
541 
542  nbFeatures += tracker->m_error.getRows();
543  }
544 
545  m_L.resize(nbFeatures, 6, false, false);
546  m_error.resize(nbFeatures, false);
547 
548  m_weightedError.resize(nbFeatures, false);
549  m_w.resize(nbFeatures, false);
550  m_w = 1;
551 }
552 
553 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu()
554 {
555  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
556  "computeVVSInteractionMatrixAndR"
557  "esidu() should not be called");
558 }
559 
560 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu(
561  std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
562  std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
563 {
564  unsigned int start_index = 0;
565 
566  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
567  it != m_mapOfTrackers.end(); ++it) {
568  TrackerWrapper *tracker = it->second;
569 
570  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
571 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
572  vpHomogeneousMatrix c_curr_tTc_curr0 = m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
573  tracker->ctTc0 = c_curr_tTc_curr0;
574 #endif
575 
576  tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
577 
578  m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
579  m_error.insert(start_index, tracker->m_error);
580 
581  start_index += tracker->m_error.getRows();
582  }
583 }
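/*
  Note added for clarity (not part of the original source): every per-camera interaction
  matrix is expressed with respect to the reference frame through the constant velocity
  twist matrix built from the camera transformation, so the stacked system is

      L = [ L_1 * c1_V_ref ; L_2 * c2_V_ref ; ... ],   e = [ e_1 ; e_2 ; ... ]

  and a single 6-dof velocity computed for the reference camera is consistent for all
  cameras.
*/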
584 
585 void vpMbGenericTracker::computeVVSWeights()
586 {
587  unsigned int start_index = 0;
588 
589  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
590  it != m_mapOfTrackers.end(); ++it) {
591  TrackerWrapper *tracker = it->second;
592  tracker->computeVVSWeights();
593 
594  m_w.insert(start_index, tracker->m_w);
595  start_index += tracker->m_w.getRows();
596  }
597 }
598 
612 void vpMbGenericTracker::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
613  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
614  bool displayFullModel)
615 {
616  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
617  if (it != m_mapOfTrackers.end()) {
618  TrackerWrapper *tracker = it->second;
619  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
620  } else {
621  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
622  }
623 }
624 
638 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
639  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
640  bool displayFullModel)
641 {
642  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
643  if (it != m_mapOfTrackers.end()) {
644  TrackerWrapper *tracker = it->second;
645  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
646  } else {
647  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
648  }
649 }
650 
667 void vpMbGenericTracker::display(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
668  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
669  const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
670  unsigned int thickness, bool displayFullModel)
671 {
672  if (m_mapOfTrackers.size() == 2) {
673  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
674  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
675  ++it;
676 
677  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
678  } else {
679  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
680  << std::endl;
681  }
682 }
683 
700 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I1, const vpImage<vpRGBa> &I2, const vpHomogeneousMatrix &c1Mo,
701  const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
702  const vpCameraParameters &cam2, const vpColor &color, unsigned int thickness,
703  bool displayFullModel)
704 {
705  if (m_mapOfTrackers.size() == 2) {
706  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
707  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
708  ++it;
709 
710  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
711  } else {
712  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
713  << std::endl;
714  }
715 }
716 
728 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
729  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
730  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
731  const vpColor &col, unsigned int thickness, bool displayFullModel)
732 {
733  // Display only for the given images
734  for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
735  it_img != mapOfImages.end(); ++it_img) {
736  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
737  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
738  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
739 
740  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
741  it_cam != mapOfCameraParameters.end()) {
742  TrackerWrapper *tracker = it_tracker->second;
743  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
744  } else {
745  std::cerr << "Missing elements for image: " << it_img->first << "!" << std::endl;
746  }
747  }
748 }
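/*
  Illustrative sketch (added comment, not part of the original source; camera names,
  images I1/I2 and the tracker are hypothetical): typical per-frame display with the
  map-based overload above.

  \code
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera1"] = &I1;
  mapOfImages["Camera2"] = &I2;

  std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
  tracker.getPose(mapOfPoses);

  std::map<std::string, vpCameraParameters> mapOfCams;
  tracker.getCameraParameters(mapOfCams);

  tracker.display(mapOfImages, mapOfPoses, mapOfCams, vpColor::red, 2, false);
  \endcode
*/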
749 
761 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
762  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
763  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
764  const vpColor &col, unsigned int thickness, bool displayFullModel)
765 {
766  // Display only for the given images
767  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
768  it_img != mapOfImages.end(); ++it_img) {
769  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
770  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
771  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
772 
773  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
774  it_cam != mapOfCameraParameters.end()) {
775  TrackerWrapper *tracker = it_tracker->second;
776  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
777  } else {
778  std::cerr << "Missing elements for image: " << it_img->first << "!" << std::endl;
779  }
780  }
781 }
782 
788 std::vector<std::string> vpMbGenericTracker::getCameraNames() const
789 {
790  std::vector<std::string> cameraNames;
791 
792  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
793  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
794  cameraNames.push_back(it_tracker->first);
795  }
796 
797  return cameraNames;
798 }
799 
800 void vpMbGenericTracker::getCameraParameters(vpCameraParameters &camera) const
801 {
802  camera = m_cam;
803 }
804 
814 {
815  if (m_mapOfTrackers.size() == 2) {
816  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
817  it->second->getCameraParameters(cam1);
818  ++it;
819 
820  it->second->getCameraParameters(cam2);
821  } else {
822  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
823  << std::endl;
824  }
825 }
826 
832 void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
833 {
834  // Clear the input map
835  mapOfCameraParameters.clear();
836 
837  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
838  it != m_mapOfTrackers.end(); ++it) {
839  vpCameraParameters cam_;
840  it->second->getCameraParameters(cam_);
841  mapOfCameraParameters[it->first] = cam_;
842  }
843 }
844 
851 std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
852 {
853  std::map<std::string, int> trackingTypes;
854 
855  TrackerWrapper *tracker;
856  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
857  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
858  tracker = it_tracker->second;
859  trackingTypes[it_tracker->first] = tracker->getTrackerType();
860  }
861 
862  return trackingTypes;
863 }
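/*
  Illustrative sketch (added comment, not part of the original source): the returned map
  can be inspected as a bitmask of vpMbGenericTracker::vpTrackerType values to see which
  features are enabled for each camera.

  \code
  std::map<std::string, int> types = tracker.getCameraTrackerTypes();
  for (std::map<std::string, int>::const_iterator it = types.begin(); it != types.end(); ++it) {
    if (it->second & vpMbGenericTracker::EDGE_TRACKER)
      std::cout << it->first << " uses moving edges" << std::endl;
    if (it->second & vpMbGenericTracker::DEPTH_DENSE_TRACKER)
      std::cout << it->first << " uses dense depth" << std::endl;
  }
  \endcode
*/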
864 
873 void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
874 {
875  if (m_mapOfTrackers.size() == 2) {
876  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
877  clippingFlag1 = it->second->getClipping();
878  ++it;
879 
880  clippingFlag2 = it->second->getClipping();
881  } else {
882  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
883  << std::endl;
884  }
885 }
886 
892 void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
893 {
894  mapOfClippingFlags.clear();
895 
896  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
897  it != m_mapOfTrackers.end(); ++it) {
898  TrackerWrapper *tracker = it->second;
899  mapOfClippingFlags[it->first] = tracker->getClipping();
900  }
901 }
902 
908 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces()
909 {
910  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
911  if (it != m_mapOfTrackers.end()) {
912  return it->second->getFaces();
913  }
914 
915  std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
916  return faces;
917 }
918 
924 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces(const std::string &cameraName)
925 {
926  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
927  if (it != m_mapOfTrackers.end()) {
928  return it->second->getFaces();
929  }
930 
931  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
932  return faces;
933 }
934 
935 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
936 
939 std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
940 {
941  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
942  if (it != m_mapOfTrackers.end()) {
943  TrackerWrapper *tracker = it->second;
944  return tracker->getFeaturesCircle();
945  } else {
946  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
947  m_referenceCameraName.c_str());
948  }
949 }
950 
954 std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
955 {
956  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
957  if (it != m_mapOfTrackers.end()) {
958  TrackerWrapper *tracker = it->second;
959  return tracker->getFeaturesKltCylinder();
960  } else {
961  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
962  m_referenceCameraName.c_str());
963  }
964 }
965 
969 std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
970 {
971  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
972  if (it != m_mapOfTrackers.end()) {
973  TrackerWrapper *tracker = it->second;
974  return tracker->getFeaturesKlt();
975  } else {
976  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
977  m_referenceCameraName.c_str());
978  }
979 }
980 #endif
981 
1007 std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
1008 {
1009  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1010 
1011  if (it != m_mapOfTrackers.end()) {
1012  return it->second->getFeaturesForDisplay();
1013  } else {
1014  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1015  }
1016 
1017  return std::vector<std::vector<double> >();
1018 }
1019 
1043 void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
1044 {
1045  // Clear the input map
1046  mapOfFeatures.clear();
1047 
1048  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1049  it != m_mapOfTrackers.end(); ++it) {
1050  mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1051  }
1052 }
1053 
1064 
1065 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
1066 
1074 std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1075 {
1076  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1077  if (it != m_mapOfTrackers.end()) {
1078  TrackerWrapper *tracker = it->second;
1079  return tracker->getKltImagePoints();
1080  } else {
1081  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1082  }
1083 
1084  return std::vector<vpImagePoint>();
1085 }
1086 
1095 std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1096 {
1097  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1098  if (it != m_mapOfTrackers.end()) {
1099  TrackerWrapper *tracker = it->second;
1100  return tracker->getKltImagePointsWithId();
1101  } else {
1102  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1103  }
1104 
1105  return std::map<int, vpImagePoint>();
1106 }
1107 
1113 unsigned int vpMbGenericTracker::getKltMaskBorder() const
1114 {
1115  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1116  if (it != m_mapOfTrackers.end()) {
1117  TrackerWrapper *tracker = it->second;
1118  return tracker->getKltMaskBorder();
1119  } else {
1120  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1121  }
1122 
1123  return 0;
1124 }
1125 
1131 int vpMbGenericTracker::getKltNbPoints() const
1132 {
1133  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1134  if (it != m_mapOfTrackers.end()) {
1135  TrackerWrapper *tracker = it->second;
1136  return tracker->getKltNbPoints();
1137  } else {
1138  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1139  }
1140 
1141  return 0;
1142 }
1143 
1149 vpKltOpencv vpMbGenericTracker::getKltOpencv() const
1150 {
1151  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1152 
1153  if (it_tracker != m_mapOfTrackers.end()) {
1154  TrackerWrapper *tracker;
1155  tracker = it_tracker->second;
1156  return tracker->getKltOpencv();
1157  } else {
1158  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1159  }
1160 
1161  return vpKltOpencv();
1162 }
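/*
  Illustrative sketch (added comment, not part of the original source; assumes vpKltOpencv
  exposes getMaxFeatures()/getQuality(), which is my understanding of that class): the
  getter above returns a copy that can be inspected, while tuning is normally done through
  setKltOpencv().

  \code
  vpKltOpencv klt = tracker.getKltOpencv();
  std::cout << "KLT max features: " << klt.getMaxFeatures()
            << ", quality: " << klt.getQuality() << std::endl;
  \endcode
*/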
1163 
1172 void vpMbGenericTracker::getKltOpencv(vpKltOpencv &klt1, vpKltOpencv &klt2) const
1173 {
1174  if (m_mapOfTrackers.size() == 2) {
1175  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1176  klt1 = it->second->getKltOpencv();
1177  ++it;
1178 
1179  klt2 = it->second->getKltOpencv();
1180  } else {
1181  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1182  << std::endl;
1183  }
1184 }
1185 
1191 void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1192 {
1193  mapOfKlts.clear();
1194 
1195  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1196  it != m_mapOfTrackers.end(); ++it) {
1197  TrackerWrapper *tracker = it->second;
1198  mapOfKlts[it->first] = tracker->getKltOpencv();
1199  }
1200 }
1201 
1202 #if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
1203 
1208 std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1209 {
1210  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1211  if (it != m_mapOfTrackers.end()) {
1212  TrackerWrapper *tracker = it->second;
1213  return tracker->getKltPoints();
1214  } else {
1215  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1216  }
1217 
1218  return std::vector<cv::Point2f>();
1219 }
1220 #endif
1221 
1229 #endif
1230 
1243 void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList,
1244  unsigned int level) const
1245 {
1246  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1247  if (it != m_mapOfTrackers.end()) {
1248  it->second->getLcircle(circlesList, level);
1249  } else {
1250  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1251  }
1252 }
1253 
1267 void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1268  unsigned int level) const
1269 {
1270  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1271  if (it != m_mapOfTrackers.end()) {
1272  it->second->getLcircle(circlesList, level);
1273  } else {
1274  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1275  }
1276 }
1277 
1290 void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList,
1291  unsigned int level) const
1292 {
1293  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1294  if (it != m_mapOfTrackers.end()) {
1295  it->second->getLcylinder(cylindersList, level);
1296  } else {
1297  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1298  }
1299 }
1300 
1314 void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1315  unsigned int level) const
1316 {
1317  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1318  if (it != m_mapOfTrackers.end()) {
1319  it->second->getLcylinder(cylindersList, level);
1320  } else {
1321  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1322  }
1323 }
1324 
1337 void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList,
1338  unsigned int level) const
1339 {
1340  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1341 
1342  if (it != m_mapOfTrackers.end()) {
1343  it->second->getLline(linesList, level);
1344  } else {
1345  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1346  }
1347 }
1348 
1362 void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1363  unsigned int level) const
1364 {
1365  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1366  if (it != m_mapOfTrackers.end()) {
1367  it->second->getLline(linesList, level);
1368  } else {
1369  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1370  }
1371 }
1372 
1399 std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1400  const vpHomogeneousMatrix &cMo,
1401  const vpCameraParameters &cam,
1402  bool displayFullModel)
1403 {
1404  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1405 
1406  if (it != m_mapOfTrackers.end()) {
1407  return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1408  } else {
1409  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1410  }
1411 
1412  return std::vector<std::vector<double> >();
1413 }
1414 
1442 void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1443  const std::map<std::string, unsigned int> &mapOfwidths,
1444  const std::map<std::string, unsigned int> &mapOfheights,
1445  const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1446  const std::map<std::string, vpCameraParameters> &mapOfCams,
1447  bool displayFullModel)
1448 {
1449  // Clear the input map
1450  mapOfModels.clear();
1451 
1452  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1453  it != m_mapOfTrackers.end(); ++it) {
1454  std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1455  std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1456  std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1457  std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1458 
1459  if (findWidth != mapOfwidths.end() &&
1460  findHeight != mapOfheights.end() &&
1461  findcMo != mapOfcMos.end() &&
1462  findCam != mapOfCams.end()) {
1463  mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second,
1464  findcMo->second, findCam->second, displayFullModel);
1465  }
1466  }
1467 }
1468 
1474 vpMe vpMbGenericTracker::getMovingEdge() const
1475 {
1476  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1477 
1478  if (it != m_mapOfTrackers.end()) {
1479  return it->second->getMovingEdge();
1480  } else {
1481  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1482  }
1483 
1484  return vpMe();
1485 }
1486 
1495 void vpMbGenericTracker::getMovingEdge(vpMe &me1, vpMe &me2) const
1496 {
1497  if (m_mapOfTrackers.size() == 2) {
1498  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1499  it->second->getMovingEdge(me1);
1500  ++it;
1501 
1502  it->second->getMovingEdge(me2);
1503  } else {
1504  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1505  << std::endl;
1506  }
1507 }
1508 
1514 void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1515 {
1516  mapOfMovingEdges.clear();
1517 
1518  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1519  it != m_mapOfTrackers.end(); ++it) {
1520  TrackerWrapper *tracker = it->second;
1521  mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1522  }
1523 }
1524 
1542 unsigned int vpMbGenericTracker::getNbPoints(unsigned int level) const
1543 {
1544  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1545 
1546  unsigned int nbGoodPoints = 0;
1547  if (it != m_mapOfTrackers.end()) {
1548 
1549  nbGoodPoints = it->second->getNbPoints(level);
1550  } else {
1551  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1552  }
1553 
1554  return nbGoodPoints;
1555 }
1556 
1571 void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, unsigned int level) const
1572 {
1573  mapOfNbPoints.clear();
1574 
1575  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1576  it != m_mapOfTrackers.end(); ++it) {
1577  TrackerWrapper *tracker = it->second;
1578  mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1579  }
1580 }
1581 
1587 unsigned int vpMbGenericTracker::getNbPolygon() const
1588 {
1589  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1590  if (it != m_mapOfTrackers.end()) {
1591  return it->second->getNbPolygon();
1592  }
1593 
1594  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1595  return 0;
1596 }
1597 
1603 void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1604 {
1605  mapOfNbPolygons.clear();
1606 
1607  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1608  it != m_mapOfTrackers.end(); ++it) {
1609  TrackerWrapper *tracker = it->second;
1610  mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1611  }
1612 }
1613 
1624 vpMbtPolygon *vpMbGenericTracker::getPolygon(unsigned int index)
1625 {
1626  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1627  if (it != m_mapOfTrackers.end()) {
1628  return it->second->getPolygon(index);
1629  }
1630 
1631  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1632  return NULL;
1633 }
1634 
1646 vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, unsigned int index)
1647 {
1648  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1649  if (it != m_mapOfTrackers.end()) {
1650  return it->second->getPolygon(index);
1651  }
1652 
1653  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1654  return NULL;
1655 }
1656 
1672 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1673 vpMbGenericTracker::getPolygonFaces(bool orderPolygons, bool useVisibility, bool clipPolygon)
1674 {
1675  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1676 
1677  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1678  if (it != m_mapOfTrackers.end()) {
1679  TrackerWrapper *tracker = it->second;
1680  polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1681  } else {
1682  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1683  }
1684 
1685  return polygonFaces;
1686 }
1687 
1705 void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1706  std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1707  bool orderPolygons, bool useVisibility, bool clipPolygon)
1708 {
1709  mapOfPolygons.clear();
1710  mapOfPoints.clear();
1711 
1712  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1713  it != m_mapOfTrackers.end(); ++it) {
1714  TrackerWrapper *tracker = it->second;
1715  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1716  tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1717 
1718  mapOfPolygons[it->first] = polygonFaces.first;
1719  mapOfPoints[it->first] = polygonFaces.second;
1720  }
1721 }
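/*
  Illustrative sketch (added comment, not part of the original source): retrieving the
  ordered and visibility-filtered faces of the reference camera, e.g. to overlay or
  export them.

  \code
  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > faces_pair =
      tracker.getPolygonFaces(true, true, false);

  std::cout << "2D polygons: " << faces_pair.first.size()
            << ", 3D faces: " << faces_pair.second.size() << std::endl;
  \endcode
*/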
1722 
1723 void vpMbGenericTracker::getPose(vpHomogeneousMatrix &cMo) const
1724 {
1725  vpMbTracker::getPose(cMo);
1726 }
1727 
1736 void vpMbGenericTracker::getPose(vpHomogeneousMatrix &c1Mo, vpHomogeneousMatrix &c2Mo) const
1737 {
1738  if (m_mapOfTrackers.size() == 2) {
1739  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1740  it->second->getPose(c1Mo);
1741  ++it;
1742 
1743  it->second->getPose(c2Mo);
1744  } else {
1745  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1746  << std::endl;
1747  }
1748 }
1749 
1755 void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1756 {
1757  // Clear the map
1758  mapOfCameraPoses.clear();
1759 
1760  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1761  it != m_mapOfTrackers.end(); ++it) {
1762  TrackerWrapper *tracker = it->second;
1763  tracker->getPose(mapOfCameraPoses[it->first]);
1764  }
1765 }
1766 
1770 std::string vpMbGenericTracker::getReferenceCameraName() const
1771 {
1772  return m_referenceCameraName;
1773 }
1774 
1778 int vpMbGenericTracker::getTrackerType() const
1779 {
1780  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1781  if (it != m_mapOfTrackers.end()) {
1782  TrackerWrapper *tracker = it->second;
1783  return tracker->getTrackerType();
1784  } else {
1785  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1786  m_referenceCameraName.c_str());
1787  }
1788 }
1789 
1790 void vpMbGenericTracker::init(const vpImage<unsigned char> &I)
1791 {
1792  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1793  it != m_mapOfTrackers.end(); ++it) {
1794  TrackerWrapper *tracker = it->second;
1795  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
1796  tracker->init(I);
1797  }
1798 }
1799 
1800 void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
1801  double /*radius*/, int /*idFace*/, const std::string & /*name*/)
1802 {
1803  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
1804 }
1805 
1806 #ifdef VISP_HAVE_MODULE_GUI
1807 
1850 void vpMbGenericTracker::initClick(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1851  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1852  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1853 {
1854  if (m_mapOfTrackers.size() == 2) {
1855  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1856  TrackerWrapper *tracker = it->second;
1857  tracker->initClick(I1, initFile1, displayHelp, T1);
1858 
1859  ++it;
1860 
1861  tracker = it->second;
1862  tracker->initClick(I2, initFile2, displayHelp, T2);
1863 
1864  it = m_mapOfTrackers.find(m_referenceCameraName);
1865  if (it != m_mapOfTrackers.end()) {
1866  tracker = it->second;
1867 
1868  // Set the reference cMo
1869  tracker->getPose(m_cMo);
1870  }
1871  } else {
1872  throw vpException(vpTrackingException::initializationError,
1873  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1874  }
1875 }
1876 
1919 void vpMbGenericTracker::initClick(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
1920  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1921  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1922 {
1923  if (m_mapOfTrackers.size() == 2) {
1924  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1925  TrackerWrapper *tracker = it->second;
1926  tracker->initClick(I_color1, initFile1, displayHelp, T1);
1927 
1928  ++it;
1929 
1930  tracker = it->second;
1931  tracker->initClick(I_color2, initFile2, displayHelp, T2);
1932 
1933  it = m_mapOfTrackers.find(m_referenceCameraName);
1934  if (it != m_mapOfTrackers.end()) {
1935  tracker = it->second;
1936 
1937  // Set the reference cMo
1938  tracker->getPose(m_cMo);
1939  }
1940  } else {
1941  throw vpException(vpTrackingException::initializationError,
1942  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1943  }
1944 }
1945 
1988 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
1989  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
1990  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
1991 {
1992  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1993  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
1994  mapOfImages.find(m_referenceCameraName);
1995  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
1996 
1997  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
1998  TrackerWrapper *tracker = it_tracker->second;
1999  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2000  if (it_T != mapOfT.end())
2001  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2002  else
2003  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2004  tracker->getPose(m_cMo);
2005  } else {
2006  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2007  }
2008 
2009  // Vector of missing initFile for cameras
2010  std::vector<std::string> vectorOfMissingCameraPoses;
2011 
2012  // Set pose for the specified cameras
2013  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2014  if (it_tracker->first != m_referenceCameraName) {
2015  it_img = mapOfImages.find(it_tracker->first);
2016  it_initFile = mapOfInitFiles.find(it_tracker->first);
2017 
2018  if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2019  // InitClick for the current camera
2020  TrackerWrapper *tracker = it_tracker->second;
2021  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2022  } else {
2023  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2024  }
2025  }
2026  }
2027 
2028  // Init for cameras that do not have an initFile
2029  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2030  it != vectorOfMissingCameraPoses.end(); ++it) {
2031  it_img = mapOfImages.find(*it);
2032  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2033  m_mapOfCameraTransformationMatrix.find(*it);
2034 
2035  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2036  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2037  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2038  m_mapOfTrackers[*it]->init(*it_img->second);
2039  } else {
2040  throw vpException(vpTrackingException::initializationError,
2041  "Missing image or missing camera transformation "
2042  "matrix! Cannot set the pose for camera: %s!",
2043  it->c_str());
2044  }
2045  }
2046 }
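/*
  Illustrative sketch (added comment, not part of the original source; camera names,
  images and the "cube.init" file are hypothetical): initializing a two-camera setup by
  clicking only in the reference image. Cameras without an init file get their pose from
  the reference pose combined with the known camera transformation matrices.

  \code
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera1"] = &I1;
  mapOfImages["Camera2"] = &I2;

  std::map<std::string, std::string> mapOfInitFiles;
  mapOfInitFiles["Camera1"] = "cube.init"; // only the reference camera needs an init file

  tracker.initClick(mapOfImages, mapOfInitFiles, true);
  \endcode
*/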
2047 
2090 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2091  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2092  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2093 {
2094  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2095  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2096  mapOfColorImages.find(m_referenceCameraName);
2097  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2098 
2099  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2100  TrackerWrapper *tracker = it_tracker->second;
2101  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2102  if (it_T != mapOfT.end())
2103  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2104  else
2105  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2106  tracker->getPose(m_cMo);
2107  } else {
2108  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2109  }
2110 
2111  // Vector of missing initFile for cameras
2112  std::vector<std::string> vectorOfMissingCameraPoses;
2113 
2114  // Set pose for the specified cameras
2115  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2116  if (it_tracker->first != m_referenceCameraName) {
2117  it_img = mapOfColorImages.find(it_tracker->first);
2118  it_initFile = mapOfInitFiles.find(it_tracker->first);
2119 
2120  if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2121  // InitClick for the current camera
2122  TrackerWrapper *tracker = it_tracker->second;
2123  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2124  } else {
2125  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2126  }
2127  }
2128  }
2129 
2130  // Init for cameras that do not have an initFile
2131  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2132  it != vectorOfMissingCameraPoses.end(); ++it) {
2133  it_img = mapOfColorImages.find(*it);
2134  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2135  m_mapOfCameraTransformationMatrix.find(*it);
2136 
2137  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2138  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2139  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2140  vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
2141  m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
2142  } else {
2143  throw vpException(vpTrackingException::initializationError,
2144  "Missing image or missing camera transformation "
2145  "matrix! Cannot set the pose for camera: %s!",
2146  it->c_str());
2147  }
2148  }
2149 }
2150 #endif
2151 
2152 void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
2153  const int /*idFace*/, const std::string & /*name*/)
2154 {
2155  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
2156 }
2157 
2158 void vpMbGenericTracker::initFaceFromCorners(vpMbtPolygon & /*polygon*/)
2159 {
2160  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
2161 }
2162 
2163 void vpMbGenericTracker::initFaceFromLines(vpMbtPolygon & /*polygon*/)
2164 {
2165  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
2166 }
2167 
2197 void vpMbGenericTracker::initFromPoints(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2198  const std::string &initFile1, const std::string &initFile2)
2199 {
2200  if (m_mapOfTrackers.size() == 2) {
2201  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2202  TrackerWrapper *tracker = it->second;
2203  tracker->initFromPoints(I1, initFile1);
2204 
2205  ++it;
2206 
2207  tracker = it->second;
2208  tracker->initFromPoints(I2, initFile2);
2209 
2210  it = m_mapOfTrackers.find(m_referenceCameraName);
2211  if (it != m_mapOfTrackers.end()) {
2212  tracker = it->second;
2213 
2214  // Set the reference cMo
2215  tracker->getPose(m_cMo);
2216 
2217  // Set the reference camera parameters
2218  tracker->getCameraParameters(m_cam);
2219  }
2220  } else {
2221  throw vpException(vpTrackingException::initializationError,
2222  "Cannot initFromPoints()! Require two cameras but "
2223  "there are %d cameras!",
2224  m_mapOfTrackers.size());
2225  }
2226 }
2227 
2257 void vpMbGenericTracker::initFromPoints(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2258  const std::string &initFile1, const std::string &initFile2)
2259 {
2260  if (m_mapOfTrackers.size() == 2) {
2261  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2262  TrackerWrapper *tracker = it->second;
2263  tracker->initFromPoints(I_color1, initFile1);
2264 
2265  ++it;
2266 
2267  tracker = it->second;
2268  tracker->initFromPoints(I_color2, initFile2);
2269 
2270  it = m_mapOfTrackers.find(m_referenceCameraName);
2271  if (it != m_mapOfTrackers.end()) {
2272  tracker = it->second;
2273 
2274  // Set the reference cMo
2275  tracker->getPose(m_cMo);
2276 
2277  // Set the reference camera parameters
2278  tracker->getCameraParameters(m_cam);
2279  }
2280  } else {
2281  throw vpException(vpTrackingException::initializationError,
2282  "Cannot initFromPoints()! Require two cameras but "
2283  "there are %d cameras!",
2284  m_mapOfTrackers.size());
2285  }
2286 }
2287 
2288 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2289  const std::map<std::string, std::string> &mapOfInitPoints)
2290 {
2291  // Set the reference cMo
2292  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2293  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2294  mapOfImages.find(m_referenceCameraName);
2295  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2296 
2297  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2298  TrackerWrapper *tracker = it_tracker->second;
2299  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2300  tracker->getPose(m_cMo);
2301  } else {
2302  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2303  }
2304 
2305  // Vector of missing initPoints for cameras
2306  std::vector<std::string> vectorOfMissingCameraPoints;
2307 
2308  // Set pose for the specified cameras
2309  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2310  it_img = mapOfImages.find(it_tracker->first);
2311  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2312 
2313  if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2314  // Set pose
2315  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2316  } else {
2317  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2318  }
2319  }
2320 
2321  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2322  it != vectorOfMissingCameraPoints.end(); ++it) {
2323  it_img = mapOfImages.find(*it);
2324  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2325  m_mapOfCameraTransformationMatrix.find(*it);
2326 
2327  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2328  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2329  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2330  } else {
2331  throw vpException(vpTrackingException::initializationError,
2332  "Missing image or missing camera transformation "
2333  "matrix! Cannot init the pose for camera: %s!",
2334  it->c_str());
2335  }
2336  }
2337 }
2338 
2339 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2340  const std::map<std::string, std::string> &mapOfInitPoints)
2341 {
2342  // Set the reference cMo
2343  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2344  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2345  mapOfColorImages.find(m_referenceCameraName);
2346  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2347 
2348  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2349  TrackerWrapper *tracker = it_tracker->second;
2350  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2351  tracker->getPose(m_cMo);
2352  } else {
2353  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2354  }
2355 
2356  // Vector of missing initPoints for cameras
2357  std::vector<std::string> vectorOfMissingCameraPoints;
2358 
2359  // Set pose for the specified cameras
2360  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2361  it_img = mapOfColorImages.find(it_tracker->first);
2362  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2363 
2364  if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2365  // Set pose
2366  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2367  } else {
2368  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2369  }
2370  }
2371 
2372  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2373  it != vectorOfMissingCameraPoints.end(); ++it) {
2374  it_img = mapOfColorImages.find(*it);
2375  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2376  m_mapOfCameraTransformationMatrix.find(*it);
2377 
2378  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2379  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2380  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2381  } else {
2382  throw vpException(vpTrackingException::initializationError,
2383  "Missing image or missing camera transformation "
2384  "matrix! Cannot init the pose for camera: %s!",
2385  it->c_str());
2386  }
2387  }
2388 }
2389 
2390 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo)
2391 {
2392  vpMbTracker::initFromPose(I, cMo);
2393 }
2394 
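/*
 * Convention for every two-camera overload in this class: the first argument is applied to the first
 * camera obtained when iterating m_mapOfTrackers (std::map keeps camera names in alphabetical order)
 * and the second argument to the other camera; m_cMo and m_cam are then taken from the reference camera.
 */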
2406 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2407  const std::string &initFile1, const std::string &initFile2)
2408 {
2409  if (m_mapOfTrackers.size() == 2) {
2410  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2411  TrackerWrapper *tracker = it->second;
2412  tracker->initFromPose(I1, initFile1);
2413 
2414  ++it;
2415 
2416  tracker = it->second;
2417  tracker->initFromPose(I2, initFile2);
2418 
2419  it = m_mapOfTrackers.find(m_referenceCameraName);
2420  if (it != m_mapOfTrackers.end()) {
2421  tracker = it->second;
2422 
2423  // Set the reference cMo
2424  tracker->getPose(m_cMo);
2425 
2426  // Set the reference camera parameters
2427  tracker->getCameraParameters(m_cam);
2428  }
2429  } else {
2430  throw vpException(vpTrackingException::initializationError,
2431  "Cannot initFromPose()! Require two cameras but there "
2432  "are %d cameras!",
2433  m_mapOfTrackers.size());
2434  }
2435 }
2436 
2448 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2449  const std::string &initFile1, const std::string &initFile2)
2450 {
2451  if (m_mapOfTrackers.size() == 2) {
2452  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2453  TrackerWrapper *tracker = it->second;
2454  tracker->initFromPose(I_color1, initFile1);
2455 
2456  ++it;
2457 
2458  tracker = it->second;
2459  tracker->initFromPose(I_color2, initFile2);
2460 
2461  it = m_mapOfTrackers.find(m_referenceCameraName);
2462  if (it != m_mapOfTrackers.end()) {
2463  tracker = it->second;
2464 
2465  // Set the reference cMo
2466  tracker->getPose(m_cMo);
2467 
2468  // Set the reference camera parameters
2469  tracker->getCameraParameters(m_cam);
2470  }
2471  } else {
2472  throw vpException(vpTrackingException::initializationError,
2473  "Cannot initFromPose()! Require two cameras but there "
2474  "are %d cameras!",
2475  m_mapOfTrackers.size());
2476  }
2477 }
2478 
2493 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2494  const std::map<std::string, std::string> &mapOfInitPoses)
2495 {
2496  // Set the reference cMo
2497  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2498  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2499  mapOfImages.find(m_referenceCameraName);
2500  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2501 
2502  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2503  TrackerWrapper *tracker = it_tracker->second;
2504  tracker->initFromPose(*it_img->second, it_initPose->second);
2505  tracker->getPose(m_cMo);
2506  } else {
2507  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2508  }
2509 
2510  // Vector of missing pose matrices for cameras
2511  std::vector<std::string> vectorOfMissingCameraPoses;
2512 
2513  // Set pose for the specified cameras
2514  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2515  it_img = mapOfImages.find(it_tracker->first);
2516  it_initPose = mapOfInitPoses.find(it_tracker->first);
2517 
2518  if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2519  // Set pose
2520  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2521  } else {
2522  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2523  }
2524  }
2525 
2526  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2527  it != vectorOfMissingCameraPoses.end(); ++it) {
2528  it_img = mapOfImages.find(*it);
2529  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2530  m_mapOfCameraTransformationMatrix.find(*it);
2531 
2532  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2533  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2534  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2535  } else {
2536  throw vpException(vpTrackingException::initializationError,
2537  "Missing image or missing camera transformation "
2538  "matrix! Cannot init the pose for camera: %s!",
2539  it->c_str());
2540  }
2541  }
2542 }
2543 
2558 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2559  const std::map<std::string, std::string> &mapOfInitPoses)
2560 {
2561  // Set the reference cMo
2562  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2563  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2564  mapOfColorImages.find(m_referenceCameraName);
2565  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2566 
2567  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2568  TrackerWrapper *tracker = it_tracker->second;
2569  tracker->initFromPose(*it_img->second, it_initPose->second);
2570  tracker->getPose(m_cMo);
2571  } else {
2572  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2573  }
2574 
2575  // Vector of missing pose matrices for cameras
2576  std::vector<std::string> vectorOfMissingCameraPoses;
2577 
2578  // Set pose for the specified cameras
2579  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2580  it_img = mapOfColorImages.find(it_tracker->first);
2581  it_initPose = mapOfInitPoses.find(it_tracker->first);
2582 
2583  if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2584  // Set pose
2585  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2586  } else {
2587  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2588  }
2589  }
2590 
2591  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2592  it != vectorOfMissingCameraPoses.end(); ++it) {
2593  it_img = mapOfColorImages.find(*it);
2594  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2595  m_mapOfCameraTransformationMatrix.find(*it);
2596 
2597  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2598  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2599  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2600  } else {
2601  throw vpException(vpTrackingException::initializationError,
2602  "Missing image or missing camera transformation "
2603  "matrix! Cannot init the pose for camera: %s!",
2604  it->c_str());
2605  }
2606  }
2607 }
2608 
2619 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2620  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2621 {
2622  if (m_mapOfTrackers.size() == 2) {
2623  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2624  it->second->initFromPose(I1, c1Mo);
2625 
2626  ++it;
2627 
2628  it->second->initFromPose(I2, c2Mo);
2629 
2630  m_cMo = c1Mo;
2631  } else {
2632  throw vpException(vpTrackingException::fatalError,
2633  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2634  }
2635 }
2636 
2647 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2648  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2649 {
2650  if (m_mapOfTrackers.size() == 2) {
2651  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2652  it->second->initFromPose(I_color1, c1Mo);
2653 
2654  ++it;
2655 
2656  it->second->initFromPose(I_color2, c2Mo);
2657 
2658  m_cMo = c1Mo;
2659  } else {
2660  throw vpException(vpTrackingException::fatalError,
2661  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2662  }
2663 }
2664 
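/*
 * Minimal usage sketch for the map-based pose initialization below. The camera names "Camera1" and
 * "Camera2", the images I1/I2 and the pose c1Mo are placeholders, not part of this file:
 *
 *   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
 *   mapOfImages["Camera1"] = &I1;
 *   mapOfImages["Camera2"] = &I2;
 *   std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
 *   mapOfPoses["Camera1"] = c1Mo; // reference camera: cameras without an explicit pose are
 *                                 // deduced from it and their camera transformation matrix
 *   tracker.initFromPose(mapOfImages, mapOfPoses);
 */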
2678 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2679  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2680 {
2681  // Set the reference cMo
2682  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2683  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2684  mapOfImages.find(m_referenceCameraName);
2685  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2686 
2687  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2688  TrackerWrapper *tracker = it_tracker->second;
2689  tracker->initFromPose(*it_img->second, it_camPose->second);
2690  tracker->getPose(m_cMo);
2691  } else {
2692  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2693  }
2694 
2695  // Vector of missing pose matrices for cameras
2696  std::vector<std::string> vectorOfMissingCameraPoses;
2697 
2698  // Set pose for the specified cameras
2699  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2700  it_img = mapOfImages.find(it_tracker->first);
2701  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2702 
2703  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2704  // Set pose
2705  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2706  } else {
2707  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2708  }
2709  }
2710 
2711  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2712  it != vectorOfMissingCameraPoses.end(); ++it) {
2713  it_img = mapOfImages.find(*it);
2714  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2715  m_mapOfCameraTransformationMatrix.find(*it);
2716 
2717  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2718  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2719  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2720  } else {
2721  throw vpException(vpTrackingException::initializationError,
2722  "Missing image or missing camera transformation "
2723  "matrix! Cannot set the pose for camera: %s!",
2724  it->c_str());
2725  }
2726  }
2727 }
2728 
2742 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2743  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2744 {
2745  // Set the reference cMo
2746  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2747  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2748  mapOfColorImages.find(m_referenceCameraName);
2749  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2750 
2751  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2752  TrackerWrapper *tracker = it_tracker->second;
2753  tracker->initFromPose(*it_img->second, it_camPose->second);
2754  tracker->getPose(m_cMo);
2755  } else {
2756  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2757  }
2758 
2759  // Vector of missing pose matrices for cameras
2760  std::vector<std::string> vectorOfMissingCameraPoses;
2761 
2762  // Set pose for the specified cameras
2763  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2764  it_img = mapOfColorImages.find(it_tracker->first);
2765  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2766 
2767  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2768  // Set pose
2769  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2770  } else {
2771  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2772  }
2773  }
2774 
2775  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2776  it != vectorOfMissingCameraPoses.end(); ++it) {
2777  it_img = mapOfColorImages.find(*it);
2778  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2779  m_mapOfCameraTransformationMatrix.find(*it);
2780 
2781  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2782  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2783  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2784  } else {
2785  throw vpException(vpTrackingException::initializationError,
2786  "Missing image or missing camera transformation "
2787  "matrix! Cannot set the pose for camera: %s!",
2788  it->c_str());
2789  }
2790  }
2791 }
2792 
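/*
 * The single-file loadConfigFile() below applies the same XML settings to every camera, then mirrors
 * the reference camera parameters, appearance/disappearance angles and clipping flags into the
 * tracker-level members. Example call (the file name is only illustrative):
 *
 *   tracker.loadConfigFile("generic-tracker.xml", true);
 */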
2804 void vpMbGenericTracker::loadConfigFile(const std::string &configFile, bool verbose)
2805 {
2806  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2807  it != m_mapOfTrackers.end(); ++it) {
2808  TrackerWrapper *tracker = it->second;
2809  tracker->loadConfigFile(configFile, verbose);
2810  }
2811 
2812  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
2813  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2814  }
2815 
2816  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2817  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2818  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2819  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2820 }
2821 
2836 void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2, bool verbose)
2837 {
2838  if (m_mapOfTrackers.size() != 2) {
2839  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
2840  }
2841 
2842  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2843  TrackerWrapper *tracker = it_tracker->second;
2844  tracker->loadConfigFile(configFile1, verbose);
2845 
2846  ++it_tracker;
2847  tracker = it_tracker->second;
2848  tracker->loadConfigFile(configFile2, verbose);
2849 
2850  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
2851  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2852  }
2853 
2854  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2855  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2856  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2857  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2858 }
2859 
2873 void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles, bool verbose)
2874 {
2875  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2876  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2877  TrackerWrapper *tracker = it_tracker->second;
2878 
2879  std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
2880  if (it_config != mapOfConfigFiles.end()) {
2881  tracker->loadConfigFile(it_config->second, verbose);
2882  } else {
2883  throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
2884  it_tracker->first.c_str());
2885  }
2886  }
2887 
2888  // Set the reference camera parameters
2889  std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
2890  if (it != m_mapOfTrackers.end()) {
2891  TrackerWrapper *tracker = it->second;
2892  tracker->getCameraParameters(m_cam);
2893 
2894  // Set clipping
2895  this->clippingFlag = tracker->getClipping();
2896  this->angleAppears = tracker->getAngleAppear();
2897  this->angleDisappears = tracker->getAngleDisappear();
2898  } else {
2899  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
2900  m_referenceCameraName.c_str());
2901  }
2902 }
2903 
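/*
 * loadModel() forwards the same CAD model (typically a .cao or .wrl file) to every camera; the
 * optional transformation T is applied to the model when it is loaded. Example with a hypothetical
 * model file:
 *
 *   tracker.loadModel("cube.cao", false, vpHomogeneousMatrix());
 */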
2922 void vpMbGenericTracker::loadModel(const std::string &modelFile, bool verbose, const vpHomogeneousMatrix &T)
2923 {
2924  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2925  it != m_mapOfTrackers.end(); ++it) {
2926  TrackerWrapper *tracker = it->second;
2927  tracker->loadModel(modelFile, verbose, T);
2928  }
2929 }
2930 
2953 void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, bool verbose,
2954  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
2955 {
2956  if (m_mapOfTrackers.size() != 2) {
2957  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
2958  }
2959 
2960  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2961  TrackerWrapper *tracker = it_tracker->second;
2962  tracker->loadModel(modelFile1, verbose, T1);
2963 
2964  ++it_tracker;
2965  tracker = it_tracker->second;
2966  tracker->loadModel(modelFile2, verbose, T2);
2967 }
2968 
2988 void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, bool verbose,
2989  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2990 {
2991  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2992  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2993  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
2994 
2995  if (it_model != mapOfModelFiles.end()) {
2996  TrackerWrapper *tracker = it_tracker->second;
2997  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2998 
2999  if (it_T != mapOfT.end())
3000  tracker->loadModel(it_model->second, verbose, it_T->second);
3001  else
3002  tracker->loadModel(it_model->second, verbose);
3003  } else {
3004  throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
3005  it_tracker->first.c_str());
3006  }
3007  }
3008 }
3009 
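// The two preTracking() overloads below simply dispatch, per camera, the grayscale image and its
// point cloud (a PCL cloud, or a raw vector of 3D points with its width/height) to the
// corresponding TrackerWrapper.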
3010 #ifdef VISP_HAVE_PCL
3011 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3012  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3013 {
3014  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3015  it != m_mapOfTrackers.end(); ++it) {
3016  TrackerWrapper *tracker = it->second;
3017  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3018  }
3019 }
3020 #endif
3021 
3022 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3023  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3024  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3025  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3026 {
3027  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3028  it != m_mapOfTrackers.end(); ++it) {
3029  TrackerWrapper *tracker = it->second;
3030  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3031  mapOfPointCloudHeights[it->first]);
3032  }
3033 }
3034 
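// reInitModel() reloads a CAD model and forces the given pose. The two overloads below only accept
// a single-camera configuration; the stereo and map-based overloads follow.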
3046 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
3047  const vpHomogeneousMatrix &cMo, bool verbose,
3048  const vpHomogeneousMatrix &T)
3049 {
3050  if (m_mapOfTrackers.size() != 1) {
3051  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3052  m_mapOfTrackers.size());
3053  }
3054 
3055  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3056  if (it_tracker != m_mapOfTrackers.end()) {
3057  TrackerWrapper *tracker = it_tracker->second;
3058  tracker->reInitModel(I, cad_name, cMo, verbose, T);
3059 
3060  // Set reference pose
3061  tracker->getPose(m_cMo);
3062  } else {
3063  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3064  }
3065 
3066  modelInitialised = true;
3067 }
3068 
3080 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
3081  const vpHomogeneousMatrix &cMo, bool verbose,
3082  const vpHomogeneousMatrix &T)
3083 {
3084  if (m_mapOfTrackers.size() != 1) {
3085  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3086  m_mapOfTrackers.size());
3087  }
3088 
3089  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3090  if (it_tracker != m_mapOfTrackers.end()) {
3091  TrackerWrapper *tracker = it_tracker->second;
3092  tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3093 
3094  // Set reference pose
3095  tracker->getPose(m_cMo);
3096  } else {
3097  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3098  }
3099 
3100  modelInitialised = true;
3101 }
3102 
3123 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
3124  const std::string &cad_name1, const std::string &cad_name2,
3125  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
3126  bool verbose,
3127  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3128 {
3129  if (m_mapOfTrackers.size() == 2) {
3130  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3131 
3132  it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3133 
3134  ++it_tracker;
3135 
3136  it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3137 
3138  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3139  if (it_tracker != m_mapOfTrackers.end()) {
3140  // Set reference pose
3141  it_tracker->second->getPose(m_cMo);
3142  }
3143  } else {
3144  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3145  }
3146 
3147  modelInitialised = true;
3148 }
3149 
3170 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
3171  const std::string &cad_name1, const std::string &cad_name2,
3172  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
3173  bool verbose,
3174  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3175 {
3176  if (m_mapOfTrackers.size() == 2) {
3177  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3178 
3179  it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3180 
3181  ++it_tracker;
3182 
3183  it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3184 
3185  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3186  if (it_tracker != m_mapOfTrackers.end()) {
3187  // Set reference pose
3188  it_tracker->second->getPose(m_cMo);
3189  }
3190  } else {
3191  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3192  }
3193 
3194  modelInitialised = true;
3195 }
3196 
3211 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3212  const std::map<std::string, std::string> &mapOfModelFiles,
3213  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
3214  bool verbose,
3215  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3216 {
3217  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3218  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3219  mapOfImages.find(m_referenceCameraName);
3220  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3221  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3222 
3223  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3224  it_camPose != mapOfCameraPoses.end()) {
3225  TrackerWrapper *tracker = it_tracker->second;
3226  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3227  if (it_T != mapOfT.end())
3228  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3229  else
3230  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3231 
3232  // Set reference pose
3233  tracker->getPose(m_cMo);
3234  } else {
3235  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3236  }
3237 
3238  std::vector<std::string> vectorOfMissingCameras;
3239  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3240  if (it_tracker->first != m_referenceCameraName) {
3241  it_img = mapOfImages.find(it_tracker->first);
3242  it_model = mapOfModelFiles.find(it_tracker->first);
3243  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3244 
3245  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3246  TrackerWrapper *tracker = it_tracker->second;
3247  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3248  } else {
3249  vectorOfMissingCameras.push_back(it_tracker->first);
3250  }
3251  }
3252  }
3253 
3254  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3255  ++it) {
3256  it_img = mapOfImages.find(*it);
3257  it_model = mapOfModelFiles.find(*it);
3258  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3259  m_mapOfCameraTransformationMatrix.find(*it);
3260 
3261  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3262  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3263  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3264  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3265  }
3266  }
3267 
3268  modelInitialised = true;
3269 }
3270 
3285 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3286  const std::map<std::string, std::string> &mapOfModelFiles,
3287  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
3288  bool verbose,
3289  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3290 {
3291  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3292  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
3293  mapOfColorImages.find(m_referenceCameraName);
3294  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3295  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3296 
3297  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3298  it_camPose != mapOfCameraPoses.end()) {
3299  TrackerWrapper *tracker = it_tracker->second;
3300  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3301  if (it_T != mapOfT.end())
3302  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3303  else
3304  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3305 
3306  // Set reference pose
3307  tracker->getPose(m_cMo);
3308  } else {
3309  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3310  }
3311 
3312  std::vector<std::string> vectorOfMissingCameras;
3313  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3314  if (it_tracker->first != m_referenceCameraName) {
3315  it_img = mapOfColorImages.find(it_tracker->first);
3316  it_model = mapOfModelFiles.find(it_tracker->first);
3317  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3318 
3319  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3320  TrackerWrapper *tracker = it_tracker->second;
3321  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3322  } else {
3323  vectorOfMissingCameras.push_back(it_tracker->first);
3324  }
3325  }
3326  }
3327 
3328  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3329  ++it) {
3330  it_img = mapOfColorImages.find(*it);
3331  it_model = mapOfModelFiles.find(*it);
3332  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3333  m_mapOfCameraTransformationMatrix.find(*it);
3334 
3335  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3336  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3337  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3338  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3339  }
3340  }
3341 
3342  modelInitialised = true;
3343 }
3344 
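// resetTracker() restores the default settings listed below (identity pose, clipping distances,
// VVS parameters, per-feature weights) and resets every camera-specific wrapper.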
3349 void vpMbGenericTracker::resetTracker()
3350 {
3351  m_cMo.eye();
3352 
3353  useScanLine = false;
3354 
3355 #ifdef VISP_HAVE_OGRE
3356  useOgre = false;
3357 #endif
3358 
3359  m_computeInteraction = true;
3360  m_lambda = 1.0;
3361 
3362  angleAppears = vpMath::rad(89);
3363  angleDisappears = vpMath::rad(89);
3364  clippingFlag = vpPolygon3D::NO_CLIPPING;
3365  distNearClip = 0.001;
3366  distFarClip = 100;
3367 
3369  m_maxIter = 30;
3370  m_stopCriteriaEpsilon = 1e-8;
3371  m_initialMu = 0.01;
3372 
3373  // Only for Edge
3374  m_percentageGdPt = 0.4;
3375 
3376  // Only for KLT
3377  m_thresholdOutlier = 0.5;
3378 
3379  // Reset default ponderation between each feature type
3380  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
3381 
3382 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3383  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
3384 #endif
3385 
3386  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
3387  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
3388 
3389  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3390  it != m_mapOfTrackers.end(); ++it) {
3391  TrackerWrapper *tracker = it->second;
3392  tracker->resetTracker();
3393  }
3394 }
3395 
3405 void vpMbGenericTracker::setAngleAppear(const double &a)
3406 {
3407  vpMbTracker::setAngleAppear(a);
3408 
3409  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3410  it != m_mapOfTrackers.end(); ++it) {
3411  TrackerWrapper *tracker = it->second;
3412  tracker->setAngleAppear(a);
3413  }
3414 }
3415 
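// For the two-value setters such as setAngleAppear(a1, a2), a1 is applied to the first camera of
// the alphabetically ordered camera map and a2 to the second one; the tracker-level value is then
// read back from the reference camera when it exists.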
3428 void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3429 {
3430  if (m_mapOfTrackers.size() == 2) {
3431  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3432  it->second->setAngleAppear(a1);
3433 
3434  ++it;
3435  it->second->setAngleAppear(a2);
3436 
3437  it = m_mapOfTrackers.find(m_referenceCameraName);
3438  if (it != m_mapOfTrackers.end()) {
3439  angleAppears = it->second->getAngleAppear();
3440  } else {
3441  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3442  }
3443  } else {
3444  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3445  m_mapOfTrackers.size());
3446  }
3447 }
3448 
3458 void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3459 {
3460  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3461  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3462 
3463  if (it_tracker != m_mapOfTrackers.end()) {
3464  TrackerWrapper *tracker = it_tracker->second;
3465  tracker->setAngleAppear(it->second);
3466 
3467  if (it->first == m_referenceCameraName) {
3468  angleAppears = it->second;
3469  }
3470  }
3471  }
3472 }
3473 
3483 void vpMbGenericTracker::setAngleDisappear(const double &a)
3484 {
3485  vpMbTracker::setAngleDisappear(a);
3486 
3487  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3488  it != m_mapOfTrackers.end(); ++it) {
3489  TrackerWrapper *tracker = it->second;
3490  tracker->setAngleDisappear(a);
3491  }
3492 }
3493 
3506 void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3507 {
3508  if (m_mapOfTrackers.size() == 2) {
3509  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3510  it->second->setAngleDisappear(a1);
3511 
3512  ++it;
3513  it->second->setAngleDisappear(a2);
3514 
3515  it = m_mapOfTrackers.find(m_referenceCameraName);
3516  if (it != m_mapOfTrackers.end()) {
3517  angleDisappears = it->second->getAngleDisappear();
3518  } else {
3519  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3520  }
3521  } else {
3522  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3523  m_mapOfTrackers.size());
3524  }
3525 }
3526 
3536 void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3537 {
3538  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3539  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3540 
3541  if (it_tracker != m_mapOfTrackers.end()) {
3542  TrackerWrapper *tracker = it_tracker->second;
3543  tracker->setAngleDisappear(it->second);
3544 
3545  if (it->first == m_referenceCameraName) {
3546  angleDisappears = it->second;
3547  }
3548  }
3549  }
3550 }
3551 
3557 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera)
3558 {
3559  vpMbTracker::setCameraParameters(camera);
3560 
3561  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3562  it != m_mapOfTrackers.end(); ++it) {
3563  TrackerWrapper *tracker = it->second;
3564  tracker->setCameraParameters(camera);
3565  }
3566 }
3567 
3576 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera1, const vpCameraParameters &camera2)
3577 {
3578  if (m_mapOfTrackers.size() == 2) {
3579  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3580  it->second->setCameraParameters(camera1);
3581 
3582  ++it;
3583  it->second->setCameraParameters(camera2);
3584 
3585  it = m_mapOfTrackers.find(m_referenceCameraName);
3586  if (it != m_mapOfTrackers.end()) {
3587  it->second->getCameraParameters(m_cam);
3588  } else {
3589  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3590  }
3591  } else {
3592  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3593  m_mapOfTrackers.size());
3594  }
3595 }
3596 
3605 void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3606 {
3607  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3608  it != mapOfCameraParameters.end(); ++it) {
3609  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3610 
3611  if (it_tracker != m_mapOfTrackers.end()) {
3612  TrackerWrapper *tracker = it_tracker->second;
3613  tracker->setCameraParameters(it->second);
3614 
3615  if (it->first == m_referenceCameraName) {
3616  m_cam = it->second;
3617  }
3618  }
3619  }
3620 }
3621 
3630 void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3631  const vpHomogeneousMatrix &cameraTransformationMatrix)
3632 {
3633  std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3634 
3635  if (it != m_mapOfCameraTransformationMatrix.end()) {
3636  it->second = cameraTransformationMatrix;
3637  } else {
3638  throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3639  }
3640 }
3641 
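/*
 * A camera transformation matrix maps the reference camera frame to the named camera frame; it is
 * what the init/setPose code above uses as cCurrentMo = cameraTransformation * m_cMo. Sketch with
 * assumed camera names ("Camera1" being the reference) and an assumed matrix c2Mc1:
 *
 *   std::map<std::string, vpHomogeneousMatrix> mapOfCamTrans;
 *   mapOfCamTrans["Camera1"] = vpHomogeneousMatrix(); // identity for the reference camera
 *   mapOfCamTrans["Camera2"] = c2Mc1;                 // reference camera frame seen from camera 2
 *   tracker.setCameraTransformationMatrix(mapOfCamTrans);
 */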
3649 void vpMbGenericTracker::setCameraTransformationMatrix(
3650  const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3651 {
3652  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3653  it != mapOfTransformationMatrix.end(); ++it) {
3654  std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3655  m_mapOfCameraTransformationMatrix.find(it->first);
3656 
3657  if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3658  it_camTrans->second = it->second;
3659  }
3660  }
3661 }
3662 
3672 void vpMbGenericTracker::setClipping(const unsigned int &flags)
3673 {
3674  vpMbTracker::setClipping(flags);
3675 
3676  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3677  it != m_mapOfTrackers.end(); ++it) {
3678  TrackerWrapper *tracker = it->second;
3679  tracker->setClipping(flags);
3680  }
3681 }
3682 
3693 void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3694 {
3695  if (m_mapOfTrackers.size() == 2) {
3696  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3697  it->second->setClipping(flags1);
3698 
3699  ++it;
3700  it->second->setClipping(flags2);
3701 
3702  it = m_mapOfTrackers.find(m_referenceCameraName);
3703  if (it != m_mapOfTrackers.end()) {
3704  clippingFlag = it->second->getClipping();
3705  } else {
3706  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3707  }
3708  } else {
3709  std::stringstream ss;
3710  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3711  throw vpException(vpTrackingException::fatalError, ss.str());
3712  }
3713 }
3714 
3722 void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
3723 {
3724  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
3725  it != mapOfClippingFlags.end(); ++it) {
3726  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3727 
3728  if (it_tracker != m_mapOfTrackers.end()) {
3729  TrackerWrapper *tracker = it_tracker->second;
3730  tracker->setClipping(it->second);
3731 
3732  if (it->first == m_referenceCameraName) {
3733  clippingFlag = it->second;
3734  }
3735  }
3736  }
3737 }
3738 
3748 void vpMbGenericTracker::setDepthDenseFilteringMaxDistance(double maxDistance)
3749 {
3750  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3751  it != m_mapOfTrackers.end(); ++it) {
3752  TrackerWrapper *tracker = it->second;
3753  tracker->setDepthDenseFilteringMaxDistance(maxDistance);
3754  }
3755 }
3756 
3765 void vpMbGenericTracker::setDepthDenseFilteringMethod(int method)
3766 {
3767  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3768  it != m_mapOfTrackers.end(); ++it) {
3769  TrackerWrapper *tracker = it->second;
3770  tracker->setDepthDenseFilteringMethod(method);
3771  }
3772 }
3773 
3783 void vpMbGenericTracker::setDepthDenseFilteringMinDistance(double minDistance)
3784 {
3785  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3786  it != m_mapOfTrackers.end(); ++it) {
3787  TrackerWrapper *tracker = it->second;
3788  tracker->setDepthDenseFilteringMinDistance(minDistance);
3789  }
3790 }
3791 
3801 void vpMbGenericTracker::setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
3802 {
3803  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3804  it != m_mapOfTrackers.end(); ++it) {
3805  TrackerWrapper *tracker = it->second;
3806  tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
3807  }
3808 }
3809 
3818 void vpMbGenericTracker::setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
3819 {
3820  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3821  it != m_mapOfTrackers.end(); ++it) {
3822  TrackerWrapper *tracker = it->second;
3823  tracker->setDepthDenseSamplingStep(stepX, stepY);
3824  }
3825 }
3826 
3834 void vpMbGenericTracker::setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
3835 {
3836  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3837  it != m_mapOfTrackers.end(); ++it) {
3838  TrackerWrapper *tracker = it->second;
3839  tracker->setDepthNormalFaceCentroidMethod(method);
3840  }
3841 }
3842 
3851 void vpMbGenericTracker::setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
3852 {
3853  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3854  it != m_mapOfTrackers.end(); ++it) {
3855  TrackerWrapper *tracker = it->second;
3856  tracker->setDepthNormalFeatureEstimationMethod(method);
3857  }
3858 }
3859 
3867 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationMethod(int method)
3868 {
3869  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3870  it != m_mapOfTrackers.end(); ++it) {
3871  TrackerWrapper *tracker = it->second;
3872  tracker->setDepthNormalPclPlaneEstimationMethod(method);
3873  }
3874 }
3875 
3883 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
3884 {
3885  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3886  it != m_mapOfTrackers.end(); ++it) {
3887  TrackerWrapper *tracker = it->second;
3888  tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
3889  }
3890 }
3891 
3899 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacThreshold(double thresold)
3900 {
3901  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3902  it != m_mapOfTrackers.end(); ++it) {
3903  TrackerWrapper *tracker = it->second;
3904  tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
3905  }
3906 }
3907 
3916 void vpMbGenericTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
3917 {
3918  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3919  it != m_mapOfTrackers.end(); ++it) {
3920  TrackerWrapper *tracker = it->second;
3921  tracker->setDepthNormalSamplingStep(stepX, stepY);
3922  }
3923 }
3924 
3943 void vpMbGenericTracker::setDisplayFeatures(bool displayF)
3944 {
3945  vpMbTracker::setDisplayFeatures(displayF);
3946 
3947  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3948  it != m_mapOfTrackers.end(); ++it) {
3949  TrackerWrapper *tracker = it->second;
3950  tracker->setDisplayFeatures(displayF);
3951  }
3952 }
3953 
3961 void vpMbGenericTracker::setFarClippingDistance(const double &dist)
3962 {
3963  vpMbTracker::setFarClippingDistance(dist);
3964 
3965  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3966  it != m_mapOfTrackers.end(); ++it) {
3967  TrackerWrapper *tracker = it->second;
3968  tracker->setFarClippingDistance(dist);
3969  }
3970 }
3971 
3980 void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
3981 {
3982  if (m_mapOfTrackers.size() == 2) {
3983  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3984  it->second->setFarClippingDistance(dist1);
3985 
3986  ++it;
3987  it->second->setFarClippingDistance(dist2);
3988 
3989  it = m_mapOfTrackers.find(m_referenceCameraName);
3990  if (it != m_mapOfTrackers.end()) {
3991  distFarClip = it->second->getFarClippingDistance();
3992  } else {
3993  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3994  }
3995  } else {
3996  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3997  m_mapOfTrackers.size());
3998  }
3999 }
4000 
4006 void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
4007 {
4008  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4009  ++it) {
4010  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4011 
4012  if (it_tracker != m_mapOfTrackers.end()) {
4013  TrackerWrapper *tracker = it_tracker->second;
4014  tracker->setFarClippingDistance(it->second);
4015 
4016  if (it->first == m_referenceCameraName) {
4017  distFarClip = it->second;
4018  }
4019  }
4020  }
4021 }
4022 
4029 void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
4030 {
4031  for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
4032  ++it) {
4033  std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4034  if (it_factor != mapOfFeatureFactors.end()) {
4035  it->second = it_factor->second;
4036  }
4037  }
4038 }
4039 
4055 void vpMbGenericTracker::setGoodMovingEdgesRatioThreshold(double threshold)
4056 {
4057  m_percentageGdPt = threshold;
4058 
4059  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4060  it != m_mapOfTrackers.end(); ++it) {
4061  TrackerWrapper *tracker = it->second;
4062  tracker->setGoodMovingEdgesRatioThreshold(threshold);
4063  }
4064 }
4065 
4066 #ifdef VISP_HAVE_OGRE
4067 
4078 void vpMbGenericTracker::setGoodNbRayCastingAttemptsRatio(const double &ratio)
4079 {
4080  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4081  it != m_mapOfTrackers.end(); ++it) {
4082  TrackerWrapper *tracker = it->second;
4083  tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4084  }
4085 }
4086 
4098 void vpMbGenericTracker::setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
4099 {
4100  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4101  it != m_mapOfTrackers.end(); ++it) {
4102  TrackerWrapper *tracker = it->second;
4103  tracker->setNbRayCastingAttemptsForVisibility(attempts);
4104  }
4105 }
4106 #endif
4107 
4108 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4109 
4116 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t)
4117 {
4118  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4119  it != m_mapOfTrackers.end(); ++it) {
4120  TrackerWrapper *tracker = it->second;
4121  tracker->setKltOpencv(t);
4122  }
4123 }
4124 
4133 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t1, const vpKltOpencv &t2)
4134 {
4135  if (m_mapOfTrackers.size() == 2) {
4136  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4137  it->second->setKltOpencv(t1);
4138 
4139  ++it;
4140  it->second->setKltOpencv(t2);
4141  } else {
4142  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4143  m_mapOfTrackers.size());
4144  }
4145 }
4146 
4152 void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4153 {
4154  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4155  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4156 
4157  if (it_tracker != m_mapOfTrackers.end()) {
4158  TrackerWrapper *tracker = it_tracker->second;
4159  tracker->setKltOpencv(it->second);
4160  }
4161  }
4162 }
4163 
4171 void vpMbGenericTracker::setKltThresholdAcceptation(double th)
4172 {
4173  m_thresholdOutlier = th;
4174 
4175  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4176  it != m_mapOfTrackers.end(); ++it) {
4177  TrackerWrapper *tracker = it->second;
4178  tracker->setKltThresholdAcceptation(th);
4179  }
4180 }
4181 #endif
4182 
4195 void vpMbGenericTracker::setLod(bool useLod, const std::string &name)
4196 {
4197  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4198  it != m_mapOfTrackers.end(); ++it) {
4199  TrackerWrapper *tracker = it->second;
4200  tracker->setLod(useLod, name);
4201  }
4202 }
4203 
4204 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4205 
4212 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4213 {
4214  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4215  it != m_mapOfTrackers.end(); ++it) {
4216  TrackerWrapper *tracker = it->second;
4217  tracker->setKltMaskBorder(e);
4218  }
4219 }
4220 
4229 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4230 {
4231  if (m_mapOfTrackers.size() == 2) {
4232  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4233  it->second->setKltMaskBorder(e1);
4234 
4235  ++it;
4236 
4237  it->second->setKltMaskBorder(e2);
4238  } else {
4239  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4240  m_mapOfTrackers.size());
4241  }
4242 }
4243 
4249 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4250 {
4251  for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4252  ++it) {
4253  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4254 
4255  if (it_tracker != m_mapOfTrackers.end()) {
4256  TrackerWrapper *tracker = it_tracker->second;
4257  tracker->setKltMaskBorder(it->second);
4258  }
4259  }
4260 }
4261 #endif
4262 
4268 void vpMbGenericTracker::setMask(const vpImage<bool> &mask)
4269 {
4270  vpMbTracker::setMask(mask);
4271 
4272  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4273  it != m_mapOfTrackers.end(); ++it) {
4274  TrackerWrapper *tracker = it->second;
4275  tracker->setMask(mask);
4276  }
4277 }
4278 
4279 
4291 void vpMbGenericTracker::setMinLineLengthThresh(double minLineLengthThresh, const std::string &name)
4292 {
4293  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4294  it != m_mapOfTrackers.end(); ++it) {
4295  TrackerWrapper *tracker = it->second;
4296  tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4297  }
4298 }
4299 
4310 void vpMbGenericTracker::setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name)
4311 {
4312  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4313  it != m_mapOfTrackers.end(); ++it) {
4314  TrackerWrapper *tracker = it->second;
4315  tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4316  }
4317 }
4318 
4326 void vpMbGenericTracker::setMovingEdge(const vpMe &me)
4327 {
4328  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4329  it != m_mapOfTrackers.end(); ++it) {
4330  TrackerWrapper *tracker = it->second;
4331  tracker->setMovingEdge(me);
4332  }
4333 }
4334 
4344 void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4345 {
4346  if (m_mapOfTrackers.size() == 2) {
4347  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4348  it->second->setMovingEdge(me1);
4349 
4350  ++it;
4351 
4352  it->second->setMovingEdge(me2);
4353  } else {
4354  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4355  m_mapOfTrackers.size());
4356  }
4357 }
4358 
4364 void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4365 {
4366  for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4367  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4368 
4369  if (it_tracker != m_mapOfTrackers.end()) {
4370  TrackerWrapper *tracker = it_tracker->second;
4371  tracker->setMovingEdge(it->second);
4372  }
4373  }
4374 }
4375 
4383 void vpMbGenericTracker::setNearClippingDistance(const double &dist)
4384 {
4385  vpMbTracker::setNearClippingDistance(dist);
4386 
4387  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4388  it != m_mapOfTrackers.end(); ++it) {
4389  TrackerWrapper *tracker = it->second;
4390  tracker->setNearClippingDistance(dist);
4391  }
4392 }
4393 
4402 void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4403 {
4404  if (m_mapOfTrackers.size() == 2) {
4405  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4406  it->second->setNearClippingDistance(dist1);
4407 
4408  ++it;
4409 
4410  it->second->setNearClippingDistance(dist2);
4411 
4412  it = m_mapOfTrackers.find(m_referenceCameraName);
4413  if (it != m_mapOfTrackers.end()) {
4414  distNearClip = it->second->getNearClippingDistance();
4415  } else {
4416  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4417  }
4418  } else {
4419  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4420  m_mapOfTrackers.size());
4421  }
4422 }
4423 
4429 void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4430 {
4431  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4432  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4433 
4434  if (it_tracker != m_mapOfTrackers.end()) {
4435  TrackerWrapper *tracker = it_tracker->second;
4436  tracker->setNearClippingDistance(it->second);
4437 
4438  if (it->first == m_referenceCameraName) {
4439  distNearClip = it->second;
4440  }
4441  }
4442  }
4443 }
4444 
4457 void vpMbGenericTracker::setOgreShowConfigDialog(bool showConfigDialog)
4458 {
4459  vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4460 
4461  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4462  it != m_mapOfTrackers.end(); ++it) {
4463  TrackerWrapper *tracker = it->second;
4464  tracker->setOgreShowConfigDialog(showConfigDialog);
4465  }
4466 }
4467 
4478 void vpMbGenericTracker::setOgreVisibilityTest(const bool &v)
4479 {
4480  vpMbTracker::setOgreVisibilityTest(v);
4481 
4482  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4483  it != m_mapOfTrackers.end(); ++it) {
4484  TrackerWrapper *tracker = it->second;
4485  tracker->setOgreVisibilityTest(v);
4486  }
4487 
4488 #ifdef VISP_HAVE_OGRE
4489  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4490  it != m_mapOfTrackers.end(); ++it) {
4491  TrackerWrapper *tracker = it->second;
4492  tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4493  }
4494 #endif
4495 }
4496 
4504 void vpMbGenericTracker::setOptimizationMethod(const vpMbtOptimizationMethod &opt)
4505 {
4506  vpMbTracker::setOptimizationMethod(opt);
4507 
4508  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4509  it != m_mapOfTrackers.end(); ++it) {
4510  TrackerWrapper *tracker = it->second;
4511  tracker->setOptimizationMethod(opt);
4512  }
4513 }
4514 
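// The setPose() overloads force the current pose estimate. The two single-image versions below
// require the tracker to hold exactly one camera; use the stereo or map-based overloads otherwise.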
4527 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
4528 {
4529  if (m_mapOfTrackers.size() > 1) {
4530  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4531  "to be configured with only one camera!");
4532  }
4533 
4534  m_cMo = cdMo;
4535 
4536  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4537  if (it != m_mapOfTrackers.end()) {
4538  TrackerWrapper *tracker = it->second;
4539  tracker->setPose(I, cdMo);
4540  } else {
4541  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4542  m_referenceCameraName.c_str());
4543  }
4544 }
4545 
4558 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
4559 {
4560  if (m_mapOfTrackers.size() > 1) {
4561  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4562  "to be configured with only one camera!");
4563  }
4564 
4565  m_cMo = cdMo;
4566 
4567  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4568  if (it != m_mapOfTrackers.end()) {
4569  TrackerWrapper *tracker = it->second;
4570  vpImageConvert::convert(I_color, m_I);
4571  tracker->setPose(m_I, cdMo);
4572  } else {
4573  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4574  m_referenceCameraName.c_str());
4575  }
4576 }
4577 
4589 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
4590  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4591 {
4592  if (m_mapOfTrackers.size() == 2) {
4593  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4594  it->second->setPose(I1, c1Mo);
4595 
4596  ++it;
4597 
4598  it->second->setPose(I2, c2Mo);
4599 
4600  it = m_mapOfTrackers.find(m_referenceCameraName);
4601  if (it != m_mapOfTrackers.end()) {
4602  // Set reference pose
4603  it->second->getPose(m_cMo);
4604  } else {
4605  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4606  m_referenceCameraName.c_str());
4607  }
4608  } else {
4609  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4610  m_mapOfTrackers.size());
4611  }
4612 }
4613 
4625 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
4626  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4627 {
4628  if (m_mapOfTrackers.size() == 2) {
4629  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4630  it->second->setPose(I_color1, c1Mo);
4631 
4632  ++it;
4633 
4634  it->second->setPose(I_color2, c2Mo);
4635 
4636  it = m_mapOfTrackers.find(m_referenceCameraName);
4637  if (it != m_mapOfTrackers.end()) {
4638  // Set reference pose
4639  it->second->getPose(m_cMo);
4640  } else {
4641  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4642  m_referenceCameraName.c_str());
4643  }
4644  } else {
4645  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4646  m_mapOfTrackers.size());
4647  }
4648 }
4649 
4665 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4666  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4667 {
4668  // Set the reference cMo
4669  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4670  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4671  mapOfImages.find(m_referenceCameraName);
4672  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4673 
4674  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4675  TrackerWrapper *tracker = it_tracker->second;
4676  tracker->setPose(*it_img->second, it_camPose->second);
4677  tracker->getPose(m_cMo);
4678  } else {
4679  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4680  }
4681 
4682  // Vector of missing pose matrices for cameras
4683  std::vector<std::string> vectorOfMissingCameraPoses;
4684 
4685  // Set pose for the specified cameras
4686  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4687  if (it_tracker->first != m_referenceCameraName) {
4688  it_img = mapOfImages.find(it_tracker->first);
4689  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4690 
4691  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4692  // Set pose
4693  TrackerWrapper *tracker = it_tracker->second;
4694  tracker->setPose(*it_img->second, it_camPose->second);
4695  } else {
4696  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4697  }
4698  }
4699  }
4700 
4701  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4702  it != vectorOfMissingCameraPoses.end(); ++it) {
4703  it_img = mapOfImages.find(*it);
4704  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4705  m_mapOfCameraTransformationMatrix.find(*it);
4706 
4707  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4708  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
4709  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4710  } else {
4711  throw vpException(vpTrackingException::fatalError,
4712  "Missing image or missing camera transformation "
4713  "matrix! Cannot set pose for camera: %s!",
4714  it->c_str());
4715  }
4716  }
4717 }
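/*
 * Usage sketch (illustrative only): with more than two cameras, poses are passed
 * through maps keyed by the camera names used at construction time. Missing
 * entries are deduced from the reference pose and the stored camera
 * transformation matrices, as implemented above. The variables are assumptions.
 *
 *   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
 *   std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
 *   mapOfImages["Camera1"] = &I1;     // reference camera
 *   mapOfPoses["Camera1"] = c1Mo;     // only the reference pose is mandatory
 *   mapOfImages["Camera2"] = &I2;     // pose of Camera2 is deduced from c2Mc1 * c1Mo
 *   tracker.setPose(mapOfImages, mapOfPoses);
 */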
4718 
4734 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
4735  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4736 {
4737  // Set the reference cMo
4738  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4739  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
4740  mapOfColorImages.find(m_referenceCameraName);
4741  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4742 
4743  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
4744  TrackerWrapper *tracker = it_tracker->second;
4745  tracker->setPose(*it_img->second, it_camPose->second);
4746  tracker->getPose(m_cMo);
4747  } else {
4748  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4749  }
4750 
4751  // Vector of missing pose matrices for cameras
4752  std::vector<std::string> vectorOfMissingCameraPoses;
4753 
4754  // Set pose for the specified cameras
4755  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4756  if (it_tracker->first != m_referenceCameraName) {
4757  it_img = mapOfColorImages.find(it_tracker->first);
4758  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4759 
4760  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
4761  // Set pose
4762  TrackerWrapper *tracker = it_tracker->second;
4763  tracker->setPose(*it_img->second, it_camPose->second);
4764  } else {
4765  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4766  }
4767  }
4768  }
4769 
4770  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4771  it != vectorOfMissingCameraPoses.end(); ++it) {
4772  it_img = mapOfColorImages.find(*it);
4773  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4774  m_mapOfCameraTransformationMatrix.find(*it);
4775 
4776  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4777  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
4778  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4779  } else {
4780  throw vpException(vpTrackingException::fatalError,
4781  "Missing image or missing camera transformation "
4782  "matrix! Cannot set pose for camera: %s!",
4783  it->c_str());
4784  }
4785  }
4786 }
4787 
4802 void vpMbGenericTracker::setProjectionErrorComputation(const bool &flag)
4803 {
4804  vpMbTracker::setProjectionErrorComputation(flag);
4805 
4806  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4807  it != m_mapOfTrackers.end(); ++it) {
4808  TrackerWrapper *tracker = it->second;
4809  tracker->setProjectionErrorComputation(flag);
4810  }
4811 }
4812 
4816 void vpMbGenericTracker::setProjectionErrorDisplay(bool display)
4817 {
4818  vpMbTracker::setProjectionErrorDisplay(display);
4819 
4820  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4821  it != m_mapOfTrackers.end(); ++it) {
4822  TrackerWrapper *tracker = it->second;
4823  tracker->setProjectionErrorDisplay(display);
4824  }
4825 }
4826 
4830 void vpMbGenericTracker::setProjectionErrorDisplayArrowLength(unsigned int length)
4831 {
4832  vpMbTracker::setProjectionErrorDisplayArrowLength(length);
4833 
4834  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4835  it != m_mapOfTrackers.end(); ++it) {
4836  TrackerWrapper *tracker = it->second;
4837  tracker->setProjectionErrorDisplayArrowLength(length);
4838  }
4839 }
4840 
4841 void vpMbGenericTracker::setProjectionErrorDisplayArrowThickness(unsigned int thickness)
4842 {
4843  vpMbTracker::setProjectionErrorDisplayArrowThickness(thickness);
4844 
4845  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4846  it != m_mapOfTrackers.end(); ++it) {
4847  TrackerWrapper *tracker = it->second;
4848  tracker->setProjectionErrorDisplayArrowThickness(thickness);
4849  }
4850 }
4851 
4857 void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
4858 {
4859  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
4860  if (it != m_mapOfTrackers.end()) {
4861  m_referenceCameraName = referenceCameraName;
4862  } else {
4863  std::cerr << "The reference camera: " << referenceCameraName << " does not exist!" << std::endl;
4864  }
4865 }
4866 
4867 void vpMbGenericTracker::setScanLineVisibilityTest(const bool &v)
4868 {
4869  vpMbTracker::setScanLineVisibilityTest(v);
4870 
4871  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4872  it != m_mapOfTrackers.end(); ++it) {
4873  TrackerWrapper *tracker = it->second;
4874  tracker->setScanLineVisibilityTest(v);
4875  }
4876 }
4877 
4889 void vpMbGenericTracker::setTrackerType(int type)
4890 {
4891  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4892  it != m_mapOfTrackers.end(); ++it) {
4893  TrackerWrapper *tracker = it->second;
4894  tracker->setTrackerType(type);
4895  }
4896 }
4897 
4907 void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
4908 {
4909  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
4910  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4911  if (it_tracker != m_mapOfTrackers.end()) {
4912  TrackerWrapper *tracker = it_tracker->second;
4913  tracker->setTrackerType(it->second);
4914  }
4915  }
4916 }
4917 
4927 void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
4928 {
4929  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4930  it != m_mapOfTrackers.end(); ++it) {
4931  TrackerWrapper *tracker = it->second;
4932  tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
4933  }
4934 }
4935 
4945 void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
4946 {
4947  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4948  it != m_mapOfTrackers.end(); ++it) {
4949  TrackerWrapper *tracker = it->second;
4950  tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
4951  }
4952 }
4953 
4963 void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
4964 {
4965  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4966  it != m_mapOfTrackers.end(); ++it) {
4967  TrackerWrapper *tracker = it->second;
4968  tracker->setUseEdgeTracking(name, useEdgeTracking);
4969  }
4970 }
4971 
4972 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4973 
4982 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
4983 {
4984  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4985  it != m_mapOfTrackers.end(); ++it) {
4986  TrackerWrapper *tracker = it->second;
4987  tracker->setUseKltTracking(name, useKltTracking);
4988  }
4989 }
4990 #endif
4991 
4992 void vpMbGenericTracker::testTracking()
4993 {
4994  // Test tracking fails only if all testTracking have failed
4995  bool isOneTestTrackingOk = false;
4996  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4997  it != m_mapOfTrackers.end(); ++it) {
4998  TrackerWrapper *tracker = it->second;
4999  try {
5000  tracker->testTracking();
5001  isOneTestTrackingOk = true;
5002  } catch (...) {
5003  }
5004  }
5005 
5006  if (!isOneTestTrackingOk) {
5007  std::ostringstream oss;
5008  oss << "Not enough moving edges to track the object. Try to reduce the "
5009  "threshold="
5010  << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
5011  throw vpTrackingException(vpTrackingException::fatalError, oss.str());
5012  }
5013 }
5014 
5024 void vpMbGenericTracker::track(const vpImage<unsigned char> &I)
5025 {
5026  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5027  mapOfImages[m_referenceCameraName] = &I;
5028 
5029  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5030  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5031 
5032  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5033 }
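/*
 * Usage sketch (illustrative only): a typical monocular tracking loop. Image
 * acquisition ("acquire") and the display setup are hypothetical helpers that
 * are assumed to exist elsewhere.
 *
 *   vpCameraParameters cam;
 *   tracker.getCameraParameters(cam);
 *   while (acquire(I)) {
 *     vpDisplay::display(I);
 *     tracker.track(I);
 *     vpHomogeneousMatrix cMo;
 *     tracker.getPose(cMo);
 *     tracker.display(I, cMo, cam, vpColor::red, 2);
 *     vpDisplay::flush(I);
 *   }
 */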
5034 
5044 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color)
5045 {
5046  std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5047  mapOfColorImages[m_referenceCameraName] = &I_color;
5048 
5049  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5050  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5051 
5052  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5053 }
5054 
5065 void vpMbGenericTracker::track(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2)
5066 {
5067  if (m_mapOfTrackers.size() == 2) {
5068  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5069  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5070  mapOfImages[it->first] = &I1;
5071  ++it;
5072 
5073  mapOfImages[it->first] = &I2;
5074 
5075  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5076  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5077 
5078  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5079  } else {
5080  std::stringstream ss;
5081  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5082  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5083  }
5084 }
5085 
5096 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2)
5097 {
5098  if (m_mapOfTrackers.size() == 2) {
5099  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5100  std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5101  mapOfImages[it->first] = &I_color1;
5102  ++it;
5103 
5104  mapOfImages[it->first] = &I_color2;
5105 
5106  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5107  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5108 
5109  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5110  } else {
5111  std::stringstream ss;
5112  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5113  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5114  }
5115 }
5116 
5124 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5125 {
5126  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5127  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5128 
5129  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5130 }
5131 
5139 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5140 {
5141  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5142  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5143 
5144  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5145 }
5146 
5147 #ifdef VISP_HAVE_PCL
5148 
5156 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5157  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5158 {
5159  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5160  it != m_mapOfTrackers.end(); ++it) {
5161  TrackerWrapper *tracker = it->second;
5162 
5163  if ((tracker->m_trackerType & (EDGE_TRACKER |
5164 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5165  KLT_TRACKER |
5166 #endif
5167  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5168  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5169  }
5170 
5171  if (tracker->m_trackerType & (EDGE_TRACKER
5172 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5173  | KLT_TRACKER
5174 #endif
5175  ) &&
5176  mapOfImages[it->first] == NULL) {
5177  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5178  }
5179 
5180  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5181  ! mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5182  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5183  }
5184  }
5185 
5186  preTracking(mapOfImages, mapOfPointClouds);
5187 
5188  try {
5189  computeVVS(mapOfImages);
5190  } catch (...) {
5191  covarianceMatrix = -1;
5192  throw; // throw the original exception
5193  }
5194 
5195  testTracking();
5196 
5197  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5198  it != m_mapOfTrackers.end(); ++it) {
5199  TrackerWrapper *tracker = it->second;
5200 
5201  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5202  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5203  }
5204 
5205  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5206 
5207  if (displayFeatures) {
5208 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5209  if (tracker->m_trackerType & KLT_TRACKER) {
5210  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5211  }
5212 #endif
5213 
5214  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5215  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5216  }
5217  }
5218  }
5219 
5220  computeProjectionError();
5221 }
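/*
 * Usage sketch (illustrative only): combining an edge tracker on a grayscale
 * camera with a depth-dense tracker fed by a PCL point cloud. The camera names,
 * the image I and the point cloud handle are assumptions for the example.
 *
 *   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
 *   std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
 *   mapOfImages["Camera1"] = &I;                 // EDGE_TRACKER camera
 *   mapOfPointClouds["Camera2"] = pointcloud;    // DEPTH_DENSE_TRACKER camera
 *   tracker.track(mapOfImages, mapOfPointClouds);
 */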
5222 
5231 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5232  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5233 {
5234  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5235  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5236  it != m_mapOfTrackers.end(); ++it) {
5237  TrackerWrapper *tracker = it->second;
5238 
5239  if ((tracker->m_trackerType & (EDGE_TRACKER |
5240 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5241  KLT_TRACKER |
5242 #endif
5243  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5244  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5245  }
5246 
5247  if (tracker->m_trackerType & (EDGE_TRACKER
5248 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5249  | KLT_TRACKER
5250 #endif
5251  ) && mapOfImages[it->first] == NULL) {
5252  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5253  } else if (tracker->m_trackerType & (EDGE_TRACKER
5254 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5255  | KLT_TRACKER
5256 #endif
5257  ) && mapOfImages[it->first] != NULL) {
5258  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5259  mapOfImages[it->first] = &tracker->m_I; //update grayscale image buffer
5260  }
5261 
5262  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5263  ! mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5264  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5265  }
5266  }
5267 
5268  preTracking(mapOfImages, mapOfPointClouds);
5269 
5270  try {
5271  computeVVS(mapOfImages);
5272  } catch (...) {
5273  covarianceMatrix = -1;
5274  throw; // throw the original exception
5275  }
5276 
5277  testTracking();
5278 
5279  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5280  it != m_mapOfTrackers.end(); ++it) {
5281  TrackerWrapper *tracker = it->second;
5282 
5283  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5284  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5285  }
5286 
5287  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5288 
5289  if (displayFeatures) {
5290 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5291  if (tracker->m_trackerType & KLT_TRACKER) {
5292  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5293  }
5294 #endif
5295 
5296  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5297  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5298  }
5299  }
5300  }
5301 
5302  computeProjectionError();
5303 }
5304 #endif
5305 
5316 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5317  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5318  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5319  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5320 {
5321  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5322  it != m_mapOfTrackers.end(); ++it) {
5323  TrackerWrapper *tracker = it->second;
5324 
5325  if ((tracker->m_trackerType & (EDGE_TRACKER |
5326 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5327  KLT_TRACKER |
5328 #endif
5329  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5330  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5331  }
5332 
5333  if (tracker->m_trackerType & (EDGE_TRACKER
5334 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5335  | KLT_TRACKER
5336 #endif
5337  ) &&
5338  mapOfImages[it->first] == NULL) {
5339  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5340  }
5341 
5342  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5343  (mapOfPointClouds[it->first] == NULL)) {
5344  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5345  }
5346  }
5347 
5348  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5349 
5350  try {
5351  computeVVS(mapOfImages);
5352  } catch (...) {
5353  covarianceMatrix = -1;
5354  throw; // throw the original exception
5355  }
5356 
5357  testTracking();
5358 
5359  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5360  it != m_mapOfTrackers.end(); ++it) {
5361  TrackerWrapper *tracker = it->second;
5362 
5363  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5364  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5365  }
5366 
5367  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5368 
5369  if (displayFeatures) {
5370 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5371  if (tracker->m_trackerType & KLT_TRACKER) {
5372  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5373  }
5374 #endif
5375 
5376  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5377  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5378  }
5379  }
5380  }
5381 
5382  computeProjectionError();
5383 }
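/*
 * Usage sketch (illustrative only): the non-PCL overload above takes the point
 * cloud as a flat vector of vpColVector (one 3D point per element) together
 * with its original width and height. The variables below are assumptions.
 *
 *   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
 *   std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
 *   std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
 *   mapOfImages["Camera1"] = &I;
 *   mapOfPointClouds["Camera2"] = &pointcloud;   // std::vector<vpColVector> of size w*h
 *   mapOfWidths["Camera2"] = depth_width;
 *   mapOfHeights["Camera2"] = depth_height;
 *   tracker.track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
 */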
5384 
5395 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5396  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5397  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5398  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5399 {
5400  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5401  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5402  it != m_mapOfTrackers.end(); ++it) {
5403  TrackerWrapper *tracker = it->second;
5404 
5405  if ((tracker->m_trackerType & (EDGE_TRACKER |
5406 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5407  KLT_TRACKER |
5408 #endif
5409  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5410  throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5411  }
5412 
5413  if (tracker->m_trackerType & (EDGE_TRACKER
5414 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5415  | KLT_TRACKER
5416 #endif
5417  ) && mapOfColorImages[it->first] == NULL) {
5418  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5419  } else if (tracker->m_trackerType & (EDGE_TRACKER
5420 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5421  | KLT_TRACKER
5422 #endif
5423  ) && mapOfColorImages[it->first] != NULL) {
5424  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5425  mapOfImages[it->first] = &tracker->m_I; //update grayscale image buffer
5426  }
5427 
5428  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5429  (mapOfPointClouds[it->first] == NULL)) {
5430  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5431  }
5432  }
5433 
5434  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5435 
5436  try {
5437  computeVVS(mapOfImages);
5438  } catch (...) {
5439  covarianceMatrix = -1;
5440  throw; // throw the original exception
5441  }
5442 
5443  testTracking();
5444 
5445  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5446  it != m_mapOfTrackers.end(); ++it) {
5447  TrackerWrapper *tracker = it->second;
5448 
5449  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5450  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5451  }
5452 
5453  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5454 
5455  if (displayFeatures) {
5456 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5457  if (tracker->m_trackerType & KLT_TRACKER) {
5458  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5459  }
5460 #endif
5461 
5462  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5463  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5464  }
5465  }
5466  }
5467 
5468  computeProjectionError();
5469 }
5470 
5472 vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5473  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5474 {
5475  m_lambda = 1.0;
5476  m_maxIter = 30;
5477 
5478 #ifdef VISP_HAVE_OGRE
5479  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5480 
5481  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5482 #endif
5483 }
5484 
5485 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
5486  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5487 {
5488  if ((m_trackerType & (EDGE_TRACKER |
5489 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5490  KLT_TRACKER |
5491 #endif
5492  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5493  throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
5494  }
5495 
5496  m_lambda = 1.0;
5497  m_maxIter = 30;
5498 
5499 #ifdef VISP_HAVE_OGRE
5500  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5501 
5502  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5503 #endif
5504 }
5505 
5506 vpMbGenericTracker::TrackerWrapper::~TrackerWrapper() { }
5507 
5508 // Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
5509 void vpMbGenericTracker::TrackerWrapper::computeVVS(const vpImage<unsigned char> *const ptr_I)
5510 {
5511  computeVVSInit(ptr_I);
5512 
5513  if (m_error.getRows() < 4) {
5514  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
5515  }
5516 
5517  double normRes = 0;
5518  double normRes_1 = -1;
5519  unsigned int iter = 0;
5520 
5521  double factorEdge = 1.0;
5522 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5523  double factorKlt = 1.0;
5524 #endif
5525  double factorDepth = 1.0;
5526  double factorDepthDense = 1.0;
5527 
5528  vpMatrix LTL;
5529  vpColVector LTR, v;
5530  vpColVector error_prev;
5531 
5532  double mu = m_initialMu;
5533  vpHomogeneousMatrix cMo_prev;
5534 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5535  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
5536 #endif
5537  bool isoJoIdentity_ = true;
5538 
5539  // Covariance
5540  vpColVector W_true(m_error.getRows());
5541  vpMatrix L_true, LVJ_true;
5542 
5543  unsigned int nb_edge_features = m_error_edge.getRows();
5544 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5545  unsigned int nb_klt_features = m_error_klt.getRows();
5546 #endif
5547  unsigned int nb_depth_features = m_error_depthNormal.getRows();
5548  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
5549 
5550  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
5551  computeVVSInteractionMatrixAndResidu(ptr_I);
5552 
5553  bool reStartFromLastIncrement = false;
5554  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
5555 
5556 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5557  if (reStartFromLastIncrement) {
5558  if (m_trackerType & KLT_TRACKER) {
5559  ctTc0 = ctTc0_Prev;
5560  }
5561  }
5562 #endif
5563 
5564  if (!reStartFromLastIncrement) {
5565  computeVVSWeights();
5566 
5567  if (computeCovariance) {
5568  L_true = m_L;
5569  if (!isoJoIdentity_) {
5570  vpVelocityTwistMatrix cVo;
5571  cVo.buildFrom(m_cMo);
5572  LVJ_true = (m_L * cVo * oJo);
5573  }
5574  }
5575 
5576  vpVelocityTwistMatrix cVo;
5577  if (iter == 0) {
5578  isoJoIdentity_ = true;
5579  oJo.eye();
5580 
5581  // If all the 6 dof should be estimated, we check if the interaction
5582  // matrix is full rank. If not, we automatically remove the dof that
5583  // cannot be estimated. This is particularly useful when considering
5584  // circles (rank 5) and cylinders (rank 4).
5585  if (isoJoIdentity_) {
5586  cVo.buildFrom(m_cMo);
5587 
5588  vpMatrix K; // kernel
5589  unsigned int rank = (m_L * cVo).kernel(K);
5590  if (rank == 0) {
5591  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
5592  }
5593 
5594  if (rank != 6) {
5595  vpMatrix I; // Identity
5596  I.eye(6);
5597  oJo = I - K.AtA();
5598 
5599  isoJoIdentity_ = false;
5600  }
5601  }
5602  }
5603 
5604  // Weighting
5605  double num = 0;
5606  double den = 0;
5607 
5608  unsigned int start_index = 0;
5609  if (m_trackerType & EDGE_TRACKER) {
5610  for (unsigned int i = 0; i < nb_edge_features; i++) {
5611  double wi = m_w_edge[i] * m_factor[i] * factorEdge;
5612  W_true[i] = wi;
5613  m_weightedError[i] = wi * m_error[i];
5614 
5615  num += wi * vpMath::sqr(m_error[i]);
5616  den += wi;
5617 
5618  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5619  m_L[i][j] *= wi;
5620  }
5621  }
5622 
5623  start_index += nb_edge_features;
5624  }
5625 
5626 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5627  if (m_trackerType & KLT_TRACKER) {
5628  for (unsigned int i = 0; i < nb_klt_features; i++) {
5629  double wi = m_w_klt[i] * factorKlt;
5630  W_true[start_index + i] = wi;
5631  m_weightedError[start_index + i] = wi * m_error_klt[i];
5632 
5633  num += wi * vpMath::sqr(m_error[start_index + i]);
5634  den += wi;
5635 
5636  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5637  m_L[start_index + i][j] *= wi;
5638  }
5639  }
5640 
5641  start_index += nb_klt_features;
5642  }
5643 #endif
5644 
5645  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5646  for (unsigned int i = 0; i < nb_depth_features; i++) {
5647  double wi = m_w_depthNormal[i] * factorDepth;
5648  m_w[start_index + i] = m_w_depthNormal[i];
5649  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5650 
5651  num += wi * vpMath::sqr(m_error[start_index + i]);
5652  den += wi;
5653 
5654  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5655  m_L[start_index + i][j] *= wi;
5656  }
5657  }
5658 
5659  start_index += nb_depth_features;
5660  }
5661 
5662  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5663  for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
5664  double wi = m_w_depthDense[i] * factorDepthDense;
5665  m_w[start_index + i] = m_w_depthDense[i];
5666  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5667 
5668  num += wi * vpMath::sqr(m_error[start_index + i]);
5669  den += wi;
5670 
5671  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5672  m_L[start_index + i][j] *= wi;
5673  }
5674  }
5675 
5676  // start_index += nb_depth_dense_features;
5677  }
5678 
5679  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
5680 
5681  cMo_prev = m_cMo;
5682 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5683  if (m_trackerType & KLT_TRACKER) {
5684  ctTc0_Prev = ctTc0;
5685  }
5686 #endif
5687 
5688  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
5689 
5690 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5691  if (m_trackerType & KLT_TRACKER) {
5692  ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
5693  }
5694 #endif
5695  normRes_1 = normRes;
5696 
5697  normRes = sqrt(num / den);
5698  }
5699 
5700  iter++;
5701  }
5702 
5703  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
5704 
5705  if (m_trackerType & EDGE_TRACKER) {
5706  vpMbEdgeTracker::updateMovingEdgeWeights();
5707  }
5708 }
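// Summary of the loop above: each enabled feature type contributes a block of
// residuals e and rows of the interaction matrix m_L. A robust weight w_i scales
// both m_weightedError[i] = w_i * e_i and the i-th row of m_L, the normalized
// residual sqrt(sum(w_i * e_i^2) / sum(w_i)) drives the stopping test, and
// computeVVSPoseEstimation() returns the velocity twist v applied to the pose
// (and to ctTc0 for the KLT part) through vpExponentialMap::direct(v).inverse().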
5709 
5710 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
5711 {
5712  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
5713  "TrackerWrapper::computeVVSInit("
5714  ") should not be called!");
5715 }
5716 
5717 void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
5718 {
5719  initMbtTracking(ptr_I);
5720 
5721  unsigned int nbFeatures = 0;
5722 
5723  if (m_trackerType & EDGE_TRACKER) {
5724  nbFeatures += m_error_edge.getRows();
5725  } else {
5726  m_error_edge.clear();
5727  m_weightedError_edge.clear();
5728  m_L_edge.clear();
5729  m_w_edge.clear();
5730  }
5731 
5732 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5733  if (m_trackerType & KLT_TRACKER) {
5734  vpMbKltTracker::computeVVSInit();
5735  nbFeatures += m_error_klt.getRows();
5736  } else {
5737  m_error_klt.clear();
5738  m_weightedError_klt.clear();
5739  m_L_klt.clear();
5740  m_w_klt.clear();
5741  }
5742 #endif
5743 
5744  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5745  vpMbDepthNormalTracker::computeVVSInit();
5746  nbFeatures += m_error_depthNormal.getRows();
5747  } else {
5748  m_error_depthNormal.clear();
5749  m_weightedError_depthNormal.clear();
5750  m_L_depthNormal.clear();
5751  m_w_depthNormal.clear();
5752  }
5753 
5754  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5755  vpMbDepthDenseTracker::computeVVSInit();
5756  nbFeatures += m_error_depthDense.getRows();
5757  } else {
5758  m_error_depthDense.clear();
5759  m_weightedError_depthDense.clear();
5760  m_L_depthDense.clear();
5761  m_w_depthDense.clear();
5762  }
5763 
5764  m_L.resize(nbFeatures, 6, false, false);
5765  m_error.resize(nbFeatures, false);
5766 
5767  m_weightedError.resize(nbFeatures, false);
5768  m_w.resize(nbFeatures, false);
5769  m_w = 1;
5770 }
5771 
5772 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu()
5773 {
5774  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
5775  "TrackerWrapper::"
5776  "computeVVSInteractionMatrixAndR"
5777  "esidu() should not be called!");
5778 }
5779 
5780 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu(const vpImage<unsigned char> *const ptr_I)
5781 {
5782  if (m_trackerType & EDGE_TRACKER) {
5783  vpMbEdgeTracker::computeVVSInteractionMatrixAndResidu(*ptr_I);
5784  }
5785 
5786 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5787  if (m_trackerType & KLT_TRACKER) {
5788  vpMbKltTracker::computeVVSInteractionMatrixAndResidu();
5789  }
5790 #endif
5791 
5792  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5793  vpMbDepthNormalTracker::computeVVSInteractionMatrixAndResidu();
5794  }
5795 
5796  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5797  vpMbDepthDenseTracker::computeVVSInteractionMatrixAndResidu();
5798  }
5799 
5800  unsigned int start_index = 0;
5801  if (m_trackerType & EDGE_TRACKER) {
5802  m_L.insert(m_L_edge, start_index, 0);
5803  m_error.insert(start_index, m_error_edge);
5804 
5805  start_index += m_error_edge.getRows();
5806  }
5807 
5808 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5809  if (m_trackerType & KLT_TRACKER) {
5810  m_L.insert(m_L_klt, start_index, 0);
5811  m_error.insert(start_index, m_error_klt);
5812 
5813  start_index += m_error_klt.getRows();
5814  }
5815 #endif
5816 
5817  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5818  m_L.insert(m_L_depthNormal, start_index, 0);
5819  m_error.insert(start_index, m_error_depthNormal);
5820 
5821  start_index += m_error_depthNormal.getRows();
5822  }
5823 
5824  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5825  m_L.insert(m_L_depthDense, start_index, 0);
5826  m_error.insert(start_index, m_error_depthDense);
5827 
5828  // start_index += m_error_depthDense.getRows();
5829  }
5830 }
5831 
5832 void vpMbGenericTracker::TrackerWrapper::computeVVSWeights()
5833 {
5834  unsigned int start_index = 0;
5835 
5836  if (m_trackerType & EDGE_TRACKER) {
5837  vpMbEdgeTracker::computeVVSWeights();
5838  m_w.insert(start_index, m_w_edge);
5839 
5840  start_index += m_w_edge.getRows();
5841  }
5842 
5843 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5844  if (m_trackerType & KLT_TRACKER) {
5845  vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
5846  m_w.insert(start_index, m_w_klt);
5847 
5848  start_index += m_w_klt.getRows();
5849  }
5850 #endif
5851 
5852  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5853  if (m_depthNormalUseRobust) {
5854  vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
5855  m_w.insert(start_index, m_w_depthNormal);
5856  }
5857 
5858  start_index += m_w_depthNormal.getRows();
5859  }
5860 
5861  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5862  vpMbTracker::computeVVSWeights(m_robust_depthDense, m_error_depthDense, m_w_depthDense);
5863  m_w.insert(start_index, m_w_depthDense);
5864 
5865  // start_index += m_w_depthDense.getRows();
5866  }
5867 }
5868 
5869 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
5870  const vpCameraParameters &cam, const vpColor &col,
5871  unsigned int thickness, bool displayFullModel)
5872 {
5873  if (displayFeatures) {
5874  std::vector<std::vector<double> > features = getFeaturesForDisplay();
5875  for (size_t i = 0; i < features.size(); i++) {
5876  if (vpMath::equal(features[i][0], 0)) {
5877  vpImagePoint ip(features[i][1], features[i][2]);
5878  int state = static_cast<int>(features[i][3]);
5879 
5880  switch (state) {
5881  case vpMeSite::NO_SUPPRESSION:
5882  vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
5883  break;
5884 
5885  case vpMeSite::CONSTRAST:
5886  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
5887  break;
5888 
5889  case vpMeSite::THRESHOLD:
5890  vpDisplay::displayCross(I, ip, 3, vpColor::purple, 1);
5891  break;
5892 
5893  case vpMeSite::M_ESTIMATOR:
5894  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
5895  break;
5896 
5897  case vpMeSite::TOO_NEAR:
5898  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
5899  break;
5900 
5901  default:
5902  vpDisplay::displayCross(I, ip, 3, vpColor::yellow, 1);
5903  }
5904  } else if (vpMath::equal(features[i][0], 1)) {
5905  vpImagePoint ip1(features[i][1], features[i][2]);
5906  vpDisplay::displayCross(I, ip1, 10, vpColor::red);
5907 
5908  vpImagePoint ip2(features[i][3], features[i][4]);
5909  double id = features[i][5];
5910  std::stringstream ss;
5911  ss << id;
5912  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
5913  } else if (vpMath::equal(features[i][0], 2)) {
5914  vpImagePoint im_centroid(features[i][1], features[i][2]);
5915  vpImagePoint im_extremity(features[i][3], features[i][4]);
5916  bool desired = vpMath::equal(features[i][0], 2);
5917  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
5918  }
5919  }
5920  }
5921 
5922  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
5923  for (size_t i = 0; i < models.size(); i++) {
5924  if (vpMath::equal(models[i][0], 0)) {
5925  vpImagePoint ip1(models[i][1], models[i][2]);
5926  vpImagePoint ip2(models[i][3], models[i][4]);
5927  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
5928  } else if (vpMath::equal(models[i][0], 1)) {
5929  vpImagePoint center(models[i][1], models[i][2]);
5930  double n20 = models[i][3];
5931  double n11 = models[i][4];
5932  double n02 = models[i][5];
5933  vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
5934  }
5935  }
5936 
5937 #ifdef VISP_HAVE_OGRE
5938  if ((m_trackerType & EDGE_TRACKER)
5939  #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5940  || (m_trackerType & KLT_TRACKER)
5941  #endif
5942  ) {
5943  if (useOgre)
5944  faces.displayOgre(cMo);
5945  }
5946 #endif
5947 }
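// Note on the encoding used above: each row of getFeaturesForDisplay() starts
// with a type tag. Tag 0 is a moving-edge site (row, col, then the vpMeSite
// state), tag 1 is a KLT point (row, col of the point, then the row, col and id
// used for the text label), and tag 2 is a depth-normal feature drawn as an
// arrow from the face centroid to the extremity of the estimated normal.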
5948 
5949 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
5950  const vpCameraParameters &cam, const vpColor &col,
5951  unsigned int thickness, bool displayFullModel)
5952 {
5953  if (displayFeatures) {
5954  std::vector<std::vector<double> > features = getFeaturesForDisplay();
5955  for (size_t i = 0; i < features.size(); i++) {
5956  if (vpMath::equal(features[i][0], 0)) {
5957  vpImagePoint ip(features[i][1], features[i][2]);
5958  int state = static_cast<int>(features[i][3]);
5959 
5960  switch (state) {
5961  case vpMeSite::NO_SUPPRESSION:
5962  vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
5963  break;
5964 
5965  case vpMeSite::CONSTRAST:
5966  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
5967  break;
5968 
5969  case vpMeSite::THRESHOLD:
5970  vpDisplay::displayCross(I, ip, 3, vpColor::purple, 1);
5971  break;
5972 
5973  case vpMeSite::M_ESTIMATOR:
5974  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
5975  break;
5976 
5977  case vpMeSite::TOO_NEAR:
5978  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
5979  break;
5980 
5981  default:
5982  vpDisplay::displayCross(I, ip, 3, vpColor::yellow, 1);
5983  }
5984  } else if (vpMath::equal(features[i][0], 1)) {
5985  vpImagePoint ip1(features[i][1], features[i][2]);
5986  vpDisplay::displayCross(I, ip1, 10, vpColor::red);
5987 
5988  vpImagePoint ip2(features[i][3], features[i][4]);
5989  double id = features[i][5];
5990  std::stringstream ss;
5991  ss << id;
5992  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
5993  } else if (vpMath::equal(features[i][0], 2)) {
5994  vpImagePoint im_centroid(features[i][1], features[i][2]);
5995  vpImagePoint im_extremity(features[i][3], features[i][4]);
5996  bool desired = vpMath::equal(features[i][0], 2);
5997  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
5998  }
5999  }
6000  }
6001 
6002  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
6003  for (size_t i = 0; i < models.size(); i++) {
6004  if (vpMath::equal(models[i][0], 0)) {
6005  vpImagePoint ip1(models[i][1], models[i][2]);
6006  vpImagePoint ip2(models[i][3], models[i][4]);
6007  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
6008  } else if (vpMath::equal(models[i][0], 1)) {
6009  vpImagePoint center(models[i][1], models[i][2]);
6010  double n20 = models[i][3];
6011  double n11 = models[i][4];
6012  double n02 = models[i][5];
6013  vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
6014  }
6015  }
6016 
6017 #ifdef VISP_HAVE_OGRE
6018  if ((m_trackerType & EDGE_TRACKER)
6019  #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6020  || (m_trackerType & KLT_TRACKER)
6021  #endif
6022  ) {
6023  if (useOgre)
6024  faces.displayOgre(cMo);
6025  }
6026 #endif
6027 }
6028 
6029 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6030 {
6031  std::vector<std::vector<double> > features;
6032 
6033  if (m_trackerType & EDGE_TRACKER) {
6034  //m_featuresToBeDisplayedEdge updated after computeVVS()
6035  features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6036  }
6037 
6038 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6039  if (m_trackerType & KLT_TRACKER) {
6040  //m_featuresToBeDisplayedKlt updated after postTracking()
6041  features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6042  }
6043 #endif
6044 
6045  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6046  //m_featuresToBeDisplayedDepthNormal updated after postTracking()
6047  features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(), m_featuresToBeDisplayedDepthNormal.end());
6048  }
6049 
6050  return features;
6051 }
6052 
6053 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width, unsigned int height,
6054  const vpHomogeneousMatrix &cMo,
6055  const vpCameraParameters &cam,
6056  bool displayFullModel)
6057 {
6058  std::vector<std::vector<double> > models;
6059 
6060  //Do not add multiple times the same models
6061  if (m_trackerType == EDGE_TRACKER) {
6062  models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6063  }
6064 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6065  else if (m_trackerType == KLT_TRACKER) {
6066  models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6067  }
6068 #endif
6069  else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6070  models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6071  } else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6072  models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6073  } else {
6074  //Edge and KLT trackers use the same primitives
6075  if (m_trackerType & EDGE_TRACKER) {
6076  std::vector<std::vector<double> > edgeModels = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6077  models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6078  }
6079 
6080  //Depth dense and depth normal trackers use the same primitives
6081  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6082  std::vector<std::vector<double> > depthDenseModels = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6083  models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6084  }
6085  }
6086 
6087  return models;
6088 }
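// Note on the encoding used above: each row of getModelForDisplay() also starts
// with a type tag. Tag 0 is a line segment given by two image points, and tag 1
// is an ellipse given by its center and the normalized second-order moments
// n20, n11, n02 expected by vpDisplay::displayEllipse().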
6089 
6090 void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
6091 {
6092  if (!modelInitialised) {
6093  throw vpException(vpException::fatalError, "model not initialized");
6094  }
6095 
6096  if (useScanLine || clippingFlag > 3)
6097  m_cam.computeFov(I.getWidth(), I.getHeight());
6098 
6099  bool reInitialisation = false;
6100  if (!useOgre) {
6101  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6102  } else {
6103 #ifdef VISP_HAVE_OGRE
6104  if (!faces.isOgreInitialised()) {
6105  faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());
6106 
6107  faces.setOgreShowConfigDialog(ogreShowConfigDialog);
6108  faces.initOgre(m_cam);
6109  // Turn off Ogre config dialog display for the next call to this
6110  // function since settings are saved in the ogre.cfg file and used
6111  // during the next call
6112  ogreShowConfigDialog = false;
6113  }
6114 
6115  faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6116 #else
6117  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6118 #endif
6119  }
6120 
6121  if (useScanLine) {
6122  faces.computeClippedPolygons(m_cMo, m_cam);
6123  faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight());
6124  }
6125 
6126 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6127  if (m_trackerType & KLT_TRACKER)
6128  vpMbKltTracker::reinit(I);
6129 #endif
6130 
6131  if (m_trackerType & EDGE_TRACKER) {
6132  vpMbEdgeTracker::resetMovingEdge();
6133 
6134  bool a = false;
6135  vpMbEdgeTracker::visibleFace(I, m_cMo, a); // should be useless, but keep it for nbvisiblepolygone
6136 
6137  initMovingEdge(I, m_cMo);
6138  }
6139 
6140  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6141  vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);
6142 
6143  if (m_trackerType & DEPTH_DENSE_TRACKER)
6144  vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
6145 }
6146 
6147 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6148  double radius, int idFace, const std::string &name)
6149 {
6150  if (m_trackerType & EDGE_TRACKER)
6151  vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6152 
6153 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6154  if (m_trackerType & KLT_TRACKER)
6155  vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6156 #endif
6157 }
6158 
6159 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius,
6160  int idFace, const std::string &name)
6161 {
6162  if (m_trackerType & EDGE_TRACKER)
6163  vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6164 
6165 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6166  if (m_trackerType & KLT_TRACKER)
6167  vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6168 #endif
6169 }
6170 
6171 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
6172 {
6173  if (m_trackerType & EDGE_TRACKER)
6174  vpMbEdgeTracker::initFaceFromCorners(polygon);
6175 
6176 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6177  if (m_trackerType & KLT_TRACKER)
6178  vpMbKltTracker::initFaceFromCorners(polygon);
6179 #endif
6180 
6181  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6182  vpMbDepthNormalTracker::initFaceFromCorners(polygon);
6183 
6184  if (m_trackerType & DEPTH_DENSE_TRACKER)
6185  vpMbDepthDenseTracker::initFaceFromCorners(polygon);
6186 }
6187 
6188 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
6189 {
6190  if (m_trackerType & EDGE_TRACKER)
6191  vpMbEdgeTracker::initFaceFromLines(polygon);
6192 
6193 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6194  if (m_trackerType & KLT_TRACKER)
6195  vpMbKltTracker::initFaceFromLines(polygon);
6196 #endif
6197 
6198  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6199  vpMbDepthNormalTracker::initFaceFromLines(polygon);
6200 
6201  if (m_trackerType & DEPTH_DENSE_TRACKER)
6202  vpMbDepthDenseTracker::initFaceFromLines(polygon);
6203 }
6204 
6205 void vpMbGenericTracker::TrackerWrapper::initMbtTracking(const vpImage<unsigned char> *const ptr_I)
6206 {
6207  if (m_trackerType & EDGE_TRACKER) {
6208  vpMbEdgeTracker::computeVVSInit();
6209  vpMbEdgeTracker::computeVVSFirstPhaseFactor(*ptr_I, 0);
6210  }
6211 }
6212 
6213 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile, bool verbose)
6214 {
6215  // Load projection error config
6216  vpMbTracker::loadConfigFile(configFile, verbose);
6217 
6218  vpMbtXmlGenericParser xmlp((vpMbtXmlGenericParser::vpParserType)m_trackerType);
6219  xmlp.setVerbose(verbose);
6220  xmlp.setCameraParameters(m_cam);
6221  xmlp.setAngleAppear(vpMath::deg(angleAppears));
6222  xmlp.setAngleDisappear(vpMath::deg(angleDisappears));
6223 
6224  // Edge
6225  xmlp.setEdgeMe(me);
6226 
6227  // KLT
6228  xmlp.setKltMaxFeatures(10000);
6229  xmlp.setKltWindowSize(5);
6230  xmlp.setKltQuality(0.01);
6231  xmlp.setKltMinDistance(5);
6232  xmlp.setKltHarrisParam(0.01);
6233  xmlp.setKltBlockSize(3);
6234  xmlp.setKltPyramidLevels(3);
6235 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6236  xmlp.setKltMaskBorder(maskBorder);
6237 #endif
6238 
6239  // Depth normal
6240  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6241  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6242  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6243  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6244  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6245  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6246 
6247  // Depth dense
6248  xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6249  xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6250 
6251  try {
6252  if (verbose) {
6253  std::cout << " *********** Parsing XML for";
6254  }
6255 
6256  std::vector<std::string> tracker_names;
6257  if (m_trackerType & EDGE_TRACKER)
6258  tracker_names.push_back("Edge");
6259 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6260  if (m_trackerType & KLT_TRACKER)
6261  tracker_names.push_back("Klt");
6262 #endif
6263  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6264  tracker_names.push_back("Depth Normal");
6265  if (m_trackerType & DEPTH_DENSE_TRACKER)
6266  tracker_names.push_back("Depth Dense");
6267 
6268  if (verbose) {
6269  for (size_t i = 0; i < tracker_names.size(); i++) {
6270  std::cout << " " << tracker_names[i];
6271  if (i == tracker_names.size() - 1) {
6272  std::cout << " ";
6273  }
6274  }
6275 
6276  std::cout << "Model-Based Tracker ************ " << std::endl;
6277  }
6278 
6279  xmlp.parse(configFile);
6280  } catch (...) {
6281  throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
6282  }
6283 
6284  vpCameraParameters camera;
6285  xmlp.getCameraParameters(camera);
6286  setCameraParameters(camera);
6287 
6287 
6288  angleAppears = vpMath::rad(xmlp.getAngleAppear());
6289  angleDisappears = vpMath::rad(xmlp.getAngleDisappear());
6290 
6291  if (xmlp.hasNearClippingDistance())
6292  setNearClippingDistance(xmlp.getNearClippingDistance());
6293 
6294  if (xmlp.hasFarClippingDistance())
6295  setFarClippingDistance(xmlp.getFarClippingDistance());
6296 
6297  if (xmlp.getFovClipping()) {
6298  setClipping(clippingFlag | vpPolygon3D::FOV_CLIPPING);
6299  }
6300 
6301  useLodGeneral = xmlp.getLodState();
6302  minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
6303  minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();
6304 
6305  applyLodSettingInConfig = false;
6306  if (this->getNbPolygon() > 0) {
6307  applyLodSettingInConfig = true;
6308  setLod(useLodGeneral);
6309  setMinLineLengthThresh(minLineLengthThresholdGeneral);
6310  setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
6311  }
6312 
6313  // Edge
6314  vpMe meParser;
6315  xmlp.getEdgeMe(meParser);
6316  vpMbEdgeTracker::setMovingEdge(meParser);
6317 
6318 // KLT
6319 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6320  tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
6321  tracker.setWindowSize((int)xmlp.getKltWindowSize());
6322  tracker.setQuality(xmlp.getKltQuality());
6323  tracker.setMinDistance(xmlp.getKltMinDistance());
6324  tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6325  tracker.setBlockSize((int)xmlp.getKltBlockSize());
6326  tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
6327  maskBorder = xmlp.getKltMaskBorder();
6328 
6329  // if(useScanLine)
6330  faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6331 #endif
6332 
6333  // Depth normal
6339 
6340  // Depth dense
6342 }
6343 
6344 #ifdef VISP_HAVE_PCL
6345 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6346  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6347 {
6348 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6349  // KLT
6350  if (m_trackerType & KLT_TRACKER) {
6351  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6352  vpMbKltTracker::reinit(*ptr_I);
6353  }
6354  }
6355 #endif
6356 
6357  // Looking for new visible face
6358  if (m_trackerType & EDGE_TRACKER) {
6359  bool newvisibleface = false;
6360  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6361 
6362  if (useScanLine) {
6363  faces.computeClippedPolygons(m_cMo, m_cam);
6364  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6365  }
6366  }
6367 
6368  // Depth normal
6369  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6370  vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);
6371 
6372  // Depth dense
6373  if (m_trackerType & DEPTH_DENSE_TRACKER)
6374  vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);
6375 
6376  // Edge
6377  if (m_trackerType & EDGE_TRACKER) {
6378  vpMbEdgeTracker::updateMovingEdge(*ptr_I);
6379 
6380  vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6381  // Reinit the moving edge for the lines which need it.
6382  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6383 
6384  if (computeProjError) {
6385  vpMbEdgeTracker::computeProjectionError(*ptr_I);
6386  }
6387  }
6388 }
6389 
6390 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
6391  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6392 {
6393  if (m_trackerType & EDGE_TRACKER) {
6394  try {
6395  vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6396  } catch (...) {
6397  std::cerr << "Error in moving edge tracking" << std::endl;
6398  throw;
6399  }
6400  }
6401 
6402 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6403  if (m_trackerType & KLT_TRACKER) {
6404  try {
6405  vpMbKltTracker::preTracking(*ptr_I);
6406  } catch (const vpException &e) {
6407  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6408  throw;
6409  }
6410  }
6411 #endif
6412 
6413  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6414  try {
6415  vpMbDepthNormalTracker::segmentPointCloud(point_cloud);
6416  } catch (...) {
6417  std::cerr << "Error in Depth normal tracking" << std::endl;
6418  throw;
6419  }
6420  }
6421 
6422  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6423  try {
6424  vpMbDepthDenseTracker::segmentPointCloud(point_cloud);
6425  } catch (...) {
6426  std::cerr << "Error in Depth dense tracking" << std::endl;
6427  throw;
6428  }
6429  }
6430 }
6431 #endif
6432 
6433 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6434  const unsigned int pointcloud_width,
6435  const unsigned int pointcloud_height)
6436 {
6437 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6438  // KLT
6439  if (m_trackerType & KLT_TRACKER) {
6440  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6441  vpMbKltTracker::reinit(*ptr_I);
6442  }
6443  }
6444 #endif
6445 
6446  // Looking for new visible face
6447  if (m_trackerType & EDGE_TRACKER) {
6448  bool newvisibleface = false;
6449  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6450 
6451  if (useScanLine) {
6452  faces.computeClippedPolygons(m_cMo, m_cam);
6453  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6454  }
6455  }
6456 
6457  // Depth normal
6458  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6459  vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);
6460 
6461  // Depth dense
6462  if (m_trackerType & DEPTH_DENSE_TRACKER)
6463  vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);
6464 
6465  // Edge
6466  if (m_trackerType & EDGE_TRACKER) {
6467  vpMbEdgeTracker::updateMovingEdge(*ptr_I);
6468 
6469  vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6470  // Reinit the moving edge for the lines which need it.
6471  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6472 
6473  if (computeProjError) {
6474  vpMbEdgeTracker::computeProjectionError(*ptr_I);
6475  }
6476  }
6477 }
6478 
6479 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> * const ptr_I,
6480  const std::vector<vpColVector> * const point_cloud,
6481  const unsigned int pointcloud_width,
6482  const unsigned int pointcloud_height)
6483 {
6484  if (m_trackerType & EDGE_TRACKER) {
6485  try {
6486  vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6487  } catch (...) {
6488  std::cerr << "Error in moving edge tracking" << std::endl;
6489  throw;
6490  }
6491  }
6492 
6493 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6494  if (m_trackerType & KLT_TRACKER) {
6495  try {
6496  vpMbKltTracker::preTracking(*ptr_I);
6497  } catch (const vpException &e) {
6498  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6499  throw;
6500  }
6501  }
6502 #endif
6503 
6504  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6505  try {
6506  vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6507  } catch (...) {
6508  std::cerr << "Error in Depth normal tracking" << std::endl;
6509  throw;
6510  }
6511  }
6512 
6513  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6514  try {
6515  vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6516  } catch (...) {
6517  std::cerr << "Error in Depth dense tracking" << std::endl;
6518  throw;
6519  }
6520  }
6521 }
6522 
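/*
 * reInitModel() discards every feature primitive built from the previous CAD model
 * (distance lines, cylinders and circles for the edges, KLT polygons and cylinders,
 * depth-normal and depth-dense faces), reloads the model from cad_name, and finally
 * re-initializes the tracker from the given pose on either the grayscale or the color
 * image.
 */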
6523 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> * const I, const vpImage<vpRGBa> * const I_color,
6524  const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose,
6525  const vpHomogeneousMatrix &T)
6526 {
6527  m_cMo.eye();
6528 
6529  // Edge
6530  vpMbtDistanceLine *l;
6531  vpMbtDistanceCylinder *cy;
6532  vpMbtDistanceCircle *ci;
6533 
6534  for (unsigned int i = 0; i < scales.size(); i++) {
6535  if (scales[i]) {
6536  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
6537  l = *it;
6538  if (l != NULL)
6539  delete l;
6540  l = NULL;
6541  }
6542 
6543  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
6544  ++it) {
6545  cy = *it;
6546  if (cy != NULL)
6547  delete cy;
6548  cy = NULL;
6549  }
6550 
6551  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
6552  ci = *it;
6553  if (ci != NULL)
6554  delete ci;
6555  ci = NULL;
6556  }
6557 
6558  lines[i].clear();
6559  cylinders[i].clear();
6560  circles[i].clear();
6561  }
6562  }
6563 
6564  nline = 0;
6565  ncylinder = 0;
6566  ncircle = 0;
6567  nbvisiblepolygone = 0;
6568 
6569 // KLT
6570 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6571 #if (VISP_HAVE_OPENCV_VERSION < 0x020408)
6572  if (cur != NULL) {
6573  cvReleaseImage(&cur);
6574  cur = NULL;
6575  }
6576 #endif
6577 
6578  // delete the Klt Polygon features
6579  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
6580  vpMbtDistanceKltPoints *kltpoly = *it;
6581  if (kltpoly != NULL) {
6582  delete kltpoly;
6583  }
6584  kltpoly = NULL;
6585  }
6586  kltPolygons.clear();
6587 
6588  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
6589  ++it) {
6590  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
6591  if (kltPolyCylinder != NULL) {
6592  delete kltPolyCylinder;
6593  }
6594  kltPolyCylinder = NULL;
6595  }
6596  kltCylinders.clear();
6597 
6598  // delete the structures used to display circles
6599  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
6600  ci = *it;
6601  if (ci != NULL) {
6602  delete ci;
6603  }
6604  ci = NULL;
6605  }
6606  circles_disp.clear();
6607 
6608  firstInitialisation = true;
6609 
6610 #endif
6611 
6612  // Depth normal
6613  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
6614  delete m_depthNormalFaces[i];
6615  m_depthNormalFaces[i] = NULL;
6616  }
6617  m_depthNormalFaces.clear();
6618 
6619  // Depth dense
6620  for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
6621  delete m_depthDenseFaces[i];
6622  m_depthDenseFaces[i] = NULL;
6623  }
6624  m_depthDenseFaces.clear();
6625 
6626  faces.reset();
6627 
6628  loadModel(cad_name, verbose, T);
6629  if (I) {
6630  initFromPose(*I, cMo);
6631  } else {
6632  initFromPose(*I_color, cMo);
6633  }
6634 }
6635 
6636 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
6637  const vpHomogeneousMatrix &cMo, bool verbose,
6638  const vpHomogeneousMatrix &T)
6639 {
6640  reInitModel(&I, NULL, cad_name, cMo, verbose, T);
6641 }
6642 
6643 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
6644  const vpHomogeneousMatrix &cMo, bool verbose,
6645  const vpHomogeneousMatrix &T)
6646 {
6647  reInitModel(NULL, &I_color, cad_name, cMo, verbose, T);
6648 }
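/*
 * Minimal usage sketch for the re-initialization path above (not part of this file's
 * build; the model and init data are placeholders): after the object has been lost, the
 * public vpMbGenericTracker API can reload the CAD model and restart the tracker from an
 * externally estimated pose.
 *
 *   vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
 *   vpImage<unsigned char> I;                 // current grayscale frame
 *   vpHomogeneousMatrix cMo_estimated;        // pose recovered by another means
 *   tracker.loadModel("object.cao");          // hypothetical model file
 *   tracker.initFromPose(I, cMo_estimated);
 *   // ... tracking is lost ...
 *   tracker.reInitModel(I, "object.cao", cMo_estimated);
 */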
6649 
6650 void vpMbGenericTracker::TrackerWrapper::resetTracker()
6651 {
6652  vpMbEdgeTracker::resetTracker();
6653 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6654  vpMbKltTracker::resetTracker();
6655 #endif
6656  vpMbDepthNormalTracker::resetTracker();
6657  vpMbDepthDenseTracker::resetTracker();
6658 }
6659 
6660 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
6661 {
6662  m_cam = cam;
6663 
6664  vpMbEdgeTracker::setCameraParameters(cam);
6665 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6666  vpMbKltTracker::setCameraParameters(cam);
6667 #endif
6668  vpMbDepthNormalTracker::setCameraParameters(cam);
6669  vpMbDepthDenseTracker::setCameraParameters(cam);
6670 }
6671 
6672 void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags)
6673 {
6674  vpMbEdgeTracker::setClipping(flags);
6675 }
6676 
6677 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
6678 {
6679  vpMbEdgeTracker::setFarClippingDistance(dist);
6680 }
6681 
6682 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
6683 {
6684  vpMbEdgeTracker::setNearClippingDistance(dist);
6685 }
6686 
6687 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
6688 {
6689  vpMbTracker::setOgreVisibilityTest(v);
6690 #ifdef VISP_HAVE_OGRE
6691  faces.getOgreContext()->setWindowName("TrackerWrapper");
6692 #endif
6693 }
6694 
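/*
 * setPose() forces the wrapper to the given pose cdMo. When the KLT tracker is enabled it
 * goes through vpMbKltTracker::setPose() to re-project the tracked points; otherwise the
 * wrapper simply stores the pose and performs a full re-initialization. The moving edges
 * and the depth-feature visibilities are then rebuilt for the new pose.
 */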
6695 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> * const I, const vpImage<vpRGBa> * const I_color,
6696  const vpHomogeneousMatrix &cdMo)
6697 {
6698  bool performKltSetPose = false;
6699  if (I_color) {
6700  vpImageConvert::convert(*I_color, m_I);
6701  }
6702 
6703 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6704  if (m_trackerType & KLT_TRACKER) {
6705  performKltSetPose = true;
6706 
6707  if (useScanLine || clippingFlag > 3) {
6708  m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6709  }
6710 
6711  vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
6712  }
6713 #endif
6714 
6715  if (!performKltSetPose) {
6716  m_cMo = cdMo;
6717  init(I ? *I : m_I);
6718  return;
6719  }
6720 
6721  if (m_trackerType & EDGE_TRACKER) {
6722  resetMovingEdge();
6723  }
6724 
6725  if (useScanLine) {
6726  faces.computeClippedPolygons(m_cMo, m_cam);
6727  faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6728  }
6729 
6730 #if 0
6731  if (m_trackerType & EDGE_TRACKER) {
6732  initPyramid(I, Ipyramid);
6733 
6734  unsigned int i = (unsigned int) scales.size();
6735  do {
6736  i--;
6737  if(scales[i]){
6738  downScale(i);
6739  vpMbEdgeTracker::initMovingEdge(*Ipyramid[i], cMo);
6740  upScale(i);
6741  }
6742  } while(i != 0);
6743 
6744  cleanPyramid(Ipyramid);
6745  }
6746 #else
6747  if (m_trackerType & EDGE_TRACKER) {
6748  initMovingEdge(I ? *I : m_I, cdMo);
6749  }
6750 #endif
6751 
6752  // Depth normal
6753  vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6754 
6755  // Depth dense
6756  vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6757 }
6758 
6759 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
6760 {
6761  setPose(&I, NULL, cdMo);
6762 }
6763 
6764 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
6765 {
6766  setPose(NULL, &I_color, cdMo);
6767 }
6768 
6769 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
6770 {
6771  vpMbEdgeTracker::setProjectionErrorComputation(flag);
6772 }
6773 
6774 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
6775 {
6776  vpMbEdgeTracker::setScanLineVisibilityTest(v);
6777 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6778  vpMbKltTracker::setScanLineVisibilityTest(v);
6779 #endif
6780  vpMbDepthNormalTracker::setScanLineVisibilityTest(v);
6781  vpMbDepthDenseTracker::setScanLineVisibilityTest(v);
6782 }
6783 
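/*
 * The tracker type is a bitmask: any combination of EDGE_TRACKER, KLT_TRACKER (when the
 * KLT module and OpenCV are available), DEPTH_NORMAL_TRACKER and DEPTH_DENSE_TRACKER can
 * be OR-ed together, e.g. (illustrative only):
 *
 *   wrapper.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::DEPTH_DENSE_TRACKER);
 *
 * A type that selects none of the supported features is rejected below.
 */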
6784 void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
6785 {
6786  if ((type & (EDGE_TRACKER |
6787 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6788  KLT_TRACKER |
6789 #endif
6790  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
6791  throw vpException(vpTrackingException::badValue, "bad value for tracker type: %d !", type);
6792  }
6793 
6794  m_trackerType = type;
6795 }
6796 
6797 void vpMbGenericTracker::TrackerWrapper::testTracking()
6798 {
6799  if (m_trackerType & EDGE_TRACKER) {
6800  vpMbEdgeTracker::testTracking();
6801  }
6802 }
6803 
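/*
 * The track() overloads below are the TrackerWrapper entry points: the grayscale overload
 * simply forwards to the PCL overload with a null point cloud (when PCL and C++11 are
 * available), while the PCL overload runs the full cycle: preTracking() feature
 * extraction, computeVVS() pose optimization, an optional testTracking() check in the
 * pure edge case, and postTracking() updates, restoring the previous pose on failure.
 */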
6804 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> &
6805 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
6806  I
6807 #endif
6808 )
6809 {
6810  if ((m_trackerType & (EDGE_TRACKER
6811 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6812  | KLT_TRACKER
6813 #endif
6814  )) == 0) {
6815  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
6816  return;
6817  }
6818 
6819 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
6820  track(&I, nullptr);
6821 #endif
6822 }
6823 
6824 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<vpRGBa> &)
6825 {
6826  // Not exposed in the public API; only used for debugging
6827 }
6828 
6829 #ifdef VISP_HAVE_PCL
6830 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
6831  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6832 {
6833  if ((m_trackerType & (EDGE_TRACKER |
6834 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6835  KLT_TRACKER |
6836 #endif
6837  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
6838  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
6839  return;
6840  }
6841 
6842  if (m_trackerType & (EDGE_TRACKER
6843 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6844  | KLT_TRACKER
6845 #endif
6846  ) &&
6847  ptr_I == NULL) {
6848  throw vpException(vpException::fatalError, "Image pointer is NULL!");
6849  }
6850 
6851  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && ! point_cloud) { // point_cloud == nullptr
6852  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
6853  }
6854 
6855  // Back-up cMo in case of exception
6856  vpHomogeneousMatrix cMo_1 = m_cMo;
6857  try {
6858  preTracking(ptr_I, point_cloud);
6859 
6860  try {
6861  computeVVS(ptr_I);
6862  } catch (...) {
6863  covarianceMatrix = -1;
6864  throw; // throw the original exception
6865  }
6866 
6867  if (m_trackerType == EDGE_TRACKER)
6868  testTracking();
6869 
6870  postTracking(ptr_I, point_cloud);
6871 
6872  } catch (const vpException &e) {
6873  std::cerr << "Exception: " << e.what() << std::endl;
6874  m_cMo = cMo_1;
6875  throw; // rethrowing the original exception
6876  }
6877 }
6878 #endif
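/*
 * End-to-end usage sketch of the public vpMbGenericTracker API that wraps this class
 * (a minimal sketch, not part of this file's build; file names, intrinsic values and the
 * acquisition loop are placeholders):
 *
 *   #include <visp3/mbt/vpMbGenericTracker.h>
 *
 *   vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
 *   vpImage<unsigned char> I;
 *   vpCameraParameters cam(600, 600, 320, 240);   // fx, fy, u0, v0 (placeholder values)
 *   tracker.setCameraParameters(cam);
 *   tracker.loadModel("object.cao");              // hypothetical CAD model
 *   tracker.initClick(I, "object.init");          // hypothetical init file
 *
 *   while (grabber_has_frames) {                  // pseudocode acquisition loop
 *     // acquire(I);
 *     tracker.track(I);
 *     vpHomogeneousMatrix cMo = tracker.getPose();
 *     tracker.display(I, cMo, cam, vpColor::red);
 *   }
 */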