vpMbGenericTracker.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Generic model-based tracker.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/mbt/vpMbGenericTracker.h>
37 
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/mbt/vpMbtXmlGenericParser.h>
42 
43 vpMbGenericTracker::vpMbGenericTracker()
44  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
45  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
46 {
47  m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
48 
49  // Add default camera transformation matrix
50  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
51 
52  // Add default weighting for each feature type
53  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
54 
55 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
56  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
57 #endif
58 
59  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
60  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
61 }
62 
63 vpMbGenericTracker::vpMbGenericTracker(const unsigned int nbCameras, const int trackerType)
64  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
65  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
66 {
67  if (nbCameras == 0) {
68  throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
69  } else if (nbCameras == 1) {
70  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);
71 
72  // Add default camera transformation matrix
73  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
74  } else {
75  for (unsigned int i = 1; i <= nbCameras; i++) {
76  std::stringstream ss;
77  ss << "Camera" << i;
78  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);
79 
80  // Add default camera transformation matrix
81  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
82  }
83 
84  // Set by default the reference camera to the first one
85  m_referenceCameraName = m_mapOfTrackers.begin()->first;
86  }
87 
88  // Add default weighting for each feature type
89  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
90 
91 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
92  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
93 #endif
94 
95  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
96  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
97 }
98 
99 vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
100  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
101  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
102 {
103  if (trackerTypes.empty()) {
104  throw vpException(vpException::badValue, "There is no camera!");
105  }
106 
107  if (trackerTypes.size() == 1) {
108  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);
109 
110  // Add default camera transformation matrix
111  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
112  } else {
113  for (size_t i = 1; i <= trackerTypes.size(); i++) {
114  std::stringstream ss;
115  ss << "Camera" << i;
116  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);
117 
118  // Add default camera transformation matrix
119  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
120  }
121 
122  // Set by default the reference camera to the first one
123  m_referenceCameraName = m_mapOfTrackers.begin()->first;
124  }
125 
126  // Add default weighting for each feature type
127  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
128 
129 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
130  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
131 #endif
132 
133  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
134  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
135 }
136 
137 vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
138  const std::vector<int> &trackerTypes)
139  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
140  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
141 {
142  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
143  throw vpException(vpTrackingException::badValue,
144  "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
145  }
146 
147  for (size_t i = 0; i < cameraNames.size(); i++) {
148  m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);
149 
150  // Add default camera transformation matrix
151  m_mapOfCameraTransformationMatrix[cameraNames[i]] = vpHomogeneousMatrix();
152  }
153 
154  // Set by default the reference camera to the first one
155  m_referenceCameraName = m_mapOfTrackers.begin()->first;
156 
157  // Add default weighting for each feature type
158  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
159 
160 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
161  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
162 #endif
163 
164  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
165  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
166 }
167 
168 vpMbGenericTracker::~vpMbGenericTracker()
169 {
170  for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
171  ++it) {
172  delete it->second;
173  it->second = NULL;
174  }
175 }
176 
193 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<unsigned char> &I,
194  const vpHomogeneousMatrix &_cMo,
195  const vpCameraParameters &_cam)
196 {
197  double rawTotalProjectionError = 0.0;
198  unsigned int nbTotalFeaturesUsed = 0;
199 
200  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
201  it != m_mapOfTrackers.end(); ++it) {
202  TrackerWrapper *tracker = it->second;
203 
204  unsigned int nbFeaturesUsed = 0;
205  double curProjError = tracker->computeProjectionErrorImpl(I, _cMo, _cam, nbFeaturesUsed);
206 
207  if (nbFeaturesUsed > 0) {
208  nbTotalFeaturesUsed += nbFeaturesUsed;
209  rawTotalProjectionError += curProjError;
210  }
211  }
212 
213  if (nbTotalFeaturesUsed > 0) {
214  return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
215  }
216 
217  return 90.0;
218 }
219 
236 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<vpRGBa> &I_color,
237  const vpHomogeneousMatrix &_cMo,
238  const vpCameraParameters &_cam)
239 {
240  vpImage<unsigned char> I;
241  vpImageConvert::convert(I_color, I); // FS: Shouldn't we use m_I here, which was converted in track()?
242 
243  return computeCurrentProjectionError(I, _cMo, _cam);
244 }
245 
246 void vpMbGenericTracker::computeProjectionError()
247 {
248  if (computeProjError) {
249  double rawTotalProjectionError = 0.0;
250  unsigned int nbTotalFeaturesUsed = 0;
251 
252  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
253  it != m_mapOfTrackers.end(); ++it) {
254  TrackerWrapper *tracker = it->second;
255 
256  double curProjError = tracker->getProjectionError();
257  unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
258 
259  if (nbFeaturesUsed > 0) {
260  nbTotalFeaturesUsed += nbFeaturesUsed;
261  rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
262  }
263  }
264 
265  if (nbTotalFeaturesUsed > 0) {
266  projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
267  } else {
268  projectionError = 90.0;
269  }
270  } else {
271  projectionError = 90.0;
272  }
273 }
274 
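// Virtual visual servoing (VVS) loop: stacks the residuals and interaction matrices of every
// camera / feature type, applies the robust weights and the per-feature factors, and iteratively
// updates the reference pose cMo (and each camera pose through the camera transformation
// matrices) until the residual norm stabilizes or m_maxIter is reached.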
275 void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
276 {
277  computeVVSInit(mapOfImages);
278 
279  if (m_error.getRows() < 4) {
280  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
281  }
282 
283  double normRes = 0;
284  double normRes_1 = -1;
285  unsigned int iter = 0;
286 
287  vpMatrix LTL;
288  vpColVector LTR, v;
289  vpColVector error_prev;
290 
291  double mu = m_initialMu;
292  vpHomogeneousMatrix cMo_prev;
293 
294  bool isoJoIdentity_ = true;
295 
296  // Covariance
297  vpColVector W_true(m_error.getRows());
298  vpMatrix L_true, LVJ_true;
299 
300  // Create the map of VelocityTwistMatrices
301  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
302  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
303  it != m_mapOfCameraTransformationMatrix.end(); ++it) {
304  vpVelocityTwistMatrix cVo;
305  cVo.buildFrom(it->second);
306  mapOfVelocityTwist[it->first] = cVo;
307  }
308 
309  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
310 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
311  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
312 #endif
313  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
314  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];
315 
316  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
317  computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);
318 
319  bool reStartFromLastIncrement = false;
320  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
321  if (reStartFromLastIncrement) {
322  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
323  it != m_mapOfTrackers.end(); ++it) {
324  TrackerWrapper *tracker = it->second;
325 
326  tracker->cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;
327 
328 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
329  vpHomogeneousMatrix c_curr_tTc_curr0 =
330  m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
331  tracker->ctTc0 = c_curr_tTc_curr0;
332 #endif
333  }
334  }
335 
336  if (!reStartFromLastIncrement) {
337  computeVVSWeights();
338 
339  if (computeCovariance) {
340  L_true = m_L;
341  if (!isoJoIdentity_) {
342  vpVelocityTwistMatrix cVo;
343  cVo.buildFrom(cMo);
344  LVJ_true = (m_L * (cVo * oJo));
345  }
346  }
347 
348  vpVelocityTwistMatrix cVo;
349  if (iter == 0) {
350  isoJoIdentity_ = true;
351  oJo.eye();
352 
353  // If all the 6 dof should be estimated, we check if the interaction
354  // matrix is full rank. If not we remove automatically the dof that
355  // cannot be estimated. This is particularly useful when considering
356  // circles (rank 5) and cylinders (rank 4)
357  if (isoJoIdentity_) {
358  cVo.buildFrom(cMo);
359 
360  vpMatrix K; // kernel
361  unsigned int rank = (m_L * cVo).kernel(K);
362  if (rank == 0) {
363  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
364  }
365 
366  if (rank != 6) {
367  vpMatrix I; // Identity
368  I.eye(6);
369  oJo = I - K.AtA();
370 
371  isoJoIdentity_ = false;
372  }
373  }
374  }
375 
376  // Weighting
377  double num = 0;
378  double den = 0;
379 
380  unsigned int start_index = 0;
381  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
382  it != m_mapOfTrackers.end(); ++it) {
383  TrackerWrapper *tracker = it->second;
384 
385  if (tracker->m_trackerType & EDGE_TRACKER) {
386  for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
387  double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
388  W_true[start_index + i] = wi;
389  m_weightedError[start_index + i] = wi * m_error[start_index + i];
390 
391  num += wi * vpMath::sqr(m_error[start_index + i]);
392  den += wi;
393 
394  for (unsigned int j = 0; j < m_L.getCols(); j++) {
395  m_L[start_index + i][j] *= wi;
396  }
397  }
398 
399  start_index += tracker->m_error_edge.getRows();
400  }
401 
402 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
403  if (tracker->m_trackerType & KLT_TRACKER) {
404  for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
405  double wi = tracker->m_w_klt[i] * factorKlt;
406  W_true[start_index + i] = wi;
407  m_weightedError[start_index + i] = wi * m_error[start_index + i];
408 
409  num += wi * vpMath::sqr(m_error[start_index + i]);
410  den += wi;
411 
412  for (unsigned int j = 0; j < m_L.getCols(); j++) {
413  m_L[start_index + i][j] *= wi;
414  }
415  }
416 
417  start_index += tracker->m_error_klt.getRows();
418  }
419 #endif
420 
421  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
422  for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
423  double wi = tracker->m_w_depthNormal[i] * factorDepth;
424  W_true[start_index + i] = wi;
425  m_weightedError[start_index + i] = wi * m_error[start_index + i];
426 
427  num += wi * vpMath::sqr(m_error[start_index + i]);
428  den += wi;
429 
430  for (unsigned int j = 0; j < m_L.getCols(); j++) {
431  m_L[start_index + i][j] *= wi;
432  }
433  }
434 
435  start_index += tracker->m_error_depthNormal.getRows();
436  }
437 
438  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
439  for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
440  double wi = tracker->m_w_depthDense[i] * factorDepthDense;
441  W_true[start_index + i] = wi;
442  m_weightedError[start_index + i] = wi * m_error[start_index + i];
443 
444  num += wi * vpMath::sqr(m_error[start_index + i]);
445  den += wi;
446 
447  for (unsigned int j = 0; j < m_L.getCols(); j++) {
448  m_L[start_index + i][j] *= wi;
449  }
450  }
451 
452  start_index += tracker->m_error_depthDense.getRows();
453  }
454  }
455 
456  normRes_1 = normRes;
457  normRes = sqrt(num / den);
458 
459  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
460 
461  cMo_prev = cMo;
462 
463  cMo = vpExponentialMap::direct(v).inverse() * cMo;
464 
465 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
466  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
467  it != m_mapOfTrackers.end(); ++it) {
468  TrackerWrapper *tracker = it->second;
469 
470  vpHomogeneousMatrix c_curr_tTc_curr0 =
471  m_mapOfCameraTransformationMatrix[it->first] * cMo * tracker->c0Mo.inverse();
472  tracker->ctTc0 = c_curr_tTc_curr0;
473  }
474 #endif
475 
476  // Update cMo
477  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
478  it != m_mapOfTrackers.end(); ++it) {
479  TrackerWrapper *tracker = it->second;
480  tracker->cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo;
481  }
482  }
483 
484  iter++;
485  }
486 
487  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
488 
489  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
490  it != m_mapOfTrackers.end(); ++it) {
491  TrackerWrapper *tracker = it->second;
492 
493  if (tracker->m_trackerType & EDGE_TRACKER) {
494  tracker->updateMovingEdgeWeights();
495  }
496  }
497 }
498 
499 void vpMbGenericTracker::computeVVSInit()
500 {
501  throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
502 }
503 
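// Initializes the VVS data: runs computeVVSInit() on each camera wrapper, then sizes the stacked
// interaction matrix m_L (nbFeatures x 6), the error vector and the weights for all the features.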
504 void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
505 {
506  unsigned int nbFeatures = 0;
507 
508  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
509  it != m_mapOfTrackers.end(); ++it) {
510  TrackerWrapper *tracker = it->second;
511  tracker->computeVVSInit(mapOfImages[it->first]);
512 
513  nbFeatures += tracker->m_error.getRows();
514  }
515 
516  m_L.resize(nbFeatures, 6, false, false);
517  m_error.resize(nbFeatures, false);
518 
519  m_weightedError.resize(nbFeatures, false);
520  m_w.resize(nbFeatures, false);
521  m_w = 1;
522 }
523 
524 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu()
525 {
526  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
527  "computeVVSInteractionMatrixAndR"
528  "esidu() should not be called");
529 }
530 
531 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu(
532  std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
533  std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
534 {
535  unsigned int start_index = 0;
536 
537  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
538  it != m_mapOfTrackers.end(); ++it) {
539  TrackerWrapper *tracker = it->second;
540 
541  tracker->cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo;
542 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
543  vpHomogeneousMatrix c_curr_tTc_curr0 = m_mapOfCameraTransformationMatrix[it->first] * cMo * tracker->c0Mo.inverse();
544  tracker->ctTc0 = c_curr_tTc_curr0;
545 #endif
546 
547  tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
548 
549  m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
550  m_error.insert(start_index, tracker->m_error);
551 
552  start_index += tracker->m_error.getRows();
553  }
554 }
555 
556 void vpMbGenericTracker::computeVVSWeights()
557 {
558  unsigned int start_index = 0;
559 
560  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
561  it != m_mapOfTrackers.end(); ++it) {
562  TrackerWrapper *tracker = it->second;
563  tracker->computeVVSWeights();
564 
565  m_w.insert(start_index, tracker->m_w);
566  start_index += tracker->m_w.getRows();
567  }
568 }
569 
583 void vpMbGenericTracker::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo_,
584  const vpCameraParameters &cam_, const vpColor &col, const unsigned int thickness,
585  const bool displayFullModel)
586 {
587  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
588  if (it != m_mapOfTrackers.end()) {
589  TrackerWrapper *tracker = it->second;
590  tracker->display(I, cMo_, cam_, col, thickness, displayFullModel);
591  } else {
592  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
593  }
594 }
595 
609 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo_,
610  const vpCameraParameters &cam_, const vpColor &col, const unsigned int thickness,
611  const bool displayFullModel)
612 {
613  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
614  if (it != m_mapOfTrackers.end()) {
615  TrackerWrapper *tracker = it->second;
616  tracker->display(I, cMo_, cam_, col, thickness, displayFullModel);
617  } else {
618  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
619  }
620 }
621 
638 void vpMbGenericTracker::display(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
639  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
640  const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
641  const unsigned int thickness, const bool displayFullModel)
642 {
643  if (m_mapOfTrackers.size() == 2) {
644  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
645  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
646  ++it;
647 
648  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
649  } else {
650  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
651  << std::endl;
652  }
653 }
654 
671 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I1, const vpImage<vpRGBa> &I2, const vpHomogeneousMatrix &c1Mo,
672  const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
673  const vpCameraParameters &cam2, const vpColor &color, const unsigned int thickness,
674  const bool displayFullModel)
675 {
676  if (m_mapOfTrackers.size() == 2) {
677  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
678  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
679  ++it;
680 
681  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
682  } else {
683  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
684  << std::endl;
685  }
686 }
687 
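// Displays the model for every camera present in the input maps; a camera missing its pose or its
// camera parameters is reported on std::cerr and skipped.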
699 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
700  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
701  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
702  const vpColor &col, const unsigned int thickness, const bool displayFullModel)
703 {
704  // Display only for the given images
705  for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
706  it_img != mapOfImages.end(); ++it_img) {
707  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
708  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
709  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
710 
711  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
712  it_cam != mapOfCameraParameters.end()) {
713  TrackerWrapper *tracker = it_tracker->second;
714  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
715  } else {
716  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
717  }
718  }
719 }
720 
732 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
733  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
734  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
735  const vpColor &col, const unsigned int thickness, const bool displayFullModel)
736 {
737  // Display only for the given images
738  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
739  it_img != mapOfImages.end(); ++it_img) {
740  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
741  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
742  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
743 
744  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
745  it_cam != mapOfCameraParameters.end()) {
746  TrackerWrapper *tracker = it_tracker->second;
747  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
748  } else {
749  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
750  }
751  }
752 }
753 
759 std::vector<std::string> vpMbGenericTracker::getCameraNames() const
760 {
761  std::vector<std::string> cameraNames;
762 
763  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
764  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
765  cameraNames.push_back(it_tracker->first);
766  }
767 
768  return cameraNames;
769 }
770 
779 void vpMbGenericTracker::getCameraParameters(vpCameraParameters &cam1, vpCameraParameters &cam2) const
780 {
781  if (m_mapOfTrackers.size() == 2) {
782  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
783  it->second->getCameraParameters(cam1);
784  ++it;
785 
786  it->second->getCameraParameters(cam2);
787  } else {
788  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
789  << std::endl;
790  }
791 }
792 
798 void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
799 {
800  // Clear the input map
801  mapOfCameraParameters.clear();
802 
803  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
804  it != m_mapOfTrackers.end(); ++it) {
805  vpCameraParameters cam_;
806  it->second->getCameraParameters(cam_);
807  mapOfCameraParameters[it->first] = cam_;
808  }
809 }
810 
817 std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
818 {
819  std::map<std::string, int> trackingTypes;
820 
821  TrackerWrapper *tracker;
822  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
823  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
824  tracker = it_tracker->second;
825  trackingTypes[it_tracker->first] = tracker->getTrackerType();
826  }
827 
828  return trackingTypes;
829 }
830 
839 void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
840 {
841  if (m_mapOfTrackers.size() == 2) {
842  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
843  clippingFlag1 = it->second->getClipping();
844  ++it;
845 
846  clippingFlag2 = it->second->getClipping();
847  } else {
848  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
849  << std::endl;
850  }
851 }
852 
858 void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
859 {
860  mapOfClippingFlags.clear();
861 
862  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
863  it != m_mapOfTrackers.end(); ++it) {
864  TrackerWrapper *tracker = it->second;
865  mapOfClippingFlags[it->first] = tracker->getClipping();
866  }
867 }
868 
874 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces()
875 {
876  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
877  if (it != m_mapOfTrackers.end()) {
878  return it->second->getFaces();
879  }
880 
881  std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
882  return faces;
883 }
884 
890 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces(const std::string &cameraName)
891 {
892  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
893  if (it != m_mapOfTrackers.end()) {
894  return it->second->getFaces();
895  }
896 
897  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
898  return faces;
899 }
900 
901 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
902 
905 std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
906 {
907  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
908  if (it != m_mapOfTrackers.end()) {
909  TrackerWrapper *tracker = it->second;
910  return tracker->getFeaturesCircle();
911  } else {
912  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
913  m_referenceCameraName.c_str());
914  }
915 }
916 
920 std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
921 {
922  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
923  if (it != m_mapOfTrackers.end()) {
924  TrackerWrapper *tracker = it->second;
925  return tracker->getFeaturesKltCylinder();
926  } else {
927  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
928  m_referenceCameraName.c_str());
929  }
930 }
931 
935 std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
936 {
937  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
938  if (it != m_mapOfTrackers.end()) {
939  TrackerWrapper *tracker = it->second;
940  return tracker->getFeaturesKlt();
941  } else {
942  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
943  m_referenceCameraName.c_str());
944  }
945 }
946 #endif
947 
958 std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
959 {
960  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
961 
962  if (it != m_mapOfTrackers.end()) {
963  return it->second->getFeaturesForDisplay();
964  } else {
965  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
966  }
967 
968  return std::vector<std::vector<double> >();
969 }
970 
978 void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
979 {
980  // Clear the input map
981  mapOfFeatures.clear();
982 
983  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
984  it != m_mapOfTrackers.end(); ++it) {
985  mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
986  }
987 }
988 
999 
1000 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
1001 
1009 std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1010 {
1011  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1012  if (it != m_mapOfTrackers.end()) {
1013  TrackerWrapper *tracker = it->second;
1014  return tracker->getKltImagePoints();
1015  } else {
1016  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1017  }
1018 
1019  return std::vector<vpImagePoint>();
1020 }
1021 
1030 std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1031 {
1032  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1033  if (it != m_mapOfTrackers.end()) {
1034  TrackerWrapper *tracker = it->second;
1035  return tracker->getKltImagePointsWithId();
1036  } else {
1037  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1038  }
1039 
1040  return std::map<int, vpImagePoint>();
1041 }
1042 
1048 unsigned int vpMbGenericTracker::getKltMaskBorder() const
1049 {
1050  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1051  if (it != m_mapOfTrackers.end()) {
1052  TrackerWrapper *tracker = it->second;
1053  return tracker->getKltMaskBorder();
1054  } else {
1055  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1056  }
1057 
1058  return 0;
1059 }
1060 
1066 int vpMbGenericTracker::getKltNbPoints() const
1067 {
1068  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1069  if (it != m_mapOfTrackers.end()) {
1070  TrackerWrapper *tracker = it->second;
1071  return tracker->getKltNbPoints();
1072  } else {
1073  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1074  }
1075 
1076  return 0;
1077 }
1078 
1084 vpKltOpencv vpMbGenericTracker::getKltOpencv() const
1085 {
1086  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1087 
1088  if (it_tracker != m_mapOfTrackers.end()) {
1089  TrackerWrapper *tracker;
1090  tracker = it_tracker->second;
1091  return tracker->getKltOpencv();
1092  } else {
1093  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1094  }
1095 
1096  return vpKltOpencv();
1097 }
1098 
1107 void vpMbGenericTracker::getKltOpencv(vpKltOpencv &klt1, vpKltOpencv &klt2) const
1108 {
1109  if (m_mapOfTrackers.size() == 2) {
1110  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1111  klt1 = it->second->getKltOpencv();
1112  ++it;
1113 
1114  klt2 = it->second->getKltOpencv();
1115  } else {
1116  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1117  << std::endl;
1118  }
1119 }
1120 
1126 void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1127 {
1128  mapOfKlts.clear();
1129 
1130  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1131  it != m_mapOfTrackers.end(); ++it) {
1132  TrackerWrapper *tracker = it->second;
1133  mapOfKlts[it->first] = tracker->getKltOpencv();
1134  }
1135 }
1136 
1137 #if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
1138 
1143 std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1144 {
1145  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1146  if (it != m_mapOfTrackers.end()) {
1147  TrackerWrapper *tracker = it->second;
1148  return tracker->getKltPoints();
1149  } else {
1150  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1151  }
1152 
1153  return std::vector<cv::Point2f>();
1154 }
1155 #endif
1156 
1164 #endif
1165 
1178 void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList,
1179  const unsigned int level) const
1180 {
1181  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1182  if (it != m_mapOfTrackers.end()) {
1183  it->second->getLcircle(circlesList, level);
1184  } else {
1185  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1186  }
1187 }
1188 
1202 void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1203  const unsigned int level) const
1204 {
1205  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1206  if (it != m_mapOfTrackers.end()) {
1207  it->second->getLcircle(circlesList, level);
1208  } else {
1209  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1210  }
1211 }
1212 
1225 void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList,
1226  const unsigned int level) const
1227 {
1228  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1229  if (it != m_mapOfTrackers.end()) {
1230  it->second->getLcylinder(cylindersList, level);
1231  } else {
1232  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1233  }
1234 }
1235 
1249 void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1250  const unsigned int level) const
1251 {
1252  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1253  if (it != m_mapOfTrackers.end()) {
1254  it->second->getLcylinder(cylindersList, level);
1255  } else {
1256  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1257  }
1258 }
1259 
1272 void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList,
1273  const unsigned int level) const
1274 {
1275  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1276 
1277  if (it != m_mapOfTrackers.end()) {
1278  it->second->getLline(linesList, level);
1279  } else {
1280  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1281  }
1282 }
1283 
1297 void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1298  const unsigned int level) const
1299 {
1300  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1301  if (it != m_mapOfTrackers.end()) {
1302  it->second->getLline(linesList, level);
1303  } else {
1304  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1305  }
1306 }
1307 
1325 std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1326  const vpHomogeneousMatrix &cMo,
1327  const vpCameraParameters &cam,
1328  const bool displayFullModel)
1329 {
1330  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1331 
1332  if (it != m_mapOfTrackers.end()) {
1333  return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1334  } else {
1335  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1336  }
1337 
1338  return std::vector<std::vector<double> >();
1339 }
1340 
1357 void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1358  unsigned int width, unsigned int height,
1359  const vpHomogeneousMatrix &cMo,
1360  const vpCameraParameters &cam,
1361  const bool displayFullModel)
1362 {
1363  // Clear the input map
1364  mapOfModels.clear();
1365 
1366  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1367  it != m_mapOfTrackers.end(); ++it) {
1368  mapOfModels[it->first] = it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1369  }
1370 }
1371 
1377 vpMe vpMbGenericTracker::getMovingEdge() const
1378 {
1379  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1380 
1381  if (it != m_mapOfTrackers.end()) {
1382  return it->second->getMovingEdge();
1383  } else {
1384  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1385  }
1386 
1387  return vpMe();
1388 }
1389 
1398 void vpMbGenericTracker::getMovingEdge(vpMe &me1, vpMe &me2) const
1399 {
1400  if (m_mapOfTrackers.size() == 2) {
1401  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1402  it->second->getMovingEdge(me1);
1403  ++it;
1404 
1405  it->second->getMovingEdge(me2);
1406  } else {
1407  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1408  << std::endl;
1409  }
1410 }
1411 
1417 void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1418 {
1419  mapOfMovingEdges.clear();
1420 
1421  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1422  it != m_mapOfTrackers.end(); ++it) {
1423  TrackerWrapper *tracker = it->second;
1424  mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1425  }
1426 }
1427 
1443 unsigned int vpMbGenericTracker::getNbPoints(const unsigned int level) const
1444 {
1445  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1446 
1447  unsigned int nbGoodPoints = 0;
1448  if (it != m_mapOfTrackers.end()) {
1449 
1450  nbGoodPoints = it->second->getNbPoints(level);
1451  } else {
1452  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1453  }
1454 
1455  return nbGoodPoints;
1456 }
1457 
1472 void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, const unsigned int level) const
1473 {
1474  mapOfNbPoints.clear();
1475 
1476  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1477  it != m_mapOfTrackers.end(); ++it) {
1478  TrackerWrapper *tracker = it->second;
1479  mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1480  }
1481 }
1482 
1488 unsigned int vpMbGenericTracker::getNbPolygon() const
1489 {
1490  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1491  if (it != m_mapOfTrackers.end()) {
1492  return it->second->getNbPolygon();
1493  }
1494 
1495  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1496  return 0;
1497 }
1498 
1504 void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1505 {
1506  mapOfNbPolygons.clear();
1507 
1508  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1509  it != m_mapOfTrackers.end(); ++it) {
1510  TrackerWrapper *tracker = it->second;
1511  mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1512  }
1513 }
1514 
1525 vpMbtPolygon *vpMbGenericTracker::getPolygon(const unsigned int index)
1526 {
1527  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1528  if (it != m_mapOfTrackers.end()) {
1529  return it->second->getPolygon(index);
1530  }
1531 
1532  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1533  return NULL;
1534 }
1535 
1547 vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, const unsigned int index)
1548 {
1549  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1550  if (it != m_mapOfTrackers.end()) {
1551  return it->second->getPolygon(index);
1552  }
1553 
1554  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1555  return NULL;
1556 }
1557 
1573 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1574 vpMbGenericTracker::getPolygonFaces(const bool orderPolygons, const bool useVisibility, const bool clipPolygon)
1575 {
1576  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1577 
1578  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1579  if (it != m_mapOfTrackers.end()) {
1580  TrackerWrapper *tracker = it->second;
1581  polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1582  } else {
1583  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1584  }
1585 
1586  return polygonFaces;
1587 }
1588 
1606 void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1607  std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1608  const bool orderPolygons, const bool useVisibility, const bool clipPolygon)
1609 {
1610  mapOfPolygons.clear();
1611  mapOfPoints.clear();
1612 
1613  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1614  it != m_mapOfTrackers.end(); ++it) {
1615  TrackerWrapper *tracker = it->second;
1616  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1617  tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1618 
1619  mapOfPolygons[it->first] = polygonFaces.first;
1620  mapOfPoints[it->first] = polygonFaces.second;
1621  }
1622 }
1623 
1632 void vpMbGenericTracker::getPose(vpHomogeneousMatrix &c1Mo, vpHomogeneousMatrix &c2Mo) const
1633 {
1634  if (m_mapOfTrackers.size() == 2) {
1635  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1636  it->second->getPose(c1Mo);
1637  ++it;
1638 
1639  it->second->getPose(c2Mo);
1640  } else {
1641  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1642  << std::endl;
1643  }
1644 }
1645 
1651 void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1652 {
1653  // Clear the map
1654  mapOfCameraPoses.clear();
1655 
1656  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1657  it != m_mapOfTrackers.end(); ++it) {
1658  TrackerWrapper *tracker = it->second;
1659  tracker->getPose(mapOfCameraPoses[it->first]);
1660  }
1661 }
1662 
1666 int vpMbGenericTracker::getTrackerType() const
1667 {
1668  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1669  if (it != m_mapOfTrackers.end()) {
1670  TrackerWrapper *tracker = it->second;
1671  return tracker->getTrackerType();
1672  } else {
1673  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1674  m_referenceCameraName.c_str());
1675  }
1676 }
1677 
1678 void vpMbGenericTracker::init(const vpImage<unsigned char> &I)
1679 {
1680  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1681  it != m_mapOfTrackers.end(); ++it) {
1682  TrackerWrapper *tracker = it->second;
1683  tracker->cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo;
1684  tracker->init(I);
1685  }
1686 }
1687 
1688 void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
1689  const double /*radius*/, const int /*idFace*/, const std::string & /*name*/)
1690 {
1691  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
1692 }
1693 
1694 #ifdef VISP_HAVE_MODULE_GUI
1695 
1738 void vpMbGenericTracker::initClick(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1739  const std::string &initFile1, const std::string &initFile2, const bool displayHelp,
1740  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1741 {
1742  if (m_mapOfTrackers.size() == 2) {
1743  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1744  TrackerWrapper *tracker = it->second;
1745  tracker->initClick(I1, initFile1, displayHelp, T1);
1746 
1747  ++it;
1748 
1749  tracker = it->second;
1750  tracker->initClick(I2, initFile2, displayHelp, T2);
1751 
1752  it = m_mapOfTrackers.find(m_referenceCameraName);
1753  if (it != m_mapOfTrackers.end()) {
1754  tracker = it->second;
1755 
1756  // Set the reference cMo
1757  tracker->getPose(cMo);
1758  }
1759  } else {
1760  throw vpException(vpTrackingException::initializationError,
1761  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1762  }
1763 }
1764 
1807 void vpMbGenericTracker::initClick(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
1808  const std::string &initFile1, const std::string &initFile2, const bool displayHelp,
1809  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1810 {
1811  if (m_mapOfTrackers.size() == 2) {
1812  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1813  TrackerWrapper *tracker = it->second;
1814  tracker->initClick(I_color1, initFile1, displayHelp, T1);
1815 
1816  ++it;
1817 
1818  tracker = it->second;
1819  tracker->initClick(I_color2, initFile2, displayHelp, T2);
1820 
1821  it = m_mapOfTrackers.find(m_referenceCameraName);
1822  if (it != m_mapOfTrackers.end()) {
1823  tracker = it->second;
1824 
1825  // Set the reference cMo
1826  tracker->getPose(cMo);
1827  }
1828  } else {
1829  throw vpException(vpTrackingException::initializationError,
1830  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1831  }
1832 }
1833 
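// Initialization by clicking for multiple cameras: the reference camera must have an image and an
// init file; the other cameras use their own init file when one is provided, otherwise their pose
// is deduced from the reference pose and the known camera transformation matrix.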
1876 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
1877  const std::map<std::string, std::string> &mapOfInitFiles, const bool displayHelp,
1878  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
1879 {
1880  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1881  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
1882  mapOfImages.find(m_referenceCameraName);
1883  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
1884 
1885  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
1886  TrackerWrapper *tracker = it_tracker->second;
1887  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
1888  if (it_T != mapOfT.end())
1889  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
1890  else
1891  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
1892  tracker->getPose(cMo);
1893  } else {
1894  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
1895  }
1896 
1897  // Vector of missing initFile for cameras
1898  std::vector<std::string> vectorOfMissingCameraPoses;
1899 
1900  // Set pose for the specified cameras
1901  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
1902  if (it_tracker->first != m_referenceCameraName) {
1903  it_img = mapOfImages.find(it_tracker->first);
1904  it_initFile = mapOfInitFiles.find(it_tracker->first);
1905 
1906  if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
1907  // InitClick for the current camera
1908  TrackerWrapper *tracker = it_tracker->second;
1909  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
1910  } else {
1911  vectorOfMissingCameraPoses.push_back(it_tracker->first);
1912  }
1913  }
1914  }
1915 
1916  // Init for cameras that do not have an initFile
1917  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
1918  it != vectorOfMissingCameraPoses.end(); ++it) {
1919  it_img = mapOfImages.find(*it);
1920  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
1921  m_mapOfCameraTransformationMatrix.find(*it);
1922 
1923  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
1924  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
1925  m_mapOfTrackers[*it]->cMo = cCurrentMo;
1926  m_mapOfTrackers[*it]->init(*it_img->second);
1927  } else {
1928  throw vpException(vpTrackingException::initializationError,
1929  "Missing image or missing camera transformation "
1930  "matrix! Cannot set the pose for camera: %s!",
1931  it->c_str());
1932  }
1933  }
1934 }
1935 
1978 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
1979  const std::map<std::string, std::string> &mapOfInitFiles, const bool displayHelp,
1980  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
1981 {
1982  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1983  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
1984  mapOfColorImages.find(m_referenceCameraName);
1985  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
1986 
1987  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
1988  TrackerWrapper *tracker = it_tracker->second;
1989  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
1990  if (it_T != mapOfT.end())
1991  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
1992  else
1993  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
1994  tracker->getPose(cMo);
1995  } else {
1996  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
1997  }
1998 
1999  // Vector of missing initFile for cameras
2000  std::vector<std::string> vectorOfMissingCameraPoses;
2001 
2002  // Set pose for the specified cameras
2003  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2004  if (it_tracker->first != m_referenceCameraName) {
2005  it_img = mapOfColorImages.find(it_tracker->first);
2006  it_initFile = mapOfInitFiles.find(it_tracker->first);
2007 
2008  if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2009  // InitClick for the current camera
2010  TrackerWrapper *tracker = it_tracker->second;
2011  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2012  } else {
2013  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2014  }
2015  }
2016  }
2017 
2018  // Init for cameras that do not have an initFile
2019  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2020  it != vectorOfMissingCameraPoses.end(); ++it) {
2021  it_img = mapOfColorImages.find(*it);
2022  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2023  m_mapOfCameraTransformationMatrix.find(*it);
2024 
2025  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2026  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
2027  m_mapOfTrackers[*it]->cMo = cCurrentMo;
2028  vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
2029  m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
2030  } else {
2031  throw vpException(vpTrackingException::initializationError,
2032  "Missing image or missing camera transformation "
2033  "matrix! Cannot set the pose for camera: %s!",
2034  it->c_str());
2035  }
2036  }
2037 }
2038 #endif
2039 
2040 void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
2041  const int /*idFace*/, const std::string & /*name*/)
2042 {
2043  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
2044 }
2045 
2046 void vpMbGenericTracker::initFaceFromCorners(vpMbtPolygon & /*polygon*/)
2047 {
2048  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
2049 }
2050 
2051 void vpMbGenericTracker::initFaceFromLines(vpMbtPolygon & /*polygon*/)
2052 {
2053  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
2054 }
2055 
2085 void vpMbGenericTracker::initFromPoints(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2086  const std::string &initFile1, const std::string &initFile2)
2087 {
2088  if (m_mapOfTrackers.size() == 2) {
2089  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2090  TrackerWrapper *tracker = it->second;
2091  tracker->initFromPoints(I1, initFile1);
2092 
2093  ++it;
2094 
2095  tracker = it->second;
2096  tracker->initFromPoints(I2, initFile2);
2097 
2098  it = m_mapOfTrackers.find(m_referenceCameraName);
2099  if (it != m_mapOfTrackers.end()) {
2100  tracker = it->second;
2101 
2102  // Set the reference cMo
2103  tracker->getPose(cMo);
2104 
2105  // Set the reference camera parameters
2106  tracker->getCameraParameters(cam);
2107  }
2108  } else {
2109  throw vpException(vpTrackingException::initializationError,
2110  "Cannot initFromPoints()! Require two cameras but "
2111  "there are %d cameras!",
2112  m_mapOfTrackers.size());
2113  }
2114 }
2115 
2145 void vpMbGenericTracker::initFromPoints(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2146  const std::string &initFile1, const std::string &initFile2)
2147 {
2148  if (m_mapOfTrackers.size() == 2) {
2149  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2150  TrackerWrapper *tracker = it->second;
2151  tracker->initFromPoints(I_color1, initFile1);
2152 
2153  ++it;
2154 
2155  tracker = it->second;
2156  tracker->initFromPoints(I_color2, initFile2);
2157 
2158  it = m_mapOfTrackers.find(m_referenceCameraName);
2159  if (it != m_mapOfTrackers.end()) {
2160  tracker = it->second;
2161 
2162  // Set the reference cMo
2163  tracker->getPose(cMo);
2164 
2165  // Set the reference camera parameters
2166  tracker->getCameraParameters(cam);
2167  }
2168  } else {
2169  throw vpException(vpTrackingException::initializationError,
2170  "Cannot initFromPoints()! Require two cameras but "
2171  "there are %d cameras!",
2172  m_mapOfTrackers.size());
2173  }
2174 }
2175 
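// Initialization from 2D/3D point correspondences for multiple cameras: the reference camera must
// have an image and an init-points file; cameras without one are initialized from the reference
// pose combined with their camera transformation matrix.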
2176 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2177  const std::map<std::string, std::string> &mapOfInitPoints)
2178 {
2179  // Set the reference cMo
2180  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2181  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2182  mapOfImages.find(m_referenceCameraName);
2183  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2184 
2185  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2186  TrackerWrapper *tracker = it_tracker->second;
2187  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2188  tracker->getPose(cMo);
2189  } else {
2190  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2191  }
2192 
2193  // Vector of missing initPoints for cameras
2194  std::vector<std::string> vectorOfMissingCameraPoints;
2195 
2196  // Set pose for the specified cameras
2197  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2198  it_img = mapOfImages.find(it_tracker->first);
2199  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2200 
2201  if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2202  // Set pose
2203  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2204  } else {
2205  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2206  }
2207  }
2208 
2209  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2210  it != vectorOfMissingCameraPoints.end(); ++it) {
2211  it_img = mapOfImages.find(*it);
2212  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2213  m_mapOfCameraTransformationMatrix.find(*it);
2214 
2215  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2216  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
2217  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2218  } else {
2219  throw vpException(vpTrackingException::initializationError,
2220  "Missing image or missing camera transformation "
2221  "matrix! Cannot init the pose for camera: %s!",
2222  it->c_str());
2223  }
2224  }
2225 }
2226 
2227 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2228  const std::map<std::string, std::string> &mapOfInitPoints)
2229 {
2230  // Set the reference cMo
2231  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2232  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2233  mapOfColorImages.find(m_referenceCameraName);
2234  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2235 
2236  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2237  TrackerWrapper *tracker = it_tracker->second;
2238  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2239  tracker->getPose(cMo);
2240  } else {
2241  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2242  }
2243 
2244  // Vector of missing initPoints for cameras
2245  std::vector<std::string> vectorOfMissingCameraPoints;
2246 
2247  // Set pose for the specified cameras
2248  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2249  it_img = mapOfColorImages.find(it_tracker->first);
2250  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2251 
2252  if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2253  // Set pose
2254  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2255  } else {
2256  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2257  }
2258  }
2259 
2260  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2261  it != vectorOfMissingCameraPoints.end(); ++it) {
2262  it_img = mapOfColorImages.find(*it);
2263  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2264  m_mapOfCameraTransformationMatrix.find(*it);
2265 
2266  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2267  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
2268  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2269  } else {
2271  "Missing image or missing camera transformation "
2272  "matrix! Cannot init the pose for camera: %s!",
2273  it->c_str());
2274  }
2275  }
2276 }
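/*
 * Illustrative usage sketch (not part of the original sources): how the map-based
 * initFromPoints() overloads above can be called for a two-camera configuration.
 * The camera names follow the default naming of this class ("Camera1", "Camera2");
 * the *.init file names are hypothetical.
 *
 *   vpMbGenericTracker tracker(2, vpMbGenericTracker::EDGE_TRACKER);
 *
 *   vpImage<unsigned char> I1, I2; // images acquired beforehand
 *   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
 *   mapOfImages["Camera1"] = &I1;
 *   mapOfImages["Camera2"] = &I2;
 *
 *   // One file of initialization points per camera (hypothetical file names)
 *   std::map<std::string, std::string> mapOfInitPoints;
 *   mapOfInitPoints["Camera1"] = "cube_cam1.init";
 *   mapOfInitPoints["Camera2"] = "cube_cam2.init";
 *
 *   // A camera without an entry falls back to the reference pose combined with
 *   // its camera transformation matrix, as implemented above.
 *   tracker.initFromPoints(mapOfImages, mapOfInitPoints);
 */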
2277 
2289 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2290  const std::string &initFile1, const std::string &initFile2)
2291 {
2292  if (m_mapOfTrackers.size() == 2) {
2293  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2294  TrackerWrapper *tracker = it->second;
2295  tracker->initFromPose(I1, initFile1);
2296 
2297  ++it;
2298 
2299  tracker = it->second;
2300  tracker->initFromPose(I2, initFile2);
2301 
2302  it = m_mapOfTrackers.find(m_referenceCameraName);
2303  if (it != m_mapOfTrackers.end()) {
2304  tracker = it->second;
2305 
2306  // Set the reference cMo
2307  tracker->getPose(cMo);
2308 
2309  // Set the reference camera parameters
2310  tracker->getCameraParameters(cam);
2311  }
2312  } else {
2314  "Cannot initFromPose()! Require two cameras but there "
2315  "are %d cameras!",
2316  m_mapOfTrackers.size());
2317  }
2318 }
2319 
2331 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2332  const std::string &initFile1, const std::string &initFile2)
2333 {
2334  if (m_mapOfTrackers.size() == 2) {
2335  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2336  TrackerWrapper *tracker = it->second;
2337  tracker->initFromPose(I_color1, initFile1);
2338 
2339  ++it;
2340 
2341  tracker = it->second;
2342  tracker->initFromPose(I_color2, initFile2);
2343 
2344  it = m_mapOfTrackers.find(m_referenceCameraName);
2345  if (it != m_mapOfTrackers.end()) {
2346  tracker = it->second;
2347 
2348  // Set the reference cMo
2349  tracker->getPose(cMo);
2350 
2351  // Set the reference camera parameters
2352  tracker->getCameraParameters(cam);
2353  }
2354  } else {
2356  "Cannot initFromPose()! Require two cameras but there "
2357  "are %d cameras!",
2358  m_mapOfTrackers.size());
2359  }
2360 }
2361 
2376 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2377  const std::map<std::string, std::string> &mapOfInitPoses)
2378 {
2379  // Set the reference cMo
2380  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2381  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2382  mapOfImages.find(m_referenceCameraName);
2383  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2384 
2385  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2386  TrackerWrapper *tracker = it_tracker->second;
2387  tracker->initFromPose(*it_img->second, it_initPose->second);
2388  tracker->getPose(cMo);
2389  } else {
2390  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2391  }
2392 
2393  // Vector of missing pose matrices for cameras
2394  std::vector<std::string> vectorOfMissingCameraPoses;
2395 
2396  // Set pose for the specified cameras
2397  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2398  it_img = mapOfImages.find(it_tracker->first);
2399  it_initPose = mapOfInitPoses.find(it_tracker->first);
2400 
2401  if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2402  // Set pose
2403  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2404  } else {
2405  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2406  }
2407  }
2408 
2409  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2410  it != vectorOfMissingCameraPoses.end(); ++it) {
2411  it_img = mapOfImages.find(*it);
2412  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2413  m_mapOfCameraTransformationMatrix.find(*it);
2414 
2415  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2416  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
2417  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2418  } else {
2420  "Missing image or missing camera transformation "
2421  "matrix! Cannot init the pose for camera: %s!",
2422  it->c_str());
2423  }
2424  }
2425 }
2426 
2441 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2442  const std::map<std::string, std::string> &mapOfInitPoses)
2443 {
2444  // Set the reference cMo
2445  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2446  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2447  mapOfColorImages.find(m_referenceCameraName);
2448  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2449 
2450  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2451  TrackerWrapper *tracker = it_tracker->second;
2452  tracker->initFromPose(*it_img->second, it_initPose->second);
2453  tracker->getPose(cMo);
2454  } else {
2455  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2456  }
2457 
2458  // Vector of missing pose matrices for cameras
2459  std::vector<std::string> vectorOfMissingCameraPoses;
2460 
2461  // Set pose for the specified cameras
2462  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2463  it_img = mapOfColorImages.find(it_tracker->first);
2464  it_initPose = mapOfInitPoses.find(it_tracker->first);
2465 
2466  if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2467  // Set pose
2468  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2469  } else {
2470  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2471  }
2472  }
2473 
2474  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2475  it != vectorOfMissingCameraPoses.end(); ++it) {
2476  it_img = mapOfColorImages.find(*it);
2477  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2478  m_mapOfCameraTransformationMatrix.find(*it);
2479 
2480  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2481  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
2482  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2483  } else {
2485  "Missing image or missing camera transformation "
2486  "matrix! Cannot init the pose for camera: %s!",
2487  it->c_str());
2488  }
2489  }
2490 }
2491 
2502 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2503  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2504 {
2505  if (m_mapOfTrackers.size() == 2) {
2506  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2507  it->second->initFromPose(I1, c1Mo);
2508 
2509  ++it;
2510 
2511  it->second->initFromPose(I2, c2Mo);
2512 
2513  this->cMo = c1Mo;
2514  } else {
2516  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2517  }
2518 }
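/*
 * Illustrative usage sketch (not part of the original sources): initializing a
 * stereo configuration from two known poses with the overload above. The numeric
 * values are placeholders; building vpHomogeneousMatrix from a translation and a
 * theta-u rotation is assumed here.
 *
 *   vpMbGenericTracker tracker(2, vpMbGenericTracker::EDGE_TRACKER);
 *   tracker.loadModel("model.cao"); // hypothetical CAD model file
 *
 *   vpImage<unsigned char> I1, I2; // current stereo images
 *   vpHomogeneousMatrix c1Mo(0.0, 0.0, 0.5, 0.0, 0.0, 0.0);  // object 0.5 m in front of camera 1
 *   vpHomogeneousMatrix c2Mc1(0.1, 0.0, 0.0, 0.0, 0.0, 0.0); // assumed 10 cm baseline
 *   vpHomogeneousMatrix c2Mo = c2Mc1 * c1Mo;
 *
 *   tracker.initFromPose(I1, I2, c1Mo, c2Mo); // the reference cMo becomes c1Mo
 */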
2519 
2530 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2531  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2532 {
2533  if (m_mapOfTrackers.size() == 2) {
2534  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2535  it->second->initFromPose(I_color1, c1Mo);
2536 
2537  ++it;
2538 
2539  it->second->initFromPose(I_color2, c2Mo);
2540 
2541  this->cMo = c1Mo;
2542  } else {
2544  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2545  }
2546 }
2547 
2561 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2562  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2563 {
2564  // Set the reference cMo
2565  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2566  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2567  mapOfImages.find(m_referenceCameraName);
2568  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2569 
2570  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2571  TrackerWrapper *tracker = it_tracker->second;
2572  tracker->initFromPose(*it_img->second, it_camPose->second);
2573  tracker->getPose(cMo);
2574  } else {
2575  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2576  }
2577 
2578  // Vector of missing pose matrices for cameras
2579  std::vector<std::string> vectorOfMissingCameraPoses;
2580 
2581  // Set pose for the specified cameras
2582  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2583  it_img = mapOfImages.find(it_tracker->first);
2584  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2585 
2586  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2587  // Set pose
2588  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2589  } else {
2590  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2591  }
2592  }
2593 
2594  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2595  it != vectorOfMissingCameraPoses.end(); ++it) {
2596  it_img = mapOfImages.find(*it);
2597  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2598  m_mapOfCameraTransformationMatrix.find(*it);
2599 
2600  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2601  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
2602  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2603  } else {
2605  "Missing image or missing camera transformation "
2606  "matrix! Cannot set the pose for camera: %s!",
2607  it->c_str());
2608  }
2609  }
2610 }
2611 
2625 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2626  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2627 {
2628  // Set the reference cMo
2629  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2630  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2631  mapOfColorImages.find(m_referenceCameraName);
2632  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2633 
2634  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2635  TrackerWrapper *tracker = it_tracker->second;
2636  tracker->initFromPose(*it_img->second, it_camPose->second);
2637  tracker->getPose(cMo);
2638  } else {
2639  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2640  }
2641 
2642  // Vector of missing pose matrices for cameras
2643  std::vector<std::string> vectorOfMissingCameraPoses;
2644 
2645  // Set pose for the specified cameras
2646  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2647  it_img = mapOfColorImages.find(it_tracker->first);
2648  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2649 
2650  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2651  // Set pose
2652  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2653  } else {
2654  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2655  }
2656  }
2657 
2658  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2659  it != vectorOfMissingCameraPoses.end(); ++it) {
2660  it_img = mapOfColorImages.find(*it);
2661  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2662  m_mapOfCameraTransformationMatrix.find(*it);
2663 
2664  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2665  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
2666  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2667  } else {
2669  "Missing image or missing camera transformation "
2670  "matrix! Cannot set the pose for camera: %s!",
2671  it->c_str());
2672  }
2673  }
2674 }
2675 
2686 void vpMbGenericTracker::loadConfigFile(const std::string &configFile)
2687 {
2688  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2689  it != m_mapOfTrackers.end(); ++it) {
2690  TrackerWrapper *tracker = it->second;
2691  tracker->loadConfigFile(configFile);
2692  }
2693 
2694  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
2695  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2696  }
2697 
2698  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(this->cam);
2699  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2700  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2701  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2702 }
2703 
2717 void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2)
2718 {
2719  if (m_mapOfTrackers.size() != 2) {
2720  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
2721  }
2722 
2723  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2724  TrackerWrapper *tracker = it_tracker->second;
2725  tracker->loadConfigFile(configFile1);
2726 
2727  ++it_tracker;
2728  tracker = it_tracker->second;
2729  tracker->loadConfigFile(configFile2);
2730 
2731  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
2732  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2733  }
2734 
2735  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(this->cam);
2736  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2737  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2738  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2739 }
2740 
2753 void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles)
2754 {
2755  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2756  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2757  TrackerWrapper *tracker = it_tracker->second;
2758 
2759  std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
2760  if (it_config != mapOfConfigFiles.end()) {
2761  tracker->loadConfigFile(it_config->second);
2762  } else {
2763  throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
2764  it_tracker->first.c_str());
2765  }
2766  }
2767 
2768  // Set the reference camera parameters
2769  std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
2770  if (it != m_mapOfTrackers.end()) {
2771  TrackerWrapper *tracker = it->second;
2772  tracker->getCameraParameters(cam);
2773 
2774  // Set clipping
2775  this->clippingFlag = tracker->getClipping();
2776  this->angleAppears = tracker->getAngleAppear();
2777  this->angleDisappears = tracker->getAngleDisappear();
2778  } else {
2779  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
2780  m_referenceCameraName.c_str());
2781  }
2782 }
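/*
 * Illustrative usage sketch (not part of the original sources): loading one
 * settings file per camera with the map-based loadConfigFile() overload above.
 * The XML file names are hypothetical; the entry of the reference camera also
 * updates the global camera parameters, clipping flags and visibility angles,
 * as implemented above.
 *
 *   std::map<std::string, std::string> mapOfConfigFiles;
 *   mapOfConfigFiles["Camera1"] = "cube_cam1.xml";
 *   mapOfConfigFiles["Camera2"] = "cube_cam2.xml";
 *   tracker.loadConfigFile(mapOfConfigFiles);
 */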
2783 
2814 void vpMbGenericTracker::loadModel(const std::string &modelFile, const bool verbose, const vpHomogeneousMatrix &T)
2815 {
2816  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2817  it != m_mapOfTrackers.end(); ++it) {
2818  TrackerWrapper *tracker = it->second;
2819  tracker->loadModel(modelFile, verbose, T);
2820  }
2821 }
2822 
2857 void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, const bool verbose,
2858  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
2859 {
2860  if (m_mapOfTrackers.size() != 2) {
2861  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
2862  }
2863 
2864  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2865  TrackerWrapper *tracker = it_tracker->second;
2866  tracker->loadModel(modelFile1, verbose, T1);
2867 
2868  ++it_tracker;
2869  tracker = it_tracker->second;
2870  tracker->loadModel(modelFile2, verbose, T2);
2871 }
2872 
2904 void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, const bool verbose,
2905  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2906 {
2907  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2908  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2909  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
2910 
2911  if (it_model != mapOfModelFiles.end()) {
2912  TrackerWrapper *tracker = it_tracker->second;
2913  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2914 
2915  if (it_T != mapOfT.end())
2916  tracker->loadModel(it_model->second, verbose, it_T->second);
2917  else
2918  tracker->loadModel(it_model->second, verbose);
2919  } else {
2920  throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
2921  it_tracker->first.c_str());
2922  }
2923  }
2924 }
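/*
 * Illustrative usage sketch (not part of the original sources): loading a CAD
 * model per camera with the map-based loadModel() overload above. The optional
 * map of transformations lets a camera load the same model expressed in another
 * frame. File names and the transformation value are placeholders.
 *
 *   std::map<std::string, std::string> mapOfModelFiles;
 *   mapOfModelFiles["Camera1"] = "cube.cao";
 *   mapOfModelFiles["Camera2"] = "cube.cao";
 *
 *   std::map<std::string, vpHomogeneousMatrix> mapOfT;
 *   mapOfT["Camera2"] = vpHomogeneousMatrix(0.0, 0.0, 0.0, 0.0, 0.0, M_PI); // hypothetical
 *
 *   tracker.loadModel(mapOfModelFiles, false, mapOfT);
 */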
2925 
2926 #ifdef VISP_HAVE_PCL
2927 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2928  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
2929 {
2930  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2931  it != m_mapOfTrackers.end(); ++it) {
2932  TrackerWrapper *tracker = it->second;
2933  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
2934  }
2935 }
2936 #endif
2937 
2938 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2939  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
2940  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
2941  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
2942 {
2943  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2944  it != m_mapOfTrackers.end(); ++it) {
2945  TrackerWrapper *tracker = it->second;
2946  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
2947  mapOfPointCloudHeights[it->first]);
2948  }
2949 }
2950 
2962 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
2963  const vpHomogeneousMatrix &cMo_, const bool verbose,
2964  const vpHomogeneousMatrix &T)
2965 {
2966  if (m_mapOfTrackers.size() != 1) {
2967  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
2968  m_mapOfTrackers.size());
2969  }
2970 
2971  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2972  if (it_tracker != m_mapOfTrackers.end()) {
2973  TrackerWrapper *tracker = it_tracker->second;
2974  tracker->reInitModel(I, cad_name, cMo_, verbose, T);
2975 
2976  // Set reference pose
2977  tracker->getPose(cMo);
2978  } else {
2979  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
2980  }
2981 
2982  modelInitialised = true;
2983 }
2984 
2996 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
2997  const vpHomogeneousMatrix &cMo_, const bool verbose,
2998  const vpHomogeneousMatrix &T)
2999 {
3000  if (m_mapOfTrackers.size() != 1) {
3001  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3002  m_mapOfTrackers.size());
3003  }
3004 
3005  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3006  if (it_tracker != m_mapOfTrackers.end()) {
3007  TrackerWrapper *tracker = it_tracker->second;
3008  tracker->reInitModel(I_color, cad_name, cMo_, verbose, T);
3009 
3010  // Set reference pose
3011  tracker->getPose(cMo);
3012  } else {
3013  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3014  }
3015 
3016  modelInitialised = true;
3017 }
3018 
3039 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
3040  const std::string &cad_name1, const std::string &cad_name2,
3041  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
3042  const bool verbose,
3043  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3044 {
3045  if (m_mapOfTrackers.size() == 2) {
3046  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3047 
3048  it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3049 
3050  ++it_tracker;
3051 
3052  it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3053 
3054  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3055  if (it_tracker != m_mapOfTrackers.end()) {
3056  // Set reference pose
3057  it_tracker->second->getPose(cMo);
3058  }
3059  } else {
3060  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3061  }
3062 
3063  modelInitialised = true;
3064 }
3065 
3086 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
3087  const std::string &cad_name1, const std::string &cad_name2,
3088  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
3089  const bool verbose,
3090  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3091 {
3092  if (m_mapOfTrackers.size() == 2) {
3093  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3094 
3095  it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3096 
3097  ++it_tracker;
3098 
3099  it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3100 
3101  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3102  if (it_tracker != m_mapOfTrackers.end()) {
3103  // Set reference pose
3104  it_tracker->second->getPose(cMo);
3105  }
3106  } else {
3107  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3108  }
3109 
3110  modelInitialised = true;
3111 }
3112 
3127 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3128  const std::map<std::string, std::string> &mapOfModelFiles,
3129  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
3130  const bool verbose,
3131  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3132 {
3133  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3134  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3135  mapOfImages.find(m_referenceCameraName);
3136  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3137  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3138 
3139  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3140  it_camPose != mapOfCameraPoses.end()) {
3141  TrackerWrapper *tracker = it_tracker->second;
3142  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3143  if (it_T != mapOfT.end())
3144  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3145  else
3146  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3147 
3148  // Set reference pose
3149  tracker->getPose(cMo);
3150  } else {
3151  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3152  }
3153 
3154  std::vector<std::string> vectorOfMissingCameras;
3155  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3156  if (it_tracker->first != m_referenceCameraName) {
3157  it_img = mapOfImages.find(it_tracker->first);
3158  it_model = mapOfModelFiles.find(it_tracker->first);
3159  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3160 
3161  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3162  TrackerWrapper *tracker = it_tracker->second;
3163  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3164  } else {
3165  vectorOfMissingCameras.push_back(it_tracker->first);
3166  }
3167  }
3168  }
3169 
3170  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3171  ++it) {
3172  it_img = mapOfImages.find(*it);
3173  it_model = mapOfModelFiles.find(*it);
3174  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3175  m_mapOfCameraTransformationMatrix.find(*it);
3176 
3177  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3178  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3179  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
3180  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3181  }
3182  }
3183 
3184  modelInitialised = true;
3185 }
3186 
3201 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3202  const std::map<std::string, std::string> &mapOfModelFiles,
3203  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
3204  const bool verbose,
3205  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3206 {
3207  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3208  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
3209  mapOfColorImages.find(m_referenceCameraName);
3210  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3211  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3212 
3213  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3214  it_camPose != mapOfCameraPoses.end()) {
3215  TrackerWrapper *tracker = it_tracker->second;
3216  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3217  if (it_T != mapOfT.end())
3218  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3219  else
3220  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3221 
3222  // Set reference pose
3223  tracker->getPose(cMo);
3224  } else {
3225  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3226  }
3227 
3228  std::vector<std::string> vectorOfMissingCameras;
3229  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3230  if (it_tracker->first != m_referenceCameraName) {
3231  it_img = mapOfColorImages.find(it_tracker->first);
3232  it_model = mapOfModelFiles.find(it_tracker->first);
3233  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3234 
3235  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3236  TrackerWrapper *tracker = it_tracker->second;
3237  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3238  } else {
3239  vectorOfMissingCameras.push_back(it_tracker->first);
3240  }
3241  }
3242  }
3243 
3244  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3245  ++it) {
3246  it_img = mapOfColorImages.find(*it);
3247  it_model = mapOfModelFiles.find(*it);
3248  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3249  m_mapOfCameraTransformationMatrix.find(*it);
3250 
3251  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3252  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3253  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
3254  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3255  }
3256  }
3257 
3258  modelInitialised = true;
3259 }
3260 
3265 void vpMbGenericTracker::resetTracker()
3266 {
3267  cMo.eye();
3268 
3269  useScanLine = false;
3270 
3271 #ifdef VISP_HAVE_OGRE
3272  useOgre = false;
3273 #endif
3274 
3275  m_computeInteraction = true;
3276  m_lambda = 1.0;
3277 
3278  angleAppears = vpMath::rad(89);
3279  angleDisappears = vpMath::rad(89);
3280  clippingFlag = vpPolygon3D::NO_CLIPPING;
3281  distNearClip = 0.001;
3282  distFarClip = 100;
3283 
3284  m_optimizationMethod = vpMbTracker::GAUSS_NEWTON_OPT;
3285  m_maxIter = 30;
3286  m_stopCriteriaEpsilon = 1e-8;
3287  m_initialMu = 0.01;
3288 
3289  // Only for Edge
3290  m_percentageGdPt = 0.4;
3291 
3292  // Only for KLT
3293  m_thresholdOutlier = 0.5;
3294 
3295  // Reset default ponderation between each feature type
3296  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
3297 
3298 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3299  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
3300 #endif
3301 
3302  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
3303  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
3304 
3305  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3306  it != m_mapOfTrackers.end(); ++it) {
3307  TrackerWrapper *tracker = it->second;
3308  tracker->resetTracker();
3309  }
3310 }
3311 
3321 void vpMbGenericTracker::setAngleAppear(const double &a)
3322 {
3323  vpMbTracker::setAngleAppear(a);
3324 
3325  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3326  it != m_mapOfTrackers.end(); ++it) {
3327  TrackerWrapper *tracker = it->second;
3328  tracker->setAngleAppear(a);
3329  }
3330 }
3331 
3344 void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3345 {
3346  if (m_mapOfTrackers.size() == 2) {
3347  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3348  it->second->setAngleAppear(a1);
3349 
3350  ++it;
3351  it->second->setAngleAppear(a2);
3352 
3353  it = m_mapOfTrackers.find(m_referenceCameraName);
3354  if (it != m_mapOfTrackers.end()) {
3355  angleAppears = it->second->getAngleAppear();
3356  } else {
3357  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3358  }
3359  } else {
3360  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3361  m_mapOfTrackers.size());
3362  }
3363 }
3364 
3374 void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3375 {
3376  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3377  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3378 
3379  if (it_tracker != m_mapOfTrackers.end()) {
3380  TrackerWrapper *tracker = it_tracker->second;
3381  tracker->setAngleAppear(it->second);
3382 
3383  if (it->first == m_referenceCameraName) {
3384  angleAppears = it->second;
3385  }
3386  }
3387  }
3388 }
3389 
3399 void vpMbGenericTracker::setAngleDisappear(const double &a)
3400 {
3401  vpMbTracker::setAngleDisappear(a);
3402 
3403  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3404  it != m_mapOfTrackers.end(); ++it) {
3405  TrackerWrapper *tracker = it->second;
3406  tracker->setAngleDisappear(a);
3407  }
3408 }
3409 
3422 void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3423 {
3424  if (m_mapOfTrackers.size() == 2) {
3425  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3426  it->second->setAngleDisappear(a1);
3427 
3428  ++it;
3429  it->second->setAngleDisappear(a2);
3430 
3431  it = m_mapOfTrackers.find(m_referenceCameraName);
3432  if (it != m_mapOfTrackers.end()) {
3433  angleDisappears = it->second->getAngleDisappear();
3434  } else {
3435  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3436  }
3437  } else {
3438  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3439  m_mapOfTrackers.size());
3440  }
3441 }
3442 
3452 void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3453 {
3454  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3455  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3456 
3457  if (it_tracker != m_mapOfTrackers.end()) {
3458  TrackerWrapper *tracker = it_tracker->second;
3459  tracker->setAngleDisappear(it->second);
3460 
3461  if (it->first == m_referenceCameraName) {
3462  angleDisappears = it->second;
3463  }
3464  }
3465  }
3466 }
3467 
3473 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera)
3474 {
3475  vpMbTracker::setCameraParameters(camera);
3476 
3477  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3478  it != m_mapOfTrackers.end(); ++it) {
3479  TrackerWrapper *tracker = it->second;
3480  tracker->setCameraParameters(camera);
3481  }
3482 }
3483 
3492 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera1, const vpCameraParameters &camera2)
3493 {
3494  if (m_mapOfTrackers.size() == 2) {
3495  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3496  it->second->setCameraParameters(camera1);
3497 
3498  ++it;
3499  it->second->setCameraParameters(camera2);
3500 
3501  it = m_mapOfTrackers.find(m_referenceCameraName);
3502  if (it != m_mapOfTrackers.end()) {
3503  it->second->getCameraParameters(cam);
3504  } else {
3505  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3506  }
3507  } else {
3508  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3509  m_mapOfTrackers.size());
3510  }
3511 }
3512 
3521 void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3522 {
3523  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3524  it != mapOfCameraParameters.end(); ++it) {
3525  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3526 
3527  if (it_tracker != m_mapOfTrackers.end()) {
3528  TrackerWrapper *tracker = it_tracker->second;
3529  tracker->setCameraParameters(it->second);
3530 
3531  if (it->first == m_referenceCameraName) {
3532  cam = it->second;
3533  }
3534  }
3535  }
3536 }
3537 
3546 void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3547  const vpHomogeneousMatrix &cameraTransformationMatrix)
3548 {
3549  std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3550 
3551  if (it != m_mapOfCameraTransformationMatrix.end()) {
3552  it->second = cameraTransformationMatrix;
3553  } else {
3554  throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3555  }
3556 }
3557 
3565 void vpMbGenericTracker::setCameraTransformationMatrix(
3566  const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3567 {
3568  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3569  it != mapOfTransformationMatrix.end(); ++it) {
3570  std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3571  m_mapOfCameraTransformationMatrix.find(it->first);
3572 
3573  if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3574  it_camTrans->second = it->second;
3575  }
3576  }
3577 }
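/*
 * Illustrative usage sketch (not part of the original sources): registering the
 * extrinsic transformations used to propagate the reference pose. As the pose
 * propagation code above shows (cCurrentMo = it_camTrans->second * cMo), the matrix
 * stored for a camera maps the reference-camera pose into that camera frame,
 * i.e. c2Mc1 when "Camera1" is the reference. The baseline value is a placeholder.
 *
 *   vpHomogeneousMatrix c1Mc1; // identity: the reference camera w.r.t. itself
 *   vpHomogeneousMatrix c2Mc1(0.1, 0.0, 0.0, 0.0, 0.0, 0.0); // assumed extrinsics
 *
 *   std::map<std::string, vpHomogeneousMatrix> mapOfCameraTransformations;
 *   mapOfCameraTransformations["Camera1"] = c1Mc1;
 *   mapOfCameraTransformations["Camera2"] = c2Mc1;
 *   tracker.setCameraTransformationMatrix(mapOfCameraTransformations);
 */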
3578 
3588 void vpMbGenericTracker::setClipping(const unsigned int &flags)
3589 {
3590  vpMbTracker::setClipping(flags);
3591 
3592  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3593  it != m_mapOfTrackers.end(); ++it) {
3594  TrackerWrapper *tracker = it->second;
3595  tracker->setClipping(flags);
3596  }
3597 }
3598 
3609 void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3610 {
3611  if (m_mapOfTrackers.size() == 2) {
3612  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3613  it->second->setClipping(flags1);
3614 
3615  ++it;
3616  it->second->setClipping(flags2);
3617 
3618  it = m_mapOfTrackers.find(m_referenceCameraName);
3619  if (it != m_mapOfTrackers.end()) {
3620  clippingFlag = it->second->getClipping();
3621  } else {
3622  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3623  }
3624  } else {
3625  std::stringstream ss;
3626  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3627  throw vpException(vpTrackingException::fatalError, ss.str());
3628  }
3629 }
3630 
3638 void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
3639 {
3640  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
3641  it != mapOfClippingFlags.end(); ++it) {
3642  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3643 
3644  if (it_tracker != m_mapOfTrackers.end()) {
3645  TrackerWrapper *tracker = it_tracker->second;
3646  tracker->setClipping(it->second);
3647 
3648  if (it->first == m_referenceCameraName) {
3649  clippingFlag = it->second;
3650  }
3651  }
3652  }
3653 }
3654 
3664 void vpMbGenericTracker::setDepthDenseFilteringMaxDistance(const double maxDistance)
3665 {
3666  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3667  it != m_mapOfTrackers.end(); ++it) {
3668  TrackerWrapper *tracker = it->second;
3669  tracker->setDepthDenseFilteringMaxDistance(maxDistance);
3670  }
3671 }
3672 
3681 void vpMbGenericTracker::setDepthDenseFilteringMethod(const int method)
3682 {
3683  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3684  it != m_mapOfTrackers.end(); ++it) {
3685  TrackerWrapper *tracker = it->second;
3686  tracker->setDepthDenseFilteringMethod(method);
3687  }
3688 }
3689 
3699 void vpMbGenericTracker::setDepthDenseFilteringMinDistance(const double minDistance)
3700 {
3701  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3702  it != m_mapOfTrackers.end(); ++it) {
3703  TrackerWrapper *tracker = it->second;
3704  tracker->setDepthDenseFilteringMinDistance(minDistance);
3705  }
3706 }
3707 
3717 void vpMbGenericTracker::setDepthDenseFilteringOccupancyRatio(const double occupancyRatio)
3718 {
3719  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3720  it != m_mapOfTrackers.end(); ++it) {
3721  TrackerWrapper *tracker = it->second;
3722  tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
3723  }
3724 }
3725 
3734 void vpMbGenericTracker::setDepthDenseSamplingStep(const unsigned int stepX, const unsigned int stepY)
3735 {
3736  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3737  it != m_mapOfTrackers.end(); ++it) {
3738  TrackerWrapper *tracker = it->second;
3739  tracker->setDepthDenseSamplingStep(stepX, stepY);
3740  }
3741 }
3742 
3750 void vpMbGenericTracker::setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
3751 {
3752  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3753  it != m_mapOfTrackers.end(); ++it) {
3754  TrackerWrapper *tracker = it->second;
3755  tracker->setDepthNormalFaceCentroidMethod(method);
3756  }
3757 }
3758 
3767 void vpMbGenericTracker::setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
3768 {
3769  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3770  it != m_mapOfTrackers.end(); ++it) {
3771  TrackerWrapper *tracker = it->second;
3772  tracker->setDepthNormalFeatureEstimationMethod(method);
3773  }
3774 }
3775 
3783 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationMethod(const int method)
3784 {
3785  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3786  it != m_mapOfTrackers.end(); ++it) {
3787  TrackerWrapper *tracker = it->second;
3788  tracker->setDepthNormalPclPlaneEstimationMethod(method);
3789  }
3790 }
3791 
3799 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacMaxIter(const int maxIter)
3800 {
3801  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3802  it != m_mapOfTrackers.end(); ++it) {
3803  TrackerWrapper *tracker = it->second;
3804  tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
3805  }
3806 }
3807 
3815 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacThreshold(const double thresold)
3816 {
3817  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3818  it != m_mapOfTrackers.end(); ++it) {
3819  TrackerWrapper *tracker = it->second;
3820  tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
3821  }
3822 }
3823 
3832 void vpMbGenericTracker::setDepthNormalSamplingStep(const unsigned int stepX, const unsigned int stepY)
3833 {
3834  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3835  it != m_mapOfTrackers.end(); ++it) {
3836  TrackerWrapper *tracker = it->second;
3837  tracker->setDepthNormalSamplingStep(stepX, stepY);
3838  }
3839 }
3840 
3859 void vpMbGenericTracker::setDisplayFeatures(const bool displayF)
3860 {
3861  vpMbTracker::setDisplayFeatures(displayF);
3862 
3863  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3864  it != m_mapOfTrackers.end(); ++it) {
3865  TrackerWrapper *tracker = it->second;
3866  tracker->setDisplayFeatures(displayF);
3867  }
3868 }
3869 
3877 void vpMbGenericTracker::setFarClippingDistance(const double &dist)
3878 {
3879  vpMbTracker::setFarClippingDistance(dist);
3880 
3881  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3882  it != m_mapOfTrackers.end(); ++it) {
3883  TrackerWrapper *tracker = it->second;
3884  tracker->setFarClippingDistance(dist);
3885  }
3886 }
3887 
3896 void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
3897 {
3898  if (m_mapOfTrackers.size() == 2) {
3899  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3900  it->second->setFarClippingDistance(dist1);
3901 
3902  ++it;
3903  it->second->setFarClippingDistance(dist2);
3904 
3905  it = m_mapOfTrackers.find(m_referenceCameraName);
3906  if (it != m_mapOfTrackers.end()) {
3907  distFarClip = it->second->getFarClippingDistance();
3908  } else {
3909  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3910  }
3911  } else {
3912  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3913  m_mapOfTrackers.size());
3914  }
3915 }
3916 
3922 void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
3923 {
3924  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
3925  ++it) {
3926  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3927 
3928  if (it_tracker != m_mapOfTrackers.end()) {
3929  TrackerWrapper *tracker = it_tracker->second;
3930  tracker->setFarClippingDistance(it->second);
3931 
3932  if (it->first == m_referenceCameraName) {
3933  distFarClip = it->second;
3934  }
3935  }
3936  }
3937 }
3938 
3945 void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
3946 {
3947  for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
3948  ++it) {
3949  std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
3950  if (it_factor != mapOfFeatureFactors.end()) {
3951  it->second = it_factor->second;
3952  }
3953  }
3954 }
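/*
 * Illustrative usage sketch (not part of the original sources): re-weighting the
 * feature types with setFeatureFactors(). Only entries whose vpTrackerType key is
 * already present in m_mapOfFeatureFactors are taken into account, as the loop
 * above shows. The numeric weights are placeholders.
 *
 *   std::map<vpMbGenericTracker::vpTrackerType, double> mapOfFeatureFactors;
 *   mapOfFeatureFactors[vpMbGenericTracker::EDGE_TRACKER] = 1.0;
 *   mapOfFeatureFactors[vpMbGenericTracker::KLT_TRACKER] = 1.0;
 *   mapOfFeatureFactors[vpMbGenericTracker::DEPTH_DENSE_TRACKER] = 0.5;
 *   tracker.setFeatureFactors(mapOfFeatureFactors);
 */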
3955 
3971 void vpMbGenericTracker::setGoodMovingEdgesRatioThreshold(const double threshold)
3972 {
3973  m_percentageGdPt = threshold;
3974 
3975  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3976  it != m_mapOfTrackers.end(); ++it) {
3977  TrackerWrapper *tracker = it->second;
3978  tracker->setGoodMovingEdgesRatioThreshold(threshold);
3979  }
3980 }
3981 
3982 #ifdef VISP_HAVE_OGRE
3983 
3994 void vpMbGenericTracker::setGoodNbRayCastingAttemptsRatio(const double &ratio)
3995 {
3996  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3997  it != m_mapOfTrackers.end(); ++it) {
3998  TrackerWrapper *tracker = it->second;
3999  tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4000  }
4001 }
4002 
4014 void vpMbGenericTracker::setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
4015 {
4016  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4017  it != m_mapOfTrackers.end(); ++it) {
4018  TrackerWrapper *tracker = it->second;
4019  tracker->setNbRayCastingAttemptsForVisibility(attempts);
4020  }
4021 }
4022 #endif
4023 
4024 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4025 
4032 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t)
4033 {
4034  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4035  it != m_mapOfTrackers.end(); ++it) {
4036  TrackerWrapper *tracker = it->second;
4037  tracker->setKltOpencv(t);
4038  }
4039 }
4040 
4049 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t1, const vpKltOpencv &t2)
4050 {
4051  if (m_mapOfTrackers.size() == 2) {
4052  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4053  it->second->setKltOpencv(t1);
4054 
4055  ++it;
4056  it->second->setKltOpencv(t2);
4057  } else {
4058  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4059  m_mapOfTrackers.size());
4060  }
4061 }
4062 
4068 void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4069 {
4070  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4071  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4072 
4073  if (it_tracker != m_mapOfTrackers.end()) {
4074  TrackerWrapper *tracker = it_tracker->second;
4075  tracker->setKltOpencv(it->second);
4076  }
4077  }
4078 }
4079 
4087 void vpMbGenericTracker::setKltThresholdAcceptation(const double th)
4088 {
4089  m_thresholdOutlier = th;
4090 
4091  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4092  it != m_mapOfTrackers.end(); ++it) {
4093  TrackerWrapper *tracker = it->second;
4094  tracker->setKltThresholdAcceptation(th);
4095  }
4096 }
4097 #endif
4098 
4111 void vpMbGenericTracker::setLod(const bool useLod, const std::string &name)
4112 {
4113  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4114  it != m_mapOfTrackers.end(); ++it) {
4115  TrackerWrapper *tracker = it->second;
4116  tracker->setLod(useLod, name);
4117  }
4118 }
4119 
4120 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4121 
4128 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4129 {
4130  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4131  it != m_mapOfTrackers.end(); ++it) {
4132  TrackerWrapper *tracker = it->second;
4133  tracker->setKltMaskBorder(e);
4134  }
4135 }
4136 
4145 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4146 {
4147  if (m_mapOfTrackers.size() == 2) {
4148  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4149  it->second->setKltMaskBorder(e1);
4150 
4151  ++it;
4152 
4153  it->second->setKltMaskBorder(e2);
4154  } else {
4155  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4156  m_mapOfTrackers.size());
4157  }
4158 }
4159 
4165 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4166 {
4167  for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4168  ++it) {
4169  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4170 
4171  if (it_tracker != m_mapOfTrackers.end()) {
4172  TrackerWrapper *tracker = it_tracker->second;
4173  tracker->setKltMaskBorder(it->second);
4174  }
4175  }
4176 }
4177 #endif
4178 
4184 void vpMbGenericTracker::setMask(const vpImage<bool> &mask)
4185 {
4186  vpMbTracker::setMask(mask);
4187 
4188  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4189  it != m_mapOfTrackers.end(); ++it) {
4190  TrackerWrapper *tracker = it->second;
4191  tracker->setMask(mask);
4192  }
4193 }
4194 
4195 
4207 void vpMbGenericTracker::setMinLineLengthThresh(const double minLineLengthThresh, const std::string &name)
4208 {
4209  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4210  it != m_mapOfTrackers.end(); ++it) {
4211  TrackerWrapper *tracker = it->second;
4212  tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4213  }
4214 }
4215 
4226 void vpMbGenericTracker::setMinPolygonAreaThresh(const double minPolygonAreaThresh, const std::string &name)
4227 {
4228  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4229  it != m_mapOfTrackers.end(); ++it) {
4230  TrackerWrapper *tracker = it->second;
4231  tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4232  }
4233 }
4234 
4242 void vpMbGenericTracker::setMovingEdge(const vpMe &me)
4243 {
4244  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4245  it != m_mapOfTrackers.end(); ++it) {
4246  TrackerWrapper *tracker = it->second;
4247  tracker->setMovingEdge(me);
4248  }
4249 }
4250 
4260 void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4261 {
4262  if (m_mapOfTrackers.size() == 2) {
4263  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4264  it->second->setMovingEdge(me1);
4265 
4266  ++it;
4267 
4268  it->second->setMovingEdge(me2);
4269  } else {
4270  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4271  m_mapOfTrackers.size());
4272  }
4273 }
4274 
4280 void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4281 {
4282  for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4283  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4284 
4285  if (it_tracker != m_mapOfTrackers.end()) {
4286  TrackerWrapper *tracker = it_tracker->second;
4287  tracker->setMovingEdge(it->second);
4288  }
4289  }
4290 }
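/*
 * Illustrative usage sketch (not part of the original sources): tuning the moving
 * edges sites through the setMovingEdge() overloads above. The vpMe setters and
 * numeric values below are given as an assumed example only.
 *
 *   vpMe me;
 *   me.setMaskSize(5);
 *   me.setRange(8);
 *   me.setThreshold(10000);
 *   me.setMu1(0.5);
 *   me.setMu2(0.5);
 *   me.setSampleStep(4);
 *
 *   tracker.setMovingEdge(me); // same settings applied to every camera
 */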
4291 
4299 void vpMbGenericTracker::setNearClippingDistance(const double &dist)
4300 {
4301  vpMbTracker::setNearClippingDistance(dist);
4302 
4303  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4304  it != m_mapOfTrackers.end(); ++it) {
4305  TrackerWrapper *tracker = it->second;
4306  tracker->setNearClippingDistance(dist);
4307  }
4308 }
4309 
4318 void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4319 {
4320  if (m_mapOfTrackers.size() == 2) {
4321  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4322  it->second->setNearClippingDistance(dist1);
4323 
4324  ++it;
4325 
4326  it->second->setNearClippingDistance(dist2);
4327 
4328  it = m_mapOfTrackers.find(m_referenceCameraName);
4329  if (it != m_mapOfTrackers.end()) {
4330  distNearClip = it->second->getNearClippingDistance();
4331  } else {
4332  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4333  }
4334  } else {
4335  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4336  m_mapOfTrackers.size());
4337  }
4338 }
4339 
4345 void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4346 {
4347  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4348  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4349 
4350  if (it_tracker != m_mapOfTrackers.end()) {
4351  TrackerWrapper *tracker = it_tracker->second;
4352  tracker->setNearClippingDistance(it->second);
4353 
4354  if (it->first == m_referenceCameraName) {
4355  distNearClip = it->second;
4356  }
4357  }
4358  }
4359 }
4360 
4373 void vpMbGenericTracker::setOgreShowConfigDialog(const bool showConfigDialog)
4374 {
4375  vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4376 
4377  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4378  it != m_mapOfTrackers.end(); ++it) {
4379  TrackerWrapper *tracker = it->second;
4380  tracker->setOgreShowConfigDialog(showConfigDialog);
4381  }
4382 }
4383 
4394 void vpMbGenericTracker::setOgreVisibilityTest(const bool &v)
4395 {
4396  vpMbTracker::setOgreVisibilityTest(v);
4397 
4398  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4399  it != m_mapOfTrackers.end(); ++it) {
4400  TrackerWrapper *tracker = it->second;
4401  tracker->setOgreVisibilityTest(v);
4402  }
4403 
4404 #ifdef VISP_HAVE_OGRE
4405  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4406  it != m_mapOfTrackers.end(); ++it) {
4407  TrackerWrapper *tracker = it->second;
4408  tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4409  }
4410 #endif
4411 }
4412 
4420 void vpMbGenericTracker::setOptimizationMethod(const vpMbtOptimizationMethod &opt)
4421 {
4422  vpMbTracker::setOptimizationMethod(opt);
4423 
4424  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4425  it != m_mapOfTrackers.end(); ++it) {
4426  TrackerWrapper *tracker = it->second;
4427  tracker->setOptimizationMethod(opt);
4428  }
4429 }
4430 
4443 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
4444 {
4445  if (m_mapOfTrackers.size() > 1) {
4446  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4447  "to be configured with only one camera!");
4448  }
4449 
4450  cMo = cdMo;
4451 
4452  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4453  if (it != m_mapOfTrackers.end()) {
4454  TrackerWrapper *tracker = it->second;
4455  tracker->setPose(I, cdMo);
4456  } else {
4457  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4458  m_referenceCameraName.c_str());
4459  }
4460 }
4461 
4474 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
4475 {
4476  if (m_mapOfTrackers.size() > 1) {
4477  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4478  "to be configured with only one camera!");
4479  }
4480 
4481  cMo = cdMo;
4482 
4483  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4484  if (it != m_mapOfTrackers.end()) {
4485  TrackerWrapper *tracker = it->second;
4486  vpImageConvert::convert(I_color, m_I);
4487  tracker->setPose(m_I, cdMo);
4488  } else {
4489  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4490  m_referenceCameraName.c_str());
4491  }
4492 }
4493 
4505 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
4506  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4507 {
4508  if (m_mapOfTrackers.size() == 2) {
4509  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4510  it->second->setPose(I1, c1Mo);
4511 
4512  ++it;
4513 
4514  it->second->setPose(I2, c2Mo);
4515 
4516  it = m_mapOfTrackers.find(m_referenceCameraName);
4517  if (it != m_mapOfTrackers.end()) {
4518  // Set reference pose
4519  it->second->getPose(cMo);
4520  } else {
4521  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4522  m_referenceCameraName.c_str());
4523  }
4524  } else {
4525  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4526  m_mapOfTrackers.size());
4527  }
4528 }
4529 
4541 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
4542  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4543 {
4544  if (m_mapOfTrackers.size() == 2) {
4545  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4546  it->second->setPose(I_color1, c1Mo);
4547 
4548  ++it;
4549 
4550  it->second->setPose(I_color2, c2Mo);
4551 
4552  it = m_mapOfTrackers.find(m_referenceCameraName);
4553  if (it != m_mapOfTrackers.end()) {
4554  // Set reference pose
4555  it->second->getPose(cMo);
4556  } else {
4557  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4558  m_referenceCameraName.c_str());
4559  }
4560  } else {
4561  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4562  m_mapOfTrackers.size());
4563  }
4564 }
4565 
4581 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4582  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4583 {
4584  // Set the reference cMo
4585  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4586  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4587  mapOfImages.find(m_referenceCameraName);
4588  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4589 
4590  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4591  TrackerWrapper *tracker = it_tracker->second;
4592  tracker->setPose(*it_img->second, it_camPose->second);
4593  tracker->getPose(cMo);
4594  } else {
4595  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4596  }
4597 
4598  // Vector of missing pose matrices for cameras
4599  std::vector<std::string> vectorOfMissingCameraPoses;
4600 
4601  // Set pose for the specified cameras
4602  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4603  if (it_tracker->first != m_referenceCameraName) {
4604  it_img = mapOfImages.find(it_tracker->first);
4605  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4606 
4607  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4608  // Set pose
4609  TrackerWrapper *tracker = it_tracker->second;
4610  tracker->setPose(*it_img->second, it_camPose->second);
4611  } else {
4612  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4613  }
4614  }
4615  }
4616 
4617  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4618  it != vectorOfMissingCameraPoses.end(); ++it) {
4619  it_img = mapOfImages.find(*it);
4620    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4621        m_mapOfCameraTransformationMatrix.find(*it);
4622 
4623  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4624  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
4625  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4626    } else {
4627      throw vpException(vpTrackingException::fatalError,
4628                        "Missing image or missing camera transformation "
4629  "matrix! Cannot set pose for camera: %s!",
4630  it->c_str());
4631  }
4632  }
4633 }
4634 
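/*
  A short sketch for the map-based setPose() overloads above, usable with any number of
  cameras. Keys must match the camera names of the trackers; a camera whose pose is omitted
  is recovered from the reference pose and the corresponding camera transformation matrix.
  Names are illustrative.

  \code
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera1"] = &I1;
  mapOfImages["Camera2"] = &I2;

  std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
  mapOfPoses["Camera1"] = c1Mo;   // the pose of the reference camera is mandatory
  // "Camera2" may be omitted if the corresponding camera transformation matrix was set

  tracker.setPose(mapOfImages, mapOfPoses);
  \endcode
*/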
4650 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
4651  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4652 {
4653  // Set the reference cMo
4654  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4655  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
4656  mapOfColorImages.find(m_referenceCameraName);
4657  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4658 
4659  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
4660  TrackerWrapper *tracker = it_tracker->second;
4661  tracker->setPose(*it_img->second, it_camPose->second);
4662  tracker->getPose(cMo);
4663  } else {
4664  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4665  }
4666 
4667  // Vector of missing pose matrices for cameras
4668  std::vector<std::string> vectorOfMissingCameraPoses;
4669 
4670  // Set pose for the specified cameras
4671  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4672  if (it_tracker->first != m_referenceCameraName) {
4673  it_img = mapOfColorImages.find(it_tracker->first);
4674  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4675 
4676  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
4677  // Set pose
4678  TrackerWrapper *tracker = it_tracker->second;
4679  tracker->setPose(*it_img->second, it_camPose->second);
4680  } else {
4681  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4682  }
4683  }
4684  }
4685 
4686  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4687  it != vectorOfMissingCameraPoses.end(); ++it) {
4688  it_img = mapOfColorImages.find(*it);
4689    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4690        m_mapOfCameraTransformationMatrix.find(*it);
4691 
4692  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4693  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
4694  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4695    } else {
4696      throw vpException(vpTrackingException::fatalError,
4697                        "Missing image or missing camera transformation "
4698  "matrix! Cannot set pose for camera: %s!",
4699  it->c_str());
4700  }
4701  }
4702 }
4703 
4718 void vpMbGenericTracker::setProjectionErrorComputation(const bool &flag)
4719 {
4720   vpMbTracker::setProjectionErrorComputation(flag);
4721 
4722  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4723  it != m_mapOfTrackers.end(); ++it) {
4724  TrackerWrapper *tracker = it->second;
4725  tracker->setProjectionErrorComputation(flag);
4726  }
4727 }
4728 
4732 void vpMbGenericTracker::setProjectionErrorDisplay(const bool display)
4733 {
4734   vpMbTracker::setProjectionErrorDisplay(display);
4735 
4736  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4737  it != m_mapOfTrackers.end(); ++it) {
4738  TrackerWrapper *tracker = it->second;
4739  tracker->setProjectionErrorDisplay(display);
4740  }
4741 }
4742 
4746 void vpMbGenericTracker::setProjectionErrorDisplayArrowLength(const unsigned int length)
4747 {
4748   vpMbTracker::setProjectionErrorDisplayArrowLength(length);
4749 
4750  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4751  it != m_mapOfTrackers.end(); ++it) {
4752  TrackerWrapper *tracker = it->second;
4753  tracker->setProjectionErrorDisplayArrowLength(length);
4754  }
4755 }
4756 
4757 void vpMbGenericTracker::setProjectionErrorDisplayArrowThickness(const unsigned int thickness)
4758 {
4759   vpMbTracker::setProjectionErrorDisplayArrowThickness(thickness);
4760 
4761  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4762  it != m_mapOfTrackers.end(); ++it) {
4763  TrackerWrapper *tracker = it->second;
4764  tracker->setProjectionErrorDisplayArrowThickness(thickness);
4765  }
4766 }
4767 
4773 void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
4774 {
4775  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
4776  if (it != m_mapOfTrackers.end()) {
4777  m_referenceCameraName = referenceCameraName;
4778  } else {
4779  std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
4780  }
4781 }
4782 
4783 void vpMbGenericTracker::setScanLineVisibilityTest(const bool &v)
4784 {
4785   vpMbTracker::setScanLineVisibilityTest(v);
4786 
4787  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4788  it != m_mapOfTrackers.end(); ++it) {
4789  TrackerWrapper *tracker = it->second;
4790  tracker->setScanLineVisibilityTest(v);
4791  }
4792 }
4793 
4805 void vpMbGenericTracker::setTrackerType(const int type)
4806 {
4807  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4808  it != m_mapOfTrackers.end(); ++it) {
4809  TrackerWrapper *tracker = it->second;
4810  tracker->setTrackerType(type);
4811  }
4812 }
4813 
4823 void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
4824 {
4825  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
4826  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4827  if (it_tracker != m_mapOfTrackers.end()) {
4828  TrackerWrapper *tracker = it_tracker->second;
4829  tracker->setTrackerType(it->second);
4830  }
4831  }
4832 }
4833 
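/*
  A sketch of a per-camera tracker type configuration using the overload above. Types can be
  combined with a bitwise or; KLT_TRACKER is only available when the KLT module and OpenCV
  are present. Camera names are illustrative.

  \code
  std::map<std::string, int> mapOfTrackerTypes;
  mapOfTrackerTypes["Camera1"] = vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER;
  mapOfTrackerTypes["Camera2"] = vpMbGenericTracker::DEPTH_DENSE_TRACKER;
  tracker.setTrackerType(mapOfTrackerTypes);
  \endcode
*/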
4843 void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
4844 {
4845  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4846  it != m_mapOfTrackers.end(); ++it) {
4847  TrackerWrapper *tracker = it->second;
4848  tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
4849  }
4850 }
4851 
4861 void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
4862 {
4863  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4864  it != m_mapOfTrackers.end(); ++it) {
4865  TrackerWrapper *tracker = it->second;
4866  tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
4867  }
4868 }
4869 
4879 void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
4880 {
4881  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4882  it != m_mapOfTrackers.end(); ++it) {
4883  TrackerWrapper *tracker = it->second;
4884  tracker->setUseEdgeTracking(name, useEdgeTracking);
4885  }
4886 }
4887 
4888 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4889 
4898 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
4899 {
4900  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4901  it != m_mapOfTrackers.end(); ++it) {
4902  TrackerWrapper *tracker = it->second;
4903  tracker->setUseKltTracking(name, useKltTracking);
4904  }
4905 }
4906 #endif
4907 
4908 void vpMbGenericTracker::testTracking()
4909 {
4910   // Tracking is declared as failed only if testTracking() has failed for all the trackers
4911  bool isOneTestTrackingOk = false;
4912  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4913  it != m_mapOfTrackers.end(); ++it) {
4914  TrackerWrapper *tracker = it->second;
4915  try {
4916  tracker->testTracking();
4917  isOneTestTrackingOk = true;
4918  } catch (...) {
4919  }
4920  }
4921 
4922  if (!isOneTestTrackingOk) {
4923  std::ostringstream oss;
4924  oss << "Not enough moving edges to track the object. Try to reduce the "
4925  "threshold="
4926  << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
4927     throw vpTrackingException(vpTrackingException::fatalError, oss.str());
4928   }
4929 }
4930 
4940 void vpMbGenericTracker::track(const vpImage<unsigned char> &I)
4941 {
4942  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
4943  mapOfImages[m_referenceCameraName] = &I;
4944 
4945  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
4946  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
4947 
4948  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
4949 }
4950 
4960 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color)
4961 {
4962  std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
4963  mapOfColorImages[m_referenceCameraName] = &I_color;
4964 
4965  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
4966  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
4967 
4968  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
4969 }
4970 
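/*
  A minimal monocular tracking loop built on the track() overloads above, assuming the model,
  the camera parameters and the initial pose were set beforehand (acquisition code is
  illustrative).

  \code
  vpImage<unsigned char> I;
  vpHomogeneousMatrix cMo;
  vpCameraParameters cam;
  tracker.getCameraParameters(cam);

  while (acquisition_is_running) {   // illustrative loop condition
    // ... acquire a new image in I ...
    tracker.track(I);                // single (reference) camera overload
    tracker.getPose(cMo);
    tracker.display(I, cMo, cam, vpColor::red, 2);
  }
  \endcode
*/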
4981 void vpMbGenericTracker::track(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2)
4982 {
4983  if (m_mapOfTrackers.size() == 2) {
4984  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4985  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
4986  mapOfImages[it->first] = &I1;
4987  ++it;
4988 
4989  mapOfImages[it->first] = &I2;
4990 
4991  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
4992  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
4993 
4994  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
4995  } else {
4996  std::stringstream ss;
4997  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
4998  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
4999  }
5000 }
5001 
5012 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &_colorI2)
5013 {
5014  if (m_mapOfTrackers.size() == 2) {
5015  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5016  std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5017  mapOfImages[it->first] = &I_color1;
5018  ++it;
5019 
5020  mapOfImages[it->first] = &_colorI2;
5021 
5022  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5023  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5024 
5025  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5026  } else {
5027  std::stringstream ss;
5028  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5029  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5030  }
5031 }
5032 
5040 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5041 {
5042  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5043  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5044 
5045  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5046 }
5047 
5055 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5056 {
5057  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5058  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5059 
5060  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5061 }
5062 
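/*
  A sketch of image-only tracking with several cameras through the map-based track() overloads
  above; camera names are illustrative and must match the tracker names.

  \code
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera1"] = &I1;
  mapOfImages["Camera2"] = &I2;
  tracker.track(mapOfImages);   // no depth data: the point cloud maps stay empty
  \endcode
*/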
5063 #ifdef VISP_HAVE_PCL
5064 
5072 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5073  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5074 {
5075  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5076  it != m_mapOfTrackers.end(); ++it) {
5077  TrackerWrapper *tracker = it->second;
5078 
5079  if ((tracker->m_trackerType & (EDGE_TRACKER |
5080 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5081  KLT_TRACKER |
5082 #endif
5083                                    DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5084       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5085  }
5086 
5087  if (tracker->m_trackerType & (EDGE_TRACKER
5088 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5089  | KLT_TRACKER
5090 #endif
5091  ) &&
5092  mapOfImages[it->first] == NULL) {
5093  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5094  }
5095 
5096  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5097  ! mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5098  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5099  }
5100  }
5101 
5102  preTracking(mapOfImages, mapOfPointClouds);
5103 
5104  try {
5105  computeVVS(mapOfImages);
5106  } catch (...) {
5107  covarianceMatrix = -1;
5108  throw; // throw the original exception
5109  }
5110 
5111  testTracking();
5112 
5113  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5114  it != m_mapOfTrackers.end(); ++it) {
5115  TrackerWrapper *tracker = it->second;
5116 
5117  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5118  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5119  }
5120 
5121  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5122 
5123  if (displayFeatures) {
5124 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5125  if (tracker->m_trackerType & KLT_TRACKER) {
5126  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5127  }
5128 #endif
5129 
5130  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5131  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5132  }
5133  }
5134  }
5135 
5137 }
5138 
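/*
  A minimal sketch of the PCL-based track() overload above, mixing an image-based tracker and
  a depth-based tracker. The point cloud is expected to be organized (width and height set);
  names are illustrative.

  \code
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera1"] = &I;                          // e.g. edge (+ KLT) tracker

  pcl::PointCloud<pcl::PointXYZ>::ConstPtr pointcloud;  // filled by the depth sensor driver
  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
  mapOfPointClouds["Camera2"] = pointcloud;             // e.g. depth dense tracker

  tracker.track(mapOfImages, mapOfPointClouds);
  \endcode
*/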
5147 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5148  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5149 {
5150  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5151  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5152  it != m_mapOfTrackers.end(); ++it) {
5153  TrackerWrapper *tracker = it->second;
5154 
5155  if ((tracker->m_trackerType & (EDGE_TRACKER |
5156 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5157  KLT_TRACKER |
5158 #endif
5159                                    DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5160       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5161  }
5162 
5163  if (tracker->m_trackerType & (EDGE_TRACKER
5164 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5165  | KLT_TRACKER
5166 #endif
5167  ) && mapOfImages[it->first] == NULL) {
5168  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5169  } else if (tracker->m_trackerType & (EDGE_TRACKER
5170 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5171  | KLT_TRACKER
5172 #endif
5173  ) && mapOfImages[it->first] != NULL) {
5174  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5175  mapOfImages[it->first] = &tracker->m_I; //update grayscale image buffer
5176  }
5177 
5178  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5179  ! mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5180  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5181  }
5182  }
5183 
5184  preTracking(mapOfImages, mapOfPointClouds);
5185 
5186  try {
5187  computeVVS(mapOfImages);
5188  } catch (...) {
5189  covarianceMatrix = -1;
5190  throw; // throw the original exception
5191  }
5192 
5193  testTracking();
5194 
5195  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5196  it != m_mapOfTrackers.end(); ++it) {
5197  TrackerWrapper *tracker = it->second;
5198 
5199  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5200  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5201  }
5202 
5203  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5204 
5205  if (displayFeatures) {
5206 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5207  if (tracker->m_trackerType & KLT_TRACKER) {
5208  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5209  }
5210 #endif
5211 
5212  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5213  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5214  }
5215  }
5216  }
5217 
5219 }
5220 #endif
5221 
5232 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5233  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5234  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5235  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5236 {
5237  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5238  it != m_mapOfTrackers.end(); ++it) {
5239  TrackerWrapper *tracker = it->second;
5240 
5241  if ((tracker->m_trackerType & (EDGE_TRACKER |
5242 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5243  KLT_TRACKER |
5244 #endif
5245                                    DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5246       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5247  }
5248 
5249  if (tracker->m_trackerType & (EDGE_TRACKER
5250 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5251  | KLT_TRACKER
5252 #endif
5253  ) &&
5254  mapOfImages[it->first] == NULL) {
5255  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5256  }
5257 
5258  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5259  (mapOfPointClouds[it->first] == NULL)) {
5260  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5261  }
5262  }
5263 
5264  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5265 
5266  try {
5267  computeVVS(mapOfImages);
5268  } catch (...) {
5269  covarianceMatrix = -1;
5270  throw; // throw the original exception
5271  }
5272 
5273  testTracking();
5274 
5275  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5276  it != m_mapOfTrackers.end(); ++it) {
5277  TrackerWrapper *tracker = it->second;
5278 
5279  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5280  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5281  }
5282 
5283  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5284 
5285  if (displayFeatures) {
5286 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5287  if (tracker->m_trackerType & KLT_TRACKER) {
5288  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5289  }
5290 #endif
5291 
5292  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5293  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5294  }
5295  }
5296  }
5297 
5299 }
5300 
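/*
  A sketch of the PCL-free overload above: the depth data are given as one vpColVector
  (X, Y, Z) per point, together with the organized width and height of the depth map.
  Names and dimensions are illustrative.

  \code
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera1"] = &I;

  std::vector<vpColVector> pointcloud;  // filled from the depth sensor, one 3-vector per point
  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
  mapOfPointClouds["Camera2"] = &pointcloud;

  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
  mapOfWidths["Camera2"] = depth_width;     // illustrative dimensions
  mapOfHeights["Camera2"] = depth_height;

  tracker.track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
  \endcode
*/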
5311 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5312  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5313  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5314  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5315 {
5316  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5317  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5318  it != m_mapOfTrackers.end(); ++it) {
5319  TrackerWrapper *tracker = it->second;
5320 
5321  if ((tracker->m_trackerType & (EDGE_TRACKER |
5322 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5323  KLT_TRACKER |
5324 #endif
5325                                    DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5326       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5327  }
5328 
5329  if (tracker->m_trackerType & (EDGE_TRACKER
5330 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5331  | KLT_TRACKER
5332 #endif
5333  ) && mapOfColorImages[it->first] == NULL) {
5334  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5335  } else if (tracker->m_trackerType & (EDGE_TRACKER
5336 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5337  | KLT_TRACKER
5338 #endif
5339  ) && mapOfColorImages[it->first] != NULL) {
5340  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5341  mapOfImages[it->first] = &tracker->m_I; //update grayscale image buffer
5342  }
5343 
5344  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5345  (mapOfPointClouds[it->first] == NULL)) {
5346  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5347  }
5348  }
5349 
5350  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5351 
5352  try {
5353  computeVVS(mapOfImages);
5354  } catch (...) {
5355  covarianceMatrix = -1;
5356  throw; // throw the original exception
5357  }
5358 
5359  testTracking();
5360 
5361  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5362  it != m_mapOfTrackers.end(); ++it) {
5363  TrackerWrapper *tracker = it->second;
5364 
5365  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5366  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5367  }
5368 
5369  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5370 
5371  if (displayFeatures) {
5372 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5373  if (tracker->m_trackerType & KLT_TRACKER) {
5374  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5375  }
5376 #endif
5377 
5378  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5379  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5380  }
5381  }
5382  }
5383 
5385 }
5386 
5388 vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5389  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5390 {
5391  m_lambda = 1.0;
5392  m_maxIter = 30;
5393 
5394 #ifdef VISP_HAVE_OGRE
5395  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5396 
5397  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5398 #endif
5399 }
5400 
5401 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(const int trackerType)
5402  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5403 {
5404  if ((m_trackerType & (EDGE_TRACKER |
5405 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5406  KLT_TRACKER |
5407 #endif
5408                         DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5409     throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
5410  }
5411 
5412  m_lambda = 1.0;
5413  m_maxIter = 30;
5414 
5415 #ifdef VISP_HAVE_OGRE
5416  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5417 
5418  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5419 #endif
5420 }
5421 
5422 vpMbGenericTracker::TrackerWrapper::~TrackerWrapper() { }
5423 
5424 // Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
5425 void vpMbGenericTracker::TrackerWrapper::computeVVS(const vpImage<unsigned char> *const ptr_I)
5426 {
5427  computeVVSInit(ptr_I);
5428 
5429  if (m_error.getRows() < 4) {
5430  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
5431  }
5432 
5433  double normRes = 0;
5434  double normRes_1 = -1;
5435  unsigned int iter = 0;
5436 
5437  double factorEdge = 1.0;
5438 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5439  double factorKlt = 1.0;
5440 #endif
5441  double factorDepth = 1.0;
5442  double factorDepthDense = 1.0;
5443 
5444  vpMatrix LTL;
5445  vpColVector LTR, v;
5446  vpColVector error_prev;
5447 
5448  double mu = m_initialMu;
5449  vpHomogeneousMatrix cMo_prev;
5450 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5451  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
5452 #endif
5453  bool isoJoIdentity_ = true;
5454 
5455  // Covariance
5456  vpColVector W_true(m_error.getRows());
5457  vpMatrix L_true, LVJ_true;
5458 
5459  unsigned int nb_edge_features = m_error_edge.getRows();
5460 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5461  unsigned int nb_klt_features = m_error_klt.getRows();
5462 #endif
5463  unsigned int nb_depth_features = m_error_depthNormal.getRows();
5464  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
5465 
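  // Levenberg-Marquardt style minimization: iterate until the change of the robustly
  // weighted residual norm
  //   normRes = sqrt( sum_i(w_i * e_i^2) / sum_i(w_i) )
  // drops below m_stopCriteriaEpsilon or m_maxIter iterations are reached, with w_i the
  // M-estimator weight and e_i the residual of feature i.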
5466  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
5467     computeVVSInteractionMatrixAndResidu(ptr_I);
5468 
5469  bool reStartFromLastIncrement = false;
5470  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
5471 
5472 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5473  if (reStartFromLastIncrement) {
5474  if (m_trackerType & KLT_TRACKER) {
5475  ctTc0 = ctTc0_Prev;
5476  }
5477  }
5478 #endif
5479 
5480  if (!reStartFromLastIncrement) {
5481       computeVVSWeights();
5482 
5483  if (computeCovariance) {
5484  L_true = m_L;
5485  if (!isoJoIdentity_) {
5486           vpVelocityTwistMatrix cVo;
5487           cVo.buildFrom(cMo);
5488  LVJ_true = (m_L * cVo * oJo);
5489  }
5490  }
5491 
5492       vpVelocityTwistMatrix cVo;
5493       if (iter == 0) {
5494  isoJoIdentity_ = true;
5495  oJo.eye();
5496 
5497  // If all the 6 dof should be estimated, we check if the interaction
5498  // matrix is full rank. If not we remove automatically the dof that
5499         // cannot be estimated. This is particularly useful when considering
5500  // circles (rank 5) and cylinders (rank 4)
5501  if (isoJoIdentity_) {
5502  cVo.buildFrom(cMo);
5503 
5504  vpMatrix K; // kernel
5505  unsigned int rank = (m_L * cVo).kernel(K);
5506  if (rank == 0) {
5507  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
5508  }
5509 
5510  if (rank != 6) {
5511  vpMatrix I; // Identity
5512  I.eye(6);
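            // With K the orthonormal kernel basis of (m_L * cVo) returned by kernel(),
            // K.AtA() = K^T K projects onto the unobservable motions, so oJo keeps only
            // the dof that can actually be estimated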
5513  oJo = I - K.AtA();
5514 
5515  isoJoIdentity_ = false;
5516  }
5517  }
5518  }
5519 
5520  // Weighting
5521  double num = 0;
5522  double den = 0;
5523 
5524  unsigned int start_index = 0;
5525  if (m_trackerType & EDGE_TRACKER) {
5526  for (unsigned int i = 0; i < nb_edge_features; i++) {
5527  double wi = m_w_edge[i] * m_factor[i] * factorEdge;
5528  W_true[i] = wi;
5529  m_weightedError[i] = wi * m_error[i];
5530 
5531  num += wi * vpMath::sqr(m_error[i]);
5532  den += wi;
5533 
5534  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5535  m_L[i][j] *= wi;
5536  }
5537  }
5538 
5539  start_index += nb_edge_features;
5540  }
5541 
5542 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5543  if (m_trackerType & KLT_TRACKER) {
5544  for (unsigned int i = 0; i < nb_klt_features; i++) {
5545  double wi = m_w_klt[i] * factorKlt;
5546  W_true[start_index + i] = wi;
5547  m_weightedError[start_index + i] = wi * m_error_klt[i];
5548 
5549  num += wi * vpMath::sqr(m_error[start_index + i]);
5550  den += wi;
5551 
5552  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5553  m_L[start_index + i][j] *= wi;
5554  }
5555  }
5556 
5557  start_index += nb_klt_features;
5558  }
5559 #endif
5560 
5561  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5562  for (unsigned int i = 0; i < nb_depth_features; i++) {
5563  double wi = m_w_depthNormal[i] * factorDepth;
5564  m_w[start_index + i] = m_w_depthNormal[i];
5565  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5566 
5567  num += wi * vpMath::sqr(m_error[start_index + i]);
5568  den += wi;
5569 
5570  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5571  m_L[start_index + i][j] *= wi;
5572  }
5573  }
5574 
5575  start_index += nb_depth_features;
5576  }
5577 
5578  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5579  for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
5580  double wi = m_w_depthDense[i] * factorDepthDense;
5581  m_w[start_index + i] = m_w_depthDense[i];
5582  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5583 
5584  num += wi * vpMath::sqr(m_error[start_index + i]);
5585  den += wi;
5586 
5587  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5588  m_L[start_index + i][j] *= wi;
5589  }
5590  }
5591 
5592  // start_index += nb_depth_dense_features;
5593  }
5594 
5595  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
5596 
5597  cMo_prev = cMo;
5598 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5599  if (m_trackerType & KLT_TRACKER) {
5600  ctTc0_Prev = ctTc0;
5601  }
5602 #endif
5603 
5604       cMo = vpExponentialMap::direct(v).inverse() * cMo;
5605 
5606 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5607  if (m_trackerType & KLT_TRACKER) {
5608  ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
5609  }
5610 #endif
5611  normRes_1 = normRes;
5612 
5613  normRes = sqrt(num / den);
5614  }
5615 
5616  iter++;
5617  }
5618 
5619  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
5620 
5621  if (m_trackerType & EDGE_TRACKER) {
5623  }
5624 }
5625 
5626 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
5627 {
5628  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
5629  "TrackerWrapper::computeVVSInit("
5630  ") should not be called!");
5631 }
5632 
5633 void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
5634 {
5635  initMbtTracking(ptr_I);
5636 
5637  unsigned int nbFeatures = 0;
5638 
5639  if (m_trackerType & EDGE_TRACKER) {
5640  nbFeatures += m_error_edge.getRows();
5641  } else {
5642  m_error_edge.clear();
5643  m_weightedError_edge.clear();
5644  m_L_edge.clear();
5645  m_w_edge.clear();
5646  }
5647 
5648 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5649  if (m_trackerType & KLT_TRACKER) {
5650     vpMbKltTracker::computeVVSInit();
5651     nbFeatures += m_error_klt.getRows();
5652  } else {
5653  m_error_klt.clear();
5654  m_weightedError_klt.clear();
5655  m_L_klt.clear();
5656  m_w_klt.clear();
5657  }
5658 #endif
5659 
5660  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5661     vpMbDepthNormalTracker::computeVVSInit();
5662     nbFeatures += m_error_depthNormal.getRows();
5663  } else {
5664  m_error_depthNormal.clear();
5665  m_weightedError_depthNormal.clear();
5666  m_L_depthNormal.clear();
5667  m_w_depthNormal.clear();
5668  }
5669 
5670  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5671     vpMbDepthDenseTracker::computeVVSInit();
5672     nbFeatures += m_error_depthDense.getRows();
5673  } else {
5674  m_error_depthDense.clear();
5675  m_weightedError_depthDense.clear();
5676  m_L_depthDense.clear();
5677  m_w_depthDense.clear();
5678  }
5679 
5680  m_L.resize(nbFeatures, 6, false, false);
5681  m_error.resize(nbFeatures, false);
5682 
5683  m_weightedError.resize(nbFeatures, false);
5684  m_w.resize(nbFeatures, false);
5685  m_w = 1;
5686 }
5687 
5688 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu()
5689 {
5690  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
5691  "TrackerWrapper::"
5692  "computeVVSInteractionMatrixAndR"
5693  "esidu() should not be called!");
5694 }
5695 
5696 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu(const vpImage<unsigned char> *const ptr_I)
5697 {
5698  if (m_trackerType & EDGE_TRACKER) {
5699     vpMbEdgeTracker::computeVVSInteractionMatrixAndResidu(*ptr_I);
5700   }
5701 
5702 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5703  if (m_trackerType & KLT_TRACKER) {
5704     vpMbKltTracker::computeVVSInteractionMatrixAndResidu();
5705   }
5706 #endif
5707 
5708  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5709     vpMbDepthNormalTracker::computeVVSInteractionMatrixAndResidu();
5710   }
5711 
5712  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5713     vpMbDepthDenseTracker::computeVVSInteractionMatrixAndResidu();
5714   }
5715 
5716  unsigned int start_index = 0;
5717  if (m_trackerType & EDGE_TRACKER) {
5718  m_L.insert(m_L_edge, start_index, 0);
5719  m_error.insert(start_index, m_error_edge);
5720 
5721  start_index += m_error_edge.getRows();
5722  }
5723 
5724 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5725  if (m_trackerType & KLT_TRACKER) {
5726  m_L.insert(m_L_klt, start_index, 0);
5727  m_error.insert(start_index, m_error_klt);
5728 
5729  start_index += m_error_klt.getRows();
5730  }
5731 #endif
5732 
5733  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5734  m_L.insert(m_L_depthNormal, start_index, 0);
5735  m_error.insert(start_index, m_error_depthNormal);
5736 
5737  start_index += m_error_depthNormal.getRows();
5738  }
5739 
5740  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5741  m_L.insert(m_L_depthDense, start_index, 0);
5742  m_error.insert(start_index, m_error_depthDense);
5743 
5744  // start_index += m_error_depthDense.getRows();
5745  }
5746 }
5747 
5748 void vpMbGenericTracker::TrackerWrapper::computeVVSWeights()
5749 {
5750  unsigned int start_index = 0;
5751 
5752  if (m_trackerType & EDGE_TRACKER) {
5754  m_w.insert(start_index, m_w_edge);
5755 
5756  start_index += m_w_edge.getRows();
5757  }
5758 
5759 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5760  if (m_trackerType & KLT_TRACKER) {
5761  vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
5762  m_w.insert(start_index, m_w_klt);
5763 
5764  start_index += m_w_klt.getRows();
5765  }
5766 #endif
5767 
5768  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5769  if (m_depthNormalUseRobust) {
5770  vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
5771  m_w.insert(start_index, m_w_depthNormal);
5772  }
5773 
5774  start_index += m_w_depthNormal.getRows();
5775  }
5776 
5777  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5779  m_w.insert(start_index, m_w_depthDense);
5780 
5781  // start_index += m_w_depthDense.getRows();
5782  }
5783 }
5784 
5785 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo_,
5786  const vpCameraParameters &camera, const vpColor &col,
5787  const unsigned int thickness, const bool displayFullModel)
5788 {
5789  if (displayFeatures) {
5790  std::vector<std::vector<double> > features = getFeaturesForDisplay();
5791  for (size_t i = 0; i < features.size(); i++) {
5792  if (vpMath::equal(features[i][0], 0)) {
5793  vpImagePoint ip(features[i][1], features[i][2]);
5794  int state = static_cast<int>(features[i][3]);
5795 
5796  switch (state) {
5797         case vpMeSite::NO_SUPPRESSION:
5798           vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
5799           break;
5800 
5801  case vpMeSite::CONSTRAST:
5802  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
5803  break;
5804 
5805  case vpMeSite::THRESHOLD:
5806           vpDisplay::displayCross(I, ip, 3, vpColor::purple, 1);
5807           break;
5808 
5809  case vpMeSite::M_ESTIMATOR:
5810  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
5811  break;
5812 
5813  case vpMeSite::TOO_NEAR:
5814  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
5815  break;
5816 
5817  default:
5818           vpDisplay::displayCross(I, ip, 3, vpColor::yellow, 1);
5819         }
5820  } else if (vpMath::equal(features[i][0], 1)) {
5821  vpImagePoint ip1(features[i][1], features[i][2]);
5823 
5824  vpImagePoint ip2(features[i][3], features[i][4]);
5825  double id = features[i][5];
5826  std::stringstream ss;
5827  ss << id;
5828  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
5829  } else if (vpMath::equal(features[i][0], 2)) {
5830  vpImagePoint im_centroid(features[i][1], features[i][2]);
5831  vpImagePoint im_extremity(features[i][3], features[i][4]);
5832  bool desired = vpMath::equal(features[i][0], 2);
5833  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
5834  }
5835  }
5836  }
5837 
5838  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo_, camera, displayFullModel);
5839  for (size_t i = 0; i < models.size(); i++) {
5840  if (vpMath::equal(models[i][0], 0)) {
5841  vpImagePoint ip1(models[i][1], models[i][2]);
5842  vpImagePoint ip2(models[i][3], models[i][4]);
5843  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
5844  } else if (vpMath::equal(models[i][0], 1)) {
5845  vpImagePoint center(models[i][1], models[i][2]);
5846  double mu20 = models[i][3];
5847  double mu11 = models[i][4];
5848  double mu02 = models[i][5];
5849  vpDisplay::displayEllipse(I, center, mu20, mu11, mu02, true, col, thickness);
5850  }
5851  }
5852 
5853 #ifdef VISP_HAVE_OGRE
5854  if ((m_trackerType & EDGE_TRACKER)
5855  #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5856  || (m_trackerType & KLT_TRACKER)
5857  #endif
5858  ) {
5859  if (useOgre)
5860  faces.displayOgre(cMo_);
5861  }
5862 #endif
5863 }
5864 
5865 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo_,
5866  const vpCameraParameters &camera, const vpColor &col,
5867  const unsigned int thickness, const bool displayFullModel)
5868 {
5869  if (displayFeatures) {
5870  std::vector<std::vector<double> > features = getFeaturesForDisplay();
5871  for (size_t i = 0; i < features.size(); i++) {
5872  if (vpMath::equal(features[i][0], 0)) {
5873  vpImagePoint ip(features[i][1], features[i][2]);
5874  int state = static_cast<int>(features[i][3]);
5875 
5876  switch (state) {
5877         case vpMeSite::NO_SUPPRESSION:
5878           vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
5879           break;
5880 
5881  case vpMeSite::CONSTRAST:
5882  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
5883  break;
5884 
5885  case vpMeSite::THRESHOLD:
5886           vpDisplay::displayCross(I, ip, 3, vpColor::purple, 1);
5887           break;
5888 
5889  case vpMeSite::M_ESTIMATOR:
5890  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
5891  break;
5892 
5893  case vpMeSite::TOO_NEAR:
5894  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
5895  break;
5896 
5897  default:
5898           vpDisplay::displayCross(I, ip, 3, vpColor::yellow, 1);
5899         }
5900  } else if (vpMath::equal(features[i][0], 1)) {
5901  vpImagePoint ip1(features[i][1], features[i][2]);
5903 
5904  vpImagePoint ip2(features[i][3], features[i][4]);
5905  double id = features[i][5];
5906  std::stringstream ss;
5907  ss << id;
5908  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
5909  } else if (vpMath::equal(features[i][0], 2)) {
5910  vpImagePoint im_centroid(features[i][1], features[i][2]);
5911  vpImagePoint im_extremity(features[i][3], features[i][4]);
5912  bool desired = vpMath::equal(features[i][0], 2);
5913  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
5914  }
5915  }
5916  }
5917 
5918  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo_, camera, displayFullModel);
5919  for (size_t i = 0; i < models.size(); i++) {
5920  if (vpMath::equal(models[i][0], 0)) {
5921  vpImagePoint ip1(models[i][1], models[i][2]);
5922  vpImagePoint ip2(models[i][3], models[i][4]);
5923  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
5924  } else if (vpMath::equal(models[i][0], 1)) {
5925  vpImagePoint center(models[i][1], models[i][2]);
5926  double mu20 = models[i][3];
5927  double mu11 = models[i][4];
5928  double mu02 = models[i][5];
5929  vpDisplay::displayEllipse(I, center, mu20, mu11, mu02, true, col, thickness);
5930  }
5931  }
5932 
5933 #ifdef VISP_HAVE_OGRE
5934  if ((m_trackerType & EDGE_TRACKER)
5935  #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5936  || (m_trackerType & KLT_TRACKER)
5937  #endif
5938  ) {
5939  if (useOgre)
5940  faces.displayOgre(cMo_);
5941  }
5942 #endif
5943 }
5944 
5945 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
5946 {
5947  std::vector<std::vector<double> > features;
5948 
5949  if (m_trackerType & EDGE_TRACKER) {
5950  //m_featuresToBeDisplayedEdge updated after computeVVS()
5951  features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
5952  }
5953 
5954 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5955  if (m_trackerType & KLT_TRACKER) {
5956  //m_featuresToBeDisplayedKlt updated after postTracking()
5957  features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
5958  }
5959 #endif
5960 
5961  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5962  //m_featuresToBeDisplayedDepthNormal updated after postTracking()
5963  features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(), m_featuresToBeDisplayedDepthNormal.end());
5964  }
5965 
5966  return features;
5967 }
5968 
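/*
  Row layout of the vectors returned by getFeaturesForDisplay(), as consumed by the
  display() methods above:
   - [0] == 0 : moving-edge site, [1] = row, [2] = col, [3] = vpMeSite state
   - [0] == 1 : KLT point, [1] = row, [2] = col, [3]/[4] = anchor of the id text, [5] = point id
   - [0] == 2 : depth-normal feature, [1]/[2] = arrow centroid, [3]/[4] = arrow extremity
*/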
5969 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width, unsigned int height,
5970  const vpHomogeneousMatrix &cMo_,
5971  const vpCameraParameters &camera,
5972  const bool displayFullModel)
5973 {
5974  std::vector<std::vector<double> > models;
5975 
5976   // Do not add the same models multiple times
5977  if (m_trackerType == EDGE_TRACKER) {
5978  models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo_, camera, displayFullModel);
5979  }
5980 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5981  else if (m_trackerType == KLT_TRACKER) {
5982  models = vpMbKltTracker::getModelForDisplay(width, height, cMo_, camera, displayFullModel);
5983  }
5984 #endif
5985  else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
5986  models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo_, camera, displayFullModel);
5987  } else if (m_trackerType == DEPTH_DENSE_TRACKER) {
5988  models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo_, camera, displayFullModel);
5989  } else {
5990  //Edge and KLT trackers use the same primitives
5991  if (m_trackerType & EDGE_TRACKER) {
5992  std::vector<std::vector<double> > edgeModels = vpMbEdgeTracker::getModelForDisplay(width, height, cMo_, camera, displayFullModel);
5993  models.insert(models.end(), edgeModels.begin(), edgeModels.end());
5994  }
5995 
5996  //Depth dense and depth normal trackers use the same primitives
5997  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5998  std::vector<std::vector<double> > depthDenseModels = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo_, camera, displayFullModel);
5999  models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6000  }
6001  }
6002 
6003  return models;
6004 }
6005 
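/*
  Row layout of the vectors returned by getModelForDisplay(): a row starting with 0 is a
  line segment ([1],[2] and [3],[4] are its two image points), a row starting with 1 is an
  ellipse ([1],[2] = center, [3],[4],[5] = mu20, mu11, mu02). A minimal drawing sketch using
  the same convention as the display() methods above ('models', 'I' are illustrative):

  \code
  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, vpColor::red, 2);
    }
  }
  \endcode
*/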
6006 void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
6007 {
6008  if (!modelInitialised) {
6009  throw vpException(vpException::fatalError, "model not initialized");
6010  }
6011 
6012  if (useScanLine || clippingFlag > 3)
6013  cam.computeFov(I.getWidth(), I.getHeight());
6014 
6015  bool reInitialisation = false;
6016  if (!useOgre) {
6017  faces.setVisible(I.getWidth(), I.getHeight(), cam, cMo, angleAppears, angleDisappears, reInitialisation);
6018  } else {
6019 #ifdef VISP_HAVE_OGRE
6020  if (!faces.isOgreInitialised()) {
6022 
6024  faces.initOgre(cam);
6025  // Turn off Ogre config dialog display for the next call to this
6026  // function since settings are saved in the ogre.cfg file and used
6027  // during the next call
6028  ogreShowConfigDialog = false;
6029  }
6030 
6031  faces.setVisibleOgre(I.getWidth(), I.getHeight(), cam, cMo, angleAppears, angleDisappears, reInitialisation);
6032 #else
6033  faces.setVisible(I.getWidth(), I.getHeight(), cam, cMo, angleAppears, angleDisappears, reInitialisation);
6034 #endif
6035  }
6036 
6037  if (useScanLine) {
6038     faces.computeClippedPolygons(cMo, cam);
6039     faces.computeScanLineRender(cam, I.getWidth(), I.getHeight());
6040   }
6041 
6042 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6043  if (m_trackerType & KLT_TRACKER)
6045 #endif
6046 
6047  if (m_trackerType & EDGE_TRACKER) {
6049 
6050  bool a = false;
6051  vpMbEdgeTracker::visibleFace(I, cMo, a); // should be useless, but keep it for nbvisiblepolygone
6052 
6053  initMovingEdge(I, cMo);
6054  }
6055 
6056  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6057  vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);
6058 
6059  if (m_trackerType & DEPTH_DENSE_TRACKER)
6060  vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
6061 }
6062 
6063 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6064  const double radius, const int idFace, const std::string &name)
6065 {
6066  if (m_trackerType & EDGE_TRACKER)
6067  vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6068 
6069 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6070  if (m_trackerType & KLT_TRACKER)
6071  vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6072 #endif
6073 }
6074 
6075 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, const double radius,
6076  const int idFace, const std::string &name)
6077 {
6078  if (m_trackerType & EDGE_TRACKER)
6079  vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6080 
6081 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6082  if (m_trackerType & KLT_TRACKER)
6083  vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6084 #endif
6085 }
6086 
6087 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
6088 {
6089   if (m_trackerType & EDGE_TRACKER)
6090     vpMbEdgeTracker::initFaceFromCorners(polygon);
6091 
6092 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6093   if (m_trackerType & KLT_TRACKER)
6094     vpMbKltTracker::initFaceFromCorners(polygon);
6095 #endif
6096 
6097   if (m_trackerType & DEPTH_NORMAL_TRACKER)
6098     vpMbDepthNormalTracker::initFaceFromCorners(polygon);
6099 
6100   if (m_trackerType & DEPTH_DENSE_TRACKER)
6101     vpMbDepthDenseTracker::initFaceFromCorners(polygon);
6102 }
6103 
6104 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
6105 {
6106   if (m_trackerType & EDGE_TRACKER)
6107     vpMbEdgeTracker::initFaceFromLines(polygon);
6108 
6109 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6110   if (m_trackerType & KLT_TRACKER)
6111     vpMbKltTracker::initFaceFromLines(polygon);
6112 #endif
6113 
6114   if (m_trackerType & DEPTH_NORMAL_TRACKER)
6115     vpMbDepthNormalTracker::initFaceFromLines(polygon);
6116 
6117   if (m_trackerType & DEPTH_DENSE_TRACKER)
6118     vpMbDepthDenseTracker::initFaceFromLines(polygon);
6119 }
6120 
6121 void vpMbGenericTracker::TrackerWrapper::initMbtTracking(const vpImage<unsigned char> *const ptr_I)
6122 {
6123  if (m_trackerType & EDGE_TRACKER) {
6126  }
6127 }
6128 
6129 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile)
6130 {
6131  // Load projection error config
6132  vpMbTracker::loadConfigFile(configFile);
6133 
6134 #ifdef VISP_HAVE_PUGIXML
6136 
6137  xmlp.setCameraParameters(cam);
6140 
6141  // Edge
6142  xmlp.setEdgeMe(me);
6143 
6144  // KLT
6145  xmlp.setKltMaxFeatures(10000);
6146  xmlp.setKltWindowSize(5);
6147  xmlp.setKltQuality(0.01);
6148  xmlp.setKltMinDistance(5);
6149  xmlp.setKltHarrisParam(0.01);
6150  xmlp.setKltBlockSize(3);
6151  xmlp.setKltPyramidLevels(3);
6152 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6153  xmlp.setKltMaskBorder(maskBorder);
6154 #endif
6155 
6156  // Depth normal
6157  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6158  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6159  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6160  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6161  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6162  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6163 
6164  // Depth dense
6165  xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6166  xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6167 
6168  try {
6169  std::cout << " *********** Parsing XML for";
6170 
6171  std::vector<std::string> tracker_names;
6172  if (m_trackerType & EDGE_TRACKER)
6173  tracker_names.push_back("Edge");
6174 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6175  if (m_trackerType & KLT_TRACKER)
6176  tracker_names.push_back("Klt");
6177 #endif
6178  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6179  tracker_names.push_back("Depth Normal");
6180  if (m_trackerType & DEPTH_DENSE_TRACKER)
6181  tracker_names.push_back("Depth Dense");
6182 
6183  for (size_t i = 0; i < tracker_names.size(); i++) {
6184  std::cout << " " << tracker_names[i];
6185  if (i == tracker_names.size() - 1) {
6186  std::cout << " ";
6187  }
6188  }
6189 
6190  std::cout << "Model-Based Tracker ************ " << std::endl;
6191  xmlp.parse(configFile);
6192  } catch (...) {
6193  throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
6194  }
6195 
6196  vpCameraParameters camera;
6197  xmlp.getCameraParameters(camera);
6198  setCameraParameters(camera);
6199 
6202 
6203   if (xmlp.hasNearClippingDistance())
6204     setNearClippingDistance(xmlp.getNearClippingDistance());
6205 
6206   if (xmlp.hasFarClippingDistance())
6207     setFarClippingDistance(xmlp.getFarClippingDistance());
6208 
6209   if (xmlp.getFovClipping()) {
6210     setClipping(clippingFlag | vpPolygon3D::FOV_CLIPPING);
6211   }
6212 
6213  useLodGeneral = xmlp.getLodState();
6216 
6217  applyLodSettingInConfig = false;
6218  if (this->getNbPolygon() > 0) {
6219  applyLodSettingInConfig = true;
6223  }
6224 
6225  // Edge
6226  vpMe meParser;
6227  xmlp.getEdgeMe(meParser);
6228   vpMbEdgeTracker::setMovingEdge(meParser);
6229 
6230 // KLT
6231 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6232  tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
6233  tracker.setWindowSize((int)xmlp.getKltWindowSize());
6234  tracker.setQuality(xmlp.getKltQuality());
6235  tracker.setMinDistance(xmlp.getKltMinDistance());
6236  tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6237  tracker.setBlockSize((int)xmlp.getKltBlockSize());
6238  tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
6239  maskBorder = xmlp.getKltMaskBorder();
6240 
6241  // if(useScanLine)
6242  faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6243 #endif
6244 
6245  // Depth normal
6251 
6252  // Depth dense
6254 #else
6255  std::cerr << "pugixml third-party is not properly built to read config file: " << configFile << std::endl;
6256 #endif
6257 }
6258 
6259 #ifdef VISP_HAVE_PCL
6260 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6261  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6262 {
6263 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6264  // KLT
6265  if (m_trackerType & KLT_TRACKER) {
6266  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6267  vpMbKltTracker::reinit(*ptr_I);
6268  }
6269  }
6270 #endif
6271 
6272  // Looking for new visible face
6273  if (m_trackerType & EDGE_TRACKER) {
6274  bool newvisibleface = false;
6275  vpMbEdgeTracker::visibleFace(*ptr_I, cMo, newvisibleface);
6276 
6277  if (useScanLine) {
6278       faces.computeClippedPolygons(cMo, cam);
6279       faces.computeScanLineRender(cam, ptr_I->getWidth(), ptr_I->getHeight());
6280  }
6281  }
6282 
6283  // Depth normal
6284  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6285  vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);
6286 
6287  // Depth dense
6288  if (m_trackerType & DEPTH_DENSE_TRACKER)
6289  vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);
6290 
6291  // Edge
6292  if (m_trackerType & EDGE_TRACKER) {
6294 
6296  // Reinit the moving edge for the lines which need it.
6298 
6299  if (computeProjError) {
6301  }
6302  }
6303 }
6304 
6305 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
6306  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6307 {
6308  if (m_trackerType & EDGE_TRACKER) {
6309  try {
6310       vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6311     } catch (...) {
6312  std::cerr << "Error in moving edge tracking" << std::endl;
6313  throw;
6314  }
6315  }
6316 
6317 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6318  if (m_trackerType & KLT_TRACKER) {
6319  try {
6320       vpMbKltTracker::preTracking(*ptr_I);
6321     } catch (const vpException &e) {
6322  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6323  throw;
6324  }
6325  }
6326 #endif
6327 
6328  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6329  try {
6330       vpMbDepthNormalTracker::segmentPointCloud(point_cloud);
6331     } catch (...) {
6332  std::cerr << "Error in Depth normal tracking" << std::endl;
6333  throw;
6334  }
6335  }
6336 
6337  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6338  try {
6339       vpMbDepthDenseTracker::segmentPointCloud(point_cloud);
6340     } catch (...) {
6341  std::cerr << "Error in Depth dense tracking" << std::endl;
6342  throw;
6343  }
6344  }
6345 }
6346 #endif
6347 
6348 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6349  const unsigned int pointcloud_width,
6350  const unsigned int pointcloud_height)
6351 {
6352 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6353  // KLT
6354  if (m_trackerType & KLT_TRACKER) {
6355  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6356  vpMbKltTracker::reinit(*ptr_I);
6357  }
6358  }
6359 #endif
6360 
6361  // Looking for new visible face
6362  if (m_trackerType & EDGE_TRACKER) {
6363  bool newvisibleface = false;
6364  vpMbEdgeTracker::visibleFace(*ptr_I, cMo, newvisibleface);
6365 
6366  if (useScanLine) {
6367       faces.computeClippedPolygons(cMo, cam);
6368       faces.computeScanLineRender(cam, ptr_I->getWidth(), ptr_I->getHeight());
6369  }
6370  }
6371 
6372  // Depth normal
6373  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6374  vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);
6375 
6376  // Depth dense
6377  if (m_trackerType & DEPTH_DENSE_TRACKER)
6378  vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);
6379 
6380  // Edge
6381  if (m_trackerType & EDGE_TRACKER) {
6383 
6385  // Reinit the moving edge for the lines which need it.
6387 
6388  if (computeProjError) {
6390  }
6391  }
6392 }
6393 
6394 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> * const ptr_I,
6395  const std::vector<vpColVector> * const point_cloud,
6396  const unsigned int pointcloud_width,
6397  const unsigned int pointcloud_height)
6398 {
6399  if (m_trackerType & EDGE_TRACKER) {
6400  try {
6401       vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6402     } catch (...) {
6403  std::cerr << "Error in moving edge tracking" << std::endl;
6404  throw;
6405  }
6406  }
6407 
6408 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6409  if (m_trackerType & KLT_TRACKER) {
6410  try {
6411       vpMbKltTracker::preTracking(*ptr_I);
6412     } catch (const vpException &e) {
6413  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6414  throw;
6415  }
6416  }
6417 #endif
6418 
6419  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6420  try {
6421  vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6422  } catch (...) {
6423  std::cerr << "Error in Depth tracking" << std::endl;
6424  throw;
6425  }
6426  }
6427 
6428  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6429  try {
6430  vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6431  } catch (...) {
6432  std::cerr << "Error in Depth dense tracking" << std::endl;
6433  throw;
6434  }
6435  }
6436 }
6437 
6438 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> * const I, const vpImage<vpRGBa> * const I_color,
6439  const std::string &cad_name, const vpHomogeneousMatrix &cMo_, const bool verbose,
6440  const vpHomogeneousMatrix &T)
6441 {
6442  cMo.eye();
6443 
6444  // Edge
6445  vpMbtDistanceLine *l;
6446   vpMbtDistanceCylinder *cy;
6447   vpMbtDistanceCircle *ci;
6448 
6449  for (unsigned int i = 0; i < scales.size(); i++) {
6450  if (scales[i]) {
6451  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
6452  l = *it;
6453  if (l != NULL)
6454  delete l;
6455  l = NULL;
6456  }
6457 
6458  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
6459  ++it) {
6460  cy = *it;
6461  if (cy != NULL)
6462  delete cy;
6463  cy = NULL;
6464  }
6465 
6466  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
6467  ci = *it;
6468  if (ci != NULL)
6469  delete ci;
6470  ci = NULL;
6471  }
6472 
6473  lines[i].clear();
6474  cylinders[i].clear();
6475  circles[i].clear();
6476  }
6477  }
6478 
6479  nline = 0;
6480  ncylinder = 0;
6481  ncircle = 0;
6482  nbvisiblepolygone = 0;
6483 
6484 // KLT
6485 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6486 #if (VISP_HAVE_OPENCV_VERSION < 0x020408)
6487  if (cur != NULL) {
6488  cvReleaseImage(&cur);
6489  cur = NULL;
6490  }
6491 #endif
6492 
6493  // delete the Klt Polygon features
6494  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
6495  vpMbtDistanceKltPoints *kltpoly = *it;
6496  if (kltpoly != NULL) {
6497  delete kltpoly;
6498  }
6499  kltpoly = NULL;
6500  }
6501  kltPolygons.clear();
6502 
6503  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
6504  ++it) {
6505  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
6506  if (kltPolyCylinder != NULL) {
6507  delete kltPolyCylinder;
6508  }
6509  kltPolyCylinder = NULL;
6510  }
6511  kltCylinders.clear();
6512 
6513  // delete the structures used to display circles
6514  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
6515  ci = *it;
6516  if (ci != NULL) {
6517  delete ci;
6518  }
6519  ci = NULL;
6520  }
6521  circles_disp.clear();
6522 
6523  firstInitialisation = true;
6524 
6525 #endif
6526 
6527  // Depth normal
6528  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
6529  delete m_depthNormalFaces[i];
6530  m_depthNormalFaces[i] = NULL;
6531  }
6532  m_depthNormalFaces.clear();
6533 
6534  // Depth dense
6535  for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
6536  delete m_depthDenseFaces[i];
6537  m_depthDenseFaces[i] = NULL;
6538  }
6539  m_depthDenseFaces.clear();
6540 
6541  faces.reset();
6542 
6543  loadModel(cad_name, verbose, T);
6544  if (I) {
6545  initFromPose(*I, cMo_);
6546  } else {
6547  initFromPose(*I_color, cMo_);
6548  }
6549 }
6550 
6551 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
6552  const vpHomogeneousMatrix &cMo_, const bool verbose,
6553  const vpHomogeneousMatrix &T)
6554 {
6555  reInitModel(&I, NULL, cad_name, cMo_, verbose, T);
6556 }
6557 
6558 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
6559  const vpHomogeneousMatrix &cMo_, const bool verbose,
6560  const vpHomogeneousMatrix &T)
6561 {
6562  reInitModel(NULL, &I_color, cad_name, cMo_, verbose, T);
6563 }
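
From user code, these overloads are typically used to swap the CAD model and re-seed the pose in a single call. A minimal sketch, assuming a configured vpMbGenericTracker named tracker and a current grayscale image I_gray (file name and pose values are illustrative):

    // Sketch: re-initialize with a new CAD model at a known pose.
    vpHomogeneousMatrix cMo_new(0.1, 0.0, 0.5, 0.0, 0.0, 0.0); // tx, ty, tz [m], thetaU [rad]
    tracker.reInitModel(I_gray, "model_v2.cao", cMo_new, true); // verbose = true
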
6564 
6565 void vpMbGenericTracker::TrackerWrapper::resetTracker()
6566 {
6567  vpMbEdgeTracker::resetTracker();
6568 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6569  vpMbKltTracker::resetTracker();
6570 #endif
6571  vpMbDepthNormalTracker::resetTracker();
6572  vpMbDepthDenseTracker::resetTracker();
6573 }
6574 
6575 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &camera)
6576 {
6577  this->cam = camera;
6578 
6579  vpMbEdgeTracker::setCameraParameters(cam);
6580 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6581  vpMbKltTracker::setCameraParameters(cam);
6582 #endif
6583  vpMbDepthNormalTracker::setCameraParameters(cam);
6584  vpMbDepthDenseTracker::setCameraParameters(cam);
6585 }
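
The wrapper propagates a single vpCameraParameters object to every enabled sub-tracker. A minimal sketch of setting the intrinsics explicitly instead of loading them from an XML configuration file (values are illustrative):

    // Sketch: perspective camera model without distortion (px, py, u0, v0).
    vpCameraParameters cam_params(839.2, 839.5, 325.1, 243.1);
    tracker.setCameraParameters(cam_params);
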
6586 
6587 void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags)
6588 {
6589  vpMbEdgeTracker::setClipping(flags);
6590 }
6591 
6592 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
6593 {
6594  vpMbEdgeTracker::setFarClippingDistance(dist);
6595 }
6596 
6597 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
6598 {
6599  vpMbEdgeTracker::setNearClippingDistance(dist);
6600 }
6601 
6602 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
6603 {
6604  vpMbTracker::setOgreVisibilityTest(v);
6605 #ifdef VISP_HAVE_OGRE
6606  faces.getOgreContext()->setWindowName("TrackerWrapper");
6607 #endif
6608 }
6609 
6610 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> * const I, const vpImage<vpRGBa> * const I_color,
6611  const vpHomogeneousMatrix &cdMo)
6612 {
6613  bool performKltSetPose = false;
6614  if (I_color) {
6615  vpImageConvert::convert(*I_color, m_I);
6616  }
6617 
6618 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6619  if (m_trackerType & KLT_TRACKER) {
6620  performKltSetPose = true;
6621 
6622  if (useScanLine || clippingFlag > 3) {
6623  cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6624  }
6625 
6626  vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
6627  }
6628 #endif
6629 
6630  if (!performKltSetPose) {
6631  cMo = cdMo;
6632  init(I ? *I : m_I);
6633  return;
6634  }
6635 
6636  if (m_trackerType & EDGE_TRACKER)
6637  resetMovingEdge();
6638 
6639  if (useScanLine) {
6640  faces.computeClippedPolygons(cMo, cam);
6641  faces.computeScanLineRender(cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6642  }
6643 
6644 #if 0
6645  if (m_trackerType & EDGE_TRACKER) {
6646  initPyramid(I, Ipyramid);
6647 
6648  unsigned int i = (unsigned int) scales.size();
6649  do {
6650  i--;
6651  if(scales[i]){
6652  downScale(i);
6653  initMovingEdge(*Ipyramid[i], cMo);
6654  upScale(i);
6655  }
6656  } while(i != 0);
6657 
6658  cleanPyramid(Ipyramid);
6659  }
6660 #else
6661  if (m_trackerType & EDGE_TRACKER)
6662  initMovingEdge(I ? *I : m_I, cMo);
6663 #endif
6664 
6665  // Depth normal
6666  vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6667 
6668  // Depth dense
6669  vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6670 }
6671 
6672 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
6673 {
6674  setPose(&I, NULL, cdMo);
6675 }
6676 
6677 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
6678 {
6679  setPose(NULL, &I_color, cdMo);
6680 }
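
setPose() re-localizes the tracker without reloading the model: when the KLT tracker is enabled the current features are transferred to the new pose, otherwise init() is simply re-run from the given pose. A minimal user-level sketch, assuming a configured tracker and a grayscale image I_gray, where the pose comes from an external source (the detector call is hypothetical):

    // Sketch: force the tracker to a pose provided by an external detector.
    vpHomogeneousMatrix cMo_detected = myExternalDetector(I_gray); // hypothetical helper
    tracker.setPose(I_gray, cMo_detected);
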
6681 
6682 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
6683 {
6684  vpMbEdgeTracker::setProjectionErrorComputation(flag);
6685 }
6686 
6687 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
6688 {
6689  vpMbEdgeTracker::setScanLineVisibilityTest(v);
6690 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6691  vpMbKltTracker::setScanLineVisibilityTest(v);
6692 #endif
6693  vpMbDepthNormalTracker::setScanLineVisibilityTest(v);
6694  vpMbDepthDenseTracker::setScanLineVisibilityTest(v);
6695 }
6696 
6697 void vpMbGenericTracker::TrackerWrapper::setTrackerType(const int type)
6698 {
6699  if ((type & (EDGE_TRACKER |
6700 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6701  KLT_TRACKER |
6702 #endif
6703  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
6704  throw vpException(vpTrackingException::badValue, "bad value for tracker type: %d", type);
6705  }
6706 
6707  m_trackerType = type;
6708 }
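
The tracker types are bit flags, so several feature types can be enabled at once with a bitwise OR; at least one of the four known flags must be set, otherwise the method throws. A minimal sketch:

    // Sketch: edge + KLT features on a monocular camera.
    tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
    // Depth-only configuration for a camera that provides a point cloud:
    // tracker.setTrackerType(vpMbGenericTracker::DEPTH_DENSE_TRACKER);
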
6709 
6710 void vpMbGenericTracker::TrackerWrapper::testTracking()
6711 {
6712  if (m_trackerType & EDGE_TRACKER) {
6713  vpMbEdgeTracker::testTracking();
6714  }
6715 }
6716 
6717 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> &
6718 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
6719  I
6720 #endif
6721 )
6722 {
6723  if ((m_trackerType & (EDGE_TRACKER
6724 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6725  | KLT_TRACKER
6726 #endif
6727  )) == 0) {
6728  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
6729  return;
6730  }
6731 
6732 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
6733  track(&I, nullptr);
6734 #endif
6735 }
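
From user code, single-camera edge/KLT tracking goes through vpMbGenericTracker::track(I). A minimal loop sketch, assuming a configured tracker and grayscale image I_gray (image acquisition is left abstract; grabFrame() is a hypothetical helper):

    // Sketch: single-camera grayscale tracking loop.
    vpHomogeneousMatrix cMo_cur;
    while (grabFrame(I_gray)) {   // hypothetical acquisition helper
      tracker.track(I_gray);      // edge / KLT tracking on the grayscale image
      tracker.getPose(cMo_cur);   // current object pose in the camera frame
    }
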
6736 
6737 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<vpRGBa> &)
6738 {
6739  //not exposed to the public API, only for debug
6740 }
6741 
6742 #ifdef VISP_HAVE_PCL
6743 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
6744  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6745 {
6746  if ((m_trackerType & (EDGE_TRACKER |
6747 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6748  KLT_TRACKER |
6749 #endif
6750  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
6751  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
6752  return;
6753  }
6754 
6755  if (m_trackerType & (EDGE_TRACKER
6756 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6757  | KLT_TRACKER
6758 #endif
6759  ) &&
6760  ptr_I == NULL) {
6761  throw vpException(vpException::fatalError, "Image pointer is NULL!");
6762  }
6763 
6764  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && ! point_cloud) { // point_cloud == nullptr
6765  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
6766  }
6767 
6768  // Back-up cMo in case of exception
6769  vpHomogeneousMatrix cMo_1 = cMo;
6770  try {
6771  preTracking(ptr_I, point_cloud);
6772 
6773  try {
6774  computeVVS(ptr_I);
6775  } catch (...) {
6776  covarianceMatrix = -1;
6777  throw; // throw the original exception
6778  }
6779 
6780  if (m_trackerType == EDGE_TRACKER)
6781  testTracking();
6782 
6783  postTracking(ptr_I, point_cloud);
6784 
6785  } catch (const vpException &e) {
6786  std::cerr << "Exception: " << e.what() << std::endl;
6787  cMo = cMo_1;
6788  throw; // rethrowing the original exception
6789  }
6790 }
6791 #endif
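
When PCL is available, the corresponding user-level call is the map-based track() overload taking a pcl::PointCloud per depth camera. A minimal per-frame sketch, assuming a configured tracker, a grayscale image I_gray and an organized cloud pointcloud (camera names are illustrative and must match the tracker configuration):

    #ifdef VISP_HAVE_PCL
    // Sketch: one visible camera ("Camera1") and one depth camera ("Camera2").
    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
    std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
    mapOfImages["Camera1"] = &I_gray;
    mapOfPointClouds["Camera2"] = pointcloud; // organized pcl::PointCloud<pcl::PointXYZ>
    tracker.track(mapOfImages, mapOfPointClouds);
    #endif
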