Visual Servoing Platform  version 3.3.0 under development (2020-02-17)
vpMbGenericTracker.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Generic model-based tracker.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/mbt/vpMbGenericTracker.h>
37 
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/mbt/vpMbtXmlGenericParser.h>
42 
43 vpMbGenericTracker::vpMbGenericTracker()
44  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
45  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
46 {
47  m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
48 
49  // Add default camera transformation matrix
50  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
51 
52  // Add default ponderation between each feature type
53  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
54 
55 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
56  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
57 #endif
58 
59  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
60  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
61 }
62 
63 vpMbGenericTracker::vpMbGenericTracker(unsigned int nbCameras, int trackerType)
64  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
65  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
66 {
67  if (nbCameras == 0) {
68  throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
69  } else if (nbCameras == 1) {
70  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);
71 
72  // Add default camera transformation matrix
73  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
74  } else {
75  for (unsigned int i = 1; i <= nbCameras; i++) {
76  std::stringstream ss;
77  ss << "Camera" << i;
78  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);
79 
80  // Add default camera transformation matrix
81  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
82  }
83 
84  // Set by default the reference camera to the first one
85  m_referenceCameraName = m_mapOfTrackers.begin()->first;
86  }
87 
88  // Add default ponderation between each feature type
89  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
90 
91 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
92  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
93 #endif
94 
95  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
96  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
97 }
98 
99 vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
100  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
101  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
102 {
103  if (trackerTypes.empty()) {
104  throw vpException(vpException::badValue, "There is no camera!");
105  }
106 
107  if (trackerTypes.size() == 1) {
108  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);
109 
110  // Add default camera transformation matrix
111  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
112  } else {
113  for (size_t i = 1; i <= trackerTypes.size(); i++) {
114  std::stringstream ss;
115  ss << "Camera" << i;
116  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);
117 
118  // Add default camera transformation matrix
119  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
120  }
121 
122  // Set by default the reference camera to the first one
123  m_referenceCameraName = m_mapOfTrackers.begin()->first;
124  }
125 
126  // Add default ponderation between each feature type
127  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
128 
129 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
130  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
131 #endif
132 
133  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
134  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
135 }
136 
137 vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
138  const std::vector<int> &trackerTypes)
139  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
140  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
141 {
142  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
143  throw vpException(vpTrackingException::badValue,
144  "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
145  }
146 
147  for (size_t i = 0; i < cameraNames.size(); i++) {
148  m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);
149 
150  // Add default camera transformation matrix
151  m_mapOfCameraTransformationMatrix[cameraNames[i]] = vpHomogeneousMatrix();
152  }
153 
154  // Set by default the reference camera to the first one
155  m_referenceCameraName = m_mapOfTrackers.begin()->first;
156 
157  // Add default ponderation between each feature type
158  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
159 
160 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
161  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
162 #endif
163 
164  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
165  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
166 }
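/*
  Usage sketch (illustrative only; camera names and tracker types are assumptions): the
  constructors above cover the monocular and multi-camera cases. Assuming ViSP is built with
  the KLT module and OpenCV, a stereo edge+KLT tracker could be created like this:

    #include <visp3/mbt/vpMbGenericTracker.h>

    std::vector<std::string> cameraNames;
    cameraNames.push_back("Camera1");
    cameraNames.push_back("Camera2");

    std::vector<int> trackerTypes(2, vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);

    vpMbGenericTracker tracker(cameraNames, trackerTypes); // one TrackerWrapper per camera name
*/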
167 
168 vpMbGenericTracker::~vpMbGenericTracker()
169 {
170  for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
171  ++it) {
172  delete it->second;
173  it->second = NULL;
174  }
175 }
176 
194 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
195  const vpCameraParameters &cam)
196 {
197  double rawTotalProjectionError = 0.0;
198  unsigned int nbTotalFeaturesUsed = 0;
199 
200  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
201  it != m_mapOfTrackers.end(); ++it) {
202  TrackerWrapper *tracker = it->second;
203 
204  unsigned int nbFeaturesUsed = 0;
205  double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
206 
207  if (nbFeaturesUsed > 0) {
208  nbTotalFeaturesUsed += nbFeaturesUsed;
209  rawTotalProjectionError += curProjError;
210  }
211  }
212 
213  if (nbTotalFeaturesUsed > 0) {
214  return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
215  }
216 
217  return 90.0;
218 }
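/*
  Note on the value computed above: each TrackerWrapper returns a raw projection error summed
  over its usable features (in radian), the sums are accumulated over all cameras and divided
  by the total feature count, then converted to degree; 90 is returned when no feature could
  be used. Illustrative numbers (assumptions): a camera contributing 0.02 rad over 200 features
  combined with one contributing 0.01 rad over 100 features gives vpMath::deg(0.03 / 300).
*/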
219 
237 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &_cMo,
238  const vpCameraParameters &_cam)
239 {
240  vpImage<unsigned char> I;
241  vpImageConvert::convert(I_color, I); // FS: Shouldn't we use here m_I that was converted in track()?
242 
243  return computeCurrentProjectionError(I, _cMo, _cam);
244 }
245 
246 void vpMbGenericTracker::computeProjectionError()
247 {
248  if (computeProjError) {
249  double rawTotalProjectionError = 0.0;
250  unsigned int nbTotalFeaturesUsed = 0;
251 
252  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
253  it != m_mapOfTrackers.end(); ++it) {
254  TrackerWrapper *tracker = it->second;
255 
256  double curProjError = tracker->getProjectionError();
257  unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
258 
259  if (nbFeaturesUsed > 0) {
260  nbTotalFeaturesUsed += nbFeaturesUsed;
261  rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
262  }
263  }
264 
265  if (nbTotalFeaturesUsed > 0) {
266  projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
267  } else {
268  projectionError = 90.0;
269  }
270  } else {
271  projectionError = 90.0;
272  }
273 }
274 
275 void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
276 {
277  computeVVSInit(mapOfImages);
278 
279  if (m_error.getRows() < 4) {
280  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
281  }
282 
283  double normRes = 0;
284  double normRes_1 = -1;
285  unsigned int iter = 0;
286 
287  vpMatrix LTL;
288  vpColVector LTR, v;
289  vpColVector error_prev;
290 
291  double mu = m_initialMu;
292  vpHomogeneousMatrix cMo_prev;
293 
294  bool isoJoIdentity_ = true;
295 
296  // Covariance
297  vpColVector W_true(m_error.getRows());
298  vpMatrix L_true, LVJ_true;
299 
300  // Create the map of VelocityTwistMatrices
301  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
302  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
303  it != m_mapOfCameraTransformationMatrix.end(); ++it) {
304  vpVelocityTwistMatrix cVo;
305  cVo.buildFrom(it->second);
306  mapOfVelocityTwist[it->first] = cVo;
307  }
308 
309  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
310 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
311  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
312 #endif
313  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
314  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];
315 
316  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
317  computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);
318 
319  bool reStartFromLastIncrement = false;
320  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
321  if (reStartFromLastIncrement) {
322  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
323  it != m_mapOfTrackers.end(); ++it) {
324  TrackerWrapper *tracker = it->second;
325 
326  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;
327 
328 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
329  vpHomogeneousMatrix c_curr_tTc_curr0 =
330  m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
331  tracker->ctTc0 = c_curr_tTc_curr0;
332 #endif
333  }
334  }
335 
336  if (!reStartFromLastIncrement) {
337  computeVVSWeights();
338 
339  if (computeCovariance) {
340  L_true = m_L;
341  if (!isoJoIdentity_) {
342  vpVelocityTwistMatrix cVo;
343  cVo.buildFrom(m_cMo);
344  LVJ_true = (m_L * (cVo * oJo));
345  }
346  }
347 
348  vpVelocityTwistMatrix cVo;
349  if (iter == 0) {
350  isoJoIdentity_ = true;
351  oJo.eye();
352 
353  // If all the 6 dof should be estimated, we check if the interaction
354  // matrix is full rank. If not, we automatically remove the dof that
355  // cannot be estimated. This is particularly useful when considering
356  // circles (rank 5) and cylinders (rank 4).
357  if (isoJoIdentity_) {
358  cVo.buildFrom(m_cMo);
359 
360  vpMatrix K; // kernel
361  unsigned int rank = (m_L * cVo).kernel(K);
362  if (rank == 0) {
363  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
364  }
365 
366  if (rank != 6) {
367  vpMatrix I; // Identity
368  I.eye(6);
369  oJo = I - K.AtA();
370 
371  isoJoIdentity_ = false;
372  }
373  }
374  }
375 
376  // Weighting
377  double num = 0;
378  double den = 0;
379 
380  unsigned int start_index = 0;
381  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
382  it != m_mapOfTrackers.end(); ++it) {
383  TrackerWrapper *tracker = it->second;
384 
385  if (tracker->m_trackerType & EDGE_TRACKER) {
386  for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
387  double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
388  W_true[start_index + i] = wi;
389  m_weightedError[start_index + i] = wi * m_error[start_index + i];
390 
391  num += wi * vpMath::sqr(m_error[start_index + i]);
392  den += wi;
393 
394  for (unsigned int j = 0; j < m_L.getCols(); j++) {
395  m_L[start_index + i][j] *= wi;
396  }
397  }
398 
399  start_index += tracker->m_error_edge.getRows();
400  }
401 
402 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
403  if (tracker->m_trackerType & KLT_TRACKER) {
404  for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
405  double wi = tracker->m_w_klt[i] * factorKlt;
406  W_true[start_index + i] = wi;
407  m_weightedError[start_index + i] = wi * m_error[start_index + i];
408 
409  num += wi * vpMath::sqr(m_error[start_index + i]);
410  den += wi;
411 
412  for (unsigned int j = 0; j < m_L.getCols(); j++) {
413  m_L[start_index + i][j] *= wi;
414  }
415  }
416 
417  start_index += tracker->m_error_klt.getRows();
418  }
419 #endif
420 
421  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
422  for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
423  double wi = tracker->m_w_depthNormal[i] * factorDepth;
424  W_true[start_index + i] = wi;
425  m_weightedError[start_index + i] = wi * m_error[start_index + i];
426 
427  num += wi * vpMath::sqr(m_error[start_index + i]);
428  den += wi;
429 
430  for (unsigned int j = 0; j < m_L.getCols(); j++) {
431  m_L[start_index + i][j] *= wi;
432  }
433  }
434 
435  start_index += tracker->m_error_depthNormal.getRows();
436  }
437 
438  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
439  for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
440  double wi = tracker->m_w_depthDense[i] * factorDepthDense;
441  W_true[start_index + i] = wi;
442  m_weightedError[start_index + i] = wi * m_error[start_index + i];
443 
444  num += wi * vpMath::sqr(m_error[start_index + i]);
445  den += wi;
446 
447  for (unsigned int j = 0; j < m_L.getCols(); j++) {
448  m_L[start_index + i][j] *= wi;
449  }
450  }
451 
452  start_index += tracker->m_error_depthDense.getRows();
453  }
454  }
455 
456  normRes_1 = normRes;
457  normRes = sqrt(num / den);
458 
459  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
460 
461  cMo_prev = m_cMo;
462 
463  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
464 
465 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
466  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
467  it != m_mapOfTrackers.end(); ++it) {
468  TrackerWrapper *tracker = it->second;
469 
470  vpHomogeneousMatrix c_curr_tTc_curr0 =
471  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
472  tracker->ctTc0 = c_curr_tTc_curr0;
473  }
474 #endif
475 
476  // Update cMo
477  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
478  it != m_mapOfTrackers.end(); ++it) {
479  TrackerWrapper *tracker = it->second;
480  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
481  }
482  }
483 
484  iter++;
485  }
486 
487  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
488 
489  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
490  it != m_mapOfTrackers.end(); ++it) {
491  TrackerWrapper *tracker = it->second;
492 
493  if (tracker->m_trackerType & EDGE_TRACKER) {
494  tracker->updateMovingEdgeWeights();
495  }
496  }
497 }
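/*
  For reference, the loop above implements the classical virtual visual servoing scheme in a
  schematic form (the exact damping logic lives in vpMbTracker::computeVVSPoseEstimation()):
  with the stacked weighted residual e = W * m_error and interaction matrix L = W * m_L, a
  Levenberg-Marquardt damped step of the form

    (L^T L + mu * diag(L^T L)) v = -lambda * L^T e

  is solved for the velocity twist v, and the pose is then updated through the exponential
  map, m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo.
*/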
498 
499 void vpMbGenericTracker::computeVVSInit()
500 {
501  throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
502 }
503 
504 void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
505 {
506  unsigned int nbFeatures = 0;
507 
508  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
509  it != m_mapOfTrackers.end(); ++it) {
510  TrackerWrapper *tracker = it->second;
511  tracker->computeVVSInit(mapOfImages[it->first]);
512 
513  nbFeatures += tracker->m_error.getRows();
514  }
515 
516  m_L.resize(nbFeatures, 6, false, false);
517  m_error.resize(nbFeatures, false);
518 
519  m_weightedError.resize(nbFeatures, false);
520  m_w.resize(nbFeatures, false);
521  m_w = 1;
522 }
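/*
  After this initialization the per-camera quantities are simply stacked: m_error, m_w and
  m_weightedError have one row per feature over all cameras, and m_L has the same number of
  rows and 6 columns. For instance (hypothetical sizes), two cameras contributing 300 and 200
  residuals yield a 500 x 6 interaction matrix.
*/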
523 
524 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu()
525 {
526  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
527  "computeVVSInteractionMatrixAndR"
528  "esidu() should not be called");
529 }
530 
531 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu(
532  std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
533  std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
534 {
535  unsigned int start_index = 0;
536 
537  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
538  it != m_mapOfTrackers.end(); ++it) {
539  TrackerWrapper *tracker = it->second;
540 
541  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
542 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
543  vpHomogeneousMatrix c_curr_tTc_curr0 = m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
544  tracker->ctTc0 = c_curr_tTc_curr0;
545 #endif
546 
547  tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
548 
549  m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
550  m_error.insert(start_index, tracker->m_error);
551 
552  start_index += tracker->m_error.getRows();
553  }
554 }
555 
556 void vpMbGenericTracker::computeVVSWeights()
557 {
558  unsigned int start_index = 0;
559 
560  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
561  it != m_mapOfTrackers.end(); ++it) {
562  TrackerWrapper *tracker = it->second;
563  tracker->computeVVSWeights();
564 
565  m_w.insert(start_index, tracker->m_w);
566  start_index += tracker->m_w.getRows();
567  }
568 }
569 
583 void vpMbGenericTracker::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
584  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
585  bool displayFullModel)
586 {
587  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
588  if (it != m_mapOfTrackers.end()) {
589  TrackerWrapper *tracker = it->second;
590  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
591  } else {
592  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
593  }
594 }
595 
609 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
610  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
611  bool displayFullModel)
612 {
613  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
614  if (it != m_mapOfTrackers.end()) {
615  TrackerWrapper *tracker = it->second;
616  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
617  } else {
618  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
619  }
620 }
621 
638 void vpMbGenericTracker::display(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
639  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
640  const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
641  unsigned int thickness, bool displayFullModel)
642 {
643  if (m_mapOfTrackers.size() == 2) {
644  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
645  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
646  ++it;
647 
648  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
649  } else {
650  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
651  << std::endl;
652  }
653 }
654 
671 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I1, const vpImage<vpRGBa> &I2, const vpHomogeneousMatrix &c1Mo,
672  const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
673  const vpCameraParameters &cam2, const vpColor &color, unsigned int thickness,
674  bool displayFullModel)
675 {
676  if (m_mapOfTrackers.size() == 2) {
677  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
678  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
679  ++it;
680 
681  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
682  } else {
683  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
684  << std::endl;
685  }
686 }
687 
699 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
700  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
701  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
702  const vpColor &col, unsigned int thickness, bool displayFullModel)
703 {
704  // Display only for the given images
705  for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
706  it_img != mapOfImages.end(); ++it_img) {
707  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
708  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
709  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
710 
711  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
712  it_cam != mapOfCameraParameters.end()) {
713  TrackerWrapper *tracker = it_tracker->second;
714  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
715  } else {
716  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
717  }
718  }
719 }
720 
732 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
733  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
734  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
735  const vpColor &col, unsigned int thickness, bool displayFullModel)
736 {
737  // Display only for the given images
738  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
739  it_img != mapOfImages.end(); ++it_img) {
740  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
741  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
742  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
743 
744  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
745  it_cam != mapOfCameraParameters.end()) {
746  TrackerWrapper *tracker = it_tracker->second;
747  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
748  } else {
749  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
750  }
751  }
752 }
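/*
  Usage sketch (illustrative; the map of images and the camera names are assumptions): after a
  call to track(), the map-based overload above can be fed with the poses and camera parameters
  retrieved from the tracker itself:

    std::map<std::string, vpHomogeneousMatrix> cameraPoses;
    std::map<std::string, vpCameraParameters> cameraParams;
    tracker.getPose(cameraPoses);
    tracker.getCameraParameters(cameraParams);
    tracker.display(mapOfImages, cameraPoses, cameraParams, vpColor::red, 2, false);
*/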
753 
759 std::vector<std::string> vpMbGenericTracker::getCameraNames() const
760 {
761  std::vector<std::string> cameraNames;
762 
763  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
764  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
765  cameraNames.push_back(it_tracker->first);
766  }
767 
768  return cameraNames;
769 }
770 
771 void vpMbGenericTracker::getCameraParameters(vpCameraParameters &camera) const
772 {
773  vpMbTracker::getCameraParameters(camera);
774 }
775 
784 void vpMbGenericTracker::getCameraParameters(vpCameraParameters &cam1, vpCameraParameters &cam2) const
785 {
786  if (m_mapOfTrackers.size() == 2) {
787  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
788  it->second->getCameraParameters(cam1);
789  ++it;
790 
791  it->second->getCameraParameters(cam2);
792  } else {
793  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
794  << std::endl;
795  }
796 }
797 
803 void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
804 {
805  // Clear the input map
806  mapOfCameraParameters.clear();
807 
808  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
809  it != m_mapOfTrackers.end(); ++it) {
810  vpCameraParameters cam_;
811  it->second->getCameraParameters(cam_);
812  mapOfCameraParameters[it->first] = cam_;
813  }
814 }
815 
822 std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
823 {
824  std::map<std::string, int> trackingTypes;
825 
826  TrackerWrapper *tracker;
827  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
828  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
829  tracker = it_tracker->second;
830  trackingTypes[it_tracker->first] = tracker->getTrackerType();
831  }
832 
833  return trackingTypes;
834 }
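/*
  The returned integers are bitmasks of vpMbGenericTracker::vpTrackerType values, so individual
  feature types can be tested per camera, e.g. (illustrative, "Camera1" is an assumed name):

    std::map<std::string, int> types = tracker.getCameraTrackerTypes();
    bool useEdges = (types["Camera1"] & vpMbGenericTracker::EDGE_TRACKER) != 0;
*/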
835 
844 void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
845 {
846  if (m_mapOfTrackers.size() == 2) {
847  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
848  clippingFlag1 = it->second->getClipping();
849  ++it;
850 
851  clippingFlag2 = it->second->getClipping();
852  } else {
853  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
854  << std::endl;
855  }
856 }
857 
863 void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
864 {
865  mapOfClippingFlags.clear();
866 
867  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
868  it != m_mapOfTrackers.end(); ++it) {
869  TrackerWrapper *tracker = it->second;
870  mapOfClippingFlags[it->first] = tracker->getClipping();
871  }
872 }
873 
879 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces()
880 {
881  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
882  if (it != m_mapOfTrackers.end()) {
883  return it->second->getFaces();
884  }
885 
886  std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
887  return faces;
888 }
889 
895 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces(const std::string &cameraName)
896 {
897  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
898  if (it != m_mapOfTrackers.end()) {
899  return it->second->getFaces();
900  }
901 
902  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
903  return faces;
904 }
905 
906 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
907 
910 std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
911 {
912  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
913  if (it != m_mapOfTrackers.end()) {
914  TrackerWrapper *tracker = it->second;
915  return tracker->getFeaturesCircle();
916  } else {
917  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
918  m_referenceCameraName.c_str());
919  }
920 }
921 
925 std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
926 {
927  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
928  if (it != m_mapOfTrackers.end()) {
929  TrackerWrapper *tracker = it->second;
930  return tracker->getFeaturesKltCylinder();
931  } else {
932  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
933  m_referenceCameraName.c_str());
934  }
935 }
936 
940 std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
941 {
942  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
943  if (it != m_mapOfTrackers.end()) {
944  TrackerWrapper *tracker = it->second;
945  return tracker->getFeaturesKlt();
946  } else {
947  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
948  m_referenceCameraName.c_str());
949  }
950 }
951 #endif
952 
978 std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
979 {
980  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
981 
982  if (it != m_mapOfTrackers.end()) {
983  return it->second->getFeaturesForDisplay();
984  } else {
985  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
986  }
987 
988  return std::vector<std::vector<double> >();
989 }
990 
1014 void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
1015 {
1016  // Clear the input map
1017  mapOfFeatures.clear();
1018 
1019  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1020  it != m_mapOfTrackers.end(); ++it) {
1021  mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1022  }
1023 }
1024 
1035 
1036 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
1037 
1045 std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1046 {
1047  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1048  if (it != m_mapOfTrackers.end()) {
1049  TrackerWrapper *tracker = it->second;
1050  return tracker->getKltImagePoints();
1051  } else {
1052  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1053  }
1054 
1055  return std::vector<vpImagePoint>();
1056 }
1057 
1066 std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1067 {
1068  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1069  if (it != m_mapOfTrackers.end()) {
1070  TrackerWrapper *tracker = it->second;
1071  return tracker->getKltImagePointsWithId();
1072  } else {
1073  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1074  }
1075 
1076  return std::map<int, vpImagePoint>();
1077 }
1078 
1084 unsigned int vpMbGenericTracker::getKltMaskBorder() const
1085 {
1086  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1087  if (it != m_mapOfTrackers.end()) {
1088  TrackerWrapper *tracker = it->second;
1089  return tracker->getKltMaskBorder();
1090  } else {
1091  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1092  }
1093 
1094  return 0;
1095 }
1096 
1102 int vpMbGenericTracker::getKltNbPoints() const
1103 {
1104  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1105  if (it != m_mapOfTrackers.end()) {
1106  TrackerWrapper *tracker = it->second;
1107  return tracker->getKltNbPoints();
1108  } else {
1109  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1110  }
1111 
1112  return 0;
1113 }
1114 
1120 vpKltOpencv vpMbGenericTracker::getKltOpencv() const
1121 {
1122  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1123 
1124  if (it_tracker != m_mapOfTrackers.end()) {
1125  TrackerWrapper *tracker;
1126  tracker = it_tracker->second;
1127  return tracker->getKltOpencv();
1128  } else {
1129  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1130  }
1131 
1132  return vpKltOpencv();
1133 }
1134 
1143 void vpMbGenericTracker::getKltOpencv(vpKltOpencv &klt1, vpKltOpencv &klt2) const
1144 {
1145  if (m_mapOfTrackers.size() == 2) {
1146  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1147  klt1 = it->second->getKltOpencv();
1148  ++it;
1149 
1150  klt2 = it->second->getKltOpencv();
1151  } else {
1152  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1153  << std::endl;
1154  }
1155 }
1156 
1162 void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1163 {
1164  mapOfKlts.clear();
1165 
1166  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1167  it != m_mapOfTrackers.end(); ++it) {
1168  TrackerWrapper *tracker = it->second;
1169  mapOfKlts[it->first] = tracker->getKltOpencv();
1170  }
1171 }
1172 
1173 #if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
1174 
1179 std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1180 {
1181  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1182  if (it != m_mapOfTrackers.end()) {
1183  TrackerWrapper *tracker = it->second;
1184  return tracker->getKltPoints();
1185  } else {
1186  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1187  }
1188 
1189  return std::vector<cv::Point2f>();
1190 }
1191 #endif
1192 
1200 #endif
1201 
1214 void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList,
1215  unsigned int level) const
1216 {
1217  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1218  if (it != m_mapOfTrackers.end()) {
1219  it->second->getLcircle(circlesList, level);
1220  } else {
1221  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1222  }
1223 }
1224 
1238 void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1239  unsigned int level) const
1240 {
1241  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1242  if (it != m_mapOfTrackers.end()) {
1243  it->second->getLcircle(circlesList, level);
1244  } else {
1245  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1246  }
1247 }
1248 
1261 void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList,
1262  unsigned int level) const
1263 {
1264  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1265  if (it != m_mapOfTrackers.end()) {
1266  it->second->getLcylinder(cylindersList, level);
1267  } else {
1268  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1269  }
1270 }
1271 
1285 void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1286  unsigned int level) const
1287 {
1288  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1289  if (it != m_mapOfTrackers.end()) {
1290  it->second->getLcylinder(cylindersList, level);
1291  } else {
1292  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1293  }
1294 }
1295 
1308 void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList,
1309  unsigned int level) const
1310 {
1311  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1312 
1313  if (it != m_mapOfTrackers.end()) {
1314  it->second->getLline(linesList, level);
1315  } else {
1316  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1317  }
1318 }
1319 
1333 void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1334  unsigned int level) const
1335 {
1336  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1337  if (it != m_mapOfTrackers.end()) {
1338  it->second->getLline(linesList, level);
1339  } else {
1340  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1341  }
1342 }
1343 
1369 std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1370  const vpHomogeneousMatrix &cMo,
1371  const vpCameraParameters &cam,
1372  bool displayFullModel)
1373 {
1374  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1375 
1376  if (it != m_mapOfTrackers.end()) {
1377  return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1378  } else {
1379  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1380  }
1381 
1382  return std::vector<std::vector<double> >();
1383 }
1384 
1411 void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1412  const std::map<std::string, unsigned int> &mapOfwidths,
1413  const std::map<std::string, unsigned int> &mapOfheights,
1414  const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1415  const std::map<std::string, vpCameraParameters> &mapOfCams,
1416  bool displayFullModel)
1417 {
1418  // Clear the input map
1419  mapOfModels.clear();
1420 
1421  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1422  it != m_mapOfTrackers.end(); ++it) {
1423  std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1424  std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1425  std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1426  std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1427 
1428  if (findWidth != mapOfwidths.end() &&
1429  findHeight != mapOfheights.end() &&
1430  findcMo != mapOfcMos.end() &&
1431  findCam != mapOfCams.end()) {
1432  mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second,
1433  findcMo->second, findCam->second, displayFullModel);
1434  }
1435  }
1436 }
1437 
1443 vpMe vpMbGenericTracker::getMovingEdge() const
1444 {
1445  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1446 
1447  if (it != m_mapOfTrackers.end()) {
1448  return it->second->getMovingEdge();
1449  } else {
1450  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1451  }
1452 
1453  return vpMe();
1454 }
1455 
1464 void vpMbGenericTracker::getMovingEdge(vpMe &me1, vpMe &me2) const
1465 {
1466  if (m_mapOfTrackers.size() == 2) {
1467  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1468  it->second->getMovingEdge(me1);
1469  ++it;
1470 
1471  it->second->getMovingEdge(me2);
1472  } else {
1473  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1474  << std::endl;
1475  }
1476 }
1477 
1483 void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1484 {
1485  mapOfMovingEdges.clear();
1486 
1487  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1488  it != m_mapOfTrackers.end(); ++it) {
1489  TrackerWrapper *tracker = it->second;
1490  mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1491  }
1492 }
1493 
1509 unsigned int vpMbGenericTracker::getNbPoints(unsigned int level) const
1510 {
1511  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1512 
1513  unsigned int nbGoodPoints = 0;
1514  if (it != m_mapOfTrackers.end()) {
1515 
1516  nbGoodPoints = it->second->getNbPoints(level);
1517  } else {
1518  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1519  }
1520 
1521  return nbGoodPoints;
1522 }
1523 
1538 void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, unsigned int level) const
1539 {
1540  mapOfNbPoints.clear();
1541 
1542  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1543  it != m_mapOfTrackers.end(); ++it) {
1544  TrackerWrapper *tracker = it->second;
1545  mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1546  }
1547 }
1548 
1554 unsigned int vpMbGenericTracker::getNbPolygon() const
1555 {
1556  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1557  if (it != m_mapOfTrackers.end()) {
1558  return it->second->getNbPolygon();
1559  }
1560 
1561  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1562  return 0;
1563 }
1564 
1570 void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1571 {
1572  mapOfNbPolygons.clear();
1573 
1574  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1575  it != m_mapOfTrackers.end(); ++it) {
1576  TrackerWrapper *tracker = it->second;
1577  mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1578  }
1579 }
1580 
1591 vpMbtPolygon *vpMbGenericTracker::getPolygon(unsigned int index)
1592 {
1593  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1594  if (it != m_mapOfTrackers.end()) {
1595  return it->second->getPolygon(index);
1596  }
1597 
1598  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1599  return NULL;
1600 }
1601 
1613 vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, unsigned int index)
1614 {
1615  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1616  if (it != m_mapOfTrackers.end()) {
1617  return it->second->getPolygon(index);
1618  }
1619 
1620  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1621  return NULL;
1622 }
1623 
1639 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1640 vpMbGenericTracker::getPolygonFaces(bool orderPolygons, bool useVisibility, bool clipPolygon)
1641 {
1642  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1643 
1644  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1645  if (it != m_mapOfTrackers.end()) {
1646  TrackerWrapper *tracker = it->second;
1647  polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1648  } else {
1649  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1650  }
1651 
1652  return polygonFaces;
1653 }
1654 
1672 void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1673  std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1674  bool orderPolygons, bool useVisibility, bool clipPolygon)
1675 {
1676  mapOfPolygons.clear();
1677  mapOfPoints.clear();
1678 
1679  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1680  it != m_mapOfTrackers.end(); ++it) {
1681  TrackerWrapper *tracker = it->second;
1682  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1683  tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1684 
1685  mapOfPolygons[it->first] = polygonFaces.first;
1686  mapOfPoints[it->first] = polygonFaces.second;
1687  }
1688 }
1689 
1690 void vpMbGenericTracker::getPose(vpHomogeneousMatrix &cMo) const
1691 {
1692  vpMbTracker::getPose(cMo);
1693 }
1694 
1703 void vpMbGenericTracker::getPose(vpHomogeneousMatrix &c1Mo, vpHomogeneousMatrix &c2Mo) const
1704 {
1705  if (m_mapOfTrackers.size() == 2) {
1706  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1707  it->second->getPose(c1Mo);
1708  ++it;
1709 
1710  it->second->getPose(c2Mo);
1711  } else {
1712  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1713  << std::endl;
1714  }
1715 }
1716 
1722 void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1723 {
1724  // Clear the map
1725  mapOfCameraPoses.clear();
1726 
1727  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1728  it != m_mapOfTrackers.end(); ++it) {
1729  TrackerWrapper *tracker = it->second;
1730  tracker->getPose(mapOfCameraPoses[it->first]);
1731  }
1732 }
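/*
  Typical call sequence (sketch; image acquisition and the tracking loop are omitted, camera
  names are assumptions): after each call to track(), the per-camera poses can be read back:

    std::map<std::string, vpHomogeneousMatrix> poses;
    tracker.track(mapOfImages);
    tracker.getPose(poses);
    vpHomogeneousMatrix c1Mo = poses["Camera1"];
*/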
1733 
1737 std::string vpMbGenericTracker::getReferenceCameraName() const
1738 {
1739  return m_referenceCameraName;
1740 }
1741 
1745 int vpMbGenericTracker::getTrackerType() const
1746 {
1747  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1748  if (it != m_mapOfTrackers.end()) {
1749  TrackerWrapper *tracker = it->second;
1750  return tracker->getTrackerType();
1751  } else {
1752  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1753  m_referenceCameraName.c_str());
1754  }
1755 }
1756 
1757 void vpMbGenericTracker::init(const vpImage<unsigned char> &I)
1758 {
1759  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1760  it != m_mapOfTrackers.end(); ++it) {
1761  TrackerWrapper *tracker = it->second;
1762  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
1763  tracker->init(I);
1764  }
1765 }
1766 
1767 void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
1768  const double /*radius*/, const int /*idFace*/, const std::string & /*name*/)
1769 {
1770  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
1771 }
1772 
1773 #ifdef VISP_HAVE_MODULE_GUI
1774 
1817 void vpMbGenericTracker::initClick(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1818  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1819  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1820 {
1821  if (m_mapOfTrackers.size() == 2) {
1822  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1823  TrackerWrapper *tracker = it->second;
1824  tracker->initClick(I1, initFile1, displayHelp, T1);
1825 
1826  ++it;
1827 
1828  tracker = it->second;
1829  tracker->initClick(I2, initFile2, displayHelp, T2);
1830 
1831  it = m_mapOfTrackers.find(m_referenceCameraName);
1832  if (it != m_mapOfTrackers.end()) {
1833  tracker = it->second;
1834 
1835  // Set the reference cMo
1836  tracker->getPose(m_cMo);
1837  }
1838  } else {
1839  throw vpException(vpTrackingException::initializationError,
1840  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1841  }
1842 }
1843 
1886 void vpMbGenericTracker::initClick(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
1887  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1888  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1889 {
1890  if (m_mapOfTrackers.size() == 2) {
1891  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1892  TrackerWrapper *tracker = it->second;
1893  tracker->initClick(I_color1, initFile1, displayHelp, T1);
1894 
1895  ++it;
1896 
1897  tracker = it->second;
1898  tracker->initClick(I_color2, initFile2, displayHelp, T2);
1899 
1900  it = m_mapOfTrackers.find(m_referenceCameraName);
1901  if (it != m_mapOfTrackers.end()) {
1902  tracker = it->second;
1903 
1904  // Set the reference cMo
1905  tracker->getPose(m_cMo);
1906  }
1907  } else {
1908  throw vpException(vpTrackingException::initializationError,
1909  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1910  }
1911 }
1912 
1955 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
1956  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
1957  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
1958 {
1959  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1960  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
1961  mapOfImages.find(m_referenceCameraName);
1962  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
1963 
1964  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
1965  TrackerWrapper *tracker = it_tracker->second;
1966  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
1967  if (it_T != mapOfT.end())
1968  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
1969  else
1970  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
1971  tracker->getPose(m_cMo);
1972  } else {
1973  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
1974  }
1975 
1976  // Vector of missing initFile for cameras
1977  std::vector<std::string> vectorOfMissingCameraPoses;
1978 
1979  // Set pose for the specified cameras
1980  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
1981  if (it_tracker->first != m_referenceCameraName) {
1982  it_img = mapOfImages.find(it_tracker->first);
1983  it_initFile = mapOfInitFiles.find(it_tracker->first);
1984 
1985  if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
1986  // InitClick for the current camera
1987  TrackerWrapper *tracker = it_tracker->second;
1988  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
1989  } else {
1990  vectorOfMissingCameraPoses.push_back(it_tracker->first);
1991  }
1992  }
1993  }
1994 
1995  // Init for cameras that do not have an initFile
1996  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
1997  it != vectorOfMissingCameraPoses.end(); ++it) {
1998  it_img = mapOfImages.find(*it);
1999  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2000  m_mapOfCameraTransformationMatrix.find(*it);
2001 
2002  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2003  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2004  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2005  m_mapOfTrackers[*it]->init(*it_img->second);
2006  } else {
2007  throw vpException(vpTrackingException::initializationError,
2008  "Missing image or missing camera transformation "
2009  "matrix! Cannot set the pose for camera: %s!",
2010  it->c_str());
2011  }
2012  }
2013 }
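/*
  Usage sketch (file and camera names are placeholders): the reference camera must appear in
  both maps; cameras without an init file are then initialized from the reference pose through
  their camera transformation matrices, as implemented above. The optional map of
  transformation matrices is left to its default here.

    std::map<std::string, const vpImage<unsigned char> *> images;
    images["Camera1"] = &I1;
    images["Camera2"] = &I2;

    std::map<std::string, std::string> initFiles;
    initFiles["Camera1"] = "cube.init"; // 3D points are clicked in camera 1 only

    tracker.initClick(images, initFiles, true);
*/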
2014 
2057 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2058  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2059  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2060 {
2061  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2062  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2063  mapOfColorImages.find(m_referenceCameraName);
2064  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2065 
2066  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2067  TrackerWrapper *tracker = it_tracker->second;
2068  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2069  if (it_T != mapOfT.end())
2070  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2071  else
2072  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2073  tracker->getPose(m_cMo);
2074  } else {
2075  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2076  }
2077 
2078  // Vector of missing initFile for cameras
2079  std::vector<std::string> vectorOfMissingCameraPoses;
2080 
2081  // Set pose for the specified cameras
2082  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2083  if (it_tracker->first != m_referenceCameraName) {
2084  it_img = mapOfColorImages.find(it_tracker->first);
2085  it_initFile = mapOfInitFiles.find(it_tracker->first);
2086 
2087  if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2088  // InitClick for the current camera
2089  TrackerWrapper *tracker = it_tracker->second;
2090  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2091  } else {
2092  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2093  }
2094  }
2095  }
2096 
2097  // Init for cameras that do not have an initFile
2098  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2099  it != vectorOfMissingCameraPoses.end(); ++it) {
2100  it_img = mapOfColorImages.find(*it);
2101  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2102  m_mapOfCameraTransformationMatrix.find(*it);
2103 
2104  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2105  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2106  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2107  vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
2108  m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
2109  } else {
2110  throw vpException(vpTrackingException::initializationError,
2111  "Missing image or missing camera transformation "
2112  "matrix! Cannot set the pose for camera: %s!",
2113  it->c_str());
2114  }
2115  }
2116 }
2117 #endif
2118 
2119 void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
2120  const int /*idFace*/, const std::string & /*name*/)
2121 {
2122  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
2123 }
2124 
2125 void vpMbGenericTracker::initFaceFromCorners(vpMbtPolygon & /*polygon*/)
2126 {
2127  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
2128 }
2129 
2130 void vpMbGenericTracker::initFaceFromLines(vpMbtPolygon & /*polygon*/)
2131 {
2132  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
2133 }
2134 
2164 void vpMbGenericTracker::initFromPoints(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2165  const std::string &initFile1, const std::string &initFile2)
2166 {
2167  if (m_mapOfTrackers.size() == 2) {
2168  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2169  TrackerWrapper *tracker = it->second;
2170  tracker->initFromPoints(I1, initFile1);
2171 
2172  ++it;
2173 
2174  tracker = it->second;
2175  tracker->initFromPoints(I2, initFile2);
2176 
2177  it = m_mapOfTrackers.find(m_referenceCameraName);
2178  if (it != m_mapOfTrackers.end()) {
2179  tracker = it->second;
2180 
2181  // Set the reference cMo
2182  tracker->getPose(m_cMo);
2183 
2184  // Set the reference camera parameters
2185  tracker->getCameraParameters(m_cam);
2186  }
2187  } else {
2188  throw vpException(vpTrackingException::initializationError,
2189  "Cannot initFromPoints()! Require two cameras but "
2190  "there are %d cameras!",
2191  m_mapOfTrackers.size());
2192  }
2193 }
2194 
2224 void vpMbGenericTracker::initFromPoints(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2225  const std::string &initFile1, const std::string &initFile2)
2226 {
2227  if (m_mapOfTrackers.size() == 2) {
2228  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2229  TrackerWrapper *tracker = it->second;
2230  tracker->initFromPoints(I_color1, initFile1);
2231 
2232  ++it;
2233 
2234  tracker = it->second;
2235  tracker->initFromPoints(I_color2, initFile2);
2236 
2237  it = m_mapOfTrackers.find(m_referenceCameraName);
2238  if (it != m_mapOfTrackers.end()) {
2239  tracker = it->second;
2240 
2241  // Set the reference cMo
2242  tracker->getPose(m_cMo);
2243 
2244  // Set the reference camera parameters
2245  tracker->getCameraParameters(m_cam);
2246  }
2247  } else {
2248  throw vpException(vpTrackingException::initializationError,
2249  "Cannot initFromPoints()! Require two cameras but "
2250  "there are %d cameras!",
2251  m_mapOfTrackers.size());
2252  }
2253 }
2254 
2255 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2256  const std::map<std::string, std::string> &mapOfInitPoints)
2257 {
2258  // Set the reference cMo
2259  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2260  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2261  mapOfImages.find(m_referenceCameraName);
2262  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2263 
2264  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2265  TrackerWrapper *tracker = it_tracker->second;
2266  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2267  tracker->getPose(m_cMo);
2268  } else {
2269  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2270  }
2271 
2272  // Vector of missing initPoints for cameras
2273  std::vector<std::string> vectorOfMissingCameraPoints;
2274 
2275  // Set pose for the specified cameras
2276  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2277  it_img = mapOfImages.find(it_tracker->first);
2278  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2279 
2280  if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2281  // Set pose
2282  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2283  } else {
2284  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2285  }
2286  }
2287 
2288  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2289  it != vectorOfMissingCameraPoints.end(); ++it) {
2290  it_img = mapOfImages.find(*it);
2291  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2292  m_mapOfCameraTransformationMatrix.find(*it);
2293 
2294  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2295  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2296  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2297  } else {
2298  throw vpException(vpTrackingException::initializationError,
2299  "Missing image or missing camera transformation "
2300  "matrix! Cannot init the pose for camera: %s!",
2301  it->c_str());
2302  }
2303  }
2304 }
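/*
  Usage sketch (names are placeholders): as for initClick() above, only the cameras listed in
  mapOfInitPoints are initialized from their 2D-3D point files; the remaining ones inherit the
  reference pose through their camera transformation matrices.

    std::map<std::string, const vpImage<unsigned char> *> images;
    images["Camera1"] = &I1;
    images["Camera2"] = &I2;

    std::map<std::string, std::string> initPointFiles;
    initPointFiles["Camera1"] = "cube.initpoints";

    tracker.initFromPoints(images, initPointFiles);
*/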
2305 
2306 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2307  const std::map<std::string, std::string> &mapOfInitPoints)
2308 {
2309  // Set the reference cMo
2310  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2311  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2312  mapOfColorImages.find(m_referenceCameraName);
2313  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2314 
2315  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2316  TrackerWrapper *tracker = it_tracker->second;
2317  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2318  tracker->getPose(m_cMo);
2319  } else {
2320  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2321  }
2322 
2323  // Vector of missing initPoints for cameras
2324  std::vector<std::string> vectorOfMissingCameraPoints;
2325 
2326  // Set pose for the specified cameras
2327  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2328  it_img = mapOfColorImages.find(it_tracker->first);
2329  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2330 
2331  if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2332  // Set pose
2333  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2334  } else {
2335  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2336  }
2337  }
2338 
2339  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2340  it != vectorOfMissingCameraPoints.end(); ++it) {
2341  it_img = mapOfColorImages.find(*it);
2342  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2343  m_mapOfCameraTransformationMatrix.find(*it);
2344 
2345  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2346  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2347  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2348  } else {
2350  "Missing image or missing camera transformation "
2351  "matrix! Cannot init the pose for camera: %s!",
2352  it->c_str());
2353  }
2354  }
2355 }
2356 
2357 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo)
2358 {
2359  vpMbTracker::initFromPose(I, cMo);
2360 }
2361 
2373 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2374  const std::string &initFile1, const std::string &initFile2)
2375 {
2376  if (m_mapOfTrackers.size() == 2) {
2377  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2378  TrackerWrapper *tracker = it->second;
2379  tracker->initFromPose(I1, initFile1);
2380 
2381  ++it;
2382 
2383  tracker = it->second;
2384  tracker->initFromPose(I2, initFile2);
2385 
2386  it = m_mapOfTrackers.find(m_referenceCameraName);
2387  if (it != m_mapOfTrackers.end()) {
2388  tracker = it->second;
2389 
2390  // Set the reference cMo
2391  tracker->getPose(m_cMo);
2392 
2393  // Set the reference camera parameters
2394  tracker->getCameraParameters(m_cam);
2395  }
2396  } else {
2398  "Cannot initFromPose()! Require two cameras but there "
2399  "are %d cameras!",
2400  m_mapOfTrackers.size());
2401  }
2402 }
2403 
2415 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2416  const std::string &initFile1, const std::string &initFile2)
2417 {
2418  if (m_mapOfTrackers.size() == 2) {
2419  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2420  TrackerWrapper *tracker = it->second;
2421  tracker->initFromPose(I_color1, initFile1);
2422 
2423  ++it;
2424 
2425  tracker = it->second;
2426  tracker->initFromPose(I_color2, initFile2);
2427 
2428  it = m_mapOfTrackers.find(m_referenceCameraName);
2429  if (it != m_mapOfTrackers.end()) {
2430  tracker = it->second;
2431 
2432  // Set the reference cMo
2433  tracker->getPose(m_cMo);
2434 
2435  // Set the reference camera parameters
2436  tracker->getCameraParameters(m_cam);
2437  }
2438  } else {
2440  "Cannot initFromPose()! Require two cameras but there "
2441  "are %d cameras!",
2442  m_mapOfTrackers.size());
2443  }
2444 }
2445 
2460 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2461  const std::map<std::string, std::string> &mapOfInitPoses)
2462 {
2463  // Set the reference cMo
2464  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2465  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2466  mapOfImages.find(m_referenceCameraName);
2467  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2468 
2469  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2470  TrackerWrapper *tracker = it_tracker->second;
2471  tracker->initFromPose(*it_img->second, it_initPose->second);
2472  tracker->getPose(m_cMo);
2473  } else {
2474  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2475  }
2476 
2477  // Vector of missing pose matrices for cameras
2478  std::vector<std::string> vectorOfMissingCameraPoses;
2479 
2480  // Set pose for the specified cameras
2481  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2482  it_img = mapOfImages.find(it_tracker->first);
2483  it_initPose = mapOfInitPoses.find(it_tracker->first);
2484 
2485  if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2486  // Set pose
2487  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2488  } else {
2489  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2490  }
2491  }
2492 
2493  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2494  it != vectorOfMissingCameraPoses.end(); ++it) {
2495  it_img = mapOfImages.find(*it);
2496  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2497  m_mapOfCameraTransformationMatrix.find(*it);
2498 
2499  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2500  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2501  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2502  } else {
2504  "Missing image or missing camera transformation "
2505  "matrix! Cannot init the pose for camera: %s!",
2506  it->c_str());
2507  }
2508  }
2509 }
2510 
2525 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2526  const std::map<std::string, std::string> &mapOfInitPoses)
2527 {
2528  // Set the reference cMo
2529  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2530  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2531  mapOfColorImages.find(m_referenceCameraName);
2532  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2533 
2534  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2535  TrackerWrapper *tracker = it_tracker->second;
2536  tracker->initFromPose(*it_img->second, it_initPose->second);
2537  tracker->getPose(m_cMo);
2538  } else {
2539  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2540  }
2541 
2542  // Vector of missing pose matrices for cameras
2543  std::vector<std::string> vectorOfMissingCameraPoses;
2544 
2545  // Set pose for the specified cameras
2546  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2547  it_img = mapOfColorImages.find(it_tracker->first);
2548  it_initPose = mapOfInitPoses.find(it_tracker->first);
2549 
2550  if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2551  // Set pose
2552  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2553  } else {
2554  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2555  }
2556  }
2557 
2558  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2559  it != vectorOfMissingCameraPoses.end(); ++it) {
2560  it_img = mapOfColorImages.find(*it);
2561  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2562  m_mapOfCameraTransformationMatrix.find(*it);
2563 
2564  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2565  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2566  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2567  } else {
2569  "Missing image or missing camera transformation "
2570  "matrix! Cannot init the pose for camera: %s!",
2571  it->c_str());
2572  }
2573  }
2574 }
2575 
2586 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2587  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2588 {
2589  if (m_mapOfTrackers.size() == 2) {
2590  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2591  it->second->initFromPose(I1, c1Mo);
2592 
2593  ++it;
2594 
2595  it->second->initFromPose(I2, c2Mo);
2596 
2597  m_cMo = c1Mo;
2598  } else {
2600  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2601  }
2602 }
2603 
2614 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2615  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2616 {
2617  if (m_mapOfTrackers.size() == 2) {
2618  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2619  it->second->initFromPose(I_color1, c1Mo);
2620 
2621  ++it;
2622 
2623  it->second->initFromPose(I_color2, c2Mo);
2624 
2625  m_cMo = c1Mo;
2626  } else {
2628  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2629  }
2630 }
2631 
2645 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2646  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2647 {
2648  // Set the reference cMo
2649  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2650  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2651  mapOfImages.find(m_referenceCameraName);
2652  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2653 
2654  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2655  TrackerWrapper *tracker = it_tracker->second;
2656  tracker->initFromPose(*it_img->second, it_camPose->second);
2657  tracker->getPose(m_cMo);
2658  } else {
2659  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2660  }
2661 
2662  // Vector of missing pose matrices for cameras
2663  std::vector<std::string> vectorOfMissingCameraPoses;
2664 
2665  // Set pose for the specified cameras
2666  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2667  it_img = mapOfImages.find(it_tracker->first);
2668  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2669 
2670  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2671  // Set pose
2672  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2673  } else {
2674  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2675  }
2676  }
2677 
2678  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2679  it != vectorOfMissingCameraPoses.end(); ++it) {
2680  it_img = mapOfImages.find(*it);
2681  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2682  m_mapOfCameraTransformationMatrix.find(*it);
2683 
2684  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2685  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2686  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2687  } else {
2689  "Missing image or missing camera transformation "
2690  "matrix! Cannot set the pose for camera: %s!",
2691  it->c_str());
2692  }
2693  }
2694 }
2695 
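/*
  Illustrative usage sketch for the map-based initFromPose() overload above, assuming the default
  two-camera names and a known initial pose of the object in each camera frame; the model file and
  pose values are placeholders.
  \code
  #include <map>
  #include <string>

  #include <visp3/core/vpHomogeneousMatrix.h>
  #include <visp3/core/vpImage.h>
  #include <visp3/core/vpMath.h>
  #include <visp3/mbt/vpMbGenericTracker.h>

  int main()
  {
    vpMbGenericTracker tracker(2, vpMbGenericTracker::EDGE_TRACKER);
    tracker.loadModel("object.cao", true); // placeholder CAD model

    vpImage<unsigned char> I1(480, 640), I2(480, 640);
    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
    mapOfImages["Camera1"] = &I1;
    mapOfImages["Camera2"] = &I2;

    std::map<std::string, vpHomogeneousMatrix> mapOfCameraPoses;
    mapOfCameraPoses["Camera1"] = vpHomogeneousMatrix(0, 0, 0.5, 0, 0, 0);                // c1Mo (placeholder)
    mapOfCameraPoses["Camera2"] = vpHomogeneousMatrix(0.1, 0, 0.5, 0, vpMath::rad(5), 0); // c2Mo (placeholder)

    tracker.initFromPose(mapOfImages, mapOfCameraPoses);
    return 0;
  }
  \endcode
*/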
2709 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2710  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2711 {
2712  // Set the reference cMo
2713  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2714  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
2715  mapOfColorImages.find(m_referenceCameraName);
2716  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2717 
2718  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2719  TrackerWrapper *tracker = it_tracker->second;
2720  tracker->initFromPose(*it_img->second, it_camPose->second);
2721  tracker->getPose(m_cMo);
2722  } else {
2723  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2724  }
2725 
2726  // Vector of missing pose matrices for cameras
2727  std::vector<std::string> vectorOfMissingCameraPoses;
2728 
2729  // Set pose for the specified cameras
2730  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2731  it_img = mapOfColorImages.find(it_tracker->first);
2732  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2733 
2734  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2735  // Set pose
2736  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2737  } else {
2738  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2739  }
2740  }
2741 
2742  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2743  it != vectorOfMissingCameraPoses.end(); ++it) {
2744  it_img = mapOfColorImages.find(*it);
2745  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2746  m_mapOfCameraTransformationMatrix.find(*it);
2747 
2748  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2749  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2750  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2751  } else {
2753  "Missing image or missing camera transformation "
2754  "matrix! Cannot set the pose for camera: %s!",
2755  it->c_str());
2756  }
2757  }
2758 }
2759 
2770 void vpMbGenericTracker::loadConfigFile(const std::string &configFile)
2771 {
2772  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2773  it != m_mapOfTrackers.end(); ++it) {
2774  TrackerWrapper *tracker = it->second;
2775  tracker->loadConfigFile(configFile);
2776  }
2777 
2778  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
2779  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2780  }
2781 
2782  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2783  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2784  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2785  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2786 }
2787 
2801 void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2)
2802 {
2803  if (m_mapOfTrackers.size() != 2) {
2804  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
2805  }
2806 
2807  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2808  TrackerWrapper *tracker = it_tracker->second;
2809  tracker->loadConfigFile(configFile1);
2810 
2811  ++it_tracker;
2812  tracker = it_tracker->second;
2813  tracker->loadConfigFile(configFile2);
2814 
2815  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
2816  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2817  }
2818 
2819  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2820  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2821  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2822  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2823 }
2824 
2837 void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles)
2838 {
2839  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2840  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2841  TrackerWrapper *tracker = it_tracker->second;
2842 
2843  std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
2844  if (it_config != mapOfConfigFiles.end()) {
2845  tracker->loadConfigFile(it_config->second);
2846  } else {
2847  throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
2848  it_tracker->first.c_str());
2849  }
2850  }
2851 
2852  // Set the reference camera parameters
2853  std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
2854  if (it != m_mapOfTrackers.end()) {
2855  TrackerWrapper *tracker = it->second;
2856  tracker->getCameraParameters(m_cam);
2857 
2858  // Set clipping
2859  this->clippingFlag = tracker->getClipping();
2860  this->angleAppears = tracker->getAngleAppear();
2861  this->angleDisappears = tracker->getAngleDisappear();
2862  } else {
2863  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
2864  m_referenceCameraName.c_str());
2865  }
2866 }
2867 
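/*
  Illustrative usage sketch for the map-based loadConfigFile() overload above; one configuration
  file per camera is required, otherwise an exception is thrown. The XML file names are
  placeholders.
  \code
  #include <map>
  #include <string>

  #include <visp3/mbt/vpMbGenericTracker.h>

  int main()
  {
    vpMbGenericTracker tracker(2, vpMbGenericTracker::EDGE_TRACKER);

    std::map<std::string, std::string> mapOfConfigFiles;
    mapOfConfigFiles["Camera1"] = "camera_left.xml";  // placeholder per-camera settings
    mapOfConfigFiles["Camera2"] = "camera_right.xml";

    tracker.loadConfigFile(mapOfConfigFiles);
    return 0;
  }
  \endcode
  The camera parameters, clipping flags and appearance angles of the reference camera are then
  mirrored into the generic tracker, as done in the code above.
*/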
2898 void vpMbGenericTracker::loadModel(const std::string &modelFile, bool verbose, const vpHomogeneousMatrix &T)
2899 {
2900  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2901  it != m_mapOfTrackers.end(); ++it) {
2902  TrackerWrapper *tracker = it->second;
2903  tracker->loadModel(modelFile, verbose, T);
2904  }
2905 }
2906 
2941 void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, bool verbose,
2942  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
2943 {
2944  if (m_mapOfTrackers.size() != 2) {
2945  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
2946  }
2947 
2948  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2949  TrackerWrapper *tracker = it_tracker->second;
2950  tracker->loadModel(modelFile1, verbose, T1);
2951 
2952  ++it_tracker;
2953  tracker = it_tracker->second;
2954  tracker->loadModel(modelFile2, verbose, T2);
2955 }
2956 
2988 void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, bool verbose,
2989  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2990 {
2991  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2992  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2993  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
2994 
2995  if (it_model != mapOfModelFiles.end()) {
2996  TrackerWrapper *tracker = it_tracker->second;
2997  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2998 
2999  if (it_T != mapOfT.end())
3000  tracker->loadModel(it_model->second, verbose, it_T->second);
3001  else
3002  tracker->loadModel(it_model->second, verbose);
3003  } else {
3004  throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
3005  it_tracker->first.c_str());
3006  }
3007  }
3008 }
3009 
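/*
  Illustrative usage sketch for the map-based loadModel() overload above; the *.cao file names are
  placeholders and no extra per-camera transformation is applied (an empty map is passed for the
  optional vpHomogeneousMatrix map).
  \code
  #include <map>
  #include <string>

  #include <visp3/core/vpHomogeneousMatrix.h>
  #include <visp3/mbt/vpMbGenericTracker.h>

  int main()
  {
    vpMbGenericTracker tracker(2, vpMbGenericTracker::EDGE_TRACKER);

    std::map<std::string, std::string> mapOfModelFiles;
    mapOfModelFiles["Camera1"] = "object.cao"; // placeholder CAD models
    mapOfModelFiles["Camera2"] = "object.cao";

    tracker.loadModel(mapOfModelFiles, true, std::map<std::string, vpHomogeneousMatrix>());
    return 0;
  }
  \endcode
*/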
3010 #ifdef VISP_HAVE_PCL
3011 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3012  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3013 {
3014  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3015  it != m_mapOfTrackers.end(); ++it) {
3016  TrackerWrapper *tracker = it->second;
3017  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3018  }
3019 }
3020 #endif
3021 
3022 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3023  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3024  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3025  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3026 {
3027  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3028  it != m_mapOfTrackers.end(); ++it) {
3029  TrackerWrapper *tracker = it->second;
3030  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3031  mapOfPointCloudHeights[it->first]);
3032  }
3033 }
3034 
3046 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
3047  const vpHomogeneousMatrix &cMo, bool verbose,
3048  const vpHomogeneousMatrix &T)
3049 {
3050  if (m_mapOfTrackers.size() != 1) {
3051  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3052  m_mapOfTrackers.size());
3053  }
3054 
3055  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3056  if (it_tracker != m_mapOfTrackers.end()) {
3057  TrackerWrapper *tracker = it_tracker->second;
3058  tracker->reInitModel(I, cad_name, cMo, verbose, T);
3059 
3060  // Set reference pose
3061  tracker->getPose(m_cMo);
3062  } else {
3063  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3064  }
3065 
3066  modelInitialised = true;
3067 }
3068 
3080 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
3081  const vpHomogeneousMatrix &cMo, bool verbose,
3082  const vpHomogeneousMatrix &T)
3083 {
3084  if (m_mapOfTrackers.size() != 1) {
3085  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3086  m_mapOfTrackers.size());
3087  }
3088 
3089  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3090  if (it_tracker != m_mapOfTrackers.end()) {
3091  TrackerWrapper *tracker = it_tracker->second;
3092  tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3093 
3094  // Set reference pose
3095  tracker->getPose(m_cMo);
3096  } else {
3097  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3098  }
3099 
3100  modelInitialised = true;
3101 }
3102 
3123 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
3124  const std::string &cad_name1, const std::string &cad_name2,
3125  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
3126  bool verbose,
3127  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3128 {
3129  if (m_mapOfTrackers.size() == 2) {
3130  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3131 
3132  it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3133 
3134  ++it_tracker;
3135 
3136  it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3137 
3138  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3139  if (it_tracker != m_mapOfTrackers.end()) {
3140  // Set reference pose
3141  it_tracker->second->getPose(m_cMo);
3142  }
3143  } else {
3144  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3145  }
3146 
3147  modelInitialised = true;
3148 }
3149 
3170 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
3171  const std::string &cad_name1, const std::string &cad_name2,
3172  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
3173  bool verbose,
3174  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3175 {
3176  if (m_mapOfTrackers.size() == 2) {
3177  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3178 
3179  it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3180 
3181  ++it_tracker;
3182 
3183  it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3184 
3185  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3186  if (it_tracker != m_mapOfTrackers.end()) {
3187  // Set reference pose
3188  it_tracker->second->getPose(m_cMo);
3189  }
3190  } else {
3191  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3192  }
3193 
3194  modelInitialised = true;
3195 }
3196 
3211 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3212  const std::map<std::string, std::string> &mapOfModelFiles,
3213  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
3214  bool verbose,
3215  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3216 {
3217  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3218  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3219  mapOfImages.find(m_referenceCameraName);
3220  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3221  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3222 
3223  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3224  it_camPose != mapOfCameraPoses.end()) {
3225  TrackerWrapper *tracker = it_tracker->second;
3226  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3227  if (it_T != mapOfT.end())
3228  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3229  else
3230  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3231 
3232  // Set reference pose
3233  tracker->getPose(m_cMo);
3234  } else {
3235  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3236  }
3237 
3238  std::vector<std::string> vectorOfMissingCameras;
3239  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3240  if (it_tracker->first != m_referenceCameraName) {
3241  it_img = mapOfImages.find(it_tracker->first);
3242  it_model = mapOfModelFiles.find(it_tracker->first);
3243  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3244 
3245  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3246  TrackerWrapper *tracker = it_tracker->second;
3247  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3248  } else {
3249  vectorOfMissingCameras.push_back(it_tracker->first);
3250  }
3251  }
3252  }
3253 
3254  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3255  ++it) {
3256  it_img = mapOfImages.find(*it);
3257  it_model = mapOfModelFiles.find(*it);
3258  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3259  m_mapOfCameraTransformationMatrix.find(*it);
3260 
3261  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3262  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3263  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3264  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3265  }
3266  }
3267 
3268  modelInitialised = true;
3269 }
3270 
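/*
  Illustrative usage sketch for the map-based reInitModel() overload above: the tracked object is
  replaced at run time by a new CAD model whose pose is known in each camera frame. Model files,
  poses and image sizes are placeholders; the optional map of transformations is passed explicitly
  as an empty map.
  \code
  #include <map>
  #include <string>

  #include <visp3/core/vpHomogeneousMatrix.h>
  #include <visp3/core/vpImage.h>
  #include <visp3/mbt/vpMbGenericTracker.h>

  int main()
  {
    vpMbGenericTracker tracker(2, vpMbGenericTracker::EDGE_TRACKER);
    tracker.loadModel("object.cao", false); // placeholder initial model

    vpImage<unsigned char> I1(480, 640), I2(480, 640);
    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
    mapOfImages["Camera1"] = &I1;
    mapOfImages["Camera2"] = &I2;

    std::map<std::string, std::string> mapOfModelFiles;
    mapOfModelFiles["Camera1"] = "new_object.cao"; // placeholder replacement model
    mapOfModelFiles["Camera2"] = "new_object.cao";

    std::map<std::string, vpHomogeneousMatrix> mapOfCameraPoses;
    mapOfCameraPoses["Camera1"] = vpHomogeneousMatrix(0, 0, 0.5, 0, 0, 0);   // placeholder poses
    mapOfCameraPoses["Camera2"] = vpHomogeneousMatrix(0.1, 0, 0.5, 0, 0, 0);

    tracker.reInitModel(mapOfImages, mapOfModelFiles, mapOfCameraPoses, false,
                        std::map<std::string, vpHomogeneousMatrix>());
    return 0;
  }
  \endcode
*/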
3285 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3286  const std::map<std::string, std::string> &mapOfModelFiles,
3287  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
3288  bool verbose,
3289  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3290 {
3291  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3292  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
3293  mapOfColorImages.find(m_referenceCameraName);
3294  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3295  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3296 
3297  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3298  it_camPose != mapOfCameraPoses.end()) {
3299  TrackerWrapper *tracker = it_tracker->second;
3300  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3301  if (it_T != mapOfT.end())
3302  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3303  else
3304  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3305 
3306  // Set reference pose
3307  tracker->getPose(m_cMo);
3308  } else {
3309  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3310  }
3311 
3312  std::vector<std::string> vectorOfMissingCameras;
3313  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3314  if (it_tracker->first != m_referenceCameraName) {
3315  it_img = mapOfColorImages.find(it_tracker->first);
3316  it_model = mapOfModelFiles.find(it_tracker->first);
3317  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3318 
3319  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3320  TrackerWrapper *tracker = it_tracker->second;
3321  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3322  } else {
3323  vectorOfMissingCameras.push_back(it_tracker->first);
3324  }
3325  }
3326  }
3327 
3328  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3329  ++it) {
3330  it_img = mapOfColorImages.find(*it);
3331  it_model = mapOfModelFiles.find(*it);
3332  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3333  m_mapOfCameraTransformationMatrix.find(*it);
3334 
3335  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3336  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3337  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3338  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3339  }
3340  }
3341 
3342  modelInitialised = true;
3343 }
3344 
3349 void vpMbGenericTracker::resetTracker()
3350 {
3351  m_cMo.eye();
3352 
3353  useScanLine = false;
3354 
3355 #ifdef VISP_HAVE_OGRE
3356  useOgre = false;
3357 #endif
3358 
3359  m_computeInteraction = true;
3360  m_lambda = 1.0;
3361 
3362  angleAppears = vpMath::rad(89);
3363  angleDisappears = vpMath::rad(89);
3364  clippingFlag = vpPolygon3D::NO_CLIPPING;
3365  distNearClip = 0.001;
3366  distFarClip = 100;
3367 
3368  m_optimizationMethod = vpMbTracker::GAUSS_NEWTON_OPT;
3369  m_maxIter = 30;
3370  m_stopCriteriaEpsilon = 1e-8;
3371  m_initialMu = 0.01;
3372 
3373  // Only for Edge
3374  m_percentageGdPt = 0.4;
3375 
3376  // Only for KLT
3377  m_thresholdOutlier = 0.5;
3378 
3379  // Reset default ponderation between each feature type
3380  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
3381 
3382 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3383  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
3384 #endif
3385 
3386  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
3387  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
3388 
3389  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3390  it != m_mapOfTrackers.end(); ++it) {
3391  TrackerWrapper *tracker = it->second;
3392  tracker->resetTracker();
3393  }
3394 }
3395 
3405 void vpMbGenericTracker::setAngleAppear(const double &a)
3406 {
3407  vpMbTracker::setAngleAppear(a);
3408 
3409  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3410  it != m_mapOfTrackers.end(); ++it) {
3411  TrackerWrapper *tracker = it->second;
3412  tracker->setAngleAppear(a);
3413  }
3414 }
3415 
3428 void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3429 {
3430  if (m_mapOfTrackers.size() == 2) {
3431  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3432  it->second->setAngleAppear(a1);
3433 
3434  ++it;
3435  it->second->setAngleAppear(a2);
3436 
3437  it = m_mapOfTrackers.find(m_referenceCameraName);
3438  if (it != m_mapOfTrackers.end()) {
3439  angleAppears = it->second->getAngleAppear();
3440  } else {
3441  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3442  }
3443  } else {
3444  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3445  m_mapOfTrackers.size());
3446  }
3447 }
3448 
3458 void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3459 {
3460  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3461  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3462 
3463  if (it_tracker != m_mapOfTrackers.end()) {
3464  TrackerWrapper *tracker = it_tracker->second;
3465  tracker->setAngleAppear(it->second);
3466 
3467  if (it->first == m_referenceCameraName) {
3468  angleAppears = it->second;
3469  }
3470  }
3471  }
3472 }
3473 
3483 void vpMbGenericTracker::setAngleDisappear(const double &a)
3484 {
3485  vpMbTracker::setAngleDisappear(a);
3486 
3487  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3488  it != m_mapOfTrackers.end(); ++it) {
3489  TrackerWrapper *tracker = it->second;
3490  tracker->setAngleDisappear(a);
3491  }
3492 }
3493 
3506 void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3507 {
3508  if (m_mapOfTrackers.size() == 2) {
3509  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3510  it->second->setAngleDisappear(a1);
3511 
3512  ++it;
3513  it->second->setAngleDisappear(a2);
3514 
3515  it = m_mapOfTrackers.find(m_referenceCameraName);
3516  if (it != m_mapOfTrackers.end()) {
3517  angleDisappears = it->second->getAngleDisappear();
3518  } else {
3519  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3520  }
3521  } else {
3522  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3523  m_mapOfTrackers.size());
3524  }
3525 }
3526 
3536 void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3537 {
3538  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3539  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3540 
3541  if (it_tracker != m_mapOfTrackers.end()) {
3542  TrackerWrapper *tracker = it_tracker->second;
3543  tracker->setAngleDisappear(it->second);
3544 
3545  if (it->first == m_referenceCameraName) {
3546  angleDisappears = it->second;
3547  }
3548  }
3549  }
3550 }
3551 
3557 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera)
3558 {
3559  vpMbTracker::setCameraParameters(camera);
3560 
3561  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3562  it != m_mapOfTrackers.end(); ++it) {
3563  TrackerWrapper *tracker = it->second;
3564  tracker->setCameraParameters(camera);
3565  }
3566 }
3567 
3576 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera1, const vpCameraParameters &camera2)
3577 {
3578  if (m_mapOfTrackers.size() == 2) {
3579  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3580  it->second->setCameraParameters(camera1);
3581 
3582  ++it;
3583  it->second->setCameraParameters(camera2);
3584 
3585  it = m_mapOfTrackers.find(m_referenceCameraName);
3586  if (it != m_mapOfTrackers.end()) {
3587  it->second->getCameraParameters(m_cam);
3588  } else {
3589  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3590  }
3591  } else {
3592  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3593  m_mapOfTrackers.size());
3594  }
3595 }
3596 
3605 void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3606 {
3607  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3608  it != mapOfCameraParameters.end(); ++it) {
3609  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3610 
3611  if (it_tracker != m_mapOfTrackers.end()) {
3612  TrackerWrapper *tracker = it_tracker->second;
3613  tracker->setCameraParameters(it->second);
3614 
3615  if (it->first == m_referenceCameraName) {
3616  m_cam = it->second;
3617  }
3618  }
3619  }
3620 }
3621 
3630 void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3631  const vpHomogeneousMatrix &cameraTransformationMatrix)
3632 {
3633  std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3634 
3635  if (it != m_mapOfCameraTransformationMatrix.end()) {
3636  it->second = cameraTransformationMatrix;
3637  } else {
3638  throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3639  }
3640 }
3641 
3649 void vpMbGenericTracker::setCameraTransformationMatrix(
3650  const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3651 {
3652  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3653  it != mapOfTransformationMatrix.end(); ++it) {
3654  std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3655  m_mapOfCameraTransformationMatrix.find(it->first);
3656 
3657  if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3658  it_camTrans->second = it->second;
3659  }
3660  }
3661 }
3662 
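/*
  Illustrative usage sketch for the map-based setCameraTransformationMatrix() overload above. Each
  matrix gives the transformation from the reference camera frame to the named camera frame; the
  identity is used for the reference camera itself and the c2Mc1 value below is a placeholder that
  would normally come from an extrinsic calibration.
  \code
  #include <map>
  #include <string>

  #include <visp3/core/vpHomogeneousMatrix.h>
  #include <visp3/core/vpMath.h>
  #include <visp3/mbt/vpMbGenericTracker.h>

  int main()
  {
    vpMbGenericTracker tracker(2, vpMbGenericTracker::EDGE_TRACKER);

    vpHomogeneousMatrix c2Mc1(0.1, 0, 0, 0, vpMath::rad(10), 0); // placeholder extrinsic transformation

    std::map<std::string, vpHomogeneousMatrix> mapOfCameraTransformations;
    mapOfCameraTransformations["Camera1"] = vpHomogeneousMatrix(); // identity for the reference camera
    mapOfCameraTransformations["Camera2"] = c2Mc1;

    tracker.setCameraTransformationMatrix(mapOfCameraTransformations);
    return 0;
  }
  \endcode
*/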
3672 void vpMbGenericTracker::setClipping(const unsigned int &flags)
3673 {
3674  vpMbTracker::setClipping(flags);
3675 
3676  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3677  it != m_mapOfTrackers.end(); ++it) {
3678  TrackerWrapper *tracker = it->second;
3679  tracker->setClipping(flags);
3680  }
3681 }
3682 
3693 void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3694 {
3695  if (m_mapOfTrackers.size() == 2) {
3696  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3697  it->second->setClipping(flags1);
3698 
3699  ++it;
3700  it->second->setClipping(flags2);
3701 
3703  if (it != m_mapOfTrackers.end()) {
3704  clippingFlag = it->second->getClipping();
3705  } else {
3706  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3707  }
3708  } else {
3709  std::stringstream ss;
3710  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3711  throw vpException(vpTrackingException::fatalError, ss.str());
3712  }
3713 }
3714 
3722 void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
3723 {
3724  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
3725  it != mapOfClippingFlags.end(); ++it) {
3726  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3727 
3728  if (it_tracker != m_mapOfTrackers.end()) {
3729  TrackerWrapper *tracker = it_tracker->second;
3730  tracker->setClipping(it->second);
3731 
3732  if (it->first == m_referenceCameraName) {
3733  clippingFlag = it->second;
3734  }
3735  }
3736  }
3737 }
3738 
3748 void vpMbGenericTracker::setDepthDenseFilteringMaxDistance(double maxDistance)
3749 {
3750  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3751  it != m_mapOfTrackers.end(); ++it) {
3752  TrackerWrapper *tracker = it->second;
3753  tracker->setDepthDenseFilteringMaxDistance(maxDistance);
3754  }
3755 }
3756 
3765 void vpMbGenericTracker::setDepthDenseFilteringMethod(int method)
3766 {
3767  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3768  it != m_mapOfTrackers.end(); ++it) {
3769  TrackerWrapper *tracker = it->second;
3770  tracker->setDepthDenseFilteringMethod(method);
3771  }
3772 }
3773 
3783 void vpMbGenericTracker::setDepthDenseFilteringMinDistance(double minDistance)
3784 {
3785  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3786  it != m_mapOfTrackers.end(); ++it) {
3787  TrackerWrapper *tracker = it->second;
3788  tracker->setDepthDenseFilteringMinDistance(minDistance);
3789  }
3790 }
3791 
3801 void vpMbGenericTracker::setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
3802 {
3803  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3804  it != m_mapOfTrackers.end(); ++it) {
3805  TrackerWrapper *tracker = it->second;
3806  tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
3807  }
3808 }
3809 
3818 void vpMbGenericTracker::setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
3819 {
3820  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3821  it != m_mapOfTrackers.end(); ++it) {
3822  TrackerWrapper *tracker = it->second;
3823  tracker->setDepthDenseSamplingStep(stepX, stepY);
3824  }
3825 }
3826 
3834 void vpMbGenericTracker::setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
3835 {
3836  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3837  it != m_mapOfTrackers.end(); ++it) {
3838  TrackerWrapper *tracker = it->second;
3839  tracker->setDepthNormalFaceCentroidMethod(method);
3840  }
3841 }
3842 
3851 void vpMbGenericTracker::setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
3852 {
3853  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3854  it != m_mapOfTrackers.end(); ++it) {
3855  TrackerWrapper *tracker = it->second;
3856  tracker->setDepthNormalFeatureEstimationMethod(method);
3857  }
3858 }
3859 
3867 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationMethod(int method)
3868 {
3869  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3870  it != m_mapOfTrackers.end(); ++it) {
3871  TrackerWrapper *tracker = it->second;
3872  tracker->setDepthNormalPclPlaneEstimationMethod(method);
3873  }
3874 }
3875 
3883 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
3884 {
3885  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3886  it != m_mapOfTrackers.end(); ++it) {
3887  TrackerWrapper *tracker = it->second;
3888  tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
3889  }
3890 }
3891 
3899 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacThreshold(double thresold)
3900 {
3901  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3902  it != m_mapOfTrackers.end(); ++it) {
3903  TrackerWrapper *tracker = it->second;
3904  tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
3905  }
3906 }
3907 
3916 void vpMbGenericTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
3917 {
3918  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3919  it != m_mapOfTrackers.end(); ++it) {
3920  TrackerWrapper *tracker = it->second;
3921  tracker->setDepthNormalSamplingStep(stepX, stepY);
3922  }
3923 }
3924 
3943 void vpMbGenericTracker::setDisplayFeatures(bool displayF)
3944 {
3945  vpMbTracker::setDisplayFeatures(displayF);
3946 
3947  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3948  it != m_mapOfTrackers.end(); ++it) {
3949  TrackerWrapper *tracker = it->second;
3950  tracker->setDisplayFeatures(displayF);
3951  }
3952 }
3953 
3961 void vpMbGenericTracker::setFarClippingDistance(const double &dist)
3962 {
3963  vpMbTracker::setFarClippingDistance(dist);
3964 
3965  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3966  it != m_mapOfTrackers.end(); ++it) {
3967  TrackerWrapper *tracker = it->second;
3968  tracker->setFarClippingDistance(dist);
3969  }
3970 }
3971 
3980 void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
3981 {
3982  if (m_mapOfTrackers.size() == 2) {
3983  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3984  it->second->setFarClippingDistance(dist1);
3985 
3986  ++it;
3987  it->second->setFarClippingDistance(dist2);
3988 
3989  it = m_mapOfTrackers.find(m_referenceCameraName);
3990  if (it != m_mapOfTrackers.end()) {
3991  distFarClip = it->second->getFarClippingDistance();
3992  } else {
3993  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3994  }
3995  } else {
3996  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3997  m_mapOfTrackers.size());
3998  }
3999 }
4000 
4006 void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
4007 {
4008  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4009  ++it) {
4010  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4011 
4012  if (it_tracker != m_mapOfTrackers.end()) {
4013  TrackerWrapper *tracker = it_tracker->second;
4014  tracker->setFarClippingDistance(it->second);
4015 
4016  if (it->first == m_referenceCameraName) {
4017  distFarClip = it->second;
4018  }
4019  }
4020  }
4021 }
4022 
4029 void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
4030 {
4031  for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
4032  ++it) {
4033  std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4034  if (it_factor != mapOfFeatureFactors.end()) {
4035  it->second = it_factor->second;
4036  }
4037  }
4038 }
4039 
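/*
  Illustrative usage sketch for setFeatureFactors() above: the weight of each feature type in the
  global cost function can be tuned, here for an RGB-D style setup (edges on the first camera,
  dense depth on the second). The factor values are arbitrary examples.
  \code
  #include <map>
  #include <vector>

  #include <visp3/mbt/vpMbGenericTracker.h>

  int main()
  {
    std::vector<int> trackerTypes;
    trackerTypes.push_back(vpMbGenericTracker::EDGE_TRACKER);
    trackerTypes.push_back(vpMbGenericTracker::DEPTH_DENSE_TRACKER);
    vpMbGenericTracker tracker(trackerTypes);

    std::map<vpMbGenericTracker::vpTrackerType, double> mapOfFeatureFactors;
    mapOfFeatureFactors[vpMbGenericTracker::EDGE_TRACKER] = 1.0;
    mapOfFeatureFactors[vpMbGenericTracker::DEPTH_DENSE_TRACKER] = 0.5; // arbitrary down-weighting

    tracker.setFeatureFactors(mapOfFeatureFactors);
    return 0;
  }
  \endcode
*/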
4055 void vpMbGenericTracker::setGoodMovingEdgesRatioThreshold(double threshold)
4056 {
4057  m_percentageGdPt = threshold;
4058 
4059  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4060  it != m_mapOfTrackers.end(); ++it) {
4061  TrackerWrapper *tracker = it->second;
4062  tracker->setGoodMovingEdgesRatioThreshold(threshold);
4063  }
4064 }
4065 
4066 #ifdef VISP_HAVE_OGRE
4067 
4078 void vpMbGenericTracker::setGoodNbRayCastingAttemptsRatio(const double &ratio)
4079 {
4080  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4081  it != m_mapOfTrackers.end(); ++it) {
4082  TrackerWrapper *tracker = it->second;
4083  tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4084  }
4085 }
4086 
4098 void vpMbGenericTracker::setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
4099 {
4100  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4101  it != m_mapOfTrackers.end(); ++it) {
4102  TrackerWrapper *tracker = it->second;
4103  tracker->setNbRayCastingAttemptsForVisibility(attempts);
4104  }
4105 }
4106 #endif
4107 
4108 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4109 
4116 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t)
4117 {
4118  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4119  it != m_mapOfTrackers.end(); ++it) {
4120  TrackerWrapper *tracker = it->second;
4121  tracker->setKltOpencv(t);
4122  }
4123 }
4124 
4133 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t1, const vpKltOpencv &t2)
4134 {
4135  if (m_mapOfTrackers.size() == 2) {
4136  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4137  it->second->setKltOpencv(t1);
4138 
4139  ++it;
4140  it->second->setKltOpencv(t2);
4141  } else {
4142  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4143  m_mapOfTrackers.size());
4144  }
4145 }
4146 
4152 void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4153 {
4154  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4155  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4156 
4157  if (it_tracker != m_mapOfTrackers.end()) {
4158  TrackerWrapper *tracker = it_tracker->second;
4159  tracker->setKltOpencv(it->second);
4160  }
4161  }
4162 }
4163 
4171 void vpMbGenericTracker::setKltThresholdAcceptation(double th)
4172 {
4173  m_thresholdOutlier = th;
4174 
4175  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4176  it != m_mapOfTrackers.end(); ++it) {
4177  TrackerWrapper *tracker = it->second;
4178  tracker->setKltThresholdAcceptation(th);
4179  }
4180 }
4181 #endif
4182 
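/*
  Illustrative usage sketch for the KLT setters above, assuming ViSP was built with the KLT module
  and OpenCV. All parameter values are arbitrary examples; the same settings are propagated to
  every camera.
  \code
  #include <visp3/klt/vpKltOpencv.h>
  #include <visp3/mbt/vpMbGenericTracker.h>

  int main()
  {
    vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);

    vpKltOpencv klt;
    klt.setMaxFeatures(300);
    klt.setWindowSize(5);
    klt.setQuality(0.01);
    klt.setMinDistance(5);
    klt.setHarrisFreeParameter(0.01);
    klt.setBlockSize(3);
    klt.setPyramidLevels(3);

    tracker.setKltOpencv(klt);               // same detector settings for every camera
    tracker.setKltMaskBorder(5);             // erode the face mask near the model borders
    tracker.setKltThresholdAcceptation(0.7); // outlier rejection threshold (arbitrary value)
    return 0;
  }
  \endcode
*/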
4195 void vpMbGenericTracker::setLod(bool useLod, const std::string &name)
4196 {
4197  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4198  it != m_mapOfTrackers.end(); ++it) {
4199  TrackerWrapper *tracker = it->second;
4200  tracker->setLod(useLod, name);
4201  }
4202 }
4203 
4204 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4205 
4212 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4213 {
4214  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4215  it != m_mapOfTrackers.end(); ++it) {
4216  TrackerWrapper *tracker = it->second;
4217  tracker->setKltMaskBorder(e);
4218  }
4219 }
4220 
4229 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4230 {
4231  if (m_mapOfTrackers.size() == 2) {
4232  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4233  it->second->setKltMaskBorder(e1);
4234 
4235  ++it;
4236 
4237  it->second->setKltMaskBorder(e2);
4238  } else {
4239  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4240  m_mapOfTrackers.size());
4241  }
4242 }
4243 
4249 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4250 {
4251  for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4252  ++it) {
4253  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4254 
4255  if (it_tracker != m_mapOfTrackers.end()) {
4256  TrackerWrapper *tracker = it_tracker->second;
4257  tracker->setKltMaskBorder(it->second);
4258  }
4259  }
4260 }
4261 #endif
4262 
4268 void vpMbGenericTracker::setMask(const vpImage<bool> &mask)
4269 {
4270  vpMbTracker::setMask(mask);
4271 
4272  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4273  it != m_mapOfTrackers.end(); ++it) {
4274  TrackerWrapper *tracker = it->second;
4275  tracker->setMask(mask);
4276  }
4277 }
4278 
4279 
4291 void vpMbGenericTracker::setMinLineLengthThresh(double minLineLengthThresh, const std::string &name)
4292 {
4293  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4294  it != m_mapOfTrackers.end(); ++it) {
4295  TrackerWrapper *tracker = it->second;
4296  tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4297  }
4298 }
4299 
4310 void vpMbGenericTracker::setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name)
4311 {
4312  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4313  it != m_mapOfTrackers.end(); ++it) {
4314  TrackerWrapper *tracker = it->second;
4315  tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4316  }
4317 }
4318 
4326 void vpMbGenericTracker::setMovingEdge(const vpMe &me)
4327 {
4328  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4329  it != m_mapOfTrackers.end(); ++it) {
4330  TrackerWrapper *tracker = it->second;
4331  tracker->setMovingEdge(me);
4332  }
4333 }
4334 
4344 void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4345 {
4346  if (m_mapOfTrackers.size() == 2) {
4347  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4348  it->second->setMovingEdge(me1);
4349 
4350  ++it;
4351 
4352  it->second->setMovingEdge(me2);
4353  } else {
4354  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4355  m_mapOfTrackers.size());
4356  }
4357 }
4358 
4364 void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4365 {
4366  for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4367  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4368 
4369  if (it_tracker != m_mapOfTrackers.end()) {
4370  TrackerWrapper *tracker = it_tracker->second;
4371  tracker->setMovingEdge(it->second);
4372  }
4373  }
4374 }
4375 
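/*
  Illustrative usage sketch for the moving-edges setters above. The vpMe values are arbitrary
  examples of a typical configuration; the same settings are applied to every camera.
  \code
  #include <visp3/mbt/vpMbGenericTracker.h>
  #include <visp3/me/vpMe.h>

  int main()
  {
    vpMbGenericTracker tracker(2, vpMbGenericTracker::EDGE_TRACKER);

    vpMe me;
    me.setMaskSize(5);
    me.setMaskNumber(180);
    me.setRange(8);
    me.setThreshold(10000);
    me.setMu1(0.5);
    me.setMu2(0.5);
    me.setSampleStep(4);

    tracker.setMovingEdge(me);
    return 0;
  }
  \endcode
*/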
4383 void vpMbGenericTracker::setNearClippingDistance(const double &dist)
4384 {
4385  vpMbTracker::setNearClippingDistance(dist);
4386 
4387  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4388  it != m_mapOfTrackers.end(); ++it) {
4389  TrackerWrapper *tracker = it->second;
4390  tracker->setNearClippingDistance(dist);
4391  }
4392 }
4393 
4402 void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4403 {
4404  if (m_mapOfTrackers.size() == 2) {
4405  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4406  it->second->setNearClippingDistance(dist1);
4407 
4408  ++it;
4409 
4410  it->second->setNearClippingDistance(dist2);
4411 
4412  it = m_mapOfTrackers.find(m_referenceCameraName);
4413  if (it != m_mapOfTrackers.end()) {
4414  distNearClip = it->second->getNearClippingDistance();
4415  } else {
4416  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4417  }
4418  } else {
4419  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4420  m_mapOfTrackers.size());
4421  }
4422 }
4423 
4429 void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4430 {
4431  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4432  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4433 
4434  if (it_tracker != m_mapOfTrackers.end()) {
4435  TrackerWrapper *tracker = it_tracker->second;
4436  tracker->setNearClippingDistance(it->second);
4437 
4438  if (it->first == m_referenceCameraName) {
4439  distNearClip = it->second;
4440  }
4441  }
4442  }
4443 }
4444 
4457 void vpMbGenericTracker::setOgreShowConfigDialog(bool showConfigDialog)
4458 {
4459  vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4460 
4461  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4462  it != m_mapOfTrackers.end(); ++it) {
4463  TrackerWrapper *tracker = it->second;
4464  tracker->setOgreShowConfigDialog(showConfigDialog);
4465  }
4466 }
4467 
4478 void vpMbGenericTracker::setOgreVisibilityTest(const bool &v)
4479 {
4480  vpMbTracker::setOgreVisibilityTest(v);
4481 
4482  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4483  it != m_mapOfTrackers.end(); ++it) {
4484  TrackerWrapper *tracker = it->second;
4485  tracker->setOgreVisibilityTest(v);
4486  }
4487 
4488 #ifdef VISP_HAVE_OGRE
4489  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4490  it != m_mapOfTrackers.end(); ++it) {
4491  TrackerWrapper *tracker = it->second;
4492  tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4493  }
4494 #endif
4495 }
4496 
4504 void vpMbGenericTracker::setOptimizationMethod(const vpMbtOptimizationMethod &opt)
4505 {
4506  vpMbTracker::setOptimizationMethod(opt);
4507 
4508  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4509  it != m_mapOfTrackers.end(); ++it) {
4510  TrackerWrapper *tracker = it->second;
4511  tracker->setOptimizationMethod(opt);
4512  }
4513 }
4514 
4527 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
4528 {
4529  if (m_mapOfTrackers.size() > 1) {
4530  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4531  "to be configured with only one camera!");
4532  }
4533 
4534  m_cMo = cdMo;
4535 
4536  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4537  if (it != m_mapOfTrackers.end()) {
4538  TrackerWrapper *tracker = it->second;
4539  tracker->setPose(I, cdMo);
4540  } else {
4541  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4542  m_referenceCameraName.c_str());
4543  }
4544 }
4545 
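/*
  Illustrative usage sketch for the single-camera setPose() overload above, typically used to
  re-localise the tracker when the pose is recovered from another source (e.g. a detector). The
  model file, image size and pose values are placeholders.
  \code
  #include <visp3/core/vpHomogeneousMatrix.h>
  #include <visp3/core/vpImage.h>
  #include <visp3/mbt/vpMbGenericTracker.h>

  int main()
  {
    vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
    tracker.loadModel("object.cao", false); // placeholder model

    vpImage<unsigned char> I(480, 640);

    vpHomogeneousMatrix cdMo(0, 0, 0.4, 0, 0, 0); // placeholder pose of the object in the camera frame

    tracker.setPose(I, cdMo); // re-localise the tracker with the known pose
    tracker.track(I);         // tracking can then resume from this pose
    return 0;
  }
  \endcode
*/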
4558 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
4559 {
4560  if (m_mapOfTrackers.size() > 1) {
4561  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4562  "to be configured with only one camera!");
4563  }
4564 
4565  m_cMo = cdMo;
4566 
4567  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4568  if (it != m_mapOfTrackers.end()) {
4569  TrackerWrapper *tracker = it->second;
4570  vpImageConvert::convert(I_color, m_I);
4571  tracker->setPose(m_I, cdMo);
4572  } else {
4573  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4574  m_referenceCameraName.c_str());
4575  }
4576 }
4577 
4589 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
4590  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4591 {
4592  if (m_mapOfTrackers.size() == 2) {
4593  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4594  it->second->setPose(I1, c1Mo);
4595 
4596  ++it;
4597 
4598  it->second->setPose(I2, c2Mo);
4599 
4600  it = m_mapOfTrackers.find(m_referenceCameraName);
4601  if (it != m_mapOfTrackers.end()) {
4602  // Set reference pose
4603  it->second->getPose(m_cMo);
4604  } else {
4605  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4606  m_referenceCameraName.c_str());
4607  }
4608  } else {
4609  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4610  m_mapOfTrackers.size());
4611  }
4612 }
4613 
4625 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
4626  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4627 {
4628  if (m_mapOfTrackers.size() == 2) {
4629  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4630  it->second->setPose(I_color1, c1Mo);
4631 
4632  ++it;
4633 
4634  it->second->setPose(I_color2, c2Mo);
4635 
4636  it = m_mapOfTrackers.find(m_referenceCameraName);
4637  if (it != m_mapOfTrackers.end()) {
4638  // Set reference pose
4639  it->second->getPose(m_cMo);
4640  } else {
4641  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4642  m_referenceCameraName.c_str());
4643  }
4644  } else {
4645  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4646  m_mapOfTrackers.size());
4647  }
4648 }
4649 
4665 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4666  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4667 {
4668  // Set the reference cMo
4669  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4670  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4671  mapOfImages.find(m_referenceCameraName);
4672  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4673 
4674  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4675  TrackerWrapper *tracker = it_tracker->second;
4676  tracker->setPose(*it_img->second, it_camPose->second);
4677  tracker->getPose(m_cMo);
4678  } else {
4679  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4680  }
4681 
4682  // Vector of missing pose matrices for cameras
4683  std::vector<std::string> vectorOfMissingCameraPoses;
4684 
4685  // Set pose for the specified cameras
4686  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4687  if (it_tracker->first != m_referenceCameraName) {
4688  it_img = mapOfImages.find(it_tracker->first);
4689  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4690 
4691  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4692  // Set pose
4693  TrackerWrapper *tracker = it_tracker->second;
4694  tracker->setPose(*it_img->second, it_camPose->second);
4695  } else {
4696  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4697  }
4698  }
4699  }
4700 
4701  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4702  it != vectorOfMissingCameraPoses.end(); ++it) {
4703  it_img = mapOfImages.find(*it);
4704  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4705       m_mapOfCameraTransformationMatrix.find(*it);
4706 
4707  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4708  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
4709  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4710  } else {
4711       throw vpException(vpTrackingException::fatalError,
4712                         "Missing image or missing camera transformation "
4713  "matrix! Cannot set pose for camera: %s!",
4714  it->c_str());
4715  }
4716  }
4717 }
4718 
4734 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
4735  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4736 {
4737  // Set the reference cMo
4738  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4739  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
4740  mapOfColorImages.find(m_referenceCameraName);
4741  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4742 
4743  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
4744  TrackerWrapper *tracker = it_tracker->second;
4745  tracker->setPose(*it_img->second, it_camPose->second);
4746  tracker->getPose(m_cMo);
4747  } else {
4748  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4749  }
4750 
4751  // Vector of missing pose matrices for cameras
4752  std::vector<std::string> vectorOfMissingCameraPoses;
4753 
4754  // Set pose for the specified cameras
4755  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4756  if (it_tracker->first != m_referenceCameraName) {
4757  it_img = mapOfColorImages.find(it_tracker->first);
4758  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4759 
4760  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
4761  // Set pose
4762  TrackerWrapper *tracker = it_tracker->second;
4763  tracker->setPose(*it_img->second, it_camPose->second);
4764  } else {
4765  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4766  }
4767  }
4768  }
4769 
4770  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4771  it != vectorOfMissingCameraPoses.end(); ++it) {
4772  it_img = mapOfColorImages.find(*it);
4773  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4774       m_mapOfCameraTransformationMatrix.find(*it);
4775 
4776  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4777  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
4778  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4779  } else {
4780       throw vpException(vpTrackingException::fatalError,
4781                         "Missing image or missing camera transformation "
4782  "matrix! Cannot set pose for camera: %s!",
4783  it->c_str());
4784  }
4785  }
4786 }
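
/*
  Usage sketch (illustrative, not part of the original sources): with an arbitrary
  number of cameras, the map-based setPose() overloads are used. The pose of the
  reference camera is mandatory; a camera missing from mapOfCameraPoses gets its
  pose from the stored camera transformation matrix with respect to the reference
  camera. Assuming two cameras named "Camera1" and "Camera2":

    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
    std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
    mapOfImages["Camera1"] = &I1;  mapOfPoses["Camera1"] = c1Mo;
    mapOfImages["Camera2"] = &I2;  // pose deduced from the camera transformation matrix
    tracker.setPose(mapOfImages, mapOfPoses);
*/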
4787 
4802 void vpMbGenericTracker::setProjectionErrorComputation(const bool &flag)
4803 {
4804   vpMbTracker::setProjectionErrorComputation(flag);
4805 
4806  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4807  it != m_mapOfTrackers.end(); ++it) {
4808  TrackerWrapper *tracker = it->second;
4809  tracker->setProjectionErrorComputation(flag);
4810  }
4811 }
4812 
4816 void vpMbGenericTracker::setProjectionErrorDisplay(bool display)
4817 {
4818   vpMbTracker::setProjectionErrorDisplay(display);
4819 
4820  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4821  it != m_mapOfTrackers.end(); ++it) {
4822  TrackerWrapper *tracker = it->second;
4823  tracker->setProjectionErrorDisplay(display);
4824  }
4825 }
4826 
4830 void vpMbGenericTracker::setProjectionErrorDisplayArrowLength(unsigned int length)
4831 {
4832   vpMbTracker::setProjectionErrorDisplayArrowLength(length);
4833 
4834  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4835  it != m_mapOfTrackers.end(); ++it) {
4836  TrackerWrapper *tracker = it->second;
4837  tracker->setProjectionErrorDisplayArrowLength(length);
4838  }
4839 }
4840 
4841 void vpMbGenericTracker::setProjectionErrorDisplayArrowThickness(unsigned int thickness)
4842 {
4843   vpMbTracker::setProjectionErrorDisplayArrowThickness(thickness);
4844 
4845  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4846  it != m_mapOfTrackers.end(); ++it) {
4847  TrackerWrapper *tracker = it->second;
4848  tracker->setProjectionErrorDisplayArrowThickness(thickness);
4849  }
4850 }
4851 
4857 void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
4858 {
4859  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
4860  if (it != m_mapOfTrackers.end()) {
4861  m_referenceCameraName = referenceCameraName;
4862  } else {
4863     std::cerr << "The reference camera: " << referenceCameraName << " does not exist!" << std::endl;
4864  }
4865 }
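
/*
  Usage sketch (illustrative, not part of the original sources): the reference
  camera name must match one of the tracker names ("Camera" for a single camera,
  "Camera1", "Camera2", ... otherwise); an unknown name is only reported on
  std::cerr and the previous reference camera is kept. For instance:

    vpMbGenericTracker tracker(2, vpMbGenericTracker::EDGE_TRACKER);
    tracker.setReferenceCameraName("Camera2");
*/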
4866 
4867 void vpMbGenericTracker::setScanLineVisibilityTest(const bool &v)
4868 {
4869   vpMbTracker::setScanLineVisibilityTest(v);
4870 
4871  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4872  it != m_mapOfTrackers.end(); ++it) {
4873  TrackerWrapper *tracker = it->second;
4874  tracker->setScanLineVisibilityTest(v);
4875  }
4876 }
4877 
4889 void vpMbGenericTracker::setTrackerType(int type)
4890 {
4891  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4892  it != m_mapOfTrackers.end(); ++it) {
4893  TrackerWrapper *tracker = it->second;
4894  tracker->setTrackerType(type);
4895  }
4896 }
4897 
4907 void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
4908 {
4909  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
4910  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4911  if (it_tracker != m_mapOfTrackers.end()) {
4912  TrackerWrapper *tracker = it_tracker->second;
4913  tracker->setTrackerType(it->second);
4914  }
4915  }
4916 }
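
/*
  Usage sketch (illustrative, not part of the original sources): tracker types can
  be changed globally or per camera; camera names that are not found in the map are
  silently ignored. A per-camera sketch mixing edge and dense depth features:

    std::map<std::string, int> mapOfTrackerTypes;
    mapOfTrackerTypes["Camera1"] = vpMbGenericTracker::EDGE_TRACKER;
    mapOfTrackerTypes["Camera2"] = vpMbGenericTracker::DEPTH_DENSE_TRACKER;
    tracker.setTrackerType(mapOfTrackerTypes);
*/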
4917 
4927 void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
4928 {
4929  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4930  it != m_mapOfTrackers.end(); ++it) {
4931  TrackerWrapper *tracker = it->second;
4932  tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
4933  }
4934 }
4935 
4945 void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
4946 {
4947  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4948  it != m_mapOfTrackers.end(); ++it) {
4949  TrackerWrapper *tracker = it->second;
4950  tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
4951  }
4952 }
4953 
4963 void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
4964 {
4965  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4966  it != m_mapOfTrackers.end(); ++it) {
4967  TrackerWrapper *tracker = it->second;
4968  tracker->setUseEdgeTracking(name, useEdgeTracking);
4969  }
4970 }
4971 
4972 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4973 
4982 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
4983 {
4984  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4985  it != m_mapOfTrackers.end(); ++it) {
4986  TrackerWrapper *tracker = it->second;
4987  tracker->setUseKltTracking(name, useKltTracking);
4988  }
4989 }
4990 #endif
4991 
4992 void vpMbGenericTracker::testTracking()
4993 {
4994  // Test tracking fails only if all testTracking have failed
4995  bool isOneTestTrackingOk = false;
4996  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4997  it != m_mapOfTrackers.end(); ++it) {
4998  TrackerWrapper *tracker = it->second;
4999  try {
5000  tracker->testTracking();
5001  isOneTestTrackingOk = true;
5002  } catch (...) {
5003  }
5004  }
5005 
5006  if (!isOneTestTrackingOk) {
5007  std::ostringstream oss;
5008  oss << "Not enough moving edges to track the object. Try to reduce the "
5009  "threshold="
5010  << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
5011     throw vpTrackingException(vpTrackingException::fatalError, oss.str());
5012   }
5013 }
5014 
5024 void vpMbGenericTracker::track(const vpImage<unsigned char> &I)
5025 {
5026  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5027  mapOfImages[m_referenceCameraName] = &I;
5028 
5029  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5030  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5031 
5032  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5033 }
5034 
5044 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color)
5045 {
5046  std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5047  mapOfColorImages[m_referenceCameraName] = &I_color;
5048 
5049  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5050  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5051 
5052  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5053 }
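
/*
  Usage sketch (illustrative, not part of the original sources): the single-image
  track() overloads above run the whole monocular pipeline (preTracking(),
  computeVVS(), testTracking(), postTracking()). A typical loop, assuming the
  tracker has been initialized and that I is refreshed by any image source:

    vpImage<unsigned char> I;
    vpCameraParameters cam;
    vpHomogeneousMatrix cMo;
    tracker.getCameraParameters(cam);
    while (!end) {            // end: your own stop condition
      // acquire a new image into I
      tracker.track(I);
      tracker.getPose(cMo);
      tracker.display(I, cMo, cam, vpColor::red, 2);
    }
*/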
5054 
5065 void vpMbGenericTracker::track(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2)
5066 {
5067  if (m_mapOfTrackers.size() == 2) {
5068  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5069  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5070  mapOfImages[it->first] = &I1;
5071  ++it;
5072 
5073  mapOfImages[it->first] = &I2;
5074 
5075  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5076  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5077 
5078  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5079  } else {
5080  std::stringstream ss;
5081  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5082  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5083  }
5084 }
5085 
5096 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2)
5097 {
5098  if (m_mapOfTrackers.size() == 2) {
5099  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5100  std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5101  mapOfImages[it->first] = &I_color1;
5102  ++it;
5103 
5104   mapOfImages[it->first] = &I_color2;
5105 
5106  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5107  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5108 
5109  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5110  } else {
5111  std::stringstream ss;
5112  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5113  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5114  }
5115 }
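
/*
  Usage sketch (illustrative, not part of the original sources): the two-image
  track() overloads feed the two cameras in map order (the first map entry receives
  I1 / I_color1). After tracking, both poses can be retrieved at once:

    vpHomogeneousMatrix c1Mo, c2Mo;
    tracker.track(I_left, I_right);
    tracker.getPose(c1Mo, c2Mo);
*/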
5116 
5124 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5125 {
5126  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5127  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5128 
5129  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5130 }
5131 
5139 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5140 {
5141  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5142  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5143 
5144  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5145 }
5146 
5147 #ifdef VISP_HAVE_PCL
5148 
5156 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5157  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5158 {
5159  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5160  it != m_mapOfTrackers.end(); ++it) {
5161  TrackerWrapper *tracker = it->second;
5162 
5163  if ((tracker->m_trackerType & (EDGE_TRACKER |
5164 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5165  KLT_TRACKER |
5166 #endif
5167                                  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5168     throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5169  }
5170 
5171  if (tracker->m_trackerType & (EDGE_TRACKER
5172 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5173  | KLT_TRACKER
5174 #endif
5175  ) &&
5176  mapOfImages[it->first] == NULL) {
5177  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5178  }
5179 
5180  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5181  ! mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5182  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5183  }
5184  }
5185 
5186  preTracking(mapOfImages, mapOfPointClouds);
5187 
5188  try {
5189  computeVVS(mapOfImages);
5190  } catch (...) {
5191  covarianceMatrix = -1;
5192  throw; // throw the original exception
5193  }
5194 
5195  testTracking();
5196 
5197  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5198  it != m_mapOfTrackers.end(); ++it) {
5199  TrackerWrapper *tracker = it->second;
5200 
5201  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5202  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5203  }
5204 
5205  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5206 
5207  if (displayFeatures) {
5208 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5209  if (tracker->m_trackerType & KLT_TRACKER) {
5210  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5211  }
5212 #endif
5213 
5214  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5215  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5216  }
5217  }
5218  }
5219 
5220   computeProjectionError();
5221 }
5222 
5231 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5232  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5233 {
5234  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5235  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5236  it != m_mapOfTrackers.end(); ++it) {
5237  TrackerWrapper *tracker = it->second;
5238 
5239  if ((tracker->m_trackerType & (EDGE_TRACKER |
5240 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5241  KLT_TRACKER |
5242 #endif
5243                                  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5244     throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5245  }
5246 
5247  if (tracker->m_trackerType & (EDGE_TRACKER
5248 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5249  | KLT_TRACKER
5250 #endif
5251  ) && mapOfImages[it->first] == NULL) {
5252  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5253  } else if (tracker->m_trackerType & (EDGE_TRACKER
5254 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5255  | KLT_TRACKER
5256 #endif
5257  ) && mapOfImages[it->first] != NULL) {
5258  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5259  mapOfImages[it->first] = &tracker->m_I; //update grayscale image buffer
5260  }
5261 
5262  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5263  ! mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5264  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5265  }
5266  }
5267 
5268  preTracking(mapOfImages, mapOfPointClouds);
5269 
5270  try {
5271  computeVVS(mapOfImages);
5272  } catch (...) {
5273  covarianceMatrix = -1;
5274  throw; // throw the original exception
5275  }
5276 
5277  testTracking();
5278 
5279  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5280  it != m_mapOfTrackers.end(); ++it) {
5281  TrackerWrapper *tracker = it->second;
5282 
5283  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5284  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5285  }
5286 
5287  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5288 
5289  if (displayFeatures) {
5290 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5291  if (tracker->m_trackerType & KLT_TRACKER) {
5292  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5293  }
5294 #endif
5295 
5296  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5297  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5298  }
5299  }
5300  }
5301 
5302   computeProjectionError();
5303 }
5304 #endif
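
/*
  Usage sketch (illustrative, not part of the original sources): when ViSP is built
  with PCL, the overloads above take organized point clouds for the depth-based
  features while the grayscale/color images feed the edge and KLT features.
  Assuming one color camera "Camera1" and one depth camera "Camera2":

    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
    std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
    mapOfImages["Camera1"] = &I;                // used by EDGE_TRACKER / KLT_TRACKER
    mapOfPointClouds["Camera2"] = pointCloud;   // used by DEPTH_NORMAL / DEPTH_DENSE trackers
    tracker.track(mapOfImages, mapOfPointClouds);
*/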
5305 
5316 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5317  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5318  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5319  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5320 {
5321  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5322  it != m_mapOfTrackers.end(); ++it) {
5323  TrackerWrapper *tracker = it->second;
5324 
5325  if ((tracker->m_trackerType & (EDGE_TRACKER |
5326 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5327  KLT_TRACKER |
5328 #endif
5329                                  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5330     throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5331  }
5332 
5333  if (tracker->m_trackerType & (EDGE_TRACKER
5334 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5335  | KLT_TRACKER
5336 #endif
5337  ) &&
5338  mapOfImages[it->first] == NULL) {
5339  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5340  }
5341 
5342  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5343  (mapOfPointClouds[it->first] == NULL)) {
5344  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5345  }
5346  }
5347 
5348  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5349 
5350  try {
5351  computeVVS(mapOfImages);
5352  } catch (...) {
5353  covarianceMatrix = -1;
5354  throw; // throw the original exception
5355  }
5356 
5357  testTracking();
5358 
5359  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5360  it != m_mapOfTrackers.end(); ++it) {
5361  TrackerWrapper *tracker = it->second;
5362 
5363  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5364  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5365  }
5366 
5367  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5368 
5369  if (displayFeatures) {
5370 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5371  if (tracker->m_trackerType & KLT_TRACKER) {
5372  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5373  }
5374 #endif
5375 
5376  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5377  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5378  }
5379  }
5380  }
5381 
5382   computeProjectionError();
5383 }
5384 
5395 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5396  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5397  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5398  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5399 {
5400  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5401  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5402  it != m_mapOfTrackers.end(); ++it) {
5403  TrackerWrapper *tracker = it->second;
5404 
5405  if ((tracker->m_trackerType & (EDGE_TRACKER |
5406 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5407  KLT_TRACKER |
5408 #endif
5409                                  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5410     throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5411  }
5412 
5413  if (tracker->m_trackerType & (EDGE_TRACKER
5414 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5415  | KLT_TRACKER
5416 #endif
5417  ) && mapOfColorImages[it->first] == NULL) {
5418  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5419  } else if (tracker->m_trackerType & (EDGE_TRACKER
5420 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5421  | KLT_TRACKER
5422 #endif
5423  ) && mapOfColorImages[it->first] != NULL) {
5424  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5425  mapOfImages[it->first] = &tracker->m_I; //update grayscale image buffer
5426  }
5427 
5428  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5429  (mapOfPointClouds[it->first] == NULL)) {
5430  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5431  }
5432  }
5433 
5434  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5435 
5436  try {
5437  computeVVS(mapOfImages);
5438  } catch (...) {
5439  covarianceMatrix = -1;
5440  throw; // throw the original exception
5441  }
5442 
5443  testTracking();
5444 
5445  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5446  it != m_mapOfTrackers.end(); ++it) {
5447  TrackerWrapper *tracker = it->second;
5448 
5449  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5450  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5451  }
5452 
5453  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5454 
5455  if (displayFeatures) {
5456 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5457  if (tracker->m_trackerType & KLT_TRACKER) {
5458  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5459  }
5460 #endif
5461 
5462  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5463  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5464  }
5465  }
5466  }
5467 
5468   computeProjectionError();
5469 }
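
/*
  Usage sketch (illustrative, not part of the original sources): without PCL, depth
  data is passed as an organized set of 3D points (one vpColVector per pixel of the
  depth map) together with its width and height:

    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
    std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
    std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
    mapOfImages["Camera1"] = &I;
    mapOfPointClouds["Camera2"] = &pointcloud;  // pointcloud.size() == width * height
    mapOfWidths["Camera2"] = depthWidth;
    mapOfHeights["Camera2"] = depthHeight;
    tracker.track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
*/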
5470 
5472 vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5473  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5474 {
5475  m_lambda = 1.0;
5476  m_maxIter = 30;
5477 
5478 #ifdef VISP_HAVE_OGRE
5479  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5480 
5481  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5482 #endif
5483 }
5484 
5485 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
5486  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5487 {
5488  if ((m_trackerType & (EDGE_TRACKER |
5489 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5490  KLT_TRACKER |
5491 #endif
5492                         DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5493     throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
5494  }
5495 
5496  m_lambda = 1.0;
5497  m_maxIter = 30;
5498 
5499 #ifdef VISP_HAVE_OGRE
5500  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5501 
5502  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5503 #endif
5504 }
5505 
5506 vpMbGenericTracker::TrackerWrapper::~TrackerWrapper() { }
5507 
5508 // Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
5509 void vpMbGenericTracker::TrackerWrapper::computeVVS(const vpImage<unsigned char> *const ptr_I)
5510 {
5511  computeVVSInit(ptr_I);
5512 
5513  if (m_error.getRows() < 4) {
5514  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
5515  }
5516 
5517  double normRes = 0;
5518  double normRes_1 = -1;
5519  unsigned int iter = 0;
5520 
5521  double factorEdge = 1.0;
5522 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5523  double factorKlt = 1.0;
5524 #endif
5525  double factorDepth = 1.0;
5526  double factorDepthDense = 1.0;
5527 
5528  vpMatrix LTL;
5529  vpColVector LTR, v;
5530  vpColVector error_prev;
5531 
5532  double mu = m_initialMu;
5533  vpHomogeneousMatrix cMo_prev;
5534 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5535  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
5536 #endif
5537  bool isoJoIdentity_ = true;
5538 
5539  // Covariance
5540  vpColVector W_true(m_error.getRows());
5541  vpMatrix L_true, LVJ_true;
5542 
5543  unsigned int nb_edge_features = m_error_edge.getRows();
5544 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5545  unsigned int nb_klt_features = m_error_klt.getRows();
5546 #endif
5547  unsigned int nb_depth_features = m_error_depthNormal.getRows();
5548  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
5549 
5550  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
5551     computeVVSInteractionMatrixAndResidu(ptr_I);
5552 
5553  bool reStartFromLastIncrement = false;
5554  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
5555 
5556 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5557  if (reStartFromLastIncrement) {
5558  if (m_trackerType & KLT_TRACKER) {
5559  ctTc0 = ctTc0_Prev;
5560  }
5561  }
5562 #endif
5563 
5564  if (!reStartFromLastIncrement) {
5565       computeVVSWeights();
5566 
5567  if (computeCovariance) {
5568  L_true = m_L;
5569  if (!isoJoIdentity_) {
5570           vpVelocityTwistMatrix cVo;
5571           cVo.buildFrom(m_cMo);
5572  LVJ_true = (m_L * cVo * oJo);
5573  }
5574  }
5575 
5576       vpVelocityTwistMatrix cVo;
5577       if (iter == 0) {
5578  isoJoIdentity_ = true;
5579  oJo.eye();
5580 
5581  // If all the 6 dof should be estimated, we check if the interaction
5582  // matrix is full rank. If not we remove automatically the dof that
5583       // cannot be estimated. This is particularly useful when considering
5584  // circles (rank 5) and cylinders (rank 4)
5585  if (isoJoIdentity_) {
5586  cVo.buildFrom(m_cMo);
5587 
5588  vpMatrix K; // kernel
5589  unsigned int rank = (m_L * cVo).kernel(K);
5590  if (rank == 0) {
5591  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
5592  }
5593 
5594  if (rank != 6) {
5595  vpMatrix I; // Identity
5596  I.eye(6);
5597  oJo = I - K.AtA();
5598 
5599  isoJoIdentity_ = false;
5600  }
5601  }
5602  }
5603 
5604  // Weighting
5605  double num = 0;
5606  double den = 0;
5607 
5608  unsigned int start_index = 0;
5609  if (m_trackerType & EDGE_TRACKER) {
5610  for (unsigned int i = 0; i < nb_edge_features; i++) {
5611  double wi = m_w_edge[i] * m_factor[i] * factorEdge;
5612  W_true[i] = wi;
5613  m_weightedError[i] = wi * m_error[i];
5614 
5615  num += wi * vpMath::sqr(m_error[i]);
5616  den += wi;
5617 
5618  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5619  m_L[i][j] *= wi;
5620  }
5621  }
5622 
5623  start_index += nb_edge_features;
5624  }
5625 
5626 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5627  if (m_trackerType & KLT_TRACKER) {
5628  for (unsigned int i = 0; i < nb_klt_features; i++) {
5629  double wi = m_w_klt[i] * factorKlt;
5630  W_true[start_index + i] = wi;
5631  m_weightedError[start_index + i] = wi * m_error_klt[i];
5632 
5633  num += wi * vpMath::sqr(m_error[start_index + i]);
5634  den += wi;
5635 
5636  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5637  m_L[start_index + i][j] *= wi;
5638  }
5639  }
5640 
5641  start_index += nb_klt_features;
5642  }
5643 #endif
5644 
5645  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5646  for (unsigned int i = 0; i < nb_depth_features; i++) {
5647  double wi = m_w_depthNormal[i] * factorDepth;
5648  m_w[start_index + i] = m_w_depthNormal[i];
5649  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5650 
5651  num += wi * vpMath::sqr(m_error[start_index + i]);
5652  den += wi;
5653 
5654  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5655  m_L[start_index + i][j] *= wi;
5656  }
5657  }
5658 
5659  start_index += nb_depth_features;
5660  }
5661 
5662  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5663  for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
5664  double wi = m_w_depthDense[i] * factorDepthDense;
5665  m_w[start_index + i] = m_w_depthDense[i];
5666  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5667 
5668  num += wi * vpMath::sqr(m_error[start_index + i]);
5669  den += wi;
5670 
5671  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5672  m_L[start_index + i][j] *= wi;
5673  }
5674  }
5675 
5676  // start_index += nb_depth_dense_features;
5677  }
5678 
5679  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
5680 
5681  cMo_prev = m_cMo;
5682 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5683  if (m_trackerType & KLT_TRACKER) {
5684  ctTc0_Prev = ctTc0;
5685  }
5686 #endif
5687 
5688       m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
5689 
5690 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5691  if (m_trackerType & KLT_TRACKER) {
5692  ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
5693  }
5694 #endif
5695  normRes_1 = normRes;
5696 
5697  normRes = sqrt(num / den);
5698  }
5699 
5700  iter++;
5701  }
5702 
5703  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
5704 
5705  if (m_trackerType & EDGE_TRACKER) {
5706     vpMbEdgeTracker::updateMovingEdgeWeights();
5707   }
5708 }
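
/*
  Note (illustrative sketch, not part of the original sources): each iteration of
  computeVVS() above stacks the per-feature interaction matrices and residuals,
  applies the M-estimator weights, and solves a Levenberg-Marquardt damped
  Gauss-Newton step before updating the pose through the exponential map. Ignoring
  the damping term and the oJo / cVo handling, the core update is equivalent to:

    vpMatrix WL = m_L;                  // rows already multiplied by the weights wi
    vpColVector We = m_weightedError;   // wi * e_i
    vpColVector v = -m_lambda * WL.pseudoInverse() * We;   // velocity twist
    m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo; // pose update
*/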
5709 
5710 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
5711 {
5712  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
5713  "TrackerWrapper::computeVVSInit("
5714  ") should not be called!");
5715 }
5716 
5717 void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
5718 {
5719  initMbtTracking(ptr_I);
5720 
5721  unsigned int nbFeatures = 0;
5722 
5723  if (m_trackerType & EDGE_TRACKER) {
5724  nbFeatures += m_error_edge.getRows();
5725  } else {
5726  m_error_edge.clear();
5727  m_weightedError_edge.clear();
5728  m_L_edge.clear();
5729  m_w_edge.clear();
5730  }
5731 
5732 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5733  if (m_trackerType & KLT_TRACKER) {
5734     vpMbKltTracker::computeVVSInit();
5735     nbFeatures += m_error_klt.getRows();
5736  } else {
5737  m_error_klt.clear();
5738  m_weightedError_klt.clear();
5739  m_L_klt.clear();
5740  m_w_klt.clear();
5741  }
5742 #endif
5743 
5744  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5745     vpMbDepthNormalTracker::computeVVSInit();
5746     nbFeatures += m_error_depthNormal.getRows();
5747  } else {
5748  m_error_depthNormal.clear();
5749  m_weightedError_depthNormal.clear();
5750  m_L_depthNormal.clear();
5751  m_w_depthNormal.clear();
5752  }
5753 
5754  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5755     vpMbDepthDenseTracker::computeVVSInit();
5756     nbFeatures += m_error_depthDense.getRows();
5757  } else {
5758  m_error_depthDense.clear();
5759  m_weightedError_depthDense.clear();
5760  m_L_depthDense.clear();
5761  m_w_depthDense.clear();
5762  }
5763 
5764  m_L.resize(nbFeatures, 6, false, false);
5765  m_error.resize(nbFeatures, false);
5766 
5767  m_weightedError.resize(nbFeatures, false);
5768  m_w.resize(nbFeatures, false);
5769  m_w = 1;
5770 }
5771 
5772 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu()
5773 {
5774  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
5775  "TrackerWrapper::"
5776  "computeVVSInteractionMatrixAndR"
5777  "esidu() should not be called!");
5778 }
5779 
5780 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu(const vpImage<unsigned char> *const ptr_I)
5781 {
5782  if (m_trackerType & EDGE_TRACKER) {
5783     vpMbEdgeTracker::computeVVSInteractionMatrixAndResidu(*ptr_I);
5784   }
5785 
5786 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5787  if (m_trackerType & KLT_TRACKER) {
5788     vpMbKltTracker::computeVVSInteractionMatrixAndResidu();
5789   }
5790 #endif
5791 
5792  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5793     vpMbDepthNormalTracker::computeVVSInteractionMatrixAndResidu();
5794   }
5795 
5796  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5797     vpMbDepthDenseTracker::computeVVSInteractionMatrixAndResidu();
5798   }
5799 
5800  unsigned int start_index = 0;
5801  if (m_trackerType & EDGE_TRACKER) {
5802  m_L.insert(m_L_edge, start_index, 0);
5803  m_error.insert(start_index, m_error_edge);
5804 
5805  start_index += m_error_edge.getRows();
5806  }
5807 
5808 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5809  if (m_trackerType & KLT_TRACKER) {
5810  m_L.insert(m_L_klt, start_index, 0);
5811  m_error.insert(start_index, m_error_klt);
5812 
5813  start_index += m_error_klt.getRows();
5814  }
5815 #endif
5816 
5817  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5818  m_L.insert(m_L_depthNormal, start_index, 0);
5819  m_error.insert(start_index, m_error_depthNormal);
5820 
5821  start_index += m_error_depthNormal.getRows();
5822  }
5823 
5824  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5825  m_L.insert(m_L_depthDense, start_index, 0);
5826  m_error.insert(start_index, m_error_depthDense);
5827 
5828  // start_index += m_error_depthDense.getRows();
5829  }
5830 }
5831 
5832 void vpMbGenericTracker::TrackerWrapper::computeVVSWeights()
5833 {
5834  unsigned int start_index = 0;
5835 
5836  if (m_trackerType & EDGE_TRACKER) {
5837     vpMbEdgeTracker::computeVVSWeights();
5838     m_w.insert(start_index, m_w_edge);
5839 
5840  start_index += m_w_edge.getRows();
5841  }
5842 
5843 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5844  if (m_trackerType & KLT_TRACKER) {
5845  vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
5846  m_w.insert(start_index, m_w_klt);
5847 
5848  start_index += m_w_klt.getRows();
5849  }
5850 #endif
5851 
5852  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5853  if (m_depthNormalUseRobust) {
5854  vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
5855  m_w.insert(start_index, m_w_depthNormal);
5856  }
5857 
5858  start_index += m_w_depthNormal.getRows();
5859  }
5860 
5861  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5862     vpMbTracker::computeVVSWeights(m_robust_depthDense, m_error_depthDense, m_w_depthDense);
5863     m_w.insert(start_index, m_w_depthDense);
5864 
5865  // start_index += m_w_depthDense.getRows();
5866  }
5867 }
5868 
5869 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
5870  const vpCameraParameters &cam, const vpColor &col,
5871  unsigned int thickness, bool displayFullModel)
5872 {
5873  if (displayFeatures) {
5874  std::vector<std::vector<double> > features = getFeaturesForDisplay();
5875  for (size_t i = 0; i < features.size(); i++) {
5876  if (vpMath::equal(features[i][0], 0)) {
5877  vpImagePoint ip(features[i][1], features[i][2]);
5878  int state = static_cast<int>(features[i][3]);
5879 
5880  switch (state) {
5881         case vpMeSite::NO_SUPPRESSION:
5882           vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
5883           break;
5884 
5885  case vpMeSite::CONSTRAST:
5886  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
5887  break;
5888 
5889  case vpMeSite::THRESHOLD:
5891  break;
5892 
5893  case vpMeSite::M_ESTIMATOR:
5894  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
5895  break;
5896 
5897  case vpMeSite::TOO_NEAR:
5898  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
5899  break;
5900 
5901  default:
5903  }
5904  } else if (vpMath::equal(features[i][0], 1)) {
5905  vpImagePoint ip1(features[i][1], features[i][2]);
5907 
5908  vpImagePoint ip2(features[i][3], features[i][4]);
5909  double id = features[i][5];
5910  std::stringstream ss;
5911  ss << id;
5912  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
5913  } else if (vpMath::equal(features[i][0], 2)) {
5914  vpImagePoint im_centroid(features[i][1], features[i][2]);
5915  vpImagePoint im_extremity(features[i][3], features[i][4]);
5916  bool desired = vpMath::equal(features[i][0], 2);
5917  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
5918  }
5919  }
5920  }
5921 
5922  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
5923  for (size_t i = 0; i < models.size(); i++) {
5924  if (vpMath::equal(models[i][0], 0)) {
5925  vpImagePoint ip1(models[i][1], models[i][2]);
5926  vpImagePoint ip2(models[i][3], models[i][4]);
5927  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
5928  } else if (vpMath::equal(models[i][0], 1)) {
5929  vpImagePoint center(models[i][1], models[i][2]);
5930  double mu20 = models[i][3];
5931  double mu11 = models[i][4];
5932  double mu02 = models[i][5];
5933  vpDisplay::displayEllipse(I, center, mu20, mu11, mu02, true, col, thickness);
5934  }
5935  }
5936 
5937 #ifdef VISP_HAVE_OGRE
5938  if ((m_trackerType & EDGE_TRACKER)
5939  #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5940  || (m_trackerType & KLT_TRACKER)
5941  #endif
5942  ) {
5943  if (useOgre)
5944  faces.displayOgre(cMo);
5945  }
5946 #endif
5947 }
5948 
5949 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
5950  const vpCameraParameters &cam, const vpColor &col,
5951  unsigned int thickness, bool displayFullModel)
5952 {
5953  if (displayFeatures) {
5954  std::vector<std::vector<double> > features = getFeaturesForDisplay();
5955  for (size_t i = 0; i < features.size(); i++) {
5956  if (vpMath::equal(features[i][0], 0)) {
5957  vpImagePoint ip(features[i][1], features[i][2]);
5958  int state = static_cast<int>(features[i][3]);
5959 
5960  switch (state) {
5961         case vpMeSite::NO_SUPPRESSION:
5962           vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
5963           break;
5964 
5965  case vpMeSite::CONSTRAST:
5966  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
5967  break;
5968 
5969  case vpMeSite::THRESHOLD:
5971  break;
5972 
5973  case vpMeSite::M_ESTIMATOR:
5974  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
5975  break;
5976 
5977  case vpMeSite::TOO_NEAR:
5978  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
5979  break;
5980 
5981  default:
5983  }
5984  } else if (vpMath::equal(features[i][0], 1)) {
5985  vpImagePoint ip1(features[i][1], features[i][2]);
5987 
5988  vpImagePoint ip2(features[i][3], features[i][4]);
5989  double id = features[i][5];
5990  std::stringstream ss;
5991  ss << id;
5992  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
5993  } else if (vpMath::equal(features[i][0], 2)) {
5994  vpImagePoint im_centroid(features[i][1], features[i][2]);
5995  vpImagePoint im_extremity(features[i][3], features[i][4]);
5996  bool desired = vpMath::equal(features[i][0], 2);
5997  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
5998  }
5999  }
6000  }
6001 
6002  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
6003  for (size_t i = 0; i < models.size(); i++) {
6004  if (vpMath::equal(models[i][0], 0)) {
6005  vpImagePoint ip1(models[i][1], models[i][2]);
6006  vpImagePoint ip2(models[i][3], models[i][4]);
6007  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
6008  } else if (vpMath::equal(models[i][0], 1)) {
6009  vpImagePoint center(models[i][1], models[i][2]);
6010  double mu20 = models[i][3];
6011  double mu11 = models[i][4];
6012  double mu02 = models[i][5];
6013  vpDisplay::displayEllipse(I, center, mu20, mu11, mu02, true, col, thickness);
6014  }
6015  }
6016 
6017 #ifdef VISP_HAVE_OGRE
6018  if ((m_trackerType & EDGE_TRACKER)
6019  #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6020  || (m_trackerType & KLT_TRACKER)
6021  #endif
6022  ) {
6023  if (useOgre)
6024  faces.displayOgre(cMo);
6025  }
6026 #endif
6027 }
6028 
6029 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6030 {
6031  std::vector<std::vector<double> > features;
6032 
6033  if (m_trackerType & EDGE_TRACKER) {
6034  //m_featuresToBeDisplayedEdge updated after computeVVS()
6035  features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6036  }
6037 
6038 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6039  if (m_trackerType & KLT_TRACKER) {
6040  //m_featuresToBeDisplayedKlt updated after postTracking()
6041  features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6042  }
6043 #endif
6044 
6045  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6046  //m_featuresToBeDisplayedDepthNormal updated after postTracking()
6047  features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(), m_featuresToBeDisplayedDepthNormal.end());
6048  }
6049 
6050  return features;
6051 }
6052 
6053 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width, unsigned int height,
6054  const vpHomogeneousMatrix &cMo,
6055  const vpCameraParameters &cam,
6056  bool displayFullModel)
6057 {
6058  std::vector<std::vector<double> > models;
6059 
6060  //Do not add multiple times the same models
6061  if (m_trackerType == EDGE_TRACKER) {
6062  models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6063  }
6064 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6065  else if (m_trackerType == KLT_TRACKER) {
6066  models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6067  }
6068 #endif
6069  else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6070  models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6071  } else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6072  models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6073  } else {
6074  //Edge and KLT trackers use the same primitives
6075  if (m_trackerType & EDGE_TRACKER) {
6076  std::vector<std::vector<double> > edgeModels = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6077  models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6078  }
6079 
6080  //Depth dense and depth normal trackers use the same primitives
6081  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6082  std::vector<std::vector<double> > depthDenseModels = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6083  models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6084  }
6085  }
6086 
6087  return models;
6088 }
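
/*
  Note (illustrative sketch, not part of the original sources): the rows returned
  by getModelForDisplay() / getFeaturesForDisplay() are tagged by their first
  element, exactly as consumed by display() above. For the model this gives:

    for (size_t i = 0; i < models.size(); i++) {
      if (vpMath::equal(models[i][0], 0)) {
        // line segment: [0, i1, j1, i2, j2]
      } else if (vpMath::equal(models[i][0], 1)) {
        // ellipse: [1, i_center, j_center, mu20, mu11, mu02]
      }
    }
*/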
6089 
6090 void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
6091 {
6092  if (!modelInitialised) {
6093  throw vpException(vpException::fatalError, "model not initialized");
6094  }
6095 
6096  if (useScanLine || clippingFlag > 3)
6097  m_cam.computeFov(I.getWidth(), I.getHeight());
6098 
6099  bool reInitialisation = false;
6100  if (!useOgre) {
6101  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6102  } else {
6103 #ifdef VISP_HAVE_OGRE
6104  if (!faces.isOgreInitialised()) {
6106 
6108  faces.initOgre(m_cam);
6109  // Turn off Ogre config dialog display for the next call to this
6110  // function since settings are saved in the ogre.cfg file and used
6111  // during the next call
6112  ogreShowConfigDialog = false;
6113  }
6114 
6116 #else
6117  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6118 #endif
6119  }
6120 
6121  if (useScanLine) {
6122     faces.computeClippedPolygons(m_cMo, m_cam);
6123     faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight());
6124   }
6125 
6126 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6127  if (m_trackerType & KLT_TRACKER)
6128     vpMbKltTracker::reinit(I);
6129 #endif
6130 
6131  if (m_trackerType & EDGE_TRACKER) {
6132     vpMbEdgeTracker::resetMovingEdge();
6133 
6134  bool a = false;
6135  vpMbEdgeTracker::visibleFace(I, m_cMo, a); // should be useless, but keep it for nbvisiblepolygone
6136 
6137  initMovingEdge(I, m_cMo);
6138  }
6139 
6140  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6141  vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);
6142 
6143  if (m_trackerType & DEPTH_DENSE_TRACKER)
6144  vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
6145 }
6146 
6147 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6148  double radius, int idFace, const std::string &name)
6149 {
6150  if (m_trackerType & EDGE_TRACKER)
6151  vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6152 
6153 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6154  if (m_trackerType & KLT_TRACKER)
6155  vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6156 #endif
6157 }
6158 
6159 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius,
6160  int idFace, const std::string &name)
6161 {
6162  if (m_trackerType & EDGE_TRACKER)
6163  vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6164 
6165 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6166  if (m_trackerType & KLT_TRACKER)
6167  vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6168 #endif
6169 }
6170 
6171 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
6172 {
6173  if (m_trackerType & EDGE_TRACKER)
6174     vpMbEdgeTracker::initFaceFromCorners(polygon);
6175 
6176 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6177  if (m_trackerType & KLT_TRACKER)
6178     vpMbKltTracker::initFaceFromCorners(polygon);
6179 #endif
6180 
6181  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6182     vpMbDepthNormalTracker::initFaceFromCorners(polygon);
6183 
6184  if (m_trackerType & DEPTH_DENSE_TRACKER)
6185     vpMbDepthDenseTracker::initFaceFromCorners(polygon);
6186 }
6187 
6188 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
6189 {
6190  if (m_trackerType & EDGE_TRACKER)
6191     vpMbEdgeTracker::initFaceFromLines(polygon);
6192 
6193 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6194  if (m_trackerType & KLT_TRACKER)
6195     vpMbKltTracker::initFaceFromLines(polygon);
6196 #endif
6197 
6198  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6199     vpMbDepthNormalTracker::initFaceFromLines(polygon);
6200 
6201  if (m_trackerType & DEPTH_DENSE_TRACKER)
6202     vpMbDepthDenseTracker::initFaceFromLines(polygon);
6203 }
6204 
6205 void vpMbGenericTracker::TrackerWrapper::initMbtTracking(const vpImage<unsigned char> *const ptr_I)
6206 {
6207  if (m_trackerType & EDGE_TRACKER) {
6210  }
6211 }
6212 
6213 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile)
6214 {
6215  // Load projection error config
6216  vpMbTracker::loadConfigFile(configFile);
6217 
6218 #ifdef VISP_HAVE_PUGIXML
6219   vpMbtXmlGenericParser xmlp((vpMbtXmlGenericParser::vpParserType)m_trackerType);
6220 
6221  xmlp.setCameraParameters(m_cam);
6224 
6225  // Edge
6226  xmlp.setEdgeMe(me);
6227 
6228  // KLT
6229  xmlp.setKltMaxFeatures(10000);
6230  xmlp.setKltWindowSize(5);
6231  xmlp.setKltQuality(0.01);
6232  xmlp.setKltMinDistance(5);
6233  xmlp.setKltHarrisParam(0.01);
6234  xmlp.setKltBlockSize(3);
6235  xmlp.setKltPyramidLevels(3);
6236 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6237  xmlp.setKltMaskBorder(maskBorder);
6238 #endif
6239 
6240  // Depth normal
6241  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6242  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6243  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6244  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6245  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6246  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6247 
6248  // Depth dense
6249  xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6250  xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6251 
6252  try {
6253  std::cout << " *********** Parsing XML for";
6254 
6255  std::vector<std::string> tracker_names;
6256  if (m_trackerType & EDGE_TRACKER)
6257  tracker_names.push_back("Edge");
6258 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6259  if (m_trackerType & KLT_TRACKER)
6260  tracker_names.push_back("Klt");
6261 #endif
6262  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6263  tracker_names.push_back("Depth Normal");
6264  if (m_trackerType & DEPTH_DENSE_TRACKER)
6265  tracker_names.push_back("Depth Dense");
6266 
6267  for (size_t i = 0; i < tracker_names.size(); i++) {
6268  std::cout << " " << tracker_names[i];
6269  if (i == tracker_names.size() - 1) {
6270  std::cout << " ";
6271  }
6272  }
6273 
6274  std::cout << "Model-Based Tracker ************ " << std::endl;
6275  xmlp.parse(configFile);
6276  } catch (...) {
6277  throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
6278  }
6279 
6280  vpCameraParameters camera;
6281  xmlp.getCameraParameters(camera);
6282  setCameraParameters(camera);
6283 
6286 
6287  if (xmlp.hasNearClippingDistance())
6289 
6290  if (xmlp.hasFarClippingDistance())
6292 
6293  if (xmlp.getFovClipping()) {
6295  }
6296 
6297  useLodGeneral = xmlp.getLodState();
6300 
6301  applyLodSettingInConfig = false;
6302  if (this->getNbPolygon() > 0) {
6303  applyLodSettingInConfig = true;
6307  }
6308 
6309  // Edge
6310  vpMe meParser;
6311  xmlp.getEdgeMe(meParser);
6312   vpMbEdgeTracker::setMovingEdge(meParser);
6313 
6314 // KLT
6315 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6316  tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
6317  tracker.setWindowSize((int)xmlp.getKltWindowSize());
6318  tracker.setQuality(xmlp.getKltQuality());
6319  tracker.setMinDistance(xmlp.getKltMinDistance());
6320  tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6321  tracker.setBlockSize((int)xmlp.getKltBlockSize());
6322  tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
6323  maskBorder = xmlp.getKltMaskBorder();
6324 
6325  // if(useScanLine)
6326  faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6327 #endif
6328 
6329  // Depth normal
6335 
6336  // Depth dense
6338 #else
6339  std::cerr << "pugixml third-party is not properly built to read config file: " << configFile << std::endl;
6340 #endif
6341 }
6342 
6343 #ifdef VISP_HAVE_PCL
6344 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6345  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6346 {
6347 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6348  // KLT
6349  if (m_trackerType & KLT_TRACKER) {
6350  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6351  vpMbKltTracker::reinit(*ptr_I);
6352  }
6353  }
6354 #endif
6355 
6356  // Looking for new visible face
6357  if (m_trackerType & EDGE_TRACKER) {
6358  bool newvisibleface = false;
6359  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6360 
6361  if (useScanLine) {
6363  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6364  }
6365  }
6366 
6367  // Depth normal
6368  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6369  vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);
6370 
6371  // Depth dense
6372  if (m_trackerType & DEPTH_DENSE_TRACKER)
6373  vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);
6374 
6375  // Edge
6376  if (m_trackerType & EDGE_TRACKER) {
6378 
6380  // Reinit the moving edge for the lines which need it.
6381     vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6382 
6383  if (computeProjError) {
6384       vpMbEdgeTracker::computeProjectionError(*ptr_I);
6385     }
6386  }
6387 }
6388 
6389 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
6390  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6391 {
6392  if (m_trackerType & EDGE_TRACKER) {
6393  try {
6394       vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6395     } catch (...) {
6396  std::cerr << "Error in moving edge tracking" << std::endl;
6397  throw;
6398  }
6399  }
6400 
6401 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6402  if (m_trackerType & KLT_TRACKER) {
6403  try {
6404       vpMbKltTracker::preTracking(*ptr_I);
6405     } catch (const vpException &e) {
6406  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6407  throw;
6408  }
6409  }
6410 #endif
6411 
6412  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6413  try {
6414       vpMbDepthNormalTracker::segmentPointCloud(point_cloud);
6415     } catch (...) {
6416  std::cerr << "Error in Depth normal tracking" << std::endl;
6417  throw;
6418  }
6419  }
6420 
6421  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6422  try {
6423       vpMbDepthDenseTracker::segmentPointCloud(point_cloud);
6424     } catch (...) {
6425  std::cerr << "Error in Depth dense tracking" << std::endl;
6426  throw;
6427  }
6428  }
6429 }
6430 #endif
6431 
6432 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6433  const unsigned int pointcloud_width,
6434  const unsigned int pointcloud_height)
6435 {
6436 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6437  // KLT
6438  if (m_trackerType & KLT_TRACKER) {
6439  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6440  vpMbKltTracker::reinit(*ptr_I);
6441  }
6442  }
6443 #endif
6444 
6445  // Looking for new visible face
6446  if (m_trackerType & EDGE_TRACKER) {
6447  bool newvisibleface = false;
6448  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6449 
6450  if (useScanLine) {
6452  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6453  }
6454  }
6455 
6456  // Depth normal
6457  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6458  vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);
6459 
6460  // Depth dense
6461  if (m_trackerType & DEPTH_DENSE_TRACKER)
6462  vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);
6463 
6464  // Edge
6465  if (m_trackerType & EDGE_TRACKER) {
6467 
6469  // Reinit the moving edge for the lines which need it.
6470     vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6471 
6472  if (computeProjError) {
6473       vpMbEdgeTracker::computeProjectionError(*ptr_I);
6474     }
6475  }
6476 }
6477 
6478 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> * const ptr_I,
6479  const std::vector<vpColVector> * const point_cloud,
6480  const unsigned int pointcloud_width,
6481  const unsigned int pointcloud_height)
6482 {
6483  if (m_trackerType & EDGE_TRACKER) {
6484  try {
6485       vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6486     } catch (...) {
6487  std::cerr << "Error in moving edge tracking" << std::endl;
6488  throw;
6489  }
6490  }
6491 
6492 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6493  if (m_trackerType & KLT_TRACKER) {
6494  try {
6495       vpMbKltTracker::preTracking(*ptr_I);
6496     } catch (const vpException &e) {
6497  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6498  throw;
6499  }
6500  }
6501 #endif
6502 
6503  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6504  try {
6505  vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6506  } catch (...) {
6507  std::cerr << "Error in Depth tracking" << std::endl;
6508  throw;
6509  }
6510  }
6511 
6512  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6513  try {
6514  vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6515  } catch (...) {
6516  std::cerr << "Error in Depth dense tracking" << std::endl;
6517  throw;
6518  }
6519  }
6520 }
6521 
6522 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> * const I, const vpImage<vpRGBa> * const I_color,
6523  const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose,
6524  const vpHomogeneousMatrix &T)
6525 {
6526  m_cMo.eye();
6527 
6528  // Edge
6529  vpMbtDistanceLine *l;
6530   vpMbtDistanceCylinder *cy;
6531   vpMbtDistanceCircle *ci;
6532 
6533  for (unsigned int i = 0; i < scales.size(); i++) {
6534  if (scales[i]) {
6535  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
6536  l = *it;
6537  if (l != NULL)
6538  delete l;
6539  l = NULL;
6540  }
6541 
6542  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
6543  ++it) {
6544  cy = *it;
6545  if (cy != NULL)
6546  delete cy;
6547  cy = NULL;
6548  }
6549 
6550  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
6551  ci = *it;
6552  if (ci != NULL)
6553  delete ci;
6554  ci = NULL;
6555  }
6556 
6557  lines[i].clear();
6558  cylinders[i].clear();
6559  circles[i].clear();
6560  }
6561  }
6562 
6563  nline = 0;
6564  ncylinder = 0;
6565  ncircle = 0;
6566  nbvisiblepolygone = 0;
6567 
6568 // KLT
6569 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6570 #if (VISP_HAVE_OPENCV_VERSION < 0x020408)
6571  if (cur != NULL) {
6572  cvReleaseImage(&cur);
6573  cur = NULL;
6574  }
6575 #endif
6576 
6577  // delete the Klt Polygon features
6578  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
6579  vpMbtDistanceKltPoints *kltpoly = *it;
6580  if (kltpoly != NULL) {
6581  delete kltpoly;
6582  }
6583  kltpoly = NULL;
6584  }
6585  kltPolygons.clear();
6586 
6587  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
6588  ++it) {
6589  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
6590  if (kltPolyCylinder != NULL) {
6591  delete kltPolyCylinder;
6592  }
6593  kltPolyCylinder = NULL;
6594  }
6595  kltCylinders.clear();
6596 
6597  // delete the structures used to display circles
6598  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
6599  ci = *it;
6600  if (ci != NULL) {
6601  delete ci;
6602  }
6603  ci = NULL;
6604  }
6605  circles_disp.clear();
6606 
6607  firstInitialisation = true;
6608 
6609 #endif
6610 
6611  // Depth normal
6612  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
6613  delete m_depthNormalFaces[i];
6614  m_depthNormalFaces[i] = NULL;
6615  }
6616  m_depthNormalFaces.clear();
6617 
6618  // Depth dense
6619  for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
6620  delete m_depthDenseFaces[i];
6621  m_depthDenseFaces[i] = NULL;
6622  }
6623  m_depthDenseFaces.clear();
6624 
6625  faces.reset();
6626 
6627  loadModel(cad_name, verbose, T);
6628  if (I) {
6629  initFromPose(*I, cMo);
6630  } else {
6631  initFromPose(*I_color, cMo);
6632  }
6633 }
6634 
6635 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
6636  const vpHomogeneousMatrix &cMo, bool verbose,
6637  const vpHomogeneousMatrix &T)
6638 {
6639  reInitModel(&I, NULL, cad_name, cMo, verbose, T);
6640 }
6641 
6642 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
6643  const vpHomogeneousMatrix &cMo, bool verbose,
6644  const vpHomogeneousMatrix &T)
6645 {
6646  reInitModel(NULL, &I_color, cad_name, cMo, verbose, T);
6647 }
6648 
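// Illustrative usage sketch (not part of this file; the model file name, image I and
// pose cMo_new are hypothetical placeholders assumed to be provided by the
// application): reloading a new CAD model at run time through the public
// vpMbGenericTracker API, which forwards to the TrackerWrapper::reInitModel()
// overloads above.
//
//   vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
//   vpHomogeneousMatrix cMo_new;                      // pose of the new object, assumed known
//   tracker.reInitModel(I, "new_object.cao", cMo_new, true);
//   tracker.track(I);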
6649 void vpMbGenericTracker::TrackerWrapper::resetTracker()
6650 {
6651  vpMbEdgeTracker::resetTracker();
6652 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6653  vpMbKltTracker::resetTracker();
6654 #endif
6655  vpMbDepthNormalTracker::resetTracker();
6656  vpMbDepthDenseTracker::resetTracker();
6657 }
6658 
6659 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
6660 {
6661  m_cam = cam;
6662 
6663  vpMbEdgeTracker::setCameraParameters(cam);
6664 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6665  vpMbKltTracker::setCameraParameters(cam);
6666 #endif
6667  vpMbDepthNormalTracker::setCameraParameters(cam);
6668  vpMbDepthDenseTracker::setCameraParameters(cam);
6669 }
6670 
6671 void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags)
6672 {
6673  vpMbEdgeTracker::setClipping(flags);
6674 }
6675 
6676 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
6677 {
6678  vpMbEdgeTracker::setFarClippingDistance(dist);
6679 }
6680 
6681 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
6682 {
6683  vpMbEdgeTracker::setNearClippingDistance(dist);
6684 }
6685 
6686 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
6687 {
6688  vpMbTracker::setOgreVisibilityTest(v);
6689 #ifdef VISP_HAVE_OGRE
6690  faces.getOgreContext()->setWindowName("TrackerWrapper");
6691 #endif
6692 }
6693 
6694 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> * const I, const vpImage<vpRGBa> * const I_color,
6695  const vpHomogeneousMatrix &cdMo)
6696 {
6697  bool performKltSetPose = false;
6698  if (I_color) {
6699  vpImageConvert::convert(*I_color, m_I);
6700  }
6701 
6702 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6703  if (m_trackerType & KLT_TRACKER) {
6704  performKltSetPose = true;
6705 
6706  if (useScanLine || clippingFlag > 3) {
6707  m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6708  }
6709 
6710  vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
6711  }
6712 #endif
6713 
6714  if (!performKltSetPose) {
6715  m_cMo = cdMo;
6716  init(I ? *I : m_I);
6717  return;
6718  }
6719 
6720  if (m_trackerType & EDGE_TRACKER)
6721  resetMovingEdge();
6722 
6723  if (useScanLine) {
6724  faces.computeClippedPolygons(m_cMo, m_cam);
6725  faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6726  }
6727 
6728 #if 0
6729  if (m_trackerType & EDGE_TRACKER) {
6730  initPyramid(I, Ipyramid);
6731 
6732  unsigned int i = (unsigned int) scales.size();
6733  do {
6734  i--;
6735  if(scales[i]){
6736  downScale(i);
6737  initMovingEdge(*Ipyramid[i], cMo);
6738  upScale(i);
6739  }
6740  } while(i != 0);
6741 
6742  cleanPyramid(Ipyramid);
6743  }
6744 #else
6745  if (m_trackerType & EDGE_TRACKER)
6746  initMovingEdge(I ? *I : m_I, m_cMo);
6747 #endif
6748 
6749  // Depth normal
6750  vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6751 
6752  // Depth dense
6753  vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
6754 }
6755 
6756 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
6757 {
6758  setPose(&I, NULL, cdMo);
6759 }
6760 
6761 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
6762 {
6763  setPose(NULL, &I_color, cdMo);
6764 }
6765 
6766 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
6767 {
6768  vpMbEdgeTracker::setProjectionErrorComputation(flag);
6769 }
6770 
6771 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
6772 {
6773  vpMbEdgeTracker::setScanLineVisibilityTest(v);
6774 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6775  vpMbKltTracker::setScanLineVisibilityTest(v);
6776 #endif
6777  vpMbDepthNormalTracker::setScanLineVisibilityTest(v);
6778  vpMbDepthDenseTracker::setScanLineVisibilityTest(v);
6779 }
6780 
6781 void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
6782 {
6783  if ((type & (EDGE_TRACKER |
6784 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6785  KLT_TRACKER |
6786 #endif
6787  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
6788  throw vpException(vpTrackingException::badValue, "bad value for tracker type: !", type);
6789  }
6790 
6791  m_trackerType = type;
6792 }
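// Note: the tracker type is a bit mask, so several feature types can be combined for a
// single camera when the corresponding third-party modules are available. Hypothetical
// example through the public vpMbGenericTracker::setTrackerType() call, which forwards
// to this method for each camera:
//
//   tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);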
6793 
6794 void vpMbGenericTracker::TrackerWrapper::testTracking()
6795 {
6796  if (m_trackerType & EDGE_TRACKER) {
6797  vpMbEdgeTracker::testTracking();
6798  }
6799 }
6800 
6801 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> &
6802 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
6803  I
6804 #endif
6805 )
6806 {
6807  if ((m_trackerType & (EDGE_TRACKER
6808 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6809  | KLT_TRACKER
6810 #endif
6811  )) == 0) {
6812  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
6813  return;
6814  }
6815 
6816 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
6817  track(&I, nullptr);
6818 #endif
6819 }
6820 
6821 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<vpRGBa> &)
6822 {
6823  // Not exposed in the public API; only used for debugging
6824 }
6825 
6826 #ifdef VISP_HAVE_PCL
6827 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
6828  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6829 {
6830  if ((m_trackerType & (EDGE_TRACKER |
6831 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6832  KLT_TRACKER |
6833 #endif
6834  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
6835  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
6836  return;
6837  }
6838 
6839  if (m_trackerType & (EDGE_TRACKER
6840 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6841  | KLT_TRACKER
6842 #endif
6843  ) &&
6844  ptr_I == NULL) {
6845  throw vpException(vpException::fatalError, "Image pointer is NULL!");
6846  }
6847 
6848  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && ! point_cloud) { // point_cloud == nullptr
6849  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
6850  }
6851 
6852  // Back-up cMo in case of exception
6853  vpHomogeneousMatrix cMo_1 = m_cMo;
6854  try {
6855  preTracking(ptr_I, point_cloud);
6856 
6857  try {
6858  computeVVS(ptr_I);
6859  } catch (...) {
6860  covarianceMatrix = -1;
6861  throw; // throw the original exception
6862  }
6863 
6864  if (m_trackerType == EDGE_TRACKER)
6865  testTracking();
6866 
6867  postTracking(ptr_I, point_cloud);
6868 
6869  } catch (const vpException &e) {
6870  std::cerr << "Exception: " << e.what() << std::endl;
6871  m_cMo = cMo_1;
6872  throw; // rethrowing the original exception
6873  }
6874 }
6875 #endif
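// Illustrative end-to-end sketch (hedged; variable and file names are hypothetical and
// model/camera initialization is elided): driving these per-camera wrappers through the
// public map-based API of vpMbGenericTracker, which runs the per-camera
// preTracking()/postTracking() steps defined in this file around a joint VVS
// optimization.
//
//   std::vector<int> trackerTypes;
//   trackerTypes.push_back(vpMbGenericTracker::EDGE_TRACKER);
//   trackerTypes.push_back(vpMbGenericTracker::DEPTH_DENSE_TRACKER);
//   vpMbGenericTracker tracker(trackerTypes);          // cameras named "Camera1" and "Camera2"
//   // ... load the model, the camera parameters and the initial pose ...
//   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
//   std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
//   mapOfImages["Camera1"] = &I;                        // grayscale image of camera 1
//   mapOfPointClouds["Camera2"] = pointCloud;           // PCL point cloud of camera 2
//   tracker.track(mapOfImages, mapOfPointClouds);
//   vpHomogeneousMatrix cMo = tracker.getPose();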