vpMbGenericTracker.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Generic model-based tracker.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/mbt/vpMbGenericTracker.h>
37 
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/core/vpIoTools.h>
42 #include <visp3/mbt/vpMbtXmlGenericParser.h>
43 
44 #ifdef VISP_HAVE_NLOHMANN_JSON
45 #include <nlohmann/json.hpp>
46 using json = nlohmann::json;
47 #endif
48 
49 vpMbGenericTracker::vpMbGenericTracker()
50  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
51  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
52  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
53 {
54  m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
55 
56  // Add default camera transformation matrix
57  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
58 
59  // Add default weighting for each feature type
60  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
61 
62 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
63  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
64 #endif
65 
66  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
67  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
68 }
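// A usage sketch for the constructors in this file (illustrative only: the file names are
// placeholders, grab() stands for any image acquisition, and the tracker type flags can be
// combined, e.g. EDGE_TRACKER | KLT_TRACKER, depending on the enabled modules):
//
//   vpImage<unsigned char> I;
//   vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
//   tracker.loadConfigFile("camera.xml");  // placeholder settings file
//   tracker.loadModel("object.cao");       // placeholder CAD model
//   tracker.initClick(I, "object.init");   // placeholder init file, needs the gui module
//   while (grab(I)) {
//     tracker.track(I);
//     vpHomogeneousMatrix cMo;
//     tracker.getPose(cMo);
//   }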
69 
70 vpMbGenericTracker::vpMbGenericTracker(unsigned int nbCameras, int trackerType)
71  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
72  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
73  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
74 {
75  if (nbCameras == 0) {
76  throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
77  }
78  else if (nbCameras == 1) {
79  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);
80 
81  // Add default camera transformation matrix
82  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
83  }
84  else {
85  for (unsigned int i = 1; i <= nbCameras; i++) {
86  std::stringstream ss;
87  ss << "Camera" << i;
88  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);
89 
90  // Add default camera transformation matrix
91  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
92  }
93 
94  // Set by default the reference camera to the first one
95  m_referenceCameraName = m_mapOfTrackers.begin()->first;
96  }
97 
98  // Add default weighting for each feature type
99  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
100 
101 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
102  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
103 #endif
104 
105  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
106  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
107 }
108 
109 vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
110  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
111  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
112  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
113 {
114  if (trackerTypes.empty()) {
115  throw vpException(vpException::badValue, "There is no camera!");
116  }
117 
118  if (trackerTypes.size() == 1) {
119  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);
120 
121  // Add default camera transformation matrix
122  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
123  }
124  else {
125  for (size_t i = 1; i <= trackerTypes.size(); i++) {
126  std::stringstream ss;
127  ss << "Camera" << i;
128  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);
129 
130  // Add default camera transformation matrix
131  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
132  }
133 
134  // Set by default the reference camera to the first one
135  m_referenceCameraName = m_mapOfTrackers.begin()->first;
136  }
137 
138  // Add default weighting for each feature type
139  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
140 
141 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
142  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
143 #endif
144 
145  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
146  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
147 }
148 
149 vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
150  const std::vector<int> &trackerTypes)
151  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
152  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
153  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
154 {
155  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
157  "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
158  }
159 
160  for (size_t i = 0; i < cameraNames.size(); i++) {
161  m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);
162 
163  // Add default camera transformation matrix
164  m_mapOfCameraTransformationMatrix[cameraNames[i]] = vpHomogeneousMatrix();
165  }
166 
167  // Set by default the reference camera to the first one
168  m_referenceCameraName = m_mapOfTrackers.begin()->first;
169 
170  // Add default weighting for each feature type
171  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
172 
173 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
174  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
175 #endif
176 
177  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
178  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
179 }
180 
181 vpMbGenericTracker::~vpMbGenericTracker()
182 {
183  for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
184  ++it) {
185  delete it->second;
186  it->second = NULL;
187  }
188 }
189 
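// Note on the computeCurrentProjectionError() overloads below: each per-camera tracker
// reports an accumulated angular error (in radians) together with the number of features
// it used; the global error is the feature-weighted mean converted to degrees, i.e.
// deg(sum_c e_c / sum_c n_c). When no feature at all contributes, the conventional
// worst-case value of 90 degrees is returned.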
207 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<unsigned char> &I,
208  const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam)
209 {
210  double rawTotalProjectionError = 0.0;
211  unsigned int nbTotalFeaturesUsed = 0;
212 
213  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
214  it != m_mapOfTrackers.end(); ++it) {
215  TrackerWrapper *tracker = it->second;
216 
217  unsigned int nbFeaturesUsed = 0;
218  double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
219 
220  if (nbFeaturesUsed > 0) {
221  nbTotalFeaturesUsed += nbFeaturesUsed;
222  rawTotalProjectionError += curProjError;
223  }
224  }
225 
226  if (nbTotalFeaturesUsed > 0) {
227  return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
228  }
229 
230  return 90.0;
231 }
232 
250 double vpMbGenericTracker::computeCurrentProjectionError(const vpImage<vpRGBa> &I_color,
251  const vpHomogeneousMatrix &_cMo,
252  const vpCameraParameters &_cam)
253 {
254  vpImage<unsigned char> I;
255  vpImageConvert::convert(I_color, I); // FS: Shouldn't we use m_I here, which was already converted in track()?
256 
257  return computeCurrentProjectionError(I, _cMo, _cam);
258 }
259 
260 void vpMbGenericTracker::computeProjectionError()
261 {
262  if (computeProjError) {
263  double rawTotalProjectionError = 0.0;
264  unsigned int nbTotalFeaturesUsed = 0;
265 
266  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
267  it != m_mapOfTrackers.end(); ++it) {
268  TrackerWrapper *tracker = it->second;
269 
270  double curProjError = tracker->getProjectionError();
271  unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
272 
273  if (nbFeaturesUsed > 0) {
274  nbTotalFeaturesUsed += nbFeaturesUsed;
275  rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
276  }
277  }
278 
279  if (nbTotalFeaturesUsed > 0) {
280  projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
281  }
282  else {
283  projectionError = 90.0;
284  }
285  }
286  else {
287  projectionError = 90.0;
288  }
289 }
290 
291 void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
292 {
293  computeVVSInit(mapOfImages);
294 
295  if (m_error.getRows() < 4) {
296  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
297  }
298 
299  double normRes = 0;
300  double normRes_1 = -1;
301  unsigned int iter = 0;
302 
303  vpMatrix LTL;
304  vpColVector LTR, v;
305  vpColVector error_prev;
306 
307  double mu = m_initialMu;
308  vpHomogeneousMatrix cMo_prev;
309 
310  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
311  if (isoJoIdentity)
312  oJo.eye();
313 
314  // Covariance
315  vpColVector W_true(m_error.getRows());
316  vpMatrix L_true, LVJ_true;
317 
318  // Create the map of VelocityTwistMatrices
319  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
320  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
321  it != m_mapOfCameraTransformationMatrix.end(); ++it) {
322  vpVelocityTwistMatrix cVo;
323  cVo.buildFrom(it->second);
324  mapOfVelocityTwist[it->first] = cVo;
325  }
326 
327  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
328 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
329  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
330 #endif
331  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
332  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];
333 
334  m_nb_feat_edge = 0;
335  m_nb_feat_klt = 0;
336  m_nb_feat_depthNormal = 0;
337  m_nb_feat_depthDense = 0;
338 
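// Main minimization loop: a robustly re-weighted Gauss-Newton scheme with a
// Levenberg-Marquardt fallback (see computeVVSCheckLevenbergMarquardt() and
// computeVVSPoseEstimation()). Each iteration stacks the per-camera interaction
// matrices and residuals expressed in the reference camera frame, applies the
// per-feature robust weights and the per-feature-type factors, solves for the
// velocity twist v and updates the pose through the exponential map. The loop
// stops when the residual norm stabilizes (m_stopCriteriaEpsilon) or after
// m_maxIter iterations.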
339  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
340  computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);
341 
342  bool reStartFromLastIncrement = false;
343  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
344  if (reStartFromLastIncrement) {
345  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
346  it != m_mapOfTrackers.end(); ++it) {
347  TrackerWrapper *tracker = it->second;
348 
349  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;
350 
351 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
352  vpHomogeneousMatrix c_curr_tTc_curr0 =
353  m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
354  tracker->ctTc0 = c_curr_tTc_curr0;
355 #endif
356  }
357  }
358 
359  if (!reStartFromLastIncrement) {
360  computeVVSWeights();
361 
362  if (computeCovariance) {
363  L_true = m_L;
364  if (!isoJoIdentity) {
365  vpVelocityTwistMatrix cVo;
366  cVo.buildFrom(m_cMo);
367  LVJ_true = (m_L * (cVo * oJo));
368  }
369  }
370 
371  vpVelocityTwistMatrix cVo;
372  if (iter == 0) {
373  // If all the 6 dof should be estimated, we check if the interaction
374  // matrix is full rank. If not we remove automatically the dof that
375  // cannot be estimated. This is particularly useful when considering
376  // circles (rank 5) and cylinders (rank 4)
377  if (isoJoIdentity) {
378  cVo.buildFrom(m_cMo);
379 
380  vpMatrix K; // kernel
381  unsigned int rank = (m_L * cVo).kernel(K);
382  if (rank == 0) {
383  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
384  }
385 
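// When the stacked interaction matrix is rank deficient (rank < 6), the pose cannot be
// estimated along the directions spanned by its kernel K. Assuming K holds an orthonormal
// basis of that kernel (as returned by vpMatrix::kernel()), oJo = I - K^T*K is the
// orthogonal projector onto the estimable subspace, so the corresponding degrees of
// freedom are frozen for the rest of the minimization.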
386  if (rank != 6) {
387  vpMatrix I; // Identity
388  I.eye(6);
389  oJo = I - K.AtA();
390  isoJoIdentity = false;
391  }
392  }
393  }
394 
395  // Weighting
396  double num = 0;
397  double den = 0;
398 
399  unsigned int start_index = 0;
400  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
401  it != m_mapOfTrackers.end(); ++it) {
402  TrackerWrapper *tracker = it->second;
403 
404  if (tracker->m_trackerType & EDGE_TRACKER) {
405  for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
406  double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
407  W_true[start_index + i] = wi;
408  m_weightedError[start_index + i] = wi * m_error[start_index + i];
409 
410  num += wi * vpMath::sqr(m_error[start_index + i]);
411  den += wi;
412 
413  for (unsigned int j = 0; j < m_L.getCols(); j++) {
414  m_L[start_index + i][j] *= wi;
415  }
416  }
417 
418  start_index += tracker->m_error_edge.getRows();
419  }
420 
421 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
422  if (tracker->m_trackerType & KLT_TRACKER) {
423  for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
424  double wi = tracker->m_w_klt[i] * factorKlt;
425  W_true[start_index + i] = wi;
426  m_weightedError[start_index + i] = wi * m_error[start_index + i];
427 
428  num += wi * vpMath::sqr(m_error[start_index + i]);
429  den += wi;
430 
431  for (unsigned int j = 0; j < m_L.getCols(); j++) {
432  m_L[start_index + i][j] *= wi;
433  }
434  }
435 
436  start_index += tracker->m_error_klt.getRows();
437  }
438 #endif
439 
440  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
441  for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
442  double wi = tracker->m_w_depthNormal[i] * factorDepth;
443  W_true[start_index + i] = wi;
444  m_weightedError[start_index + i] = wi * m_error[start_index + i];
445 
446  num += wi * vpMath::sqr(m_error[start_index + i]);
447  den += wi;
448 
449  for (unsigned int j = 0; j < m_L.getCols(); j++) {
450  m_L[start_index + i][j] *= wi;
451  }
452  }
453 
454  start_index += tracker->m_error_depthNormal.getRows();
455  }
456 
457  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
458  for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
459  double wi = tracker->m_w_depthDense[i] * factorDepthDense;
460  W_true[start_index + i] = wi;
461  m_weightedError[start_index + i] = wi * m_error[start_index + i];
462 
463  num += wi * vpMath::sqr(m_error[start_index + i]);
464  den += wi;
465 
466  for (unsigned int j = 0; j < m_L.getCols(); j++) {
467  m_L[start_index + i][j] *= wi;
468  }
469  }
470 
471  start_index += tracker->m_error_depthDense.getRows();
472  }
473  }
474 
475  normRes_1 = normRes;
476  normRes = sqrt(num / den);
477 
478  computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
479 
480  cMo_prev = m_cMo;
481 
482  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
483 
484 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
485  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
486  it != m_mapOfTrackers.end(); ++it) {
487  TrackerWrapper *tracker = it->second;
488 
489  vpHomogeneousMatrix c_curr_tTc_curr0 =
490  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
491  tracker->ctTc0 = c_curr_tTc_curr0;
492  }
493 #endif
494 
495  // Update cMo
496  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
497  it != m_mapOfTrackers.end(); ++it) {
498  TrackerWrapper *tracker = it->second;
499  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
500  }
501  }
502 
503  iter++;
504  }
505 
506  // Update features number
507  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
508  it != m_mapOfTrackers.end(); ++it) {
509  TrackerWrapper *tracker = it->second;
510  if (tracker->m_trackerType & EDGE_TRACKER) {
511  m_nb_feat_edge += tracker->m_error_edge.size();
512  }
513 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
514  if (tracker->m_trackerType & KLT_TRACKER) {
515  m_nb_feat_klt += tracker->m_error_klt.size();
516  }
517 #endif
518  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
519  m_nb_feat_depthNormal += tracker->m_error_depthNormal.size();
520  }
521  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
522  m_nb_feat_depthDense += tracker->m_error_depthDense.size();
523  }
524  }
525 
526  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
527 
528  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
529  it != m_mapOfTrackers.end(); ++it) {
530  TrackerWrapper *tracker = it->second;
531 
532  if (tracker->m_trackerType & EDGE_TRACKER) {
533  tracker->updateMovingEdgeWeights();
534  }
535  }
536 }
537 
538 void vpMbGenericTracker::computeVVSInit()
539 {
540  throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
541 }
542 
543 void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
544 {
545  unsigned int nbFeatures = 0;
546 
547  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
548  it != m_mapOfTrackers.end(); ++it) {
549  TrackerWrapper *tracker = it->second;
550  tracker->computeVVSInit(mapOfImages[it->first]);
551 
552  nbFeatures += tracker->m_error.getRows();
553  }
554 
555  m_L.resize(nbFeatures, 6, false, false);
556  m_error.resize(nbFeatures, false);
557 
558  m_weightedError.resize(nbFeatures, false);
559  m_w.resize(nbFeatures, false);
560  m_w = 1;
561 }
562 
563 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu()
564 {
565  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
566  "computeVVSInteractionMatrixAndR"
567  "esidu() should not be called");
568 }
569 
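// The overload below stacks, for every camera, the interaction matrix expressed in the
// reference camera frame (through the constant velocity twist built from the camera
// transformation matrices) and the corresponding residual vector. Rows are appended in
// map iteration order, which is also the order used in computeVVSInit() and in the
// weighting stage of computeVVS(), so the start_index bookkeeping stays consistent
// across the three functions.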
570 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu(
571  std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
572  std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
573 {
574  unsigned int start_index = 0;
575 
576  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
577  it != m_mapOfTrackers.end(); ++it) {
578  TrackerWrapper *tracker = it->second;
579 
580  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
581 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
582  vpHomogeneousMatrix c_curr_tTc_curr0 =
583  m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
584  tracker->ctTc0 = c_curr_tTc_curr0;
585 #endif
586 
587  tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
588 
589  m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
590  m_error.insert(start_index, tracker->m_error);
591 
592  start_index += tracker->m_error.getRows();
593  }
594 }
595 
596 void vpMbGenericTracker::computeVVSWeights()
597 {
598  unsigned int start_index = 0;
599 
600  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
601  it != m_mapOfTrackers.end(); ++it) {
602  TrackerWrapper *tracker = it->second;
603  tracker->computeVVSWeights();
604 
605  m_w.insert(start_index, tracker->m_w);
606  start_index += tracker->m_w.getRows();
607  }
608 }
609 
623 void vpMbGenericTracker::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
624  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
625  bool displayFullModel)
626 {
627  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
628  if (it != m_mapOfTrackers.end()) {
629  TrackerWrapper *tracker = it->second;
630  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
631  }
632  else {
633  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
634  }
635 }
636 
650 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
651  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
652  bool displayFullModel)
653 {
654  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
655  if (it != m_mapOfTrackers.end()) {
656  TrackerWrapper *tracker = it->second;
657  tracker->display(I, cMo, cam, col, thickness, displayFullModel);
658  }
659  else {
660  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
661  }
662 }
663 
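// A minimal sketch of the stereo display overloads below (illustrative variable names;
// assumes a tracker built with two cameras and images already opened with vpDisplay):
//
//   vpHomogeneousMatrix c1Mo, c2Mo;
//   tracker.getPose(c1Mo, c2Mo);
//   vpCameraParameters cam1, cam2;
//   tracker.getCameraParameters(cam1, cam2);
//   tracker.display(I1, I2, c1Mo, c2Mo, cam1, cam2, vpColor::red, 2);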
680 void vpMbGenericTracker::display(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
681  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
682  const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
683  unsigned int thickness, bool displayFullModel)
684 {
685  if (m_mapOfTrackers.size() == 2) {
686  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
687  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
688  ++it;
689 
690  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
691  }
692  else {
693  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
694  << std::endl;
695  }
696 }
697 
714 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I1, const vpImage<vpRGBa> &I2, const vpHomogeneousMatrix &c1Mo,
715  const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
716  const vpCameraParameters &cam2, const vpColor &color, unsigned int thickness,
717  bool displayFullModel)
718 {
719  if (m_mapOfTrackers.size() == 2) {
720  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
721  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
722  ++it;
723 
724  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
725  }
726  else {
727  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
728  << std::endl;
729  }
730 }
731 
743 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
744  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
745  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
746  const vpColor &col, unsigned int thickness, bool displayFullModel)
747 {
748  // Display only for the given images
749  for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
750  it_img != mapOfImages.end(); ++it_img) {
751  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
752  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
753  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
754 
755  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
756  it_cam != mapOfCameraParameters.end()) {
757  TrackerWrapper *tracker = it_tracker->second;
758  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
759  }
760  else {
761  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
762  }
763  }
764 }
765 
777 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
778  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
779  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
780  const vpColor &col, unsigned int thickness, bool displayFullModel)
781 {
782  // Display only for the given images
783  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
784  it_img != mapOfImages.end(); ++it_img) {
785  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
786  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
787  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
788 
789  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
790  it_cam != mapOfCameraParameters.end()) {
791  TrackerWrapper *tracker = it_tracker->second;
792  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
793  }
794  else {
795  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
796  }
797  }
798 }
799 
805 std::vector<std::string> vpMbGenericTracker::getCameraNames() const
806 {
807  std::vector<std::string> cameraNames;
808 
809  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
810  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
811  cameraNames.push_back(it_tracker->first);
812  }
813 
814  return cameraNames;
815 }
816 
817 void vpMbGenericTracker::getCameraParameters(vpCameraParameters &camera) const
818 {
819  vpMbTracker::getCameraParameters(camera);
820 }
821 
830 void vpMbGenericTracker::getCameraParameters(vpCameraParameters &cam1, vpCameraParameters &cam2) const
831 {
832  if (m_mapOfTrackers.size() == 2) {
833  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
834  it->second->getCameraParameters(cam1);
835  ++it;
836 
837  it->second->getCameraParameters(cam2);
838  }
839  else {
840  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
841  << std::endl;
842  }
843 }
844 
850 void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
851 {
852  // Clear the input map
853  mapOfCameraParameters.clear();
854 
855  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
856  it != m_mapOfTrackers.end(); ++it) {
857  vpCameraParameters cam_;
858  it->second->getCameraParameters(cam_);
859  mapOfCameraParameters[it->first] = cam_;
860  }
861 }
862 
869 std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
870 {
871  std::map<std::string, int> trackingTypes;
872 
873  TrackerWrapper *tracker;
874  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
875  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
876  tracker = it_tracker->second;
877  trackingTypes[it_tracker->first] = tracker->getTrackerType();
878  }
879 
880  return trackingTypes;
881 }
882 
891 void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
892 {
893  if (m_mapOfTrackers.size() == 2) {
894  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
895  clippingFlag1 = it->second->getClipping();
896  ++it;
897 
898  clippingFlag2 = it->second->getClipping();
899  }
900  else {
901  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
902  << std::endl;
903  }
904 }
905 
911 void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
912 {
913  mapOfClippingFlags.clear();
914 
915  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
916  it != m_mapOfTrackers.end(); ++it) {
917  TrackerWrapper *tracker = it->second;
918  mapOfClippingFlags[it->first] = tracker->getClipping();
919  }
920 }
921 
927 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces()
928 {
929  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
930  if (it != m_mapOfTrackers.end()) {
931  return it->second->getFaces();
932  }
933 
934  std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
935  return faces;
936 }
937 
943 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces(const std::string &cameraName)
944 {
945  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
946  if (it != m_mapOfTrackers.end()) {
947  return it->second->getFaces();
948  }
949 
950  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
951  return faces;
952 }
953 
954 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
958 std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
959 {
960  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
961  if (it != m_mapOfTrackers.end()) {
962  TrackerWrapper *tracker = it->second;
963  return tracker->getFeaturesCircle();
964  }
965  else {
966  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
967  m_referenceCameraName.c_str());
968  }
969 }
970 
974 std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
975 {
976  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
977  if (it != m_mapOfTrackers.end()) {
978  TrackerWrapper *tracker = it->second;
979  return tracker->getFeaturesKltCylinder();
980  }
981  else {
982  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
983  m_referenceCameraName.c_str());
984  }
985 }
986 
990 std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
991 {
992  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
993  if (it != m_mapOfTrackers.end()) {
994  TrackerWrapper *tracker = it->second;
995  return tracker->getFeaturesKlt();
996  }
997  else {
998  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
999  m_referenceCameraName.c_str());
1000  }
1001 }
1002 #endif
1003 
1030 std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
1031 {
1032  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1033 
1034  if (it != m_mapOfTrackers.end()) {
1035  return it->second->getFeaturesForDisplay();
1036  }
1037  else {
1038  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1039  }
1040 
1041  return std::vector<std::vector<double> >();
1042 }
1043 
1069 void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
1070 {
1071  // Clear the input map
1072  mapOfFeatures.clear();
1073 
1074  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1075  it != m_mapOfTrackers.end(); ++it) {
1076  mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1077  }
1078 }
1079 
1090 
1091 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
1100 std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1101 {
1102  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1103  if (it != m_mapOfTrackers.end()) {
1104  TrackerWrapper *tracker = it->second;
1105  return tracker->getKltImagePoints();
1106  }
1107  else {
1108  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1109  }
1110 
1111  return std::vector<vpImagePoint>();
1112 }
1113 
1122 std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1123 {
1124  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1125  if (it != m_mapOfTrackers.end()) {
1126  TrackerWrapper *tracker = it->second;
1127  return tracker->getKltImagePointsWithId();
1128  }
1129  else {
1130  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1131  }
1132 
1133  return std::map<int, vpImagePoint>();
1134 }
1135 
1141 unsigned int vpMbGenericTracker::getKltMaskBorder() const
1142 {
1143  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1144  if (it != m_mapOfTrackers.end()) {
1145  TrackerWrapper *tracker = it->second;
1146  return tracker->getKltMaskBorder();
1147  }
1148  else {
1149  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1150  }
1151 
1152  return 0;
1153 }
1154 
1160 int vpMbGenericTracker::getKltNbPoints() const
1161 {
1162  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1163  if (it != m_mapOfTrackers.end()) {
1164  TrackerWrapper *tracker = it->second;
1165  return tracker->getKltNbPoints();
1166  }
1167  else {
1168  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1169  }
1170 
1171  return 0;
1172 }
1173 
1179 vpKltOpencv vpMbGenericTracker::getKltOpencv() const
1180 {
1181  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1182 
1183  if (it_tracker != m_mapOfTrackers.end()) {
1184  TrackerWrapper *tracker;
1185  tracker = it_tracker->second;
1186  return tracker->getKltOpencv();
1187  }
1188  else {
1189  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1190  }
1191 
1192  return vpKltOpencv();
1193 }
1194 
1203 void vpMbGenericTracker::getKltOpencv(vpKltOpencv &klt1, vpKltOpencv &klt2) const
1204 {
1205  if (m_mapOfTrackers.size() == 2) {
1206  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1207  klt1 = it->second->getKltOpencv();
1208  ++it;
1209 
1210  klt2 = it->second->getKltOpencv();
1211  }
1212  else {
1213  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1214  << std::endl;
1215  }
1216 }
1217 
1223 void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1224 {
1225  mapOfKlts.clear();
1226 
1227  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1228  it != m_mapOfTrackers.end(); ++it) {
1229  TrackerWrapper *tracker = it->second;
1230  mapOfKlts[it->first] = tracker->getKltOpencv();
1231  }
1232 }
1233 
1234 #if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
1240 std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1241 {
1242  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1243  if (it != m_mapOfTrackers.end()) {
1244  TrackerWrapper *tracker = it->second;
1245  return tracker->getKltPoints();
1246  }
1247  else {
1248  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1249  }
1250 
1251  return std::vector<cv::Point2f>();
1252 }
1253 #endif
1254 
1262 #endif
1263 
1276 void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList, unsigned int level) const
1277 {
1278  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1279  if (it != m_mapOfTrackers.end()) {
1280  it->second->getLcircle(circlesList, level);
1281  }
1282  else {
1283  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1284  }
1285 }
1286 
1300 void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1301  unsigned int level) const
1302 {
1303  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1304  if (it != m_mapOfTrackers.end()) {
1305  it->second->getLcircle(circlesList, level);
1306  }
1307  else {
1308  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1309  }
1310 }
1311 
1324 void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList, unsigned int level) const
1325 {
1326  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1327  if (it != m_mapOfTrackers.end()) {
1328  it->second->getLcylinder(cylindersList, level);
1329  }
1330  else {
1331  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1332  }
1333 }
1334 
1348 void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1349  unsigned int level) const
1350 {
1351  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1352  if (it != m_mapOfTrackers.end()) {
1353  it->second->getLcylinder(cylindersList, level);
1354  }
1355  else {
1356  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1357  }
1358 }
1359 
1372 void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList, unsigned int level) const
1373 {
1374  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1375 
1376  if (it != m_mapOfTrackers.end()) {
1377  it->second->getLline(linesList, level);
1378  }
1379  else {
1380  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1381  }
1382 }
1383 
1397 void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1398  unsigned int level) const
1399 {
1400  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1401  if (it != m_mapOfTrackers.end()) {
1402  it->second->getLline(linesList, level);
1403  }
1404  else {
1405  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1406  }
1407 }
1408 
1438 std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1439  const vpHomogeneousMatrix &cMo,
1440  const vpCameraParameters &cam,
1441  bool displayFullModel)
1442 {
1443  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1444 
1445  if (it != m_mapOfTrackers.end()) {
1446  return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1447  }
1448  else {
1449  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1450  }
1451 
1452  return std::vector<std::vector<double> >();
1453 }
1454 
1480 void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1481  const std::map<std::string, unsigned int> &mapOfwidths,
1482  const std::map<std::string, unsigned int> &mapOfheights,
1483  const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1484  const std::map<std::string, vpCameraParameters> &mapOfCams,
1485  bool displayFullModel)
1486 {
1487  // Clear the input map
1488  mapOfModels.clear();
1489 
1490  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1491  it != m_mapOfTrackers.end(); ++it) {
1492  std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1493  std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1494  std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1495  std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1496 
1497  if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
1498  findCam != mapOfCams.end()) {
1499  mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
1500  findCam->second, displayFullModel);
1501  }
1502  }
1503 }
1504 
1510 vpMe vpMbGenericTracker::getMovingEdge() const
1511 {
1512  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1513 
1514  if (it != m_mapOfTrackers.end()) {
1515  return it->second->getMovingEdge();
1516  }
1517  else {
1518  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1519  }
1520 
1521  return vpMe();
1522 }
1523 
1532 void vpMbGenericTracker::getMovingEdge(vpMe &me1, vpMe &me2) const
1533 {
1534  if (m_mapOfTrackers.size() == 2) {
1535  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1536  it->second->getMovingEdge(me1);
1537  ++it;
1538 
1539  it->second->getMovingEdge(me2);
1540  }
1541  else {
1542  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1543  << std::endl;
1544  }
1545 }
1546 
1552 void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1553 {
1554  mapOfMovingEdges.clear();
1555 
1556  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1557  it != m_mapOfTrackers.end(); ++it) {
1558  TrackerWrapper *tracker = it->second;
1559  mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1560  }
1561 }
1562 
1580 unsigned int vpMbGenericTracker::getNbPoints(unsigned int level) const
1581 {
1582  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1583 
1584  unsigned int nbGoodPoints = 0;
1585  if (it != m_mapOfTrackers.end()) {
1586 
1587  nbGoodPoints = it->second->getNbPoints(level);
1588  }
1589  else {
1590  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1591  }
1592 
1593  return nbGoodPoints;
1594 }
1595 
1610 void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, unsigned int level) const
1611 {
1612  mapOfNbPoints.clear();
1613 
1614  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1615  it != m_mapOfTrackers.end(); ++it) {
1616  TrackerWrapper *tracker = it->second;
1617  mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1618  }
1619 }
1620 
1626 unsigned int vpMbGenericTracker::getNbPolygon() const
1627 {
1628  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1629  if (it != m_mapOfTrackers.end()) {
1630  return it->second->getNbPolygon();
1631  }
1632 
1633  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1634  return 0;
1635 }
1636 
1642 void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1643 {
1644  mapOfNbPolygons.clear();
1645 
1646  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1647  it != m_mapOfTrackers.end(); ++it) {
1648  TrackerWrapper *tracker = it->second;
1649  mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1650  }
1651 }
1652 
1663 vpMbtPolygon *vpMbGenericTracker::getPolygon(unsigned int index)
1664 {
1665  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1666  if (it != m_mapOfTrackers.end()) {
1667  return it->second->getPolygon(index);
1668  }
1669 
1670  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1671  return NULL;
1672 }
1673 
1685 vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, unsigned int index)
1686 {
1687  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1688  if (it != m_mapOfTrackers.end()) {
1689  return it->second->getPolygon(index);
1690  }
1691 
1692  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1693  return NULL;
1694 }
1695 
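// The two getPolygonFaces() overloads below return, for the reference camera (resp. for
// every camera), a pair made of the 2D polygons projected in the image and the
// corresponding lists of 3D model points, optionally ordered, restricted to visible
// faces and clipped depending on the orderPolygons, useVisibility and clipPolygon flags.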
1711 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1712 vpMbGenericTracker::getPolygonFaces(bool orderPolygons, bool useVisibility, bool clipPolygon)
1713 {
1714  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1715 
1716  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1717  if (it != m_mapOfTrackers.end()) {
1718  TrackerWrapper *tracker = it->second;
1719  polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1720  }
1721  else {
1722  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1723  }
1724 
1725  return polygonFaces;
1726 }
1727 
1745 void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1746  std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1747  bool orderPolygons, bool useVisibility, bool clipPolygon)
1748 {
1749  mapOfPolygons.clear();
1750  mapOfPoints.clear();
1751 
1752  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1753  it != m_mapOfTrackers.end(); ++it) {
1754  TrackerWrapper *tracker = it->second;
1755  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1756  tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1757 
1758  mapOfPolygons[it->first] = polygonFaces.first;
1759  mapOfPoints[it->first] = polygonFaces.second;
1760  }
1761 }
1762 
1764 
1773 void vpMbGenericTracker::getPose(vpHomogeneousMatrix &c1Mo, vpHomogeneousMatrix &c2Mo) const
1774 {
1775  if (m_mapOfTrackers.size() == 2) {
1776  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1777  it->second->getPose(c1Mo);
1778  ++it;
1779 
1780  it->second->getPose(c2Mo);
1781  }
1782  else {
1783  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1784  << std::endl;
1785  }
1786 }
1787 
1793 void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1794 {
1795  // Clear the map
1796  mapOfCameraPoses.clear();
1797 
1798  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1799  it != m_mapOfTrackers.end(); ++it) {
1800  TrackerWrapper *tracker = it->second;
1801  tracker->getPose(mapOfCameraPoses[it->first]);
1802  }
1803 }
1804 
1809 
1813 int vpMbGenericTracker::getTrackerType() const
1814 {
1815  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1816  if (it != m_mapOfTrackers.end()) {
1817  TrackerWrapper *tracker = it->second;
1818  return tracker->getTrackerType();
1819  }
1820  else {
1821  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1822  m_referenceCameraName.c_str());
1823  }
1824 }
1825 
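// init() below propagates the current reference pose m_cMo to every camera: each wrapped
// tracker receives m_mapOfCameraTransformationMatrix[camera] * m_cMo as its own pose
// before being (re)initialized on its image.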
1826 void vpMbGenericTracker::init(const vpImage<unsigned char> &I)
1827 {
1828  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1829  it != m_mapOfTrackers.end(); ++it) {
1830  TrackerWrapper *tracker = it->second;
1831  tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
1832  tracker->init(I);
1833  }
1834 }
1835 
1836 void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
1837  double /*radius*/, int /*idFace*/, const std::string & /*name*/)
1838 {
1839  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
1840 }
1841 
1842 #ifdef VISP_HAVE_MODULE_GUI
1843 
1886 void vpMbGenericTracker::initClick(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1887  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1888  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1889 {
1890  if (m_mapOfTrackers.size() == 2) {
1891  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1892  TrackerWrapper *tracker = it->second;
1893  tracker->initClick(I1, initFile1, displayHelp, T1);
1894 
1895  ++it;
1896 
1897  tracker = it->second;
1898  tracker->initClick(I2, initFile2, displayHelp, T2);
1899 
1900  it = m_mapOfTrackers.find(m_referenceCameraName);
1901  if (it != m_mapOfTrackers.end()) {
1902  tracker = it->second;
1903 
1904  // Set the reference cMo
1905  tracker->getPose(m_cMo);
1906  }
1907  }
1908  else {
1910  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1911  }
1912 }
1913 
1956 void vpMbGenericTracker::initClick(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
1957  const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1958  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1959 {
1960  if (m_mapOfTrackers.size() == 2) {
1961  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1962  TrackerWrapper *tracker = it->second;
1963  tracker->initClick(I_color1, initFile1, displayHelp, T1);
1964 
1965  ++it;
1966 
1967  tracker = it->second;
1968  tracker->initClick(I_color2, initFile2, displayHelp, T2);
1969 
1970  it = m_mapOfTrackers.find(m_referenceCameraName);
1971  if (it != m_mapOfTrackers.end()) {
1972  tracker = it->second;
1973 
1974  // Set the reference cMo
1975  tracker->getPose(m_cMo);
1976  }
1977  }
1978  else {
1980  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1981  }
1982 }
1983 
2026 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2027  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2028  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2029 {
2030  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2031  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2032  mapOfImages.find(m_referenceCameraName);
2033  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2034 
2035  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2036  TrackerWrapper *tracker = it_tracker->second;
2037  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2038  if (it_T != mapOfT.end())
2039  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2040  else
2041  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2042  tracker->getPose(m_cMo);
2043  }
2044  else {
2045  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2046  }
2047 
2048  // Vector of missing initFile for cameras
2049  std::vector<std::string> vectorOfMissingCameraPoses;
2050 
2051  // Set pose for the specified cameras
2052  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2053  if (it_tracker->first != m_referenceCameraName) {
2054  it_img = mapOfImages.find(it_tracker->first);
2055  it_initFile = mapOfInitFiles.find(it_tracker->first);
2056 
2057  if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2058  // InitClick for the current camera
2059  TrackerWrapper *tracker = it_tracker->second;
2060  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2061  }
2062  else {
2063  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2064  }
2065  }
2066  }
2067 
2068  // Init for cameras that do not have an initFile
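  // Cameras that were not given an init file do not require any user interaction:
  // their pose is deduced from the reference camera pose and the known camera
  // transformation matrix (cCurrentMo = c_current_T_reference * reference_cMo).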
2069  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2070  it != vectorOfMissingCameraPoses.end(); ++it) {
2071  it_img = mapOfImages.find(*it);
2072  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2073  m_mapOfCameraTransformationMatrix.find(*it);
2074 
2075  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2076  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2077  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2078  m_mapOfTrackers[*it]->init(*it_img->second);
2079  }
2080  else {
2082  "Missing image or missing camera transformation "
2083  "matrix! Cannot set the pose for camera: %s!",
2084  it->c_str());
2085  }
2086  }
2087 }
2088 
2131 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2132  const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2133  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2134 {
2135  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2136  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2137  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2138 
2139  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2140  TrackerWrapper *tracker = it_tracker->second;
2141  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2142  if (it_T != mapOfT.end())
2143  tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2144  else
2145  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2146  tracker->getPose(m_cMo);
2147  }
2148  else {
2149  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2150  }
2151 
2152  // Vector of missing initFile for cameras
2153  std::vector<std::string> vectorOfMissingCameraPoses;
2154 
2155  // Set pose for the specified cameras
2156  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2157  if (it_tracker->first != m_referenceCameraName) {
2158  it_img = mapOfColorImages.find(it_tracker->first);
2159  it_initFile = mapOfInitFiles.find(it_tracker->first);
2160 
2161  if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2162  // InitClick for the current camera
2163  TrackerWrapper *tracker = it_tracker->second;
2164  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2165  }
2166  else {
2167  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2168  }
2169  }
2170  }
2171 
2172  // Init for cameras that do not have an initFile
2173  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2174  it != vectorOfMissingCameraPoses.end(); ++it) {
2175  it_img = mapOfColorImages.find(*it);
2176  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2177  m_mapOfCameraTransformationMatrix.find(*it);
2178 
2179  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2180  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2181  m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2182  vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
2183  m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
2184  }
2185  else {
2187  "Missing image or missing camera transformation "
2188  "matrix! Cannot set the pose for camera: %s!",
2189  it->c_str());
2190  }
2191  }
2192 }
2193 #endif
2194 
2195 void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
2196  const int /*idFace*/, const std::string & /*name*/)
2197 {
2198  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
2199 }
2200 
2201 void vpMbGenericTracker::initFaceFromCorners(vpMbtPolygon & /*polygon*/)
2202 {
2203  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
2204 }
2205 
2206 void vpMbGenericTracker::initFaceFromLines(vpMbtPolygon & /*polygon*/)
2207 {
2208  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
2209 }
2210 
2240 void vpMbGenericTracker::initFromPoints(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2241  const std::string &initFile1, const std::string &initFile2)
2242 {
2243  if (m_mapOfTrackers.size() == 2) {
2244  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2245  TrackerWrapper *tracker = it->second;
2246  tracker->initFromPoints(I1, initFile1);
2247 
2248  ++it;
2249 
2250  tracker = it->second;
2251  tracker->initFromPoints(I2, initFile2);
2252 
2253  it = m_mapOfTrackers.find(m_referenceCameraName);
2254  if (it != m_mapOfTrackers.end()) {
2255  tracker = it->second;
2256 
2257  // Set the reference cMo
2258  tracker->getPose(m_cMo);
2259 
2260  // Set the reference camera parameters
2261  tracker->getCameraParameters(m_cam);
2262  }
2263  }
2264  else {
2266  "Cannot initFromPoints()! Require two cameras but "
2267  "there are %d cameras!",
2268  m_mapOfTrackers.size());
2269  }
2270 }
2271 
2301 void vpMbGenericTracker::initFromPoints(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2302  const std::string &initFile1, const std::string &initFile2)
2303 {
2304  if (m_mapOfTrackers.size() == 2) {
2305  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2306  TrackerWrapper *tracker = it->second;
2307  tracker->initFromPoints(I_color1, initFile1);
2308 
2309  ++it;
2310 
2311  tracker = it->second;
2312  tracker->initFromPoints(I_color2, initFile2);
2313 
2314  it = m_mapOfTrackers.find(m_referenceCameraName);
2315  if (it != m_mapOfTrackers.end()) {
2316  tracker = it->second;
2317 
2318  // Set the reference cMo
2319  tracker->getPose(m_cMo);
2320 
2321  // Set the reference camera parameters
2322  tracker->getCameraParameters(m_cam);
2323  }
2324  }
2325  else {
2327  "Cannot initFromPoints()! Require two cameras but "
2328  "there are %d cameras!",
2329  m_mapOfTrackers.size());
2330  }
2331 }
2332 
2333 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2334  const std::map<std::string, std::string> &mapOfInitPoints)
2335 {
2336  // Set the reference cMo
2337  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2338  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2339  mapOfImages.find(m_referenceCameraName);
2340  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2341 
2342  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2343  TrackerWrapper *tracker = it_tracker->second;
2344  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2345  tracker->getPose(m_cMo);
2346  }
2347  else {
2348  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2349  }
2350 
2351  // Vector of missing initPoints for cameras
2352  std::vector<std::string> vectorOfMissingCameraPoints;
2353 
2354  // Set pose for the specified cameras
2355  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2356  it_img = mapOfImages.find(it_tracker->first);
2357  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2358 
2359  if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2360  // Set pose
2361  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2362  }
2363  else {
2364  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2365  }
2366  }
2367 
2368  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2369  it != vectorOfMissingCameraPoints.end(); ++it) {
2370  it_img = mapOfImages.find(*it);
2371  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2372  m_mapOfCameraTransformationMatrix.find(*it);
2373 
2374  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2375  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2376  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2377  }
2378  else {
2379  throw vpException(vpTrackingException::initializationError,
2380  "Missing image or missing camera transformation "
2381  "matrix! Cannot init the pose for camera: %s!",
2382  it->c_str());
2383  }
2384  }
2385 }
2386 
2387 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2388  const std::map<std::string, std::string> &mapOfInitPoints)
2389 {
2390  // Set the reference cMo
2391  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2392  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2393  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2394 
2395  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
2396  it_initPoints != mapOfInitPoints.end()) {
2397  TrackerWrapper *tracker = it_tracker->second;
2398  tracker->initFromPoints(*it_img->second, it_initPoints->second);
2399  tracker->getPose(m_cMo);
2400  }
2401  else {
2402  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2403  }
2404 
2405  // Vector of missing initPoints for cameras
2406  std::vector<std::string> vectorOfMissingCameraPoints;
2407 
2408  // Set pose for the specified cameras
2409  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2410  it_img = mapOfColorImages.find(it_tracker->first);
2411  it_initPoints = mapOfInitPoints.find(it_tracker->first);
2412 
2413  if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2414  // Set pose
2415  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2416  }
2417  else {
2418  vectorOfMissingCameraPoints.push_back(it_tracker->first);
2419  }
2420  }
2421 
2422  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2423  it != vectorOfMissingCameraPoints.end(); ++it) {
2424  it_img = mapOfColorImages.find(*it);
2425  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2426  m_mapOfCameraTransformationMatrix.find(*it);
2427 
2428  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2429  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2430  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2431  }
2432  else {
2433  throw vpException(vpTrackingException::initializationError,
2434  "Missing image or missing camera transformation "
2435  "matrix! Cannot init the pose for camera: %s!",
2436  it->c_str());
2437  }
2438  }
2439 }
2440 
2441 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo)
2442 {
2443  vpMbTracker::initFromPose(I, cMo);
2444 }
2445 
2457 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2458  const std::string &initFile1, const std::string &initFile2)
2459 {
2460  if (m_mapOfTrackers.size() == 2) {
2461  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2462  TrackerWrapper *tracker = it->second;
2463  tracker->initFromPose(I1, initFile1);
2464 
2465  ++it;
2466 
2467  tracker = it->second;
2468  tracker->initFromPose(I2, initFile2);
2469 
2470  it = m_mapOfTrackers.find(m_referenceCameraName);
2471  if (it != m_mapOfTrackers.end()) {
2472  tracker = it->second;
2473 
2474  // Set the reference cMo
2475  tracker->getPose(m_cMo);
2476 
2477  // Set the reference camera parameters
2478  tracker->getCameraParameters(m_cam);
2479  }
2480  }
2481  else {
2482  throw vpException(vpTrackingException::initializationError,
2483  "Cannot initFromPose()! Require two cameras but there "
2484  "are %d cameras!",
2485  m_mapOfTrackers.size());
2486  }
2487 }
2488 
2500 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2501  const std::string &initFile1, const std::string &initFile2)
2502 {
2503  if (m_mapOfTrackers.size() == 2) {
2504  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2505  TrackerWrapper *tracker = it->second;
2506  tracker->initFromPose(I_color1, initFile1);
2507 
2508  ++it;
2509 
2510  tracker = it->second;
2511  tracker->initFromPose(I_color2, initFile2);
2512 
2513  it = m_mapOfTrackers.find(m_referenceCameraName);
2514  if (it != m_mapOfTrackers.end()) {
2515  tracker = it->second;
2516 
2517  // Set the reference cMo
2518  tracker->getPose(m_cMo);
2519 
2520  // Set the reference camera parameters
2521  tracker->getCameraParameters(m_cam);
2522  }
2523  }
2524  else {
2525  throw vpException(vpTrackingException::initializationError,
2526  "Cannot initFromPose()! Require two cameras but there "
2527  "are %d cameras!",
2528  m_mapOfTrackers.size());
2529  }
2530 }
2531 
2546 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2547  const std::map<std::string, std::string> &mapOfInitPoses)
2548 {
2549  // Set the reference cMo
2550  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2551  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2552  mapOfImages.find(m_referenceCameraName);
2553  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2554 
2555  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2556  TrackerWrapper *tracker = it_tracker->second;
2557  tracker->initFromPose(*it_img->second, it_initPose->second);
2558  tracker->getPose(m_cMo);
2559  }
2560  else {
2561  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2562  }
2563 
2564  // Vector of missing pose matrices for cameras
2565  std::vector<std::string> vectorOfMissingCameraPoses;
2566 
2567  // Set pose for the specified cameras
2568  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2569  it_img = mapOfImages.find(it_tracker->first);
2570  it_initPose = mapOfInitPoses.find(it_tracker->first);
2571 
2572  if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2573  // Set pose
2574  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2575  }
2576  else {
2577  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2578  }
2579  }
2580 
2581  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2582  it != vectorOfMissingCameraPoses.end(); ++it) {
2583  it_img = mapOfImages.find(*it);
2584  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2585  m_mapOfCameraTransformationMatrix.find(*it);
2586 
2587  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2588  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2589  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2590  }
2591  else {
2592  throw vpException(vpTrackingException::initializationError,
2593  "Missing image or missing camera transformation "
2594  "matrix! Cannot init the pose for camera: %s!",
2595  it->c_str());
2596  }
2597  }
2598 }
2599 
2614 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2615  const std::map<std::string, std::string> &mapOfInitPoses)
2616 {
2617  // Set the reference cMo
2618  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2619  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2620  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2621 
2622  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2623  TrackerWrapper *tracker = it_tracker->second;
2624  tracker->initFromPose(*it_img->second, it_initPose->second);
2625  tracker->getPose(m_cMo);
2626  }
2627  else {
2628  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2629  }
2630 
2631  // Vector of missing pose matrices for cameras
2632  std::vector<std::string> vectorOfMissingCameraPoses;
2633 
2634  // Set pose for the specified cameras
2635  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2636  it_img = mapOfColorImages.find(it_tracker->first);
2637  it_initPose = mapOfInitPoses.find(it_tracker->first);
2638 
2639  if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2640  // Set pose
2641  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2642  }
2643  else {
2644  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2645  }
2646  }
2647 
2648  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2649  it != vectorOfMissingCameraPoses.end(); ++it) {
2650  it_img = mapOfColorImages.find(*it);
2651  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2652  m_mapOfCameraTransformationMatrix.find(*it);
2653 
2654  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2655  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2656  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2657  }
2658  else {
2659  throw vpException(vpTrackingException::initializationError,
2660  "Missing image or missing camera transformation "
2661  "matrix! Cannot init the pose for camera: %s!",
2662  it->c_str());
2663  }
2664  }
2665 }
2666 
2677 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2678  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2679 {
2680  if (m_mapOfTrackers.size() == 2) {
2681  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2682  it->second->initFromPose(I1, c1Mo);
2683 
2684  ++it;
2685 
2686  it->second->initFromPose(I2, c2Mo);
2687 
2688  m_cMo = c1Mo;
2689  }
2690  else {
2691  throw vpException(vpTrackingException::fatalError,
2692  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2693  }
2694 }
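// Editor's note: the snippet below is an illustrative usage sketch, not part of the
// original source. It assumes a stereo tracker built as vpMbGenericTracker(2, ...) and
// hypothetical images/poses; the first camera pose becomes the reference pose m_cMo.
//
//   vpImage<unsigned char> I1, I2;   // left and right images (hypothetical)
//   vpHomogeneousMatrix c1Mo, c2Mo;  // e.g. obtained from a calibration or init step
//   tracker.initFromPose(I1, I2, c1Mo, c2Mo);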
2695 
2706 void vpMbGenericTracker::initFromPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
2707  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2708 {
2709  if (m_mapOfTrackers.size() == 2) {
2710  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2711  it->second->initFromPose(I_color1, c1Mo);
2712 
2713  ++it;
2714 
2715  it->second->initFromPose(I_color2, c2Mo);
2716 
2717  m_cMo = c1Mo;
2718  }
2719  else {
2720  throw vpException(vpTrackingException::fatalError,
2721  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2722  }
2723 }
2724 
2738 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2739  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2740 {
2741  // Set the reference cMo
2742  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2743  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2744  mapOfImages.find(m_referenceCameraName);
2745  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2746 
2747  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2748  TrackerWrapper *tracker = it_tracker->second;
2749  tracker->initFromPose(*it_img->second, it_camPose->second);
2750  tracker->getPose(m_cMo);
2751  }
2752  else {
2753  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2754  }
2755 
2756  // Vector of missing pose matrices for cameras
2757  std::vector<std::string> vectorOfMissingCameraPoses;
2758 
2759  // Set pose for the specified cameras
2760  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2761  it_img = mapOfImages.find(it_tracker->first);
2762  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2763 
2764  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2765  // Set pose
2766  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2767  }
2768  else {
2769  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2770  }
2771  }
2772 
2773  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2774  it != vectorOfMissingCameraPoses.end(); ++it) {
2775  it_img = mapOfImages.find(*it);
2776  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2777  m_mapOfCameraTransformationMatrix.find(*it);
2778 
2779  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2780  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2781  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2782  }
2783  else {
2784  throw vpException(vpTrackingException::initializationError,
2785  "Missing image or missing camera transformation "
2786  "matrix! Cannot set the pose for camera: %s!",
2787  it->c_str());
2788  }
2789  }
2790 }
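// Editor's note: illustrative sketch (not part of the original source), assuming camera
// names "Camera1"/"Camera2" and grayscale images. Cameras missing from the maps are
// initialized from m_cMo and their known camera transformation matrices.
//
//   std::map<std::string, const vpImage<unsigned char> *> images;
//   std::map<std::string, vpHomogeneousMatrix> poses;
//   images["Camera1"] = &I1; images["Camera2"] = &I2;
//   poses["Camera1"] = c1Mo;               // reference camera only
//   tracker.initFromPose(images, poses);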
2791 
2805 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2806  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2807 {
2808  // Set the reference cMo
2809  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2810  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2811  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2812 
2813  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2814  TrackerWrapper *tracker = it_tracker->second;
2815  tracker->initFromPose(*it_img->second, it_camPose->second);
2816  tracker->getPose(m_cMo);
2817  }
2818  else {
2819  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2820  }
2821 
2822  // Vector of missing pose matrices for cameras
2823  std::vector<std::string> vectorOfMissingCameraPoses;
2824 
2825  // Set pose for the specified cameras
2826  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2827  it_img = mapOfColorImages.find(it_tracker->first);
2828  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2829 
2830  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2831  // Set pose
2832  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2833  }
2834  else {
2835  vectorOfMissingCameraPoses.push_back(it_tracker->first);
2836  }
2837  }
2838 
2839  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2840  it != vectorOfMissingCameraPoses.end(); ++it) {
2841  it_img = mapOfColorImages.find(*it);
2842  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2843  m_mapOfCameraTransformationMatrix.find(*it);
2844 
2845  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2846  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2847  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2848  }
2849  else {
2850  throw vpException(vpTrackingException::initializationError,
2851  "Missing image or missing camera transformation "
2852  "matrix! Cannot set the pose for camera: %s!",
2853  it->c_str());
2854  }
2855  }
2856 }
2857 
2869 void vpMbGenericTracker::loadConfigFile(const std::string &configFile, bool verbose)
2870 {
2871  const std::string extension = vpIoTools::getFileExtension(configFile);
2872  if (extension == ".xml") {
2873  loadConfigFileXML(configFile, verbose);
2874  }
2875 #ifdef VISP_HAVE_NLOHMANN_JSON
2876  else if (extension == ".json") {
2877  loadConfigFileJSON(configFile, verbose);
2878  }
2879 #endif
2880  else {
2881  throw vpException(vpException::ioError, "MBT config parsing: File format " + extension + " for file " + configFile + " is not supported.");
2882  }
2883 }
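// Editor's note: illustrative usage sketch (not part of the original source). The file
// extension selects the parser, so XML and JSON settings go through the same call; the
// file names below are hypothetical.
//
//   vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
//   tracker.loadConfigFile("cube.xml");   // handled by loadConfigFileXML()
//   tracker.loadConfigFile("cube.json");  // handled by loadConfigFileJSON() when nlohmann_json is available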
2884 
2896 void vpMbGenericTracker::loadConfigFileXML(const std::string &configFile, bool verbose)
2897 {
2898  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2899  it != m_mapOfTrackers.end(); ++it) {
2900  TrackerWrapper *tracker = it->second;
2901  tracker->loadConfigFile(configFile, verbose);
2902  }
2903 
2904  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
2905  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2906  }
2907 
2908  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2909  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2910  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2911  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2912 }
2913 
2914 #ifdef VISP_HAVE_NLOHMANN_JSON
2923 void vpMbGenericTracker::loadConfigFileJSON(const std::string &settingsFile, bool verbose)
2924 {
2925  //Read file
2926  std::ifstream jsonFile(settingsFile);
2927  if (!jsonFile.good()) {
2928  throw vpException(vpException::ioError, "Could not read from settings file " + settingsFile + " to initialise the vpMbGenericTracker");
2929  }
2930  json settings;
2931  try {
2932  settings = json::parse(jsonFile);
2933  }
2934  catch (json::parse_error &e) {
2935  std::stringstream msg;
2936  msg << "Could not parse JSON file: \n";
2937 
2938  msg << e.what() << std::endl;
2939  msg << "Byte position of error: " << e.byte;
2940  throw vpException(vpException::ioError, msg.str());
2941  }
2942  jsonFile.close();
2943 
2944  if (!settings.contains("version")) {
2945  throw vpException(vpException::notInitialized, "JSON configuration does not contain versioning information");
2946  }
2947  else if (settings["version"].get<std::string>() != MBT_JSON_SETTINGS_VERSION) {
2948  throw vpException(vpException::badValue, "Trying to load an old configuration file");
2949  }
2950 
2951  //Load Basic settings
2952  settings.at("referenceCameraName").get_to(m_referenceCameraName);
2953  json trackersJson;
2954  trackersJson = settings.at("trackers");
2955 
2956  //Find camera that are already present in the tracker but not in the config file: they will be removed
2957  std::vector<std::string> unusedCameraNames = getCameraNames();
2958 
2959  bool refCameraFound = false;
2960  //Foreach camera
2961  for (const auto &it : trackersJson.items()) {
2962  const std::string cameraName = it.key();
2963  const json trackerJson = it.value();
2964  refCameraFound = refCameraFound || cameraName == m_referenceCameraName;
2965 
2966  //Load transformation between current camera and reference camera, if it exists
2967  if (trackerJson.contains("camTref")) {
2968  m_mapOfCameraTransformationMatrix[cameraName] = trackerJson["camTref"].get<vpHomogeneousMatrix>();
2969  }
2970  else if (cameraName != m_referenceCameraName) { // No transformation to reference and it's not the reference itself
2971  throw vpException(vpException::notInitialized, "Camera " + cameraName + " has no transformation to the reference camera");
2972  }
2973  if (verbose) {
2974  std::cout << "Loading tracker " << cameraName << std::endl << " with settings: " << std::endl << trackerJson.dump(2);
2975  }
2976  if (m_mapOfTrackers.count(cameraName)) {
2977  if (verbose) {
2978  std::cout << "Updating an already existing tracker with JSON configuration." << std::endl;
2979  }
2980  from_json(trackerJson, *(m_mapOfTrackers[cameraName]));
2981  }
2982  else {
2983  if (verbose) {
2984  std::cout << "Creating a new tracker from JSON configuration." << std::endl;
2985  }
2986  TrackerWrapper *tw = new TrackerWrapper(); // vpMbTracker is responsible for deleting trackers
2987  *tw = trackerJson;
2988  m_mapOfTrackers[cameraName] = tw;
2989  }
2990  const auto unusedCamIt = std::remove(unusedCameraNames.begin(), unusedCameraNames.end(), cameraName); // Mark this camera name as used
2991  unusedCameraNames.erase(unusedCamIt, unusedCameraNames.end());
2992  }
2993  if (!refCameraFound) {
2994  throw vpException(vpException::badValue, "Reference camera not found in trackers");
2995  }
2996 
2997  // All cameras that were defined in the tracker but not in the config file are removed
2998  for (const std::string &oldCameraName : unusedCameraNames) {
2999  m_mapOfCameraTransformationMatrix.erase(oldCameraName);
3000  TrackerWrapper *tw = m_mapOfTrackers[oldCameraName];
3001  m_mapOfTrackers.erase(oldCameraName);
3002  delete tw;
3003  }
3004 
3005  const TrackerWrapper *refTracker = m_mapOfTrackers[m_referenceCameraName];
3006  refTracker->getCameraParameters(m_cam);
3007  this->angleAppears = refTracker->getAngleAppear();
3008  this->angleDisappears = refTracker->getAngleDisappear();
3009  this->clippingFlag = refTracker->getClipping();
3010  this->distNearClip = refTracker->getNearClippingDistance();
3011  this->distFarClip = refTracker->getFarClippingDistance();
3012 
3013  // These settings can be set in each tracker or globally. Global value overrides local ones.
3014  if (settings.contains("display")) {
3015  const json displayJson = settings["display"];
3016  setDisplayFeatures(displayJson.value("features", displayFeatures));
3017  setProjectionErrorDisplay(displayJson.value("projectionError", m_projectionErrorDisplay));
3018  }
3019  if (settings.contains("visibilityTest")) {
3020  const json visJson = settings["visibilityTest"];
3021  setOgreVisibilityTest(visJson.value("ogre", useOgre));
3022  setScanLineVisibilityTest(visJson.value("scanline", useScanLine));
3023  }
3024  //If a 3D model is defined, load it
3025  if (settings.contains("model")) {
3026  loadModel(settings.at("model").get<std::string>(), verbose);
3027  }
3028 }
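// Editor's note: minimal JSON layout accepted by loadConfigFileJSON(), reconstructed from
// the keys read above; values and file names are illustrative only.
//
//   {
//     "version": "<MBT_JSON_SETTINGS_VERSION>",
//     "referenceCameraName": "Camera",
//     "trackers": { "Camera": { ... , "camTref": [ ... ] } },
//     "display": { "features": true, "projectionError": false },
//     "visibilityTest": { "ogre": false, "scanline": true },
//     "model": "model.cao"
//   }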
3029 
3037 void vpMbGenericTracker::saveConfigFile(const std::string &settingsFile) const
3038 {
3039  json j;
3040  j["referenceCameraName"] = m_referenceCameraName;
3041  j["version"] = MBT_JSON_SETTINGS_VERSION;
3042  // j["thresholdOutlier"] = m_thresholdOutlier;
3043  json trackers;
3044  for (const auto &kv : m_mapOfTrackers) {
3045  trackers[kv.first] = *(kv.second);
3046  const auto itTransformation = m_mapOfCameraTransformationMatrix.find(kv.first);
3047  if (itTransformation != m_mapOfCameraTransformationMatrix.end()) {
3048  trackers[kv.first]["camTref"] = itTransformation->second;
3049  }
3050  }
3051  j["trackers"] = trackers;
3052  std::ofstream f(settingsFile);
3053  if (f.good()) {
3054  const unsigned indentLevel = 4;
3055  f << j.dump(indentLevel);
3056  f.close();
3057  }
3058  else {
3059  throw vpException(vpException::ioError, "Could not save tracker configuration to JSON file: " + settingsFile);
3060  }
3061 }
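// Editor's note: illustrative round trip (hypothetical file name), not part of the
// original source. A configuration written by saveConfigFile() uses the same layout and
// MBT_JSON_SETTINGS_VERSION as loadConfigFileJSON(), so it can be reloaded later:
//
//   tracker.saveConfigFile("mbt-settings.json");
//   ...
//   tracker.loadConfigFile("mbt-settings.json");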
3062 
3063 #endif
3064 
3079 void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2, bool verbose)
3080 {
3081  if (m_mapOfTrackers.size() != 2) {
3082  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3083  }
3084 
3085  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3086  TrackerWrapper *tracker = it_tracker->second;
3087  tracker->loadConfigFile(configFile1, verbose);
3088 
3089  ++it_tracker;
3090  tracker = it_tracker->second;
3091  tracker->loadConfigFile(configFile2, verbose);
3092 
3093  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
3094  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
3095  }
3096 
3097  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
3098  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
3099  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
3100  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
3101 }
3102 
3116 void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles, bool verbose)
3117 {
3118  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3119  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3120  TrackerWrapper *tracker = it_tracker->second;
3121 
3122  std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
3123  if (it_config != mapOfConfigFiles.end()) {
3124  tracker->loadConfigFile(it_config->second, verbose);
3125  }
3126  else {
3127  throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
3128  it_tracker->first.c_str());
3129  }
3130  }
3131 
3132  // Set the reference camera parameters
3133  std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
3134  if (it != m_mapOfTrackers.end()) {
3135  TrackerWrapper *tracker = it->second;
3136  tracker->getCameraParameters(m_cam);
3137 
3138  // Set clipping
3139  this->clippingFlag = tracker->getClipping();
3140  this->angleAppears = tracker->getAngleAppear();
3141  this->angleDisappears = tracker->getAngleDisappear();
3142  }
3143  else {
3144  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
3145  m_referenceCameraName.c_str());
3146  }
3147 }
3148 
3167 void vpMbGenericTracker::loadModel(const std::string &modelFile, bool verbose, const vpHomogeneousMatrix &T)
3168 {
3169  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3170  it != m_mapOfTrackers.end(); ++it) {
3171  TrackerWrapper *tracker = it->second;
3172  tracker->loadModel(modelFile, verbose, T);
3173  }
3174 }
3175 
3198 void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, bool verbose,
3199  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3200 {
3201  if (m_mapOfTrackers.size() != 2) {
3202  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3203  }
3204 
3205  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3206  TrackerWrapper *tracker = it_tracker->second;
3207  tracker->loadModel(modelFile1, verbose, T1);
3208 
3209  ++it_tracker;
3210  tracker = it_tracker->second;
3211  tracker->loadModel(modelFile2, verbose, T2);
3212 }
3213 
3233 void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, bool verbose,
3234  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3235 {
3236  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3237  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3238  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
3239 
3240  if (it_model != mapOfModelFiles.end()) {
3241  TrackerWrapper *tracker = it_tracker->second;
3242  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3243 
3244  if (it_T != mapOfT.end())
3245  tracker->loadModel(it_model->second, verbose, it_T->second);
3246  else
3247  tracker->loadModel(it_model->second, verbose);
3248  }
3249  else {
3250  throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
3251  it_tracker->first.c_str());
3252  }
3253  }
3254 }
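// Editor's note: illustrative multi-camera sketch (camera and file names are
// hypothetical), not part of the original source. Each camera can load its own CAD
// model, optionally with a per-camera transformation in mapOfT.
//
//   std::map<std::string, std::string> models;
//   models["Camera1"] = "object_left.cao";
//   models["Camera2"] = "object_right.cao";
//   tracker.loadModel(models, true);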
3255 
3256 #ifdef VISP_HAVE_PCL
3257 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3258  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3259 {
3260  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3261  it != m_mapOfTrackers.end(); ++it) {
3262  TrackerWrapper *tracker = it->second;
3263  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3264  }
3265 }
3266 #endif
3267 
3268 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3269  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3270  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3271  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3272 {
3273  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3274  it != m_mapOfTrackers.end(); ++it) {
3275  TrackerWrapper *tracker = it->second;
3276  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3277  mapOfPointCloudHeights[it->first]);
3278  }
3279 }
3280 
3292 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
3293  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3294 {
3295  if (m_mapOfTrackers.size() != 1) {
3296  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3297  m_mapOfTrackers.size());
3298  }
3299 
3300  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3301  if (it_tracker != m_mapOfTrackers.end()) {
3302  TrackerWrapper *tracker = it_tracker->second;
3303  tracker->reInitModel(I, cad_name, cMo, verbose, T);
3304 
3305  // Set reference pose
3306  tracker->getPose(m_cMo);
3307  }
3308  else {
3309  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3310  }
3311 
3312  modelInitialised = true;
3313 }
3314 
3326 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
3327  const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3328 {
3329  if (m_mapOfTrackers.size() != 1) {
3330  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3331  m_mapOfTrackers.size());
3332  }
3333 
3334  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3335  if (it_tracker != m_mapOfTrackers.end()) {
3336  TrackerWrapper *tracker = it_tracker->second;
3337  tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3338 
3339  // Set reference pose
3340  tracker->getPose(m_cMo);
3341  }
3342  else {
3343  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3344  }
3345 
3346  modelInitialised = true;
3347 }
3348 
3369 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
3370  const std::string &cad_name1, const std::string &cad_name2,
3371  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3372  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3373 {
3374  if (m_mapOfTrackers.size() == 2) {
3375  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3376 
3377  it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3378 
3379  ++it_tracker;
3380 
3381  it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3382 
3383  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3384  if (it_tracker != m_mapOfTrackers.end()) {
3385  // Set reference pose
3386  it_tracker->second->getPose(m_cMo);
3387  }
3388  }
3389  else {
3390  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3391  }
3392 
3393  modelInitialised = true;
3394 }
3395 
3416 void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
3417  const std::string &cad_name1, const std::string &cad_name2,
3418  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3419  const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3420 {
3421  if (m_mapOfTrackers.size() == 2) {
3422  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3423 
3424  it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3425 
3426  ++it_tracker;
3427 
3428  it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3429 
3430  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3431  if (it_tracker != m_mapOfTrackers.end()) {
3432  // Set reference pose
3433  it_tracker->second->getPose(m_cMo);
3434  }
3435  }
3436  else {
3437  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3438  }
3439 
3440  modelInitialised = true;
3441 }
3442 
3457 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3458  const std::map<std::string, std::string> &mapOfModelFiles,
3459  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3460  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3461 {
3462  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3463  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3464  mapOfImages.find(m_referenceCameraName);
3465  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3466  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3467 
3468  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3469  it_camPose != mapOfCameraPoses.end()) {
3470  TrackerWrapper *tracker = it_tracker->second;
3471  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3472  if (it_T != mapOfT.end())
3473  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3474  else
3475  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3476 
3477  // Set reference pose
3478  tracker->getPose(m_cMo);
3479  }
3480  else {
3481  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3482  }
3483 
3484  std::vector<std::string> vectorOfMissingCameras;
3485  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3486  if (it_tracker->first != m_referenceCameraName) {
3487  it_img = mapOfImages.find(it_tracker->first);
3488  it_model = mapOfModelFiles.find(it_tracker->first);
3489  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3490 
3491  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3492  TrackerWrapper *tracker = it_tracker->second;
3493  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3494  }
3495  else {
3496  vectorOfMissingCameras.push_back(it_tracker->first);
3497  }
3498  }
3499  }
3500 
3501  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3502  ++it) {
3503  it_img = mapOfImages.find(*it);
3504  it_model = mapOfModelFiles.find(*it);
3505  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3506  m_mapOfCameraTransformationMatrix.find(*it);
3507 
3508  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3509  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3510  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3511  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3512  }
3513  }
3514 
3515  modelInitialised = true;
3516 }
3517 
3532 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3533  const std::map<std::string, std::string> &mapOfModelFiles,
3534  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3535  const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3536 {
3537  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3538  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
3539  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3540  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3541 
3542  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3543  it_camPose != mapOfCameraPoses.end()) {
3544  TrackerWrapper *tracker = it_tracker->second;
3545  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3546  if (it_T != mapOfT.end())
3547  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3548  else
3549  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3550 
3551  // Set reference pose
3552  tracker->getPose(m_cMo);
3553  }
3554  else {
3555  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3556  }
3557 
3558  std::vector<std::string> vectorOfMissingCameras;
3559  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3560  if (it_tracker->first != m_referenceCameraName) {
3561  it_img = mapOfColorImages.find(it_tracker->first);
3562  it_model = mapOfModelFiles.find(it_tracker->first);
3563  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3564 
3565  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3566  it_camPose != mapOfCameraPoses.end()) {
3567  TrackerWrapper *tracker = it_tracker->second;
3568  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3569  }
3570  else {
3571  vectorOfMissingCameras.push_back(it_tracker->first);
3572  }
3573  }
3574  }
3575 
3576  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3577  ++it) {
3578  it_img = mapOfColorImages.find(*it);
3579  it_model = mapOfModelFiles.find(*it);
3580  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3581  m_mapOfCameraTransformationMatrix.find(*it);
3582 
3583  if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3584  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3585  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3586  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3587  }
3588  }
3589 
3590  modelInitialised = true;
3591 }
3592 
3597 void vpMbGenericTracker::resetTracker()
3598 {
3599  m_cMo.eye();
3600 
3601  useScanLine = false;
3602 
3603 #ifdef VISP_HAVE_OGRE
3604  useOgre = false;
3605 #endif
3606 
3607  m_computeInteraction = true;
3608  m_lambda = 1.0;
3609 
3610  angleAppears = vpMath::rad(89);
3611  angleDisappears = vpMath::rad(89);
3612  clippingFlag = vpPolygon3D::NO_CLIPPING;
3613  distNearClip = 0.001;
3614  distFarClip = 100;
3615 
3616  m_optimizationMethod = vpMbTracker::GAUSS_NEWTON_OPT;
3617  m_maxIter = 30;
3618  m_stopCriteriaEpsilon = 1e-8;
3619  m_initialMu = 0.01;
3620 
3621  // Only for Edge
3622  m_percentageGdPt = 0.4;
3623 
3624  // Only for KLT
3625  m_thresholdOutlier = 0.5;
3626 
3627  // Reset default ponderation between each feature type
3628  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
3629 
3630 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3631  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
3632 #endif
3633 
3634  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
3635  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
3636 
3637  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3638  it != m_mapOfTrackers.end(); ++it) {
3639  TrackerWrapper *tracker = it->second;
3640  tracker->resetTracker();
3641  }
3642 }
3643 
3653 void vpMbGenericTracker::setAngleAppear(const double &a)
3654 {
3655  vpMbTracker::setAngleAppear(a);
3656 
3657  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3658  it != m_mapOfTrackers.end(); ++it) {
3659  TrackerWrapper *tracker = it->second;
3660  tracker->setAngleAppear(a);
3661  }
3662 }
3663 
3676 void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3677 {
3678  if (m_mapOfTrackers.size() == 2) {
3679  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3680  it->second->setAngleAppear(a1);
3681 
3682  ++it;
3683  it->second->setAngleAppear(a2);
3684 
3685  it = m_mapOfTrackers.find(m_referenceCameraName);
3686  if (it != m_mapOfTrackers.end()) {
3687  angleAppears = it->second->getAngleAppear();
3688  }
3689  else {
3690  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3691  }
3692  }
3693  else {
3694  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3695  m_mapOfTrackers.size());
3696  }
3697 }
3698 
3708 void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3709 {
3710  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3711  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3712 
3713  if (it_tracker != m_mapOfTrackers.end()) {
3714  TrackerWrapper *tracker = it_tracker->second;
3715  tracker->setAngleAppear(it->second);
3716 
3717  if (it->first == m_referenceCameraName) {
3718  angleAppears = it->second;
3719  }
3720  }
3721  }
3722 }
3723 
3733 void vpMbGenericTracker::setAngleDisappear(const double &a)
3734 {
3735  vpMbTracker::setAngleDisappear(a);
3736 
3737  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3738  it != m_mapOfTrackers.end(); ++it) {
3739  TrackerWrapper *tracker = it->second;
3740  tracker->setAngleDisappear(a);
3741  }
3742 }
3743 
3756 void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3757 {
3758  if (m_mapOfTrackers.size() == 2) {
3759  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3760  it->second->setAngleDisappear(a1);
3761 
3762  ++it;
3763  it->second->setAngleDisappear(a2);
3764 
3765  it = m_mapOfTrackers.find(m_referenceCameraName);
3766  if (it != m_mapOfTrackers.end()) {
3767  angleDisappears = it->second->getAngleDisappear();
3768  }
3769  else {
3770  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3771  }
3772  }
3773  else {
3774  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3775  m_mapOfTrackers.size());
3776  }
3777 }
3778 
3788 void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3789 {
3790  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3791  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3792 
3793  if (it_tracker != m_mapOfTrackers.end()) {
3794  TrackerWrapper *tracker = it_tracker->second;
3795  tracker->setAngleDisappear(it->second);
3796 
3797  if (it->first == m_referenceCameraName) {
3798  angleDisappears = it->second;
3799  }
3800  }
3801  }
3802 }
3803 
3809 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera)
3810 {
3811  vpMbTracker::setCameraParameters(camera);
3812 
3813  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3814  it != m_mapOfTrackers.end(); ++it) {
3815  TrackerWrapper *tracker = it->second;
3816  tracker->setCameraParameters(camera);
3817  }
3818 }
3819 
3828 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera1, const vpCameraParameters &camera2)
3829 {
3830  if (m_mapOfTrackers.size() == 2) {
3831  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3832  it->second->setCameraParameters(camera1);
3833 
3834  ++it;
3835  it->second->setCameraParameters(camera2);
3836 
3837  it = m_mapOfTrackers.find(m_referenceCameraName);
3838  if (it != m_mapOfTrackers.end()) {
3839  it->second->getCameraParameters(m_cam);
3840  }
3841  else {
3842  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3843  }
3844  }
3845  else {
3846  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3847  m_mapOfTrackers.size());
3848  }
3849 }
3850 
3859 void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3860 {
3861  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3862  it != mapOfCameraParameters.end(); ++it) {
3863  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3864 
3865  if (it_tracker != m_mapOfTrackers.end()) {
3866  TrackerWrapper *tracker = it_tracker->second;
3867  tracker->setCameraParameters(it->second);
3868 
3869  if (it->first == m_referenceCameraName) {
3870  m_cam = it->second;
3871  }
3872  }
3873  }
3874 }
3875 
3884 void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3885  const vpHomogeneousMatrix &cameraTransformationMatrix)
3886 {
3887  std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3888 
3889  if (it != m_mapOfCameraTransformationMatrix.end()) {
3890  it->second = cameraTransformationMatrix;
3891  }
3892  else {
3893  throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3894  }
3895 }
3896 
3904 void vpMbGenericTracker::setCameraTransformationMatrix(
3905  const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3906 {
3907  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3908  it != mapOfTransformationMatrix.end(); ++it) {
3909  std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3910  m_mapOfCameraTransformationMatrix.find(it->first);
3911 
3912  if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3913  it_camTrans->second = it->second;
3914  }
3915  }
3916 }
3917 
3927 void vpMbGenericTracker::setClipping(const unsigned int &flags)
3928 {
3929  vpMbTracker::setClipping(flags);
3930 
3931  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3932  it != m_mapOfTrackers.end(); ++it) {
3933  TrackerWrapper *tracker = it->second;
3934  tracker->setClipping(flags);
3935  }
3936 }
3937 
3948 void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3949 {
3950  if (m_mapOfTrackers.size() == 2) {
3951  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3952  it->second->setClipping(flags1);
3953 
3954  ++it;
3955  it->second->setClipping(flags2);
3956 
3957  it = m_mapOfTrackers.find(m_referenceCameraName);
3958  if (it != m_mapOfTrackers.end()) {
3959  clippingFlag = it->second->getClipping();
3960  }
3961  else {
3962  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3963  }
3964  }
3965  else {
3966  std::stringstream ss;
3967  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3968  throw vpException(vpTrackingException::fatalError, ss.str());
3969  }
3970 }
3971 
3979 void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
3980 {
3981  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
3982  it != mapOfClippingFlags.end(); ++it) {
3983  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3984 
3985  if (it_tracker != m_mapOfTrackers.end()) {
3986  TrackerWrapper *tracker = it_tracker->second;
3987  tracker->setClipping(it->second);
3988 
3989  if (it->first == m_referenceCameraName) {
3990  clippingFlag = it->second;
3991  }
3992  }
3993  }
3994 }
3995 
4005 void vpMbGenericTracker::setDepthDenseFilteringMaxDistance(double maxDistance)
4006 {
4007  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4008  it != m_mapOfTrackers.end(); ++it) {
4009  TrackerWrapper *tracker = it->second;
4010  tracker->setDepthDenseFilteringMaxDistance(maxDistance);
4011  }
4012 }
4013 
4022 void vpMbGenericTracker::setDepthDenseFilteringMethod(int method)
4023 {
4024  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4025  it != m_mapOfTrackers.end(); ++it) {
4026  TrackerWrapper *tracker = it->second;
4027  tracker->setDepthDenseFilteringMethod(method);
4028  }
4029 }
4030 
4040 void vpMbGenericTracker::setDepthDenseFilteringMinDistance(double minDistance)
4041 {
4042  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4043  it != m_mapOfTrackers.end(); ++it) {
4044  TrackerWrapper *tracker = it->second;
4045  tracker->setDepthDenseFilteringMinDistance(minDistance);
4046  }
4047 }
4048 
4058 void vpMbGenericTracker::setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
4059 {
4060  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4061  it != m_mapOfTrackers.end(); ++it) {
4062  TrackerWrapper *tracker = it->second;
4063  tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
4064  }
4065 }
4066 
4075 void vpMbGenericTracker::setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
4076 {
4077  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4078  it != m_mapOfTrackers.end(); ++it) {
4079  TrackerWrapper *tracker = it->second;
4080  tracker->setDepthDenseSamplingStep(stepX, stepY);
4081  }
4082 }
4083 
4091 void vpMbGenericTracker::setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
4092 {
4093  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4094  it != m_mapOfTrackers.end(); ++it) {
4095  TrackerWrapper *tracker = it->second;
4096  tracker->setDepthNormalFaceCentroidMethod(method);
4097  }
4098 }
4099 
4108 void vpMbGenericTracker::setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
4109 {
4110  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4111  it != m_mapOfTrackers.end(); ++it) {
4112  TrackerWrapper *tracker = it->second;
4113  tracker->setDepthNormalFeatureEstimationMethod(method);
4114  }
4115 }
4116 
4124 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationMethod(int method)
4125 {
4126  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4127  it != m_mapOfTrackers.end(); ++it) {
4128  TrackerWrapper *tracker = it->second;
4129  tracker->setDepthNormalPclPlaneEstimationMethod(method);
4130  }
4131 }
4132 
4140 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
4141 {
4142  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4143  it != m_mapOfTrackers.end(); ++it) {
4144  TrackerWrapper *tracker = it->second;
4145  tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
4146  }
4147 }
4148 
4156 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
4157 {
4158  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4159  it != m_mapOfTrackers.end(); ++it) {
4160  TrackerWrapper *tracker = it->second;
4161  tracker->setDepthNormalPclPlaneEstimationRansacThreshold(threshold);
4162  }
4163 }
4164 
4173 void vpMbGenericTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
4174 {
4175  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4176  it != m_mapOfTrackers.end(); ++it) {
4177  TrackerWrapper *tracker = it->second;
4178  tracker->setDepthNormalSamplingStep(stepX, stepY);
4179  }
4180 }
4181 
4200 void vpMbGenericTracker::setDisplayFeatures(bool displayF)
4201 {
4202  vpMbTracker::setDisplayFeatures(displayF);
4203 
4204  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4205  it != m_mapOfTrackers.end(); ++it) {
4206  TrackerWrapper *tracker = it->second;
4207  tracker->setDisplayFeatures(displayF);
4208  }
4209 }
4210 
4218 void vpMbGenericTracker::setFarClippingDistance(const double &dist)
4219 {
4220  vpMbTracker::setFarClippingDistance(dist);
4221 
4222  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4223  it != m_mapOfTrackers.end(); ++it) {
4224  TrackerWrapper *tracker = it->second;
4225  tracker->setFarClippingDistance(dist);
4226  }
4227 }
4228 
4237 void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
4238 {
4239  if (m_mapOfTrackers.size() == 2) {
4240  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4241  it->second->setFarClippingDistance(dist1);
4242 
4243  ++it;
4244  it->second->setFarClippingDistance(dist2);
4245 
4246  it = m_mapOfTrackers.find(m_referenceCameraName);
4247  if (it != m_mapOfTrackers.end()) {
4248  distFarClip = it->second->getFarClippingDistance();
4249  }
4250  else {
4251  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4252  }
4253  }
4254  else {
4255  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4256  m_mapOfTrackers.size());
4257  }
4258 }
4259 
4265 void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
4266 {
4267  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4268  ++it) {
4269  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4270 
4271  if (it_tracker != m_mapOfTrackers.end()) {
4272  TrackerWrapper *tracker = it_tracker->second;
4273  tracker->setFarClippingDistance(it->second);
4274 
4275  if (it->first == m_referenceCameraName) {
4276  distFarClip = it->second;
4277  }
4278  }
4279  }
4280 }
4281 
4288 void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
4289 {
4290  for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
4291  ++it) {
4292  std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4293  if (it_factor != mapOfFeatureFactors.end()) {
4294  it->second = it_factor->second;
4295  }
4296  }
4297 }
4298 
4314 void vpMbGenericTracker::setGoodMovingEdgesRatioThreshold(double threshold)
4315 {
4316  m_percentageGdPt = threshold;
4317 
4318  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4319  it != m_mapOfTrackers.end(); ++it) {
4320  TrackerWrapper *tracker = it->second;
4321  tracker->setGoodMovingEdgesRatioThreshold(threshold);
4322  }
4323 }
4324 
4325 #ifdef VISP_HAVE_OGRE
4337 void vpMbGenericTracker::setGoodNbRayCastingAttemptsRatio(const double &ratio)
4338 {
4339  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4340  it != m_mapOfTrackers.end(); ++it) {
4341  TrackerWrapper *tracker = it->second;
4342  tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4343  }
4344 }
4345 
4357 void vpMbGenericTracker::setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
4358 {
4359  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4360  it != m_mapOfTrackers.end(); ++it) {
4361  TrackerWrapper *tracker = it->second;
4362  tracker->setNbRayCastingAttemptsForVisibility(attempts);
4363  }
4364 }
4365 #endif
4366 
4367 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4375 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t)
4376 {
4377  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4378  it != m_mapOfTrackers.end(); ++it) {
4379  TrackerWrapper *tracker = it->second;
4380  tracker->setKltOpencv(t);
4381  }
4382 }
4383 
4392 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t1, const vpKltOpencv &t2)
4393 {
4394  if (m_mapOfTrackers.size() == 2) {
4395  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4396  it->second->setKltOpencv(t1);
4397 
4398  ++it;
4399  it->second->setKltOpencv(t2);
4400  }
4401  else {
4402  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4403  m_mapOfTrackers.size());
4404  }
4405 }
4406 
4412 void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4413 {
4414  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4415  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4416 
4417  if (it_tracker != m_mapOfTrackers.end()) {
4418  TrackerWrapper *tracker = it_tracker->second;
4419  tracker->setKltOpencv(it->second);
4420  }
4421  }
4422 }
4423 
4431 void vpMbGenericTracker::setKltThresholdAcceptation(double th)
4432 {
4433  m_thresholdOutlier = th;
4434 
4435  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4436  it != m_mapOfTrackers.end(); ++it) {
4437  TrackerWrapper *tracker = it->second;
4438  tracker->setKltThresholdAcceptation(th);
4439  }
4440 }
4441 #endif
4442 
4455 void vpMbGenericTracker::setLod(bool useLod, const std::string &name)
4456 {
4457  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4458  it != m_mapOfTrackers.end(); ++it) {
4459  TrackerWrapper *tracker = it->second;
4460  tracker->setLod(useLod, name);
4461  }
4462 }
4463 
4464 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4472 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4473 {
4474  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4475  it != m_mapOfTrackers.end(); ++it) {
4476  TrackerWrapper *tracker = it->second;
4477  tracker->setKltMaskBorder(e);
4478  }
4479 }
4480 
4489 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4490 {
4491  if (m_mapOfTrackers.size() == 2) {
4492  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4493  it->second->setKltMaskBorder(e1);
4494 
4495  ++it;
4496 
4497  it->second->setKltMaskBorder(e2);
4498  }
4499  else {
4500  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4501  m_mapOfTrackers.size());
4502  }
4503 }
4504 
4510 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4511 {
4512  for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4513  ++it) {
4514  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4515 
4516  if (it_tracker != m_mapOfTrackers.end()) {
4517  TrackerWrapper *tracker = it_tracker->second;
4518  tracker->setKltMaskBorder(it->second);
4519  }
4520  }
4521 }
4522 #endif
4523 
4529 void vpMbGenericTracker::setMask(const vpImage<bool> &mask)
4530 {
4531  vpMbTracker::setMask(mask);
4532 
4533  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4534  it != m_mapOfTrackers.end(); ++it) {
4535  TrackerWrapper *tracker = it->second;
4536  tracker->setMask(mask);
4537  }
4538 }
4539 
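/*
 * A minimal usage sketch for setMask() above, assuming a 640x480 image and an illustrative rectangular
 * region of interest; features are only considered where the mask is true:
 *
 * \code
 * vpImage<bool> mask(480, 640, false);
 * for (unsigned int i = 100; i < 380; i++) {
 *   for (unsigned int j = 120; j < 520; j++) {
 *     mask[i][j] = true;
 *   }
 * }
 * tracker.setMask(mask);
 * \endcode
 */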
4551 void vpMbGenericTracker::setMinLineLengthThresh(double minLineLengthThresh, const std::string &name)
4552 {
4553  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4554  it != m_mapOfTrackers.end(); ++it) {
4555  TrackerWrapper *tracker = it->second;
4556  tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4557  }
4558 }
4559 
4570 void vpMbGenericTracker::setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name)
4571 {
4572  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4573  it != m_mapOfTrackers.end(); ++it) {
4574  TrackerWrapper *tracker = it->second;
4575  tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4576  }
4577 }
4578 
4586 void vpMbGenericTracker::setMovingEdge(const vpMe &me)
4587 {
4588  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4589  it != m_mapOfTrackers.end(); ++it) {
4590  TrackerWrapper *tracker = it->second;
4591  tracker->setMovingEdge(me);
4592  }
4593 }
4594 
4604 void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4605 {
4606  if (m_mapOfTrackers.size() == 2) {
4607  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4608  it->second->setMovingEdge(me1);
4609 
4610  ++it;
4611 
4612  it->second->setMovingEdge(me2);
4613  }
4614  else {
4615  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4616  m_mapOfTrackers.size());
4617  }
4618 }
4619 
4625 void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4626 {
4627  for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4628  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4629 
4630  if (it_tracker != m_mapOfTrackers.end()) {
4631  TrackerWrapper *tracker = it_tracker->second;
4632  tracker->setMovingEdge(it->second);
4633  }
4634  }
4635 }
4636 
4644 void vpMbGenericTracker::setNearClippingDistance(const double &dist)
4645 {
4646   vpMbTracker::setNearClippingDistance(dist);
4647 
4648  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4649  it != m_mapOfTrackers.end(); ++it) {
4650  TrackerWrapper *tracker = it->second;
4651  tracker->setNearClippingDistance(dist);
4652  }
4653 }
4654 
4663 void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4664 {
4665  if (m_mapOfTrackers.size() == 2) {
4666  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4667  it->second->setNearClippingDistance(dist1);
4668 
4669  ++it;
4670 
4671  it->second->setNearClippingDistance(dist2);
4672 
4673     it = m_mapOfTrackers.find(m_referenceCameraName);
4674     if (it != m_mapOfTrackers.end()) {
4675  distNearClip = it->second->getNearClippingDistance();
4676  }
4677  else {
4678  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4679  }
4680  }
4681  else {
4682  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4683  m_mapOfTrackers.size());
4684  }
4685 }
4686 
4692 void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4693 {
4694  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4695  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4696 
4697  if (it_tracker != m_mapOfTrackers.end()) {
4698  TrackerWrapper *tracker = it_tracker->second;
4699  tracker->setNearClippingDistance(it->second);
4700 
4701  if (it->first == m_referenceCameraName) {
4702  distNearClip = it->second;
4703  }
4704  }
4705  }
4706 }
4707 
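/*
 * A minimal usage sketch for the map-based setNearClippingDistance() above, assuming two cameras named
 * "Camera1" and "Camera2" and illustrative distances in meters; the entry matching the reference camera
 * also updates distNearClip:
 *
 * \code
 * std::map<std::string, double> mapOfClipDists;
 * mapOfClipDists["Camera1"] = 0.05;
 * mapOfClipDists["Camera2"] = 0.10;
 * tracker.setNearClippingDistance(mapOfClipDists);
 * \endcode
 */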
4720 void vpMbGenericTracker::setOgreShowConfigDialog(bool showConfigDialog)
4721 {
4722  vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4723 
4724  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4725  it != m_mapOfTrackers.end(); ++it) {
4726  TrackerWrapper *tracker = it->second;
4727  tracker->setOgreShowConfigDialog(showConfigDialog);
4728  }
4729 }
4730 
4741 void vpMbGenericTracker::setOgreVisibilityTest(const bool &v)
4742 {
4743   vpMbTracker::setOgreVisibilityTest(v);
4744 
4745  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4746  it != m_mapOfTrackers.end(); ++it) {
4747  TrackerWrapper *tracker = it->second;
4748  tracker->setOgreVisibilityTest(v);
4749  }
4750 
4751 #ifdef VISP_HAVE_OGRE
4752  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4753  it != m_mapOfTrackers.end(); ++it) {
4754  TrackerWrapper *tracker = it->second;
4755  tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4756  }
4757 #endif
4758 }
4759 
4767 void vpMbGenericTracker::setOptimizationMethod(const vpMbtOptimizationMethod &opt)
4768 {
4769   vpMbTracker::setOptimizationMethod(opt);
4770 
4771  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4772  it != m_mapOfTrackers.end(); ++it) {
4773  TrackerWrapper *tracker = it->second;
4774  tracker->setOptimizationMethod(opt);
4775  }
4776 }
4777 
4790 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
4791 {
4792  if (m_mapOfTrackers.size() > 1) {
4793  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4794  "to be configured with only one camera!");
4795  }
4796 
4797  m_cMo = cdMo;
4798 
4799  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4800  if (it != m_mapOfTrackers.end()) {
4801  TrackerWrapper *tracker = it->second;
4802  tracker->setPose(I, cdMo);
4803  }
4804  else {
4805  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4806  m_referenceCameraName.c_str());
4807  }
4808 }
4809 
4822 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
4823 {
4824  if (m_mapOfTrackers.size() > 1) {
4825  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4826  "to be configured with only one camera!");
4827  }
4828 
4829  m_cMo = cdMo;
4830 
4831  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4832  if (it != m_mapOfTrackers.end()) {
4833  TrackerWrapper *tracker = it->second;
4834  vpImageConvert::convert(I_color, m_I);
4835  tracker->setPose(m_I, cdMo);
4836  }
4837  else {
4838  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4839  m_referenceCameraName.c_str());
4840  }
4841 }
4842 
4854 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
4855                                  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4856 {
4857  if (m_mapOfTrackers.size() == 2) {
4858  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4859  it->second->setPose(I1, c1Mo);
4860 
4861  ++it;
4862 
4863  it->second->setPose(I2, c2Mo);
4864 
4865     it = m_mapOfTrackers.find(m_referenceCameraName);
4866     if (it != m_mapOfTrackers.end()) {
4867  // Set reference pose
4868  it->second->getPose(m_cMo);
4869  }
4870  else {
4871  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4872  m_referenceCameraName.c_str());
4873  }
4874  }
4875  else {
4876  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4877  m_mapOfTrackers.size());
4878  }
4879 }
4880 
4892 void vpMbGenericTracker::setPose(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &I_color2,
4893  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4894 {
4895  if (m_mapOfTrackers.size() == 2) {
4896  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4897  it->second->setPose(I_color1, c1Mo);
4898 
4899  ++it;
4900 
4901  it->second->setPose(I_color2, c2Mo);
4902 
4903     it = m_mapOfTrackers.find(m_referenceCameraName);
4904     if (it != m_mapOfTrackers.end()) {
4905  // Set reference pose
4906  it->second->getPose(m_cMo);
4907  }
4908  else {
4909  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4910  m_referenceCameraName.c_str());
4911  }
4912  }
4913  else {
4914  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4915  m_mapOfTrackers.size());
4916  }
4917 }
4918 
4934 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4935  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4936 {
4937  // Set the reference cMo
4938  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4939  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4940  mapOfImages.find(m_referenceCameraName);
4941  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4942 
4943  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4944  TrackerWrapper *tracker = it_tracker->second;
4945  tracker->setPose(*it_img->second, it_camPose->second);
4946  tracker->getPose(m_cMo);
4947  }
4948  else {
4949  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4950  }
4951 
4952  // Vector of missing pose matrices for cameras
4953  std::vector<std::string> vectorOfMissingCameraPoses;
4954 
4955  // Set pose for the specified cameras
4956  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4957  if (it_tracker->first != m_referenceCameraName) {
4958  it_img = mapOfImages.find(it_tracker->first);
4959  it_camPose = mapOfCameraPoses.find(it_tracker->first);
4960 
4961  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4962  // Set pose
4963  TrackerWrapper *tracker = it_tracker->second;
4964  tracker->setPose(*it_img->second, it_camPose->second);
4965  }
4966  else {
4967  vectorOfMissingCameraPoses.push_back(it_tracker->first);
4968  }
4969  }
4970  }
4971 
4972  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4973  it != vectorOfMissingCameraPoses.end(); ++it) {
4974  it_img = mapOfImages.find(*it);
4975  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4976         m_mapOfCameraTransformationMatrix.find(*it);
4977 
4978  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4979  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
4980  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4981  }
4982  else {
4983       throw vpException(vpTrackingException::fatalError,
4984                         "Missing image or missing camera transformation "
4985  "matrix! Cannot set pose for camera: %s!",
4986  it->c_str());
4987  }
4988  }
4989 }
4990 
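/*
 * A minimal usage sketch for the map-based setPose() above, assuming cameras named "Camera1" (reference)
 * and "Camera2"; a camera omitted from mapOfCameraPoses gets its pose from the reference pose combined
 * with its stored camera transformation matrix:
 *
 * \code
 * std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
 * mapOfImages["Camera1"] = &I1;
 * mapOfImages["Camera2"] = &I2;
 *
 * std::map<std::string, vpHomogeneousMatrix> mapOfCameraPoses;
 * mapOfCameraPoses["Camera1"] = c1Mo; // pose of the reference camera
 * tracker.setPose(mapOfImages, mapOfCameraPoses);
 * \endcode
 */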
5006 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5007  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
5008 {
5009  // Set the reference cMo
5010  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
5011  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
5012  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
5013 
5014  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5015  TrackerWrapper *tracker = it_tracker->second;
5016  tracker->setPose(*it_img->second, it_camPose->second);
5017  tracker->getPose(m_cMo);
5018  }
5019  else {
5020  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
5021  }
5022 
5023  // Vector of missing pose matrices for cameras
5024  std::vector<std::string> vectorOfMissingCameraPoses;
5025 
5026  // Set pose for the specified cameras
5027  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
5028  if (it_tracker->first != m_referenceCameraName) {
5029  it_img = mapOfColorImages.find(it_tracker->first);
5030  it_camPose = mapOfCameraPoses.find(it_tracker->first);
5031 
5032  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5033  // Set pose
5034  TrackerWrapper *tracker = it_tracker->second;
5035  tracker->setPose(*it_img->second, it_camPose->second);
5036  }
5037  else {
5038  vectorOfMissingCameraPoses.push_back(it_tracker->first);
5039  }
5040  }
5041  }
5042 
5043  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5044  it != vectorOfMissingCameraPoses.end(); ++it) {
5045  it_img = mapOfColorImages.find(*it);
5046  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5047         m_mapOfCameraTransformationMatrix.find(*it);
5048 
5049  if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5050  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5051  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5052  }
5053  else {
5054       throw vpException(vpTrackingException::fatalError,
5055                         "Missing image or missing camera transformation "
5056  "matrix! Cannot set pose for camera: %s!",
5057  it->c_str());
5058  }
5059  }
5060 }
5061 
5076 void vpMbGenericTracker::setProjectionErrorComputation(const bool &flag)
5077 {
5078   vpMbTracker::setProjectionErrorComputation(flag);
5079 
5080  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5081  it != m_mapOfTrackers.end(); ++it) {
5082  TrackerWrapper *tracker = it->second;
5083  tracker->setProjectionErrorComputation(flag);
5084  }
5085 }
5086 
5090 void vpMbGenericTracker::setProjectionErrorDisplay(bool display)
5091 {
5092   vpMbTracker::setProjectionErrorDisplay(display);
5093 
5094  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5095  it != m_mapOfTrackers.end(); ++it) {
5096  TrackerWrapper *tracker = it->second;
5097  tracker->setProjectionErrorDisplay(display);
5098  }
5099 }
5100 
5104 void vpMbGenericTracker::setProjectionErrorDisplayArrowLength(unsigned int length)
5105 {
5106   vpMbTracker::setProjectionErrorDisplayArrowLength(length);
5107 
5108  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5109  it != m_mapOfTrackers.end(); ++it) {
5110  TrackerWrapper *tracker = it->second;
5111  tracker->setProjectionErrorDisplayArrowLength(length);
5112  }
5113 }
5114 
5115 void vpMbGenericTracker::setProjectionErrorDisplayArrowThickness(unsigned int thickness)
5116 {
5117   vpMbTracker::setProjectionErrorDisplayArrowThickness(thickness);
5118 
5119  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5120  it != m_mapOfTrackers.end(); ++it) {
5121  TrackerWrapper *tracker = it->second;
5122  tracker->setProjectionErrorDisplayArrowThickness(thickness);
5123  }
5124 }
5125 
5131 void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
5132 {
5133  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
5134  if (it != m_mapOfTrackers.end()) {
5135  m_referenceCameraName = referenceCameraName;
5136  }
5137  else {
5138  std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
5139  }
5140 }
5141 
5142 void vpMbGenericTracker::setScanLineVisibilityTest(const bool &v)
5143 {
5144   vpMbTracker::setScanLineVisibilityTest(v);
5145 
5146  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5147  it != m_mapOfTrackers.end(); ++it) {
5148  TrackerWrapper *tracker = it->second;
5149  tracker->setScanLineVisibilityTest(v);
5150  }
5151 }
5152 
5164 void vpMbGenericTracker::setTrackerType(int type)
5165 {
5166  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5167  it != m_mapOfTrackers.end(); ++it) {
5168  TrackerWrapper *tracker = it->second;
5169  tracker->setTrackerType(type);
5170  }
5171 }
5172 
5182 void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
5183 {
5184  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
5185  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
5186  if (it_tracker != m_mapOfTrackers.end()) {
5187  TrackerWrapper *tracker = it_tracker->second;
5188  tracker->setTrackerType(it->second);
5189  }
5190  }
5191 }
5192 
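/*
 * A minimal usage sketch for the map-based setTrackerType() above, assuming a color camera "Camera1"
 * and a depth camera "Camera2" with illustrative feature combinations:
 *
 * \code
 * std::map<std::string, int> mapOfTrackerTypes;
 * mapOfTrackerTypes["Camera1"] = vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER;
 * mapOfTrackerTypes["Camera2"] = vpMbGenericTracker::DEPTH_DENSE_TRACKER;
 * tracker.setTrackerType(mapOfTrackerTypes);
 * \endcode
 */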
5202 void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
5203 {
5204  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5205  it != m_mapOfTrackers.end(); ++it) {
5206  TrackerWrapper *tracker = it->second;
5207  tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
5208  }
5209 }
5210 
5220 void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
5221 {
5222  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5223  it != m_mapOfTrackers.end(); ++it) {
5224  TrackerWrapper *tracker = it->second;
5225  tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
5226  }
5227 }
5228 
5238 void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
5239 {
5240  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5241  it != m_mapOfTrackers.end(); ++it) {
5242  TrackerWrapper *tracker = it->second;
5243  tracker->setUseEdgeTracking(name, useEdgeTracking);
5244  }
5245 }
5246 
5247 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5257 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
5258 {
5259  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5260  it != m_mapOfTrackers.end(); ++it) {
5261  TrackerWrapper *tracker = it->second;
5262  tracker->setUseKltTracking(name, useKltTracking);
5263  }
5264 }
5265 #endif
5266 
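/*
 * A minimal usage sketch for the setUse*Tracking() helpers above, assuming the CAD model contains a
 * polygon named "occluded_face" (illustrative name); the flag is forwarded to every camera:
 *
 * \code
 * tracker.setUseEdgeTracking("occluded_face", false); // stop using moving edges on that face
 * #if defined(VISP_HAVE_MODULE_KLT)
 * tracker.setUseKltTracking("occluded_face", false);  // idem for the KLT points
 * #endif
 * \endcode
 */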
5267 void vpMbGenericTracker::testTracking()
5268 {
5269  // Test tracking fails only if all testTracking have failed
5270  bool isOneTestTrackingOk = false;
5271  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5272  it != m_mapOfTrackers.end(); ++it) {
5273  TrackerWrapper *tracker = it->second;
5274  try {
5275  tracker->testTracking();
5276  isOneTestTrackingOk = true;
5277  }
5278  catch (...) {
5279  }
5280  }
5281 
5282  if (!isOneTestTrackingOk) {
5283  std::ostringstream oss;
5284  oss << "Not enough moving edges to track the object. Try to reduce the "
5285  "threshold="
5286  << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
5287     throw vpTrackingException(vpTrackingException::fatalError, oss.str());
5288   }
5289 }
5290 
5300 void vpMbGenericTracker::track(const vpImage<unsigned char> &I)
5301 {
5302  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5303  mapOfImages[m_referenceCameraName] = &I;
5304 
5305  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5306  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5307 
5308  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5309 }
5310 
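/*
 * A minimal monocular tracking loop built on the track() overload above, assuming illustrative model,
 * configuration and initialization file names and an image I updated by some acquisition code:
 *
 * \code
 * vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
 * tracker.loadConfigFile("cube.xml");
 * tracker.loadModel("cube.cao");
 * tracker.initClick(I, "cube.init");
 *
 * vpHomogeneousMatrix cMo;
 * while (true) {
 *   // acquire a new grayscale image into I ...
 *   tracker.track(I);
 *   tracker.getPose(cMo);
 * }
 * \endcode
 */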
5320 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color)
5321 {
5322  std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5323  mapOfColorImages[m_referenceCameraName] = &I_color;
5324 
5325  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5326  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5327 
5328  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5329 }
5330 
5341 void vpMbGenericTracker::track(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2)
5342 {
5343  if (m_mapOfTrackers.size() == 2) {
5344  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5345  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5346  mapOfImages[it->first] = &I1;
5347  ++it;
5348 
5349  mapOfImages[it->first] = &I2;
5350 
5351  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5352  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5353 
5354  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5355  }
5356  else {
5357  std::stringstream ss;
5358  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5359  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5360  }
5361 }
5362 
5373 void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &_colorI2)
5374 {
5375  if (m_mapOfTrackers.size() == 2) {
5376  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5377  std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5378  mapOfImages[it->first] = &I_color1;
5379  ++it;
5380 
5381  mapOfImages[it->first] = &_colorI2;
5382 
5383  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5384  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5385 
5386  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5387  }
5388  else {
5389  std::stringstream ss;
5390  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5391  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5392  }
5393 }
5394 
5402 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5403 {
5404  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5405  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5406 
5407  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5408 }
5409 
5417 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5418 {
5419  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5420  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5421 
5422  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5423 }
5424 
5425 #ifdef VISP_HAVE_PCL
5434 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5435  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5436 {
5437  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5438  it != m_mapOfTrackers.end(); ++it) {
5439  TrackerWrapper *tracker = it->second;
5440 
5441  if ((tracker->m_trackerType & (EDGE_TRACKER |
5442 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5443  KLT_TRACKER |
5444 #endif
5445                                  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5446       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5447  }
5448 
5449  if (tracker->m_trackerType & (EDGE_TRACKER
5450 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5451  | KLT_TRACKER
5452 #endif
5453  ) &&
5454  mapOfImages[it->first] == NULL) {
5455  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5456  }
5457 
5458  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5459  !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5460  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5461  }
5462  }
5463 
5464  preTracking(mapOfImages, mapOfPointClouds);
5465 
5466  try {
5467  computeVVS(mapOfImages);
5468  }
5469  catch (...) {
5470  covarianceMatrix = -1;
5471  throw; // throw the original exception
5472  }
5473 
5474  testTracking();
5475 
5476  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5477  it != m_mapOfTrackers.end(); ++it) {
5478  TrackerWrapper *tracker = it->second;
5479 
5480  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5481  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5482  }
5483 
5484  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5485 
5486  if (displayFeatures) {
5487 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5488  if (tracker->m_trackerType & KLT_TRACKER) {
5489  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5490  }
5491 #endif
5492 
5493  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5494  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5495  }
5496  }
5497  }
5498 
5499   computeProjectionError();
5500 }
5501 
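/*
 * A minimal usage sketch for the PCL-based track() overload above, assuming a grayscale camera "Camera1"
 * and a depth camera "Camera2" whose point cloud is already available as a
 * pcl::PointCloud<pcl::PointXYZ>::ConstPtr named pointCloud:
 *
 * \code
 * std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
 * mapOfImages["Camera1"] = &I;
 *
 * std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
 * mapOfPointClouds["Camera2"] = pointCloud;
 *
 * tracker.track(mapOfImages, mapOfPointClouds);
 * \endcode
 */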
5510 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5511  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5512 {
5513  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5514  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5515  it != m_mapOfTrackers.end(); ++it) {
5516  TrackerWrapper *tracker = it->second;
5517 
5518  if ((tracker->m_trackerType & (EDGE_TRACKER |
5519 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5520  KLT_TRACKER |
5521 #endif
5522                                  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5523       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5524  }
5525 
5526  if (tracker->m_trackerType & (EDGE_TRACKER
5527 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5528  | KLT_TRACKER
5529 #endif
5530  ) &&
5531  mapOfImages[it->first] == NULL) {
5532  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5533  }
5534  else if (tracker->m_trackerType & (EDGE_TRACKER
5535 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5536  | KLT_TRACKER
5537 #endif
5538  ) &&
5539  mapOfImages[it->first] != NULL) {
5540  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5541  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5542  }
5543 
5544  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5545  !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5546  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5547  }
5548  }
5549 
5550  preTracking(mapOfImages, mapOfPointClouds);
5551 
5552  try {
5553  computeVVS(mapOfImages);
5554  }
5555  catch (...) {
5556  covarianceMatrix = -1;
5557  throw; // throw the original exception
5558  }
5559 
5560  testTracking();
5561 
5562  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5563  it != m_mapOfTrackers.end(); ++it) {
5564  TrackerWrapper *tracker = it->second;
5565 
5566  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5567  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5568  }
5569 
5570  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5571 
5572  if (displayFeatures) {
5573 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5574  if (tracker->m_trackerType & KLT_TRACKER) {
5575  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5576  }
5577 #endif
5578 
5579  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5580  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5581  }
5582  }
5583  }
5584 
5585   computeProjectionError();
5586 }
5587 #endif
5588 
5599 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5600  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5601  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5602  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5603 {
5604  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5605  it != m_mapOfTrackers.end(); ++it) {
5606  TrackerWrapper *tracker = it->second;
5607 
5608  if ((tracker->m_trackerType & (EDGE_TRACKER |
5609 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5610  KLT_TRACKER |
5611 #endif
5612                                  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5613       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5614  }
5615 
5616  if (tracker->m_trackerType & (EDGE_TRACKER
5617 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5618  | KLT_TRACKER
5619 #endif
5620  ) &&
5621  mapOfImages[it->first] == NULL) {
5622  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5623  }
5624 
5625  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5626  (mapOfPointClouds[it->first] == NULL)) {
5627  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5628  }
5629  }
5630 
5631  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5632 
5633  try {
5634  computeVVS(mapOfImages);
5635  }
5636  catch (...) {
5637  covarianceMatrix = -1;
5638  throw; // throw the original exception
5639  }
5640 
5641  testTracking();
5642 
5643  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5644  it != m_mapOfTrackers.end(); ++it) {
5645  TrackerWrapper *tracker = it->second;
5646 
5647  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5648  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5649  }
5650 
5651  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5652 
5653  if (displayFeatures) {
5654 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5655  if (tracker->m_trackerType & KLT_TRACKER) {
5656  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5657  }
5658 #endif
5659 
5660  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5661  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5662  }
5663  }
5664  }
5665 
5666   computeProjectionError();
5667 }
5668 
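/*
 * A minimal usage sketch for the PCL-free track() overload above, assuming a 640x480 depth map converted
 * to a std::vector<vpColVector> of 3D points (one (X, Y, Z) vector per pixel) named pointCloud:
 *
 * \code
 * std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
 * mapOfImages["Camera1"] = &I;
 *
 * std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
 * std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
 * mapOfPointClouds["Camera2"] = &pointCloud;
 * mapOfWidths["Camera2"] = 640;
 * mapOfHeights["Camera2"] = 480;
 *
 * tracker.track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
 * \endcode
 */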
5679 void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5680  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5681  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5682  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5683 {
5684  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5685  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5686  it != m_mapOfTrackers.end(); ++it) {
5687  TrackerWrapper *tracker = it->second;
5688 
5689  if ((tracker->m_trackerType & (EDGE_TRACKER |
5690 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5691  KLT_TRACKER |
5692 #endif
5693                                  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5694       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5695  }
5696 
5697  if (tracker->m_trackerType & (EDGE_TRACKER
5698 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5699  | KLT_TRACKER
5700 #endif
5701  ) &&
5702  mapOfColorImages[it->first] == NULL) {
5703  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5704  }
5705  else if (tracker->m_trackerType & (EDGE_TRACKER
5706 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5707  | KLT_TRACKER
5708 #endif
5709  ) &&
5710  mapOfColorImages[it->first] != NULL) {
5711  vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5712  mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5713  }
5714 
5715  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5716  (mapOfPointClouds[it->first] == NULL)) {
5717  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
5718  }
5719  }
5720 
5721  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5722 
5723  try {
5724  computeVVS(mapOfImages);
5725  }
5726  catch (...) {
5727  covarianceMatrix = -1;
5728  throw; // throw the original exception
5729  }
5730 
5731  testTracking();
5732 
5733  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5734  it != m_mapOfTrackers.end(); ++it) {
5735  TrackerWrapper *tracker = it->second;
5736 
5737  if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5738  tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5739  }
5740 
5741  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5742 
5743  if (displayFeatures) {
5744 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5745  if (tracker->m_trackerType & KLT_TRACKER) {
5746  tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5747  }
5748 #endif
5749 
5750  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5751  tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5752  }
5753  }
5754  }
5755 
5756   computeProjectionError();
5757 }
5758 
5760 vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5761  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5762 {
5763  m_lambda = 1.0;
5764  m_maxIter = 30;
5765 
5766 #ifdef VISP_HAVE_OGRE
5767  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5768 
5769  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5770 #endif
5771 }
5772 
5773 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
5774  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5775 {
5776  if ((m_trackerType & (EDGE_TRACKER |
5777 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5778  KLT_TRACKER |
5779 #endif
5780                         DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5781     throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
5782  }
5783 
5784  m_lambda = 1.0;
5785  m_maxIter = 30;
5786 
5787 #ifdef VISP_HAVE_OGRE
5788  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5789 
5790  m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5791 #endif
5792 }
5793 
5794 vpMbGenericTracker::TrackerWrapper::~TrackerWrapper() { }
5795 
5796 // Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
5797 void vpMbGenericTracker::TrackerWrapper::computeVVS(const vpImage<unsigned char> *const ptr_I)
5798 {
5799  computeVVSInit(ptr_I);
5800 
5801  if (m_error.getRows() < 4) {
5802  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
5803  }
5804 
5805  double normRes = 0;
5806  double normRes_1 = -1;
5807  unsigned int iter = 0;
5808 
5809  double factorEdge = 1.0;
5810 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5811  double factorKlt = 1.0;
5812 #endif
5813  double factorDepth = 1.0;
5814  double factorDepthDense = 1.0;
5815 
5816  vpMatrix LTL;
5817  vpColVector LTR, v;
5818  vpColVector error_prev;
5819 
5820  double mu = m_initialMu;
5821  vpHomogeneousMatrix cMo_prev;
5822 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5823  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
5824 #endif
5825  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
5826  if (isoJoIdentity)
5827  oJo.eye();
5828 
5829  // Covariance
5830  vpColVector W_true(m_error.getRows());
5831  vpMatrix L_true, LVJ_true;
5832 
5833  unsigned int nb_edge_features = m_error_edge.getRows();
5834 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5835  unsigned int nb_klt_features = m_error_klt.getRows();
5836 #endif
5837  unsigned int nb_depth_features = m_error_depthNormal.getRows();
5838  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
5839 
5840  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
5841  computeVVSInteractionMatrixAndResidu(ptr_I);
5842 
5843  bool reStartFromLastIncrement = false;
5844  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
5845 
5846 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5847  if (reStartFromLastIncrement) {
5848  if (m_trackerType & KLT_TRACKER) {
5849  ctTc0 = ctTc0_Prev;
5850  }
5851  }
5852 #endif
5853 
5854  if (!reStartFromLastIncrement) {
5855  computeVVSWeights();
5856 
5857  if (computeCovariance) {
5858  L_true = m_L;
5859  if (!isoJoIdentity) {
5860           vpVelocityTwistMatrix cVo;
5861           cVo.buildFrom(m_cMo);
5862  LVJ_true = (m_L * cVo * oJo);
5863  }
5864  }
5865 
5866       vpVelocityTwistMatrix cVo;
5867       if (iter == 0) {
5868  // If all the 6 dof should be estimated, we check if the interaction
5869  // matrix is full rank. If not we remove automatically the dof that
5870  // cannot be estimated. This is particularly useful when considering
5871  // circles (rank 5) and cylinders (rank 4)
5872  if (isoJoIdentity) {
5873  cVo.buildFrom(m_cMo);
5874 
5875  vpMatrix K; // kernel
5876  unsigned int rank = (m_L * cVo).kernel(K);
5877  if (rank == 0) {
5878  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
5879  }
5880 
5881  if (rank != 6) {
5882  vpMatrix I; // Identity
5883  I.eye(6);
5884  oJo = I - K.AtA();
5885 
5886  isoJoIdentity = false;
5887  }
5888  }
5889  }
5890 
5891  // Weighting
5892  double num = 0;
5893  double den = 0;
5894 
5895  unsigned int start_index = 0;
5896  if (m_trackerType & EDGE_TRACKER) {
5897  for (unsigned int i = 0; i < nb_edge_features; i++) {
5898  double wi = m_w_edge[i] * m_factor[i] * factorEdge;
5899  W_true[i] = wi;
5900  m_weightedError[i] = wi * m_error[i];
5901 
5902  num += wi * vpMath::sqr(m_error[i]);
5903  den += wi;
5904 
5905  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5906  m_L[i][j] *= wi;
5907  }
5908  }
5909 
5910  start_index += nb_edge_features;
5911  }
5912 
5913 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5914  if (m_trackerType & KLT_TRACKER) {
5915  for (unsigned int i = 0; i < nb_klt_features; i++) {
5916  double wi = m_w_klt[i] * factorKlt;
5917  W_true[start_index + i] = wi;
5918  m_weightedError[start_index + i] = wi * m_error_klt[i];
5919 
5920  num += wi * vpMath::sqr(m_error[start_index + i]);
5921  den += wi;
5922 
5923  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5924  m_L[start_index + i][j] *= wi;
5925  }
5926  }
5927 
5928  start_index += nb_klt_features;
5929  }
5930 #endif
5931 
5932  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5933  for (unsigned int i = 0; i < nb_depth_features; i++) {
5934  double wi = m_w_depthNormal[i] * factorDepth;
5935  m_w[start_index + i] = m_w_depthNormal[i];
5936  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5937 
5938  num += wi * vpMath::sqr(m_error[start_index + i]);
5939  den += wi;
5940 
5941  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5942  m_L[start_index + i][j] *= wi;
5943  }
5944  }
5945 
5946  start_index += nb_depth_features;
5947  }
5948 
5949  if (m_trackerType & DEPTH_DENSE_TRACKER) {
5950  for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
5951  double wi = m_w_depthDense[i] * factorDepthDense;
5952  m_w[start_index + i] = m_w_depthDense[i];
5953  m_weightedError[start_index + i] = wi * m_error[start_index + i];
5954 
5955  num += wi * vpMath::sqr(m_error[start_index + i]);
5956  den += wi;
5957 
5958  for (unsigned int j = 0; j < m_L.getCols(); j++) {
5959  m_L[start_index + i][j] *= wi;
5960  }
5961  }
5962 
5963  // start_index += nb_depth_dense_features;
5964  }
5965 
5966  computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
5967 
5968  cMo_prev = m_cMo;
5969 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5970  if (m_trackerType & KLT_TRACKER) {
5971  ctTc0_Prev = ctTc0;
5972  }
5973 #endif
5974 
5975  m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;
5976 
5977 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5978  if (m_trackerType & KLT_TRACKER) {
5979  ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
5980  }
5981 #endif
5982  normRes_1 = normRes;
5983 
5984  normRes = sqrt(num / den);
5985  }
5986 
5987  iter++;
5988  }
5989 
5990  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
5991 
5992  if (m_trackerType & EDGE_TRACKER) {
5993     vpMbEdgeTracker::updateMovingEdgeWeights();
5994   }
5995 }
5996 
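/*
 * Summary of the virtual visual servoing loop implemented above (a paraphrase, not additional code): at
 * each iteration the stacked interaction matrix L and residual e are rebuilt from the enabled feature
 * types, robust M-estimator weights w and per-type factors are applied row-wise, and the pose update is
 * obtained from the velocity twist v solving the (optionally Levenberg-Marquardt damped) normal
 * equations, roughly
 *
 *   v = -lambda * (L^T W L)^{-1} * L^T W e,   followed by   cMo <- exp(v)^{-1} * cMo
 *
 * (see computeVVSPoseEstimation() and vpExponentialMap::direct()). The loop stops when the weighted RMS
 * residual stabilizes or m_maxIter iterations are reached.
 */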
5997 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
5998 {
5999  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6000  "TrackerWrapper::computeVVSInit("
6001  ") should not be called!");
6002 }
6003 
6004 void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
6005 {
6006  initMbtTracking(ptr_I);
6007 
6008  unsigned int nbFeatures = 0;
6009 
6010  if (m_trackerType & EDGE_TRACKER) {
6011  nbFeatures += m_error_edge.getRows();
6012  }
6013  else {
6014  m_error_edge.clear();
6015  m_weightedError_edge.clear();
6016  m_L_edge.clear();
6017  m_w_edge.clear();
6018  }
6019 
6020 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6021  if (m_trackerType & KLT_TRACKER) {
6022     vpMbKltTracker::computeVVSInit();
6023     nbFeatures += m_error_klt.getRows();
6024  }
6025  else {
6026  m_error_klt.clear();
6027  m_weightedError_klt.clear();
6028  m_L_klt.clear();
6029  m_w_klt.clear();
6030  }
6031 #endif
6032 
6033  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6034     vpMbDepthNormalTracker::computeVVSInit();
6035     nbFeatures += m_error_depthNormal.getRows();
6036  }
6037  else {
6038  m_error_depthNormal.clear();
6039  m_weightedError_depthNormal.clear();
6040  m_L_depthNormal.clear();
6041  m_w_depthNormal.clear();
6042  }
6043 
6044  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6045     vpMbDepthDenseTracker::computeVVSInit();
6046     nbFeatures += m_error_depthDense.getRows();
6047  }
6048  else {
6049  m_error_depthDense.clear();
6050  m_weightedError_depthDense.clear();
6051  m_L_depthDense.clear();
6052  m_w_depthDense.clear();
6053  }
6054 
6055  m_L.resize(nbFeatures, 6, false, false);
6056  m_error.resize(nbFeatures, false);
6057 
6058  m_weightedError.resize(nbFeatures, false);
6059  m_w.resize(nbFeatures, false);
6060  m_w = 1;
6061 }
6062 
6063 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu()
6064 {
6065  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6066  "TrackerWrapper::"
6067  "computeVVSInteractionMatrixAndR"
6068  "esidu() should not be called!");
6069 }
6070 
6071 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu(const vpImage<unsigned char> *const ptr_I)
6072 {
6073  if (m_trackerType & EDGE_TRACKER) {
6074     vpMbEdgeTracker::computeVVSInteractionMatrixAndResidu(*ptr_I);
6075   }
6076 
6077 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6078  if (m_trackerType & KLT_TRACKER) {
6079     vpMbKltTracker::computeVVSInteractionMatrixAndResidu();
6080   }
6081 #endif
6082 
6083  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6084     vpMbDepthNormalTracker::computeVVSInteractionMatrixAndResidu();
6085   }
6086 
6087  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6088     vpMbDepthDenseTracker::computeVVSInteractionMatrixAndResidu();
6089   }
6090 
6091  unsigned int start_index = 0;
6092  if (m_trackerType & EDGE_TRACKER) {
6093  m_L.insert(m_L_edge, start_index, 0);
6094  m_error.insert(start_index, m_error_edge);
6095 
6096  start_index += m_error_edge.getRows();
6097  }
6098 
6099 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6100  if (m_trackerType & KLT_TRACKER) {
6101  m_L.insert(m_L_klt, start_index, 0);
6102  m_error.insert(start_index, m_error_klt);
6103 
6104  start_index += m_error_klt.getRows();
6105  }
6106 #endif
6107 
6108  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6109  m_L.insert(m_L_depthNormal, start_index, 0);
6110  m_error.insert(start_index, m_error_depthNormal);
6111 
6112  start_index += m_error_depthNormal.getRows();
6113  }
6114 
6115  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6116  m_L.insert(m_L_depthDense, start_index, 0);
6117  m_error.insert(start_index, m_error_depthDense);
6118 
6119  // start_index += m_error_depthDense.getRows();
6120  }
6121 }
6122 
6123 void vpMbGenericTracker::TrackerWrapper::computeVVSWeights()
6124 {
6125  unsigned int start_index = 0;
6126 
6127  if (m_trackerType & EDGE_TRACKER) {
6128     vpMbEdgeTracker::computeVVSWeights();
6129     m_w.insert(start_index, m_w_edge);
6130 
6131  start_index += m_w_edge.getRows();
6132  }
6133 
6134 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6135  if (m_trackerType & KLT_TRACKER) {
6136  vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
6137  m_w.insert(start_index, m_w_klt);
6138 
6139  start_index += m_w_klt.getRows();
6140  }
6141 #endif
6142 
6143  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6144  if (m_depthNormalUseRobust) {
6145  vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
6146  m_w.insert(start_index, m_w_depthNormal);
6147  }
6148 
6149  start_index += m_w_depthNormal.getRows();
6150  }
6151 
6152  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6153     vpMbTracker::computeVVSWeights(m_robust_depthDense, m_error_depthDense, m_w_depthDense);
6154     m_w.insert(start_index, m_w_depthDense);
6155 
6156  // start_index += m_w_depthDense.getRows();
6157  }
6158 }
6159 
6160 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
6161  const vpCameraParameters &cam, const vpColor &col,
6162  unsigned int thickness, bool displayFullModel)
6163 {
6164  if (displayFeatures) {
6165  std::vector<std::vector<double> > features = getFeaturesForDisplay();
6166  for (size_t i = 0; i < features.size(); i++) {
6167  if (vpMath::equal(features[i][0], 0)) {
6168  vpImagePoint ip(features[i][1], features[i][2]);
6169  int state = static_cast<int>(features[i][3]);
6170 
6171  switch (state) {
6172           case vpMeSite::NO_SUPPRESSION:
6173             vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
6174             break;
6175 
6176  case vpMeSite::CONTRAST:
6177  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
6178  break;
6179 
6180  case vpMeSite::THRESHOLD:
6181             vpDisplay::displayCross(I, ip, 3, vpColor::purple, 1);
6182             break;
6183 
6184  case vpMeSite::M_ESTIMATOR:
6185  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
6186  break;
6187 
6188  case vpMeSite::TOO_NEAR:
6189  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
6190  break;
6191 
6192  default:
6193             vpDisplay::displayCross(I, ip, 3, vpColor::yellow, 1);
6194         }
6195  }
6196  else if (vpMath::equal(features[i][0], 1)) {
6197  vpImagePoint ip1(features[i][1], features[i][2]);
6198         vpDisplay::displayCross(I, ip1, 10, vpColor::red);
6199 
6200  vpImagePoint ip2(features[i][3], features[i][4]);
6201  double id = features[i][5];
6202  std::stringstream ss;
6203  ss << id;
6204  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
6205  }
6206  else if (vpMath::equal(features[i][0], 2)) {
6207  vpImagePoint im_centroid(features[i][1], features[i][2]);
6208  vpImagePoint im_extremity(features[i][3], features[i][4]);
6209  bool desired = vpMath::equal(features[i][0], 2);
6210  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
6211  }
6212  }
6213  }
6214 
6215  std::vector<std::vector<double> > models =
6216  getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
6217  for (size_t i = 0; i < models.size(); i++) {
6218  if (vpMath::equal(models[i][0], 0)) {
6219  vpImagePoint ip1(models[i][1], models[i][2]);
6220  vpImagePoint ip2(models[i][3], models[i][4]);
6221  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
6222  }
6223  else if (vpMath::equal(models[i][0], 1)) {
6224  vpImagePoint center(models[i][1], models[i][2]);
6225  double n20 = models[i][3];
6226  double n11 = models[i][4];
6227  double n02 = models[i][5];
6228  vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
6229  }
6230  }
6231 
6232 #ifdef VISP_HAVE_OGRE
6233  if ((m_trackerType & EDGE_TRACKER)
6234 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6235  || (m_trackerType & KLT_TRACKER)
6236 #endif
6237  ) {
6238  if (useOgre)
6239  faces.displayOgre(cMo);
6240  }
6241 #endif
6242 }
6243 
6244 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
6245  const vpCameraParameters &cam, const vpColor &col,
6246  unsigned int thickness, bool displayFullModel)
6247 {
6248  if (displayFeatures) {
6249  std::vector<std::vector<double> > features = getFeaturesForDisplay();
6250  for (size_t i = 0; i < features.size(); i++) {
6251  if (vpMath::equal(features[i][0], 0)) {
6252  vpImagePoint ip(features[i][1], features[i][2]);
6253  int state = static_cast<int>(features[i][3]);
6254 
6255  switch (state) {
6256           case vpMeSite::NO_SUPPRESSION:
6257             vpDisplay::displayCross(I, ip, 3, vpColor::green, 1);
6258             break;
6259 
6260  case vpMeSite::CONTRAST:
6261  vpDisplay::displayCross(I, ip, 3, vpColor::blue, 1);
6262  break;
6263 
6264  case vpMeSite::THRESHOLD:
6265             vpDisplay::displayCross(I, ip, 3, vpColor::purple, 1);
6266             break;
6267 
6268  case vpMeSite::M_ESTIMATOR:
6269  vpDisplay::displayCross(I, ip, 3, vpColor::red, 1);
6270  break;
6271 
6272  case vpMeSite::TOO_NEAR:
6273  vpDisplay::displayCross(I, ip, 3, vpColor::cyan, 1);
6274  break;
6275 
6276  default:
6277             vpDisplay::displayCross(I, ip, 3, vpColor::yellow, 1);
6278         }
6279  }
6280  else if (vpMath::equal(features[i][0], 1)) {
6281  vpImagePoint ip1(features[i][1], features[i][2]);
6282         vpDisplay::displayCross(I, ip1, 10, vpColor::red);
6283 
6284  vpImagePoint ip2(features[i][3], features[i][4]);
6285  double id = features[i][5];
6286  std::stringstream ss;
6287  ss << id;
6288  vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
6289  }
6290  else if (vpMath::equal(features[i][0], 2)) {
6291  vpImagePoint im_centroid(features[i][1], features[i][2]);
6292  vpImagePoint im_extremity(features[i][3], features[i][4]);
6293  bool desired = vpMath::equal(features[i][0], 2);
6294  vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
6295  }
6296  }
6297  }
6298 
6299  std::vector<std::vector<double> > models =
6300  getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
6301  for (size_t i = 0; i < models.size(); i++) {
6302  if (vpMath::equal(models[i][0], 0)) {
6303  vpImagePoint ip1(models[i][1], models[i][2]);
6304  vpImagePoint ip2(models[i][3], models[i][4]);
6305  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
6306  }
6307  else if (vpMath::equal(models[i][0], 1)) {
6308  vpImagePoint center(models[i][1], models[i][2]);
6309  double n20 = models[i][3];
6310  double n11 = models[i][4];
6311  double n02 = models[i][5];
6312  vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
6313  }
6314  }
6315 
6316 #ifdef VISP_HAVE_OGRE
6317  if ((m_trackerType & EDGE_TRACKER)
6318 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6319  || (m_trackerType & KLT_TRACKER)
6320 #endif
6321  ) {
6322  if (useOgre)
6323  faces.displayOgre(cMo);
6324  }
6325 #endif
6326 }
6327 
6328 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6329 {
6330  std::vector<std::vector<double> > features;
6331 
6332  if (m_trackerType & EDGE_TRACKER) {
6333  // m_featuresToBeDisplayedEdge updated after computeVVS()
6334  features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6335  }
6336 
6337 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6338  if (m_trackerType & KLT_TRACKER) {
6339  // m_featuresToBeDisplayedKlt updated after postTracking()
6340  features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6341  }
6342 #endif
6343 
6344  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6345  // m_featuresToBeDisplayedDepthNormal updated after postTracking()
6346  features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6347  m_featuresToBeDisplayedDepthNormal.end());
6348  }
6349 
6350  return features;
6351 }
6352 
6353 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width,
6354  unsigned int height,
6355  const vpHomogeneousMatrix &cMo,
6356  const vpCameraParameters &cam,
6357  bool displayFullModel)
6358 {
6359  std::vector<std::vector<double> > models;
6360 
6361  // Do not add multiple times the same models
6362  if (m_trackerType == EDGE_TRACKER) {
6363  models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6364  }
6365 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6366  else if (m_trackerType == KLT_TRACKER) {
6367  models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6368  }
6369 #endif
6370  else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6371  models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6372  }
6373  else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6374  models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6375  }
6376  else {
6377  // Edge and KLT trackers use the same primitives
6378  if (m_trackerType & EDGE_TRACKER) {
6379  std::vector<std::vector<double> > edgeModels =
6380  vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6381  models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6382  }
6383 
6384  // Depth dense and depth normal trackers use the same primitives
6385  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6386  std::vector<std::vector<double> > depthDenseModels =
6387  vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6388  models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6389  }
6390  }
6391 
6392  return models;
6393 }
6394 
6395 void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
6396 {
6397  if (!modelInitialised) {
6398  throw vpException(vpException::fatalError, "model not initialized");
6399  }
6400 
6401  if (useScanLine || clippingFlag > 3)
6402  m_cam.computeFov(I.getWidth(), I.getHeight());
6403 
6404  bool reInitialisation = false;
6405  if (!useOgre) {
6406  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6407  }
6408  else {
6409 #ifdef VISP_HAVE_OGRE
6410  if (!faces.isOgreInitialised()) {
6411  faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());
6412 
6413  faces.setOgreShowConfigDialog(ogreShowConfigDialog);
6414  faces.initOgre(m_cam);
6415  // Turn off Ogre config dialog display for the next call to this
6416  // function since settings are saved in the ogre.cfg file and used
6417  // during the next call
6418  ogreShowConfigDialog = false;
6419  }
6420 
6421  faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6422 #else
6423  faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6424 #endif
6425  }
6426 
6427  if (useScanLine) {
6428  faces.computeClippedPolygons(m_cMo, m_cam);
6429  faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight());
6430  }
6431 
6432 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6433  if (m_trackerType & KLT_TRACKER)
6434     vpMbKltTracker::reinit(I);
6435 #endif
6436 
6437  if (m_trackerType & EDGE_TRACKER) {
6438     vpMbEdgeTracker::resetMovingEdge();
6439 
6440  bool a = false;
6441  vpMbEdgeTracker::visibleFace(I, m_cMo, a); // should be useless, but keep it for nbvisiblepolygone
6442 
6443     initMovingEdge(I, m_cMo);
6444   }
6445 
6446  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6447  vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);
6448 
6449  if (m_trackerType & DEPTH_DENSE_TRACKER)
6450  vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
6451 }
6452 
6453 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6454  double radius, int idFace, const std::string &name)
6455 {
6456  if (m_trackerType & EDGE_TRACKER)
6457  vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6458 
6459 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6460  if (m_trackerType & KLT_TRACKER)
6461  vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6462 #endif
6463 }
6464 
6465 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace,
6466  const std::string &name)
6467 {
6468  if (m_trackerType & EDGE_TRACKER)
6469  vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6470 
6471 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6472  if (m_trackerType & KLT_TRACKER)
6473  vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6474 #endif
6475 }
6476 
6477 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
6478 {
6479  if (m_trackerType & EDGE_TRACKER)
6480     vpMbEdgeTracker::initFaceFromCorners(polygon);
6481 
6482 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6483  if (m_trackerType & KLT_TRACKER)
6484     vpMbKltTracker::initFaceFromCorners(polygon);
6485 #endif
6486 
6487  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6488     vpMbDepthNormalTracker::initFaceFromCorners(polygon);
6489 
6490  if (m_trackerType & DEPTH_DENSE_TRACKER)
6491     vpMbDepthDenseTracker::initFaceFromCorners(polygon);
6492 }
6493 
6494 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
6495 {
6496  if (m_trackerType & EDGE_TRACKER)
6497     vpMbEdgeTracker::initFaceFromLines(polygon);
6498 
6499 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6500  if (m_trackerType & KLT_TRACKER)
6501     vpMbKltTracker::initFaceFromLines(polygon);
6502 #endif
6503 
6504  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6505     vpMbDepthNormalTracker::initFaceFromLines(polygon);
6506 
6507  if (m_trackerType & DEPTH_DENSE_TRACKER)
6508     vpMbDepthDenseTracker::initFaceFromLines(polygon);
6509 }
6510 
6511 void vpMbGenericTracker::TrackerWrapper::initMbtTracking(const vpImage<unsigned char> *const ptr_I)
6512 {
6513  if (m_trackerType & EDGE_TRACKER) {
6516  }
6517 }
6518 
6519 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile, bool verbose)
6520 {
6521  // Load projection error config
6522  vpMbTracker::loadConfigFile(configFile, verbose);
6523 
6524  vpMbtXmlGenericParser xmlp((vpMbtXmlGenericParser::vpParserType)m_trackerType);
6525  xmlp.setVerbose(verbose);
6526  xmlp.setCameraParameters(m_cam);
6527  xmlp.setAngleAppear(vpMath::deg(angleAppears));
6528  xmlp.setAngleDisappear(vpMath::deg(angleDisappears));
6529 
6530  // Edge
6531  xmlp.setEdgeMe(me);
6532 
6533  // KLT
6534  xmlp.setKltMaxFeatures(10000);
6535  xmlp.setKltWindowSize(5);
6536  xmlp.setKltQuality(0.01);
6537  xmlp.setKltMinDistance(5);
6538  xmlp.setKltHarrisParam(0.01);
6539  xmlp.setKltBlockSize(3);
6540  xmlp.setKltPyramidLevels(3);
6541 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6542  xmlp.setKltMaskBorder(maskBorder);
6543 #endif
6544 
6545  // Depth normal
6546  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6547  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6548  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6549  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6550  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6551  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6552 
6553  // Depth dense
6554  xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6555  xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6556 
6557  try {
6558  if (verbose) {
6559  std::cout << " *********** Parsing XML for";
6560  }
6561 
6562  std::vector<std::string> tracker_names;
6563  if (m_trackerType & EDGE_TRACKER)
6564  tracker_names.push_back("Edge");
6565 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6566  if (m_trackerType & KLT_TRACKER)
6567  tracker_names.push_back("Klt");
6568 #endif
6569  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6570  tracker_names.push_back("Depth Normal");
6571  if (m_trackerType & DEPTH_DENSE_TRACKER)
6572  tracker_names.push_back("Depth Dense");
6573 
6574  if (verbose) {
6575  for (size_t i = 0; i < tracker_names.size(); i++) {
6576  std::cout << " " << tracker_names[i];
6577  if (i == tracker_names.size() - 1) {
6578  std::cout << " ";
6579  }
6580  }
6581 
6582  std::cout << "Model-Based Tracker ************ " << std::endl;
6583  }
6584 
6585  xmlp.parse(configFile);
6586  }
6587  catch (...) {
6588  throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
6589  }
6590 
6591  vpCameraParameters camera;
6592  xmlp.getCameraParameters(camera);
6593  setCameraParameters(camera);
6594 
6595  angleAppears = vpMath::rad(xmlp.getAngleAppear());
6596  angleDisappears = vpMath::rad(xmlp.getAngleDisappear());
6597 
6598  if (xmlp.hasNearClippingDistance())
6599  setNearClippingDistance(xmlp.getNearClippingDistance());
6600 
6601  if (xmlp.hasFarClippingDistance())
6602  setFarClippingDistance(xmlp.getFarClippingDistance());
6603 
6604  if (xmlp.getFovClipping()) {
6605  setClipping(clippingFlag | vpPolygon3D::FOV_CLIPPING);
6606  }
6607 
6608  useLodGeneral = xmlp.getLodState();
6609  minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
6610  minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();
6611 
6612  applyLodSettingInConfig = false;
6613  if (this->getNbPolygon() > 0) {
6614  applyLodSettingInConfig = true;
6615  setLod(useLodGeneral);
6616  setMinLineLengthThresh(minLineLengthThresholdGeneral);
6617  setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
6618  }
6619 
6620  // Edge
6621  vpMe meParser;
6622  xmlp.getEdgeMe(meParser);
6623  vpMbEdgeTracker::setMovingEdge(meParser);
6624 
6625  // KLT
6626 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6627  tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
6628  tracker.setWindowSize((int)xmlp.getKltWindowSize());
6629  tracker.setQuality(xmlp.getKltQuality());
6630  tracker.setMinDistance(xmlp.getKltMinDistance());
6631  tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6632  tracker.setBlockSize((int)xmlp.getKltBlockSize());
6633  tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
6634  maskBorder = xmlp.getKltMaskBorder();
6635 
6636  // if(useScanLine)
6637  faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6638 #endif
6639 
6640  // Depth normal
6641  setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
6642  setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
6643  setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
6644  setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
6645  setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
6646 
6647  // Depth dense
6648  setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
6649 }
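// Illustrative usage sketch (not part of this source file): how the settings parsed
// above are typically supplied from user code through the public vpMbGenericTracker
// API. The file names are placeholders.
//
//   vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
//   tracker.loadConfigFile("object.xml"); // camera, moving-edge, clipping and LOD settings
//   tracker.loadModel("object.cao");      // CAD model handled by the wrappers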
6650 
6651 #ifdef VISP_HAVE_PCL
6652 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6653  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6654 {
6655 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6656  // KLT
6657  if (m_trackerType & KLT_TRACKER) {
6658  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6659  vpMbKltTracker::reinit(*ptr_I);
6660  }
6661  }
6662 #endif
6663 
6664  // Looking for new visible face
6665  if (m_trackerType & EDGE_TRACKER) {
6666  bool newvisibleface = false;
6667  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6668 
6669  if (useScanLine) {
6670  faces.computeClippedPolygons(m_cMo, m_cam);
6671  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6672  }
6673  }
6674 
6675  // Depth normal
6676  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6677  vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);
6678 
6679  // Depth dense
6680  if (m_trackerType & DEPTH_DENSE_TRACKER)
6681  vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);
6682 
6683  // Edge
6684  if (m_trackerType & EDGE_TRACKER) {
6685  vpMbEdgeTracker::updateMovingEdge(*ptr_I);
6686 
6687  vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6688  // Reinit the moving edge for the lines which need it.
6689  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6690 
6691  if (computeProjError) {
6692  vpMbEdgeTracker::computeProjectionError(*ptr_I);
6693  }
6694  }
6695 }
6696 
6697 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
6698  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6699 {
6700  if (m_trackerType & EDGE_TRACKER) {
6701  try {
6702  vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6703  }
6704  catch (...) {
6705  std::cerr << "Error in moving edge tracking" << std::endl;
6706  throw;
6707  }
6708  }
6709 
6710 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6711  if (m_trackerType & KLT_TRACKER) {
6712  try {
6713  vpMbKltTracker::preTracking(*ptr_I);
6714  }
6715  catch (const vpException &e) {
6716  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6717  throw;
6718  }
6719  }
6720 #endif
6721 
6722  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6723  try {
6724  vpMbDepthNormalTracker::segmentPointCloud(point_cloud);
6725  }
6726  catch (...) {
6727  std::cerr << "Error in Depth normal tracking" << std::endl;
6728  throw;
6729  }
6730  }
6731 
6732  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6733  try {
6734  vpMbDepthDenseTracker::segmentPointCloud(point_cloud);
6735  }
6736  catch (...) {
6737  std::cerr << "Error in Depth dense tracking" << std::endl;
6738  throw;
6739  }
6740  }
6741 }
6742 #endif
6743 
6744 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6745  const unsigned int pointcloud_width,
6746  const unsigned int pointcloud_height)
6747 {
6748 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6749  // KLT
6750  if (m_trackerType & KLT_TRACKER) {
6751  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6752  vpMbKltTracker::reinit(*ptr_I);
6753  }
6754  }
6755 #endif
6756 
6757  // Looking for new visible face
6758  if (m_trackerType & EDGE_TRACKER) {
6759  bool newvisibleface = false;
6760  vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6761 
6762  if (useScanLine) {
6763  faces.computeClippedPolygons(m_cMo, m_cam);
6764  faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6765  }
6766  }
6767 
6768  // Depth normal
6769  if (m_trackerType & DEPTH_NORMAL_TRACKER)
6770  vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);
6771 
6772  // Depth dense
6773  if (m_trackerType & DEPTH_DENSE_TRACKER)
6774  vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);
6775 
6776  // Edge
6777  if (m_trackerType & EDGE_TRACKER) {
6778  vpMbEdgeTracker::updateMovingEdge(*ptr_I);
6779 
6780  vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6781  // Reinit the moving edge for the lines which need it.
6782  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6783 
6784  if (computeProjError) {
6785  vpMbEdgeTracker::computeProjectionError(*ptr_I);
6786  }
6787  }
6788 }
6789 
6790 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
6791  const std::vector<vpColVector> *const point_cloud,
6792  const unsigned int pointcloud_width,
6793  const unsigned int pointcloud_height)
6794 {
6795  if (m_trackerType & EDGE_TRACKER) {
6796  try {
6797  vpMbEdgeTracker::trackMovingEdge(*ptr_I);
6798  }
6799  catch (...) {
6800  std::cerr << "Error in moving edge tracking" << std::endl;
6801  throw;
6802  }
6803  }
6804 
6805 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6806  if (m_trackerType & KLT_TRACKER) {
6807  try {
6808  vpMbKltTracker::preTracking(*ptr_I);
6809  }
6810  catch (const vpException &e) {
6811  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6812  throw;
6813  }
6814  }
6815 #endif
6816 
6817  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6818  try {
6819  vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6820  }
6821  catch (...) {
6822  std::cerr << "Error in Depth normal tracking" << std::endl;
6823  throw;
6824  }
6825  }
6826 
6827  if (m_trackerType & DEPTH_DENSE_TRACKER) {
6828  try {
6829  vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6830  }
6831  catch (...) {
6832  std::cerr << "Error in Depth dense tracking" << std::endl;
6833  throw;
6834  }
6835  }
6836 }
6837 
6838 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> *const I,
6839  const vpImage<vpRGBa> *const I_color, const std::string &cad_name,
6840  const vpHomogeneousMatrix &cMo, bool verbose,
6841  const vpHomogeneousMatrix &T)
6842 {
6843  m_cMo.eye();
6844 
6845  // Edge
6846  vpMbtDistanceLine *l;
6847  vpMbtDistanceCylinder *cy;
6848  vpMbtDistanceCircle *ci;
6849 
6850  for (unsigned int i = 0; i < scales.size(); i++) {
6851  if (scales[i]) {
6852  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
6853  l = *it;
6854  if (l != NULL)
6855  delete l;
6856  l = NULL;
6857  }
6858 
6859  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
6860  ++it) {
6861  cy = *it;
6862  if (cy != NULL)
6863  delete cy;
6864  cy = NULL;
6865  }
6866 
6867  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
6868  ci = *it;
6869  if (ci != NULL)
6870  delete ci;
6871  ci = NULL;
6872  }
6873 
6874  lines[i].clear();
6875  cylinders[i].clear();
6876  circles[i].clear();
6877  }
6878  }
6879 
6880  nline = 0;
6881  ncylinder = 0;
6882  ncircle = 0;
6883  nbvisiblepolygone = 0;
6884 
6885  // KLT
6886 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6887 #if (VISP_HAVE_OPENCV_VERSION < 0x020408)
6888  if (cur != NULL) {
6889  cvReleaseImage(&cur);
6890  cur = NULL;
6891  }
6892 #endif
6893 
6894  // delete the Klt Polygon features
6895  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
6896  vpMbtDistanceKltPoints *kltpoly = *it;
6897  if (kltpoly != NULL) {
6898  delete kltpoly;
6899  }
6900  kltpoly = NULL;
6901  }
6902  kltPolygons.clear();
6903 
6904  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
6905  ++it) {
6906  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
6907  if (kltPolyCylinder != NULL) {
6908  delete kltPolyCylinder;
6909  }
6910  kltPolyCylinder = NULL;
6911  }
6912  kltCylinders.clear();
6913 
6914  // delete the structures used to display circles
6915  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
6916  ci = *it;
6917  if (ci != NULL) {
6918  delete ci;
6919  }
6920  ci = NULL;
6921  }
6922  circles_disp.clear();
6923 
6924  firstInitialisation = true;
6925 
6926 #endif
6927 
6928  // Depth normal
6929  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
6930  delete m_depthNormalFaces[i];
6931  m_depthNormalFaces[i] = NULL;
6932  }
6933  m_depthNormalFaces.clear();
6934 
6935  // Depth dense
6936  for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
6937  delete m_depthDenseFaces[i];
6938  m_depthDenseFaces[i] = NULL;
6939  }
6940  m_depthDenseFaces.clear();
6941 
6942  faces.reset();
6943 
6944  loadModel(cad_name, verbose, T);
6945  if (I) {
6946  initFromPose(*I, cMo);
6947  }
6948  else {
6949  initFromPose(*I_color, cMo);
6950  }
6951 }
6952 
6953 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
6954  const vpHomogeneousMatrix &cMo, bool verbose,
6955  const vpHomogeneousMatrix &T)
6956 {
6957  reInitModel(&I, NULL, cad_name, cMo, verbose, T);
6958 }
6959 
6960 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
6961  const vpHomogeneousMatrix &cMo, bool verbose,
6962  const vpHomogeneousMatrix &T)
6963 {
6964  reInitModel(NULL, &I_color, cad_name, cMo, verbose, T);
6965 }
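// Illustrative sketch (not part of this source file): re-initializing the model is
// typically done after tracking is lost, once a new pose estimate is available.
// "object.cao" and cMo_new are placeholders for the caller's data.
//
//   vpHomogeneousMatrix cMo_new; // pose recovered externally (e.g. by detection)
//   tracker.reInitModel(I, "object.cao", cMo_new, true);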
6966 
6967 void vpMbGenericTracker::TrackerWrapper::resetTracker()
6968 {
6969  vpMbEdgeTracker::resetTracker();
6970 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6971  vpMbKltTracker::resetTracker();
6972 #endif
6973  vpMbDepthNormalTracker::resetTracker();
6974  vpMbDepthDenseTracker::resetTracker();
6975 }
6976 
6977 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
6978 {
6979  m_cam = cam;
6980 
6981  vpMbEdgeTracker::setCameraParameters(m_cam);
6982 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
6983  vpMbKltTracker::setCameraParameters(m_cam);
6984 #endif
6985  vpMbDepthNormalTracker::setCameraParameters(m_cam);
6986  vpMbDepthDenseTracker::setCameraParameters(m_cam);
6987 }
6988 
6989 void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags) { vpMbEdgeTracker::setClipping(flags); }
6990 
6991 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
6992 {
6993  vpMbEdgeTracker::setFarClippingDistance(dist);
6994 }
6995 
6996 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
6997 {
6998  vpMbEdgeTracker::setNearClippingDistance(dist);
6999 }
7000 
7001 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
7002 {
7003  vpMbTracker::setOgreVisibilityTest(v);
7004 #ifdef VISP_HAVE_OGRE
7005  faces.getOgreContext()->setWindowName("TrackerWrapper");
7006 #endif
7007 }
7008 
7009 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> *const I,
7010  const vpImage<vpRGBa> *const I_color, const vpHomogeneousMatrix &cdMo)
7011 {
7012  bool performKltSetPose = false;
7013  if (I_color) {
7014  vpImageConvert::convert(*I_color, m_I);
7015  }
7016 
7017 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
7018  if (m_trackerType & KLT_TRACKER) {
7019  performKltSetPose = true;
7020 
7021  if (useScanLine || clippingFlag > 3) {
7022  m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7023  }
7024 
7025  vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
7026  }
7027 #endif
7028 
7029  if (!performKltSetPose) {
7030  m_cMo = cdMo;
7031  init(I ? *I : m_I);
7032  return;
7033  }
7034 
7035  if (m_trackerType & EDGE_TRACKER) {
7036  resetMovingEdge();
7037  }
7038 
7039  if (useScanLine) {
7040  faces.computeClippedPolygons(m_cMo, m_cam);
7041  faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7042  }
7043 
7044 #if 0
7045  if (m_trackerType & EDGE_TRACKER) {
7046  initPyramid(I, Ipyramid);
7047 
7048  unsigned int i = (unsigned int)scales.size();
7049  do {
7050  i--;
7051  if (scales[i]) {
7052  downScale(i);
7053  vpMbEdgeTracker::initMovingEdge(*Ipyramid[i], cMo);
7054  upScale(i);
7055  }
7056  } while (i != 0);
7057 
7058  cleanPyramid(Ipyramid);
7059  }
7060 #else
7061  if (m_trackerType & EDGE_TRACKER) {
7062  vpMbEdgeTracker::initMovingEdge(I ? *I : m_I, m_cMo);
7063  }
7064 #endif
7065 
7066  // Depth normal
7067  vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7068 
7069  // Depth dense
7070  vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7071 }
7072 
7073 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
7074 {
7075  setPose(&I, NULL, cdMo);
7076 }
7077 
7078 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
7079 {
7080  setPose(NULL, &I_color, cdMo);
7081 }
7082 
7083 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
7084 {
7085  vpMbEdgeTracker::setProjectionErrorComputation(flag);
7086 }
7087 
7088 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
7089 {
7090  vpMbEdgeTracker::setScanLineVisibilityTest(v);
7091 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
7092  vpMbKltTracker::setScanLineVisibilityTest(v);
7093 #endif
7094  vpMbDepthNormalTracker::setScanLineVisibilityTest(v);
7095  vpMbDepthDenseTracker::setScanLineVisibilityTest(v);
7096 }
7097 
7098 void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
7099 {
7100  if ((type & (EDGE_TRACKER |
7101 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
7102  KLT_TRACKER |
7103 #endif
7104  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7105  throw vpException(vpTrackingException::badValue, "bad value for tracker type: %d", type);
7106  }
7107 
7108  m_trackerType = type;
7109 }
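// Illustrative sketch (not part of this source file): the tracker type is a bitmask,
// so several feature types can be combined for one camera; a value with none of the
// supported bits set makes this setter throw. "tracker" is a placeholder
// vpMbGenericTracker instance.
//
//   tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER |
//                          vpMbGenericTracker::DEPTH_DENSE_TRACKER);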
7110 
7111 void vpMbGenericTracker::TrackerWrapper::testTracking()
7112 {
7113  if (m_trackerType & EDGE_TRACKER) {
7114  vpMbEdgeTracker::testTracking();
7115  }
7116 }
7117 
7118 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> &
7119 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
7120  I
7121 #endif
7122 )
7123 {
7124  if ((m_trackerType & (EDGE_TRACKER
7125 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
7126  | KLT_TRACKER
7127 #endif
7128  )) == 0) {
7129  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7130  return;
7131  }
7132 
7133 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
7134  track(&I, nullptr);
7135 #endif
7136 }
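// Illustrative sketch (not part of this source file): a minimal monocular tracking
// loop using the public API; the image I, grabber and display handling are assumed
// to exist in the caller.
//
//   while (!end) {
//     tracker.track(I);     // this overload relies on image features (edge / KLT) only
//     vpHomogeneousMatrix cMo;
//     tracker.getPose(cMo); // current object pose in the camera frame
//   }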
7137 
7138 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<vpRGBa> &)
7139 {
7140  // not exposed to the public API, only for debug
7141 }
7142 
7143 #ifdef VISP_HAVE_PCL
7144 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
7145  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
7146 {
7147  if ((m_trackerType & (EDGE_TRACKER |
7148 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
7149  KLT_TRACKER |
7150 #endif
7151  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7152  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7153  return;
7154  }
7155 
7156  if (m_trackerType & (EDGE_TRACKER
7157 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
7158  | KLT_TRACKER
7159 #endif
7160  ) &&
7161  ptr_I == NULL) {
7162  throw vpException(vpException::fatalError, "Image pointer is NULL!");
7163  }
7164 
7165  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) { // point_cloud == nullptr
7166  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
7167  }
7168 
7169  // Back-up cMo in case of exception
7170  vpHomogeneousMatrix cMo_1 = m_cMo;
7171  try {
7172  preTracking(ptr_I, point_cloud);
7173 
7174  try {
7175  computeVVS(ptr_I);
7176  }
7177  catch (...) {
7178  covarianceMatrix = -1;
7179  throw; // throw the original exception
7180  }
7181 
7182  if (m_trackerType == EDGE_TRACKER)
7183  testTracking();
7184 
7185  postTracking(ptr_I, point_cloud);
7186 
7187  }
7188  catch (const vpException &e) {
7189  std::cerr << "Exception: " << e.what() << std::endl;
7190  m_cMo = cMo_1;
7191  throw; // rethrowing the original exception
7192  }
7193 }
7194 #endif
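// Illustrative sketch (not part of this source file): when depth features are enabled,
// the public multi-sensor interface takes maps of images and point clouds keyed by
// camera name ("Camera1", "Camera2" are the default names for a two-camera setup).
// I and pointcloud are placeholders provided by the caller.
//
//   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
//   std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
//   mapOfImages["Camera1"] = &I;
//   mapOfPointClouds["Camera2"] = pointcloud;
//   tracker.track(mapOfImages, mapOfPointClouds);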