Visual Servoing Platform  version 3.1.0
vpMbGenericTracker.cpp
1 /****************************************************************************
2  *
3  * This file is part of the ViSP software.
4  * Copyright (C) 2005 - 2017 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Generic model-based tracker.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/mbt/vpMbGenericTracker.h>
37 
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/mbt/vpMbtXmlGenericParser.h>
42 
43 vpMbGenericTracker::vpMbGenericTracker()
44  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
45  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
46 {
47  m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
48 
49  // Add default camera transformation matrix
50  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
51 
52  // Add default weighting factor for each feature type
53  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
54 
55 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
56  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
57 #endif
58 
59  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
60  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
61 }
62 
63 vpMbGenericTracker::vpMbGenericTracker(const unsigned int nbCameras, const int trackerType)
64  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
65  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
66 {
67  if (nbCameras == 0) {
68  throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
69  } else if (nbCameras == 1) {
70  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);
71 
72  // Add default camera transformation matrix
73  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
74  } else {
75  for (unsigned int i = 1; i <= nbCameras; i++) {
76  std::stringstream ss;
77  ss << "Camera" << i;
78  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);
79 
80  // Add default camera transformation matrix
81  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
82  }
83 
84  // Set by default the reference camera to the first one
85  m_referenceCameraName = m_mapOfTrackers.begin()->first;
86  }
87 
88  // Add default weighting factor for each feature type
89  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
90 
91 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
92  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
93 #endif
94 
95  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
96  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
97 }
98 
99 vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
100  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
101  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
102 {
103  if (trackerTypes.empty()) {
104  throw vpException(vpException::badValue, "There is no camera!");
105  }
106 
107  if (trackerTypes.size() == 1) {
108  m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);
109 
110  // Add default camera transformation matrix
111  m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
112  } else {
113  for (size_t i = 1; i <= trackerTypes.size(); i++) {
114  std::stringstream ss;
115  ss << "Camera" << i;
116  m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);
117 
118  // Add default camera transformation matrix
119  m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
120  }
121 
122  // Set by default the reference camera to the first one
123  m_referenceCameraName = m_mapOfTrackers.begin()->first;
124  }
125 
126  // Add default weighting factor for each feature type
127  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
128 
129 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
130  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
131 #endif
132 
133  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
134  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
135 }
136 
137 vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
138  const std::vector<int> &trackerTypes)
139  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
140  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError()
141 {
142  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
143  throw vpException(vpTrackingException::badValue,
144  "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
145  }
146 
147  for (size_t i = 0; i < cameraNames.size(); i++) {
148  m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);
149 
150  // Add default camera transformation matrix
151  m_mapOfCameraTransformationMatrix[cameraNames[i]] = vpHomogeneousMatrix();
152  }
153 
154  // Set by default the reference camera to the first one
155  m_referenceCameraName = m_mapOfTrackers.begin()->first;
156 
157  // Add default weighting factor for each feature type
158  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.;
159 
160 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
161  m_mapOfFeatureFactors[KLT_TRACKER] = 1.;
162 #endif
163 
164  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.;
165  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.;
166 }
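// Illustrative usage sketch (not part of the original sources): constructing a
// stereo tracker with explicit camera names. The names "Camera1"/"Camera2" and
// the chosen feature type are arbitrary examples.
//
//   std::vector<std::string> cameraNames;
//   cameraNames.push_back("Camera1");
//   cameraNames.push_back("Camera2");
//   std::vector<int> trackerTypes(2, vpMbGenericTracker::EDGE_TRACKER);
//   vpMbGenericTracker tracker(cameraNames, trackerTypes);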
167 
168 vpMbGenericTracker::~vpMbGenericTracker()
169 {
170  for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
171  ++it) {
172  delete it->second;
173  it->second = NULL;
174  }
175 }
176 
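// Compute the global projection error as the feature-count-weighted average of the
// per-camera projection errors (converted back to degrees); defaults to 90 degrees
// when the computation is disabled or when no feature contributed.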
177 void vpMbGenericTracker::computeProjectionError()
178 {
179  if (computeProjError) {
180  double rawTotalProjectionError = 0.0;
181  unsigned int nbTotalFeaturesUsed = 0;
182 
183  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
184  it != m_mapOfTrackers.end(); ++it) {
185  TrackerWrapper *tracker = it->second;
186 
187  double curProjError = tracker->getProjectionError();
188  unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
189 
190  if (nbFeaturesUsed > 0) {
191  nbTotalFeaturesUsed += nbFeaturesUsed;
192  rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
193  }
194  }
195 
196  if (nbTotalFeaturesUsed > 0) {
197  projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
198  } else {
199  projectionError = 90.0;
200  }
201  } else {
202  projectionError = 90.0;
203  }
204 }
205 
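// Virtual visual servoing (VVS) optimization shared by all enabled feature types:
// the per-camera interaction matrices and residuals are stacked, expressed in the
// reference camera frame through the camera transformation matrices, robustly
// weighted, and used to update the reference pose cMo until the residual norm
// stabilizes or m_maxIter is reached.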
206 void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
207 {
208  computeVVSInit(mapOfImages);
209 
210  if (m_error.getRows() < 4) {
211  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
212  }
213 
214  double normRes = 0;
215  double normRes_1 = -1;
216  unsigned int iter = 0;
217 
218  vpMatrix LTL;
219  vpColVector LTR, v;
220  vpColVector error_prev;
221 
222  double mu = m_initialMu;
223  vpHomogeneousMatrix cMo_prev;
224 
225  bool isoJoIdentity_ = true;
226 
227  // Covariance
228  vpColVector W_true(m_error.getRows());
229  vpMatrix L_true, LVJ_true;
230 
231  // Create the map of VelocityTwistMatrices
232  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
233  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
234  it != m_mapOfCameraTransformationMatrix.end(); ++it) {
235  vpVelocityTwistMatrix cVo;
236  cVo.buildFrom(it->second);
237  mapOfVelocityTwist[it->first] = cVo;
238  }
239 
240  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
241 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
242  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
243 #endif
244  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
245  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];
246 
247  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
248  computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);
249 
250  bool reStartFromLastIncrement = false;
251  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
252  if (reStartFromLastIncrement) {
253  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
254  it != m_mapOfTrackers.end(); ++it) {
255  TrackerWrapper *tracker = it->second;
256 
257  tracker->cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;
258 
259 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
260  vpHomogeneousMatrix c_curr_tTc_curr0 =
261  m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
262  tracker->ctTc0 = c_curr_tTc_curr0;
263 #endif
264  }
265  }
266 
267  if (!reStartFromLastIncrement) {
268  computeVVSWeights();
269 
270  if (computeCovariance) {
271  L_true = m_L;
272  if (!isoJoIdentity_) {
273  vpVelocityTwistMatrix cVo;
274  cVo.buildFrom(cMo);
275  LVJ_true = (m_L * (cVo * oJo));
276  }
277  }
278 
279  vpVelocityTwistMatrix cVo;
280  if (iter == 0) {
281  isoJoIdentity_ = true;
282  oJo.eye();
283 
284  // If all the 6 dof should be estimated, we check if the interaction
285  // matrix is full rank. If not, we automatically remove the dof that
286  // cannot be estimated. This is particularly useful when considering
287  // circles (rank 5) and cylinders (rank 4).
288  if (isoJoIdentity_) {
289  cVo.buildFrom(cMo);
290 
291  vpMatrix K; // kernel
292  unsigned int rank = (m_L * cVo).kernel(K);
293  if (rank == 0) {
294  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
295  }
296 
297  if (rank != 6) {
298  vpMatrix I; // Identity
299  I.eye(6);
300  oJo = I - K.AtA();
301 
302  isoJoIdentity_ = false;
303  }
304  }
305  }
306 
307  // Weighting
308  double num = 0;
309  double den = 0;
310 
311  unsigned int start_index = 0;
312  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
313  it != m_mapOfTrackers.end(); ++it) {
314  TrackerWrapper *tracker = it->second;
315 
316  if (tracker->m_trackerType & EDGE_TRACKER) {
317  for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
318  double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
319  W_true[start_index + i] = wi;
320  m_weightedError[start_index + i] = wi * m_error[start_index + i];
321 
322  num += wi * vpMath::sqr(m_error[start_index + i]);
323  den += wi;
324 
325  for (unsigned int j = 0; j < m_L.getCols(); j++) {
326  m_L[start_index + i][j] *= wi;
327  }
328  }
329 
330  start_index += tracker->m_error_edge.getRows();
331  }
332 
333 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
334  if (tracker->m_trackerType & KLT_TRACKER) {
335  for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
336  double wi = tracker->m_w_klt[i] * factorKlt;
337  W_true[start_index + i] = wi;
338  m_weightedError[start_index + i] = wi * m_error[start_index + i];
339 
340  num += wi * vpMath::sqr(m_error[start_index + i]);
341  den += wi;
342 
343  for (unsigned int j = 0; j < m_L.getCols(); j++) {
344  m_L[start_index + i][j] *= wi;
345  }
346  }
347 
348  start_index += tracker->m_error_klt.getRows();
349  }
350 #endif
351 
352  if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
353  for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
354  double wi = tracker->m_w_depthNormal[i] * factorDepth;
355  m_weightedError[start_index + i] = wi * m_error[start_index + i];
356 
357  num += wi * vpMath::sqr(m_error[start_index + i]);
358  den += wi;
359 
360  for (unsigned int j = 0; j < m_L.getCols(); j++) {
361  m_L[start_index + i][j] *= wi;
362  }
363  }
364 
365  start_index += tracker->m_error_depthNormal.getRows();
366  }
367 
368  if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
369  for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
370  double wi = tracker->m_w_depthDense[i] * factorDepthDense;
371  m_weightedError[start_index + i] = wi * m_error[start_index + i];
372 
373  num += wi * vpMath::sqr(m_error[start_index + i]);
374  den += wi;
375 
376  for (unsigned int j = 0; j < m_L.getCols(); j++) {
377  m_L[start_index + i][j] *= wi;
378  }
379  }
380 
381  start_index += tracker->m_error_depthDense.getRows();
382  }
383  }
384 
385  normRes_1 = normRes;
386  normRes = sqrt(num / den);
387 
388  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
389 
390  cMo_prev = cMo;
391 
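      // The pose increment v computed by computeVVSPoseEstimation() is applied to
      // the reference pose cMo; the per-camera poses (and, with KLT, the ctTc0
      // transforms) are then updated below.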
392  cMo = vpExponentialMap::direct(v).inverse() * cMo;
393 
394 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
395  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
396  it != m_mapOfTrackers.end(); ++it) {
397  TrackerWrapper *tracker = it->second;
398 
399  vpHomogeneousMatrix c_curr_tTc_curr0 =
400  m_mapOfCameraTransformationMatrix[it->first] * cMo * tracker->c0Mo.inverse();
401  tracker->ctTc0 = c_curr_tTc_curr0;
402  }
403 #endif
404 
405  // Update cMo
406  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
407  it != m_mapOfTrackers.end(); ++it) {
408  TrackerWrapper *tracker = it->second;
409  tracker->cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo;
410  }
411  }
412 
413  iter++;
414  }
415 
416  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
417 
418  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
419  it != m_mapOfTrackers.end(); ++it) {
420  TrackerWrapper *tracker = it->second;
421 
422  if (tracker->m_trackerType & EDGE_TRACKER) {
423  tracker->updateMovingEdgeWeights();
424  }
425  }
426 }
427 
428 void vpMbGenericTracker::computeVVSInit()
429 {
430  throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
431 }
432 
433 void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
434 {
435  unsigned int nbFeatures = 0;
436 
437  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
438  it != m_mapOfTrackers.end(); ++it) {
439  TrackerWrapper *tracker = it->second;
440  tracker->computeVVSInit(mapOfImages[it->first]);
441 
442  nbFeatures += tracker->m_error.getRows();
443  }
444 
445  m_L.resize(nbFeatures, 6, false, false);
446  m_error.resize(nbFeatures, false);
447 
448  m_weightedError.resize(nbFeatures, false);
449  m_w.resize(nbFeatures, false);
450  m_w = 1;
451 }
452 
453 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu()
454 {
455  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
456  "computeVVSInteractionMatrixAndR"
457  "esidu() should not be called");
458 }
459 
460 void vpMbGenericTracker::computeVVSInteractionMatrixAndResidu(
461  std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
462  std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
463 {
464  unsigned int start_index = 0;
465 
466  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
467  it != m_mapOfTrackers.end(); ++it) {
468  TrackerWrapper *tracker = it->second;
469 
470  tracker->cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo;
471 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
472  vpHomogeneousMatrix c_curr_tTc_curr0 = m_mapOfCameraTransformationMatrix[it->first] * cMo * tracker->c0Mo.inverse();
473  tracker->ctTc0 = c_curr_tTc_curr0;
474 #endif
475 
476  tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
477 
478  m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
479  m_error.insert(start_index, tracker->m_error);
480 
481  start_index += tracker->m_error.getRows();
482  }
483 }
484 
485 void vpMbGenericTracker::computeVVSWeights()
486 {
487  unsigned int start_index = 0;
488 
489  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
490  it != m_mapOfTrackers.end(); ++it) {
491  TrackerWrapper *tracker = it->second;
492  tracker->computeVVSWeights();
493 
494  m_w.insert(start_index, tracker->m_w);
495  start_index += tracker->m_w.getRows();
496  }
497 }
498 
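// Display the model of the reference camera in the given grayscale image, using
// the provided pose and camera parameters.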
512 void vpMbGenericTracker::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo_,
513  const vpCameraParameters &cam_, const vpColor &col, const unsigned int thickness,
514  const bool displayFullModel)
515 {
516  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
517  if (it != m_mapOfTrackers.end()) {
518  TrackerWrapper *tracker = it->second;
519  tracker->display(I, cMo_, cam_, col, thickness, displayFullModel);
520  } else {
521  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
522  }
523 }
524 
538 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo_,
539  const vpCameraParameters &cam_, const vpColor &col, const unsigned int thickness,
540  const bool displayFullModel)
541 {
542  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
543  if (it != m_mapOfTrackers.end()) {
544  TrackerWrapper *tracker = it->second;
545  tracker->display(I, cMo_, cam_, col, thickness, displayFullModel);
546  } else {
547  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
548  }
549 }
550 
567 void vpMbGenericTracker::display(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
568  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
569  const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
570  const unsigned int thickness, const bool displayFullModel)
571 {
572  if (m_mapOfTrackers.size() == 2) {
573  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
574  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
575  ++it;
576 
577  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
578  } else {
579  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
580  << std::endl;
581  }
582 }
583 
600 void vpMbGenericTracker::display(const vpImage<vpRGBa> &I1, const vpImage<vpRGBa> &I2, const vpHomogeneousMatrix &c1Mo,
601  const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
602  const vpCameraParameters &cam2, const vpColor &color, const unsigned int thickness,
603  const bool displayFullModel)
604 {
605  if (m_mapOfTrackers.size() == 2) {
606  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
607  it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
608  ++it;
609 
610  it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
611  } else {
612  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
613  << std::endl;
614  }
615 }
616 
628 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
629  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
630  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
631  const vpColor &col, const unsigned int thickness, const bool displayFullModel)
632 {
633  // Display only for the given images
634  for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
635  it_img != mapOfImages.end(); ++it_img) {
636  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
637  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
638  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
639 
640  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
641  it_cam != mapOfCameraParameters.end()) {
642  TrackerWrapper *tracker = it_tracker->second;
643  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
644  } else {
645  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
646  }
647  }
648 }
649 
661 void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
662  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
663  const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
664  const vpColor &col, const unsigned int thickness, const bool displayFullModel)
665 {
666  // Display only for the given images
667  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
668  it_img != mapOfImages.end(); ++it_img) {
669  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
670  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
671  std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
672 
673  if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
674  it_cam != mapOfCameraParameters.end()) {
675  TrackerWrapper *tracker = it_tracker->second;
676  tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
677  } else {
678  std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
679  }
680  }
681 }
682 
688 std::vector<std::string> vpMbGenericTracker::getCameraNames() const
689 {
690  std::vector<std::string> cameraNames;
691 
692  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
693  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
694  cameraNames.push_back(it_tracker->first);
695  }
696 
697  return cameraNames;
698 }
699 
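// Get the camera parameters of the two cameras of a stereo configuration, following
// the alphabetic order of the camera names in the internal map.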
708 void vpMbGenericTracker::getCameraParameters(vpCameraParameters &cam1, vpCameraParameters &cam2) const
709 {
710  if (m_mapOfTrackers.size() == 2) {
711  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
712  it->second->getCameraParameters(cam1);
713  ++it;
714 
715  it->second->getCameraParameters(cam2);
716  } else {
717  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
718  << std::endl;
719  }
720 }
721 
727 void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
728 {
729  // Clear the input map
730  mapOfCameraParameters.clear();
731 
732  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
733  it != m_mapOfTrackers.end(); ++it) {
734  vpCameraParameters cam_;
735  it->second->getCameraParameters(cam_);
736  mapOfCameraParameters[it->first] = cam_;
737  }
738 }
739 
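// Return, for each camera, the bitmask of enabled feature types (see vpTrackerType).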
746 std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
747 {
748  std::map<std::string, int> trackingTypes;
749 
750  TrackerWrapper *tracker;
751  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
752  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
753  tracker = it_tracker->second;
754  trackingTypes[it_tracker->first] = tracker->getTrackerType();
755  }
756 
757  return trackingTypes;
758 }
759 
768 void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
769 {
770  if (m_mapOfTrackers.size() == 2) {
771  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
772  clippingFlag1 = it->second->getClipping();
773  ++it;
774 
775  clippingFlag2 = it->second->getClipping();
776  } else {
777  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
778  << std::endl;
779  }
780 }
781 
787 void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
788 {
789  mapOfClippingFlags.clear();
790 
791  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
792  it != m_mapOfTrackers.end(); ++it) {
793  TrackerWrapper *tracker = it->second;
794  mapOfClippingFlags[it->first] = tracker->getClipping();
795  }
796 }
797 
803 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces()
804 {
805  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
806  if (it != m_mapOfTrackers.end()) {
807  return it->second->getFaces();
808  }
809 
810  std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
811  return faces;
812 }
813 
819 vpMbHiddenFaces<vpMbtPolygon> &vpMbGenericTracker::getFaces(const std::string &cameraName)
820 {
821  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
822  if (it != m_mapOfTrackers.end()) {
823  return it->second->getFaces();
824  }
825 
826  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
827  return faces;
828 }
829 
830 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
831 
834 std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
835 {
836  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
837  if (it != m_mapOfTrackers.end()) {
838  TrackerWrapper *tracker = it->second;
839  return tracker->getFeaturesCircle();
840  } else {
841  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
842  m_referenceCameraName.c_str());
843  }
844 }
845 
849 std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
850 {
851  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
852  if (it != m_mapOfTrackers.end()) {
853  TrackerWrapper *tracker = it->second;
854  return tracker->getFeaturesKltCylinder();
855  } else {
856  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
857  m_referenceCameraName.c_str());
858  }
859 }
860 
864 std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
865 {
866  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
867  if (it != m_mapOfTrackers.end()) {
868  TrackerWrapper *tracker = it->second;
869  return tracker->getFeaturesKlt();
870  } else {
871  throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
872  m_referenceCameraName.c_str());
873  }
874 }
875 #endif
876 
887 
888 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
889 
897 std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
898 {
899  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
900  if (it != m_mapOfTrackers.end()) {
901  TrackerWrapper *tracker = it->second;
902  return tracker->getKltImagePoints();
903  } else {
904  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
905  }
906 
907  return std::vector<vpImagePoint>();
908 }
909 
918 std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
919 {
920  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
921  if (it != m_mapOfTrackers.end()) {
922  TrackerWrapper *tracker = it->second;
923  return tracker->getKltImagePointsWithId();
924  } else {
925  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
926  }
927 
928  return std::map<int, vpImagePoint>();
929 }
930 
936 unsigned int vpMbGenericTracker::getKltMaskBorder() const
937 {
938  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
939  if (it != m_mapOfTrackers.end()) {
940  TrackerWrapper *tracker = it->second;
941  return tracker->getKltMaskBorder();
942  } else {
943  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
944  }
945 
946  return 0;
947 }
948 
954 int vpMbGenericTracker::getKltNbPoints() const
955 {
956  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
957  if (it != m_mapOfTrackers.end()) {
958  TrackerWrapper *tracker = it->second;
959  return tracker->getKltNbPoints();
960  } else {
961  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
962  }
963 
964  return 0;
965 }
966 
972 vpKltOpencv vpMbGenericTracker::getKltOpencv() const
973 {
974  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
975 
976  if (it_tracker != m_mapOfTrackers.end()) {
977  TrackerWrapper *tracker;
978  tracker = it_tracker->second;
979  return tracker->getKltOpencv();
980  } else {
981  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
982  }
983 
984  return vpKltOpencv();
985 }
986 
995 void vpMbGenericTracker::getKltOpencv(vpKltOpencv &klt1, vpKltOpencv &klt2) const
996 {
997  if (m_mapOfTrackers.size() == 2) {
998  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
999  klt1 = it->second->getKltOpencv();
1000  ++it;
1001 
1002  klt2 = it->second->getKltOpencv();
1003  } else {
1004  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1005  << std::endl;
1006  }
1007 }
1008 
1014 void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1015 {
1016  mapOfKlts.clear();
1017 
1018  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1019  it != m_mapOfTrackers.end(); ++it) {
1020  TrackerWrapper *tracker = it->second;
1021  mapOfKlts[it->first] = tracker->getKltOpencv();
1022  }
1023 }
1024 
1025 #if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
1026 
1031 std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1032 {
1033  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1034  if (it != m_mapOfTrackers.end()) {
1035  TrackerWrapper *tracker = it->second;
1036  return tracker->getKltPoints();
1037  } else {
1038  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1039  }
1040 
1041  return std::vector<cv::Point2f>();
1042 }
1043 #endif
1044 
1052 #endif
1053 
1068 void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1069  const unsigned int level) const
1070 {
1071  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1072  if (it != m_mapOfTrackers.end()) {
1073  it->second->getLcircle(circlesList, level);
1074  } else {
1075  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1076  }
1077 }
1078 
1093 void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1094  const unsigned int level) const
1095 {
1096  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1097  if (it != m_mapOfTrackers.end()) {
1098  it->second->getLcylinder(cylindersList, level);
1099  } else {
1100  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1101  }
1102 }
1103 
1118 void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1119  const unsigned int level) const
1120 {
1121  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1122  if (it != m_mapOfTrackers.end()) {
1123  it->second->getLline(linesList, level);
1124  } else {
1125  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1126  }
1127 }
1128 
1134 vpMe vpMbGenericTracker::getMovingEdge() const
1135 {
1136  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1137 
1138  if (it != m_mapOfTrackers.end()) {
1139  return it->second->getMovingEdge();
1140  } else {
1141  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1142  }
1143 
1144  return vpMe();
1145 }
1146 
1155 void vpMbGenericTracker::getMovingEdge(vpMe &me1, vpMe &me2) const
1156 {
1157  if (m_mapOfTrackers.size() == 2) {
1158  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1159  it->second->getMovingEdge(me1);
1160  ++it;
1161 
1162  it->second->getMovingEdge(me2);
1163  } else {
1164  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1165  << std::endl;
1166  }
1167 }
1168 
1169 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
1170 
1171 #endif
1172 
1178 void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1179 {
1180  mapOfMovingEdges.clear();
1181 
1182  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1183  it != m_mapOfTrackers.end(); ++it) {
1184  TrackerWrapper *tracker = it->second;
1185  mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1186  }
1187 }
1188 
1204 unsigned int vpMbGenericTracker::getNbPoints(const unsigned int level) const
1205 {
1206  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1207 
1208  unsigned int nbGoodPoints = 0;
1209  if (it != m_mapOfTrackers.end()) {
1210 
1211  nbGoodPoints = it->second->getNbPoints(level);
1212  } else {
1213  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1214  }
1215 
1216  return nbGoodPoints;
1217 }
1218 
1233 void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, const unsigned int level) const
1234 {
1235  mapOfNbPoints.clear();
1236 
1237  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1238  it != m_mapOfTrackers.end(); ++it) {
1239  TrackerWrapper *tracker = it->second;
1240  mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1241  }
1242 }
1243 
1249 unsigned int vpMbGenericTracker::getNbPolygon() const
1250 {
1251  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1252  if (it != m_mapOfTrackers.end()) {
1253  return it->second->getNbPolygon();
1254  }
1255 
1256  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1257  return 0;
1258 }
1259 
1265 void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1266 {
1267  mapOfNbPolygons.clear();
1268 
1269  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1270  it != m_mapOfTrackers.end(); ++it) {
1271  TrackerWrapper *tracker = it->second;
1272  mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1273  }
1274 }
1275 
1286 vpMbtPolygon *vpMbGenericTracker::getPolygon(const unsigned int index)
1287 {
1288  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1289  if (it != m_mapOfTrackers.end()) {
1290  return it->second->getPolygon(index);
1291  }
1292 
1293  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1294  return NULL;
1295 }
1296 
1308 vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, const unsigned int index)
1309 {
1310  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1311  if (it != m_mapOfTrackers.end()) {
1312  return it->second->getPolygon(index);
1313  }
1314 
1315  std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1316  return NULL;
1317 }
1318 
1334 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1335 vpMbGenericTracker::getPolygonFaces(const bool orderPolygons, const bool useVisibility, const bool clipPolygon)
1336 {
1337  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1338 
1339  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1340  if (it != m_mapOfTrackers.end()) {
1341  TrackerWrapper *tracker = it->second;
1342  polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1343  } else {
1344  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1345  }
1346 
1347  return polygonFaces;
1348 }
1349 
1367 void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1368  std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1369  const bool orderPolygons, const bool useVisibility, const bool clipPolygon)
1370 {
1371  mapOfPolygons.clear();
1372  mapOfPoints.clear();
1373 
1374  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1375  it != m_mapOfTrackers.end(); ++it) {
1376  TrackerWrapper *tracker = it->second;
1377  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1378  tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1379 
1380  mapOfPolygons[it->first] = polygonFaces.first;
1381  mapOfPoints[it->first] = polygonFaces.second;
1382  }
1383 }
1384 
1393 void vpMbGenericTracker::getPose(vpHomogeneousMatrix &c1Mo, vpHomogeneousMatrix &c2Mo) const
1394 {
1395  if (m_mapOfTrackers.size() == 2) {
1396  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1397  it->second->getPose(c1Mo);
1398  ++it;
1399 
1400  it->second->getPose(c2Mo);
1401  } else {
1402  std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1403  << std::endl;
1404  }
1405 }
1406 
1412 void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1413 {
1414  // Clear the map
1415  mapOfCameraPoses.clear();
1416 
1417  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1418  it != m_mapOfTrackers.end(); ++it) {
1419  TrackerWrapper *tracker = it->second;
1420  tracker->getPose(mapOfCameraPoses[it->first]);
1421  }
1422 }
1423 
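// (Re-)initialize each camera tracker from the current reference pose cMo,
// propagated to every camera through its camera transformation matrix.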
1424 void vpMbGenericTracker::init(const vpImage<unsigned char> &I)
1425 {
1426  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1427  it != m_mapOfTrackers.end(); ++it) {
1428  TrackerWrapper *tracker = it->second;
1429  tracker->cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo;
1430  tracker->init(I);
1431  }
1432 }
1433 
1434 void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
1435  const double /*radius*/, const int /*idFace*/, const std::string & /*name*/)
1436 {
1437  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
1438 }
1439 
1440 #ifdef VISP_HAVE_MODULE_GUI
1441 
1478 void vpMbGenericTracker::initClick(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1479  const std::string &initFile1, const std::string &initFile2, const bool displayHelp)
1480 {
1481  if (m_mapOfTrackers.size() == 2) {
1482  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1483  TrackerWrapper *tracker = it->second;
1484  tracker->initClick(I1, initFile1, displayHelp);
1485 
1486  ++it;
1487 
1488  tracker = it->second;
1489  tracker->initClick(I2, initFile2, displayHelp);
1490 
1491  it = m_mapOfTrackers.find(m_referenceCameraName);
1492  if (it != m_mapOfTrackers.end()) {
1493  tracker = it->second;
1494 
1495  // Set the reference cMo
1496  tracker->getPose(cMo);
1497  }
1498  } else {
1499  throw vpException(vpTrackingException::initializationError,
1500  "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1501  }
1502 }
1503 
1541 void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
1542  const std::map<std::string, std::string> &mapOfInitFiles, const bool displayHelp)
1543 {
1544  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1545  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
1546  mapOfImages.find(m_referenceCameraName);
1547  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
1548 
1549  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
1550  TrackerWrapper *tracker = it_tracker->second;
1551  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
1552  tracker->getPose(cMo);
1553  } else {
1554  throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
1555  }
1556 
1557  // Vector of missing initFile for cameras
1558  std::vector<std::string> vectorOfMissingCameraPoses;
1559 
1560  // Set pose for the specified cameras
1561  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
1562  if (it_tracker->first != m_referenceCameraName) {
1563  it_img = mapOfImages.find(it_tracker->first);
1564  it_initFile = mapOfInitFiles.find(it_tracker->first);
1565 
1566  if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
1567  // InitClick for the current camera
1568  TrackerWrapper *tracker = it_tracker->second;
1569  tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
1570  } else {
1571  vectorOfMissingCameraPoses.push_back(it_tracker->first);
1572  }
1573  }
1574  }
1575 
1576  // Init for cameras that do not have an initFile
1577  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
1578  it != vectorOfMissingCameraPoses.end(); ++it) {
1579  it_img = mapOfImages.find(*it);
1580  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
1581  m_mapOfCameraTransformationMatrix.find(*it);
1582 
1583  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
1584  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
1585  m_mapOfTrackers[*it]->cMo = cCurrentMo;
1586  m_mapOfTrackers[*it]->init(*it_img->second);
1587  } else {
1588  throw vpException(vpTrackingException::initializationError,
1589  "Missing image or missing camera transformation "
1590  "matrix! Cannot set the pose for camera: %s!",
1591  it->c_str());
1592  }
1593  }
1594 }
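// Illustrative usage sketch (not part of the original sources) for a two-camera
// setup created with the default names "Camera1"/"Camera2"; the image variables
// and .init file names below are hypothetical.
//
//   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
//   mapOfImages["Camera1"] = &I1;
//   mapOfImages["Camera2"] = &I2;
//   std::map<std::string, std::string> mapOfInitFiles;
//   mapOfInitFiles["Camera1"] = "cube_left.init";
//   mapOfInitFiles["Camera2"] = "cube_right.init";
//   tracker.initClick(mapOfImages, mapOfInitFiles, true);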
1595 #endif
1596 
1597 void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
1598  const int /*idFace*/, const std::string & /*name*/)
1599 {
1600  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
1601 }
1602 
1604 {
1605  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
1606 }
1607 
1609 {
1610  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
1611 }
1612 
1642 void vpMbGenericTracker::initFromPoints(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1643  const std::string &initFile1, const std::string &initFile2)
1644 {
1645  if (m_mapOfTrackers.size() == 2) {
1646  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1647  TrackerWrapper *tracker = it->second;
1648  tracker->initFromPoints(I1, initFile1);
1649 
1650  ++it;
1651 
1652  tracker = it->second;
1653  tracker->initFromPoints(I2, initFile2);
1654 
1655  it = m_mapOfTrackers.find(m_referenceCameraName);
1656  if (it != m_mapOfTrackers.end()) {
1657  tracker = it->second;
1658 
1659  // Set the reference cMo
1660  tracker->getPose(cMo);
1661 
1662  // Set the reference camera parameters
1663  tracker->getCameraParameters(cam);
1664  }
1665  } else {
1666  throw vpException(vpTrackingException::initializationError,
1667  "Cannot initFromPoints()! Require two cameras but "
1668  "there are %d cameras!",
1669  m_mapOfTrackers.size());
1670  }
1671 }
1672 
1673 void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
1674  const std::map<std::string, std::string> &mapOfInitPoints)
1675 {
1676  // Set the reference cMo
1677  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1678  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
1679  mapOfImages.find(m_referenceCameraName);
1680  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
1681 
1682  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
1683  TrackerWrapper *tracker = it_tracker->second;
1684  tracker->initFromPoints(*it_img->second, it_initPoints->second);
1685  tracker->getPose(cMo);
1686  } else {
1687  throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
1688  }
1689 
1690  // Vector of missing initPoints for cameras
1691  std::vector<std::string> vectorOfMissingCameraPoints;
1692 
1693  // Set pose for the specified cameras
1694  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
1695  it_img = mapOfImages.find(it_tracker->first);
1696  it_initPoints = mapOfInitPoints.find(it_tracker->first);
1697 
1698  if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
1699  // Set pose
1700  it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
1701  } else {
1702  vectorOfMissingCameraPoints.push_back(it_tracker->first);
1703  }
1704  }
1705 
1706  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
1707  it != vectorOfMissingCameraPoints.end(); ++it) {
1708  it_img = mapOfImages.find(*it);
1709  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
1710  m_mapOfCameraTransformationMatrix.find(*it);
1711 
1712  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
1713  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
1714  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
1715  } else {
1716  throw vpException(vpTrackingException::initializationError,
1717  "Missing image or missing camera transformation "
1718  "matrix! Cannot init the pose for camera: %s!",
1719  it->c_str());
1720  }
1721  }
1722 }
1723 
1735 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1736  const std::string &initFile1, const std::string &initFile2)
1737 {
1738  if (m_mapOfTrackers.size() == 2) {
1739  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1740  TrackerWrapper *tracker = it->second;
1741  tracker->initFromPose(I1, initFile1);
1742 
1743  ++it;
1744 
1745  tracker = it->second;
1746  tracker->initFromPose(I2, initFile2);
1747 
1748  it = m_mapOfTrackers.find(m_referenceCameraName);
1749  if (it != m_mapOfTrackers.end()) {
1750  tracker = it->second;
1751 
1752  // Set the reference cMo
1753  tracker->getPose(cMo);
1754 
1755  // Set the reference camera parameters
1756  tracker->getCameraParameters(cam);
1757  }
1758  } else {
1759  throw vpException(vpTrackingException::initializationError,
1760  "Cannot initFromPose()! Require two cameras but there "
1761  "are %d cameras!",
1762  m_mapOfTrackers.size());
1763  }
1764 }
1765 
1780 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
1781  const std::map<std::string, std::string> &mapOfInitPoses)
1782 {
1783  // Set the reference cMo
1784  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1785  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
1786  mapOfImages.find(m_referenceCameraName);
1787  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
1788 
1789  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
1790  TrackerWrapper *tracker = it_tracker->second;
1791  tracker->initFromPose(*it_img->second, it_initPose->second);
1792  tracker->getPose(cMo);
1793  } else {
1794  throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
1795  }
1796 
1797  // Vector of missing pose matrices for cameras
1798  std::vector<std::string> vectorOfMissingCameraPoses;
1799 
1800  // Set pose for the specified cameras
1801  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
1802  it_img = mapOfImages.find(it_tracker->first);
1803  it_initPose = mapOfInitPoses.find(it_tracker->first);
1804 
1805  if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
1806  // Set pose
1807  it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
1808  } else {
1809  vectorOfMissingCameraPoses.push_back(it_tracker->first);
1810  }
1811  }
1812 
1813  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
1814  it != vectorOfMissingCameraPoses.end(); ++it) {
1815  it_img = mapOfImages.find(*it);
1816  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
1817  m_mapOfCameraTransformationMatrix.find(*it);
1818 
1819  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
1820  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
1821  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
1822  } else {
1823  throw vpException(vpTrackingException::initializationError,
1824  "Missing image or missing camera transformation "
1825  "matrix! Cannot init the pose for camera: %s!",
1826  it->c_str());
1827  }
1828  }
1829 }
1830 
1841 void vpMbGenericTracker::initFromPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
1842  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
1843 {
1844  if (m_mapOfTrackers.size() == 2) {
1845  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1846  it->second->initFromPose(I1, c1Mo);
1847 
1848  ++it;
1849 
1850  it->second->initFromPose(I2, c2Mo);
1851 
1852  this->cMo = c1Mo;
1853  } else {
1854  throw vpException(vpTrackingException::fatalError,
1855  "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
1856  }
1857 }
1858 
1872 void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
1873  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
1874 {
1875  // Set the reference cMo
1876  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1877  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
1878  mapOfImages.find(m_referenceCameraName);
1879  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
1880 
1881  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
1882  TrackerWrapper *tracker = it_tracker->second;
1883  tracker->initFromPose(*it_img->second, it_camPose->second);
1884  tracker->getPose(cMo);
1885  } else {
1886  throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
1887  }
1888 
1889  // Vector of missing pose matrices for cameras
1890  std::vector<std::string> vectorOfMissingCameraPoses;
1891 
1892  // Set pose for the specified cameras
1893  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
1894  it_img = mapOfImages.find(it_tracker->first);
1895  it_camPose = mapOfCameraPoses.find(it_tracker->first);
1896 
1897  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
1898  // Set pose
1899  it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
1900  } else {
1901  vectorOfMissingCameraPoses.push_back(it_tracker->first);
1902  }
1903  }
1904 
1905  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
1906  it != vectorOfMissingCameraPoses.end(); ++it) {
1907  it_img = mapOfImages.find(*it);
1908  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
1909  m_mapOfCameraTransformationMatrix.find(*it);
1910 
1911  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
1912  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
1913  m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
1914  } else {
1915  throw vpException(vpTrackingException::initializationError,
1916  "Missing image or missing camera transformation "
1917  "matrix! Cannot set the pose for camera: %s!",
1918  it->c_str());
1919  }
1920  }
1921 }
1922 
1938 void vpMbGenericTracker::loadConfigFile(const std::string &configFile)
1939 {
1940  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1941  it != m_mapOfTrackers.end(); ++it) {
1942  TrackerWrapper *tracker = it->second;
1943  tracker->loadConfigFile(configFile);
1944  }
1945 
1946  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
1947  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
1948  }
1949 
1950  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(this->cam);
1951  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
1952  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
1953  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
1954 }
1955 
1971 void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2)
1972 {
1973  if (m_mapOfTrackers.size() != 2) {
1974  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
1975  }
1976 
1977  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
1978  TrackerWrapper *tracker = it_tracker->second;
1979  tracker->loadConfigFile(configFile1);
1980 
1981  ++it_tracker;
1982  tracker = it_tracker->second;
1983  tracker->loadConfigFile(configFile2);
1984 
1985  if (m_mapOfTrackers.find(m_referenceCameraName) == m_mapOfTrackers.end()) {
1986  throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
1987  }
1988 
1989  m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(this->cam);
1990  this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
1991  this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
1992  this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
1993 }
1994 
2009 void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles)
2010 {
2011  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2012  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2013  TrackerWrapper *tracker = it_tracker->second;
2014 
2015  std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
2016  if (it_config != mapOfConfigFiles.end()) {
2017  tracker->loadConfigFile(it_config->second);
2018  } else {
2019  throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
2020  it_tracker->first.c_str());
2021  }
2022  }
2023 
2024  // Set the reference camera parameters
2025  std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
2026  if (it != m_mapOfTrackers.end()) {
2027  TrackerWrapper *tracker = it->second;
2028  tracker->getCameraParameters(cam);
2029 
2030  // Set clipping
2031  this->clippingFlag = tracker->getClipping();
2032  this->angleAppears = tracker->getAngleAppear();
2033  this->angleDisappears = tracker->getAngleDisappear();
2034  } else {
2035  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
2036  m_referenceCameraName.c_str());
2037  }
2038 }
2039 
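// Load the same CAO or VRML model file in every camera tracker.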
2068 void vpMbGenericTracker::loadModel(const std::string &modelFile, const bool verbose)
2069 {
2070  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2071  it != m_mapOfTrackers.end(); ++it) {
2072  TrackerWrapper *tracker = it->second;
2073  tracker->loadModel(modelFile, verbose);
2074  }
2075 }
2076 
2106 void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, const bool verbose)
2107 {
2108  if (m_mapOfTrackers.size() != 2) {
2109  throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
2110  }
2111 
2112  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2113  TrackerWrapper *tracker = it_tracker->second;
2114  tracker->loadModel(modelFile1, verbose);
2115 
2116  ++it_tracker;
2117  tracker = it_tracker->second;
2118  tracker->loadModel(modelFile2, verbose);
2119 }
2120 
2148 void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, const bool verbose)
2149 {
2150  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2151  it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2152  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
2153 
2154  if (it_model != mapOfModelFiles.end()) {
2155  TrackerWrapper *tracker = it_tracker->second;
2156  tracker->loadModel(it_model->second, verbose);
2157  } else {
2158  throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
2159  it_tracker->first.c_str());
2160  }
2161  }
2162 }
2163 
2164 #ifdef VISP_HAVE_PCL
2165 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2166  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
2167 {
2168  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2169  it != m_mapOfTrackers.end(); ++it) {
2170  TrackerWrapper *tracker = it->second;
2171  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
2172  }
2173 }
2174 #endif
2175 
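// Pre-tracking step without PCL: each camera tracker receives its image and, when
// available, its point cloud given as a vector of 3D points plus the width/height
// of the organized cloud.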
2176 void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2177  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
2178  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
2179  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
2180 {
2181  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2182  it != m_mapOfTrackers.end(); ++it) {
2183  TrackerWrapper *tracker = it->second;
2184  tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
2185  mapOfPointCloudHeights[it->first]);
2186  }
2187 }
2188 
2198 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
2199  const vpHomogeneousMatrix &cMo_, const bool verbose)
2200 {
2201  if (m_mapOfTrackers.size() != 1) {
2202  throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
2203  m_mapOfTrackers.size());
2204  }
2205 
2206  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2207  if (it_tracker != m_mapOfTrackers.end()) {
2208  TrackerWrapper *tracker = it_tracker->second;
2209  tracker->reInitModel(I, cad_name, cMo_, verbose);
2210 
2211  // Set reference pose
2212  tracker->getPose(cMo);
2213  } else {
2214  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
2215  }
2216 
2217  modelInitialised = true;
2218 }
2219 
2235 void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
2236                                      const std::string &cad_name1, const std::string &cad_name2,
2237  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
2238  const bool verbose)
2239 {
2240  if (m_mapOfTrackers.size() == 2) {
2241  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
2242 
2243  it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose);
2244 
2245  ++it_tracker;
2246 
2247  it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose);
2248 
2249  it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2250  if (it_tracker != m_mapOfTrackers.end()) {
2251  // Set reference pose
2252  it_tracker->second->getPose(cMo);
2253  }
2254  } else {
2255  throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
2256  }
2257 
2258  modelInitialised = true;
2259 }
2260 
2271 void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2272  const std::map<std::string, std::string> &mapOfModelFiles,
2273  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
2274  const bool verbose)
2275 {
2276  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2277  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2278  mapOfImages.find(m_referenceCameraName);
2279  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
2280  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2281 
2282  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
2283  it_camPose != mapOfCameraPoses.end()) {
2284  TrackerWrapper *tracker = it_tracker->second;
2285  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
2286 
2287  // Set reference pose
2288  tracker->getPose(cMo);
2289  } else {
2290  throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
2291  }
2292 
2293  std::vector<std::string> vectorOfMissingCameras;
2294  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2295  if (it_tracker->first != m_referenceCameraName) {
2296  it_img = mapOfImages.find(it_tracker->first);
2297  it_model = mapOfModelFiles.find(it_tracker->first);
2298  it_camPose = mapOfCameraPoses.find(it_tracker->first);
2299 
2300  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
2301  TrackerWrapper *tracker = it_tracker->second;
2302  tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
2303  } else {
2304  vectorOfMissingCameras.push_back(it_tracker->first);
2305  }
2306  }
2307  }
2308 
2309  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
2310  ++it) {
2311  it_img = mapOfImages.find(*it);
2312  it_model = mapOfModelFiles.find(*it);
2313  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2314        m_mapOfCameraTransformationMatrix.find(*it);
2315 
2316  if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
2317  it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2318  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
2319  m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
2320  }
2321  }
2322 
2323  modelInitialised = true;
2324 }
2325 
2330 void vpMbGenericTracker::resetTracker()
2331 {
2332  cMo.eye();
2333 
2334  useScanLine = false;
2335 
2336 #ifdef VISP_HAVE_OGRE
2337  useOgre = false;
2338 #endif
2339 
2340  m_computeInteraction = true;
2341  m_lambda = 1.0;
2342 
2343  angleAppears = vpMath::rad(89);
2344  angleDisappears = vpMath::rad(89);
2345  clippingFlag = vpPolygon3D::NO_CLIPPING;
2346  distNearClip = 0.001;
2347  distFarClip = 100;
2348 
2350  m_maxIter = 30;
2351  m_stopCriteriaEpsilon = 1e-8;
2352  m_initialMu = 0.01;
2353 
2354  // Only for Edge
2355  m_percentageGdPt = 0.4;
2356 
2357  // Only for KLT
2358  m_thresholdOutlier = 0.5;
2359 
2360  // Reset default ponderation between each feature type
2361  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;
2362 
2363 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
2364  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
2365 #endif
2366 
2367  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
2368  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
2369 
2370  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2371  it != m_mapOfTrackers.end(); ++it) {
2372  TrackerWrapper *tracker = it->second;
2373  tracker->resetTracker();
2374  }
2375 }
2376 
2386 void vpMbGenericTracker::setAngleAppear(const double &a)
2387 {
2388  vpMbTracker::setAngleAppear(a);
2389 
2390  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2391  it != m_mapOfTrackers.end(); ++it) {
2392  TrackerWrapper *tracker = it->second;
2393  tracker->setAngleAppear(a);
2394  }
2395 }
2396 
2409 void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
2410 {
2411  if (m_mapOfTrackers.size() == 2) {
2412  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2413  it->second->setAngleAppear(a1);
2414 
2415  ++it;
2416  it->second->setAngleAppear(a2);
2417 
2418  it = m_mapOfTrackers.find(m_referenceCameraName);
2419  if (it != m_mapOfTrackers.end()) {
2420  angleAppears = it->second->getAngleAppear();
2421  } else {
2422  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
2423  }
2424  } else {
2425  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
2426  m_mapOfTrackers.size());
2427  }
2428 }
2429 
2439 void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
2440 {
2441  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
2442  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
2443 
2444  if (it_tracker != m_mapOfTrackers.end()) {
2445  TrackerWrapper *tracker = it_tracker->second;
2446  tracker->setAngleAppear(it->second);
2447 
2448  if (it->first == m_referenceCameraName) {
2449  angleAppears = it->second;
2450  }
2451  }
2452  }
2453 }
2454 
2464 void vpMbGenericTracker::setAngleDisappear(const double &a)
2465 {
2466  vpMbTracker::setAngleDisappear(a);
2467 
2468  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2469  it != m_mapOfTrackers.end(); ++it) {
2470  TrackerWrapper *tracker = it->second;
2471  tracker->setAngleDisappear(a);
2472  }
2473 }
2474 
2487 void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
2488 {
2489  if (m_mapOfTrackers.size() == 2) {
2490  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2491  it->second->setAngleDisappear(a1);
2492 
2493  ++it;
2494  it->second->setAngleDisappear(a2);
2495 
2496  it = m_mapOfTrackers.find(m_referenceCameraName);
2497  if (it != m_mapOfTrackers.end()) {
2498  angleDisappears = it->second->getAngleDisappear();
2499  } else {
2500  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
2501  }
2502  } else {
2503  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
2504  m_mapOfTrackers.size());
2505  }
2506 }
2507 
2517 void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
2518 {
2519  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
2520  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
2521 
2522  if (it_tracker != m_mapOfTrackers.end()) {
2523  TrackerWrapper *tracker = it_tracker->second;
2524  tracker->setAngleDisappear(it->second);
2525 
2526  if (it->first == m_referenceCameraName) {
2527  angleDisappears = it->second;
2528  }
2529  }
2530  }
2531 }
2532 
2538 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera)
2539 {
2540  vpMbTracker::setCameraParameters(camera);
2541 
2542  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2543  it != m_mapOfTrackers.end(); ++it) {
2544  TrackerWrapper *tracker = it->second;
2545  tracker->setCameraParameters(camera);
2546  }
2547 }
2548 
2557 void vpMbGenericTracker::setCameraParameters(const vpCameraParameters &camera1, const vpCameraParameters &camera2)
2558 {
2559  if (m_mapOfTrackers.size() == 2) {
2560  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2561  it->second->setCameraParameters(camera1);
2562 
2563  ++it;
2564  it->second->setCameraParameters(camera2);
2565 
2566  it = m_mapOfTrackers.find(m_referenceCameraName);
2567  if (it != m_mapOfTrackers.end()) {
2568  it->second->getCameraParameters(cam);
2569  } else {
2570  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
2571  }
2572  } else {
2573  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
2574  m_mapOfTrackers.size());
2575  }
2576 }
2577 
2586 void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
2587 {
2588  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
2589  it != mapOfCameraParameters.end(); ++it) {
2590  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
2591 
2592  if (it_tracker != m_mapOfTrackers.end()) {
2593  TrackerWrapper *tracker = it_tracker->second;
2594  tracker->setCameraParameters(it->second);
2595 
2596  if (it->first == m_referenceCameraName) {
2597  cam = it->second;
2598  }
2599  }
2600  }
2601 }
2602 
2611 void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
2612  const vpHomogeneousMatrix &cameraTransformationMatrix)
2613 {
2614  std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
2615 
2616  if (it != m_mapOfCameraTransformationMatrix.end()) {
2617  it->second = cameraTransformationMatrix;
2618  } else {
2619  throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
2620  }
2621 }
2622 
2630 void vpMbGenericTracker::setCameraTransformationMatrix(
2631     const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
2632 {
2633  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
2634  it != mapOfTransformationMatrix.end(); ++it) {
2635  std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
2636  m_mapOfCameraTransformationMatrix.find(it->first);
2637 
2638  if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2639  it_camTrans->second = it->second;
2640  }
2641  }
2642 }
2643 
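/*
  Usage sketch: declaring where each camera stands with respect to the
  reference camera (a sketch only; "Camera1" is assumed to be the reference
  camera, "tracker" an already constructed vpMbGenericTracker, and c2Mc1
  would normally come from an extrinsic calibration step):

    vpHomogeneousMatrix c2Mc1; // transformation from the Camera1 frame to the Camera2 frame
    std::map<std::string, vpHomogeneousMatrix> mapOfCameraTransformations;
    mapOfCameraTransformations["Camera1"] = vpHomogeneousMatrix(); // identity for the reference camera
    mapOfCameraTransformations["Camera2"] = c2Mc1;
    tracker.setCameraTransformationMatrix(mapOfCameraTransformations);
*/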
2653 void vpMbGenericTracker::setClipping(const unsigned int &flags)
2654 {
2655  vpMbTracker::setClipping(flags);
2656 
2657  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2658  it != m_mapOfTrackers.end(); ++it) {
2659  TrackerWrapper *tracker = it->second;
2660  tracker->setClipping(flags);
2661  }
2662 }
2663 
2674 void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
2675 {
2676  if (m_mapOfTrackers.size() == 2) {
2677  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2678  it->second->setClipping(flags1);
2679 
2680  ++it;
2681  it->second->setClipping(flags2);
2682 
2683  it = m_mapOfTrackers.find(m_referenceCameraName);
2684  if (it != m_mapOfTrackers.end()) {
2685  clippingFlag = it->second->getClipping();
2686  } else {
2687  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
2688  }
2689  } else {
2690  std::stringstream ss;
2691  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
2692   throw vpException(vpTrackingException::fatalError, ss.str().c_str());
2693  }
2694 }
2695 
2705 void vpMbGenericTracker::setDepthDenseFilteringMaxDistance(const double maxDistance)
2706 {
2707  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2708  it != m_mapOfTrackers.end(); ++it) {
2709  TrackerWrapper *tracker = it->second;
2710  tracker->setDepthDenseFilteringMaxDistance(maxDistance);
2711  }
2712 }
2713 
2722 void vpMbGenericTracker::setDepthDenseFilteringMethod(const int method)
2723 {
2724  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2725  it != m_mapOfTrackers.end(); ++it) {
2726  TrackerWrapper *tracker = it->second;
2727  tracker->setDepthDenseFilteringMethod(method);
2728  }
2729 }
2730 
2740 void vpMbGenericTracker::setDepthDenseFilteringMinDistance(const double minDistance)
2741 {
2742  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2743  it != m_mapOfTrackers.end(); ++it) {
2744  TrackerWrapper *tracker = it->second;
2745  tracker->setDepthDenseFilteringMinDistance(minDistance);
2746  }
2747 }
2748 
2758 void vpMbGenericTracker::setDepthDenseFilteringOccupancyRatio(const double occupancyRatio)
2759 {
2760  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2761  it != m_mapOfTrackers.end(); ++it) {
2762  TrackerWrapper *tracker = it->second;
2763  tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
2764  }
2765 }
2766 
2775 void vpMbGenericTracker::setDepthDenseSamplingStep(const unsigned int stepX, const unsigned int stepY)
2776 {
2777  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2778  it != m_mapOfTrackers.end(); ++it) {
2779  TrackerWrapper *tracker = it->second;
2780  tracker->setDepthDenseSamplingStep(stepX, stepY);
2781  }
2782 }
2783 
2791 void vpMbGenericTracker::setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
2792 {
2793  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2794  it != m_mapOfTrackers.end(); ++it) {
2795  TrackerWrapper *tracker = it->second;
2796  tracker->setDepthNormalFaceCentroidMethod(method);
2797  }
2798 }
2799 
2808 void vpMbGenericTracker::setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
2809 {
2810  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2811  it != m_mapOfTrackers.end(); ++it) {
2812  TrackerWrapper *tracker = it->second;
2813  tracker->setDepthNormalFeatureEstimationMethod(method);
2814  }
2815 }
2816 
2824 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationMethod(const int method)
2825 {
2826  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2827  it != m_mapOfTrackers.end(); ++it) {
2828  TrackerWrapper *tracker = it->second;
2829  tracker->setDepthNormalPclPlaneEstimationMethod(method);
2830  }
2831 }
2832 
2840 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacMaxIter(const int maxIter)
2841 {
2842  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2843  it != m_mapOfTrackers.end(); ++it) {
2844  TrackerWrapper *tracker = it->second;
2845  tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
2846  }
2847 }
2848 
2856 void vpMbGenericTracker::setDepthNormalPclPlaneEstimationRansacThreshold(const double thresold)
2857 {
2858  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2859  it != m_mapOfTrackers.end(); ++it) {
2860  TrackerWrapper *tracker = it->second;
2861  tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
2862  }
2863 }
2864 
2873 void vpMbGenericTracker::setDepthNormalSamplingStep(const unsigned int stepX, const unsigned int stepY)
2874 {
2875  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2876  it != m_mapOfTrackers.end(); ++it) {
2877  TrackerWrapper *tracker = it->second;
2878  tracker->setDepthNormalSamplingStep(stepX, stepY);
2879  }
2880 }
2881 
2889 void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
2890 {
2891  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
2892  it != mapOfClippingFlags.end(); ++it) {
2893  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
2894 
2895  if (it_tracker != m_mapOfTrackers.end()) {
2896  TrackerWrapper *tracker = it_tracker->second;
2897  tracker->setClipping(it->second);
2898 
2899  if (it->first == m_referenceCameraName) {
2900  clippingFlag = it->second;
2901  }
2902  }
2903  }
2904 }
2905 
2924 void vpMbGenericTracker::setDisplayFeatures(const bool displayF)
2925 {
2926  vpMbTracker::setDisplayFeatures(displayF);
2927 
2928  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2929  it != m_mapOfTrackers.end(); ++it) {
2930  TrackerWrapper *tracker = it->second;
2931  tracker->setDisplayFeatures(displayF);
2932  }
2933 }
2934 
2942 void vpMbGenericTracker::setFarClippingDistance(const double &dist)
2943 {
2944  vpMbTracker::setFarClippingDistance(dist);
2945 
2946  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2947  it != m_mapOfTrackers.end(); ++it) {
2948  TrackerWrapper *tracker = it->second;
2949  tracker->setFarClippingDistance(dist);
2950  }
2951 }
2952 
2961 void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
2962 {
2963  if (m_mapOfTrackers.size() == 2) {
2964  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2965  it->second->setFarClippingDistance(dist1);
2966 
2967  ++it;
2968  it->second->setFarClippingDistance(dist2);
2969 
2970  it = m_mapOfTrackers.find(m_referenceCameraName);
2971  if (it != m_mapOfTrackers.end()) {
2972  distFarClip = it->second->getFarClippingDistance();
2973  } else {
2974  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
2975  }
2976  } else {
2977  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
2978  m_mapOfTrackers.size());
2979  }
2980 }
2981 
2987 void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
2988 {
2989  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
2990  ++it) {
2991  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
2992 
2993  if (it_tracker != m_mapOfTrackers.end()) {
2994  TrackerWrapper *tracker = it_tracker->second;
2995  tracker->setFarClippingDistance(it->second);
2996 
2997  if (it->first == m_referenceCameraName) {
2998  distFarClip = it->second;
2999  }
3000  }
3001  }
3002 }
3003 
3010 void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
3011 {
3012  for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
3013  ++it) {
3014  std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
3015  if (it_factor != mapOfFeatureFactors.end()) {
3016  it->second = it_factor->second;
3017  }
3018  }
3019 }
3020 
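/*
  Usage sketch for setFeatureFactors(): balancing the contribution of each
  feature type in the optimization (values purely illustrative; "tracker"
  denotes an already constructed vpMbGenericTracker):

    std::map<vpMbGenericTracker::vpTrackerType, double> mapOfFeatureFactors;
    mapOfFeatureFactors[vpMbGenericTracker::EDGE_TRACKER] = 1.0;
    mapOfFeatureFactors[vpMbGenericTracker::KLT_TRACKER] = 1.0;
    mapOfFeatureFactors[vpMbGenericTracker::DEPTH_DENSE_TRACKER] = 1.0;
    tracker.setFeatureFactors(mapOfFeatureFactors);
*/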
3036 void vpMbGenericTracker::setGoodMovingEdgesRatioThreshold(const double threshold)
3037 {
3038  m_percentageGdPt = threshold;
3039 
3040  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3041  it != m_mapOfTrackers.end(); ++it) {
3042  TrackerWrapper *tracker = it->second;
3043  tracker->setGoodMovingEdgesRatioThreshold(threshold);
3044  }
3045 }
3046 
3047 #ifdef VISP_HAVE_OGRE
3048 
3059 void vpMbGenericTracker::setGoodNbRayCastingAttemptsRatio(const double &ratio)
3060 {
3061  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3062  it != m_mapOfTrackers.end(); ++it) {
3063  TrackerWrapper *tracker = it->second;
3064  tracker->setGoodNbRayCastingAttemptsRatio(ratio);
3065  }
3066 }
3067 
3079 void vpMbGenericTracker::setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
3080 {
3081  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3082  it != m_mapOfTrackers.end(); ++it) {
3083  TrackerWrapper *tracker = it->second;
3084  tracker->setNbRayCastingAttemptsForVisibility(attempts);
3085  }
3086 }
3087 #endif
3088 
3089 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3090 
3097 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t)
3098 {
3099  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3100  it != m_mapOfTrackers.end(); ++it) {
3101  TrackerWrapper *tracker = it->second;
3102  tracker->setKltOpencv(t);
3103  }
3104 }
3105 
3114 void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t1, const vpKltOpencv &t2)
3115 {
3116  if (m_mapOfTrackers.size() == 2) {
3117  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3118  it->second->setKltOpencv(t1);
3119 
3120  ++it;
3121  it->second->setKltOpencv(t2);
3122  } else {
3123  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3124  m_mapOfTrackers.size());
3125  }
3126 }
3127 
3133 void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
3134 {
3135  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
3136  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3137 
3138  if (it_tracker != m_mapOfTrackers.end()) {
3139  TrackerWrapper *tracker = it_tracker->second;
3140  tracker->setKltOpencv(it->second);
3141  }
3142  }
3143 }
3144 
3152 void vpMbGenericTracker::setKltThresholdAcceptation(const double th)
3153 {
3154  m_thresholdOutlier = th;
3155 
3156  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3157  it != m_mapOfTrackers.end(); ++it) {
3158  TrackerWrapper *tracker = it->second;
3159  tracker->setKltThresholdAcceptation(th);
3160  }
3161 }
3162 #endif
3163 
3176 void vpMbGenericTracker::setLod(const bool useLod, const std::string &name)
3177 {
3178  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3179  it != m_mapOfTrackers.end(); ++it) {
3180  TrackerWrapper *tracker = it->second;
3181  tracker->setLod(useLod, name);
3182  }
3183 }
3184 
3185 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3186 
3193 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
3194 {
3195  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3196  it != m_mapOfTrackers.end(); ++it) {
3197  TrackerWrapper *tracker = it->second;
3198  tracker->setKltMaskBorder(e);
3199  }
3200 }
3201 
3210 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
3211 {
3212  if (m_mapOfTrackers.size() == 2) {
3213  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3214  it->second->setKltMaskBorder(e1);
3215 
3216  ++it;
3217 
3218  it->second->setKltMaskBorder(e2);
3219  } else {
3220  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3221  m_mapOfTrackers.size());
3222  }
3223 }
3224 
3230 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
3231 {
3232  for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
3233  ++it) {
3234  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3235 
3236  if (it_tracker != m_mapOfTrackers.end()) {
3237  TrackerWrapper *tracker = it_tracker->second;
3238  tracker->setKltMaskBorder(it->second);
3239  }
3240  }
3241 }
3242 #endif
3243 
3255 void vpMbGenericTracker::setMinLineLengthThresh(const double minLineLengthThresh, const std::string &name)
3256 {
3257  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3258  it != m_mapOfTrackers.end(); ++it) {
3259  TrackerWrapper *tracker = it->second;
3260  tracker->setMinLineLengthThresh(minLineLengthThresh, name);
3261  }
3262 }
3263 
3274 void vpMbGenericTracker::setMinPolygonAreaThresh(const double minPolygonAreaThresh, const std::string &name)
3275 {
3276  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3277  it != m_mapOfTrackers.end(); ++it) {
3278  TrackerWrapper *tracker = it->second;
3279  tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
3280  }
3281 }
3282 
3290 void vpMbGenericTracker::setMovingEdge(const vpMe &me)
3291 {
3292  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3293  it != m_mapOfTrackers.end(); ++it) {
3294  TrackerWrapper *tracker = it->second;
3295  tracker->setMovingEdge(me);
3296  }
3297 }
3298 
3308 void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
3309 {
3310  if (m_mapOfTrackers.size() == 2) {
3311  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3312  it->second->setMovingEdge(me1);
3313 
3314  ++it;
3315 
3316  it->second->setMovingEdge(me2);
3317  } else {
3318  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3319  m_mapOfTrackers.size());
3320  }
3321 }
3322 
3328 void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
3329 {
3330  for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
3331  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3332 
3333  if (it_tracker != m_mapOfTrackers.end()) {
3334  TrackerWrapper *tracker = it_tracker->second;
3335  tracker->setMovingEdge(it->second);
3336  }
3337  }
3338 }
3339 
3347 void vpMbGenericTracker::setNearClippingDistance(const double &dist)
3348 {
3349  vpMbTracker::setNearClippingDistance(dist);
3350 
3351  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3352  it != m_mapOfTrackers.end(); ++it) {
3353  TrackerWrapper *tracker = it->second;
3354  tracker->setNearClippingDistance(dist);
3355  }
3356 }
3357 
3366 void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
3367 {
3368  if (m_mapOfTrackers.size() == 2) {
3369  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3370  it->second->setNearClippingDistance(dist1);
3371 
3372  ++it;
3373 
3374  it->second->setNearClippingDistance(dist2);
3375 
3376  it = m_mapOfTrackers.find(m_referenceCameraName);
3377  if (it != m_mapOfTrackers.end()) {
3378  distNearClip = it->second->getNearClippingDistance();
3379  } else {
3380  std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3381  }
3382  } else {
3383  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3384  m_mapOfTrackers.size());
3385  }
3386 }
3387 
3393 void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
3394 {
3395  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
3396  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3397 
3398  if (it_tracker != m_mapOfTrackers.end()) {
3399  TrackerWrapper *tracker = it_tracker->second;
3400  tracker->setNearClippingDistance(it->second);
3401 
3402  if (it->first == m_referenceCameraName) {
3403  distNearClip = it->second;
3404  }
3405  }
3406  }
3407 }
3408 
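/*
  Usage sketch: configuring the clipping volume and the visibility angles
  shared by every camera (distances in meters, values indicative; "tracker"
  denotes an already constructed vpMbGenericTracker):

    tracker.setNearClippingDistance(0.01);
    tracker.setFarClippingDistance(2.0);
    tracker.setClipping(tracker.getClipping() | vpPolygon3D::FOV_CLIPPING);
    tracker.setAngleAppear(vpMath::rad(80));
    tracker.setAngleDisappear(vpMath::rad(85));
*/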
3421 void vpMbGenericTracker::setOgreShowConfigDialog(const bool showConfigDialog)
3422 {
3423  vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
3424 
3425  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3426  it != m_mapOfTrackers.end(); ++it) {
3427  TrackerWrapper *tracker = it->second;
3428  tracker->setOgreShowConfigDialog(showConfigDialog);
3429  }
3430 }
3431 
3442 void vpMbGenericTracker::setOgreVisibilityTest(const bool &v)
3443 {
3444  vpMbTracker::setOgreVisibilityTest(v);
3445 
3446  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3447  it != m_mapOfTrackers.end(); ++it) {
3448  TrackerWrapper *tracker = it->second;
3449  tracker->setOgreVisibilityTest(v);
3450  }
3451 
3452 #ifdef VISP_HAVE_OGRE
3453  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3454  it != m_mapOfTrackers.end(); ++it) {
3455  TrackerWrapper *tracker = it->second;
3456  tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
3457  }
3458 #endif
3459 }
3460 
3468 void vpMbGenericTracker::setOptimizationMethod(const vpMbtOptimizationMethod &opt)
3469 {
3470  vpMbTracker::setOptimizationMethod(opt);
3471 
3472  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3473  it != m_mapOfTrackers.end(); ++it) {
3474  TrackerWrapper *tracker = it->second;
3475  tracker->setOptimizationMethod(opt);
3476  }
3477 }
3478 
3491 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
3492 {
3493  if (m_mapOfTrackers.size() > 1) {
3494  throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
3495  "to be configured with only one camera!");
3496  }
3497 
3498  cMo = cdMo;
3499 
3500  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
3501  if (it != m_mapOfTrackers.end()) {
3502  TrackerWrapper *tracker = it->second;
3503  tracker->setPose(I, cdMo);
3504  } else {
3505  throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
3506  m_referenceCameraName.c_str());
3507  }
3508 }
3509 
3521 void vpMbGenericTracker::setPose(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
3522                                  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
3523 {
3524  if (m_mapOfTrackers.size() == 2) {
3525  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3526  it->second->setPose(I1, c1Mo);
3527 
3528  ++it;
3529 
3530  it->second->setPose(I2, c2Mo);
3531 
3532  it = m_mapOfTrackers.find(m_referenceCameraName);
3533  if (it != m_mapOfTrackers.end()) {
3534  // Set reference pose
3535  it->second->getPose(cMo);
3536  } else {
3537  throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
3538  m_referenceCameraName.c_str());
3539  }
3540  } else {
3541  throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3542  m_mapOfTrackers.size());
3543  }
3544 }
3545 
3561 void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3562  const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
3563 {
3564  // Set the reference cMo
3565  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3566  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3567  mapOfImages.find(m_referenceCameraName);
3568  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3569 
3570  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
3571  TrackerWrapper *tracker = it_tracker->second;
3572  tracker->setPose(*it_img->second, it_camPose->second);
3573  tracker->getPose(cMo);
3574  } else {
3575  throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
3576  }
3577 
3578  // Vector of missing pose matrices for cameras
3579  std::vector<std::string> vectorOfMissingCameraPoses;
3580 
3581  // Set pose for the specified cameras
3582  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3583  if (it_tracker->first != m_referenceCameraName) {
3584  it_img = mapOfImages.find(it_tracker->first);
3585  it_camPose = mapOfCameraPoses.find(it_tracker->first);
3586 
3587  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
3588  // Set pose
3589  TrackerWrapper *tracker = it_tracker->second;
3590  tracker->setPose(*it_img->second, it_camPose->second);
3591  } else {
3592  vectorOfMissingCameraPoses.push_back(it_tracker->first);
3593  }
3594  }
3595  }
3596 
3597  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
3598  it != vectorOfMissingCameraPoses.end(); ++it) {
3599  it_img = mapOfImages.find(*it);
3600  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3601        m_mapOfCameraTransformationMatrix.find(*it);
3602 
3603  if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3604  vpHomogeneousMatrix cCurrentMo = it_camTrans->second * cMo;
3605  m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
3606  } else {
3607       throw vpException(vpTrackingException::fatalError,
3608                         "Missing image or missing camera transformation "
3609  "matrix! Cannot set pose for camera: %s!",
3610  it->c_str());
3611  }
3612  }
3613 }
3614 
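/*
  Usage sketch for the map-based setPose() overload above (a sketch: cMo_ref
  is assumed to be a known pose of the object in the reference camera frame
  and c2Mc1 the transformation declared with setCameraTransformationMatrix()):

    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
    mapOfImages["Camera1"] = &I1;
    mapOfImages["Camera2"] = &I2;
    std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
    mapOfPoses["Camera1"] = cMo_ref;
    mapOfPoses["Camera2"] = c2Mc1 * cMo_ref;
    tracker.setPose(mapOfImages, mapOfPoses);
*/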
3629 void vpMbGenericTracker::setProjectionErrorComputation(const bool &flag)
3630 {
3631  vpMbTracker::setProjectionErrorComputation(flag);
3632 
3633  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3634  it != m_mapOfTrackers.end(); ++it) {
3635  TrackerWrapper *tracker = it->second;
3636  tracker->setProjectionErrorComputation(flag);
3637  }
3638 }
3639 
3645 void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
3646 {
3647  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
3648  if (it != m_mapOfTrackers.end()) {
3649  m_referenceCameraName = referenceCameraName;
3650  } else {
3651  std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
3652  }
3653 }
3654 
3655 void vpMbGenericTracker::setScanLineVisibilityTest(const bool &v)
3656 {
3657  vpMbTracker::setScanLineVisibilityTest(v);
3658 
3659  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3660  it != m_mapOfTrackers.end(); ++it) {
3661  TrackerWrapper *tracker = it->second;
3662  tracker->setScanLineVisibilityTest(v);
3663  }
3664 }
3665 
3677 void vpMbGenericTracker::setTrackerType(const int type)
3678 {
3679  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3680  it != m_mapOfTrackers.end(); ++it) {
3681  TrackerWrapper *tracker = it->second;
3682  tracker->setTrackerType(type);
3683  }
3684 }
3685 
3695 void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
3696 {
3697  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
3698  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3699  if (it_tracker != m_mapOfTrackers.end()) {
3700  TrackerWrapper *tracker = it_tracker->second;
3701  tracker->setTrackerType(it->second);
3702  }
3703  }
3704 }
3705 
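/*
  Usage sketch: selecting the feature types on a per-camera basis, e.g. a
  color camera tracked with moving edges + KLT and a depth camera tracked
  with the dense depth feature (camera names are assumed to match the ones
  given at construction time):

    std::map<std::string, int> mapOfTrackerTypes;
    mapOfTrackerTypes["Camera1"] = vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER;
    mapOfTrackerTypes["Camera2"] = vpMbGenericTracker::DEPTH_DENSE_TRACKER;
    tracker.setTrackerType(mapOfTrackerTypes);
*/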
3715 void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
3716 {
3717  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3718  it != m_mapOfTrackers.end(); ++it) {
3719  TrackerWrapper *tracker = it->second;
3720  tracker->setUseEdgeTracking(name, useEdgeTracking);
3721  }
3722 }
3723 
3724 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3725 
3734 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
3735 {
3736  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3737  it != m_mapOfTrackers.end(); ++it) {
3738  TrackerWrapper *tracker = it->second;
3739  tracker->setUseKltTracking(name, useKltTracking);
3740  }
3741 }
3742 #endif
3743 
3744 void vpMbGenericTracker::testTracking()
3745 {
3746  // Test tracking fails only if all testTracking have failed
3747  bool isOneTestTrackingOk = false;
3748  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3749  it != m_mapOfTrackers.end(); ++it) {
3750  TrackerWrapper *tracker = it->second;
3751  try {
3752  tracker->testTracking();
3753  isOneTestTrackingOk = true;
3754  } catch (...) {
3755  }
3756  }
3757 
3758  if (!isOneTestTrackingOk) {
3759  std::ostringstream oss;
3760  oss << "Not enough moving edges to track the object. Try to reduce the "
3761  "threshold="
3762  << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
3763     throw vpException(vpTrackingException::fatalError, oss.str());
3764   }
3765 }
3766 
3776 void vpMbGenericTracker::track(const vpImage<unsigned char> &I)
3777 {
3778  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
3779  mapOfImages[m_referenceCameraName] = &I;
3780 
3781  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
3782  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
3783 
3784  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
3785 }
3786 
3797 void vpMbGenericTracker::track(const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2)
3798 {
3799  if (m_mapOfTrackers.size() == 2) {
3800  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3801  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
3802  mapOfImages[it->first] = &I1;
3803  ++it;
3804 
3805  mapOfImages[it->first] = &I2;
3806 
3807  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
3808  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
3809 
3810  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
3811  } else {
3812  std::stringstream ss;
3813  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3814  throw vpException(vpTrackingException::fatalError, ss.str().c_str());
3815  }
3816 }
3817 
3825 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
3826 {
3827  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
3828  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
3829 
3830  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
3831 }
3832 
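/*
  Usage sketch: a minimal monocular tracking loop built on the track()
  overloads above (image acquisition, display and the stop condition are
  application dependent and only hinted at here):

    vpCameraParameters cam;
    tracker.getCameraParameters(cam);
    vpHomogeneousMatrix cMo_cur;
    while (!stopRequested) {      // hypothetical stop condition
      // ... acquire the new grey-level image in I ...
      tracker.track(I);
      tracker.getPose(cMo_cur);
      tracker.display(I, cMo_cur, cam, vpColor::red, 2);
      vpDisplay::flush(I);
    }
*/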
3833 #ifdef VISP_HAVE_PCL
3834 
3842 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3843  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3844 {
3845  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3846  it != m_mapOfTrackers.end(); ++it) {
3847  TrackerWrapper *tracker = it->second;
3848 
3849  if ((tracker->m_trackerType & (EDGE_TRACKER |
3850 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3851  KLT_TRACKER |
3852 #endif
3853                                   DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
3854       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
3855  }
3856 
3857  if (tracker->m_trackerType & (EDGE_TRACKER
3858 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3859  | KLT_TRACKER
3860 #endif
3861  ) &&
3862  mapOfImages[it->first] == NULL) {
3863  throw vpException(vpException::fatalError, "Image pointer is NULL!");
3864  }
3865 
3866  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
3867  mapOfPointClouds[it->first] == nullptr) {
3868  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
3869  }
3870  }
3871 
3872  preTracking(mapOfImages, mapOfPointClouds);
3873 
3874  try {
3875  computeVVS(mapOfImages);
3876  } catch (...) {
3877  covarianceMatrix = -1;
3878  throw; // throw the original exception
3879  }
3880 
3881  testTracking();
3882 
3883  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3884  it != m_mapOfTrackers.end(); ++it) {
3885  TrackerWrapper *tracker = it->second;
3886 
3887  tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3888  }
3889 
3890   computeProjectionError();
3891 }
3892 #endif
3893 
3904 void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3905  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3906  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3907  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3908 {
3909  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3910  it != m_mapOfTrackers.end(); ++it) {
3911  TrackerWrapper *tracker = it->second;
3912 
3913  if ((tracker->m_trackerType & (EDGE_TRACKER |
3914 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3915  KLT_TRACKER |
3916 #endif
3917                                   DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
3918       throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
3919  }
3920 
3921  if (tracker->m_trackerType & (EDGE_TRACKER
3922 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3923  | KLT_TRACKER
3924 #endif
3925  ) &&
3926  mapOfImages[it->first] == NULL) {
3927  throw vpException(vpException::fatalError, "Image pointer is NULL!");
3928  }
3929 
3930  if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
3931  (mapOfPointClouds[it->first] == NULL)) {
3932  throw vpException(vpException::fatalError, "Pointcloud is NULL!");
3933  }
3934  }
3935 
3936  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
3937 
3938  try {
3939  computeVVS(mapOfImages);
3940  } catch (...) {
3941  covarianceMatrix = -1;
3942  throw; // throw the original exception
3943  }
3944 
3945  testTracking();
3946 
3947  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3948  it != m_mapOfTrackers.end(); ++it) {
3949  TrackerWrapper *tracker = it->second;
3950 
3951  tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
3952  }
3953 
3954   computeProjectionError();
3955 }
3956 
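/*
  The remainder of this file implements vpMbGenericTracker::TrackerWrapper,
  the per-camera helper that multiplexes vpMbEdgeTracker, vpMbKltTracker,
  vpMbDepthNormalTracker and vpMbDepthDenseTracker behind the m_trackerType
  bitmask, so that each camera of the generic tracker can combine several
  feature types.
*/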
3958 vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
3959  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
3960 {
3961  m_lambda = 1.0;
3962  m_maxIter = 30;
3963 
3964 #ifdef VISP_HAVE_OGRE
3965  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
3966 #endif
3967 }
3968 
3969 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(const int trackerType)
3970  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
3971 {
3972  if ((m_trackerType & (EDGE_TRACKER |
3973 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
3974  KLT_TRACKER |
3975 #endif
3976                         DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
3977     throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
3978  }
3979 
3980  m_lambda = 1.0;
3981  m_maxIter = 30;
3982 
3983 #ifdef VISP_HAVE_OGRE
3984  faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
3985 #endif
3986 }
3987 
3988 vpMbGenericTracker::TrackerWrapper::~TrackerWrapper() {}
3989 
3990 // Implemented only for debugging purposes: use TrackerWrapper as a standalone
3991 // tracker
3992 void vpMbGenericTracker::TrackerWrapper::computeVVS(const vpImage<unsigned char> *const ptr_I)
3993 {
3994  computeVVSInit(ptr_I);
3995 
3996  if (m_error.getRows() < 4) {
3997  throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
3998  }
3999 
4000  double normRes = 0;
4001  double normRes_1 = -1;
4002  unsigned int iter = 0;
4003 
4004  double factorEdge = 1.0;
4005 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4006  double factorKlt = 1.0;
4007 #endif
4008  double factorDepth = 1.0;
4009  double factorDepthDense = 1.0;
4010 
4011  vpMatrix LTL;
4012  vpColVector LTR, v;
4013  vpColVector error_prev;
4014 
4015  double mu = m_initialMu;
4016  vpHomogeneousMatrix cMo_prev;
4017 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4018  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
4019 #endif
4020  bool isoJoIdentity_ = true;
4021 
4022  // Covariance
4023  vpColVector W_true(m_error.getRows());
4024  vpMatrix L_true, LVJ_true;
4025 
4026  unsigned int nb_edge_features = m_error_edge.getRows();
4027 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4028  unsigned int nb_klt_features = m_error_klt.getRows();
4029 #endif
4030  unsigned int nb_depth_features = m_error_depthNormal.getRows();
4031  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
4032 
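  // Virtual visual servoing loop: each iteration recomputes the stacked
  // interaction matrix and residual, applies the robust weights of every
  // enabled feature type, solves for the pose increment v and updates cMo.
  // The loop stops when the weighted RMS residual sqrt(num / den) stabilizes
  // (variation below m_stopCriteriaEpsilon) or after m_maxIter iterations.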
4033  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
4034     computeVVSInteractionMatrixAndResidu(ptr_I);
4035 
4036  bool reStartFromLastIncrement = false;
4037  computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
4038 
4039 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4040  if (reStartFromLastIncrement) {
4041  if (m_trackerType & KLT_TRACKER) {
4042  ctTc0 = ctTc0_Prev;
4043  }
4044  }
4045 #endif
4046 
4047  if (!reStartFromLastIncrement) {
4048       computeVVSWeights();
4049 
4050  if (computeCovariance) {
4051  L_true = m_L;
4052  if (!isoJoIdentity_) {
4053           vpVelocityTwistMatrix cVo;
4054           cVo.buildFrom(cMo);
4055  LVJ_true = (m_L * cVo * oJo);
4056  }
4057  }
4058 
4059       vpVelocityTwistMatrix cVo;
4060       if (iter == 0) {
4061  isoJoIdentity_ = true;
4062  oJo.eye();
4063 
4064  // If all the 6 dof should be estimated, we check if the interaction
4065  // matrix is full rank. If not we remove automatically the dof that
4066         // cannot be estimated. This is particularly useful when considering
4067  // circles (rank 5) and cylinders (rank 4)
4068  if (isoJoIdentity_) {
4069  cVo.buildFrom(cMo);
4070 
4071  vpMatrix K; // kernel
4072  unsigned int rank = (m_L * cVo).kernel(K);
4073  if (rank == 0) {
4074  throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
4075  }
4076 
4077  if (rank != 6) {
4078  vpMatrix I; // Identity
4079  I.eye(6);
4080  oJo = I - K.AtA();
4081 
4082  isoJoIdentity_ = false;
4083  }
4084  }
4085  }
4086 
4087  // Weighting
4088  double num = 0;
4089  double den = 0;
4090 
4091  unsigned int start_index = 0;
4092  if (m_trackerType & EDGE_TRACKER) {
4093  for (unsigned int i = 0; i < nb_edge_features; i++) {
4094  double wi = m_w_edge[i] * m_factor[i] * factorEdge;
4095  W_true[i] = wi;
4096  m_weightedError[i] = wi * m_error[i];
4097 
4098  num += wi * vpMath::sqr(m_error[i]);
4099  den += wi;
4100 
4101  for (unsigned int j = 0; j < m_L.getCols(); j++) {
4102  m_L[i][j] *= wi;
4103  }
4104  }
4105 
4106  start_index += nb_edge_features;
4107  }
4108 
4109 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4110  if (m_trackerType & KLT_TRACKER) {
4111  for (unsigned int i = 0; i < nb_klt_features; i++) {
4112  double wi = m_w_klt[i] * factorKlt;
4113  W_true[start_index + i] = wi;
4114  m_weightedError[start_index + i] = wi * m_error_klt[i];
4115 
4116  num += wi * vpMath::sqr(m_error[start_index + i]);
4117  den += wi;
4118 
4119  for (unsigned int j = 0; j < m_L.getCols(); j++) {
4120  m_L[start_index + i][j] *= wi;
4121  }
4122  }
4123 
4124  start_index += nb_klt_features;
4125  }
4126 #endif
4127 
4128  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
4129  for (unsigned int i = 0; i < nb_depth_features; i++) {
4130  double wi = m_w_depthNormal[i] * factorDepth;
4131  m_w[start_index + i] = m_w_depthNormal[i];
4132  m_weightedError[start_index + i] = wi * m_error[start_index + i];
4133 
4134  num += wi * vpMath::sqr(m_error[start_index + i]);
4135  den += wi;
4136 
4137  for (unsigned int j = 0; j < m_L.getCols(); j++) {
4138  m_L[start_index + i][j] *= wi;
4139  }
4140  }
4141 
4142  start_index += nb_depth_features;
4143  }
4144 
4145  if (m_trackerType & DEPTH_DENSE_TRACKER) {
4146  for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
4147  double wi = m_w_depthDense[i] * factorDepthDense;
4148  m_w[start_index + i] = m_w_depthDense[i];
4149  m_weightedError[start_index + i] = wi * m_error[start_index + i];
4150 
4151  num += wi * vpMath::sqr(m_error[start_index + i]);
4152  den += wi;
4153 
4154  for (unsigned int j = 0; j < m_L.getCols(); j++) {
4155  m_L[start_index + i][j] *= wi;
4156  }
4157  }
4158 
4159  // start_index += nb_depth_dense_features;
4160  }
4161 
4162  computeVVSPoseEstimation(isoJoIdentity_, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
4163 
4164  cMo_prev = cMo;
4165 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4166  if (m_trackerType & KLT_TRACKER) {
4167  ctTc0_Prev = ctTc0;
4168  }
4169 #endif
4170 
4171       cMo = vpExponentialMap::direct(v).inverse() * cMo;
4172 
4173 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4174  if (m_trackerType & KLT_TRACKER) {
4175  ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
4176  }
4177 #endif
4178  normRes_1 = normRes;
4179 
4180  normRes = sqrt(num / den);
4181  }
4182 
4183  iter++;
4184  }
4185 
4186  computeCovarianceMatrixVVS(isoJoIdentity_, W_true, cMo_prev, L_true, LVJ_true, m_error);
4187 
4188  if (m_trackerType & EDGE_TRACKER) {
4190  }
4191 }
4192 
4193 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
4194 {
4195  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
4196  "TrackerWrapper::computeVVSInit("
4197  ") should not be called!");
4198 }
4199 
4200 void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
4201 {
4202  initMbtTracking(ptr_I);
4203 
4204  unsigned int nbFeatures = 0;
4205 
4206  if (m_trackerType & EDGE_TRACKER) {
4207  nbFeatures += m_error_edge.getRows();
4208  } else {
4209  m_error_edge.clear();
4210  m_weightedError_edge.clear();
4211  m_L_edge.clear();
4212  m_w_edge.clear();
4213  }
4214 
4215 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4216  if (m_trackerType & KLT_TRACKER) {
4217     vpMbKltTracker::computeVVSInit();
4218     nbFeatures += m_error_klt.getRows();
4219  } else {
4220  m_error_klt.clear();
4221  m_weightedError_klt.clear();
4222  m_L_klt.clear();
4223  m_w_klt.clear();
4224  }
4225 #endif
4226 
4227  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
4228     vpMbDepthNormalTracker::computeVVSInit();
4229     nbFeatures += m_error_depthNormal.getRows();
4230  } else {
4231  m_error_depthNormal.clear();
4232  m_weightedError_depthNormal.clear();
4233  m_L_depthNormal.clear();
4234  m_w_depthNormal.clear();
4235  }
4236 
4237  if (m_trackerType & DEPTH_DENSE_TRACKER) {
4238     vpMbDepthDenseTracker::computeVVSInit();
4239     nbFeatures += m_error_depthDense.getRows();
4240  } else {
4241  m_error_depthDense.clear();
4242  m_weightedError_depthDense.clear();
4243  m_L_depthDense.clear();
4244  m_w_depthDense.clear();
4245  }
4246 
4247  m_L.resize(nbFeatures, 6, false, false);
4248  m_error.resize(nbFeatures, false);
4249 
4250  m_weightedError.resize(nbFeatures, false);
4251  m_w.resize(nbFeatures, false);
4252  m_w = 1;
4253 }
4254 
4255 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu()
4256 {
4257  throw vpException(vpException::fatalError, "vpMbGenericTracker::"
4258  "TrackerWrapper::"
4259  "computeVVSInteractionMatrixAndR"
4260  "esidu() should not be called!");
4261 }
4262 
4263 void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu(const vpImage<unsigned char> *const ptr_I)
4264 {
4265  if (m_trackerType & EDGE_TRACKER) {
4266     vpMbEdgeTracker::computeVVSInteractionMatrixAndResidu(*ptr_I);
4267   }
4268 
4269 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4270  if (m_trackerType & KLT_TRACKER) {
4271     vpMbKltTracker::computeVVSInteractionMatrixAndResidu();
4272   }
4273 #endif
4274 
4275  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
4276     vpMbDepthNormalTracker::computeVVSInteractionMatrixAndResidu();
4277   }
4278 
4279  if (m_trackerType & DEPTH_DENSE_TRACKER) {
4280     vpMbDepthDenseTracker::computeVVSInteractionMatrixAndResidu();
4281   }
4282 
4283  unsigned int start_index = 0;
4284  if (m_trackerType & EDGE_TRACKER) {
4285  m_L.insert(m_L_edge, start_index, 0);
4286  m_error.insert(start_index, m_error_edge);
4287 
4288  start_index += m_error_edge.getRows();
4289  }
4290 
4291 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4292  if (m_trackerType & KLT_TRACKER) {
4293  m_L.insert(m_L_klt, start_index, 0);
4294  m_error.insert(start_index, m_error_klt);
4295 
4296  start_index += m_error_klt.getRows();
4297  }
4298 #endif
4299 
4300  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
4301  m_L.insert(m_L_depthNormal, start_index, 0);
4302  m_error.insert(start_index, m_error_depthNormal);
4303 
4304  start_index += m_error_depthNormal.getRows();
4305  }
4306 
4307  if (m_trackerType & DEPTH_DENSE_TRACKER) {
4308  m_L.insert(m_L_depthDense, start_index, 0);
4309  m_error.insert(start_index, m_error_depthDense);
4310 
4311  // start_index += m_error_depthDense.getRows();
4312  }
4313 }
4314 
4315 void vpMbGenericTracker::TrackerWrapper::computeVVSWeights()
4316 {
4317  unsigned int start_index = 0;
4318 
4319  if (m_trackerType & EDGE_TRACKER) {
4320     vpMbEdgeTracker::computeVVSWeights();
4321     m_w.insert(start_index, m_w_edge);
4322 
4323  start_index += m_w_edge.getRows();
4324  }
4325 
4326 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4327  if (m_trackerType & KLT_TRACKER) {
4328  vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
4329  m_w.insert(start_index, m_w_klt);
4330 
4331  start_index += m_w_klt.getRows();
4332  }
4333 #endif
4334 
4335  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
4336  if (m_depthNormalUseRobust) {
4337  vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
4338  m_w.insert(start_index, m_w_depthNormal);
4339  }
4340 
4341  start_index += m_w_depthNormal.getRows();
4342  }
4343 
4344  if (m_trackerType & DEPTH_DENSE_TRACKER) {
4345     vpMbTracker::computeVVSWeights(m_robust_depthDense, m_error_depthDense, m_w_depthDense);
4346     m_w.insert(start_index, m_w_depthDense);
4347 
4348  // start_index += m_w_depthDense.getRows();
4349  }
4350 }
4351 
4352 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo_,
4353  const vpCameraParameters &camera, const vpColor &col,
4354  const unsigned int thickness, const bool displayFullModel)
4355 {
4356  if (m_trackerType == EDGE_TRACKER) {
4357  vpMbEdgeTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4358 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4359  } else if (m_trackerType == KLT_TRACKER) {
4360  vpMbKltTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4361 #endif
4362  } else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
4363  vpMbDepthNormalTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4364  } else if (m_trackerType == DEPTH_DENSE_TRACKER) {
4365  vpMbDepthDenseTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4366  } else {
4367  if (m_trackerType & EDGE_TRACKER) {
4368  for (unsigned int i = 0; i < scales.size(); i += 1) {
4369  if (scales[i]) {
4370  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[scaleLevel].begin();
4371  it != lines[scaleLevel].end(); ++it) {
4372  (*it)->display(I, cMo_, camera, col, thickness, displayFullModel);
4373  }
4374 
4375  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[scaleLevel].begin();
4376  it != cylinders[scaleLevel].end(); ++it) {
4377  (*it)->display(I, cMo_, camera, col, thickness, displayFullModel);
4378  }
4379 
4380  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[scaleLevel].begin();
4381  it != circles[scaleLevel].end(); ++it) {
4382  (*it)->display(I, cMo_, camera, col, thickness, displayFullModel);
4383  }
4384 
4385  break; // display model on one scale only
4386  }
4387  }
4388  }
4389 
4390 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4391  if (m_trackerType & KLT_TRACKER) {
4392  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end();
4393  ++it) {
4394  vpMbtDistanceKltPoints *kltpoly = *it;
4395  if (displayFeatures && kltpoly->hasEnoughPoints() && kltpoly->isTracked() && kltpoly->polygon->isVisible()) {
4396  kltpoly->displayPrimitive(I);
4397  }
4398  }
4399 
4400  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
4401  ++it) {
4402  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
4403  if (displayFeatures && kltPolyCylinder->isTracked() && kltPolyCylinder->hasEnoughPoints())
4404  kltPolyCylinder->displayPrimitive(I);
4405  }
4406  }
4407 #endif
4408 
4409  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
4410  vpMbDepthNormalTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4411  }
4412 
4413  if (m_trackerType & DEPTH_DENSE_TRACKER) {
4414  vpMbDepthDenseTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4415  }
4416 
4417 #ifdef VISP_HAVE_OGRE
4418  if (useOgre)
4419  faces.displayOgre(cMo_);
4420 #endif
4421  }
4422 }
4423 
4424 void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo_,
4425  const vpCameraParameters &camera, const vpColor &col,
4426  const unsigned int thickness, const bool displayFullModel)
4427 {
4428  if (m_trackerType == EDGE_TRACKER) {
4429  vpMbEdgeTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4430 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4431  } else if (m_trackerType == KLT_TRACKER) {
4432  vpMbKltTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4433 #endif
4434  } else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
4435  vpMbDepthNormalTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4436  } else if (m_trackerType == DEPTH_DENSE_TRACKER) {
4437  vpMbDepthDenseTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4438  } else {
4439  if (m_trackerType & EDGE_TRACKER) {
4440  for (unsigned int i = 0; i < scales.size(); i += 1) {
4441  if (scales[i]) {
4442  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[scaleLevel].begin();
4443  it != lines[scaleLevel].end(); ++it) {
4444  (*it)->display(I, cMo_, camera, col, thickness, displayFullModel);
4445  }
4446 
4447  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[scaleLevel].begin();
4448  it != cylinders[scaleLevel].end(); ++it) {
4449  (*it)->display(I, cMo_, camera, col, thickness, displayFullModel);
4450  }
4451 
4452  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[scaleLevel].begin();
4453  it != circles[scaleLevel].end(); ++it) {
4454  (*it)->display(I, cMo_, camera, col, thickness, displayFullModel);
4455  }
4456 
4457  break; // display model on one scale only
4458  }
4459  }
4460  }
4461 
4462 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4463  if (m_trackerType & KLT_TRACKER) {
4464  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end();
4465  ++it) {
4466  vpMbtDistanceKltPoints *kltpoly = *it;
4467  if (displayFeatures && kltpoly->hasEnoughPoints() && kltpoly->isTracked() && kltpoly->polygon->isVisible()) {
4468  kltpoly->displayPrimitive(I);
4469  }
4470  }
4471 
4472  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
4473  ++it) {
4474  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
4475  if (displayFeatures && kltPolyCylinder->isTracked() && kltPolyCylinder->hasEnoughPoints())
4476  kltPolyCylinder->displayPrimitive(I);
4477  }
4478  }
4479 #endif
4480 
4481  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
4482  vpMbDepthNormalTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4483  }
4484 
4485  if (m_trackerType & DEPTH_DENSE_TRACKER) {
4486  vpMbDepthDenseTracker::display(I, cMo_, camera, col, thickness, displayFullModel);
4487  }
4488 
4489 #ifdef VISP_HAVE_OGRE
4490  if (useOgre)
4491  faces.displayOgre(cMo_);
4492 #endif
4493  }
4494 }
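// Editor's note: a minimal, hedged sketch of how these display() overloads are
// typically driven from user code; the tracker instance, image and display
// calls below are assumptions for illustration, not taken from this file.
//
//   vpImage<unsigned char> I;        // current grabbed frame
//   vpHomogeneousMatrix cMo;         // pose estimated by track()
//   vpCameraParameters cam;
//   tracker.getPose(cMo);
//   tracker.getCameraParameters(cam);
//   vpDisplay::display(I);
//   tracker.display(I, cMo, cam, vpColor::red, 2);  // overlay the model edges
//   vpDisplay::flush(I);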
4495 
4496 void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
4497 {
4498  if (!modelInitialised) {
4499  throw vpException(vpException::fatalError, "model not initialized");
4500  }
4501 
4502  if (useScanLine || clippingFlag > 3)
4503  cam.computeFov(I.getWidth(), I.getHeight());
4504 
4505  bool reInitialisation = false;
4506  if (!useOgre) {
4507  faces.setVisible(I, cam, cMo, angleAppears, angleDisappears, reInitialisation);
4508  } else {
4509 #ifdef VISP_HAVE_OGRE
4510  if (!faces.isOgreInitialised()) {
4511  faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());
4512 
4513  faces.setOgreShowConfigDialog(ogreShowConfigDialog);
4514  faces.initOgre(cam);
4515  // Turn off Ogre config dialog display for the next call to this
4516  // function since settings are saved in the ogre.cfg file and used
4517  // during the next call
4518  ogreShowConfigDialog = false;
4519  }
4520 
4521  faces.setVisibleOgre(I, cam, cMo, angleAppears, angleDisappears, reInitialisation);
4522 #else
4523  faces.setVisible(I, cam, cMo, angleAppears, angleDisappears, reInitialisation);
4524 #endif
4525  }
4526 
4527  if (useScanLine) {
4528  faces.computeClippedPolygons(cMo, cam);
4529  faces.computeScanLineRender(cam, I.getWidth(), I.getHeight());
4530  }
4531 
4532 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4533  if (m_trackerType & KLT_TRACKER)
4534  vpMbKltTracker::reinit(I);
4535 #endif
4536 
4537  if (m_trackerType & EDGE_TRACKER) {
4539 
4540  bool a = false;
4541  vpMbEdgeTracker::visibleFace(I, cMo, a); // should be useless, but keep it for nbvisiblepolygone
4542 
4543  initMovingEdge(I, cMo);
4544  }
4545 
4546  if (m_trackerType & DEPTH_NORMAL_TRACKER)
4547  vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);
4548 
4549  if (m_trackerType & DEPTH_DENSE_TRACKER)
4550  vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
4551 }
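// Editor's note: TrackerWrapper::init() is not called directly by user code;
// it is reached through the public initialisation entry points of
// vpMbGenericTracker. A hedged sketch (model file and pose names are illustrative):
//
//   vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
//   tracker.loadModel("object.cao");
//   tracker.initFromPose(I, cMo_initial);   // ends up calling TrackerWrapper::init(I)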
4552 
4553 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
4554  const double radius, const int idFace, const std::string &name)
4555 {
4556  if (m_trackerType & EDGE_TRACKER)
4557  vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
4558 }
4559 
4560 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, const double radius,
4561  const int idFace, const std::string &name)
4562 {
4563  if (m_trackerType & EDGE_TRACKER)
4564  vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
4565 
4566 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4567  if (m_trackerType & KLT_TRACKER)
4568  vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
4569 #endif
4570 }
4571 
4572 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
4573 {
4574  if (m_trackerType & EDGE_TRACKER)
4575  vpMbEdgeTracker::initFaceFromCorners(polygon);
4576 
4577 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4578  if (m_trackerType & KLT_TRACKER)
4579  vpMbKltTracker::initFaceFromCorners(polygon);
4580 #endif
4581 
4582  if (m_trackerType & DEPTH_NORMAL_TRACKER)
4583  vpMbDepthNormalTracker::initFaceFromCorners(polygon);
4584 
4585  if (m_trackerType & DEPTH_DENSE_TRACKER)
4586  vpMbDepthDenseTracker::initFaceFromCorners(polygon);
4587 }
4588 
4589 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
4590 {
4591  if (m_trackerType & EDGE_TRACKER)
4592  vpMbEdgeTracker::initFaceFromLines(polygon);
4593 
4594 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4595  if (m_trackerType & KLT_TRACKER)
4596  vpMbKltTracker::initFaceFromLines(polygon);
4597 #endif
4598 
4599  if (m_trackerType & DEPTH_NORMAL_TRACKER)
4600  vpMbDepthNormalTracker::initFaceFromLines(polygon);
4601 
4602  if (m_trackerType & DEPTH_DENSE_TRACKER)
4603  vpMbDepthDenseTracker::initFaceFromLines(polygon);
4604 }
4605 
4606 void vpMbGenericTracker::TrackerWrapper::initMbtTracking(const vpImage<unsigned char> *const ptr_I)
4607 {
4608  if (m_trackerType & EDGE_TRACKER) {
4611  }
4612 }
4613 
4614 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile)
4615 {
4616 #ifdef VISP_HAVE_XML2
4618 
4619  xmlp.setCameraParameters(cam);
4622 
4623  // Edge
4624  xmlp.setEdgeMe(me);
4625 
4626  // KLT
4627  xmlp.setKltMaxFeatures(10000);
4628  xmlp.setKltWindowSize(5);
4629  xmlp.setKltQuality(0.01);
4630  xmlp.setKltMinDistance(5);
4631  xmlp.setKltHarrisParam(0.01);
4632  xmlp.setKltBlockSize(3);
4633  xmlp.setKltPyramidLevels(3);
4634 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4635  xmlp.setKltMaskBorder(maskBorder);
4636 #endif
4637 
4638  // Depth normal
4639  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
4640  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
4641  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
4642  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
4643  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
4644  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
4645 
4646  // Depth dense
4647  xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
4648  xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
4649 
4650  try {
4651 
4652  std::cout << " *********** Parsing XML for";
4653 
4654  std::vector<std::string> tracker_names;
4655  if (m_trackerType & EDGE_TRACKER)
4656  tracker_names.push_back("Edge");
4657 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4658  if (m_trackerType & KLT_TRACKER)
4659  tracker_names.push_back("Klt");
4660 #endif
4661  if (m_trackerType & DEPTH_NORMAL_TRACKER)
4662  tracker_names.push_back("Depth Normal");
4663  if (m_trackerType & DEPTH_DENSE_TRACKER)
4664  tracker_names.push_back("Depth Dense");
4665 
4666  for (size_t i = 0; i < tracker_names.size(); i++) {
4667  std::cout << " " << tracker_names[i];
4668  if (i == tracker_names.size() - 1) {
4669  std::cout << " ";
4670  }
4671  }
4672 
4673  std::cout << "Model-Based Tracker ************ " << std::endl;
4674  xmlp.parse(configFile.c_str());
4675  } catch (...) {
4676  throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
4677  }
4678 
4679  vpCameraParameters camera;
4680  xmlp.getCameraParameters(camera);
4681  setCameraParameters(camera);
4682 
4685 
4686  if (xmlp.hasNearClippingDistance())
4688 
4689  if (xmlp.hasFarClippingDistance())
4691 
4692  if (xmlp.getFovClipping()) {
4694  }
4695 
4696  useLodGeneral = xmlp.getLodState();
4699 
4700  applyLodSettingInConfig = false;
4701  if (this->getNbPolygon() > 0) {
4702  applyLodSettingInConfig = true;
4706  }
4707 
4708  // Edge
4709  vpMe meParser;
4710  xmlp.getEdgeMe(meParser);
4712 
4713 // KLT
4714 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4715  tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
4716  tracker.setWindowSize((int)xmlp.getKltWindowSize());
4717  tracker.setQuality(xmlp.getKltQuality());
4718  tracker.setMinDistance(xmlp.getKltMinDistance());
4719  tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
4720  tracker.setBlockSize((int)xmlp.getKltBlockSize());
4721  tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
4722  maskBorder = xmlp.getKltMaskBorder();
4723 
4724  // if(useScanLine)
4725  faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
4726 #endif
4727 
4728  // Depth normal
4734 
4735  // Depth dense
4737 #else
4738  std::cerr << "You need libXML2 to read the config file: " << configFile << std::endl;
4739 #endif
4740 }
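// Editor's note: hedged usage sketch for the XML configuration path above
// (requires a ViSP build with libXML2; the tracker instance and file name are
// illustrative assumptions):
//
//   vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
//   tracker.loadConfigFile("object.xml");   // camera, moving-edge, KLT and depth settings
//   vpCameraParameters cam;
//   tracker.getCameraParameters(cam);       // parameters parsed from the XML file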
4741 
4742 #ifdef VISP_HAVE_PCL
4743 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
4744  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
4745 {
4746  if (displayFeatures) {
4747  if (m_trackerType & EDGE_TRACKER) {
4749  }
4750  }
4751 
4752 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4753  // KLT
4754  if (m_trackerType & KLT_TRACKER) {
4755  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
4756  vpMbKltTracker::reinit(*ptr_I);
4757  }
4758  }
4759 #endif
4760 
4761  // Looking for new visible face
4762  if (m_trackerType & EDGE_TRACKER) {
4763  bool newvisibleface = false;
4764  vpMbEdgeTracker::visibleFace(*ptr_I, cMo, newvisibleface);
4765 
4766  if (useScanLine) {
4767  faces.computeClippedPolygons(cMo, cam);
4768  faces.computeScanLineRender(cam, ptr_I->getWidth(), ptr_I->getHeight());
4769  }
4770  }
4771 
4772  // Depth normal
4773  if (m_trackerType & DEPTH_NORMAL_TRACKER)
4774  vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);
4775 
4776  // Depth dense
4777  if (m_trackerType & DEPTH_DENSE_TRACKER)
4778  vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);
4779 
4780  // Edge
4781  if (m_trackerType & EDGE_TRACKER) {
4782  vpMbEdgeTracker::updateMovingEdge(*ptr_I);
4783 
4784  vpMbEdgeTracker::initMovingEdge(*ptr_I, cMo);
4785  // Reinit the moving edge for the lines which need it.
4786  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, cMo);
4787 
4788  if (computeProjError) {
4789  vpMbEdgeTracker::computeProjectionError(*ptr_I);
4790  }
4791  }
4792 }
4793 
4794 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
4795  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
4796 {
4797  if (m_trackerType & EDGE_TRACKER) {
4798  try {
4799  vpMbEdgeTracker::trackMovingEdge(*ptr_I);
4800  } catch (...) {
4801  std::cerr << "Error in moving edge tracking" << std::endl;
4802  throw;
4803  }
4804  }
4805 
4806 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4807  if (m_trackerType & KLT_TRACKER) {
4808  try {
4809  vpMbKltTracker::preTracking(*ptr_I);
4810  } catch (vpException &e) {
4811  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
4812  throw;
4813  }
4814  }
4815 #endif
4816 
4817  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
4818  try {
4819  vpMbDepthNormalTracker::segmentPointCloud(point_cloud);
4820  } catch (...) {
4821  std::cerr << "Error in Depth normal tracking" << std::endl;
4822  throw;
4823  }
4824  }
4825 
4826  if (m_trackerType & DEPTH_DENSE_TRACKER) {
4827  try {
4828  vpMbDepthDenseTracker::segmentPointCloud(point_cloud);
4829  } catch (...) {
4830  std::cerr << "Error in Depth dense tracking" << std::endl;
4831  throw;
4832  }
4833  }
4834 }
4835 #endif
4836 
4837 void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
4838  const unsigned int pointcloud_width,
4839  const unsigned int pointcloud_height)
4840 {
4841  if (displayFeatures) {
4842  if (m_trackerType & EDGE_TRACKER) {
4844  }
4845  }
4846 
4847 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4848  // KLT
4849  if (m_trackerType & KLT_TRACKER) {
4850  if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
4851  vpMbKltTracker::reinit(*ptr_I);
4852  }
4853  }
4854 #endif
4855 
4856  // Looking for new visible face
4857  if (m_trackerType & EDGE_TRACKER) {
4858  bool newvisibleface = false;
4859  vpMbEdgeTracker::visibleFace(*ptr_I, cMo, newvisibleface);
4860 
4861  if (useScanLine) {
4862  faces.computeClippedPolygons(cMo, cam);
4863  faces.computeScanLineRender(cam, ptr_I->getWidth(), ptr_I->getHeight());
4864  }
4865  }
4866 
4867  // Depth normal
4868  if (m_trackerType & DEPTH_NORMAL_TRACKER)
4869  vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);
4870 
4871  // Depth dense
4872  if (m_trackerType & DEPTH_DENSE_TRACKER)
4873  vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);
4874 
4875  // Edge
4876  if (m_trackerType & EDGE_TRACKER) {
4877  vpMbEdgeTracker::updateMovingEdge(*ptr_I);
4878 
4879  vpMbEdgeTracker::initMovingEdge(*ptr_I, cMo);
4880  // Reinit the moving edge for the lines which need it.
4881  vpMbEdgeTracker::reinitMovingEdge(*ptr_I, cMo);
4882 
4883  if (computeProjError) {
4884  vpMbEdgeTracker::computeProjectionError(*ptr_I);
4885  }
4886  }
4887 }
4888 
4889 void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
4890  const std::vector<vpColVector> *const point_cloud,
4891  const unsigned int pointcloud_width,
4892  const unsigned int pointcloud_height)
4893 {
4894  if (m_trackerType & EDGE_TRACKER) {
4895  try {
4896  vpMbEdgeTracker::trackMovingEdge(*ptr_I);
4897  } catch (...) {
4898  std::cerr << "Error in moving edge tracking" << std::endl;
4899  throw;
4900  }
4901  }
4902 
4903 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4904  if (m_trackerType & KLT_TRACKER) {
4905  try {
4906  vpMbKltTracker::preTracking(*ptr_I);
4907  } catch (vpException &e) {
4908  std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
4909  throw;
4910  }
4911  }
4912 #endif
4913 
4914  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
4915  try {
4916  vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
4917  } catch (...) {
4918  std::cerr << "Error in Depth normal tracking" << std::endl;
4919  throw;
4920  }
4921  }
4922 
4923  if (m_trackerType & DEPTH_DENSE_TRACKER) {
4924  try {
4925  vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
4926  } catch (...) {
4927  std::cerr << "Error in Depth dense tracking" << std::endl;
4928  throw;
4929  }
4930  }
4931 }
4932 
4933 void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
4934  const vpHomogeneousMatrix &cMo_, const bool verbose)
4935 {
4936  cMo.eye();
4937 
4938  // Edge
4939  vpMbtDistanceLine *l;
4940  vpMbtDistanceCylinder *cy;
4941  vpMbtDistanceCircle *ci;
4942 
4943  for (unsigned int i = 0; i < scales.size(); i++) {
4944  if (scales[i]) {
4945  for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
4946  l = *it;
4947  if (l != NULL)
4948  delete l;
4949  l = NULL;
4950  }
4951 
4952  for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
4953  ++it) {
4954  cy = *it;
4955  if (cy != NULL)
4956  delete cy;
4957  cy = NULL;
4958  }
4959 
4960  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
4961  ci = *it;
4962  if (ci != NULL)
4963  delete ci;
4964  ci = NULL;
4965  }
4966 
4967  lines[i].clear();
4968  cylinders[i].clear();
4969  circles[i].clear();
4970  }
4971  }
4972 
4973  nline = 0;
4974  ncylinder = 0;
4975  ncircle = 0;
4976  nbvisiblepolygone = 0;
4977 
4978 // KLT
4979 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
4980 #if (VISP_HAVE_OPENCV_VERSION < 0x020408)
4981  if (cur != NULL) {
4982  cvReleaseImage(&cur);
4983  cur = NULL;
4984  }
4985 #endif
4986 
4987  // delete the Klt Polygon features
4988  for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
4989  vpMbtDistanceKltPoints *kltpoly = *it;
4990  if (kltpoly != NULL) {
4991  delete kltpoly;
4992  }
4993  kltpoly = NULL;
4994  }
4995  kltPolygons.clear();
4996 
4997  for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
4998  ++it) {
4999  vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
5000  if (kltPolyCylinder != NULL) {
5001  delete kltPolyCylinder;
5002  }
5003  kltPolyCylinder = NULL;
5004  }
5005  kltCylinders.clear();
5006 
5007  // delete the structures used to display circles
5008  for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
5009  ci = *it;
5010  if (ci != NULL) {
5011  delete ci;
5012  }
5013  ci = NULL;
5014  }
5015  circles_disp.clear();
5016 
5017  firstInitialisation = true;
5018 
5019 #endif
5020 
5021  // Depth normal
5022  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
5023  delete m_depthNormalFaces[i];
5024  m_depthNormalFaces[i] = NULL;
5025  }
5026  m_depthNormalFaces.clear();
5027 
5028  // Depth dense
5029  for (size_t i = 0; i < m_depthDenseNormalFaces.size(); i++) {
5030  delete m_depthDenseNormalFaces[i];
5031  m_depthDenseNormalFaces[i] = NULL;
5032  }
5033  m_depthDenseNormalFaces.clear();
5034 
5035  faces.reset();
5036 
5037  loadModel(cad_name, verbose);
5038  initFromPose(I, cMo_);
5039 }
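// Editor's note: hedged sketch of re-initialising the wrapped trackers with a
// new CAD model at run time, through the public vpMbGenericTracker interface
// (the file name and pose variable are illustrative assumptions):
//
//   vpHomogeneousMatrix cMo_new;            // pose of the updated object
//   tracker.reInitModel(I, "object_v2.cao", cMo_new);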
5040 
5041 void vpMbGenericTracker::TrackerWrapper::resetTracker()
5042 {
5043  vpMbEdgeTracker::resetTracker();
5044 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5045  vpMbKltTracker::resetTracker();
5046 #endif
5047  vpMbDepthNormalTracker::resetTracker();
5048  vpMbDepthDenseTracker::resetTracker();
5049 }
5050 
5051 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &camera)
5052 {
5053  this->cam = camera;
5054 
5055  vpMbEdgeTracker::setCameraParameters(cam);
5056 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5057  vpMbKltTracker::setCameraParameters(cam);
5058 #endif
5059  vpMbDepthNormalTracker::setCameraParameters(cam);
5060  vpMbDepthDenseTracker::setCameraParameters(cam);
5061 }
5062 
5063 void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags) { vpMbEdgeTracker::setClipping(flags); }
5064 
5065 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
5066 {
5068 }
5069 
5070 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
5071 {
5073 }
5074 
5075 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
5076 {
5078 #ifdef VISP_HAVE_OGRE
5079  faces.getOgreContext()->setWindowName("TrackerWrapper");
5080 #endif
5081 }
5082 
5083 void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
5084 {
5085  bool performKltSetPose = false;
5086 
5087 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5088  if (m_trackerType & KLT_TRACKER) {
5089  performKltSetPose = true;
5090 
5091  if (useScanLine || clippingFlag > 3)
5092  cam.computeFov(I.getWidth(), I.getHeight());
5093 
5094  vpMbKltTracker::setPose(I, cdMo);
5095  }
5096 #endif
5097 
5098  if (!performKltSetPose) {
5099  cMo = cdMo;
5100  init(I);
5101  return;
5102  }
5103 
5104  if (m_trackerType & EDGE_TRACKER)
5105  resetMovingEdge();
5106 
5107  if (useScanLine) {
5108  faces.computeClippedPolygons(cMo, cam);
5109  faces.computeScanLineRender(cam, I.getWidth(), I.getHeight());
5110  }
5111 
5112 #if 0
5113  if (m_trackerType & EDGE_TRACKER) {
5114  initPyramid(I, Ipyramid);
5115 
5116  unsigned int i = (unsigned int) scales.size();
5117  do {
5118  i--;
5119  if(scales[i]){
5120  downScale(i);
5121  initMovingEdge(*Ipyramid[i], cMo);
5122  upScale(i);
5123  }
5124  } while(i != 0);
5125 
5126  cleanPyramid(Ipyramid);
5127  }
5128 #else
5129  if (m_trackerType & EDGE_TRACKER)
5130  initMovingEdge(I, cMo);
5131 #endif
5132 
5133  // Depth normal
5134  vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight());
5135 
5136  // Depth dense
5137  vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight());
5138 }
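// Editor's note: hedged sketch of forcing the pose from an external estimate,
// e.g. after a tracking failure (the pose-provider helper is hypothetical):
//
//   vpHomogeneousMatrix cdMo = externalPoseEstimate();  // hypothetical helper
//   tracker.setPose(I, cdMo);   // moving edges / KLT points are re-initialised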
5139 
5140 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
5141 {
5143 }
5144 
5145 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
5146 {
5147  vpMbEdgeTracker::setScanLineVisibilityTest(v);
5148 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5149  vpMbKltTracker::setScanLineVisibilityTest(v);
5150 #endif
5151  vpMbDepthNormalTracker::setScanLineVisibilityTest(v);
5152  vpMbDepthDenseTracker::setScanLineVisibilityTest(v);
5153 }
5154 
5155 void vpMbGenericTracker::TrackerWrapper::setTrackerType(const int type)
5156 {
5157  if ((type & (EDGE_TRACKER |
5158 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5159  KLT_TRACKER |
5160 #endif
5161  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5162  throw vpException(vpTrackingException::badValue, "bad value for tracker type: %d!", type);
5163  }
5164 
5165  m_trackerType = type;
5166 }
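// Editor's note: hedged sketch of selecting the features used by this wrapper;
// the type is usually chosen before loading the model (tracker instance is illustrative):
//
//   vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
//   tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);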
5167 
5168 void vpMbGenericTracker::TrackerWrapper::testTracking()
5169 {
5170  if (m_trackerType & EDGE_TRACKER) {
5171  vpMbEdgeTracker::testTracking();
5172  }
5173 }
5174 
5175 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> &
5176 #ifdef VISP_HAVE_PCL
5177  I
5178 #endif
5179 )
5180 {
5181  if ((m_trackerType & (EDGE_TRACKER
5182 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5183  | KLT_TRACKER
5184 #endif
5185  )) == 0) {
5186  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
5187  return;
5188  }
5189 
5190 #ifdef VISP_HAVE_PCL
5191  track(&I, nullptr);
5192 #endif
5193 }
5194 
5195 #ifdef VISP_HAVE_PCL
5196 void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
5197  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
5198 {
5199  if ((m_trackerType & (EDGE_TRACKER |
5200 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5201  KLT_TRACKER |
5202 #endif
5203  DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
5204  std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
5205  return;
5206  }
5207 
5208  if (m_trackerType & (EDGE_TRACKER
5209 #if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
5210  | KLT_TRACKER
5211 #endif
5212  ) &&
5213  ptr_I == NULL) {
5214  throw vpException(vpException::fatalError, "Image pointer is NULL!");
5215  }
5216 
5217  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && point_cloud == nullptr) {
5218  throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5219  }
5220 
5221  // Back-up cMo in case of exception
5222  vpHomogeneousMatrix cMo_1 = cMo;
5223  try {
5224  preTracking(ptr_I, point_cloud);
5225 
5226  try {
5227  computeVVS(ptr_I);
5228  } catch (...) {
5229  covarianceMatrix = -1;
5230  throw; // throw the original exception
5231  }
5232 
5233  if (m_trackerType == EDGE_TRACKER)
5234  testTracking();
5235 
5236  postTracking(ptr_I, point_cloud);
5237 
5238  } catch (const vpException &e) {
5239  std::cerr << "Exception: " << e.what() << std::endl;
5240  cMo = cMo_1;
5241  throw; // rethrowing the original exception
5242  }
5243 }
5244 #endif
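// Editor's note: hedged sketch of an RGB-D tracking loop that exercises the
// PCL overload above through the public vpMbGenericTracker interface (camera
// names, grabbing code and variables are assumptions, not taken from this file):
//
//   std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
//   std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
//   while (grabbing) {
//     mapOfImages["Camera1"] = &I_gray;          // image for edge/KLT features
//     mapOfPointClouds["Camera2"] = pointcloud;  // point cloud for depth features
//     tracker.track(mapOfImages, mapOfPointClouds);
//     tracker.getPose(cMo);
//   }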