36 #include <visp3/mbt/vpMbGenericTracker.h>
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/core/vpIoTools.h>
42 #include <visp3/mbt/vpMbtXmlGenericParser.h>
44 #ifdef VISP_HAVE_NLOHMANN_JSON
45 #include <nlohmann/json.hpp>
46 using json = nlohmann::json;
50 : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
51 m_percentageGdPt(0.4), m_referenceCameraName(
"Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
52 m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
62 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
71 : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
72 m_percentageGdPt(0.4), m_referenceCameraName(
"Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
73 m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
78 else if (nbCameras == 1) {
85 for (
unsigned int i = 1; i <= nbCameras; i++) {
101 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
110 : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
111 m_percentageGdPt(0.4), m_referenceCameraName(
"Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
112 m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
114 if (trackerTypes.empty()) {
118 if (trackerTypes.size() == 1) {
125 for (
size_t i = 1; i <= trackerTypes.size(); i++) {
126 std::stringstream ss;
141 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
150 const std::vector<int> &trackerTypes)
151 : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
152 m_percentageGdPt(0.4), m_referenceCameraName(
"Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
153 m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
155 if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
157 "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
160 for (
size_t i = 0; i < cameraNames.size(); i++) {
173 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
186 it->second =
nullptr;
210 double rawTotalProjectionError = 0.0;
211 unsigned int nbTotalFeaturesUsed = 0;
213 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
215 TrackerWrapper *tracker = it->second;
217 unsigned int nbFeaturesUsed = 0;
218 double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
220 if (nbFeaturesUsed > 0) {
221 nbTotalFeaturesUsed += nbFeaturesUsed;
222 rawTotalProjectionError += curProjError;
226 if (nbTotalFeaturesUsed > 0) {
227 return vpMath::deg(rawTotalProjectionError / (
double)nbTotalFeaturesUsed);
263 double rawTotalProjectionError = 0.0;
264 unsigned int nbTotalFeaturesUsed = 0;
266 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
268 TrackerWrapper *tracker = it->second;
270 double curProjError = tracker->getProjectionError();
271 unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
273 if (nbFeaturesUsed > 0) {
274 nbTotalFeaturesUsed += nbFeaturesUsed;
275 rawTotalProjectionError += (
vpMath::rad(curProjError) * nbFeaturesUsed);
279 if (nbTotalFeaturesUsed > 0) {
300 double normRes_1 = -1;
301 unsigned int iter = 0;
319 std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
324 mapOfVelocityTwist[it->first] = cVo;
328 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
342 bool reStartFromLastIncrement =
false;
344 if (reStartFromLastIncrement) {
345 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
347 TrackerWrapper *tracker = it->second;
351 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
354 tracker->ctTc0 = c_curr_tTc_curr0;
359 if (!reStartFromLastIncrement) {
364 if (!isoJoIdentity) {
367 LVJ_true = (
m_L * (cVo *
oJo));
381 unsigned int rank = (
m_L * cVo).kernel(K);
390 isoJoIdentity =
false;
399 unsigned int start_index = 0;
400 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
402 TrackerWrapper *tracker = it->second;
405 for (
unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
406 double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
407 W_true[start_index + i] = wi;
413 for (
unsigned int j = 0; j <
m_L.
getCols(); j++) {
414 m_L[start_index + i][j] *= wi;
418 start_index += tracker->m_error_edge.
getRows();
421 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
423 for (
unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
424 double wi = tracker->m_w_klt[i] * factorKlt;
425 W_true[start_index + i] = wi;
431 for (
unsigned int j = 0; j <
m_L.
getCols(); j++) {
432 m_L[start_index + i][j] *= wi;
436 start_index += tracker->m_error_klt.
getRows();
441 for (
unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
442 double wi = tracker->m_w_depthNormal[i] * factorDepth;
443 W_true[start_index + i] = wi;
449 for (
unsigned int j = 0; j <
m_L.
getCols(); j++) {
450 m_L[start_index + i][j] *= wi;
454 start_index += tracker->m_error_depthNormal.
getRows();
458 for (
unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
459 double wi = tracker->m_w_depthDense[i] * factorDepthDense;
460 W_true[start_index + i] = wi;
466 for (
unsigned int j = 0; j <
m_L.
getCols(); j++) {
467 m_L[start_index + i][j] *= wi;
471 start_index += tracker->m_error_depthDense.
getRows();
476 normRes = sqrt(num / den);
484 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
485 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
487 TrackerWrapper *tracker = it->second;
491 tracker->ctTc0 = c_curr_tTc_curr0;
496 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
498 TrackerWrapper *tracker = it->second;
507 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
509 TrackerWrapper *tracker = it->second;
513 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
528 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
530 TrackerWrapper *tracker = it->second;
533 tracker->updateMovingEdgeWeights();
545 unsigned int nbFeatures = 0;
547 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
549 TrackerWrapper *tracker = it->second;
550 tracker->computeVVSInit(mapOfImages[it->first]);
552 nbFeatures += tracker->m_error.getRows();
566 "computeVVSInteractionMatrixAndR"
567 "esidu() should not be called");
572 std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
574 unsigned int start_index = 0;
576 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
578 TrackerWrapper *tracker = it->second;
581 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
584 tracker->ctTc0 = c_curr_tTc_curr0;
587 tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
589 m_L.
insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
592 start_index += tracker->m_error.getRows();
598 unsigned int start_index = 0;
600 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
602 TrackerWrapper *tracker = it->second;
603 tracker->computeVVSWeights();
606 start_index += tracker->m_w.getRows();
625 bool displayFullModel)
629 TrackerWrapper *tracker = it->second;
630 tracker->display(I, cMo, cam, col, thickness, displayFullModel);
652 bool displayFullModel)
656 TrackerWrapper *tracker = it->second;
657 tracker->display(I, cMo, cam, col, thickness, displayFullModel);
683 unsigned int thickness,
bool displayFullModel)
686 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
687 it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
690 it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
693 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
717 bool displayFullModel)
720 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
721 it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
724 it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
727 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
744 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
745 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
746 const vpColor &col,
unsigned int thickness,
bool displayFullModel)
750 it_img != mapOfImages.end(); ++it_img) {
751 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it_img->first);
752 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
753 std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
755 if (it_tracker !=
m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
756 it_cam != mapOfCameraParameters.end()) {
757 TrackerWrapper *tracker = it_tracker->second;
758 tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
761 std::cerr <<
"Missing elements for image:" << it_img->first <<
"!" << std::endl;
778 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
779 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
780 const vpColor &col,
unsigned int thickness,
bool displayFullModel)
783 for (std::map<std::string,
const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
784 it_img != mapOfImages.end(); ++it_img) {
785 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it_img->first);
786 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
787 std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
789 if (it_tracker !=
m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
790 it_cam != mapOfCameraParameters.end()) {
791 TrackerWrapper *tracker = it_tracker->second;
792 tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
795 std::cerr <<
"Missing elements for image:" << it_img->first <<
"!" << std::endl;
807 std::vector<std::string> cameraNames;
809 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
811 cameraNames.push_back(it_tracker->first);
833 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
834 it->second->getCameraParameters(cam1);
837 it->second->getCameraParameters(cam2);
840 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
853 mapOfCameraParameters.clear();
855 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
858 it->second->getCameraParameters(cam_);
859 mapOfCameraParameters[it->first] = cam_;
871 std::map<std::string, int> trackingTypes;
873 TrackerWrapper *traker;
874 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
876 traker = it_tracker->second;
877 trackingTypes[it_tracker->first] = traker->getTrackerType();
880 return trackingTypes;
894 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
895 clippingFlag1 = it->second->getClipping();
898 clippingFlag2 = it->second->getClipping();
901 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
913 mapOfClippingFlags.clear();
915 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
917 TrackerWrapper *tracker = it->second;
918 mapOfClippingFlags[it->first] = tracker->getClipping();
931 return it->second->getFaces();
945 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
947 return it->second->getFaces();
950 std::cerr <<
"The camera: " << cameraName <<
" cannot be found!" << std::endl;
954 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
962 TrackerWrapper *tracker = it->second;
963 return tracker->getFeaturesCircle();
978 TrackerWrapper *tracker = it->second;
979 return tracker->getFeaturesKltCylinder();
994 TrackerWrapper *tracker = it->second;
995 return tracker->getFeaturesKlt();
1035 return it->second->getFeaturesForDisplay();
1041 return std::vector<std::vector<double> >();
1072 mapOfFeatures.clear();
1074 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1076 mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1091 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
1104 TrackerWrapper *tracker = it->second;
1105 return tracker->getKltImagePoints();
1111 return std::vector<vpImagePoint>();
1126 TrackerWrapper *tracker = it->second;
1127 return tracker->getKltImagePointsWithId();
1133 return std::map<int, vpImagePoint>();
1145 TrackerWrapper *tracker = it->second;
1146 return tracker->getKltMaskBorder();
1164 TrackerWrapper *tracker = it->second;
1165 return tracker->getKltNbPoints();
1184 TrackerWrapper *tracker;
1185 tracker = it_tracker->second;
1186 return tracker->getKltOpencv();
1206 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1207 klt1 = it->second->getKltOpencv();
1210 klt2 = it->second->getKltOpencv();
1213 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
1227 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1229 TrackerWrapper *tracker = it->second;
1230 mapOfKlts[it->first] = tracker->getKltOpencv();
1243 TrackerWrapper *tracker = it->second;
1244 return tracker->getKltPoints();
1250 return std::vector<cv::Point2f>();
1278 it->second->getLcircle(circlesList, level);
1299 unsigned int level)
const
1301 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
1303 it->second->getLcircle(circlesList, level);
1306 std::cerr <<
"The camera: " << cameraName <<
" does not exist!" << std::endl;
1326 it->second->getLcylinder(cylindersList, level);
1347 unsigned int level)
const
1349 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
1351 it->second->getLcylinder(cylindersList, level);
1354 std::cerr <<
"The camera: " << cameraName <<
" does not exist!" << std::endl;
1375 it->second->getLline(linesList, level);
1396 unsigned int level)
const
1398 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
1400 it->second->getLline(linesList, level);
1403 std::cerr <<
"The camera: " << cameraName <<
" does not exist!" << std::endl;
1439 bool displayFullModel)
1444 return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1450 return std::vector<std::vector<double> >();
1479 const std::map<std::string, unsigned int> &mapOfwidths,
1480 const std::map<std::string, unsigned int> &mapOfheights,
1481 const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1482 const std::map<std::string, vpCameraParameters> &mapOfCams,
1483 bool displayFullModel)
1486 mapOfModels.clear();
1488 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1490 std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1491 std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1492 std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1493 std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1495 if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
1496 findCam != mapOfCams.end()) {
1497 mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
1498 findCam->second, displayFullModel);
1513 return it->second->getMovingEdge();
1533 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1534 it->second->getMovingEdge(me1);
1537 it->second->getMovingEdge(me2);
1540 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
1552 mapOfMovingEdges.clear();
1554 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1556 TrackerWrapper *tracker = it->second;
1557 mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1582 unsigned int nbGoodPoints = 0;
1585 nbGoodPoints = it->second->getNbPoints(level);
1591 return nbGoodPoints;
1610 mapOfNbPoints.clear();
1612 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1614 TrackerWrapper *tracker = it->second;
1615 mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1628 return it->second->getNbPolygon();
1642 mapOfNbPolygons.clear();
1644 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1646 TrackerWrapper *tracker = it->second;
1647 mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1665 return it->second->getPolygon(index);
1685 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
1687 return it->second->getPolygon(index);
1690 std::cerr <<
"The camera: " << cameraName <<
" does not exist!" << std::endl;
1709 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1712 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1716 TrackerWrapper *tracker = it->second;
1717 polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1723 return polygonFaces;
1744 std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1745 bool orderPolygons,
bool useVisibility,
bool clipPolygon)
1747 mapOfPolygons.clear();
1748 mapOfPoints.clear();
1750 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1752 TrackerWrapper *tracker = it->second;
1753 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1754 tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1756 mapOfPolygons[it->first] = polygonFaces.first;
1757 mapOfPoints[it->first] = polygonFaces.second;
1774 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1775 it->second->getPose(c1Mo);
1778 it->second->getPose(c2Mo);
1781 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
1794 mapOfCameraPoses.clear();
1796 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1798 TrackerWrapper *tracker = it->second;
1799 tracker->getPose(mapOfCameraPoses[it->first]);
1815 TrackerWrapper *tracker = it->second;
1816 return tracker->getTrackerType();
1826 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1828 TrackerWrapper *tracker = it->second;
1835 double ,
int ,
const std::string & )
1840 #ifdef VISP_HAVE_MODULE_GUI
1885 const std::string &initFile1,
const std::string &initFile2,
bool displayHelp,
1889 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1890 TrackerWrapper *tracker = it->second;
1891 tracker->initClick(I1, initFile1, displayHelp, T1);
1895 tracker = it->second;
1896 tracker->initClick(I2, initFile2, displayHelp, T2);
1900 tracker = it->second;
1903 tracker->getPose(
m_cMo);
1908 "Cannot initClick()! Require two cameras but there are %d cameras!",
m_mapOfTrackers.size());
1955 const std::string &initFile1,
const std::string &initFile2,
bool displayHelp,
1959 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1960 TrackerWrapper *tracker = it->second;
1961 tracker->initClick(I_color1, initFile1, displayHelp, T1);
1965 tracker = it->second;
1966 tracker->initClick(I_color2, initFile2, displayHelp, T2);
1970 tracker = it->second;
1973 tracker->getPose(
m_cMo);
1978 "Cannot initClick()! Require two cameras but there are %d cameras!",
m_mapOfTrackers.size());
2025 const std::map<std::string, std::string> &mapOfInitFiles,
bool displayHelp,
2026 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2029 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2031 std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(
m_referenceCameraName);
2033 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2034 TrackerWrapper *tracker = it_tracker->second;
2035 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2036 if (it_T != mapOfT.end())
2037 tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2039 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2040 tracker->getPose(
m_cMo);
2047 std::vector<std::string> vectorOfMissingCameraPoses;
2052 it_img = mapOfImages.find(it_tracker->first);
2053 it_initFile = mapOfInitFiles.find(it_tracker->first);
2055 if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2057 TrackerWrapper *tracker = it_tracker->second;
2058 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2061 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2067 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2068 it != vectorOfMissingCameraPoses.end(); ++it) {
2069 it_img = mapOfImages.find(*it);
2070 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2080 "Missing image or missing camera transformation "
2081 "matrix! Cannot set the pose for camera: %s!",
2130 const std::map<std::string, std::string> &mapOfInitFiles,
bool displayHelp,
2131 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2134 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
2135 std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(
m_referenceCameraName);
2137 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2138 TrackerWrapper *tracker = it_tracker->second;
2139 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2140 if (it_T != mapOfT.end())
2141 tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2143 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2144 tracker->getPose(
m_cMo);
2151 std::vector<std::string> vectorOfMissingCameraPoses;
2156 it_img = mapOfColorImages.find(it_tracker->first);
2157 it_initFile = mapOfInitFiles.find(it_tracker->first);
2159 if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2161 TrackerWrapper *tracker = it_tracker->second;
2162 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2165 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2171 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2172 it != vectorOfMissingCameraPoses.end(); ++it) {
2173 it_img = mapOfColorImages.find(*it);
2174 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2185 "Missing image or missing camera transformation "
2186 "matrix! Cannot set the pose for camera: %s!",
2194 const int ,
const std::string & )
2239 const std::string &initFile1,
const std::string &initFile2)
2242 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2243 TrackerWrapper *tracker = it->second;
2244 tracker->initFromPoints(I1, initFile1);
2248 tracker = it->second;
2249 tracker->initFromPoints(I2, initFile2);
2253 tracker = it->second;
2256 tracker->getPose(
m_cMo);
2259 tracker->getCameraParameters(
m_cam);
2264 "Cannot initFromPoints()! Require two cameras but "
2265 "there are %d cameras!",
2300 const std::string &initFile1,
const std::string &initFile2)
2303 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2304 TrackerWrapper *tracker = it->second;
2305 tracker->initFromPoints(I_color1, initFile1);
2309 tracker = it->second;
2310 tracker->initFromPoints(I_color2, initFile2);
2314 tracker = it->second;
2317 tracker->getPose(
m_cMo);
2320 tracker->getCameraParameters(
m_cam);
2325 "Cannot initFromPoints()! Require two cameras but "
2326 "there are %d cameras!",
2332 const std::map<std::string, std::string> &mapOfInitPoints)
2336 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2338 std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(
m_referenceCameraName);
2340 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2341 TrackerWrapper *tracker = it_tracker->second;
2342 tracker->initFromPoints(*it_img->second, it_initPoints->second);
2343 tracker->getPose(
m_cMo);
2350 std::vector<std::string> vectorOfMissingCameraPoints;
2354 it_img = mapOfImages.find(it_tracker->first);
2355 it_initPoints = mapOfInitPoints.find(it_tracker->first);
2357 if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2359 it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2362 vectorOfMissingCameraPoints.push_back(it_tracker->first);
2366 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2367 it != vectorOfMissingCameraPoints.end(); ++it) {
2368 it_img = mapOfImages.find(*it);
2369 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2378 "Missing image or missing camera transformation "
2379 "matrix! Cannot init the pose for camera: %s!",
2386 const std::map<std::string, std::string> &mapOfInitPoints)
2390 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
2391 std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(
m_referenceCameraName);
2393 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
2394 it_initPoints != mapOfInitPoints.end()) {
2395 TrackerWrapper *tracker = it_tracker->second;
2396 tracker->initFromPoints(*it_img->second, it_initPoints->second);
2397 tracker->getPose(
m_cMo);
2404 std::vector<std::string> vectorOfMissingCameraPoints;
2408 it_img = mapOfColorImages.find(it_tracker->first);
2409 it_initPoints = mapOfInitPoints.find(it_tracker->first);
2411 if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2413 it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2416 vectorOfMissingCameraPoints.push_back(it_tracker->first);
2420 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2421 it != vectorOfMissingCameraPoints.end(); ++it) {
2422 it_img = mapOfColorImages.find(*it);
2423 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2432 "Missing image or missing camera transformation "
2433 "matrix! Cannot init the pose for camera: %s!",
2456 const std::string &initFile1,
const std::string &initFile2)
2459 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2460 TrackerWrapper *tracker = it->second;
2461 tracker->initFromPose(I1, initFile1);
2465 tracker = it->second;
2466 tracker->initFromPose(I2, initFile2);
2470 tracker = it->second;
2473 tracker->getPose(
m_cMo);
2476 tracker->getCameraParameters(
m_cam);
2481 "Cannot initFromPose()! Require two cameras but there "
2499 const std::string &initFile1,
const std::string &initFile2)
2502 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2503 TrackerWrapper *tracker = it->second;
2504 tracker->initFromPose(I_color1, initFile1);
2508 tracker = it->second;
2509 tracker->initFromPose(I_color2, initFile2);
2513 tracker = it->second;
2516 tracker->getPose(
m_cMo);
2519 tracker->getCameraParameters(
m_cam);
2524 "Cannot initFromPose()! Require two cameras but there "
2545 const std::map<std::string, std::string> &mapOfInitPoses)
2549 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2551 std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(
m_referenceCameraName);
2553 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2554 TrackerWrapper *tracker = it_tracker->second;
2555 tracker->initFromPose(*it_img->second, it_initPose->second);
2556 tracker->getPose(
m_cMo);
2563 std::vector<std::string> vectorOfMissingCameraPoses;
2567 it_img = mapOfImages.find(it_tracker->first);
2568 it_initPose = mapOfInitPoses.find(it_tracker->first);
2570 if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2572 it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2575 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2579 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2580 it != vectorOfMissingCameraPoses.end(); ++it) {
2581 it_img = mapOfImages.find(*it);
2582 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2591 "Missing image or missing camera transformation "
2592 "matrix! Cannot init the pose for camera: %s!",
2613 const std::map<std::string, std::string> &mapOfInitPoses)
2617 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
2618 std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(
m_referenceCameraName);
2620 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2621 TrackerWrapper *tracker = it_tracker->second;
2622 tracker->initFromPose(*it_img->second, it_initPose->second);
2623 tracker->getPose(
m_cMo);
2630 std::vector<std::string> vectorOfMissingCameraPoses;
2634 it_img = mapOfColorImages.find(it_tracker->first);
2635 it_initPose = mapOfInitPoses.find(it_tracker->first);
2637 if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2639 it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2642 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2646 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2647 it != vectorOfMissingCameraPoses.end(); ++it) {
2648 it_img = mapOfColorImages.find(*it);
2649 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2658 "Missing image or missing camera transformation "
2659 "matrix! Cannot init the pose for camera: %s!",
2679 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2680 it->second->initFromPose(I1, c1Mo);
2684 it->second->initFromPose(I2, c2Mo);
2690 "This method requires 2 cameras but there are %d cameras!",
m_mapOfTrackers.size());
2708 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2709 it->second->initFromPose(I_color1, c1Mo);
2713 it->second->initFromPose(I_color2, c2Mo);
2719 "This method requires 2 cameras but there are %d cameras!",
m_mapOfTrackers.size());
2737 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2741 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2743 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
2745 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2746 TrackerWrapper *tracker = it_tracker->second;
2747 tracker->initFromPose(*it_img->second, it_camPose->second);
2748 tracker->getPose(
m_cMo);
2755 std::vector<std::string> vectorOfMissingCameraPoses;
2759 it_img = mapOfImages.find(it_tracker->first);
2760 it_camPose = mapOfCameraPoses.find(it_tracker->first);
2762 if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2764 it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2767 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2771 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2772 it != vectorOfMissingCameraPoses.end(); ++it) {
2773 it_img = mapOfImages.find(*it);
2774 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2783 "Missing image or missing camera transformation "
2784 "matrix! Cannot set the pose for camera: %s!",
2804 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2808 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
2809 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
2811 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2812 TrackerWrapper *tracker = it_tracker->second;
2813 tracker->initFromPose(*it_img->second, it_camPose->second);
2814 tracker->getPose(
m_cMo);
2821 std::vector<std::string> vectorOfMissingCameraPoses;
2825 it_img = mapOfColorImages.find(it_tracker->first);
2826 it_camPose = mapOfCameraPoses.find(it_tracker->first);
2828 if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2830 it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2833 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2837 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2838 it != vectorOfMissingCameraPoses.end(); ++it) {
2839 it_img = mapOfColorImages.find(*it);
2840 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2849 "Missing image or missing camera transformation "
2850 "matrix! Cannot set the pose for camera: %s!",
2870 if (extension ==
".xml") {
2873 #ifdef VISP_HAVE_NLOHMANN_JSON
2874 else if (extension ==
".json") {
2896 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2898 TrackerWrapper *tracker = it->second;
2899 tracker->loadConfigFile(configFile, verbose);
2912 #ifdef VISP_HAVE_NLOHMANN_JSON
2924 std::ifstream jsonFile(settingsFile);
2925 if (!jsonFile.good()) {
2930 settings = json::parse(jsonFile);
2932 catch (json::parse_error &e) {
2933 std::stringstream msg;
2934 msg <<
"Could not parse JSON file : \n";
2936 msg << e.what() << std::endl;
2937 msg <<
"Byte position of error: " << e.byte;
2942 if (!settings.contains(
"version")) {
2945 else if (settings[
"version"].get<std::string>() != MBT_JSON_SETTINGS_VERSION) {
2952 trackersJson = settings.at(
"trackers");
2957 bool refCameraFound =
false;
2959 for (
const auto &it : trackersJson.items()) {
2960 const std::string cameraName = it.key();
2961 const json trackerJson = it.value();
2965 if (trackerJson.contains(
"camTref")) {
2972 std::cout <<
"Loading tracker " << cameraName << std::endl <<
" with settings: " << std::endl << trackerJson.dump(2);
2976 std::cout <<
"Updating an already existing tracker with JSON configuration." << std::endl;
2982 std::cout <<
"Creating a new tracker from JSON configuration." << std::endl;
2984 TrackerWrapper *tw =
new TrackerWrapper();
2988 const auto unusedCamIt = std::remove(unusedCameraNames.begin(), unusedCameraNames.end(), cameraName);
2989 unusedCameraNames.erase(unusedCamIt, unusedCameraNames.end());
2991 if (!refCameraFound) {
2996 for (
const std::string &oldCameraName : unusedCameraNames) {
3004 refTracker->getCameraParameters(
m_cam);
3008 this->
distNearClip = refTracker->getNearClippingDistance();
3009 this->
distFarClip = refTracker->getFarClippingDistance();
3012 if (settings.contains(
"display")) {
3013 const json displayJson = settings[
"display"];
3017 if (settings.contains(
"visibilityTest")) {
3018 const json visJson = settings[
"visibilityTest"];
3024 if (settings.contains(
"vvs")) {
3025 const json vvsJson = settings[
"vvs"];
3026 setLambda(vvsJson.value(
"lambda", this->m_lambda));
3027 setMaxIter(vvsJson.value(
"maxIter", this->m_maxIter));
3028 setInitialMu(vvsJson.value(
"initialMu", this->m_initialMu));
3032 if (settings.contains(
"model")) {
3033 loadModel(settings.at(
"model").get<std::string>(), verbose);
3048 j[
"version"] = MBT_JSON_SETTINGS_VERSION;
3052 trackers[kv.first] = *(kv.second);
3055 trackers[kv.first][
"camTref"] = itTransformation->second;
3058 j[
"trackers"] = trackers;
3065 std::ofstream f(settingsFile);
3067 const unsigned indentLevel = 4;
3068 f << j.dump(indentLevel);
3098 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3099 TrackerWrapper *tracker = it_tracker->second;
3100 tracker->loadConfigFile(configFile1, verbose);
3103 tracker = it_tracker->second;
3104 tracker->loadConfigFile(configFile2, verbose);
3131 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3133 TrackerWrapper *tracker = it_tracker->second;
3135 std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
3136 if (it_config != mapOfConfigFiles.end()) {
3137 tracker->loadConfigFile(it_config->second, verbose);
3141 it_tracker->first.c_str());
3148 TrackerWrapper *tracker = it->second;
3149 tracker->getCameraParameters(
m_cam);
3182 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3184 TrackerWrapper *tracker = it->second;
3185 tracker->loadModel(modelFile, verbose, T);
3218 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3219 TrackerWrapper *tracker = it_tracker->second;
3220 tracker->loadModel(modelFile1, verbose, T1);
3223 tracker = it_tracker->second;
3224 tracker->loadModel(modelFile2, verbose, T2);
3247 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3249 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3251 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
3253 if (it_model != mapOfModelFiles.end()) {
3254 TrackerWrapper *tracker = it_tracker->second;
3255 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3257 if (it_T != mapOfT.end())
3258 tracker->loadModel(it_model->second, verbose, it_T->second);
3260 tracker->loadModel(it_model->second, verbose);
3264 it_tracker->first.c_str());
3269 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
3271 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3273 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3275 TrackerWrapper *tracker = it->second;
3276 tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3282 std::map<std::string,
const std::vector<vpColVector> *> &mapOfPointClouds,
3283 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3284 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3286 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3288 TrackerWrapper *tracker = it->second;
3289 tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3290 mapOfPointCloudHeights[it->first]);
3295 std::map<std::string, const vpMatrix *> &mapOfPointClouds,
3296 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3297 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3299 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3301 TrackerWrapper *tracker = it->second;
3302 tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3303 mapOfPointCloudHeights[it->first]);
3328 TrackerWrapper *tracker = it_tracker->second;
3329 tracker->reInitModel(I, cad_name, cMo, verbose, T);
3332 tracker->getPose(
m_cMo);
3362 TrackerWrapper *tracker = it_tracker->second;
3363 tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3366 tracker->getPose(
m_cMo);
3396 const std::string &cad_name1,
const std::string &cad_name2,
3401 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3403 it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3407 it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3412 it_tracker->second->getPose(
m_cMo);
3443 const std::string &cad_name1,
const std::string &cad_name2,
3448 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3450 it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3454 it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3459 it_tracker->second->getPose(
m_cMo);
3484 const std::map<std::string, std::string> &mapOfModelFiles,
3485 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
bool verbose,
3486 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3489 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3491 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(
m_referenceCameraName);
3492 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
3494 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3495 it_camPose != mapOfCameraPoses.end()) {
3496 TrackerWrapper *tracker = it_tracker->second;
3497 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3498 if (it_T != mapOfT.end())
3499 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3501 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3504 tracker->getPose(
m_cMo);
3510 std::vector<std::string> vectorOfMissingCameras;
3513 it_img = mapOfImages.find(it_tracker->first);
3514 it_model = mapOfModelFiles.find(it_tracker->first);
3515 it_camPose = mapOfCameraPoses.find(it_tracker->first);
3517 if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3518 TrackerWrapper *tracker = it_tracker->second;
3519 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3522 vectorOfMissingCameras.push_back(it_tracker->first);
3527 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3529 it_img = mapOfImages.find(*it);
3530 it_model = mapOfModelFiles.find(*it);
3531 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3534 if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3537 m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3559 const std::map<std::string, std::string> &mapOfModelFiles,
3560 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
bool verbose,
3561 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3564 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
3565 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(
m_referenceCameraName);
3566 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
3568 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3569 it_camPose != mapOfCameraPoses.end()) {
3570 TrackerWrapper *tracker = it_tracker->second;
3571 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3572 if (it_T != mapOfT.end())
3573 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3575 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3578 tracker->getPose(
m_cMo);
3584 std::vector<std::string> vectorOfMissingCameras;
3587 it_img = mapOfColorImages.find(it_tracker->first);
3588 it_model = mapOfModelFiles.find(it_tracker->first);
3589 it_camPose = mapOfCameraPoses.find(it_tracker->first);
3591 if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3592 it_camPose != mapOfCameraPoses.end()) {
3593 TrackerWrapper *tracker = it_tracker->second;
3594 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3597 vectorOfMissingCameras.push_back(it_tracker->first);
3602 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3604 it_img = mapOfColorImages.find(*it);
3605 it_model = mapOfModelFiles.find(*it);
3606 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3609 if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3612 m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3629 #ifdef VISP_HAVE_OGRE
3656 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
3663 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3665 TrackerWrapper *tracker = it->second;
3666 tracker->resetTracker();
3683 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3685 TrackerWrapper *tracker = it->second;
3686 tracker->setAngleAppear(a);
3705 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3706 it->second->setAngleAppear(a1);
3709 it->second->setAngleAppear(a2);
3736 for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3737 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
3740 TrackerWrapper *tracker = it_tracker->second;
3741 tracker->setAngleAppear(it->second);
3763 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3765 TrackerWrapper *tracker = it->second;
3766 tracker->setAngleDisappear(a);
3785 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3786 it->second->setAngleDisappear(a1);
3789 it->second->setAngleDisappear(a2);
3816 for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3817 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
3820 TrackerWrapper *tracker = it_tracker->second;
3821 tracker->setAngleDisappear(it->second);
3839 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3841 TrackerWrapper *tracker = it->second;
3842 tracker->setCameraParameters(camera);
3857 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3858 it->second->setCameraParameters(camera1);
3861 it->second->setCameraParameters(camera2);
3865 it->second->getCameraParameters(
m_cam);
3887 for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3888 it != mapOfCameraParameters.end(); ++it) {
3889 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
3892 TrackerWrapper *tracker = it_tracker->second;
3893 tracker->setCameraParameters(it->second);
3916 it->second = cameraTransformationMatrix;
3931 const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3933 for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3934 it != mapOfTransformationMatrix.end(); ++it) {
3935 std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3939 it_camTrans->second = it->second;
3957 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3959 TrackerWrapper *tracker = it->second;
3960 tracker->setClipping(flags);
3977 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3978 it->second->setClipping(flags1);
3981 it->second->setClipping(flags2);
3992 std::stringstream ss;
3993 ss <<
"Require two cameras! There are " <<
m_mapOfTrackers.size() <<
" cameras!";
4007 for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
4008 it != mapOfClippingFlags.end(); ++it) {
4009 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4012 TrackerWrapper *tracker = it_tracker->second;
4013 tracker->setClipping(it->second);
4033 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4035 TrackerWrapper *tracker = it->second;
4036 tracker->setDepthDenseFilteringMaxDistance(maxDistance);
4050 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4052 TrackerWrapper *tracker = it->second;
4053 tracker->setDepthDenseFilteringMethod(method);
4068 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4070 TrackerWrapper *tracker = it->second;
4071 tracker->setDepthDenseFilteringMinDistance(minDistance);
4086 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4088 TrackerWrapper *tracker = it->second;
4089 tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
4103 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4105 TrackerWrapper *tracker = it->second;
4106 tracker->setDepthDenseSamplingStep(stepX, stepY);
4119 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4121 TrackerWrapper *tracker = it->second;
4122 tracker->setDepthNormalFaceCentroidMethod(method);
4136 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4138 TrackerWrapper *tracker = it->second;
4139 tracker->setDepthNormalFeatureEstimationMethod(method);
4152 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4154 TrackerWrapper *tracker = it->second;
4155 tracker->setDepthNormalPclPlaneEstimationMethod(method);
4168 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4170 TrackerWrapper *tracker = it->second;
4171 tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
4184 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4186 TrackerWrapper *tracker = it->second;
4187 tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
4201 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4203 TrackerWrapper *tracker = it->second;
4204 tracker->setDepthNormalSamplingStep(stepX, stepY);
4230 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4232 TrackerWrapper *tracker = it->second;
4233 tracker->setDisplayFeatures(displayF);
4248 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4250 TrackerWrapper *tracker = it->second;
4251 tracker->setFarClippingDistance(dist);
4266 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4267 it->second->setFarClippingDistance(dist1);
4270 it->second->setFarClippingDistance(dist2);
4274 distFarClip = it->second->getFarClippingDistance();
4293 for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4295 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4298 TrackerWrapper *tracker = it_tracker->second;
4299 tracker->setFarClippingDistance(it->second);
4318 std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4319 if (it_factor != mapOfFeatureFactors.end()) {
4320 it->second = it_factor->second;
4344 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4346 TrackerWrapper *tracker = it->second;
4347 tracker->setGoodMovingEdgesRatioThreshold(threshold);
4351 #ifdef VISP_HAVE_OGRE
4365 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4367 TrackerWrapper *tracker = it->second;
4368 tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4385 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4387 TrackerWrapper *tracker = it->second;
4388 tracker->setNbRayCastingAttemptsForVisibility(attempts);
4393 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4403 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4405 TrackerWrapper *tracker = it->second;
4406 tracker->setKltOpencv(t);
4421 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4422 it->second->setKltOpencv(t1);
4425 it->second->setKltOpencv(t2);
4440 for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4441 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4444 TrackerWrapper *tracker = it_tracker->second;
4445 tracker->setKltOpencv(it->second);
4461 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4463 TrackerWrapper *tracker = it->second;
4464 tracker->setKltThresholdAcceptation(th);
4483 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4485 TrackerWrapper *tracker = it->second;
4486 tracker->setLod(useLod, name);
4490 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4500 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4502 TrackerWrapper *tracker = it->second;
4503 tracker->setKltMaskBorder(e);
4518 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4519 it->second->setKltMaskBorder(e1);
4523 it->second->setKltMaskBorder(e2);
4538 for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4540 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4543 TrackerWrapper *tracker = it_tracker->second;
4544 tracker->setKltMaskBorder(it->second);
4559 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4561 TrackerWrapper *tracker = it->second;
4562 tracker->setMask(mask);
4579 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4581 TrackerWrapper *tracker = it->second;
4582 tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4598 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4600 TrackerWrapper *tracker = it->second;
4601 tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4614 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4616 TrackerWrapper *tracker = it->second;
4617 tracker->setMovingEdge(me);
4633 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4634 it->second->setMovingEdge(me1);
4638 it->second->setMovingEdge(me2);
4653 for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4654 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4657 TrackerWrapper *tracker = it_tracker->second;
4658 tracker->setMovingEdge(it->second);
4674 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4676 TrackerWrapper *tracker = it->second;
4677 tracker->setNearClippingDistance(dist);
4692 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4693 it->second->setNearClippingDistance(dist1);
4697 it->second->setNearClippingDistance(dist2);
4720 for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4721 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4724 TrackerWrapper *tracker = it_tracker->second;
4725 tracker->setNearClippingDistance(it->second);
4750 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4752 TrackerWrapper *tracker = it->second;
4753 tracker->setOgreShowConfigDialog(showConfigDialog);
4771 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4773 TrackerWrapper *tracker = it->second;
4774 tracker->setOgreVisibilityTest(v);
4777 #ifdef VISP_HAVE_OGRE
4778 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4780 TrackerWrapper *tracker = it->second;
4781 tracker->faces.getOgreContext()->setWindowName(
"Multi Generic MBT (" + it->first +
")");
4797 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4799 TrackerWrapper *tracker = it->second;
4800 tracker->setOptimizationMethod(opt);
4820 "to be configured with only one camera!");
4827 TrackerWrapper *tracker = it->second;
4828 tracker->setPose(I, cdMo);
4852 "to be configured with only one camera!");
4859 TrackerWrapper *tracker = it->second;
4861 tracker->setPose(
m_I, cdMo);
4884 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4885 it->second->setPose(I1, c1Mo);
4889 it->second->setPose(I2, c2Mo);
4894 it->second->getPose(
m_cMo);
4922 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4923 it->second->setPose(I_color1, c1Mo);
4927 it->second->setPose(I_color2, c2Mo);
4932 it->second->getPose(
m_cMo);
4961 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4965 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4967 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
4969 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4970 TrackerWrapper *tracker = it_tracker->second;
4971 tracker->setPose(*it_img->second, it_camPose->second);
4972 tracker->getPose(
m_cMo);
4979 std::vector<std::string> vectorOfMissingCameraPoses;
4984 it_img = mapOfImages.find(it_tracker->first);
4985 it_camPose = mapOfCameraPoses.find(it_tracker->first);
4987 if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4989 TrackerWrapper *tracker = it_tracker->second;
4990 tracker->setPose(*it_img->second, it_camPose->second);
4993 vectorOfMissingCameraPoses.push_back(it_tracker->first);
4998 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4999 it != vectorOfMissingCameraPoses.end(); ++it) {
5000 it_img = mapOfImages.find(*it);
5001 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5010 "Missing image or missing camera transformation "
5011 "matrix! Cannot set pose for camera: %s!",
5033 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
5037 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
5038 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
5040 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5041 TrackerWrapper *tracker = it_tracker->second;
5042 tracker->setPose(*it_img->second, it_camPose->second);
5043 tracker->getPose(
m_cMo);
5050 std::vector<std::string> vectorOfMissingCameraPoses;
5055 it_img = mapOfColorImages.find(it_tracker->first);
5056 it_camPose = mapOfCameraPoses.find(it_tracker->first);
5058 if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5060 TrackerWrapper *tracker = it_tracker->second;
5061 tracker->setPose(*it_img->second, it_camPose->second);
5064 vectorOfMissingCameraPoses.push_back(it_tracker->first);
5069 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5070 it != vectorOfMissingCameraPoses.end(); ++it) {
5071 it_img = mapOfColorImages.find(*it);
5072 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5081 "Missing image or missing camera transformation "
5082 "matrix! Cannot set pose for camera: %s!",
5106 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5108 TrackerWrapper *tracker = it->second;
5109 tracker->setProjectionErrorComputation(flag);
5120 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5122 TrackerWrapper *tracker = it->second;
5123 tracker->setProjectionErrorDisplay(
display);
5134 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5136 TrackerWrapper *tracker = it->second;
5137 tracker->setProjectionErrorDisplayArrowLength(length);
5145 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5147 TrackerWrapper *tracker = it->second;
5148 tracker->setProjectionErrorDisplayArrowThickness(thickness);
5159 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(referenceCameraName);
5164 std::cerr <<
"The reference camera: " << referenceCameraName <<
" does not exist!";
5172 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5174 TrackerWrapper *tracker = it->second;
5175 tracker->setScanLineVisibilityTest(v);
5192 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5194 TrackerWrapper *tracker = it->second;
5195 tracker->setTrackerType(type);
5210 for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
5211 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
5213 TrackerWrapper *tracker = it_tracker->second;
5214 tracker->setTrackerType(it->second);
5230 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5232 TrackerWrapper *tracker = it->second;
5233 tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
5248 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5250 TrackerWrapper *tracker = it->second;
5251 tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
5266 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5268 TrackerWrapper *tracker = it->second;
5269 tracker->setUseEdgeTracking(name, useEdgeTracking);
5273 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5285 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5287 TrackerWrapper *tracker = it->second;
5288 tracker->setUseKltTracking(name, useKltTracking);
5296 bool isOneTestTrackingOk =
false;
5297 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5299 TrackerWrapper *tracker = it->second;
5301 tracker->testTracking();
5302 isOneTestTrackingOk =
true;
5308 if (!isOneTestTrackingOk) {
5309 std::ostringstream oss;
5310 oss <<
"Not enough moving edges to track the object. Try to reduce the "
5328 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5331 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5332 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5334 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5348 std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5351 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5352 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5354 track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5370 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5371 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5372 mapOfImages[it->first] = &I1;
5375 mapOfImages[it->first] = &I2;
5377 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5378 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5380 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5383 std::stringstream ss;
5384 ss <<
"Require two cameras! There are " <<
m_mapOfTrackers.size() <<
" cameras!";
5402 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5403 std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5404 mapOfImages[it->first] = &I_color1;
5407 mapOfImages[it->first] = &_colorI2;
5409 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5410 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5412 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5415 std::stringstream ss;
5416 ss <<
"Require two cameras! There are " <<
m_mapOfTrackers.size() <<
" cameras!";
5430 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5431 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5433 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5445 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5446 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5448 track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5451 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
5461 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5463 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5465 TrackerWrapper *tracker = it->second;
5468 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5476 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5480 mapOfImages[it->first] ==
nullptr) {
5485 !mapOfPointClouds[it->first]) {
5502 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5504 TrackerWrapper *tracker = it->second;
5507 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5510 tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5513 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5515 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5520 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5537 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5539 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5540 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5542 TrackerWrapper *tracker = it->second;
5545 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5553 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5557 mapOfImages[it->first] ==
nullptr) {
5561 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5565 mapOfImages[it->first] !=
nullptr) {
5567 mapOfImages[it->first] = &tracker->m_I;
5571 !mapOfPointClouds[it->first]) {
5588 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5590 TrackerWrapper *tracker = it->second;
5593 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5596 tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5599 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5601 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5606 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5626 std::map<std::string,
const std::vector<vpColVector> *> &mapOfPointClouds,
5627 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5628 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5630 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5632 TrackerWrapper *tracker = it->second;
5635 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5643 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5647 mapOfImages[it->first] ==
nullptr) {
5652 (mapOfPointClouds[it->first] ==
nullptr)) {
5657 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5669 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5671 TrackerWrapper *tracker = it->second;
5674 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5677 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5680 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5682 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5687 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5706 std::map<std::string,
const std::vector<vpColVector> *> &mapOfPointClouds,
5707 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5708 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5710 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5711 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5713 TrackerWrapper *tracker = it->second;
5716 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5724 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5728 mapOfColorImages[it->first] ==
nullptr) {
5732 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5736 mapOfColorImages[it->first] !=
nullptr) {
5738 mapOfImages[it->first] = &tracker->m_I;
5742 (mapOfPointClouds[it->first] ==
nullptr)) {
5747 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5759 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5761 TrackerWrapper *tracker = it->second;
5764 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5767 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5770 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5772 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5777 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5786 std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5787 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5788 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5790 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5792 TrackerWrapper *tracker = it->second;
5795 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5803 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5807 mapOfImages[it->first] ==
nullptr) {
5812 (mapOfPointClouds[it->first] ==
nullptr)) {
5817 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5829 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5831 TrackerWrapper *tracker = it->second;
5834 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5837 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5840 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5842 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5847 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5866 std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5867 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5868 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5870 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5871 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5873 TrackerWrapper *tracker = it->second;
5876 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5884 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5888 mapOfColorImages[it->first] ==
nullptr) {
5892 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5896 mapOfColorImages[it->first] !=
nullptr) {
5898 mapOfImages[it->first] = &tracker->m_I;
5902 (mapOfPointClouds[it->first] ==
nullptr)) {
5907 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5919 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5921 TrackerWrapper *tracker = it->second;
5924 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5927 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5930 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5932 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5937 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5946 vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5947 : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5952 #ifdef VISP_HAVE_OGRE
5959 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(
int trackerType)
5960 : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5963 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5973 #ifdef VISP_HAVE_OGRE
5983 computeVVSInit(ptr_I);
5985 if (m_error.getRows() < 4) {
5990 double normRes_1 = -1;
5991 unsigned int iter = 0;
5993 double factorEdge = 1.0;
5994 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5995 double factorKlt = 1.0;
5997 double factorDepth = 1.0;
5998 double factorDepthDense = 1.0;
6004 double mu = m_initialMu;
6006 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6009 bool isoJoIdentity = m_isoJoIdentity;
6017 unsigned int nb_edge_features = m_error_edge.
getRows();
6018 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6019 unsigned int nb_klt_features = m_error_klt.getRows();
6021 unsigned int nb_depth_features = m_error_depthNormal.getRows();
6022 unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
6024 while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
6025 computeVVSInteractionMatrixAndResidu(ptr_I);
6027 bool reStartFromLastIncrement =
false;
6028 computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
6030 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6031 if (reStartFromLastIncrement) {
6032 if (m_trackerType & KLT_TRACKER) {
6038 if (!reStartFromLastIncrement) {
6039 computeVVSWeights();
6041 if (computeCovariance) {
6043 if (!isoJoIdentity) {
6046 LVJ_true = (m_L * cVo * oJo);
6056 if (isoJoIdentity) {
6060 unsigned int rank = (m_L * cVo).kernel(K);
6070 isoJoIdentity =
false;
6079 unsigned int start_index = 0;
6080 if (m_trackerType & EDGE_TRACKER) {
6081 for (
unsigned int i = 0; i < nb_edge_features; i++) {
6082 double wi = m_w_edge[i] * m_factor[i] * factorEdge;
6084 m_weightedError[i] = wi * m_error[i];
6089 for (
unsigned int j = 0; j < m_L.getCols(); j++) {
6094 start_index += nb_edge_features;
6097 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6098 if (m_trackerType & KLT_TRACKER) {
6099 for (
unsigned int i = 0; i < nb_klt_features; i++) {
6100 double wi = m_w_klt[i] * factorKlt;
6101 W_true[start_index + i] = wi;
6102 m_weightedError[start_index + i] = wi * m_error_klt[i];
6104 num += wi *
vpMath::sqr(m_error[start_index + i]);
6107 for (
unsigned int j = 0; j < m_L.getCols(); j++) {
6108 m_L[start_index + i][j] *= wi;
6112 start_index += nb_klt_features;
6116 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6117 for (
unsigned int i = 0; i < nb_depth_features; i++) {
6118 double wi = m_w_depthNormal[i] * factorDepth;
6119 m_w[start_index + i] = m_w_depthNormal[i];
6120 m_weightedError[start_index + i] = wi * m_error[start_index + i];
6122 num += wi *
vpMath::sqr(m_error[start_index + i]);
6125 for (
unsigned int j = 0; j < m_L.getCols(); j++) {
6126 m_L[start_index + i][j] *= wi;
6130 start_index += nb_depth_features;
6133 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6134 for (
unsigned int i = 0; i < nb_depth_dense_features; i++) {
6135 double wi = m_w_depthDense[i] * factorDepthDense;
6136 m_w[start_index + i] = m_w_depthDense[i];
6137 m_weightedError[start_index + i] = wi * m_error[start_index + i];
6139 num += wi *
vpMath::sqr(m_error[start_index + i]);
6142 for (
unsigned int j = 0; j < m_L.getCols(); j++) {
6143 m_L[start_index + i][j] *= wi;
6150 computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
6153 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6154 if (m_trackerType & KLT_TRACKER) {
6161 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6162 if (m_trackerType & KLT_TRACKER) {
6166 normRes_1 = normRes;
6168 normRes = sqrt(num / den);
6174 computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
6176 if (m_trackerType & EDGE_TRACKER) {
6181 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
6184 "TrackerWrapper::computeVVSInit("
6185 ") should not be called!");
6190 initMbtTracking(ptr_I);
6192 unsigned int nbFeatures = 0;
6194 if (m_trackerType & EDGE_TRACKER) {
6195 nbFeatures += m_error_edge.getRows();
6198 m_error_edge.clear();
6199 m_weightedError_edge.clear();
6204 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6205 if (m_trackerType & KLT_TRACKER) {
6207 nbFeatures += m_error_klt.getRows();
6210 m_error_klt.clear();
6211 m_weightedError_klt.clear();
6217 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6219 nbFeatures += m_error_depthNormal.getRows();
6222 m_error_depthNormal.clear();
6223 m_weightedError_depthNormal.clear();
6224 m_L_depthNormal.clear();
6225 m_w_depthNormal.clear();
6228 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6230 nbFeatures += m_error_depthDense.getRows();
6233 m_error_depthDense.clear();
6234 m_weightedError_depthDense.clear();
6235 m_L_depthDense.clear();
6236 m_w_depthDense.clear();
6239 m_L.resize(nbFeatures, 6,
false,
false);
6240 m_error.resize(nbFeatures,
false);
6242 m_weightedError.resize(nbFeatures,
false);
6243 m_w.resize(nbFeatures,
false);
6251 "computeVVSInteractionMatrixAndR"
6252 "esidu() should not be called!");
6257 if (m_trackerType & EDGE_TRACKER) {
6261 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6262 if (m_trackerType & KLT_TRACKER) {
6267 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6271 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6275 unsigned int start_index = 0;
6276 if (m_trackerType & EDGE_TRACKER) {
6277 m_L.insert(m_L_edge, start_index, 0);
6278 m_error.insert(start_index, m_error_edge);
6280 start_index += m_error_edge.getRows();
6283 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6284 if (m_trackerType & KLT_TRACKER) {
6285 m_L.insert(m_L_klt, start_index, 0);
6286 m_error.insert(start_index, m_error_klt);
6288 start_index += m_error_klt.getRows();
6292 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6293 m_L.insert(m_L_depthNormal, start_index, 0);
6294 m_error.insert(start_index, m_error_depthNormal);
6296 start_index += m_error_depthNormal.getRows();
6299 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6300 m_L.insert(m_L_depthDense, start_index, 0);
6301 m_error.insert(start_index, m_error_depthDense);
6309 unsigned int start_index = 0;
6311 if (m_trackerType & EDGE_TRACKER) {
6313 m_w.insert(start_index, m_w_edge);
6315 start_index += m_w_edge.getRows();
6318 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6319 if (m_trackerType & KLT_TRACKER) {
6321 m_w.insert(start_index, m_w_klt);
6323 start_index += m_w_klt.getRows();
6327 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6328 if (m_depthNormalUseRobust) {
6330 m_w.insert(start_index, m_w_depthNormal);
6333 start_index += m_w_depthNormal.getRows();
6336 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6338 m_w.insert(start_index, m_w_depthDense);
6346 unsigned int thickness,
bool displayFullModel)
6348 if (displayFeatures) {
6349 std::vector<std::vector<double> > features = getFeaturesForDisplay();
6350 for (
size_t i = 0; i < features.size(); i++) {
6353 int state =
static_cast<int>(features[i][3]);
6385 double id = features[i][5];
6386 std::stringstream ss;
6391 vpImagePoint im_centroid(features[i][1], features[i][2]);
6392 vpImagePoint im_extremity(features[i][3], features[i][4]);
6399 std::vector<std::vector<double> > models =
6401 for (
size_t i = 0; i < models.size(); i++) {
6409 double n20 = models[i][3];
6410 double n11 = models[i][4];
6411 double n02 = models[i][5];
6416 #ifdef VISP_HAVE_OGRE
6417 if ((m_trackerType & EDGE_TRACKER)
6418 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6419 || (m_trackerType & KLT_TRACKER)
6423 faces.displayOgre(cMo);
6430 unsigned int thickness,
bool displayFullModel)
6432 if (displayFeatures) {
6433 std::vector<std::vector<double> > features = getFeaturesForDisplay();
6434 for (
size_t i = 0; i < features.size(); i++) {
6437 int state =
static_cast<int>(features[i][3]);
6469 double id = features[i][5];
6470 std::stringstream ss;
6475 vpImagePoint im_centroid(features[i][1], features[i][2]);
6476 vpImagePoint im_extremity(features[i][3], features[i][4]);
6483 std::vector<std::vector<double> > models =
6485 for (
size_t i = 0; i < models.size(); i++) {
6493 double n20 = models[i][3];
6494 double n11 = models[i][4];
6495 double n02 = models[i][5];
6500 #ifdef VISP_HAVE_OGRE
6501 if ((m_trackerType & EDGE_TRACKER)
6502 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6503 || (m_trackerType & KLT_TRACKER)
6507 faces.displayOgre(cMo);
6512 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6514 std::vector<std::vector<double> > features;
6516 if (m_trackerType & EDGE_TRACKER) {
6518 features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6521 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6522 if (m_trackerType & KLT_TRACKER) {
6524 features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6528 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6530 features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6531 m_featuresToBeDisplayedDepthNormal.end());
6537 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(
unsigned int width,
6538 unsigned int height,
6541 bool displayFullModel)
6543 std::vector<std::vector<double> > models;
6546 if (m_trackerType == EDGE_TRACKER) {
6549 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6550 else if (m_trackerType == KLT_TRACKER) {
6554 else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6557 else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6562 if (m_trackerType & EDGE_TRACKER) {
6563 std::vector<std::vector<double> > edgeModels =
6565 models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6569 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6570 std::vector<std::vector<double> > depthDenseModels =
6572 models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6581 if (!modelInitialised) {
6585 if (useScanLine || clippingFlag > 3)
6588 bool reInitialisation =
false;
6590 faces.setVisible(I.
getWidth(), I.
getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6593 #ifdef VISP_HAVE_OGRE
6594 if (!faces.isOgreInitialised()) {
6597 faces.setOgreShowConfigDialog(ogreShowConfigDialog);
6598 faces.initOgre(m_cam);
6602 ogreShowConfigDialog =
false;
6605 faces.setVisibleOgre(I.
getWidth(), I.
getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6607 faces.setVisible(I.
getWidth(), I.
getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6612 faces.computeClippedPolygons(m_cMo, m_cam);
6616 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6617 if (m_trackerType & KLT_TRACKER)
6621 if (m_trackerType & EDGE_TRACKER) {
6630 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6633 if (m_trackerType & DEPTH_DENSE_TRACKER)
6637 void vpMbGenericTracker::TrackerWrapper::initCircle(
const vpPoint &p1,
const vpPoint &p2,
const vpPoint &p3,
6638 double radius,
int idFace,
const std::string &name)
6640 if (m_trackerType & EDGE_TRACKER)
6643 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6644 if (m_trackerType & KLT_TRACKER)
6649 void vpMbGenericTracker::TrackerWrapper::initCylinder(
const vpPoint &p1,
const vpPoint &p2,
double radius,
int idFace,
6650 const std::string &name)
6652 if (m_trackerType & EDGE_TRACKER)
6655 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6656 if (m_trackerType & KLT_TRACKER)
6661 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(
vpMbtPolygon &polygon)
6663 if (m_trackerType & EDGE_TRACKER)
6666 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6667 if (m_trackerType & KLT_TRACKER)
6671 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6674 if (m_trackerType & DEPTH_DENSE_TRACKER)
6678 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(
vpMbtPolygon &polygon)
6680 if (m_trackerType & EDGE_TRACKER)
6683 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6684 if (m_trackerType & KLT_TRACKER)
6688 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6691 if (m_trackerType & DEPTH_DENSE_TRACKER)
6697 if (m_trackerType & EDGE_TRACKER) {
6703 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(
const std::string &configFile,
bool verbose)
6705 #if defined(VISP_HAVE_PUGIXML)
6710 xmlp.setVerbose(verbose);
6711 xmlp.setCameraParameters(m_cam);
6713 xmlp.setAngleDisappear(
vpMath::deg(angleDisappears));
6719 xmlp.setKltMaxFeatures(10000);
6720 xmlp.setKltWindowSize(5);
6721 xmlp.setKltQuality(0.01);
6722 xmlp.setKltMinDistance(5);
6723 xmlp.setKltHarrisParam(0.01);
6724 xmlp.setKltBlockSize(3);
6725 xmlp.setKltPyramidLevels(3);
6726 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6727 xmlp.setKltMaskBorder(maskBorder);
6731 xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6732 xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6733 xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6734 xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6735 xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6736 xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6739 xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6740 xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6744 std::cout <<
" *********** Parsing XML for";
6747 std::vector<std::string> tracker_names;
6748 if (m_trackerType & EDGE_TRACKER)
6749 tracker_names.push_back(
"Edge");
6750 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6751 if (m_trackerType & KLT_TRACKER)
6752 tracker_names.push_back(
"Klt");
6754 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6755 tracker_names.push_back(
"Depth Normal");
6756 if (m_trackerType & DEPTH_DENSE_TRACKER)
6757 tracker_names.push_back(
"Depth Dense");
6760 for (
size_t i = 0; i < tracker_names.size(); i++) {
6761 std::cout <<
" " << tracker_names[i];
6762 if (i == tracker_names.size() - 1) {
6767 std::cout <<
"Model-Based Tracker ************ " << std::endl;
6770 xmlp.parse(configFile);
6777 xmlp.getCameraParameters(camera);
6778 setCameraParameters(camera);
6780 angleAppears =
vpMath::rad(xmlp.getAngleAppear());
6781 angleDisappears =
vpMath::rad(xmlp.getAngleDisappear());
6783 if (xmlp.hasNearClippingDistance())
6784 setNearClippingDistance(xmlp.getNearClippingDistance());
6786 if (xmlp.hasFarClippingDistance())
6787 setFarClippingDistance(xmlp.getFarClippingDistance());
6789 if (xmlp.getFovClipping()) {
6793 useLodGeneral = xmlp.getLodState();
6794 minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
6795 minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();
6797 applyLodSettingInConfig =
false;
6798 if (this->getNbPolygon() > 0) {
6799 applyLodSettingInConfig =
true;
6800 setLod(useLodGeneral);
6801 setMinLineLengthThresh(minLineLengthThresholdGeneral);
6802 setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
6807 xmlp.getEdgeMe(meParser);
6811 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6812 tracker.setMaxFeatures((
int)xmlp.getKltMaxFeatures());
6813 tracker.setWindowSize((
int)xmlp.getKltWindowSize());
6814 tracker.setQuality(xmlp.getKltQuality());
6815 tracker.setMinDistance(xmlp.getKltMinDistance());
6816 tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6817 tracker.setBlockSize((
int)xmlp.getKltBlockSize());
6818 tracker.setPyramidLevels((
int)xmlp.getKltPyramidLevels());
6819 maskBorder = xmlp.getKltMaskBorder();
6822 faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6826 setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
6827 setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
6828 setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
6829 setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
6830 setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
6833 setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
6841 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
6843 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6845 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6847 if (m_trackerType & KLT_TRACKER) {
6855 if (m_trackerType & EDGE_TRACKER) {
6856 bool newvisibleface =
false;
6860 faces.computeClippedPolygons(m_cMo, m_cam);
6866 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6870 if (m_trackerType & DEPTH_DENSE_TRACKER)
6874 if (m_trackerType & EDGE_TRACKER) {
6881 if (computeProjError) {
6888 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6890 if (m_trackerType & EDGE_TRACKER) {
6895 std::cerr <<
"Error in moving edge tracking" << std::endl;
6900 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6901 if (m_trackerType & KLT_TRACKER) {
6906 std::cerr <<
"Error in KLT tracking: " << e.
what() << std::endl;
6912 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6917 std::cerr <<
"Error in Depth normal tracking" << std::endl;
6922 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6927 std::cerr <<
"Error in Depth dense tracking" << std::endl;
6935 const unsigned int pointcloud_width,
6936 const unsigned int pointcloud_height)
6938 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6940 if (m_trackerType & KLT_TRACKER) {
6948 if (m_trackerType & EDGE_TRACKER) {
6949 bool newvisibleface =
false;
6953 faces.computeClippedPolygons(m_cMo, m_cam);
6959 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6963 if (m_trackerType & DEPTH_DENSE_TRACKER)
6967 if (m_trackerType & EDGE_TRACKER) {
6974 if (computeProjError) {
6981 const std::vector<vpColVector> *
const point_cloud,
6982 const unsigned int pointcloud_width,
6983 const unsigned int pointcloud_height)
6985 if (m_trackerType & EDGE_TRACKER) {
6990 std::cerr <<
"Error in moving edge tracking" << std::endl;
6995 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6996 if (m_trackerType & KLT_TRACKER) {
7001 std::cerr <<
"Error in KLT tracking: " << e.
what() << std::endl;
7007 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
7012 std::cerr <<
"Error in Depth tracking" << std::endl;
7017 if (m_trackerType & DEPTH_DENSE_TRACKER) {
7022 std::cerr <<
"Error in Depth dense tracking" << std::endl;
7030 const unsigned int pointcloud_width,
7031 const unsigned int pointcloud_height)
7033 if (m_trackerType & EDGE_TRACKER) {
7038 std::cerr <<
"Error in moving edge tracking" << std::endl;
7043 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7044 if (m_trackerType & KLT_TRACKER) {
7049 std::cerr <<
"Error in KLT tracking: " << e.
what() << std::endl;
7055 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
7060 std::cerr <<
"Error in Depth tracking" << std::endl;
7065 if (m_trackerType & DEPTH_DENSE_TRACKER) {
7070 std::cerr <<
"Error in Depth dense tracking" << std::endl;
7088 for (
unsigned int i = 0; i < scales.size(); i++) {
7090 for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
7097 for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
7105 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
7113 cylinders[i].clear();
7121 nbvisiblepolygone = 0;
7124 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7126 for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
7128 if (kltpoly !=
nullptr) {
7133 kltPolygons.clear();
7135 for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
7138 if (kltPolyCylinder !=
nullptr) {
7139 delete kltPolyCylinder;
7141 kltPolyCylinder =
nullptr;
7143 kltCylinders.clear();
7146 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
7148 if (ci !=
nullptr) {
7153 circles_disp.clear();
7155 firstInitialisation =
true;
7160 for (
size_t i = 0; i < m_depthNormalFaces.size(); i++) {
7161 delete m_depthNormalFaces[i];
7162 m_depthNormalFaces[i] =
nullptr;
7164 m_depthNormalFaces.clear();
7167 for (
size_t i = 0; i < m_depthDenseFaces.size(); i++) {
7168 delete m_depthDenseFaces[i];
7169 m_depthDenseFaces[i] =
nullptr;
7171 m_depthDenseFaces.clear();
7175 loadModel(cad_name, verbose, T);
7177 initFromPose(*I, cMo);
7180 initFromPose(*I_color, cMo);
7188 reInitModel(&I,
nullptr, cad_name, cMo, verbose, T);
7195 reInitModel(
nullptr, &I_color, cad_name, cMo, verbose, T);
7198 void vpMbGenericTracker::TrackerWrapper::resetTracker()
7201 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7208 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(
const vpCameraParameters &cam)
7213 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7222 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(
const double &dist)
7227 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(
const double &dist)
7232 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(
const bool &v)
7235 #ifdef VISP_HAVE_OGRE
7236 faces.getOgreContext()->setWindowName(
"TrackerWrapper");
7243 bool performKltSetPose =
false;
7248 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7249 if (m_trackerType & KLT_TRACKER) {
7250 performKltSetPose =
true;
7252 if (useScanLine || clippingFlag > 3) {
7253 m_cam.computeFov(I ? I->
getWidth() : m_I.getWidth(), I ? I->
getHeight() : m_I.getHeight());
7260 if (!performKltSetPose) {
7266 if (m_trackerType & EDGE_TRACKER) {
7271 faces.computeClippedPolygons(m_cMo, m_cam);
7272 faces.computeScanLineRender(m_cam, I ? I->
getWidth() : m_I.getWidth(), I ? I->
getHeight() : m_I.getHeight());
7276 if (m_trackerType & EDGE_TRACKER) {
7277 initPyramid(I, Ipyramid);
7279 unsigned int i = (
unsigned int)scales.size();
7289 cleanPyramid(Ipyramid);
7292 if (m_trackerType & EDGE_TRACKER) {
7306 setPose(&I,
nullptr, cdMo);
7311 setPose(
nullptr, &I_color, cdMo);
7314 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(
const bool &flag)
7319 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(
const bool &v)
7322 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7329 void vpMbGenericTracker::TrackerWrapper::setTrackerType(
int type)
7331 if ((type & (EDGE_TRACKER |
7332 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7335 DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7339 m_trackerType = type;
7342 void vpMbGenericTracker::TrackerWrapper::testTracking()
7344 if (m_trackerType & EDGE_TRACKER) {
7350 #
if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7355 if ((m_trackerType & (EDGE_TRACKER
7356 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7360 std::cerr <<
"Bad tracker type: " << m_trackerType << std::endl;
7364 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7374 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7376 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
7378 if ((m_trackerType & (EDGE_TRACKER |
7379 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7382 DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7383 std::cerr <<
"Bad tracker type: " << m_trackerType << std::endl;
7387 if (m_trackerType & (EDGE_TRACKER
7388 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7396 if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) {
7403 preTracking(ptr_I, point_cloud);
7409 covarianceMatrix = -1;
7413 if (m_trackerType == EDGE_TRACKER)
7416 postTracking(ptr_I, point_cloud);
7420 std::cerr <<
"Exception: " << e.
what() << std::endl;
void setWindowName(const Ogre::String &n)
unsigned int getCols() const
void resize(unsigned int nrows, unsigned int ncols, bool flagNullify=true, bool recopy_=true)
unsigned int getRows() const
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
void insert(unsigned int i, const vpColVector &v)
void resize(unsigned int i, bool flagNullify=true)
Class to define RGB colors available for display functionalities.
static const vpColor cyan
static const vpColor blue
static const vpColor purple
static const vpColor yellow
static const vpColor green
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayEllipse(const vpImage< unsigned char > &I, const vpImagePoint ¢er, const double &coef1, const double &coef2, const double &coef3, bool use_normalized_centered_moments, const vpColor &color, unsigned int thickness=1, bool display_center=false, bool display_arc=false)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void displayArrow(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color=vpColor::white, unsigned int w=4, unsigned int h=2, unsigned int thickness=1)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
@ badValue
Used to indicate that a value is not in the allowed range.
@ notInitialized
Used to indicate that a parameter is not initialized.
const char * what() const
static vpHomogeneousMatrix direct(const vpColVector &v)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
vp_deprecated void init()
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
unsigned int getWidth() const
unsigned int getHeight() const
void init(unsigned int h, unsigned int w, Type value)
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
static double rad(double deg)
static double sqr(double x)
static bool equal(double x, double y, double threshold=0.001)
static double deg(double rad)
Implementation of a matrix and operations on matrices.
void insert(const vpMatrix &A, unsigned int r, unsigned int c)
void segmentPointCloud(const std::vector< vpColVector > &point_cloud, unsigned int width, unsigned int height)
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
void computeVisibility(unsigned int width, unsigned int height)
virtual void computeVVSInteractionMatrixAndResidu() vp_override
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
virtual void setCameraParameters(const vpCameraParameters &camera) vp_override
virtual void resetTracker() vp_override
virtual void computeVVSWeights()
virtual void computeVVSInit() vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
virtual void track(const std::vector< vpColVector > &point_cloud, unsigned int width, unsigned int height)
virtual void resetTracker() vp_override
virtual void computeVVSInteractionMatrixAndResidu() vp_override
void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false)
virtual void computeVVSInit() vp_override
void segmentPointCloud(const std::vector< vpColVector > &point_cloud, unsigned int width, unsigned int height)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
virtual void setCameraParameters(const vpCameraParameters &camera) vp_override
void computeVisibility(unsigned int width, unsigned int height)
void computeVVS(const vpImage< unsigned char > &_I, unsigned int lvl)
void updateMovingEdgeWeights()
void computeProjectionError(const vpImage< unsigned char > &_I)
virtual void computeVVSWeights()
virtual void setNearClippingDistance(const double &dist) vp_override
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") vp_override
virtual void setFarClippingDistance(const double &dist) vp_override
void resetTracker() vp_override
virtual void computeVVSInteractionMatrixAndResidu() vp_override
void initMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
void trackMovingEdge(const vpImage< unsigned char > &I)
virtual void setClipping(const unsigned int &flags) vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") vp_override
virtual void setCameraParameters(const vpCameraParameters &cam) vp_override
void computeVVSFirstPhaseFactor(const vpImage< unsigned char > &I, unsigned int lvl=0)
void updateMovingEdge(const vpImage< unsigned char > &I)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
unsigned int initMbtTracking(unsigned int &nberrors_lines, unsigned int &nberrors_cylinders, unsigned int &nberrors_circles)
void reinitMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual void computeVVSInit() vp_override
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
void setMovingEdge(const vpMe &me)
virtual void computeVVSInteractionMatrixAndResidu(const vpImage< unsigned char > &I)
virtual void testTracking() vp_override
void visibleFace(const vpImage< unsigned char > &_I, const vpHomogeneousMatrix &_cMo, bool &newvisibleline)
virtual void setUseKltTracking(const std::string &name, const bool &useKltTracking)
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo) vp_override
virtual std::vector< std::vector< double > > getFeaturesForDisplay()
virtual void setMask(const vpImage< bool > &mask) vp_override
virtual double getGoodMovingEdgesRatioThreshold() const
virtual int getTrackerType() const
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt) vp_override
std::map< std::string, TrackerWrapper * > m_mapOfTrackers
virtual void setKltMaskBorder(const unsigned int &e)
virtual void loadConfigFile(const std::string &configFile, bool verbose=true) vp_override
virtual ~vpMbGenericTracker() vp_override
virtual vpMbHiddenFaces< vpMbtPolygon > & getFaces() vp_override
unsigned int m_nb_feat_edge
Number of moving-edges features.
virtual void setKltThresholdAcceptation(double th)
std::map< std::string, vpHomogeneousMatrix > m_mapOfCameraTransformationMatrix
virtual double getKltThresholdAcceptation() const
virtual void getLcircle(std::list< vpMbtDistanceCircle * > &circlesList, unsigned int level=0) const
virtual void setOgreShowConfigDialog(bool showConfigDialog) vp_override
virtual void getCameraParameters(vpCameraParameters &cam) const
virtual void setDepthDenseFilteringMaxDistance(double maxDistance)
virtual std::list< vpMbtDistanceKltCylinder * > & getFeaturesKltCylinder()
virtual void initFromPoints(const vpImage< unsigned char > &I, const std::string &initFile)
virtual std::vector< cv::Point2f > getKltPoints() const
virtual void setAngleDisappear(const double &a) vp_override
virtual unsigned int getNbPolygon() const vp_override
virtual void setGoodNbRayCastingAttemptsRatio(const double &ratio)
virtual void getPose(vpHomogeneousMatrix &cMo) const
virtual void setGoodMovingEdgesRatioThreshold(double threshold)
virtual void setDepthDenseFilteringMinDistance(double minDistance)
virtual void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false) vp_override
virtual void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
virtual void setMinLineLengthThresh(double minLineLengthThresh, const std::string &name="") vp_override
virtual void setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
vpColVector m_w
Robust weights.
virtual void setProjectionErrorDisplay(bool display) vp_override
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
virtual void saveConfigFile(const std::string &settingsFile) const
virtual std::string getReferenceCameraName() const
virtual std::map< std::string, int > getCameraTrackerTypes() const
vpMbGenericTracker()
json namespace shortcut
virtual vpMbtPolygon * getPolygon(unsigned int index) vp_override
virtual void loadConfigFileJSON(const std::string &configFile, bool verbose=true)
virtual void setProjectionErrorDisplayArrowLength(unsigned int length) vp_override
unsigned int m_nb_feat_depthDense
Number of depth dense features.
virtual void setOgreVisibilityTest(const bool &v) vp_override
virtual void setDepthNormalPclPlaneEstimationMethod(int method)
virtual void computeProjectionError()
virtual void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual void setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
virtual void setMovingEdge(const vpMe &me)
virtual void preTracking(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages, std::map< std::string, const std::vector< vpColVector > * > &mapOfPointClouds, std::map< std::string, unsigned int > &mapOfPointCloudWidths, std::map< std::string, unsigned int > &mapOfPointCloudHeights)
virtual void setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
virtual void setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
virtual void setAngleAppear(const double &a) vp_override
virtual void setFeatureFactors(const std::map< vpTrackerType, double > &mapOfFeatureFactors)
virtual void computeVVSInteractionMatrixAndResidu() vp_override
virtual std::pair< std::vector< vpPolygon >, std::vector< std::vector< vpPoint > > > getPolygonFaces(bool orderPolygons=true, bool useVisibility=true, bool clipPolygon=false) vp_override
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
virtual void initClick(const vpImage< unsigned char > &I, const std::string &initFile, bool displayHelp=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual void getLcylinder(std::list< vpMbtDistanceCylinder * > &cylindersList, unsigned int level=0) const
virtual std::vector< vpImagePoint > getKltImagePoints() const
virtual void setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
virtual std::vector< std::string > getCameraNames() const
virtual void setDepthDenseFilteringMethod(int method)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
virtual void computeVVSInit() vp_override
vpColVector m_weightedError
Weighted error.
virtual std::list< vpMbtDistanceKltPoints * > & getFeaturesKlt()
virtual void init(const vpImage< unsigned char > &I) vp_override
vpMatrix m_L
Interaction matrix.
virtual void loadModel(const std::string &modelFile, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix()) vp_override
virtual void setKltOpencv(const vpKltOpencv &t)
virtual void setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
virtual void setFarClippingDistance(const double &dist) vp_override
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") vp_override
virtual void computeVVSWeights()
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") vp_override
virtual void loadConfigFileXML(const std::string &configFile, bool verbose=true)
virtual void setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
virtual void testTracking() vp_override
virtual void setNearClippingDistance(const double &dist) vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
virtual void setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
virtual unsigned int getKltMaskBorder() const
vpColVector m_error
The error vector (s - s*): difference between the current and desired feature values.
virtual void setReferenceCameraName(const std::string &referenceCameraName)
std::map< vpTrackerType, double > m_mapOfFeatureFactors
Ponderation between each feature type in the VVS stage.
double m_thresholdOutlier
virtual void computeVVS(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages)
virtual std::list< vpMbtDistanceCircle * > & getFeaturesCircle()
virtual void getLline(std::list< vpMbtDistanceLine * > &linesList, unsigned int level=0) const
virtual void resetTracker() vp_override
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness) vp_override
virtual void setCameraParameters(const vpCameraParameters &camera) vp_override
virtual void setTrackerType(int type)
unsigned int m_nb_feat_klt
Number of klt features.
virtual unsigned int getNbPoints(unsigned int level=0) const
virtual vpKltOpencv getKltOpencv() const
virtual int getKltNbPoints() const
unsigned int m_nb_feat_depthNormal
Number of depth normal features.
virtual void setClipping(const unsigned int &flags) vp_override
virtual double computeCurrentProjectionError(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo, const vpCameraParameters &_cam) vp_override
virtual vpMe getMovingEdge() const
virtual void setDisplayFeatures(bool displayF) vp_override
virtual void setProjectionErrorComputation(const bool &flag) vp_override
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name="") vp_override
virtual void setCameraTransformationMatrix(const std::string &cameraName, const vpHomogeneousMatrix &cameraTransformationMatrix)
virtual void setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
virtual void track(const vpImage< unsigned char > &I) vp_override
std::string m_referenceCameraName
Name of the reference camera.
virtual void setLod(bool useLod, const std::string &name="") vp_override
virtual void setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
friend void from_json(const nlohmann::json &j, TrackerWrapper &t)
Load configuration settings from a JSON object for a tracker wrapper.
virtual unsigned int getClipping() const
virtual std::map< int, vpImagePoint > getKltImagePointsWithId() const
vpAROgre * getOgreContext()
virtual void computeVVSInit() vp_override
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo) vp_override
virtual void computeVVSInteractionMatrixAndResidu() vp_override
void preTracking(const vpImage< unsigned char > &I)
virtual void initCircle(const vpPoint &, const vpPoint &, const vpPoint &, double, int, const std::string &name="") vp_override
bool postTracking(const vpImage< unsigned char > &I, vpColVector &w)
virtual void initFaceFromLines(vpMbtPolygon &polygon) vp_override
virtual void reinit(const vpImage< unsigned char > &I)
void setCameraParameters(const vpCameraParameters &cam) vp_override
virtual void setScanLineVisibilityTest(const bool &v) vp_override
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) vp_override
virtual void initCylinder(const vpPoint &, const vpPoint &, double, int, const std::string &name="") vp_override
virtual void initFaceFromCorners(vpMbtPolygon &polygon) vp_override
void resetTracker() vp_override
virtual void setProjectionErrorDisplayArrowLength(unsigned int length)
double m_lambda
Gain of the virtual visual servoing stage.
virtual void setMaxIter(unsigned int max)
bool m_projectionErrorDisplay
Display gradient and model orientation for projection error computation.
virtual void setOgreShowConfigDialog(bool showConfigDialog)
virtual void setMask(const vpImage< bool > &mask)
virtual void getCameraParameters(vpCameraParameters &cam) const
vpImage< unsigned char > m_I
Grayscale image buffer, used when passing color images.
virtual void setDisplayFeatures(bool displayF)
virtual vpHomogeneousMatrix getPose() const
bool m_computeInteraction
vpMatrix oJo
The Degrees of Freedom to estimate.
vpMatrix covarianceMatrix
Covariance matrix.
double m_initialMu
Initial Mu for Levenberg Marquardt optimization loop.
vpHomogeneousMatrix m_cMo
The current pose.
virtual void computeVVSCheckLevenbergMarquardt(unsigned int iter, vpColVector &error, const vpColVector &m_error_prev, const vpHomogeneousMatrix &cMoPrev, double &mu, bool &reStartFromLastIncrement, vpColVector *const w=nullptr, const vpColVector *const m_w_prev=nullptr)
vpCameraParameters m_cam
The camera parameters.
double m_stopCriteriaEpsilon
Epsilon threshold to stop the VVS optimization loop.
bool useOgre
Use Ogre3d for visibility tests.
virtual void computeVVSWeights(vpRobust &robust, const vpColVector &error, vpColVector &w)
vpMbHiddenFaces< vpMbtPolygon > faces
Set of faces describing the object.
virtual void setCameraParameters(const vpCameraParameters &cam)
virtual void setAngleDisappear(const double &a)
virtual void setInitialMu(double mu)
virtual void setScanLineVisibilityTest(const bool &v)
virtual void setOgreVisibilityTest(const bool &v)
virtual void computeCovarianceMatrixVVS(const bool isoJoIdentity, const vpColVector &w_true, const vpHomogeneousMatrix &cMoPrev, const vpMatrix &L_true, const vpMatrix &LVJ_true, const vpColVector &error)
vpMbtOptimizationMethod m_optimizationMethod
Optimization method used.
virtual void computeVVSPoseEstimation(const bool isoJoIdentity, unsigned int iter, vpMatrix &L, vpMatrix <L, vpColVector &R, const vpColVector &error, vpColVector &error_prev, vpColVector <R, double &mu, vpColVector &v, const vpColVector *const w=nullptr, vpColVector *const m_w_prev=nullptr)
bool displayFeatures
If true, the features are displayed.
virtual void setProjectionErrorDisplay(bool display)
double angleDisappears
Angle used to detect a face disappearance.
virtual void setLambda(double gain)
virtual void setNearClippingDistance(const double &dist)
virtual void setFarClippingDistance(const double &dist)
double distFarClip
Distance for far clipping.
bool m_isoJoIdentity
Boolean to know if oJo is identity (for fast computation)
bool useScanLine
Use Scanline for visibility tests.
virtual void setProjectionErrorComputation(const bool &flag)
virtual void setClipping(const unsigned int &flags)
double angleAppears
Angle used to detect a face appearance.
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt)
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness)
virtual void setAngleAppear(const double &a)
bool computeCovariance
Flag used to specify if the covariance matrix has to be computed or not.
double distNearClip
Distance for near clipping.
unsigned int m_maxIter
Maximum number of iterations of the virtual visual servoing stage.
unsigned int clippingFlag
Flags specifying which clipping to used.
vpMbHiddenFaces< vpMbtPolygon > m_projectionErrorFaces
Set of faces describing the object, used for projection error.
virtual void loadConfigFile(const std::string &configFile, bool verbose=true)
Manage a circle used in the model-based tracker.
Manage a cylinder used in the model-based tracker.
Implementation of a polygon of the model containing points of interest. It is used by the model-based...
Implementation of a polygon of the model containing points of interest. It is used by the model-based...
Manage the line of a polygon used in the model-based tracker.
Implementation of a polygon of the model used by the model-based tracker.
Parse an Xml file to extract configuration parameters of a mbtConfig object.
@ TOO_NEAR
Point not tracked anymore, since too close to its neighbor.
@ THRESHOLD
Point not tracked due to the likelihood that is below the threshold, but retained in the ME list.
@ CONTRAST
Point not tracked due to a contrast problem, but retained in the ME list.
@ M_ESTIMATOR
Point detected as an outlier during virtual visual-servoing.
@ NO_SUPPRESSION
Point successfully tracked.
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Error that can be emitted by the vpTracker class and its derivatives.
@ notEnoughPointError
Not enough point to track.
@ initializationError
Tracker initialization error.
@ fatalError
Tracker fatal error.
vpVelocityTwistMatrix buildFrom(const vpTranslationVector &t, const vpRotationMatrix &R)