36 #include <visp3/mbt/vpMbGenericTracker.h>
38 #include <visp3/core/vpDisplay.h>
39 #include <visp3/core/vpExponentialMap.h>
40 #include <visp3/core/vpTrackingException.h>
41 #include <visp3/core/vpIoTools.h>
42 #include <visp3/mbt/vpMbtXmlGenericParser.h>
44 #ifdef VISP_HAVE_NLOHMANN_JSON
45 #include <nlohmann/json.hpp>
46 using json = nlohmann::json;
50 : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
51 m_percentageGdPt(0.4), m_referenceCameraName(
"Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
52 m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
62 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
71 : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
72 m_percentageGdPt(0.4), m_referenceCameraName(
"Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
73 m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
78 else if (nbCameras == 1) {
85 for (
unsigned int i = 1; i <= nbCameras; i++) {
101 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
110 : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
111 m_percentageGdPt(0.4), m_referenceCameraName(
"Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
112 m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
114 if (trackerTypes.empty()) {
118 if (trackerTypes.size() == 1) {
125 for (
size_t i = 1; i <= trackerTypes.size(); i++) {
126 std::stringstream ss;
141 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
150 const std::vector<int> &trackerTypes)
151 : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
152 m_percentageGdPt(0.4), m_referenceCameraName(
"Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
153 m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
155 if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
157 "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
160 for (
size_t i = 0; i < cameraNames.size(); i++) {
173 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
210 double rawTotalProjectionError = 0.0;
211 unsigned int nbTotalFeaturesUsed = 0;
213 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
215 TrackerWrapper *tracker = it->second;
217 unsigned int nbFeaturesUsed = 0;
218 double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
220 if (nbFeaturesUsed > 0) {
221 nbTotalFeaturesUsed += nbFeaturesUsed;
222 rawTotalProjectionError += curProjError;
226 if (nbTotalFeaturesUsed > 0) {
227 return vpMath::deg(rawTotalProjectionError / (
double)nbTotalFeaturesUsed);
263 double rawTotalProjectionError = 0.0;
264 unsigned int nbTotalFeaturesUsed = 0;
266 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
268 TrackerWrapper *tracker = it->second;
270 double curProjError = tracker->getProjectionError();
271 unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
273 if (nbFeaturesUsed > 0) {
274 nbTotalFeaturesUsed += nbFeaturesUsed;
275 rawTotalProjectionError += (
vpMath::rad(curProjError) * nbFeaturesUsed);
279 if (nbTotalFeaturesUsed > 0) {
300 double normRes_1 = -1;
301 unsigned int iter = 0;
319 std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
324 mapOfVelocityTwist[it->first] = cVo;
328 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
342 bool reStartFromLastIncrement =
false;
344 if (reStartFromLastIncrement) {
345 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
347 TrackerWrapper *tracker = it->second;
351 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
354 tracker->ctTc0 = c_curr_tTc_curr0;
359 if (!reStartFromLastIncrement) {
364 if (!isoJoIdentity) {
367 LVJ_true = (
m_L * (cVo *
oJo));
381 unsigned int rank = (
m_L * cVo).kernel(K);
390 isoJoIdentity =
false;
399 unsigned int start_index = 0;
400 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
402 TrackerWrapper *tracker = it->second;
405 for (
unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
406 double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
407 W_true[start_index + i] = wi;
413 for (
unsigned int j = 0; j <
m_L.
getCols(); j++) {
414 m_L[start_index + i][j] *= wi;
418 start_index += tracker->m_error_edge.
getRows();
421 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
423 for (
unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
424 double wi = tracker->m_w_klt[i] * factorKlt;
425 W_true[start_index + i] = wi;
431 for (
unsigned int j = 0; j <
m_L.
getCols(); j++) {
432 m_L[start_index + i][j] *= wi;
436 start_index += tracker->m_error_klt.
getRows();
441 for (
unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
442 double wi = tracker->m_w_depthNormal[i] * factorDepth;
443 W_true[start_index + i] = wi;
449 for (
unsigned int j = 0; j <
m_L.
getCols(); j++) {
450 m_L[start_index + i][j] *= wi;
454 start_index += tracker->m_error_depthNormal.
getRows();
458 for (
unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
459 double wi = tracker->m_w_depthDense[i] * factorDepthDense;
460 W_true[start_index + i] = wi;
466 for (
unsigned int j = 0; j <
m_L.
getCols(); j++) {
467 m_L[start_index + i][j] *= wi;
471 start_index += tracker->m_error_depthDense.
getRows();
476 normRes = sqrt(num / den);
484 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
485 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
487 TrackerWrapper *tracker = it->second;
491 tracker->ctTc0 = c_curr_tTc_curr0;
496 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
498 TrackerWrapper *tracker = it->second;
507 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
509 TrackerWrapper *tracker = it->second;
513 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
528 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
530 TrackerWrapper *tracker = it->second;
533 tracker->updateMovingEdgeWeights();
545 unsigned int nbFeatures = 0;
547 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
549 TrackerWrapper *tracker = it->second;
550 tracker->computeVVSInit(mapOfImages[it->first]);
552 nbFeatures += tracker->m_error.getRows();
566 "computeVVSInteractionMatrixAndR"
567 "esidu() should not be called");
572 std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
574 unsigned int start_index = 0;
576 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
578 TrackerWrapper *tracker = it->second;
581 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
584 tracker->ctTc0 = c_curr_tTc_curr0;
587 tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
589 m_L.
insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
592 start_index += tracker->m_error.getRows();
598 unsigned int start_index = 0;
600 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
602 TrackerWrapper *tracker = it->second;
603 tracker->computeVVSWeights();
606 start_index += tracker->m_w.getRows();
625 bool displayFullModel)
629 TrackerWrapper *tracker = it->second;
630 tracker->display(I, cMo, cam, col, thickness, displayFullModel);
652 bool displayFullModel)
656 TrackerWrapper *tracker = it->second;
657 tracker->display(I, cMo, cam, col, thickness, displayFullModel);
683 unsigned int thickness,
bool displayFullModel)
686 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
687 it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
690 it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
693 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
717 bool displayFullModel)
720 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
721 it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
724 it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
727 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
744 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
745 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
746 const vpColor &col,
unsigned int thickness,
bool displayFullModel)
750 it_img != mapOfImages.end(); ++it_img) {
751 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it_img->first);
752 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
753 std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
755 if (it_tracker !=
m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
756 it_cam != mapOfCameraParameters.end()) {
757 TrackerWrapper *tracker = it_tracker->second;
758 tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
761 std::cerr <<
"Missing elements for image:" << it_img->first <<
"!" << std::endl;
778 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
779 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
780 const vpColor &col,
unsigned int thickness,
bool displayFullModel)
783 for (std::map<std::string,
const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
784 it_img != mapOfImages.end(); ++it_img) {
785 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it_img->first);
786 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
787 std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
789 if (it_tracker !=
m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
790 it_cam != mapOfCameraParameters.end()) {
791 TrackerWrapper *tracker = it_tracker->second;
792 tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
795 std::cerr <<
"Missing elements for image:" << it_img->first <<
"!" << std::endl;
807 std::vector<std::string> cameraNames;
809 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
811 cameraNames.push_back(it_tracker->first);
833 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
834 it->second->getCameraParameters(cam1);
837 it->second->getCameraParameters(cam2);
840 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
853 mapOfCameraParameters.clear();
855 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
858 it->second->getCameraParameters(cam_);
859 mapOfCameraParameters[it->first] = cam_;
871 std::map<std::string, int> trackingTypes;
873 TrackerWrapper *traker;
874 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
876 traker = it_tracker->second;
877 trackingTypes[it_tracker->first] = traker->getTrackerType();
880 return trackingTypes;
894 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
895 clippingFlag1 = it->second->getClipping();
898 clippingFlag2 = it->second->getClipping();
901 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
913 mapOfClippingFlags.clear();
915 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
917 TrackerWrapper *tracker = it->second;
918 mapOfClippingFlags[it->first] = tracker->getClipping();
931 return it->second->getFaces();
945 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
947 return it->second->getFaces();
950 std::cerr <<
"The camera: " << cameraName <<
" cannot be found!" << std::endl;
954 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
962 TrackerWrapper *tracker = it->second;
963 return tracker->getFeaturesCircle();
978 TrackerWrapper *tracker = it->second;
979 return tracker->getFeaturesKltCylinder();
994 TrackerWrapper *tracker = it->second;
995 return tracker->getFeaturesKlt();
1035 return it->second->getFeaturesForDisplay();
1041 return std::vector<std::vector<double> >();
1072 mapOfFeatures.clear();
1074 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1076 mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1091 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
1104 TrackerWrapper *tracker = it->second;
1105 return tracker->getKltImagePoints();
1111 return std::vector<vpImagePoint>();
1126 TrackerWrapper *tracker = it->second;
1127 return tracker->getKltImagePointsWithId();
1133 return std::map<int, vpImagePoint>();
1145 TrackerWrapper *tracker = it->second;
1146 return tracker->getKltMaskBorder();
1164 TrackerWrapper *tracker = it->second;
1165 return tracker->getKltNbPoints();
1184 TrackerWrapper *tracker;
1185 tracker = it_tracker->second;
1186 return tracker->getKltOpencv();
1206 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1207 klt1 = it->second->getKltOpencv();
1210 klt2 = it->second->getKltOpencv();
1213 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
1227 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1229 TrackerWrapper *tracker = it->second;
1230 mapOfKlts[it->first] = tracker->getKltOpencv();
1243 TrackerWrapper *tracker = it->second;
1244 return tracker->getKltPoints();
1250 return std::vector<cv::Point2f>();
1278 it->second->getLcircle(circlesList, level);
1299 unsigned int level)
const
1301 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
1303 it->second->getLcircle(circlesList, level);
1306 std::cerr <<
"The camera: " << cameraName <<
" does not exist!" << std::endl;
1326 it->second->getLcylinder(cylindersList, level);
1347 unsigned int level)
const
1349 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
1351 it->second->getLcylinder(cylindersList, level);
1354 std::cerr <<
"The camera: " << cameraName <<
" does not exist!" << std::endl;
1375 it->second->getLline(linesList, level);
1396 unsigned int level)
const
1398 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
1400 it->second->getLline(linesList, level);
1403 std::cerr <<
"The camera: " << cameraName <<
" does not exist!" << std::endl;
1439 bool displayFullModel)
1444 return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1450 return std::vector<std::vector<double> >();
1479 const std::map<std::string, unsigned int> &mapOfwidths,
1480 const std::map<std::string, unsigned int> &mapOfheights,
1481 const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1482 const std::map<std::string, vpCameraParameters> &mapOfCams,
1483 bool displayFullModel)
1486 mapOfModels.clear();
1488 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1490 std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1491 std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1492 std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1493 std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1495 if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
1496 findCam != mapOfCams.end()) {
1497 mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
1498 findCam->second, displayFullModel);
1513 return it->second->getMovingEdge();
1533 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1534 it->second->getMovingEdge(me1);
1537 it->second->getMovingEdge(me2);
1540 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
1552 mapOfMovingEdges.clear();
1554 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1556 TrackerWrapper *tracker = it->second;
1557 mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1582 unsigned int nbGoodPoints = 0;
1585 nbGoodPoints = it->second->getNbPoints(level);
1591 return nbGoodPoints;
1610 mapOfNbPoints.clear();
1612 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1614 TrackerWrapper *tracker = it->second;
1615 mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1628 return it->second->getNbPolygon();
1642 mapOfNbPolygons.clear();
1644 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1646 TrackerWrapper *tracker = it->second;
1647 mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1665 return it->second->getPolygon(index);
1685 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.find(cameraName);
1687 return it->second->getPolygon(index);
1690 std::cerr <<
"The camera: " << cameraName <<
" does not exist!" << std::endl;
1709 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1712 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1716 TrackerWrapper *tracker = it->second;
1717 polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1723 return polygonFaces;
1744 std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1745 bool orderPolygons,
bool useVisibility,
bool clipPolygon)
1747 mapOfPolygons.clear();
1748 mapOfPoints.clear();
1750 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1752 TrackerWrapper *tracker = it->second;
1753 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1754 tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1756 mapOfPolygons[it->first] = polygonFaces.first;
1757 mapOfPoints[it->first] = polygonFaces.second;
1774 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1775 it->second->getPose(c1Mo);
1778 it->second->getPose(c2Mo);
1781 std::cerr <<
"The tracker is not set as a stereo configuration! There are " <<
m_mapOfTrackers.size() <<
" cameras!"
1794 mapOfCameraPoses.clear();
1796 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1798 TrackerWrapper *tracker = it->second;
1799 tracker->getPose(mapOfCameraPoses[it->first]);
1815 TrackerWrapper *tracker = it->second;
1816 return tracker->getTrackerType();
1826 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1828 TrackerWrapper *tracker = it->second;
1835 double ,
int ,
const std::string & )
1840 #ifdef VISP_HAVE_MODULE_GUI
1885 const std::string &initFile1,
const std::string &initFile2,
bool displayHelp,
1889 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1890 TrackerWrapper *tracker = it->second;
1891 tracker->initClick(I1, initFile1, displayHelp, T1);
1895 tracker = it->second;
1896 tracker->initClick(I2, initFile2, displayHelp, T2);
1900 tracker = it->second;
1903 tracker->getPose(
m_cMo);
1908 "Cannot initClick()! Require two cameras but there are %d cameras!",
m_mapOfTrackers.size());
1955 const std::string &initFile1,
const std::string &initFile2,
bool displayHelp,
1959 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
1960 TrackerWrapper *tracker = it->second;
1961 tracker->initClick(I_color1, initFile1, displayHelp, T1);
1965 tracker = it->second;
1966 tracker->initClick(I_color2, initFile2, displayHelp, T2);
1970 tracker = it->second;
1973 tracker->getPose(
m_cMo);
1978 "Cannot initClick()! Require two cameras but there are %d cameras!",
m_mapOfTrackers.size());
2025 const std::map<std::string, std::string> &mapOfInitFiles,
bool displayHelp,
2026 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2029 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2031 std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(
m_referenceCameraName);
2033 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2034 TrackerWrapper *tracker = it_tracker->second;
2035 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2036 if (it_T != mapOfT.end())
2037 tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2039 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2040 tracker->getPose(
m_cMo);
2047 std::vector<std::string> vectorOfMissingCameraPoses;
2052 it_img = mapOfImages.find(it_tracker->first);
2053 it_initFile = mapOfInitFiles.find(it_tracker->first);
2055 if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2057 TrackerWrapper *tracker = it_tracker->second;
2058 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2061 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2067 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2068 it != vectorOfMissingCameraPoses.end(); ++it) {
2069 it_img = mapOfImages.find(*it);
2070 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2080 "Missing image or missing camera transformation "
2081 "matrix! Cannot set the pose for camera: %s!",
2130 const std::map<std::string, std::string> &mapOfInitFiles,
bool displayHelp,
2131 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2134 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
2135 std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(
m_referenceCameraName);
2137 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2138 TrackerWrapper *tracker = it_tracker->second;
2139 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2140 if (it_T != mapOfT.end())
2141 tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2143 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2144 tracker->getPose(
m_cMo);
2151 std::vector<std::string> vectorOfMissingCameraPoses;
2156 it_img = mapOfColorImages.find(it_tracker->first);
2157 it_initFile = mapOfInitFiles.find(it_tracker->first);
2159 if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2161 TrackerWrapper *tracker = it_tracker->second;
2162 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2165 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2171 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2172 it != vectorOfMissingCameraPoses.end(); ++it) {
2173 it_img = mapOfColorImages.find(*it);
2174 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2185 "Missing image or missing camera transformation "
2186 "matrix! Cannot set the pose for camera: %s!",
2194 const int ,
const std::string & )
2239 const std::string &initFile1,
const std::string &initFile2)
2242 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2243 TrackerWrapper *tracker = it->second;
2244 tracker->initFromPoints(I1, initFile1);
2248 tracker = it->second;
2249 tracker->initFromPoints(I2, initFile2);
2253 tracker = it->second;
2256 tracker->getPose(
m_cMo);
2259 tracker->getCameraParameters(
m_cam);
2264 "Cannot initFromPoints()! Require two cameras but "
2265 "there are %d cameras!",
2300 const std::string &initFile1,
const std::string &initFile2)
2303 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2304 TrackerWrapper *tracker = it->second;
2305 tracker->initFromPoints(I_color1, initFile1);
2309 tracker = it->second;
2310 tracker->initFromPoints(I_color2, initFile2);
2314 tracker = it->second;
2317 tracker->getPose(
m_cMo);
2320 tracker->getCameraParameters(
m_cam);
2325 "Cannot initFromPoints()! Require two cameras but "
2326 "there are %d cameras!",
2332 const std::map<std::string, std::string> &mapOfInitPoints)
2336 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2338 std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(
m_referenceCameraName);
2340 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2341 TrackerWrapper *tracker = it_tracker->second;
2342 tracker->initFromPoints(*it_img->second, it_initPoints->second);
2343 tracker->getPose(
m_cMo);
2350 std::vector<std::string> vectorOfMissingCameraPoints;
2354 it_img = mapOfImages.find(it_tracker->first);
2355 it_initPoints = mapOfInitPoints.find(it_tracker->first);
2357 if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2359 it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2362 vectorOfMissingCameraPoints.push_back(it_tracker->first);
2366 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2367 it != vectorOfMissingCameraPoints.end(); ++it) {
2368 it_img = mapOfImages.find(*it);
2369 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2378 "Missing image or missing camera transformation "
2379 "matrix! Cannot init the pose for camera: %s!",
2386 const std::map<std::string, std::string> &mapOfInitPoints)
2390 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
2391 std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(
m_referenceCameraName);
2393 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
2394 it_initPoints != mapOfInitPoints.end()) {
2395 TrackerWrapper *tracker = it_tracker->second;
2396 tracker->initFromPoints(*it_img->second, it_initPoints->second);
2397 tracker->getPose(
m_cMo);
2404 std::vector<std::string> vectorOfMissingCameraPoints;
2408 it_img = mapOfColorImages.find(it_tracker->first);
2409 it_initPoints = mapOfInitPoints.find(it_tracker->first);
2411 if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2413 it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2416 vectorOfMissingCameraPoints.push_back(it_tracker->first);
2420 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2421 it != vectorOfMissingCameraPoints.end(); ++it) {
2422 it_img = mapOfColorImages.find(*it);
2423 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2432 "Missing image or missing camera transformation "
2433 "matrix! Cannot init the pose for camera: %s!",
2456 const std::string &initFile1,
const std::string &initFile2)
2459 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2460 TrackerWrapper *tracker = it->second;
2461 tracker->initFromPose(I1, initFile1);
2465 tracker = it->second;
2466 tracker->initFromPose(I2, initFile2);
2470 tracker = it->second;
2473 tracker->getPose(
m_cMo);
2476 tracker->getCameraParameters(
m_cam);
2481 "Cannot initFromPose()! Require two cameras but there "
2499 const std::string &initFile1,
const std::string &initFile2)
2502 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2503 TrackerWrapper *tracker = it->second;
2504 tracker->initFromPose(I_color1, initFile1);
2508 tracker = it->second;
2509 tracker->initFromPose(I_color2, initFile2);
2513 tracker = it->second;
2516 tracker->getPose(
m_cMo);
2519 tracker->getCameraParameters(
m_cam);
2524 "Cannot initFromPose()! Require two cameras but there "
2545 const std::map<std::string, std::string> &mapOfInitPoses)
2549 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2551 std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(
m_referenceCameraName);
2553 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2554 TrackerWrapper *tracker = it_tracker->second;
2555 tracker->initFromPose(*it_img->second, it_initPose->second);
2556 tracker->getPose(
m_cMo);
2563 std::vector<std::string> vectorOfMissingCameraPoses;
2567 it_img = mapOfImages.find(it_tracker->first);
2568 it_initPose = mapOfInitPoses.find(it_tracker->first);
2570 if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2572 it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2575 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2579 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2580 it != vectorOfMissingCameraPoses.end(); ++it) {
2581 it_img = mapOfImages.find(*it);
2582 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2591 "Missing image or missing camera transformation "
2592 "matrix! Cannot init the pose for camera: %s!",
2613 const std::map<std::string, std::string> &mapOfInitPoses)
2617 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
2618 std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(
m_referenceCameraName);
2620 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2621 TrackerWrapper *tracker = it_tracker->second;
2622 tracker->initFromPose(*it_img->second, it_initPose->second);
2623 tracker->getPose(
m_cMo);
2630 std::vector<std::string> vectorOfMissingCameraPoses;
2634 it_img = mapOfColorImages.find(it_tracker->first);
2635 it_initPose = mapOfInitPoses.find(it_tracker->first);
2637 if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2639 it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2642 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2646 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2647 it != vectorOfMissingCameraPoses.end(); ++it) {
2648 it_img = mapOfColorImages.find(*it);
2649 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2658 "Missing image or missing camera transformation "
2659 "matrix! Cannot init the pose for camera: %s!",
2679 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2680 it->second->initFromPose(I1, c1Mo);
2684 it->second->initFromPose(I2, c2Mo);
2690 "This method requires 2 cameras but there are %d cameras!",
m_mapOfTrackers.size());
2708 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2709 it->second->initFromPose(I_color1, c1Mo);
2713 it->second->initFromPose(I_color2, c2Mo);
2719 "This method requires 2 cameras but there are %d cameras!",
m_mapOfTrackers.size());
2737 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2741 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2743 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
2745 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2746 TrackerWrapper *tracker = it_tracker->second;
2747 tracker->initFromPose(*it_img->second, it_camPose->second);
2748 tracker->getPose(
m_cMo);
2755 std::vector<std::string> vectorOfMissingCameraPoses;
2759 it_img = mapOfImages.find(it_tracker->first);
2760 it_camPose = mapOfCameraPoses.find(it_tracker->first);
2762 if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2764 it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2767 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2771 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2772 it != vectorOfMissingCameraPoses.end(); ++it) {
2773 it_img = mapOfImages.find(*it);
2774 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2783 "Missing image or missing camera transformation "
2784 "matrix! Cannot set the pose for camera: %s!",
2804 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2808 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
2809 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
2811 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2812 TrackerWrapper *tracker = it_tracker->second;
2813 tracker->initFromPose(*it_img->second, it_camPose->second);
2814 tracker->getPose(
m_cMo);
2821 std::vector<std::string> vectorOfMissingCameraPoses;
2825 it_img = mapOfColorImages.find(it_tracker->first);
2826 it_camPose = mapOfCameraPoses.find(it_tracker->first);
2828 if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2830 it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2833 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2837 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2838 it != vectorOfMissingCameraPoses.end(); ++it) {
2839 it_img = mapOfColorImages.find(*it);
2840 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2849 "Missing image or missing camera transformation "
2850 "matrix! Cannot set the pose for camera: %s!",
2870 if (extension ==
".xml") {
2873 #ifdef VISP_HAVE_NLOHMANN_JSON
2874 else if (extension ==
".json") {
2896 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
2898 TrackerWrapper *tracker = it->second;
2899 tracker->loadConfigFile(configFile, verbose);
2912 #ifdef VISP_HAVE_NLOHMANN_JSON
2924 std::ifstream jsonFile(settingsFile);
2925 if (!jsonFile.good()) {
2930 settings = json::parse(jsonFile);
2932 catch (json::parse_error &e) {
2933 std::stringstream msg;
2934 msg <<
"Could not parse JSON file : \n";
2936 msg << e.what() << std::endl;
2937 msg <<
"Byte position of error: " << e.byte;
2942 if (!settings.contains(
"version")) {
2945 else if (settings[
"version"].get<std::string>() != MBT_JSON_SETTINGS_VERSION) {
2952 trackersJson = settings.at(
"trackers");
2957 bool refCameraFound =
false;
2959 for (
const auto &it : trackersJson.items()) {
2960 const std::string cameraName = it.key();
2961 const json trackerJson = it.value();
2965 if (trackerJson.contains(
"camTref")) {
2972 std::cout <<
"Loading tracker " << cameraName << std::endl <<
" with settings: " << std::endl << trackerJson.dump(2);
2976 std::cout <<
"Updating an already existing tracker with JSON configuration." << std::endl;
2982 std::cout <<
"Creating a new tracker from JSON configuration." << std::endl;
2984 TrackerWrapper *tw =
new TrackerWrapper();
2988 const auto unusedCamIt = std::remove(unusedCameraNames.begin(), unusedCameraNames.end(), cameraName);
2989 unusedCameraNames.erase(unusedCamIt, unusedCameraNames.end());
2991 if (!refCameraFound) {
2996 for (
const std::string &oldCameraName : unusedCameraNames) {
3004 refTracker->getCameraParameters(
m_cam);
3008 this->
distNearClip = refTracker->getNearClippingDistance();
3009 this->
distFarClip = refTracker->getFarClippingDistance();
3012 if (settings.contains(
"display")) {
3013 const json displayJson = settings[
"display"];
3017 if (settings.contains(
"visibilityTest")) {
3018 const json visJson = settings[
"visibilityTest"];
3023 if (settings.contains(
"model")) {
3024 loadModel(settings.at(
"model").get<std::string>(), verbose);
3039 j[
"version"] = MBT_JSON_SETTINGS_VERSION;
3043 trackers[kv.first] = *(kv.second);
3046 trackers[kv.first][
"camTref"] = itTransformation->second;
3049 j[
"trackers"] = trackers;
3050 std::ofstream f(settingsFile);
3052 const unsigned indentLevel = 4;
3053 f << j.dump(indentLevel);
3083 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3084 TrackerWrapper *tracker = it_tracker->second;
3085 tracker->loadConfigFile(configFile1, verbose);
3088 tracker = it_tracker->second;
3089 tracker->loadConfigFile(configFile2, verbose);
3116 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3118 TrackerWrapper *tracker = it_tracker->second;
3120 std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
3121 if (it_config != mapOfConfigFiles.end()) {
3122 tracker->loadConfigFile(it_config->second, verbose);
3126 it_tracker->first.c_str());
3133 TrackerWrapper *tracker = it->second;
3134 tracker->getCameraParameters(
m_cam);
3167 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3169 TrackerWrapper *tracker = it->second;
3170 tracker->loadModel(modelFile, verbose, T);
3203 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3204 TrackerWrapper *tracker = it_tracker->second;
3205 tracker->loadModel(modelFile1, verbose, T1);
3208 tracker = it_tracker->second;
3209 tracker->loadModel(modelFile2, verbose, T2);
3232 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3234 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3236 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
3238 if (it_model != mapOfModelFiles.end()) {
3239 TrackerWrapper *tracker = it_tracker->second;
3240 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3242 if (it_T != mapOfT.end())
3243 tracker->loadModel(it_model->second, verbose, it_T->second);
3245 tracker->loadModel(it_model->second, verbose);
3249 it_tracker->first.c_str());
3254 #ifdef VISP_HAVE_PCL
3256 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3258 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3260 TrackerWrapper *tracker = it->second;
3261 tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3267 std::map<std::string,
const std::vector<vpColVector> *> &mapOfPointClouds,
3268 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3269 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3271 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3273 TrackerWrapper *tracker = it->second;
3274 tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3275 mapOfPointCloudHeights[it->first]);
3300 TrackerWrapper *tracker = it_tracker->second;
3301 tracker->reInitModel(I, cad_name, cMo, verbose, T);
3304 tracker->getPose(
m_cMo);
3334 TrackerWrapper *tracker = it_tracker->second;
3335 tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3338 tracker->getPose(
m_cMo);
3368 const std::string &cad_name1,
const std::string &cad_name2,
3373 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3375 it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3379 it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3384 it_tracker->second->getPose(
m_cMo);
3415 const std::string &cad_name1,
const std::string &cad_name2,
3420 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.begin();
3422 it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3426 it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3431 it_tracker->second->getPose(
m_cMo);
3456 const std::map<std::string, std::string> &mapOfModelFiles,
3457 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
bool verbose,
3458 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3461 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3463 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(
m_referenceCameraName);
3464 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
3466 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3467 it_camPose != mapOfCameraPoses.end()) {
3468 TrackerWrapper *tracker = it_tracker->second;
3469 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3470 if (it_T != mapOfT.end())
3471 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3473 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3476 tracker->getPose(
m_cMo);
3482 std::vector<std::string> vectorOfMissingCameras;
3485 it_img = mapOfImages.find(it_tracker->first);
3486 it_model = mapOfModelFiles.find(it_tracker->first);
3487 it_camPose = mapOfCameraPoses.find(it_tracker->first);
3489 if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3490 TrackerWrapper *tracker = it_tracker->second;
3491 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3494 vectorOfMissingCameras.push_back(it_tracker->first);
3499 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3501 it_img = mapOfImages.find(*it);
3502 it_model = mapOfModelFiles.find(*it);
3503 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3506 if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3509 m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3531 const std::map<std::string, std::string> &mapOfModelFiles,
3532 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
bool verbose,
3533 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3536 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(
m_referenceCameraName);
3537 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(
m_referenceCameraName);
3538 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(
m_referenceCameraName);
3540 if (it_tracker !=
m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3541 it_camPose != mapOfCameraPoses.end()) {
3542 TrackerWrapper *tracker = it_tracker->second;
3543 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3544 if (it_T != mapOfT.end())
3545 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3547 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3550 tracker->getPose(
m_cMo);
3556 std::vector<std::string> vectorOfMissingCameras;
3559 it_img = mapOfColorImages.find(it_tracker->first);
3560 it_model = mapOfModelFiles.find(it_tracker->first);
3561 it_camPose = mapOfCameraPoses.find(it_tracker->first);
3563 if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3564 it_camPose != mapOfCameraPoses.end()) {
3565 TrackerWrapper *tracker = it_tracker->second;
3566 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3569 vectorOfMissingCameras.push_back(it_tracker->first);
3574 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3576 it_img = mapOfColorImages.find(*it);
3577 it_model = mapOfModelFiles.find(*it);
3578 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3581 if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3584 m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3601 #ifdef VISP_HAVE_OGRE
3628 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
3635 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3637 TrackerWrapper *tracker = it->second;
3638 tracker->resetTracker();
3655 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3657 TrackerWrapper *tracker = it->second;
3658 tracker->setAngleAppear(a);
3677 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3678 it->second->setAngleAppear(a1);
3681 it->second->setAngleAppear(a2);
3708 for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3709 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
3712 TrackerWrapper *tracker = it_tracker->second;
3713 tracker->setAngleAppear(it->second);
3735 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3737 TrackerWrapper *tracker = it->second;
3738 tracker->setAngleDisappear(a);
3757 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3758 it->second->setAngleDisappear(a1);
3761 it->second->setAngleDisappear(a2);
3788 for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3789 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
3792 TrackerWrapper *tracker = it_tracker->second;
3793 tracker->setAngleDisappear(it->second);
3811 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3813 TrackerWrapper *tracker = it->second;
3814 tracker->setCameraParameters(camera);
3829 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3830 it->second->setCameraParameters(camera1);
3833 it->second->setCameraParameters(camera2);
3837 it->second->getCameraParameters(
m_cam);
3859 for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3860 it != mapOfCameraParameters.end(); ++it) {
3861 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
3864 TrackerWrapper *tracker = it_tracker->second;
3865 tracker->setCameraParameters(it->second);
3888 it->second = cameraTransformationMatrix;
3903 const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3905 for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3906 it != mapOfTransformationMatrix.end(); ++it) {
3907 std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3911 it_camTrans->second = it->second;
3929 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3931 TrackerWrapper *tracker = it->second;
3932 tracker->setClipping(flags);
3949 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
3950 it->second->setClipping(flags1);
3953 it->second->setClipping(flags2);
3964 std::stringstream ss;
3965 ss <<
"Require two cameras! There are " <<
m_mapOfTrackers.size() <<
" cameras!";
3979 for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
3980 it != mapOfClippingFlags.end(); ++it) {
3981 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
3984 TrackerWrapper *tracker = it_tracker->second;
3985 tracker->setClipping(it->second);
4005 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4007 TrackerWrapper *tracker = it->second;
4008 tracker->setDepthDenseFilteringMaxDistance(maxDistance);
4022 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4024 TrackerWrapper *tracker = it->second;
4025 tracker->setDepthDenseFilteringMethod(method);
4040 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4042 TrackerWrapper *tracker = it->second;
4043 tracker->setDepthDenseFilteringMinDistance(minDistance);
4058 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4060 TrackerWrapper *tracker = it->second;
4061 tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
4075 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4077 TrackerWrapper *tracker = it->second;
4078 tracker->setDepthDenseSamplingStep(stepX, stepY);
4091 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4093 TrackerWrapper *tracker = it->second;
4094 tracker->setDepthNormalFaceCentroidMethod(method);
4108 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4110 TrackerWrapper *tracker = it->second;
4111 tracker->setDepthNormalFeatureEstimationMethod(method);
4124 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4126 TrackerWrapper *tracker = it->second;
4127 tracker->setDepthNormalPclPlaneEstimationMethod(method);
4140 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4142 TrackerWrapper *tracker = it->second;
4143 tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
4156 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4158 TrackerWrapper *tracker = it->second;
4159 tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
4173 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4175 TrackerWrapper *tracker = it->second;
4176 tracker->setDepthNormalSamplingStep(stepX, stepY);
4202 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4204 TrackerWrapper *tracker = it->second;
4205 tracker->setDisplayFeatures(displayF);
4220 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4222 TrackerWrapper *tracker = it->second;
4223 tracker->setFarClippingDistance(dist);
4238 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4239 it->second->setFarClippingDistance(dist1);
4242 it->second->setFarClippingDistance(dist2);
4246 distFarClip = it->second->getFarClippingDistance();
4265 for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4267 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4270 TrackerWrapper *tracker = it_tracker->second;
4271 tracker->setFarClippingDistance(it->second);
4290 std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4291 if (it_factor != mapOfFeatureFactors.end()) {
4292 it->second = it_factor->second;
4316 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4318 TrackerWrapper *tracker = it->second;
4319 tracker->setGoodMovingEdgesRatioThreshold(threshold);
4323 #ifdef VISP_HAVE_OGRE
4337 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4339 TrackerWrapper *tracker = it->second;
4340 tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4357 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4359 TrackerWrapper *tracker = it->second;
4360 tracker->setNbRayCastingAttemptsForVisibility(attempts);
4365 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4375 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4377 TrackerWrapper *tracker = it->second;
4378 tracker->setKltOpencv(t);
4393 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4394 it->second->setKltOpencv(t1);
4397 it->second->setKltOpencv(t2);
4412 for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4413 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4416 TrackerWrapper *tracker = it_tracker->second;
4417 tracker->setKltOpencv(it->second);
4433 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4435 TrackerWrapper *tracker = it->second;
4436 tracker->setKltThresholdAcceptation(th);
4455 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4457 TrackerWrapper *tracker = it->second;
4458 tracker->setLod(useLod, name);
4462 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4472 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4474 TrackerWrapper *tracker = it->second;
4475 tracker->setKltMaskBorder(e);
4490 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4491 it->second->setKltMaskBorder(e1);
4495 it->second->setKltMaskBorder(e2);
4510 for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4512 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4515 TrackerWrapper *tracker = it_tracker->second;
4516 tracker->setKltMaskBorder(it->second);
4531 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4533 TrackerWrapper *tracker = it->second;
4534 tracker->setMask(mask);
4551 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4553 TrackerWrapper *tracker = it->second;
4554 tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4570 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4572 TrackerWrapper *tracker = it->second;
4573 tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4586 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4588 TrackerWrapper *tracker = it->second;
4589 tracker->setMovingEdge(me);
4605 std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
4606 it->second->setMovingEdge(me1);
4610 it->second->setMovingEdge(me2);
4625 for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4626 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker =
m_mapOfTrackers.find(it->first);
4629 TrackerWrapper *tracker = it_tracker->second;
4630 tracker->setMovingEdge(it->second);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setNearClippingDistance(dist);
  }

  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
  it->second->setNearClippingDistance(dist1);
  ++it;
  it->second->setNearClippingDistance(dist2);

  for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
    if (it_tracker != m_mapOfTrackers.end()) {
      TrackerWrapper *tracker = it_tracker->second;
      tracker->setNearClippingDistance(it->second);
    }
  }
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setOgreShowConfigDialog(showConfigDialog);
  }

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setOgreVisibilityTest(v);
  }

#ifdef VISP_HAVE_OGRE
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
  }
#endif

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setOptimizationMethod(opt);
  }
                                 "to be configured with only one camera!");

  TrackerWrapper *tracker = it->second;
  tracker->setPose(I, cdMo);

                                 "to be configured with only one camera!");

  TrackerWrapper *tracker = it->second;
  tracker->setPose(m_I, cdMo);

  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
  it->second->setPose(I1, c1Mo);
  ++it;
  it->second->setPose(I2, c2Mo);
  it->second->getPose(m_cMo);

  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
  it->second->setPose(I_color1, c1Mo);
  ++it;
  it->second->setPose(I_color2, c2Mo);
  it->second->getPose(m_cMo);
                                 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
{
  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
      mapOfImages.find(m_referenceCameraName);
  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);

  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->setPose(*it_img->second, it_camPose->second);
    tracker->getPose(m_cMo);
  }

  std::vector<std::string> vectorOfMissingCameraPoses;

  it_img = mapOfImages.find(it_tracker->first);
  it_camPose = mapOfCameraPoses.find(it_tracker->first);

  if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->setPose(*it_img->second, it_camPose->second);
  }
  else {
    vectorOfMissingCameraPoses.push_back(it_tracker->first);
  }

  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
       it != vectorOfMissingCameraPoses.end(); ++it) {
    it_img = mapOfImages.find(*it);
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
        m_mapOfCameraTransformationMatrix.find(*it);

                   "Missing image or missing camera transformation "
                   "matrix! Cannot set pose for camera: %s!",
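/*
  Usage sketch (added for illustration): the missing-pose branch above relies on the rigid
  transformations between cameras. A typical setup declares the reference camera and the
  transformation from the reference camera to the second one, so that every camera pose can
  be deduced from the reference pose. c2Mc1 is assumed to come from an extrinsic
  calibration; camera names are the assumed defaults of a two-camera tracker.

  \code
  tracker.setReferenceCameraName("Camera1");
  tracker.setCameraTransformationMatrix("Camera1", vpHomogeneousMatrix());
  tracker.setCameraTransformationMatrix("Camera2", c2Mc1);
  \endcode
*/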
                                 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
{
  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img =
      mapOfColorImages.find(m_referenceCameraName);
  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);

  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->setPose(*it_img->second, it_camPose->second);
    tracker->getPose(m_cMo);
  }

  std::vector<std::string> vectorOfMissingCameraPoses;

  it_img = mapOfColorImages.find(it_tracker->first);
  it_camPose = mapOfCameraPoses.find(it_tracker->first);

  if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->setPose(*it_img->second, it_camPose->second);
  }
  else {
    vectorOfMissingCameraPoses.push_back(it_tracker->first);
  }

  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
       it != vectorOfMissingCameraPoses.end(); ++it) {
    it_img = mapOfColorImages.find(*it);
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
        m_mapOfCameraTransformationMatrix.find(*it);

                   "Missing image or missing camera transformation "
                   "matrix! Cannot set pose for camera: %s!",
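/*
  Usage sketch (added for illustration): re-initializing the pose of a two-camera tracker
  from a map of images and a map of camera poses. The camera names and the c1Mo/c2Mo
  variables (poses expressed in each camera frame) are assumptions of this example.

  \code
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera1"] = &I1;
  mapOfImages["Camera2"] = &I2;

  std::map<std::string, vpHomogeneousMatrix> mapOfPoses;
  mapOfPoses["Camera1"] = c1Mo;
  mapOfPoses["Camera2"] = c2Mo;

  tracker.setPose(mapOfImages, mapOfPoses);
  \endcode
*/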
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setProjectionErrorComputation(flag);
  }

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setProjectionErrorDisplay(display);
  }

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setProjectionErrorDisplayArrowLength(length);
  }

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setProjectionErrorDisplayArrowThickness(thickness);
  }

  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
  if (it != m_mapOfTrackers.end()) {
    m_referenceCameraName = referenceCameraName;
  }
  else {
    std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
  }
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setScanLineVisibilityTest(v);
  }

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setTrackerType(type);
  }

  for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
    if (it_tracker != m_mapOfTrackers.end()) {
      TrackerWrapper *tracker = it_tracker->second;
      tracker->setTrackerType(it->second);
    }
  }
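/*
  Usage sketch (added for illustration): selecting a different feature combination per
  camera, e.g. edges + KLT on the color camera and dense depth on the depth camera of an
  RGB-D sensor. The camera names are assumptions of this example; the tracker type is
  usually chosen before loading the CAD model.

  \code
  std::map<std::string, int> mapOfTypes;
  mapOfTypes["Camera1"] = vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER;
  mapOfTypes["Camera2"] = vpMbGenericTracker::DEPTH_DENSE_TRACKER;
  tracker.setTrackerType(mapOfTypes);
  \endcode
*/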
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
  }

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
  }

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setUseEdgeTracking(name, useEdgeTracking);
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    tracker->setUseKltTracking(name, useKltTracking);
  }
#endif
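/*
  Usage sketch (added for illustration): the setUse*Tracking() helpers above enable or
  disable one feature type for the faces whose name matches the first argument. The face
  name "occluded_part" is a hypothetical name defined in the CAD model file.

  \code
  tracker.setUseEdgeTracking("occluded_part", false);
  tracker.setUseKltTracking("occluded_part", false);
  \endcode
*/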
  bool isOneTestTrackingOk = false;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    try {
      tracker->testTracking();
      isOneTestTrackingOk = true;
    }
    catch (...) {
    }
  }

  if (!isOneTestTrackingOk) {
    std::ostringstream oss;
    oss << "Not enough moving edges to track the object. Try to reduce the "
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages[m_referenceCameraName] = &I;
  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;

  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);

  std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
  mapOfColorImages[m_referenceCameraName] = &I_color;
  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;

  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages[it->first] = &I1;
  ++it;
  mapOfImages[it->first] = &I2;

  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;

  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);

  std::stringstream ss;
  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
  std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
  mapOfImages[it->first] = &I_color1;
  ++it;
  mapOfImages[it->first] = &_colorI2;

  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;

  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);

  std::stringstream ss;
  ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";

  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;

  track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);

  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;

  track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
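/*
  Usage sketch (added for illustration): feeding a grayscale image and a dense point cloud
  (one vpColVector holding X, Y, Z per pixel) to the map-based track() overload, e.g. for a
  single RGB-D sensor whose tracker combines edge and depth-dense features. The camera name
  and the pointcloud/pointcloud_width/pointcloud_height variables are assumptions of this
  example.

  \code
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera"] = &I;

  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
  mapOfPointClouds["Camera"] = &pointcloud;

  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
  mapOfWidths["Camera"] = pointcloud_width;
  mapOfHeights["Camera"] = pointcloud_height;

  tracker.track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
  \endcode
*/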
5423 #ifdef VISP_HAVE_PCL
5433 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5435 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5437 TrackerWrapper *tracker = it->second;
5440 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5448 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5452 mapOfImages[it->first] == NULL) {
5457 !mapOfPointClouds[it->first]) {
5474 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5476 TrackerWrapper *tracker = it->second;
5479 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5482 tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5485 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5487 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5492 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5509 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5511 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5512 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5514 TrackerWrapper *tracker = it->second;
5517 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5525 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5529 mapOfImages[it->first] == NULL) {
5533 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5537 mapOfImages[it->first] != NULL) {
5539 mapOfImages[it->first] = &tracker->m_I;
5543 !mapOfPointClouds[it->first]) {
5560 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5562 TrackerWrapper *tracker = it->second;
5565 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5568 tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5571 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5573 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5578 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5598 std::map<std::string,
const std::vector<vpColVector> *> &mapOfPointClouds,
5599 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5600 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5602 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5604 TrackerWrapper *tracker = it->second;
5607 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5615 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5619 mapOfImages[it->first] == NULL) {
5624 (mapOfPointClouds[it->first] == NULL)) {
5629 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5641 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5643 TrackerWrapper *tracker = it->second;
5646 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5649 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5652 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5654 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5659 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5678 std::map<std::string,
const std::vector<vpColVector> *> &mapOfPointClouds,
5679 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5680 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5682 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5683 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5685 TrackerWrapper *tracker = it->second;
5688 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5696 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5700 mapOfColorImages[it->first] == NULL) {
5704 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5708 mapOfColorImages[it->first] != NULL) {
5710 mapOfImages[it->first] = &tracker->m_I;
5714 (mapOfPointClouds[it->first] == NULL)) {
5719 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5731 for (std::map<std::string, TrackerWrapper *>::const_iterator it =
m_mapOfTrackers.begin();
5733 TrackerWrapper *tracker = it->second;
5736 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5739 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5742 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5744 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5749 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
  : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
{
#ifdef VISP_HAVE_OGRE

vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
  : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
{
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)

#ifdef VISP_HAVE_OGRE

vpMbGenericTracker::TrackerWrapper::~TrackerWrapper() { }
5797 computeVVSInit(ptr_I);
5799 if (m_error.getRows() < 4) {
5804 double normRes_1 = -1;
5805 unsigned int iter = 0;
5807 double factorEdge = 1.0;
5808 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5809 double factorKlt = 1.0;
5811 double factorDepth = 1.0;
5812 double factorDepthDense = 1.0;
5818 double mu = m_initialMu;
5820 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5823 bool isoJoIdentity = m_isoJoIdentity;
5831 unsigned int nb_edge_features = m_error_edge.
getRows();
5832 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5833 unsigned int nb_klt_features = m_error_klt.getRows();
5835 unsigned int nb_depth_features = m_error_depthNormal.getRows();
5836 unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
5838 while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
5839 computeVVSInteractionMatrixAndResidu(ptr_I);
5841 bool reStartFromLastIncrement =
false;
5842 computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
5844 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5845 if (reStartFromLastIncrement) {
5846 if (m_trackerType & KLT_TRACKER) {
5852 if (!reStartFromLastIncrement) {
5853 computeVVSWeights();
5855 if (computeCovariance) {
5857 if (!isoJoIdentity) {
5860 LVJ_true = (m_L * cVo * oJo);
5870 if (isoJoIdentity) {
5874 unsigned int rank = (m_L * cVo).kernel(K);
5884 isoJoIdentity =
false;
5893 unsigned int start_index = 0;
5894 if (m_trackerType & EDGE_TRACKER) {
5895 for (
unsigned int i = 0; i < nb_edge_features; i++) {
5896 double wi = m_w_edge[i] * m_factor[i] * factorEdge;
5898 m_weightedError[i] = wi * m_error[i];
5903 for (
unsigned int j = 0; j < m_L.getCols(); j++) {
5908 start_index += nb_edge_features;
5911 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5912 if (m_trackerType & KLT_TRACKER) {
5913 for (
unsigned int i = 0; i < nb_klt_features; i++) {
5914 double wi = m_w_klt[i] * factorKlt;
5915 W_true[start_index + i] = wi;
5916 m_weightedError[start_index + i] = wi * m_error_klt[i];
5918 num += wi *
vpMath::sqr(m_error[start_index + i]);
5921 for (
unsigned int j = 0; j < m_L.getCols(); j++) {
5922 m_L[start_index + i][j] *= wi;
5926 start_index += nb_klt_features;
5930 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
5931 for (
unsigned int i = 0; i < nb_depth_features; i++) {
5932 double wi = m_w_depthNormal[i] * factorDepth;
5933 m_w[start_index + i] = m_w_depthNormal[i];
5934 m_weightedError[start_index + i] = wi * m_error[start_index + i];
5936 num += wi *
vpMath::sqr(m_error[start_index + i]);
5939 for (
unsigned int j = 0; j < m_L.getCols(); j++) {
5940 m_L[start_index + i][j] *= wi;
5944 start_index += nb_depth_features;
5947 if (m_trackerType & DEPTH_DENSE_TRACKER) {
5948 for (
unsigned int i = 0; i < nb_depth_dense_features; i++) {
5949 double wi = m_w_depthDense[i] * factorDepthDense;
5950 m_w[start_index + i] = m_w_depthDense[i];
5951 m_weightedError[start_index + i] = wi * m_error[start_index + i];
5953 num += wi *
vpMath::sqr(m_error[start_index + i]);
5956 for (
unsigned int j = 0; j < m_L.getCols(); j++) {
5957 m_L[start_index + i][j] *= wi;
5964 computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
5967 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5968 if (m_trackerType & KLT_TRACKER) {
5975 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5976 if (m_trackerType & KLT_TRACKER) {
5980 normRes_1 = normRes;
5982 normRes = sqrt(num / den);
5988 computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
5990 if (m_trackerType & EDGE_TRACKER) {
void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
{
                     "TrackerWrapper::computeVVSInit() should not be called!");

  initMbtTracking(ptr_I);

  unsigned int nbFeatures = 0;

  if (m_trackerType & EDGE_TRACKER) {
    nbFeatures += m_error_edge.getRows();
  }
  else {
    m_error_edge.clear();
    m_weightedError_edge.clear();
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    nbFeatures += m_error_klt.getRows();
  }
  else {
    m_error_klt.clear();
    m_weightedError_klt.clear();
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    nbFeatures += m_error_depthNormal.getRows();
  }
  else {
    m_error_depthNormal.clear();
    m_weightedError_depthNormal.clear();
    m_L_depthNormal.clear();
    m_w_depthNormal.clear();
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    nbFeatures += m_error_depthDense.getRows();
  }
  else {
    m_error_depthDense.clear();
    m_weightedError_depthDense.clear();
    m_L_depthDense.clear();
    m_w_depthDense.clear();
  }

  m_L.resize(nbFeatures, 6, false, false);
  m_error.resize(nbFeatures, false);

  m_weightedError.resize(nbFeatures, false);
  m_w.resize(nbFeatures, false);
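/*
  Note added for clarity (informal, not taken from the original documentation): after
  computeVVSInit(), m_L (nbFeatures x 6), m_error, m_weightedError and m_w hold the stacked
  contributions of every enabled feature type (edge, KLT, depth normal, depth dense). Each
  VVS iteration then essentially solves a robust weighted least-squares problem on this
  stacked system,

      v = -lambda * (W * L)^{+} * (W * e)

  with W the diagonal matrix built from the robust weights m_w and lambda the gain m_lambda,
  and updates the pose with cMo = vpExponentialMap::direct(v).inverse() * cMo.
*/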
6065 "computeVVSInteractionMatrixAndR"
6066 "esidu() should not be called!");
6071 if (m_trackerType & EDGE_TRACKER) {
6075 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6076 if (m_trackerType & KLT_TRACKER) {
6081 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6085 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6089 unsigned int start_index = 0;
6090 if (m_trackerType & EDGE_TRACKER) {
6091 m_L.insert(m_L_edge, start_index, 0);
6092 m_error.insert(start_index, m_error_edge);
6094 start_index += m_error_edge.getRows();
6097 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6098 if (m_trackerType & KLT_TRACKER) {
6099 m_L.insert(m_L_klt, start_index, 0);
6100 m_error.insert(start_index, m_error_klt);
6102 start_index += m_error_klt.getRows();
6106 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6107 m_L.insert(m_L_depthNormal, start_index, 0);
6108 m_error.insert(start_index, m_error_depthNormal);
6110 start_index += m_error_depthNormal.getRows();
6113 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6114 m_L.insert(m_L_depthDense, start_index, 0);
6115 m_error.insert(start_index, m_error_depthDense);
6123 unsigned int start_index = 0;
6125 if (m_trackerType & EDGE_TRACKER) {
6127 m_w.insert(start_index, m_w_edge);
6129 start_index += m_w_edge.getRows();
6132 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6133 if (m_trackerType & KLT_TRACKER) {
6135 m_w.insert(start_index, m_w_klt);
6137 start_index += m_w_klt.getRows();
6141 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6142 if (m_depthNormalUseRobust) {
6144 m_w.insert(start_index, m_w_depthNormal);
6147 start_index += m_w_depthNormal.getRows();
6150 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6152 m_w.insert(start_index, m_w_depthDense);
6160 unsigned int thickness,
bool displayFullModel)
6162 if (displayFeatures) {
6163 std::vector<std::vector<double> > features = getFeaturesForDisplay();
6164 for (
size_t i = 0; i < features.size(); i++) {
6167 int state =
static_cast<int>(features[i][3]);
6199 double id = features[i][5];
6200 std::stringstream ss;
6205 vpImagePoint im_centroid(features[i][1], features[i][2]);
6206 vpImagePoint im_extremity(features[i][3], features[i][4]);
6213 std::vector<std::vector<double> > models =
6215 for (
size_t i = 0; i < models.size(); i++) {
6223 double n20 = models[i][3];
6224 double n11 = models[i][4];
6225 double n02 = models[i][5];
6230 #ifdef VISP_HAVE_OGRE
6231 if ((m_trackerType & EDGE_TRACKER)
6232 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6233 || (m_trackerType & KLT_TRACKER)
6237 faces.displayOgre(cMo);
6244 unsigned int thickness,
bool displayFullModel)
6246 if (displayFeatures) {
6247 std::vector<std::vector<double> > features = getFeaturesForDisplay();
6248 for (
size_t i = 0; i < features.size(); i++) {
6251 int state =
static_cast<int>(features[i][3]);
6283 double id = features[i][5];
6284 std::stringstream ss;
6289 vpImagePoint im_centroid(features[i][1], features[i][2]);
6290 vpImagePoint im_extremity(features[i][3], features[i][4]);
6297 std::vector<std::vector<double> > models =
6299 for (
size_t i = 0; i < models.size(); i++) {
6307 double n20 = models[i][3];
6308 double n11 = models[i][4];
6309 double n02 = models[i][5];
6314 #ifdef VISP_HAVE_OGRE
6315 if ((m_trackerType & EDGE_TRACKER)
6316 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6317 || (m_trackerType & KLT_TRACKER)
6321 faces.displayOgre(cMo);
6326 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6328 std::vector<std::vector<double> > features;
6330 if (m_trackerType & EDGE_TRACKER) {
6332 features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6335 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6336 if (m_trackerType & KLT_TRACKER) {
6338 features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6342 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6344 features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6345 m_featuresToBeDisplayedDepthNormal.end());
6351 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(
unsigned int width,
6352 unsigned int height,
6355 bool displayFullModel)
6357 std::vector<std::vector<double> > models;
6360 if (m_trackerType == EDGE_TRACKER) {
6363 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6364 else if (m_trackerType == KLT_TRACKER) {
6368 else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6371 else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6376 if (m_trackerType & EDGE_TRACKER) {
6377 std::vector<std::vector<double> > edgeModels =
6379 models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6383 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6384 std::vector<std::vector<double> > depthDenseModels =
6386 models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6395 if (!modelInitialised) {
6399 if (useScanLine || clippingFlag > 3)
6402 bool reInitialisation =
false;
6404 faces.setVisible(I.
getWidth(), I.
getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6407 #ifdef VISP_HAVE_OGRE
6408 if (!faces.isOgreInitialised()) {
6411 faces.setOgreShowConfigDialog(ogreShowConfigDialog);
6412 faces.initOgre(m_cam);
6416 ogreShowConfigDialog =
false;
6419 faces.setVisibleOgre(I.
getWidth(), I.
getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6421 faces.setVisible(I.
getWidth(), I.
getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6426 faces.computeClippedPolygons(m_cMo, m_cam);
6430 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6431 if (m_trackerType & KLT_TRACKER)
6435 if (m_trackerType & EDGE_TRACKER) {
6444 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6447 if (m_trackerType & DEPTH_DENSE_TRACKER)
6451 void vpMbGenericTracker::TrackerWrapper::initCircle(
const vpPoint &p1,
const vpPoint &p2,
const vpPoint &p3,
6452 double radius,
int idFace,
const std::string &name)
6454 if (m_trackerType & EDGE_TRACKER)
6457 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6458 if (m_trackerType & KLT_TRACKER)
6463 void vpMbGenericTracker::TrackerWrapper::initCylinder(
const vpPoint &p1,
const vpPoint &p2,
double radius,
int idFace,
6464 const std::string &name)
6466 if (m_trackerType & EDGE_TRACKER)
6469 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6470 if (m_trackerType & KLT_TRACKER)
6475 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(
vpMbtPolygon &polygon)
6477 if (m_trackerType & EDGE_TRACKER)
6480 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6481 if (m_trackerType & KLT_TRACKER)
6485 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6488 if (m_trackerType & DEPTH_DENSE_TRACKER)
6492 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(
vpMbtPolygon &polygon)
6494 if (m_trackerType & EDGE_TRACKER)
6497 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6498 if (m_trackerType & KLT_TRACKER)
6502 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6505 if (m_trackerType & DEPTH_DENSE_TRACKER)
6511 if (m_trackerType & EDGE_TRACKER) {
6517 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(
const std::string &configFile,
bool verbose)
6523 xmlp.setVerbose(verbose);
6524 xmlp.setCameraParameters(m_cam);
6526 xmlp.setAngleDisappear(
vpMath::deg(angleDisappears));
6532 xmlp.setKltMaxFeatures(10000);
6533 xmlp.setKltWindowSize(5);
6534 xmlp.setKltQuality(0.01);
6535 xmlp.setKltMinDistance(5);
6536 xmlp.setKltHarrisParam(0.01);
6537 xmlp.setKltBlockSize(3);
6538 xmlp.setKltPyramidLevels(3);
6539 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6540 xmlp.setKltMaskBorder(maskBorder);
6544 xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6545 xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6546 xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6547 xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6548 xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6549 xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6552 xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6553 xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6557 std::cout <<
" *********** Parsing XML for";
6560 std::vector<std::string> tracker_names;
6561 if (m_trackerType & EDGE_TRACKER)
6562 tracker_names.push_back(
"Edge");
6563 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6564 if (m_trackerType & KLT_TRACKER)
6565 tracker_names.push_back(
"Klt");
6567 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6568 tracker_names.push_back(
"Depth Normal");
6569 if (m_trackerType & DEPTH_DENSE_TRACKER)
6570 tracker_names.push_back(
"Depth Dense");
6573 for (
size_t i = 0; i < tracker_names.size(); i++) {
6574 std::cout <<
" " << tracker_names[i];
6575 if (i == tracker_names.size() - 1) {
6580 std::cout <<
"Model-Based Tracker ************ " << std::endl;
6583 xmlp.parse(configFile);
6590 xmlp.getCameraParameters(camera);
6591 setCameraParameters(camera);
6593 angleAppears =
vpMath::rad(xmlp.getAngleAppear());
6594 angleDisappears =
vpMath::rad(xmlp.getAngleDisappear());
6596 if (xmlp.hasNearClippingDistance())
6597 setNearClippingDistance(xmlp.getNearClippingDistance());
6599 if (xmlp.hasFarClippingDistance())
6600 setFarClippingDistance(xmlp.getFarClippingDistance());
6602 if (xmlp.getFovClipping()) {
6606 useLodGeneral = xmlp.getLodState();
6607 minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
6608 minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();
6610 applyLodSettingInConfig =
false;
6611 if (this->getNbPolygon() > 0) {
6612 applyLodSettingInConfig =
true;
6613 setLod(useLodGeneral);
6614 setMinLineLengthThresh(minLineLengthThresholdGeneral);
6615 setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
6620 xmlp.getEdgeMe(meParser);
6624 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6625 tracker.setMaxFeatures((
int)xmlp.getKltMaxFeatures());
6626 tracker.setWindowSize((
int)xmlp.getKltWindowSize());
6627 tracker.setQuality(xmlp.getKltQuality());
6628 tracker.setMinDistance(xmlp.getKltMinDistance());
6629 tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6630 tracker.setBlockSize((
int)xmlp.getKltBlockSize());
6631 tracker.setPyramidLevels((
int)xmlp.getKltPyramidLevels());
6632 maskBorder = xmlp.getKltMaskBorder();
6635 faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6639 setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
6640 setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
6641 setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
6642 setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
6643 setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
6646 setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
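/*
  Usage sketch (added for illustration): the settings parsed above (camera parameters,
  moving-edge, KLT, depth and clipping/LOD parameters) come from the XML file given to
  loadConfigFile(). The file name below is a placeholder.

  \code
  vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
  tracker.loadConfigFile("teabox.xml");

  vpCameraParameters cam;
  tracker.getCameraParameters(cam);   // now reflects the <camera> block of the file
  std::cout << cam << std::endl;
  \endcode
*/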
6649 #ifdef VISP_HAVE_PCL
6651 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6653 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6655 if (m_trackerType & KLT_TRACKER) {
6663 if (m_trackerType & EDGE_TRACKER) {
6664 bool newvisibleface =
false;
6668 faces.computeClippedPolygons(m_cMo, m_cam);
6674 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6678 if (m_trackerType & DEPTH_DENSE_TRACKER)
6682 if (m_trackerType & EDGE_TRACKER) {
6689 if (computeProjError) {
6696 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6698 if (m_trackerType & EDGE_TRACKER) {
6703 std::cerr <<
"Error in moving edge tracking" << std::endl;
6708 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6709 if (m_trackerType & KLT_TRACKER) {
6714 std::cerr <<
"Error in KLT tracking: " << e.
what() << std::endl;
6720 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6725 std::cerr <<
"Error in Depth normal tracking" << std::endl;
6730 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6735 std::cerr <<
"Error in Depth dense tracking" << std::endl;
6743 const unsigned int pointcloud_width,
6744 const unsigned int pointcloud_height)
6746 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6748 if (m_trackerType & KLT_TRACKER) {
6756 if (m_trackerType & EDGE_TRACKER) {
6757 bool newvisibleface =
false;
6761 faces.computeClippedPolygons(m_cMo, m_cam);
6767 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6771 if (m_trackerType & DEPTH_DENSE_TRACKER)
6775 if (m_trackerType & EDGE_TRACKER) {
6782 if (computeProjError) {
6789 const std::vector<vpColVector> *
const point_cloud,
6790 const unsigned int pointcloud_width,
6791 const unsigned int pointcloud_height)
6793 if (m_trackerType & EDGE_TRACKER) {
6798 std::cerr <<
"Error in moving edge tracking" << std::endl;
6803 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6804 if (m_trackerType & KLT_TRACKER) {
6809 std::cerr <<
"Error in KLT tracking: " << e.
what() << std::endl;
6815 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6820 std::cerr <<
"Error in Depth tracking" << std::endl;
6825 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6830 std::cerr <<
"Error in Depth dense tracking" << std::endl;
6848 for (
unsigned int i = 0; i < scales.size(); i++) {
6850 for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
6857 for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
6865 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
6873 cylinders[i].clear();
6881 nbvisiblepolygone = 0;
6884 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6886 for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
6888 if (kltpoly != NULL) {
6893 kltPolygons.clear();
6895 for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
6898 if (kltPolyCylinder != NULL) {
6899 delete kltPolyCylinder;
6901 kltPolyCylinder = NULL;
6903 kltCylinders.clear();
6906 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
6913 circles_disp.clear();
6915 firstInitialisation =
true;
6920 for (
size_t i = 0; i < m_depthNormalFaces.size(); i++) {
6921 delete m_depthNormalFaces[i];
6922 m_depthNormalFaces[i] = NULL;
6924 m_depthNormalFaces.clear();
6927 for (
size_t i = 0; i < m_depthDenseFaces.size(); i++) {
6928 delete m_depthDenseFaces[i];
6929 m_depthDenseFaces[i] = NULL;
6931 m_depthDenseFaces.clear();
6935 loadModel(cad_name, verbose, T);
6937 initFromPose(*I, cMo);
6940 initFromPose(*I_color, cMo);
6948 reInitModel(&I, NULL, cad_name, cMo, verbose, T);
6955 reInitModel(NULL, &I_color, cad_name, cMo, verbose, T);
void vpMbGenericTracker::TrackerWrapper::resetTracker()
{
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)

void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
{
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)

void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)

void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)

void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
{
#ifdef VISP_HAVE_OGRE
  faces.getOgreContext()->setWindowName("TrackerWrapper");
7003 bool performKltSetPose =
false;
7008 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7009 if (m_trackerType & KLT_TRACKER) {
7010 performKltSetPose =
true;
7012 if (useScanLine || clippingFlag > 3) {
7013 m_cam.computeFov(I ? I->
getWidth() : m_I.getWidth(), I ? I->
getHeight() : m_I.getHeight());
7020 if (!performKltSetPose) {
7026 if (m_trackerType & EDGE_TRACKER) {
7031 faces.computeClippedPolygons(m_cMo, m_cam);
7032 faces.computeScanLineRender(m_cam, I ? I->
getWidth() : m_I.getWidth(), I ? I->
getHeight() : m_I.getHeight());
7036 if (m_trackerType & EDGE_TRACKER) {
7037 initPyramid(I, Ipyramid);
7039 unsigned int i = (
unsigned int)scales.size();
7049 cleanPyramid(Ipyramid);
7052 if (m_trackerType & EDGE_TRACKER) {
7066 setPose(&I, NULL, cdMo);
7071 setPose(NULL, &I_color, cdMo);
7074 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(
const bool &flag)
7079 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(
const bool &v)
7082 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7089 void vpMbGenericTracker::TrackerWrapper::setTrackerType(
int type)
7091 if ((type & (EDGE_TRACKER |
7092 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7095 DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7099 m_trackerType = type;
7102 void vpMbGenericTracker::TrackerWrapper::testTracking()
7104 if (m_trackerType & EDGE_TRACKER) {
7110 #
if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
7115 if ((m_trackerType & (EDGE_TRACKER
7116 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7120 std::cerr <<
"Bad tracker type: " << m_trackerType << std::endl;
7124 #if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
7134 #ifdef VISP_HAVE_PCL
7136 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
7138 if ((m_trackerType & (EDGE_TRACKER |
7139 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7142 DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7143 std::cerr <<
"Bad tracker type: " << m_trackerType << std::endl;
7147 if (m_trackerType & (EDGE_TRACKER
7148 #
if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7156 if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) {
7163 preTracking(ptr_I, point_cloud);
7169 covarianceMatrix = -1;
7173 if (m_trackerType == EDGE_TRACKER)
7176 postTracking(ptr_I, point_cloud);
7180 std::cerr <<
"Exception: " << e.
what() << std::endl;
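/*
  End-to-end usage sketch (added for illustration, assuming a single grayscale camera and
  hypothetical "cube.cao", "cube.xml", "cube.init" and "video.mp4" files):

  \code
  #include <visp3/gui/vpDisplayX.h>
  #include <visp3/io/vpVideoReader.h>
  #include <visp3/mbt/vpMbGenericTracker.h>

  int main()
  {
    vpImage<unsigned char> I;
    vpVideoReader g;
    g.setFileName("video.mp4");
    g.open(I);

    vpDisplayX d(I);

    vpMbGenericTracker tracker(1, vpMbGenericTracker::EDGE_TRACKER);
    tracker.loadConfigFile("cube.xml");
    tracker.loadModel("cube.cao");
    tracker.setDisplayFeatures(true);
    tracker.initClick(I, "cube.init", true);

    vpHomogeneousMatrix cMo;
    vpCameraParameters cam;
    tracker.getCameraParameters(cam);

    while (!g.end()) {
      g.acquire(I);
      vpDisplay::display(I);
      tracker.track(I);
      tracker.getPose(cMo);
      tracker.display(I, cMo, cam, vpColor::red, 2);
      vpDisplay::flush(I);
    }
    return 0;
  }
  \endcode
*/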