Visual Servoing Platform  version 3.6.1 under development (2024-11-15)
tutorial-mb-generic-tracker-full.cpp
#include <cstdlib>  // std::atoi
#include <iomanip>  // std::setprecision
#include <iostream>
#include <memory>   // std::shared_ptr
#include <sstream>

#include <visp3/core/vpConfig.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/mbt/vpMbGenericTracker.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/io/vpVideoWriter.h>

#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_VIDEOIO) && defined(HAVE_OPENCV_HIGHGUI)
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif

namespace
{
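// Flatten a cMo homogeneous matrix into a 6-element vector [tx ty tz tux tuy tuz] used for npz logging.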
std::vector<double> poseToVec(const vpHomogeneousMatrix &cMo)
{
  vpTranslationVector t = cMo.getTranslationVector();
  vpThetaUVector tu = cMo.getThetaUVector();
  std::vector<double> vec { t[0], t[1], t[2], tu[0], tu[1], tu[2] };

  return vec;
}
} // namespace
#endif

int main(int argc, char **argv)
{
#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_VIDEOIO) && defined(HAVE_OPENCV_HIGHGUI)
  std::string opt_videoname = "model/teabox/teabox.mp4";
  std::string opt_modelname = "model/teabox/teabox.cao";
  int opt_tracker = 0;
  int opt_video_first_frame = -1;
  int opt_downscale_img = 1;
  bool opt_verbose = false;
  bool opt_plot = true;
  bool opt_display_scale_auto = false;
  vpColVector opt_dof_to_estimate(6, 1.); // Here we consider 6 dof estimation
  std::string opt_save;
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
  std::string opt_save_results;
#endif
  unsigned int thickness = 2;

  std::shared_ptr<vpDisplay> display;
  std::shared_ptr<vpPlot> plot;
  std::shared_ptr<vpVideoWriter> writer;

  try {
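    // Parse command-line options.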
    for (int i = 0; i < argc; i++) {
      if (std::string(argv[i]) == "--video") {
        opt_videoname = std::string(argv[++i]);
      }
      else if (std::string(argv[i]) == "--video-first-frame") {
        opt_video_first_frame = std::atoi(argv[++i]);
      }
      else if (std::string(argv[i]) == "--model") {
        opt_modelname = std::string(argv[++i]);
      }
      else if (std::string(argv[i]) == "--tracker") {
        opt_tracker = std::atoi(argv[++i]);
      }
      else if (std::string(argv[i]) == "--downscale-img") {
        opt_downscale_img = std::atoi(argv[++i]);
      }
      else if (std::string(argv[i]) == "--save") {
        opt_save = std::string(argv[++i]);
      }
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
      else if (std::string(argv[i]) == "--save-results") {
        opt_save_results = std::string(argv[++i]);
      }
#endif
      else if (std::string(argv[i]) == "--plot") {
        opt_plot = true;
      }
      else if (std::string(argv[i]) == "--dof") {
        for (int j = 0; j < 6; j++) {
          int val = std::atoi(argv[++i]);
          if (val == 0 || val == 1) {
            opt_dof_to_estimate[j] = val;
          }
          else {
            std::cout << "Error: wrong value after --dof option. Authorized values are 0 or 1 for each of the 6 dof to estimate." << std::endl;
            return EXIT_FAILURE;
          }
        }
      }
      else if (std::string(argv[i]) == "--display-scale-auto") {
        opt_display_scale_auto = true;
      }
      else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
        opt_verbose = true;
      }
      else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
        std::cout << "\nSYNOPSIS " << std::endl
          << argv[0]
          << " [--video <video name>]"
          << " [--video-first-frame <image index>]"
          << " [--model <model name>]"
          << " [--tracker <0=edge|1=keypoint|2=hybrid>]"
          << " [--downscale-img <scale factor>]"
          << " [--dof <0/1 0/1 0/1 0/1 0/1 0/1>]"
          << " [--save <e.g. results-%04d.png>]"
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
          << " [--save-results <e.g. tracking_poses.npz>]"
#endif
          << " [--display-scale-auto]"
          << " [--plot]"
          << " [--verbose,-v]"
          << " [--help,-h]"
          << std::endl;
        std::cout << "\nOPTIONS " << std::endl
          << "  --video <video name>" << std::endl
          << "    Input video name." << std::endl
          << "    Default: model/teabox/teabox.mp4" << std::endl
          << std::endl
          << "  --video-first-frame <image index>" << std::endl
          << "    Index of the first image to process." << std::endl
          << "    Set to -1 to process the first image of the video." << std::endl
          << "    Default: -1" << std::endl
          << std::endl
          << "  --model <model name>" << std::endl
          << "    CAD model filename. Supported formats are .cao and .wrl." << std::endl
          << "    To use the wrl format, ViSP needs to be built with the Coin3D third-party library." << std::endl
          << "    Default: model/teabox/teabox.cao" << std::endl
          << std::endl
          << "  --tracker <0=edge|1=keypoint|2=hybrid>" << std::endl
          << "    Tracker type:" << std::endl
          << "    - when 0: use only moving-edges" << std::endl
          << "    - when 1: use only KLT keypoints" << std::endl
          << "    - when 2: use the hybrid scheme, moving-edges and KLT keypoints." << std::endl
          << "    Default: 0" << std::endl
          << std::endl
          << "  --downscale-img <scale factor>" << std::endl
          << "    Downscale input image width and height by this factor." << std::endl
          << "    When set to 1, the image is not downscaled. When set to 2, the image" << std::endl
          << "    width and height are divided by 2." << std::endl
          << "    Default: 1" << std::endl
          << std::endl
          << "  --dof <0/1 0/1 0/1 0/1 0/1 0/1>" << std::endl
          << "    6-dim vector of 0 and 1 to indicate which dof [tx ty tz rx ry rz]" << std::endl
          << "    has to be estimated." << std::endl
          << "    When set to 1 the dof is estimated. When set to 0 the dof is not" << std::endl
          << "    estimated. Its value is then the one from the initialisation." << std::endl
          << "    Default: 1 1 1 1 1 1 (to estimate all 6 dof)" << std::endl
          << std::endl
          << "  --save <e.g. results-%04d.png>" << std::endl
          << "    Name of the saved image sequence that contains tracking results in overlay." << std::endl
          << "    When the name contains a folder like in the next example, the folder" << std::endl
          << "    is created if it doesn't exist."
          << "    Example: \"result/image-%04d.png\"." << std::endl
          << std::endl
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
          << "  --save-results <e.g. tracking_results.npz>" << std::endl
          << "    Name of the npz file containing cMo data estimated from MBT." << std::endl
          << "    When the name contains a folder like in the next example, the folder" << std::endl
          << "    is created if it doesn't exist."
          << "    Example: \"result/tracking_results.npz\"." << std::endl
          << std::endl
#endif
          << "  --display-scale-auto" << std::endl
          << "    Enable display window auto scaling to ensure that the image is fully" << std::endl
          << "    visible on the screen. Useful for large images." << std::endl
          << "    Note that this option doesn't affect the size of the processed images." << std::endl
          << std::endl
          << "  --plot" << std::endl
          << "    Open a window that plots the estimated pose evolution." << std::endl
          << std::endl
          << "  --verbose, -v" << std::endl
          << "    Enable verbose mode." << std::endl
          << std::endl
          << "  --help, -h" << std::endl
          << "    Display this helper message." << std::endl
          << std::endl;
        return EXIT_SUCCESS;
      }
    }
    std::string parentname = vpIoTools::getParent(opt_modelname);
    std::string objectname = vpIoTools::getNameWE(opt_modelname);

    if (!parentname.empty())
      objectname = parentname + "/" + objectname;

    std::cout << " *********** Tracker config ************ " << std::endl;
    std::cout << "Video name                 : " << opt_videoname << std::endl;
    std::cout << "Tracker cad model file     : " << objectname << ".[cao or wrl]" << std::endl;
    std::cout << "Tracker init file          : " << objectname << ".init" << std::endl;
    std::cout << "Tracker optional init image: " << objectname << ".[png,ppm,jpg]" << std::endl;
    if (opt_downscale_img > 1) {
      std::cout << "Downscale image factor     : " << opt_downscale_img << std::endl;
    }
    std::cout << "Dof to estimate            : " << opt_dof_to_estimate.t() << std::endl;

    // Create output folder if needed
    if (!opt_save.empty()) {
      std::string parent = vpIoTools::getParent(opt_save);
      if (!parent.empty()) {
        std::cout << "Create output directory: " << parent << std::endl;
        vpIoTools::makeDirectory(parent);
      }
    }
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
    if (!opt_save_results.empty()) {
      std::string parent = vpIoTools::getParent(opt_save_results);
      if (!parent.empty()) {
        std::cout << "Create output directory for the npz file: " << parent << std::endl;
        vpIoTools::makeDirectory(parent);
      }
    }
#endif

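    // Grayscale buffers: Ivideo receives the raw video frame, I the (possibly subsampled) image that is processed.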
    vpImage<unsigned char> Ivideo;
    vpImage<unsigned char> I;

    vpVideoReader g;
    g.setFileName(opt_videoname);
    if (opt_video_first_frame > 0) {
      g.setFirstFrameIndex(static_cast<unsigned int>(opt_video_first_frame));
    }
    if (opt_downscale_img > 1) {
      g.open(Ivideo);
      Ivideo.subsample(opt_downscale_img, opt_downscale_img, I);
    }
    else {
      g.open(I);
    }

    vpImage<vpRGBa> O;
    if (!opt_save.empty()) {
      writer = std::make_shared<vpVideoWriter>();
      writer->setFileName(opt_save);
      writer->open(O);
    }

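    // Create a display window with the first available GUI backend (X11, GDI or OpenCV).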
#if defined(VISP_HAVE_X11)
    display = std::make_shared<vpDisplayX>();
#elif defined(VISP_HAVE_GDI)
    display = std::make_shared<vpDisplayGDI>();
#elif defined(HAVE_OPENCV_HIGHGUI)
    display = std::make_shared<vpDisplayOpenCV>();
#endif
    if (opt_display_scale_auto) {
      display->setDownScalingFactor(vpDisplay::SCALE_AUTO);
    }
    display->init(I, 100, 100, "Model-based tracker");

    if (opt_plot) {
      plot = std::make_shared<vpPlot>(2, 700, 700, display->getWindowXPosition() + I.getWidth() / display->getDownScalingFactor() + 30,
                                      display->getWindowYPosition(), "Estimated pose");
      plot->initGraph(0, 3); // Translation
      plot->setTitle(0, "Translation [m]");
      plot->setColor(0, 0, vpColor::red);
      plot->setColor(0, 1, vpColor::green);
      plot->setColor(0, 2, vpColor::blue);
      plot->initGraph(1, 3); // Attitude
      plot->setTitle(1, "Attitude thetaU [deg]");
      plot->setColor(1, 0, vpColor::red);
      plot->setColor(1, 1, vpColor::green);
      plot->setColor(1, 2, vpColor::blue);
    }

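    // Instantiate the generic model-based tracker and select the feature type from --tracker.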
    vpMbGenericTracker tracker;
    if (opt_tracker == 0)
      tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER);
#if defined(VISP_HAVE_MODULE_KLT) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    else if (opt_tracker == 1)
      tracker.setTrackerType(vpMbGenericTracker::KLT_TRACKER);
    else
      tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
#else
    else {
      std::cout << "klt and hybrid model-based trackers are not available since the visp_klt module is not available. "
        "In CMake GUI turn the visp_klt module ON, then configure and build ViSP again."
        << std::endl;
      return EXIT_FAILURE;
    }
#endif

    bool usexml = false;
#if defined(VISP_HAVE_PUGIXML)
    if (vpIoTools::checkFilename(objectname + ".xml")) {
      std::cout << "Tracker config file        : " << objectname + ".xml" << std::endl;
      tracker.loadConfigFile(objectname + ".xml");
      usexml = true;
    }
#endif

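    // Fallback: configure moving-edge / KLT settings and camera parameters manually when no XML config file is found.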
    if (!usexml) {
      if (opt_tracker == 0 || opt_tracker == 2) {
        vpMe me;
        me.setMaskSize(5);
        me.setMaskNumber(180);
        me.setRange(8);
        me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD);
        me.setThreshold(20);
        me.setMu1(0.5);
        me.setMu2(0.5);
        me.setSampleStep(4);
        tracker.setMovingEdge(me);
      }

#if defined(VISP_HAVE_MODULE_KLT) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (opt_tracker == 1 || opt_tracker == 2) {
        vpKltOpencv klt_settings;
        klt_settings.setMaxFeatures(300);
        klt_settings.setWindowSize(5);
        klt_settings.setQuality(0.015);
        klt_settings.setMinDistance(8);
        klt_settings.setHarrisFreeParameter(0.01);
        klt_settings.setBlockSize(3);
        klt_settings.setPyramidLevels(3);
        tracker.setKltOpencv(klt_settings);
        tracker.setKltMaskBorder(5);
      }
#endif

      tracker.setAngleAppear(vpMath::rad(70));
      tracker.setAngleDisappear(vpMath::rad(80));

      tracker.setNearClippingDistance(0.1);
      tracker.setFarClippingDistance(100.0);
      tracker.setClipping(tracker.getClipping() | vpMbtPolygon::FOV_CLIPPING);

      vpCameraParameters cam;
      cam.initPersProjWithoutDistortion(839.21470, 839.44555, 325.66776, 243.69727);
      tracker.setCameraParameters(cam);
    }

    tracker.setOgreVisibilityTest(false);
    tracker.setOgreShowConfigDialog(false);
    tracker.setScanLineVisibilityTest(true);

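    // Load the CAD model (.cao preferred, .wrl requires Coin3D) and enable feature display.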
    if (vpIoTools::checkFilename(objectname + ".cao"))
      tracker.loadModel(objectname + ".cao");
    else if (vpIoTools::checkFilename(objectname + ".wrl"))
      tracker.loadModel(objectname + ".wrl");

    tracker.setDisplayFeatures(true);

    if (opt_dof_to_estimate != 1.) {
      tracker.setEstimatedDoF(opt_dof_to_estimate);
    }

    vpCameraParameters cam;
    tracker.getCameraParameters(cam);
    std::cout << "Camera parameters: \n" << cam << std::endl;

    std::cout << "Initialize tracker on image size: " << I.getWidth() << " x " << I.getHeight() << std::endl;

    std::vector<double> vec_poses;
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
    if (!opt_save_results.empty()) {
      const unsigned int height = I.getHeight(), width = I.getWidth();
      visp::cnpy::npz_save(opt_save_results, "height", &height, { 1 }, "w");
      visp::cnpy::npz_save(opt_save_results, "width", &width, { 1 }, "a");

      const double cam_px = cam.get_px(), cam_py = cam.get_py(), cam_u0 = cam.get_u0(), cam_v0 = cam.get_v0();
      visp::cnpy::npz_save(opt_save_results, "cam_px", &cam_px, { 1 }, "a");
      visp::cnpy::npz_save(opt_save_results, "cam_py", &cam_py, { 1 }, "a");
      visp::cnpy::npz_save(opt_save_results, "cam_u0", &cam_u0, { 1 }, "a");
      visp::cnpy::npz_save(opt_save_results, "cam_v0", &cam_v0, { 1 }, "a");
    }
#endif

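    // Initialize the pose from user clicks on the points listed in the .init file.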
    tracker.initClick(I, objectname + ".init", true);

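    // Tracking loop: acquire, track, display, and optionally save each frame.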
    while (!g.end()) {
      if (opt_downscale_img > 1) {
        g.acquire(Ivideo);
        Ivideo.subsample(opt_downscale_img, opt_downscale_img, I);
      }
      else {
        g.acquire(I);
      }
      std::stringstream ss;
      ss << "Process image " << g.getFrameIndex();
      if (opt_verbose) {
        std::cout << "-- " << ss.str() << std::endl;
      }
      vpDisplay::display(I);
      tracker.track(I);
      vpHomogeneousMatrix cMo;
      tracker.getPose(cMo);
      tracker.display(I, cMo, cam, vpColor::red, thickness);
      vpDisplay::displayFrame(I, cMo, cam, 0.025, vpColor::none, thickness);
      vpDisplay::displayText(I, 20 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), "A click to exit...", vpColor::red);
      vpDisplay::displayText(I, 40 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), ss.str(), vpColor::red);
      {
        std::stringstream ss;
        ss << "Features";
        if (tracker.getTrackerType() & vpMbGenericTracker::EDGE_TRACKER) {
          ss << " edge: " << tracker.getNbFeaturesEdge();
        }
#if defined(VISP_HAVE_MODULE_KLT) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        if (tracker.getTrackerType() & vpMbGenericTracker::KLT_TRACKER) {
          ss << " klt: " << tracker.getNbFeaturesKlt();
        }
#endif
        vpDisplay::displayText(I, 60 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), ss.str(), vpColor::red);
        if (opt_verbose) {
          std::cout << ss.str() << std::endl;
          std::cout << "cMo:\n" << cMo << std::endl;
        }
      }
      {
        double proj_error = tracker.computeCurrentProjectionError(I, cMo, cam);
        std::stringstream ss;
        ss << "Projection error: " << std::setprecision(2) << proj_error << " deg";
        vpDisplay::displayText(I, 80 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), ss.str(), vpColor::red);
        if (opt_verbose) {
          std::cout << ss.str() << std::endl;
        }
      }
      vpDisplay::flush(I);

      if (opt_plot) {
        vpTranslationVector c_t_o = cMo.getTranslationVector();
        vpThetaUVector c_tu_o(cMo.getRotationMatrix());
        vpColVector c_tu_o_deg = vpMath::deg(c_tu_o);
        plot->plot(0, g.getFrameIndex(), c_t_o);
        plot->plot(1, g.getFrameIndex(), c_tu_o_deg);
      }

      if (!opt_save.empty()) {
        vpDisplay::getImage(I, O);
        writer->saveFrame(O);
      }

#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
      if (!opt_save_results.empty()) {
        std::vector<double> vec_pose = poseToVec(cMo);
        vec_poses.insert(vec_poses.end(), vec_pose.begin(), vec_pose.end());
      }
#endif

      if (vpDisplay::getClick(I, false))
        break;
    }

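    // Append all estimated poses (one 6-dim row [tx ty tz tux tuy tuz] per tracked frame) to the npz file.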
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
    if (!opt_save_results.empty()) {
      visp::cnpy::npz_save(opt_save_results, "vec_poses", vec_poses.data(), { static_cast<size_t>(vec_poses.size() / 6), 6 }, "a");
    }
#endif
  }
  catch (const vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
  }
#ifdef VISP_HAVE_OGRE
  catch (Ogre::Exception &e) {
    std::cout << "Catch an Ogre exception: " << e.getDescription() << std::endl;
  }
#endif
#else
  (void)argc;
  (void)argv;
  std::cout << "Install OpenCV and rebuild ViSP to use this example." << std::endl;
#endif
  return EXIT_SUCCESS;
}
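
The poses written with --save-results can be loaded back with ViSP's embedded cnpy wrapper (visp::cnpy::npz_load, declared alongside npz_save in vpIoTools.h). The following standalone sketch is not part of the tutorial above: it assumes a file named tracking_results.npz produced by this tutorial and a ViSP build with VISP_HAVE_MINIZ, and simply reprints the stored cMo poses.

// Minimal sketch (assumption: tracking_results.npz was produced with --save-results).
#include <iostream>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpIoTools.h>

#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif

int main()
{
#if defined(VISP_HAVE_MINIZ) && defined(VISP_HAVE_WORKING_REGEX)
  visp::cnpy::npz_t npz_data = visp::cnpy::npz_load("tracking_results.npz");

  // Poses are stored as a N x 6 array: [tx ty tz tux tuy tuz] per tracked frame.
  visp::cnpy::NpyArray arr_poses = npz_data["vec_poses"];
  const double *poses = arr_poses.data<double>();
  size_t nb_frames = arr_poses.shape[0];

  for (size_t i = 0; i < nb_frames; ++i) {
    vpTranslationVector t(poses[6 * i], poses[6 * i + 1], poses[6 * i + 2]);
    vpThetaUVector tu(poses[6 * i + 3], poses[6 * i + 4], poses[6 * i + 5]);
    vpHomogeneousMatrix cMo(t, tu);
    std::cout << "Frame " << i << " cMo:\n" << cMo << std::endl;
  }
#endif
  return EXIT_SUCCESS;
}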