tutorial-mb-generic-tracker-full.cpp
#include <visp3/core/vpIoTools.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/mbt/vpMbGenericTracker.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/io/vpVideoWriter.h>

#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_VIDEOIO) && defined(HAVE_OPENCV_HIGHGUI)
namespace
{
// Convert a pose cMo into a 6-dim vector: translation [m] followed by thetaU rotation [rad]
std::vector<double> poseToVec(const vpHomogeneousMatrix &cMo)
{
  vpTranslationVector t = cMo.getTranslationVector();
  vpThetaUVector tu = cMo.getThetaUVector();
  std::vector<double> vec { t[0], t[1], t[2], tu[0], tu[1], tu[2] };

  return vec;
}
}
#endif

int main(int argc, char **argv)
{
#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_VIDEOIO) && defined(HAVE_OPENCV_HIGHGUI)
  std::string opt_videoname = "model/teabox/teabox.mp4";
  std::string opt_modelname = "model/teabox/teabox.cao";
  int opt_tracker = 0;
  int opt_video_first_frame = -1;
  int opt_downscale_img = 1;
  bool opt_verbose = false;
  bool opt_plot = true;
  bool opt_display_scale_auto = false;
  vpColVector opt_dof_to_estimate(6, 1.); // Here we consider 6 dof estimation
  std::string opt_save;
  std::string opt_save_results;
  unsigned int thickness = 2;

  std::shared_ptr<vpDisplay> display;
  std::shared_ptr<vpPlot> plot;
  std::shared_ptr<vpVideoWriter> writer;

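  // Parse the command line: each option below overrides one of the defaults defined above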
  try {
    for (int i = 0; i < argc; i++) {
      if (std::string(argv[i]) == "--video") {
        opt_videoname = std::string(argv[++i]);
      }
      else if (std::string(argv[i]) == "--video-first-frame") {
        opt_video_first_frame = std::atoi(argv[++i]);
      }
      else if (std::string(argv[i]) == "--model") {
        opt_modelname = std::string(argv[++i]);
      }
      else if (std::string(argv[i]) == "--tracker") {
        opt_tracker = std::atoi(argv[++i]);
      }
      else if (std::string(argv[i]) == "--downscale-img") {
        opt_downscale_img = std::atoi(argv[++i]);
      }
      else if (std::string(argv[i]) == "--save") {
        opt_save = std::string(argv[++i]);
      }
      else if (std::string(argv[i]) == "--save-results") {
        opt_save_results = std::string(argv[++i]);
      }
      else if (std::string(argv[i]) == "--plot") {
        opt_plot = true;
      }
      else if (std::string(argv[i]) == "--dof") {
        for (int j = 0; j < 6; j++) {
          int val = std::atoi(argv[++i]);
          if (val == 0 || val == 1) {
            opt_dof_to_estimate[j] = val;
          }
          else {
            std::cout << "Error: wrong value after --dof option. Authorized values are 0 or 1 for each of the 6 dof to estimate." << std::endl;
            return EXIT_FAILURE;
          }
        }
      }
      else if (std::string(argv[i]) == "--display-scale-auto") {
        opt_display_scale_auto = true;
      }
      else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
        opt_verbose = true;
      }
      else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
        std::cout << "\nSYNOPSIS " << std::endl
          << argv[0]
          << " [--video <video name>]"
          << " [--video-first-frame <image index>]"
          << " [--model <model name>]"
          << " [--tracker <0=edge|1=keypoint|2=hybrid>]"
          << " [--downscale-img <scale factor>]"
          << " [--dof <0/1 0/1 0/1 0/1 0/1 0/1>]"
          << " [--save <e.g. results-%04d.png>]"
          << " [--save-results <e.g. tracking_poses.npz>]"
          << " [--display-scale-auto]"
          << " [--plot]"
          << " [--verbose,-v]"
          << " [--help,-h]"
          << std::endl;
        std::cout << "\nOPTIONS " << std::endl
          << "  --video <video name>" << std::endl
          << "    Input video name." << std::endl
          << "    Default: model/teabox/teabox.mp4" << std::endl
          << std::endl
          << "  --video-first-frame <image index>" << std::endl
          << "    Index of the first image to process." << std::endl
          << "    Set to -1 to process the first image of the video." << std::endl
          << "    Default: -1" << std::endl
          << std::endl
          << "  --model <model name>" << std::endl
          << "    CAD model filename. Supported formats are .cao and .wrl." << std::endl
          << "    To use the wrl format, ViSP needs to be built with the Coin3D third-party library." << std::endl
          << "    Default: model/teabox/teabox.cao" << std::endl
          << std::endl
          << "  --tracker <0=edge|1=keypoint|2=hybrid>" << std::endl
          << "    Tracker type:" << std::endl
          << "    - when 0: use only moving-edges" << std::endl
          << "    - when 1: use only KLT keypoints" << std::endl
          << "    - when 2: use hybrid scheme, moving-edges and KLT keypoints." << std::endl
          << "    Default: 0" << std::endl
          << std::endl
          << "  --downscale-img <scale factor>" << std::endl
          << "    Downscale input image width and height by this factor." << std::endl
          << "    When set to 1, the image is not downscaled. When set to 2, the image" << std::endl
          << "    width and height are divided by 2." << std::endl
          << "    Default: 1" << std::endl
          << std::endl
          << "  --dof <0/1 0/1 0/1 0/1 0/1 0/1>" << std::endl
          << "    6-dim vector of 0 and 1 to indicate which dof [tx ty tz rx ry rz]" << std::endl
          << "    has to be estimated." << std::endl
          << "    When set to 1 the dof is estimated. When set to 0 the dof is not" << std::endl
          << "    estimated; its value is the one from the initialization." << std::endl
          << "    Default: 1 1 1 1 1 1 (to estimate all 6 dof)" << std::endl
          << std::endl
          << "  --save <e.g. results-%04d.png>" << std::endl
          << "    Name of the saved image sequence that contains tracking results in overlay." << std::endl
          << "    When the name contains a folder like in the next example, the folder" << std::endl
          << "    is created if it doesn't exist." << std::endl
          << "    Example: \"result/image-%04d.png\"." << std::endl
          << std::endl
          << "  --save-results <e.g. tracking_results.npz>" << std::endl
          << "    Name of the npz file containing the cMo data estimated by the model-based tracker." << std::endl
          << "    When the name contains a folder like in the next example, the folder" << std::endl
          << "    is created if it doesn't exist." << std::endl
          << "    Example: \"result/tracking_results.npz\"." << std::endl
          << std::endl
          << "  --display-scale-auto" << std::endl
          << "    Enable display window auto scaling to ensure that the image is fully" << std::endl
          << "    visible on the screen. Useful for large images." << std::endl
          << "    Note that this option doesn't affect the size of the processed images." << std::endl
          << std::endl
          << "  --plot" << std::endl
          << "    Open a window that plots the estimated pose evolution." << std::endl
          << std::endl
          << "  --verbose, -v" << std::endl
          << "    Enable verbose mode." << std::endl
          << std::endl
          << "  --help, -h" << std::endl
          << "    Display this help message." << std::endl
          << std::endl;
        return EXIT_SUCCESS;
      }
    }
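
    // Build the object base name used to locate the .cao/.wrl model, .init and optional .xml configuration files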
    std::string parentname = vpIoTools::getParent(opt_modelname);
    std::string objectname = vpIoTools::getNameWE(opt_modelname);

    if (!parentname.empty())
      objectname = parentname + "/" + objectname;

    std::cout << " *********** Tracker config ************ " << std::endl;
    std::cout << "Video name                 : " << opt_videoname << std::endl;
    std::cout << "Tracker cad model file     : " << objectname << ".[cao or wrl]" << std::endl;
    std::cout << "Tracker init file          : " << objectname << ".init" << std::endl;
    std::cout << "Tracker optional init image: " << objectname << ".[png,ppm,jpg]" << std::endl;
    if (opt_downscale_img > 1) {
      std::cout << "Downscale image factor     : " << opt_downscale_img << std::endl;
    }
    std::cout << "Dof to estimate            : " << opt_dof_to_estimate.t() << std::endl;

    // Create output folder if needed
    if (!opt_save.empty()) {
      std::string parent = vpIoTools::getParent(opt_save);
      if (!parent.empty()) {
        std::cout << "Create output directory: " << parent << std::endl;
        vpIoTools::makeDirectory(parent);
      }
    }
    if (!opt_save_results.empty()) {
      std::string parent = vpIoTools::getParent(opt_save_results);
      if (!parent.empty()) {
        std::cout << "Create output directory for the npz file: " << parent << std::endl;
        vpIoTools::makeDirectory(parent);
      }
    }

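    // Open the video stream; when --downscale-img is greater than 1, each frame read in Ivideo is subsampled into I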
    vpImage<unsigned char> Ivideo;
    vpImage<unsigned char> I;

    vpVideoReader g;
    g.setFileName(opt_videoname);
    if (opt_video_first_frame > 0) {
      g.setFirstFrameIndex(static_cast<unsigned int>(opt_video_first_frame));
    }
    if (opt_downscale_img > 1) {
      g.open(Ivideo);
      Ivideo.subsample(opt_downscale_img, opt_downscale_img, I);
    }
    else {
      g.open(I);
    }

    vpImage<vpRGBa> O;
    if (!opt_save.empty()) {
      writer = std::make_shared<vpVideoWriter>();
      writer->setFileName(opt_save);
      writer->open(O);
    }

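    // Create the display window with the first available backend (X11, GDI or OpenCV highgui)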
#if defined(VISP_HAVE_X11)
    display = std::make_shared<vpDisplayX>();
#elif defined(VISP_HAVE_GDI)
    display = std::make_shared<vpDisplayGDI>();
#elif defined(HAVE_OPENCV_HIGHGUI)
    display = std::make_shared<vpDisplayOpenCV>();
#endif
    if (opt_display_scale_auto) {
      display->setDownScalingFactor(vpDisplay::SCALE_AUTO);
    }
    display->init(I, 100, 100, "Model-based tracker");

    if (opt_plot) {
      plot = std::make_shared<vpPlot>(2, 700, 700, display->getWindowXPosition() + I.getWidth() / display->getDownScalingFactor() + 30,
                                      display->getWindowYPosition(), "Estimated pose");
      plot->initGraph(0, 3); // Translation
      plot->setTitle(0, "Translation [m]");
      plot->setColor(0, 0, vpColor::red);
      plot->setColor(0, 1, vpColor::green);
      plot->setColor(0, 2, vpColor::blue);
      plot->initGraph(1, 3); // Attitude
      plot->setTitle(1, "Attitude thetaU [deg]");
      plot->setColor(1, 0, vpColor::red);
      plot->setColor(1, 1, vpColor::green);
      plot->setColor(1, 2, vpColor::blue);
    }

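    // Create the generic model-based tracker and select the features to use according to --tracker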
    vpMbGenericTracker tracker;
    if (opt_tracker == 0)
      tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER);
#if defined(VISP_HAVE_MODULE_KLT) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    else if (opt_tracker == 1)
      tracker.setTrackerType(vpMbGenericTracker::KLT_TRACKER);
    else
      tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
#else
    else {
      std::cout << "klt and hybrid model-based tracker are not available since visp_klt module is not available. "
                   "In CMakeGUI turn visp_klt module ON, configure and build ViSP again."
                << std::endl;
      return EXIT_FAILURE;
    }
#endif

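    // If a <object>.xml configuration file exists, load the tracker settings from it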
    bool usexml = false;
#if defined(VISP_HAVE_PUGIXML)
    if (vpIoTools::checkFilename(objectname + ".xml")) {
      std::cout << "Tracker config file        : " << objectname + ".xml" << std::endl;
      tracker.loadConfigFile(objectname + ".xml");
      usexml = true;
    }
#endif

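    // Otherwise configure moving-edges, KLT, visibility angles, clipping and camera parameters by hand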
    if (!usexml) {
      if (opt_tracker == 0 || opt_tracker == 2) {
        vpMe me;
        me.setMaskSize(5);
        me.setMaskNumber(180);
        me.setRange(8);
        me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD);
        me.setThreshold(20);
        me.setMu1(0.5);
        me.setMu2(0.5);
        me.setSampleStep(4);
        tracker.setMovingEdge(me);
      }

#if defined(VISP_HAVE_MODULE_KLT) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (opt_tracker == 1 || opt_tracker == 2) {
        vpKltOpencv klt_settings;
        klt_settings.setMaxFeatures(300);
        klt_settings.setWindowSize(5);
        klt_settings.setQuality(0.015);
        klt_settings.setMinDistance(8);
        klt_settings.setHarrisFreeParameter(0.01);
        klt_settings.setBlockSize(3);
        klt_settings.setPyramidLevels(3);
        tracker.setKltOpencv(klt_settings);
        tracker.setKltMaskBorder(5);
      }
#endif

      tracker.setAngleAppear(vpMath::rad(70));
      tracker.setAngleDisappear(vpMath::rad(80));
      tracker.setNearClippingDistance(0.1);
      tracker.setFarClippingDistance(100.0);

      vpCameraParameters cam;
      cam.initPersProjWithoutDistortion(839.21470, 839.44555, 325.66776, 243.69727);
      tracker.setCameraParameters(cam);
    }

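    // Visibility tests, CAD model loading and selection of the degrees of freedom to estimate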
    tracker.setOgreVisibilityTest(false);
    tracker.setOgreShowConfigDialog(false);
    tracker.setScanLineVisibilityTest(true);

    if (vpIoTools::checkFilename(objectname + ".cao"))
      tracker.loadModel(objectname + ".cao");
    else if (vpIoTools::checkFilename(objectname + ".wrl"))
      tracker.loadModel(objectname + ".wrl");
    tracker.setDisplayFeatures(true);

    if (opt_dof_to_estimate != 1.) {
      tracker.setEstimatedDoF(opt_dof_to_estimate);
    }

    vpCameraParameters cam;
    tracker.getCameraParameters(cam);
    std::cout << "Camera parameters: \n" << cam << std::endl;

    std::cout << "Initialize tracker on image size: " << I.getWidth() << " x " << I.getHeight() << std::endl;

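    // When --save-results is given, store the image size and camera intrinsics in the npz file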
    std::vector<double> vec_poses;
    if (!opt_save_results.empty()) {
      const unsigned int height = I.getHeight(), width = I.getWidth();
      visp::cnpy::npz_save(opt_save_results, "height", &height, { 1 }, "w");
      visp::cnpy::npz_save(opt_save_results, "width", &width, { 1 }, "a");

      const double cam_px = cam.get_px(), cam_py = cam.get_py(), cam_u0 = cam.get_u0(), cam_v0 = cam.get_v0();
      visp::cnpy::npz_save(opt_save_results, "cam_px", &cam_px, { 1 }, "a");
      visp::cnpy::npz_save(opt_save_results, "cam_py", &cam_py, { 1 }, "a");
      visp::cnpy::npz_save(opt_save_results, "cam_u0", &cam_u0, { 1 }, "a");
      visp::cnpy::npz_save(opt_save_results, "cam_v0", &cam_v0, { 1 }, "a");
    }

    tracker.initClick(I, objectname + ".init", true);

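    // Tracking loop: acquire an image, track the object, display the results and optionally log them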
    while (!g.end()) {
      if (opt_downscale_img > 1) {
        g.acquire(Ivideo);
        Ivideo.subsample(opt_downscale_img, opt_downscale_img, I);
      }
      else {
        g.acquire(I);
      }
      std::stringstream ss;
      ss << "Process image " << g.getFrameIndex();
      if (opt_verbose) {
        std::cout << "-- " << ss.str() << std::endl;
      }
      vpDisplay::display(I);
      tracker.track(I);
      vpHomogeneousMatrix cMo;
      tracker.getPose(cMo);
      tracker.display(I, cMo, cam, vpColor::red, thickness);
      vpDisplay::displayFrame(I, cMo, cam, 0.025, vpColor::none, thickness);
      vpDisplay::displayText(I, 20 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), "A click to exit...", vpColor::red);
      vpDisplay::displayText(I, 40 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), ss.str(), vpColor::red);
      {
        std::stringstream ss;
        ss << "Features";
        if (tracker.getTrackerType() & vpMbGenericTracker::EDGE_TRACKER) {
          ss << " edge: " << tracker.getNbFeaturesEdge();
        }
#if defined(VISP_HAVE_MODULE_KLT) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        if (tracker.getTrackerType() & vpMbGenericTracker::KLT_TRACKER) {
          ss << " klt: " << tracker.getNbFeaturesKlt();
        }
#endif
        vpDisplay::displayText(I, 60 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), ss.str(), vpColor::red);
        if (opt_verbose) {
          std::cout << ss.str() << std::endl;
          std::cout << "cMo:\n" << cMo << std::endl;
        }
      }
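      // Display the current projection error as a tracking quality indicator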
      {
        double proj_error = tracker.computeCurrentProjectionError(I, cMo, cam);
        std::stringstream ss;
        ss << "Projection error: " << std::setprecision(2) << proj_error << " deg";
        vpDisplay::displayText(I, 80 * display->getDownScalingFactor(), 10 * display->getDownScalingFactor(), ss.str(), vpColor::red);
        if (opt_verbose) {
          std::cout << ss.str() << std::endl;
        }
      }
      vpDisplay::flush(I);

      if (opt_plot) {
        vpTranslationVector c_t_o = cMo.getTranslationVector();
        vpThetaUVector c_tu_o = cMo.getThetaUVector();
        vpColVector c_tu_o_deg = vpMath::deg(c_tu_o);
        plot->plot(0, g.getFrameIndex(), c_t_o);
        plot->plot(1, g.getFrameIndex(), c_tu_o_deg);
      }

      if (!opt_save.empty()) {
        vpDisplay::getImage(I, O);
        writer->saveFrame(O);
      }

      if (!opt_save_results.empty()) {
        std::vector<double> vec_pose = poseToVec(cMo);
        vec_poses.insert(vec_poses.end(), vec_pose.begin(), vec_pose.end());
      }

      if (vpDisplay::getClick(I, false))
        break;
    }

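    // Once the loop is over, save all the estimated poses as an N x 6 array in the npz file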
    if (!opt_save_results.empty()) {
      visp::cnpy::npz_save(opt_save_results, "vec_poses", vec_poses.data(), { static_cast<size_t>(vec_poses.size()/6), 6 }, "a");
    }
  }
  catch (const vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
#ifdef VISP_HAVE_OGRE
  catch (Ogre::Exception &e) {
    std::cout << "Catch an Ogre exception: " << e.getDescription() << std::endl;
    return EXIT_FAILURE;
  }
#endif
#else
  (void)argc;
  (void)argv;
  std::cout << "Install OpenCV and rebuild ViSP to use this example." << std::endl;
#endif
  return EXIT_SUCCESS;
}