Visual Servoing Platform  version 3.3.0 under development (2020-02-17)
servoBebop2.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Example that shows how to do visual servoing with Parrot Bebop 2 drone in ViSP.
33  *
34  * Authors:
35  * Fabien Spindler
36  * Gatien Gaumerais
37  *
38  *****************************************************************************/
39 
40 #include <iostream>
41 
42 #include <visp3/core/vpConfig.h>
43 #include <visp3/core/vpMomentAreaNormalized.h>
44 #include <visp3/core/vpMomentBasic.h>
45 #include <visp3/core/vpMomentCentered.h>
46 #include <visp3/core/vpMomentDatabase.h>
47 #include <visp3/core/vpMomentGravityCenter.h>
48 #include <visp3/core/vpMomentGravityCenterNormalized.h>
49 #include <visp3/core/vpMomentObject.h>
50 #include <visp3/core/vpPixelMeterConversion.h>
51 #include <visp3/core/vpPoint.h>
52 #include <visp3/core/vpTime.h>
53 #include <visp3/detection/vpDetectorAprilTag.h>
54 #include <visp3/gui/vpDisplayX.h>
55 #include <visp3/gui/vpPlot.h>
56 #include <visp3/robot/vpRobotBebop2.h>
57 #include <visp3/visual_features/vpFeatureBuilder.h>
58 #include <visp3/visual_features/vpFeatureMomentAreaNormalized.h>
59 #include <visp3/visual_features/vpFeatureMomentGravityCenterNormalized.h>
60 #include <visp3/visual_features/vpFeatureVanishingPoint.h>
61 #include <visp3/vs/vpServo.h>
62 #include <visp3/vs/vpServoDisplay.h>
63 
64 #ifdef VISP_HAVE_PUGIXML
65 #include <visp3/core/vpXmlParserCamera.h>
66 #endif
67 
68 #if !defined(VISP_HAVE_ARSDK)
69 int main()
70 {
71  std::cout << "\nThis example requires Parrot ARSDK3 library. You should install it.\n" << std::endl;
72  return EXIT_SUCCESS;
73 }
74 #elif !defined(VISP_HAVE_FFMPEG)
75 int main()
76 {
77  std::cout << "\nThis example requires ffmpeg library. You should install it.\n" << std::endl;
78  return EXIT_SUCCESS;
79 }
80 #else
81 
82 bool compareImagePoint(std::pair<size_t, vpImagePoint> p1, std::pair<size_t, vpImagePoint> p2)
83 {
84  return (p1.second.get_v() < p2.second.get_v());
85 };
86 
98 int main(int argc, char **argv)
99 {
100  try {
101 
102  std::string ip_address = "192.168.42.1";
103  std::string opt_cam_parameters;
104  bool opt_has_cam_parameters = false;
105 
106  double tagSize = -1;
107 
108  double opt_distance_to_tag = -1;
109  bool opt_has_distance_to_tag = false;
110 
111  int stream_res = 0; // Default 480p resolution
112 
113  bool verbose = false;
114 
115  if (argc >= 3 && std::string(argv[1]) == "--tag_size") {
116  tagSize = std::atof(argv[2]); // Tag size option is required
117  if (tagSize <= 0) {
118  std::cout << "Error : invalid tag size." << std::endl << "See " << argv[0] << " --help" << std::endl;
119  return 0;
120  }
121  for (int i = 3; i < argc; i++) {
122  if (std::string(argv[i]) == "--ip" && i + 1 < argc) {
123  ip_address = std::string(argv[i + 1]);
124  i++;
125  } else if (std::string(argv[i]) == "--distance_to_tag" && i + 1 < argc) {
126  opt_distance_to_tag = std::atof(argv[i + 1]);
127  if (opt_distance_to_tag <= 0) {
128  std::cout << "Error : invalid distance to tag." << std::endl << "See " << argv[0] << " --help" << std::endl;
129  return 0;
130  }
131  opt_has_distance_to_tag = true;
132  i++;
133  } else if (std::string(argv[i]) == "--intrinsic") {
134 
135 #ifdef VISP_HAVE_PUGIXML
136  opt_cam_parameters = std::string(argv[i + 1]);
137  opt_has_cam_parameters = true;
138  i++;
139 #else
140  std::cout << "PUGIXML is required for custom camera parameters input." << std::endl;
141  return 0;
142 #endif
143  } else if (std::string(argv[i]) == "--hd_stream") {
144  stream_res = 1;
145  } else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
146  verbose = true;
147  } else {
148  std::cout << "Error : unknown parameter " << argv[i] << std::endl
149  << "See " << argv[0] << " --help" << std::endl;
150  return 0;
151  }
152  }
153  } else if (argc >= 2 && (std::string(argv[1]) == "--help" || std::string(argv[1]) == "-h")) {
154  std::cout << "\nUsage:\n"
155  << " " << argv[0]
156  << " [--tag_size <size>] [--ip <drone ip>] [--distance_to_tag <distance>] [--intrinsic <xml file>] "
157  << "[--hd_stream] [--verbose] [-v] [--help] [-h]\n"
158  << std::endl
159  << "Description:\n"
160  << " --tag_size <size>\n"
161  << " The size of the tag to detect in meters, required.\n\n"
162  << " --ip <drone ip>\n"
163  << " Ip address of the drone to which you want to connect (default : 192.168.42.1).\n\n"
164  << " --distance_to_tag <distance>\n"
165  << " The desired distance to the tag in meters (default: 1 meter).\n\n"
166  << " --intrinsic <xml file>\n"
167  << " XML file containing computed intrinsic camera parameters (default: empty).\n\n"
168  << " --hd_stream\n"
169  << " Enables HD 720p streaming instead of default 480p.\n"
170  << " Allows to increase range and accuracy of the tag detection,\n"
171  << " but increases latency and computation time.\n"
172  << " Caution: camera calibration settings are different for the two resolutions.\n"
173  << " Make sure that if you pass custom intrinsic camera parameters,\n"
174  << " they were obtained with the correct resolution.\n\n"
175  << " --verbose, -v\n"
176  << " Enables verbose (drone information messages and velocity commands\n"
177  << " are then displayed).\n\n"
178  << " --help, -h\n"
179  << " Print help message.\n"
180  << std::endl;
181  return 0;
182 
183  } else {
184  std::cout << "Error : tag size parameter required." << std::endl << "See " << argv[0] << " --help" << std::endl;
185  return 0;
186  }
187 
188  std::cout
189  << "\nWARNING: \n - This program does no sensing or avoiding of "
190  "obstacles, \n"
191  "the drone WILL collide with any objects in the way! Make sure "
192  "the \n"
193  "drone has approximately 3 meters of free space on all sides.\n"
194  " - The drone uses a downward-facing camera for horizontal speed estimation,\n make sure the drone flies "
195  "above a non-uniform flooring,\n or its movement will be inacurate and dangerous !\n"
196 
197  << std::endl;
198 
199  vpRobotBebop2 drone(
200  verbose, true, ip_address); // Create the drone with desired verbose level, settings reset, and corresponding IP
201 
202  if (drone.isRunning()) {
203 
204  drone.setVideoResolution(stream_res); // Set video resolution to 480p (default) or 720p
205 
206  drone.setStreamingMode(0); // Set lowest latency stream mode
207  drone.setVideoStabilisationMode(0); // Disable video stabilisation
208 
209  drone.doFlatTrim(); // Flat trim calibration
210 
211  drone.startStreaming(); // Start streaming and decoding video data
212 
213  drone.setExposure(1.5f); // Set exposure to max so that the aprilTag detection is more efficient
214 
215  drone.setCameraOrientation(-5., 0.,
216  true); // Set camera to look slightly down so that the drone is slightly above the tag
217 
218  drone.takeOff(true); // Take off
219 
221  drone.getGrayscaleImage(I);
222 
223 #if defined VISP_HAVE_X11
224  vpDisplayX display;
225 #elif defined VISP_HAVE_GTK
226  vpDisplayGTK display;
227 #elif defined VISP_HAVE_GDI
228  vpDisplayGDI display;
229 #elif defined VISP_HAVE_OPENCV
230  vpDisplayOpenCV display;
231 #endif
232  int orig_displayX = 100;
233  int orig_displayY = 100;
234  display.init(I, orig_displayX, orig_displayY, "DRONE VIEW");
236  vpDisplay::flush(I);
237 
238  vpPlot plotter(1, 700, 700, orig_displayX + static_cast<int>(I.getWidth()) + 20, orig_displayY,
239  "Visual servoing tasks");
240  unsigned int iter = 0;
241 
243  vpDetectorAprilTag detector(tagFamily); // The detector used to detect Apritags
244  detector.setAprilTagQuadDecimate(4.0);
245  detector.setAprilTagNbThreads(4);
246  detector.setDisplayTag(true);
247 
248  vpCameraParameters cam;
249 
250  if (opt_has_cam_parameters) {
251 
254 
255  if (p.parse(cam, opt_cam_parameters, "Camera", projModel, I.getWidth(), I.getHeight()) !=
257  std::cout << "Cannot find parameters in XML file " << opt_cam_parameters << std::endl;
258  if (drone.getVideoHeight() == 720) { // 720p streaming
259  cam.initPersProjWithoutDistortion(785.6412585, 785.3322447, 637.9049857, 359.7524531);
260  } else { // 480p streaming
261  cam.initPersProjWithoutDistortion(531.9213063, 520.8495788, 429.133986, 240.9464457);
262  }
263  }
264  } else {
265  std::cout << "Setting default camera parameters ... " << std::endl;
266  if (drone.getVideoHeight() == 720) { // 720p streaming
267  cam.initPersProjWithoutDistortion(785.6412585, 785.3322447, 637.9049857, 359.7524531);
268  } else { // 480p streaming
269  cam.initPersProjWithoutDistortion(531.9213063, 520.8495788, 429.133986, 240.9464457);
270  }
271  }
272  cam.printParameters();
273 
274  vpServo task; // Visual servoing task
275 
276  // double lambda = 0.5;
277  vpAdaptiveGain lambda = vpAdaptiveGain(1.5, 0.7, 30);
280  task.setLambda(lambda);
281 
282  /*
283  In the following section, camera 1 refers to camera coordonates system of the drone, but without taking camera
284  pan and camera tilt into account.
285  Those values are taken into consideration in Camera 2.
286  E is the effective coordinate system of the drone, the one in which we need to convert every velocity command.
287 
288  We can easily compute homogeneous matrix between camera 1 and camera 2, and between camera 1
289  and effective coordonate system E of the drone.
290 
291  Using those matrices, we can in the end obtain the matrix between c2 and E
292  */
293  vpRxyzVector c1_rxyz_c2(vpMath::rad(drone.getCurrentCameraTilt()), vpMath::rad(drone.getCurrentCameraPan()), 0);
294  vpRotationMatrix c1Rc2(c1_rxyz_c2); // Rotation between camera 1 and 2
295  vpHomogeneousMatrix c1Mc2(vpTranslationVector(), c1Rc2); // Homogeneous matrix between c1 and c2
296 
297  vpRotationMatrix c1Re{0, 1, 0, 0, 0, 1, 1, 0, 0}; // Rotation between camera 1 and E
298  vpTranslationVector c1te(0, 0, -0.09); // Translation between camera 1 and E
299  vpHomogeneousMatrix c1Me(c1te, c1Re); // Homogeneous matrix between c1 and E
300 
301  vpHomogeneousMatrix c2Me = c1Mc2.inverse() * c1Me; // Homogeneous matrix between c2 and E
302 
303  vpVelocityTwistMatrix cVe(c2Me);
304  task.set_cVe(cVe);
305 
306  vpMatrix eJe(6, 4, 0);
307 
308  eJe[0][0] = 1;
309  eJe[1][1] = 1;
310  eJe[2][2] = 1;
311  eJe[5][3] = 1;
312 
313  // double Z_d = 1.; // Desired distance to the target
314  double Z_d = (opt_has_distance_to_tag ? opt_distance_to_tag : 1.);
315 
316  // Define the desired polygon corresponding the the AprilTag CLOCKWISE
317  double X[4] = {tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2.};
318  double Y[4] = {tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2.};
319  std::vector<vpPoint> vec_P, vec_P_d;
320 
321  for (int i = 0; i < 4; i++) {
322  vpPoint P_d(X[i], Y[i], 0);
323  vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
324  P_d.track(cdMo); //
325  vec_P_d.push_back(P_d);
326  }
327  vpMomentObject m_obj(3), m_obj_d(3);
328  vpMomentDatabase mdb, mdb_d;
329  vpMomentBasic mb_d; // Here only to get the desired area m00
330  vpMomentGravityCenter mg, mg_d;
331  vpMomentCentered mc, mc_d;
332  vpMomentAreaNormalized man(0, Z_d), man_d(0, Z_d); // Declare normalized area updated below with m00
333  vpMomentGravityCenterNormalized mgn, mgn_d; // Declare normalized gravity center
334 
335  // Desired moments
336  m_obj_d.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
337  m_obj_d.fromVector(vec_P_d); // Initialize the object with the points coordinates
338 
339  mb_d.linkTo(mdb_d); // Add basic moments to database
340  mg_d.linkTo(mdb_d); // Add gravity center to database
341  mc_d.linkTo(mdb_d); // Add centered moments to database
342  man_d.linkTo(mdb_d); // Add area normalized to database
343  mgn_d.linkTo(mdb_d); // Add gravity center normalized to database
344  mdb_d.updateAll(m_obj_d); // All of the moments must be updated, not just an_d
345  mg_d.compute(); // Compute gravity center moment
346  mc_d.compute(); // Compute centered moments AFTER gravity center
347 
348  double area = 0;
349  if (m_obj_d.getType() == vpMomentObject::DISCRETE)
350  area = mb_d.get(2, 0) + mb_d.get(0, 2);
351  else
352  area = mb_d.get(0, 0);
353  // Update moment with the desired area
354  man_d.setDesiredArea(area);
355 
356  man_d.compute(); // Compute area normalized moment AFTER centered moments
357  mgn_d.compute(); // Compute gravity center normalized moment AFTER area normalized moment
358 
359  // Desired plane
360  double A = 0.0;
361  double B = 0.0;
362  double C = 1.0 / Z_d;
363 
364  // Construct area normalized features
365  vpFeatureMomentGravityCenterNormalized s_mgn(mdb, A, B, C), s_mgn_d(mdb_d, A, B, C);
366  vpFeatureMomentAreaNormalized s_man(mdb, A, B, C), s_man_d(mdb_d, A, B, C);
367  vpFeatureVanishingPoint s_vp, s_vp_d;
368 
369  // Add the features
370  task.addFeature(s_mgn, s_mgn_d);
371  task.addFeature(s_man, s_man_d);
373 
374  plotter.initGraph(0, 4);
375  plotter.setLegend(0, 0, "Xn"); // Distance from center on X axis feature
376  plotter.setLegend(0, 1, "Yn"); // Distance from center on Y axis feature
377  plotter.setLegend(0, 2, "an"); // Tag area feature
378  plotter.setLegend(0, 3, "atan(1/rho)"); // Vanishing point feature
379 
380  // Update desired gravity center normalized feature
381  s_mgn_d.update(A, B, C);
382  s_mgn_d.compute_interaction();
383  // Update desired area normalized feature
384  s_man_d.update(A, B, C);
385  s_man_d.compute_interaction();
386 
387  // Update desired vanishing point feature for the horizontal line
388  s_vp_d.setAtanOneOverRho(0);
389  s_vp_d.setAlpha(0);
390 
391  bool runLoop = true;
392  bool vec_ip_has_been_sorted = false;
393  std::vector<std::pair<size_t, vpImagePoint> > vec_ip_sorted;
394 
395  //** Visual servoing loop **//
396  while (drone.isRunning() && drone.isStreaming() && runLoop) {
397 
398  double startTime = vpTime::measureTimeMs();
399 
400  drone.getGrayscaleImage(I);
402 
403  std::vector<vpHomogeneousMatrix> cMo_vec;
404  detector.detect(I, tagSize, cam, cMo_vec); // Detect AprilTags in current image
405  double t = vpTime::measureTimeMs() - startTime;
406 
407  {
408  std::stringstream ss;
409  ss << "Detection time: " << t << " ms";
410  vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
411  }
412 
413  if (detector.getNbObjects() == 1) {
414 
415  // Update current points used to compute the moments
416  std::vector<vpImagePoint> vec_ip = detector.getPolygon(0);
417  vec_P.clear();
418  for (size_t i = 0; i < vec_ip.size(); i++) { // size = 4
419  double x = 0, y = 0;
420  vpPixelMeterConversion::convertPoint(cam, vec_ip[i], x, y);
421  vpPoint P;
422  P.set_x(x);
423  P.set_y(y);
424  vec_P.push_back(P);
425  }
426 
427  // Current moments
428  m_obj.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
429  m_obj.fromVector(vec_P); // Initialize the object with the points coordinates
430 
431  mg.linkTo(mdb); // Add gravity center to database
432  mc.linkTo(mdb); // Add centered moments to database
433  man.linkTo(mdb); // Add area normalized to database
434  mgn.linkTo(mdb); // Add gravity center normalized to database
435  mdb.updateAll(m_obj); // All of the moments must be updated, not just an_d
436  mg.compute(); // Compute gravity center moment
437  mc.compute(); // Compute centered moments AFTER gravity center
438  man.setDesiredArea(area); // Desired area was init at 0 (unknow at contruction), need to be updated here
439  man.compute(); // Compute area normalized moment AFTER centered moment
440  mgn.compute(); // Compute gravity center normalized moment AFTER area normalized moment
441 
442  s_mgn.update(A, B, C);
443  s_mgn.compute_interaction();
444  s_man.update(A, B, C);
445  s_man.compute_interaction();
446 
447  /* Sort points from their height in the image, and keep original indexes.
448  This is done once, in order to be independent from the orientation of the tag
449  when detecting vanishing points. */
450  if (!vec_ip_has_been_sorted) {
451  for (size_t i = 0; i < vec_ip.size(); i++) {
452 
453  // Add the points and their corresponding index
454  std::pair<size_t, vpImagePoint> index_pair = std::pair<size_t, vpImagePoint>(i, vec_ip[i]);
455  vec_ip_sorted.push_back(index_pair);
456  }
457 
458  // Sort the points and indexes from the v value of the points
459  std::sort(vec_ip_sorted.begin(), vec_ip_sorted.end(), compareImagePoint);
460 
461  vec_ip_has_been_sorted = true;
462  }
463 
464  // Use the two highest points for the first line, and the two others for the second line.
465  vpFeatureBuilder::create(s_vp, cam, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first],
466  vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first],
468 
469  task.set_cVe(cVe);
470  task.set_eJe(eJe);
471 
472  // Compute the control law. Velocities are computed in the mobile robot reference frame
473  vpColVector ve = task.computeControlLaw();
474 
475  // Sending the control law to the drone
476  if (verbose) {
477  std::cout << "ve: " << ve.t() << std::endl;
478  }
479  drone.setVelocity(ve, 1.0);
480 
481  for (size_t i = 0; i < 4; i++) {
482  vpDisplay::displayCross(I, vec_ip[i], 15, vpColor::red, 1);
483  std::stringstream ss;
484  ss << i;
485  vpDisplay::displayText(I, vec_ip[i] + vpImagePoint(15, 15), ss.str(), vpColor::green);
486  }
487 
488  // Display visual features
489  vpDisplay::displayPolygon(I, vec_ip, vpColor::green, 3); // Current polygon used to compure an moment
490  vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green,
491  3); // Current polygon used to compute a moment
492  vpDisplay::displayLine(I, 0, static_cast<int>(cam.get_u0()), static_cast<int>(I.getHeight()) - 1,
493  static_cast<int>(cam.get_u0()), vpColor::red,
494  3); // Vertical line as desired x position
495  vpDisplay::displayLine(I, static_cast<int>(cam.get_v0()), 0, static_cast<int>(cam.get_v0()),
496  static_cast<int>(I.getWidth()) - 1, vpColor::red,
497  3); // Horizontal line as desired y position
498 
499  // Display lines corresponding to the vanishing point for the horizontal lines
500  vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first], vpColor::red, 1,
501  false);
502  vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first], vpColor::red, 1,
503  false);
504 
505  } else {
506  std::stringstream sserr;
507  sserr << "Failed to detect an Apriltag, or detected multiple ones";
508  vpDisplay::displayText(I, 120, 20, sserr.str(), vpColor::red);
509  vpDisplay::flush(I);
510  drone.stopMoving(); // In this case, we stop the drone
511  }
512 
513  vpDisplay::displayText(I, 10, 10, "Click to exit", vpColor::red);
514  vpDisplay::flush(I);
515  if (vpDisplay::getClick(I, false)) {
516  drone.land();
517  runLoop = false;
518  }
519 
520  plotter.plot(0, iter, task.getError());
521 
522  double totalTime = vpTime::measureTimeMs() - startTime;
523  std::stringstream sstime;
524  sstime << "Total time: " << totalTime << " ms";
525  vpDisplay::displayText(I, 80, 20, sstime.str(), vpColor::red);
526  vpDisplay::flush(I);
527 
528  iter++;
529  vpTime::wait(startTime, 40.0); // We wait a total of 40 milliseconds
530  }
531 
532  return EXIT_SUCCESS;
533 
534  } else {
535  std::cout << "ERROR : failed to setup drone control." << std::endl;
536  return EXIT_FAILURE;
537  }
538  } catch (const vpException &e) {
539  std::cout << "Caught an exception: " << e << std::endl;
540  return EXIT_FAILURE;
541  }
542 }
543 
544 #endif // #elif !defined(VISP_HAVE_FFMPEG)
Implementation of a matrix and operations on matrices.
Definition: vpMatrix.h:164
VISP_EXPORT int wait(double t0, double t)
Definition: vpTime.cpp:173
Adaptive gain computation.
Class handling the normalized surface moment that is invariant in scale and used to estimate depth...
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
This class defines the 2D basic moment . This class is a wrapper for vpMomentObject which allows to us...
Definition: vpMomentBasic.h:74
Implementation of an homogeneous matrix and operations on such kind of matrices.
static unsigned int selectAtanOneOverRho()
AprilTag 36h11 pattern (recommended)
Class describing 2D normalized gravity center moment.
void addFeature(vpBasicFeature &s, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
Definition: vpServo.cpp:497
Display for windows using GDI (available on any windows 32 platform).
Definition: vpDisplayGDI.h:128
void set_eJe(const vpMatrix &eJe_)
Definition: vpServo.h:508
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be instal...
Definition: vpDisplayX.h:150
void linkTo(vpMomentDatabase &moments)
Definition: vpMoment.cpp:98
error that can be emitted by ViSP classes.
Definition: vpException.h:71
void init(vpImage< unsigned char > &I, int winx=-1, int winy=-1, const std::string &title="")
Class for generic objects.
vpHomogeneousMatrix inverse() const
vpRowVector t() const
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
size_t getNbObjects() const
XML parser to load and save intrinsic camera parameters.
static const vpColor green
Definition: vpColor.h:182
static void flush(const vpImage< unsigned char > &I)
VISP_EXPORT double measureTimeMs()
Definition: vpTime.cpp:126
static const vpColor red
Definition: vpColor.h:179
Class that defines what is a point.
Definition: vpPoint.h:58
void set_x(double x)
Set the point x coordinate in the image plane.
Definition: vpPoint.cpp:472
Implementation of a rotation matrix and operations on such kind of matrices.
Functionality computation for normalized surface moment feature. Computes the interaction matrix asso...
void set_y(double y)
Set the point y coordinate in the image plane.
Definition: vpPoint.cpp:474
Functionality computation for centered and normalized moment feature. Computes the interaction matrix...
void setAprilTagQuadDecimate(float quadDecimate)
const std::vector< double > & get() const
static void displayPolygon(const vpImage< unsigned char > &I, const std::vector< vpImagePoint > &vip, const vpColor &color, unsigned int thickness=1, bool closed=true)
vpColVector computeControlLaw()
Definition: vpServo.cpp:935
This class allows to register all vpMoments so they can access each other according to their dependen...
static void display(const vpImage< unsigned char > &I)
The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenC...
Generic class defining intrinsic camera parameters.
void setAprilTagNbThreads(int nThreads)
void setLambda(double c)
Definition: vpServo.h:406
void setAtanOneOverRho(double atan_one_over_rho)
Set vanishing point feature value.
virtual void updateAll(vpMomentObject &object)
The vpDisplayGTK allows to display image using the GTK 3rd party library. Thus to enable this class G...
Definition: vpDisplayGTK.h:137
This class defines the double-indexed centered moment descriptor .
vpImagePoint getCog(size_t i) const
Class describing 2D gravity center moment.
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
Definition: vpServo.cpp:574
static double rad(double deg)
Definition: vpMath.h:108
int parse(vpCameraParameters &cam, const std::string &filename, const std::string &camera_name, const vpCameraParameters::vpCameraParametersProjType &projModel, unsigned int image_width=0, unsigned int image_height=0)
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
void setAlpha(double alpha)
Set vanishing point feature value.
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
unsigned int getHeight() const
Definition: vpImage.h:186
Implementation of column vector and the associated operations.
Definition: vpColVector.h:130
void setDisplayTag(bool display, const vpColor &color=vpColor::none, unsigned int thickness=2)
void set_cVe(const vpVelocityTwistMatrix &cVe_)
Definition: vpServo.h:450
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition: vpPlot.h:115
Implementation of a rotation vector as Euler angle minimal representation.
Definition: vpRxyzVector.h:183
vpColVector getError() const
Definition: vpServo.h:282
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:88
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
unsigned int getWidth() const
Definition: vpImage.h:244
void setServo(const vpServoType &servo_type)
Definition: vpServo.cpp:223
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
Class that consider the case of a translation vector.
std::vector< std::vector< vpImagePoint > > & getPolygon()
bool detect(const vpImage< unsigned char > &I)