Visual Servoing Platform  version 3.5.1 under development (2022-07-06)
servoBebop2.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Example that shows how to do visual servoing with Parrot Bebop 2 drone in ViSP.
33  *
34  * Authors:
35  * Fabien Spindler
36  * Gatien Gaumerais
37  *
38  *****************************************************************************/
39 
40 #include <iostream>
41 
42 #include <visp3/core/vpConfig.h>
43 #include <visp3/core/vpMomentAreaNormalized.h>
44 #include <visp3/core/vpMomentBasic.h>
45 #include <visp3/core/vpMomentCentered.h>
46 #include <visp3/core/vpMomentDatabase.h>
47 #include <visp3/core/vpMomentGravityCenter.h>
48 #include <visp3/core/vpMomentGravityCenterNormalized.h>
49 #include <visp3/core/vpMomentObject.h>
50 #include <visp3/core/vpPixelMeterConversion.h>
51 #include <visp3/core/vpPoint.h>
52 #include <visp3/core/vpTime.h>
53 #include <visp3/core/vpXmlParserCamera.h>
54 #include <visp3/detection/vpDetectorAprilTag.h>
55 #include <visp3/gui/vpDisplayX.h>
56 #include <visp3/gui/vpPlot.h>
57 #include <visp3/robot/vpRobotBebop2.h>
58 #include <visp3/visual_features/vpFeatureBuilder.h>
59 #include <visp3/visual_features/vpFeatureMomentAreaNormalized.h>
60 #include <visp3/visual_features/vpFeatureMomentGravityCenterNormalized.h>
61 #include <visp3/visual_features/vpFeatureVanishingPoint.h>
62 #include <visp3/vs/vpServo.h>
63 #include <visp3/vs/vpServoDisplay.h>
64 
65 #if !defined(VISP_HAVE_ARSDK)
// Fallback entry point built when the Parrot ARSDK3 library is not available:
// explain what is missing and exit without error.
int main()
{
  const char *msg = "\nThis example requires Parrot ARSDK3 library. You should install it.\n";
  std::cout << msg << std::endl;
  return EXIT_SUCCESS;
}
71 #elif !defined(VISP_HAVE_FFMPEG)
// Fallback entry point built when the ffmpeg library is not available:
// explain what is missing and exit without error.
int main()
{
  const char *msg = "\nThis example requires ffmpeg library. You should install it.\n";
  std::cout << msg << std::endl;
  return EXIT_SUCCESS;
}
77 #elif (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
// Fallback entry point built when the compiler does not provide at least C++11:
// explain how to enable it and exit without error.
int main()
{
  const char *msg = "\nThis example requires cxx11 standard or higher. Turn it on using cmake -DUSE_CXX_STANDARD=11.\n";
  std::cout << msg << std::endl;
  return EXIT_SUCCESS;
}
84 #else
85 
86 bool compareImagePoint(std::pair<size_t, vpImagePoint> p1, std::pair<size_t, vpImagePoint> p2)
87 {
88  return (p1.second.get_v() < p2.second.get_v());
89 };
90 
// Entry point of the Bebop 2 visual-servoing example: parses the command line,
// connects to the drone, detects an AprilTag in the video stream, and servos
// the drone with image-moment features so that it hovers centered on the tag
// at the desired distance.
// NOTE(review): this listing is a doxygen extract — the leading number on each
// line is the original file's line number, and a few original lines (doxygen
// hyperlink anchors) are missing; each gap is flagged in place below.
102 int main(int argc, char **argv)
103 {
104  try {
105 
106  std::string ip_address = "192.168.42.1";
107  std::string opt_cam_parameters;
108  bool opt_has_cam_parameters = false;
109 
110  double tagSize = -1;
111 
112  double opt_distance_to_tag = -1;
113  bool opt_has_distance_to_tag = false;
114 
115  int stream_res = 0; // Default 480p resolution
116 
117  bool verbose = false;
118 
119  if (argc >= 3 && std::string(argv[1]) == "--tag_size") {
120  tagSize = std::atof(argv[2]); // Tag size option is required
121  if (tagSize <= 0) {
122  std::cout << "Error : invalid tag size." << std::endl << "See " << argv[0] << " --help" << std::endl;
123  return 0;
124  }
125  for (int i = 3; i < argc; i++) {
126  if (std::string(argv[i]) == "--ip" && i + 1 < argc) {
127  ip_address = std::string(argv[i + 1]);
128  i++;
129  } else if (std::string(argv[i]) == "--distance_to_tag" && i + 1 < argc) {
130  opt_distance_to_tag = std::atof(argv[i + 1]);
131  if (opt_distance_to_tag <= 0) {
132  std::cout << "Error : invalid distance to tag." << std::endl << "See " << argv[0] << " --help" << std::endl;
133  return 0;
134  }
135  opt_has_distance_to_tag = true;
136  i++;
137  } else if (std::string(argv[i]) == "--intrinsic") {
// NOTE(review): unlike --ip and --distance_to_tag above, this branch does not
// check i + 1 < argc, so passing "--intrinsic" as the last argument reads past
// the end of argv — should be fixed upstream.
138 
139  opt_cam_parameters = std::string(argv[i + 1]);
140  opt_has_cam_parameters = true;
141  i++;
142  } else if (std::string(argv[i]) == "--hd_stream") {
143  stream_res = 1;
144  } else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
145  verbose = true;
146  } else {
147  std::cout << "Error : unknown parameter " << argv[i] << std::endl
148  << "See " << argv[0] << " --help" << std::endl;
149  return 0;
150  }
151  }
152  } else if (argc >= 2 && (std::string(argv[1]) == "--help" || std::string(argv[1]) == "-h")) {
153  std::cout << "\nUsage:\n"
154  << " " << argv[0]
155  << " [--tag_size <size>] [--ip <drone ip>] [--distance_to_tag <distance>] [--intrinsic <xml file>] "
156  << "[--hd_stream] [--verbose] [-v] [--help] [-h]\n"
157  << std::endl
158  << "Description:\n"
159  << " --tag_size <size>\n"
160  << " The size of the tag to detect in meters, required.\n\n"
161  << " --ip <drone ip>\n"
162  << " Ip address of the drone to which you want to connect (default : 192.168.42.1).\n\n"
163  << " --distance_to_tag <distance>\n"
164  << " The desired distance to the tag in meters (default: 1 meter).\n\n"
165  << " --intrinsic <xml file>\n"
166  << " XML file containing computed intrinsic camera parameters (default: empty).\n\n"
167  << " --hd_stream\n"
168  << " Enables HD 720p streaming instead of default 480p.\n"
169  << " Allows to increase range and accuracy of the tag detection,\n"
170  << " but increases latency and computation time.\n"
171  << " Caution: camera calibration settings are different for the two resolutions.\n"
172  << " Make sure that if you pass custom intrinsic camera parameters,\n"
173  << " they were obtained with the correct resolution.\n\n"
174  << " --verbose, -v\n"
175  << " Enables verbose (drone information messages and velocity commands\n"
176  << " are then displayed).\n\n"
177  << " --help, -h\n"
178  << " Print help message.\n"
179  << std::endl;
180  return 0;
181 
182  } else {
183  std::cout << "Error : tag size parameter required." << std::endl << "See " << argv[0] << " --help" << std::endl;
184  return 0;
185  }
186 
187  std::cout
188  << "\nWARNING: \n - This program does no sensing or avoiding of "
189  "obstacles, \n"
190  "the drone WILL collide with any objects in the way! Make sure "
191  "the \n"
192  "drone has approximately 3 meters of free space on all sides.\n"
193  " - The drone uses a downward-facing camera for horizontal speed estimation,\n make sure the drone flies "
194  "above a non-uniform flooring,\n or its movement will be inacurate and dangerous !\n"
195 
196  << std::endl;
197 
198  vpRobotBebop2 drone(
199  verbose, true, ip_address); // Create the drone with desired verbose level, settings reset, and corresponding IP
200 
201  if (drone.isRunning()) {
202 
203  drone.setVideoResolution(stream_res); // Set video resolution to 480p (default) or 720p
204 
205  drone.setStreamingMode(0); // Set lowest latency stream mode
206  drone.setVideoStabilisationMode(0); // Disable video stabilisation
207 
208  drone.doFlatTrim(); // Flat trim calibration
209 
210  drone.startStreaming(); // Start streaming and decoding video data
211 
212  drone.setExposure(1.5f); // Set exposure to max so that the aprilTag detection is more efficient
213 
214  drone.setCameraOrientation(-5., 0.,
215  true); // Set camera to look slightly down so that the drone is slightly above the tag
216 
217  drone.takeOff(true); // Take off
218 
// NOTE(review): original line 219 is missing from this extract — presumably
// the declaration of the grabbed image (vpImage<unsigned char> I); confirm
// against the ViSP example source.
220  drone.getGrayscaleImage(I);
221 
222 #if defined VISP_HAVE_X11
223  vpDisplayX display;
224 #elif defined VISP_HAVE_GTK
225  vpDisplayGTK display;
226 #elif defined VISP_HAVE_GDI
227  vpDisplayGDI display;
228 #elif defined VISP_HAVE_OPENCV
229  vpDisplayOpenCV display;
230 #endif
231  int orig_displayX = 100;
232  int orig_displayY = 100;
233  display.init(I, orig_displayX, orig_displayY, "DRONE VIEW");
// NOTE(review): original line 234 is missing here — per the doxygen
// cross-references it is likely vpDisplay::display(I); confirm.
235  vpDisplay::flush(I);
236 
237  vpPlot plotter(1, 700, 700, orig_displayX + static_cast<int>(I.getWidth()) + 20, orig_displayY,
238  "Visual servoing tasks");
239  unsigned int iter = 0;
240 
// NOTE(review): original line 241 declaring tagFamily is missing; the doxygen
// cross-references below point to vpDetectorAprilTag::TAG_36h11.
242  vpDetectorAprilTag detector(tagFamily); // The detector used to detect AprilTags
243  detector.setAprilTagQuadDecimate(4.0);
244  detector.setAprilTagNbThreads(4);
245  detector.setDisplayTag(true);
246 
247  vpCameraParameters cam;
248 
249  if (opt_has_cam_parameters) {
250 
// NOTE(review): original lines 251-252 are missing — they declare the
// vpXmlParserCamera parser `p` and the projection model `projModel` used below.
253 
254  if (p.parse(cam, opt_cam_parameters, "Camera", projModel, I.getWidth(), I.getHeight()) !=
// NOTE(review): original line 255 (the `vpXmlParserCamera::SEQUENCE_OK) {`
// continuation of this condition) is missing from this extract.
256  std::cout << "Cannot find parameters in XML file " << opt_cam_parameters << std::endl;
257  if (drone.getVideoHeight() == 720) { // 720p streaming
258  cam.initPersProjWithoutDistortion(785.6412585, 785.3322447, 637.9049857, 359.7524531);
259  } else { // 480p streaming
260  cam.initPersProjWithoutDistortion(531.9213063, 520.8495788, 429.133986, 240.9464457);
261  }
262  }
263  } else {
264  std::cout << "Setting default camera parameters ... " << std::endl;
265  if (drone.getVideoHeight() == 720) { // 720p streaming
266  cam.initPersProjWithoutDistortion(785.6412585, 785.3322447, 637.9049857, 359.7524531);
267  } else { // 480p streaming
268  cam.initPersProjWithoutDistortion(531.9213063, 520.8495788, 429.133986, 240.9464457);
269  }
270  }
271  cam.printParameters();
272 
273  vpServo task; // Visual servoing task
274 
275  // double lambda = 0.5;
276  vpAdaptiveGain lambda = vpAdaptiveGain(1.5, 0.7, 30);
// NOTE(review): original lines 277-278 are missing — per the doxygen
// cross-references they call task.setServo(vpServo::EYEINHAND_L_cVe_eJe) and
// task.setInteractionMatrixType(vpServo::CURRENT).
279  task.setLambda(lambda);
280 
281  /*
282  In the following section, camera 1 refers to camera coordinates system of the drone, but without taking camera
283  pan and camera tilt into account.
284  Those values are taken into consideration in Camera 2.
285  E is the effective coordinate system of the drone, the one in which we need to convert every velocity command.
286 
287  We can easily compute homogeneous matrix between camera 1 and camera 2, and between camera 1
288  and effective coordinate system E of the drone.
289 
290  Using those matrices, we can in the end obtain the matrix between c2 and E
291  */
292  vpRxyzVector c1_rxyz_c2(vpMath::rad(drone.getCurrentCameraTilt()), vpMath::rad(drone.getCurrentCameraPan()), 0);
293  vpRotationMatrix c1Rc2(c1_rxyz_c2); // Rotation between camera 1 and 2
294  vpHomogeneousMatrix c1Mc2(vpTranslationVector(), c1Rc2); // Homogeneous matrix between c1 and c2
295 
296  vpRotationMatrix c1Re{0, 1, 0, 0, 0, 1, 1, 0, 0}; // Rotation between camera 1 and E
297  vpTranslationVector c1te(0, 0, -0.09); // Translation between camera 1 and E
298  vpHomogeneousMatrix c1Me(c1te, c1Re); // Homogeneous matrix between c1 and E
299 
300  vpHomogeneousMatrix c2Me = c1Mc2.inverse() * c1Me; // Homogeneous matrix between c2 and E
301 
302  vpVelocityTwistMatrix cVe(c2Me);
303  task.set_cVe(cVe);
304 
// 6x4 robot Jacobian: only 4 DOF are controllable; rows 0-2 (translations)
// and row 5 (last twist component) are mapped one-to-one to the 4 commands.
305  vpMatrix eJe(6, 4, 0);
306 
307  eJe[0][0] = 1;
308  eJe[1][1] = 1;
309  eJe[2][2] = 1;
310  eJe[5][3] = 1;
311 
312  // double Z_d = 1.; // Desired distance to the target
313  double Z_d = (opt_has_distance_to_tag ? opt_distance_to_tag : 1.);
314 
315  // Define the desired polygon corresponding to the AprilTag CLOCKWISE
316  double X[4] = {tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2.};
317  double Y[4] = {tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2.};
318  std::vector<vpPoint> vec_P, vec_P_d;
319 
320  for (int i = 0; i < 4; i++) {
321  vpPoint P_d(X[i], Y[i], 0);
322  vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
323  P_d.track(cdMo); // Update the point coordinates for the desired camera pose cdMo
324  vec_P_d.push_back(P_d);
325  }
326  vpMomentObject m_obj(3), m_obj_d(3);
327  vpMomentDatabase mdb, mdb_d;
328  vpMomentBasic mb_d; // Here only to get the desired area m00
329  vpMomentGravityCenter mg, mg_d;
330  vpMomentCentered mc, mc_d;
331  vpMomentAreaNormalized man(0, Z_d), man_d(0, Z_d); // Declare normalized area updated below with m00
332  vpMomentGravityCenterNormalized mgn, mgn_d; // Declare normalized gravity center
333 
334  // Desired moments
335  m_obj_d.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
336  m_obj_d.fromVector(vec_P_d); // Initialize the object with the points coordinates
337 
338  mb_d.linkTo(mdb_d); // Add basic moments to database
339  mg_d.linkTo(mdb_d); // Add gravity center to database
340  mc_d.linkTo(mdb_d); // Add centered moments to database
341  man_d.linkTo(mdb_d); // Add area normalized to database
342  mgn_d.linkTo(mdb_d); // Add gravity center normalized to database
343  mdb_d.updateAll(m_obj_d); // All of the moments must be updated, not just an_d
344  mg_d.compute(); // Compute gravity center moment
345  mc_d.compute(); // Compute centered moments AFTER gravity center
346 
347  double area = 0;
348  if (m_obj_d.getType() == vpMomentObject::DISCRETE)
349  area = mb_d.get(2, 0) + mb_d.get(0, 2);
350  else
351  area = mb_d.get(0, 0);
352  // Update moment with the desired area
353  man_d.setDesiredArea(area);
354 
355  man_d.compute(); // Compute area normalized moment AFTER centered moments
356  mgn_d.compute(); // Compute gravity center normalized moment AFTER area normalized moment
357 
358  // Desired plane
359  double A = 0.0;
360  double B = 0.0;
361  double C = 1.0 / Z_d;
362 
363  // Construct area normalized features
364  vpFeatureMomentGravityCenterNormalized s_mgn(mdb, A, B, C), s_mgn_d(mdb_d, A, B, C);
365  vpFeatureMomentAreaNormalized s_man(mdb, A, B, C), s_man_d(mdb_d, A, B, C);
366  vpFeatureVanishingPoint s_vp, s_vp_d;
367 
368  // Add the features
369  task.addFeature(s_mgn, s_mgn_d);
370  task.addFeature(s_man, s_man_d);
// NOTE(review): original line 371 is missing — per the doxygen cross-references
// it adds the vanishing-point feature (s_vp, s_vp_d) selected with
// vpFeatureVanishingPoint::selectAtanOneOverRho().
372 
373  plotter.initGraph(0, 4);
374  plotter.setLegend(0, 0, "Xn"); // Distance from center on X axis feature
375  plotter.setLegend(0, 1, "Yn"); // Distance from center on Y axis feature
376  plotter.setLegend(0, 2, "an"); // Tag area feature
377  plotter.setLegend(0, 3, "atan(1/rho)"); // Vanishing point feature
378 
379  // Update desired gravity center normalized feature
380  s_mgn_d.update(A, B, C);
381  s_mgn_d.compute_interaction();
382  // Update desired area normalized feature
383  s_man_d.update(A, B, C);
384  s_man_d.compute_interaction();
385 
386  // Update desired vanishing point feature for the horizontal line
387  s_vp_d.setAtanOneOverRho(0);
388  s_vp_d.setAlpha(0);
389 
390  bool runLoop = true;
391  bool vec_ip_has_been_sorted = false;
392  std::vector<std::pair<size_t, vpImagePoint> > vec_ip_sorted;
393 
394  //** Visual servoing loop **//
395  while (drone.isRunning() && drone.isStreaming() && runLoop) {
396 
397  double startTime = vpTime::measureTimeMs();
398 
399  drone.getGrayscaleImage(I);
// NOTE(review): original line 400 is missing — per the doxygen
// cross-references it is likely vpDisplay::display(I); confirm.
401 
402  std::vector<vpHomogeneousMatrix> cMo_vec;
403  detector.detect(I, tagSize, cam, cMo_vec); // Detect AprilTags in current image
404  double t = vpTime::measureTimeMs() - startTime;
405 
406  {
407  std::stringstream ss;
408  ss << "Detection time: " << t << " ms";
409  vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
410  }
411 
412  if (detector.getNbObjects() == 1) {
413 
414  // Update current points used to compute the moments
415  std::vector<vpImagePoint> vec_ip = detector.getPolygon(0);
416  vec_P.clear();
417  for (size_t i = 0; i < vec_ip.size(); i++) { // size = 4
418  double x = 0, y = 0;
419  vpPixelMeterConversion::convertPoint(cam, vec_ip[i], x, y);
420  vpPoint P;
421  P.set_x(x);
422  P.set_y(y);
423  vec_P.push_back(P);
424  }
425 
426  // Current moments
427  m_obj.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
428  m_obj.fromVector(vec_P); // Initialize the object with the points coordinates
429 
430  mg.linkTo(mdb); // Add gravity center to database
431  mc.linkTo(mdb); // Add centered moments to database
432  man.linkTo(mdb); // Add area normalized to database
433  mgn.linkTo(mdb); // Add gravity center normalized to database
434  mdb.updateAll(m_obj); // All of the moments must be updated, not just an_d
435  mg.compute(); // Compute gravity center moment
436  mc.compute(); // Compute centered moments AFTER gravity center
437  man.setDesiredArea(area); // Desired area was init at 0 (unknown at construction), needs to be updated here
438  man.compute(); // Compute area normalized moment AFTER centered moment
439  mgn.compute(); // Compute gravity center normalized moment AFTER area normalized moment
440 
441  s_mgn.update(A, B, C);
442  s_mgn.compute_interaction();
443  s_man.update(A, B, C);
444  s_man.compute_interaction();
445 
446  /* Sort points from their height in the image, and keep original indexes.
447  This is done once, in order to be independent from the orientation of the tag
448  when detecting vanishing points. */
449  if (!vec_ip_has_been_sorted) {
450  for (size_t i = 0; i < vec_ip.size(); i++) {
451 
452  // Add the points and their corresponding index
453  std::pair<size_t, vpImagePoint> index_pair = std::pair<size_t, vpImagePoint>(i, vec_ip[i]);
454  vec_ip_sorted.push_back(index_pair);
455  }
456 
457  // Sort the points and indexes from the v value of the points
458  std::sort(vec_ip_sorted.begin(), vec_ip_sorted.end(), compareImagePoint);
459 
460  vec_ip_has_been_sorted = true;
461  }
462 
463  // Use the two highest points for the first line, and the two others for the second line.
464  vpFeatureBuilder::create(s_vp, cam, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first],
465  vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first],
// NOTE(review): original line 466 (the final argument of this
// vpFeatureBuilder::create call, per the cross-references a feature-selector /
// vpFeatureVanishingPoint::selectAtanOneOverRho()) is missing from this extract.
467 
468  task.set_cVe(cVe);
469  task.set_eJe(eJe);
470 
471  // Compute the control law. Velocities are computed in the mobile robot reference frame
472  vpColVector ve = task.computeControlLaw();
473 
474  // Sending the control law to the drone
475  if (verbose) {
476  std::cout << "ve: " << ve.t() << std::endl;
477  }
478  drone.setVelocity(ve, 1.0);
479 
480  for (size_t i = 0; i < 4; i++) {
481  vpDisplay::displayCross(I, vec_ip[i], 15, vpColor::red, 1);
482  std::stringstream ss;
483  ss << i;
484  vpDisplay::displayText(I, vec_ip[i] + vpImagePoint(15, 15), ss.str(), vpColor::green);
485  }
486 
487  // Display visual features
488  vpDisplay::displayPolygon(I, vec_ip, vpColor::green, 3); // Current polygon used to compute the an moment
489  vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green,
490  3); // Center of gravity of the detected tag
491  vpDisplay::displayLine(I, 0, static_cast<int>(cam.get_u0()), static_cast<int>(I.getHeight()) - 1,
492  static_cast<int>(cam.get_u0()), vpColor::red,
493  3); // Vertical line as desired x position
494  vpDisplay::displayLine(I, static_cast<int>(cam.get_v0()), 0, static_cast<int>(cam.get_v0()),
495  static_cast<int>(I.getWidth()) - 1, vpColor::red,
496  3); // Horizontal line as desired y position
497 
498  // Display lines corresponding to the vanishing point for the horizontal lines
499  vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first], vpColor::red, 1,
500  false);
501  vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first], vpColor::red, 1,
502  false);
503 
504  } else {
505  std::stringstream sserr;
506  sserr << "Failed to detect an Apriltag, or detected multiple ones";
507  vpDisplay::displayText(I, 120, 20, sserr.str(), vpColor::red);
508  vpDisplay::flush(I);
509  drone.stopMoving(); // In this case, we stop the drone
510  }
511 
512  vpDisplay::displayText(I, 10, 10, "Click to exit", vpColor::red);
513  vpDisplay::flush(I);
514  if (vpDisplay::getClick(I, false)) {
515  drone.land();
516  runLoop = false;
517  }
518 
519  plotter.plot(0, iter, task.getError());
520 
521  double totalTime = vpTime::measureTimeMs() - startTime;
522  std::stringstream sstime;
523  sstime << "Total time: " << totalTime << " ms";
524  vpDisplay::displayText(I, 80, 20, sstime.str(), vpColor::red);
525  vpDisplay::flush(I);
526 
527  iter++;
528  vpTime::wait(startTime, 40.0); // We wait a total of 40 milliseconds
529  }
530 
531  return EXIT_SUCCESS;
532 
533  } else {
534  std::cout << "ERROR : failed to setup drone control." << std::endl;
535  return EXIT_FAILURE;
536  }
537  } catch (const vpException &e) {
538  std::cout << "Caught an exception: " << e << std::endl;
539  return EXIT_FAILURE;
540  }
541 }
542 
543 #endif // #elif !defined(VISP_HAVE_FFMPEG)
Adaptive gain computation.
Generic class defining intrinsic camera parameters.
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
Implementation of column vector and the associated operations.
Definition: vpColVector.h:131
vpRowVector t() const
static const vpColor red
Definition: vpColor.h:217
static const vpColor green
Definition: vpColor.h:220
@ TAG_36h11
AprilTag 36h11 pattern (recommended)
Display for windows using GDI (available on any windows 32 platform).
Definition: vpDisplayGDI.h:129
The vpDisplayGTK allows to display image using the GTK 3rd party library. Thus to enable this class G...
Definition: vpDisplayGTK.h:135
The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenC...
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be instal...
Definition: vpDisplayX.h:135
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
static void displayPolygon(const vpImage< unsigned char > &I, const std::vector< vpImagePoint > &vip, const vpColor &color, unsigned int thickness=1, bool closed=true)
Error that can be emitted by ViSP classes.
Definition: vpException.h:72
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Functionality computation for normalized surface moment feature. Computes the interaction matrix asso...
Functionality computation for centered and normalized moment feature. Computes the interaction matrix...
static unsigned int selectAtanOneOverRho()
void setAlpha(double alpha)
Set vanishing point feature value.
void setAtanOneOverRho(double atan_one_over_rho)
Set vanishing point feature value.
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:89
unsigned int getWidth() const
Definition: vpImage.h:246
unsigned int getHeight() const
Definition: vpImage.h:188
static double rad(double deg)
Definition: vpMath.h:117
Implementation of a matrix and operations on matrices.
Definition: vpMatrix.h:154
Class handling the normalized surface moment that is invariant in scale and used to estimate depth.
This class defines the 2D basic moment . This class is a wrapper for vpMomentObject which allows to us...
Definition: vpMomentBasic.h:75
const std::vector< double > & get() const
This class defines the double-indexed centered moment descriptor .
This class allows to register all vpMoments so they can access each other according to their dependen...
virtual void updateAll(vpMomentObject &object)
Class describing 2D normalized gravity center moment.
Class describing 2D gravity center moment.
Class for generic objects.
void linkTo(vpMomentDatabase &moments)
Definition: vpMoment.cpp:98
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition: vpPlot.h:116
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition: vpPoint.h:82
void set_x(double x)
Set the point x coordinate in the image plane.
Definition: vpPoint.cpp:511
void set_y(double y)
Set the point y coordinate in the image plane.
Definition: vpPoint.cpp:513
Implementation of a rotation matrix and operations on such kind of matrices.
Implementation of a rotation vector as Euler angle minimal representation.
Definition: vpRxyzVector.h:184
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
Definition: vpServo.cpp:564
@ EYEINHAND_L_cVe_eJe
Definition: vpServo.h:159
void set_cVe(const vpVelocityTwistMatrix &cVe_)
Definition: vpServo.h:448
void setLambda(double c)
Definition: vpServo.h:404
void set_eJe(const vpMatrix &eJe_)
Definition: vpServo.h:506
void setServo(const vpServoType &servo_type)
Definition: vpServo.cpp:215
vpColVector getError() const
Definition: vpServo.h:278
vpColVector computeControlLaw()
Definition: vpServo.cpp:926
@ CURRENT
Definition: vpServo.h:182
void addFeature(vpBasicFeature &s, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
Definition: vpServo.cpp:487
Class that consider the case of a translation vector.
XML parser to load and save intrinsic camera parameters.
int parse(vpCameraParameters &cam, const std::string &filename, const std::string &camera_name, const vpCameraParameters::vpCameraParametersProjType &projModel, unsigned int image_width=0, unsigned int image_height=0)
VISP_EXPORT int wait(double t0, double t)
VISP_EXPORT double measureTimeMs()