Visual Servoing Platform  version 3.6.1 under development (2024-11-21)
servoFlirPtuIBVS.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * tests the control law
33  * eye-in-hand control
34  * velocity computed in the camera frame
35  *
36 *****************************************************************************/
58 #include <iostream>
59 
60 #include <visp3/core/vpCameraParameters.h>
61 #include <visp3/core/vpConfig.h>
62 #include <visp3/core/vpXmlParserCamera.h>
63 #include <visp3/detection/vpDetectorAprilTag.h>
64 #include <visp3/gui/vpDisplayGDI.h>
65 #include <visp3/gui/vpDisplayX.h>
66 #include <visp3/gui/vpPlot.h>
67 #include <visp3/io/vpImageIo.h>
68 #include <visp3/robot/vpRobotFlirPtu.h>
69 #include <visp3/sensor/vpFlyCaptureGrabber.h>
70 #include <visp3/visual_features/vpFeatureBuilder.h>
71 #include <visp3/visual_features/vpFeaturePoint.h>
72 #include <visp3/vs/vpServo.h>
73 #include <visp3/vs/vpServoDisplay.h>
74 
75 #if defined(VISP_HAVE_FLIR_PTU_SDK) && defined(VISP_HAVE_FLYCAPTURE) && \
76  (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_PUGIXML)
77 
78 #ifdef ENABLE_VISP_NAMESPACE
79 using namespace VISP_NAMESPACE_NAME;
80 #endif
81 
82 void display_point_trajectory(const vpImage<unsigned char> &I, const vpImagePoint &ip,
83  std::vector<vpImagePoint> &traj_ip)
84 {
85  if (traj_ip.size()) {
86  // Add the point only if the distance from the previous one is greater than 2 pixels
87  if (vpImagePoint::distance(ip, traj_ip.back()) > 2.) {
88  traj_ip.push_back(ip);
89  }
90  }
91  else {
92  traj_ip.push_back(ip);
93  }
94  for (size_t j = 1; j < traj_ip.size(); j++) {
95  vpDisplay::displayLine(I, traj_ip[j - 1], traj_ip[j], vpColor::green, 2);
96  }
97 }
98 
99 int main(int argc, char **argv)
100 {
101  std::string opt_portname;
102  int opt_baudrate = 9600;
103  bool opt_network = false;
104  std::string opt_extrinsic;
105  std::string opt_intrinsic;
106  std::string opt_camera_name;
107  bool display_tag = true;
108  int opt_quad_decimate = 2;
109  double opt_tag_size = 0.120; // Used to compute the distance of the cog wrt the camera
110  bool opt_verbose = false;
111  bool opt_plot = false;
112  bool opt_adaptive_gain = false;
113  bool opt_task_sequencing = false;
114  double opt_constant_gain = 0.5;
115  bool opt_display_trajectory = true;
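  // Convergence is declared when the sum of squared feature errors (task.getError().sumSquare()) falls below this threshold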
116  double convergence_threshold = 0.00005;
117 
118  if (argc == 1) {
119  std::cout << "To see how to use this example, run: " << argv[0] << " --help" << std::endl;
120  return EXIT_SUCCESS;
121  }
122 
123  for (int i = 1; i < argc; i++) {
124  if ((std::string(argv[i]) == "--portname" || std::string(argv[i]) == "-p") && (i + 1 < argc)) {
125  opt_portname = std::string(argv[i + 1]);
126  }
127  else if ((std::string(argv[i]) == "--baudrate" || std::string(argv[i]) == "-b") && (i + 1 < argc)) {
128  opt_baudrate = std::atoi(argv[i + 1]);
129  }
130  else if ((std::string(argv[i]) == "--network" || std::string(argv[i]) == "-n")) {
131  opt_network = true;
132  }
133  else if (std::string(argv[i]) == "--extrinsic" && i + 1 < argc) {
134  opt_extrinsic = std::string(argv[i + 1]);
135  }
136  else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
137  opt_intrinsic = std::string(argv[i + 1]);
138  }
139  else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
140  opt_camera_name = std::string(argv[i + 1]);
141  }
142  else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
143  opt_verbose = true;
144  }
145  else if (std::string(argv[i]) == "--plot") {
146  opt_plot = true;
147  }
148  else if (std::string(argv[i]) == "--display-image-trajectory" || std::string(argv[i]) == "-traj") {
149  opt_display_trajectory = true;
150  }
151  else if (std::string(argv[i]) == "--adaptive-gain" || std::string(argv[i]) == "-a") {
152  opt_adaptive_gain = true;
153  }
154  else if ((std::string(argv[i]) == "--constant-gain" || std::string(argv[i]) == "-g") && (i + 1 < argc)) {
155  opt_constant_gain = std::stod(argv[i + 1]);
156  }
157  else if (std::string(argv[i]) == "--task-sequencing") {
158  opt_task_sequencing = true;
159  }
160  else if (std::string(argv[i]) == "--quad-decimate" && i + 1 < argc) {
161  opt_quad_decimate = std::stoi(argv[i + 1]);
162  }
163  else if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
164  opt_tag_size = std::stod(argv[i + 1]);
165  }
166  else if (std::string(argv[i]) == "--no-convergence-threshold") {
167  convergence_threshold = 0.;
168  }
169  else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
170  std::cout << "SYNOPSIS" << std::endl
171  << " " << argv[0] << " [--portname <portname>] [--baudrate <rate>] [--network] "
172  << "[--extrinsic <extrinsic.yaml>] [--intrinsic <intrinsic.xml>] [--camera-name <name>] "
173  << "[--quad-decimate <decimation>] [--tag-size <size>] "
174  << "[--adaptive-gain] [--constant-gain <gain>] [--display-image-trajectory] [--plot] [--task-sequencing] "
175  << "[--no-convergence-threshold] [--verbose] [--help] [-h]" << std::endl
176  << std::endl;
177  std::cout << "DESCRIPTION" << std::endl
178  << " --portname, -p <portname>" << std::endl
179  << " Set serial or tcp port name." << std::endl
180  << std::endl
181  << " --baudrate, -b <rate>" << std::endl
182  << " Set serial communication baud rate. Default: " << opt_baudrate << "." << std::endl
183  << std::endl
184  << " --network, -n" << std::endl
185  << " Get PTU network information (Hostname, IP, Gateway) and exit. " << std::endl
186  << std::endl
187  << " --reset, -r" << std::endl
188  << " Reset PTU axis and exit. " << std::endl
189  << std::endl
190  << " --extrinsic <extrinsic.yaml>" << std::endl
191  << " YAML file containing extrinsic camera parameters as a vpHomogeneousMatrix." << std::endl
192  << " It corresponds to the homogeneous transformation eMc, between end-effector" << std::endl
193  << " and camera frame." << std::endl
194  << std::endl
195  << " --intrinsic <intrinsic.xml>" << std::endl
196  << " Intrinsic camera parameters obtained after camera calibration." << std::endl
197  << std::endl
198  << " --camera-name <name>" << std::endl
199  << " Name of the camera to consider in the xml file provided for intrinsic camera parameters."
200  << std::endl
201  << std::endl
202  << " --quad-decimate <decimation>" << std::endl
203  << " Decimation factor used to detect AprilTag. Default " << opt_quad_decimate << "." << std::endl
204  << std::endl
205  << " --tag-size <size>" << std::endl
206  << " Width in meters of the black part of the AprilTag used as target. Default " << opt_tag_size
207  << "." << std::endl
208  << std::endl
209  << " --adaptive-gain, -a" << std::endl
210  << " Enable adaptive gain instead of constant gain to speed up convergence. " << std::endl
211  << std::endl
212  << " --constant-gain, -g <gain>" << std::endl
213  << " Constant gain value. Default value: " << opt_constant_gain << std::endl
214  << std::endl
215  << " --display-image-trajectory, -traj" << std::endl
216  << " Display the trajectory of the target cog in the image. " << std::endl
217  << std::endl
218  << " --plot" << std::endl
219  << " Enable curve plotter. " << std::endl
220  << std::endl
221  << " --task-sequencing" << std::endl
222  << " Enable task sequencing that allows smooth control of the robot velocity. " << std::endl
223  << std::endl
224  << " --no-convergence-threshold" << std::endl
225  << " Disable ending servoing when it reaches the desired position." << std::endl
226  << std::endl
227  << " --verbose, -v" << std::endl
228  << " Additional printings. " << std::endl
229  << std::endl
230  << " --help, -h" << std::endl
231  << " Print this helper message. " << std::endl
232  << std::endl;
233  std::cout << "EXAMPLE" << std::endl
234  << " - How to get network IP" << std::endl
235 #ifdef _WIN32
236  << " $ " << argv[0] << " --portname COM1 --network" << std::endl
237  << " Try to connect FLIR PTU to port: COM1 with baudrate: 9600" << std::endl
238 #else
239  << " $ " << argv[0] << " -p /dev/ttyUSB0 --network" << std::endl
240  << " Try to connect FLIR PTU to port: /dev/ttyUSB0 with baudrate: 9600" << std::endl
241 #endif
242  << " PTU HostName: PTU-5" << std::endl
243  << " PTU IP : 169.254.110.254" << std::endl
244  << " PTU Gateway : 0.0.0.0" << std::endl
245  << " - How to run this binary using network communication" << std::endl
246  << " $ " << argv[0] << " --portname tcp:169.254.110.254 --tag-size 0.1 --constant-gain 0.1" << std::endl;
247 
248  return EXIT_SUCCESS;
249  }
250  }
251 
252  vpRobotFlirPtu robot;
253 
254  try {
255  std::cout << "Try to connect FLIR PTU to port: " << opt_portname << " with baudrate: " << opt_baudrate << std::endl;
256  robot.connect(opt_portname, opt_baudrate);
257 
258  if (opt_network) {
259  std::cout << "PTU HostName: " << robot.getNetworkHostName() << std::endl;
260  std::cout << "PTU IP : " << robot.getNetworkIP() << std::endl;
261  std::cout << "PTU Gateway : " << robot.getNetworkGateway() << std::endl;
262  return EXIT_SUCCESS;
263  }
264 
265  vpImage<unsigned char> I;
266 
267  vpFlyCaptureGrabber g;
268  g.open(I);
269 
270  // Get camera extrinsics
271  vpTranslationVector etc;
272  vpRotationMatrix eRc;
273  eRc << 0, 0, 1, -1, 0, 0, 0, -1, 0;
274  etc << -0.1, -0.123, 0.035;
275  vpHomogeneousMatrix eMc(etc, eRc);
276 
277  // If provided, read camera extrinsics from command line option
278  if (!opt_extrinsic.empty()) {
279  vpPoseVector ePc;
280  ePc.loadYAML(opt_extrinsic, ePc);
281  eMc.buildFrom(ePc);
282  }
283  else {
284  std::cout << "***************************************************************" << std::endl;
285  std::cout << "Warning, use hard coded values for extrinsic camera parameters." << std::endl;
286  std::cout << "Create a yaml file that contains the extrinsic:" << std::endl
287  << std::endl
288  << "$ cat eMc.yaml" << std::endl
289  << "rows: 4" << std::endl
290  << "cols: 4" << std::endl
291  << "data:" << std::endl
292  << " - [0, 0, 1, -0.1]" << std::endl
293  << " - [-1, 0, 0, -0.123]" << std::endl
294  << " - [0, -1, 0, 0.035]" << std::endl
295  << " - [0, 0, 0, 1]" << std::endl
296  << std::endl
297  << "and load this file with [--extrinsic <extrinsic.yaml>] command line option, like:" << std::endl
298  << std::endl
299  << "$ " << argv[0] << " -p " << opt_portname << " --extrinsic eMc.yaml" << std::endl
300  << std::endl;
301  std::cout << "***************************************************************" << std::endl;
302  }
303 
304  std::cout << "Considered extrinsic transformation eMc:\n" << eMc << std::endl;
305 
306  // Get camera intrinsics
307  vpCameraParameters cam(900, 900, I.getWidth() / 2., I.getHeight() / 2.);
308 
309  // If provided, read camera intrinsics from command line option
310  if (!opt_intrinsic.empty() && !opt_camera_name.empty()) {
311  vpXmlParserCamera parser;
312  parser.parse(cam, opt_intrinsic, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
313  }
314  else {
315  std::cout << "***************************************************************" << std::endl;
316  std::cout << "Warning, use hard coded values for intrinsic camera parameters." << std::endl;
317  std::cout << "Calibrate your camera and load the parameters from command line options, like:" << std::endl
318  << std::endl
319  << "$ " << argv[0] << " -p " << opt_portname << " --intrinsic camera.xml --camera-name \"Camera\""
320  << std::endl
321  << std::endl;
322  std::cout << "***************************************************************" << std::endl;
323  }
324 
325  std::cout << "Considered intrinsic camera parameters:\n" << cam << "\n";
326 
327 #if defined(VISP_HAVE_X11)
328  vpDisplayX dc(I, 10, 10, "Color image");
329 #elif defined(VISP_HAVE_GDI)
330  vpDisplayGDI dc(I, 10, 10, "Color image");
331 #endif
332 
333  vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
334  vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
335  vpDetectorAprilTag detector(tagFamily);
336  detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
337  detector.setDisplayTag(display_tag);
338  detector.setAprilTagQuadDecimate(opt_quad_decimate);
339 
340  // Create visual features
341  vpFeaturePoint p, pd; // We use 1 point, the tag cog
342 
343  // Set desired position to the image center
344  pd.set_x(0);
345  pd.set_y(0);
346 
347  vpServo task;
348  // Add the visual feature point
349  task.addFeature(p, pd);
350  task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
351  task.setInteractionMatrixType(vpServo::CURRENT);
352 
353  if (opt_adaptive_gain) {
354  vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=1.5, lambda(oo)=0.4 and lambda'(0)=30
355  task.setLambda(lambda);
356  }
357  else {
358  task.setLambda(opt_constant_gain);
359  }
360 
361  vpPlot *plotter = nullptr;
362  int iter_plot = 0;
363 
364  if (opt_plot) {
365  plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
366  "Real time curves plotter");
367  plotter->setTitle(0, "Visual features error");
368  plotter->setTitle(1, "Joint velocities");
369  plotter->initGraph(0, 2);
370  plotter->initGraph(1, 2);
371  plotter->setLegend(0, 0, "error_feat_p_x");
372  plotter->setLegend(0, 1, "error_feat_p_y");
373  plotter->setLegend(1, 0, "qdot_pan");
374  plotter->setLegend(1, 1, "qdot_tilt");
375  }
376 
377  bool final_quit = false;
378  bool has_converged = false;
379  bool send_velocities = false;
380  bool servo_started = false;
381  std::vector<vpImagePoint> traj_cog;
382  vpMatrix eJe;
383 
384  static double t_init_servo = vpTime::measureTimeMs();
385 
386  robot.set_eMc(eMc); // Set location of the camera wrt end-effector frame
387 
388  vpVelocityTwistMatrix cVe = robot.get_cVe();
389  std::cout << cVe << std::endl;
390  task.set_cVe(cVe);
391 
392  robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
393 
394  while (!has_converged && !final_quit) {
395  double t_start = vpTime::measureTimeMs();
396 
397  g.acquire(I);
398 
399  vpDisplay::display(I);
400 
401  std::vector<vpHomogeneousMatrix> cMo_vec;
402  detector.detect(I, opt_tag_size, cam, cMo_vec);
403 
404  std::stringstream ss;
405  ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
406  vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
407 
408  vpColVector qdot(2);
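  // Joint velocities sent to the PTU: qdot[0] drives the pan axis, qdot[1] the tilt axis (cf. the plot legends above)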
409 
410  // Only one tag has to be detected
411  if (detector.getNbObjects() == 1) {
412 
413  vpImagePoint cog = detector.getCog(0);
414  double Z = cMo_vec[0][2][3];
415 
416  // Update current feature from measured cog position
417  double x = 0, y = 0;
418  vpPixelMeterConversion::convertPoint(cam, cog, x, y);
419  if (opt_verbose) {
420  std::cout << "Z: " << Z << std::endl;
421  }
422  p.set_xyZ(x, y, Z);
423  pd.set_Z(Z);
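  // The measured depth Z is needed because the interaction matrix of a point feature depends on it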
424 
425  // Get robot Jacobian
426  robot.get_eJe(eJe);
427  task.set_eJe(eJe);
428 
429  if (opt_task_sequencing) {
430  if (!servo_started) {
431  if (send_velocities) {
432  servo_started = true;
433  }
434  t_init_servo = vpTime::measureTimeMs();
435  }
436  qdot = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
437  }
438  else {
439  qdot = task.computeControlLaw();
440  }
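  // With --task-sequencing, the time-based overload of computeControlLaw() smooths the startup
  // so that the joint velocities do not jump when servoing begins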
441 
442  // Display the current and desired feature points in the image display
443  vpServoDisplay::display(task, cam, I);
444 
445  // Display the trajectory of the points used as features
446  if (opt_display_trajectory) {
447  display_point_trajectory(I, cog, traj_cog);
448  }
449 
450  if (opt_plot) {
451  plotter->plot(0, iter_plot, task.getError());
452  plotter->plot(1, iter_plot, qdot);
453  iter_plot++;
454  }
455 
456  if (opt_verbose) {
457  std::cout << "qdot: " << qdot.t() << std::endl;
458  }
459 
460  double error = task.getError().sumSquare();
461  ss.str("");
462  ss << "error: " << error;
463  vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
464 
465  if (opt_verbose)
466  std::cout << "error: " << error << std::endl;
467 
468  if (error < convergence_threshold) {
469  has_converged = true;
470  std::cout << "Servo task has converged"
471  << "\n";
472  vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
473  }
474  } // end if (detector.getNbObjects() == 1)
475  else {
476  qdot = 0;
477  }
478 
479  if (!send_velocities) {
480  qdot = 0;
481  }
482 
483  // Send to the robot
484  robot.setVelocity(vpRobot::JOINT_STATE, qdot);
485  ss.str("");
486  ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
487  vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
488 
489  vpDisplay::flush(I);
490 
491  vpMouseButton::vpMouseButtonType button;
492  if (vpDisplay::getClick(I, button, false)) {
493  switch (button) {
494  case vpMouseButton::button1:
495  send_velocities = !send_velocities;
496  break;
497 
498  case vpMouseButton::button3:
499  final_quit = true;
500  qdot = 0;
501  break;
502 
503  default:
504  break;
505  }
506  }
507  }
508  std::cout << "Stop the robot " << std::endl;
509  robot.setRobotState(vpRobot::STATE_STOP);
510 
511  if (opt_plot && plotter != nullptr) {
512  delete plotter;
513  plotter = nullptr;
514  }
515 
516  if (!final_quit) {
517  while (!final_quit) {
518  g.acquire(I);
519  vpDisplay::display(I);
520 
521  vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
522  vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
523 
524  if (vpDisplay::getClick(I, false)) {
525  final_quit = true;
526  }
527 
528  vpDisplay::flush(I);
529  }
530  }
531  }
532  catch (const vpRobotException &e) {
533  std::cout << "Catch Flir Ptu signal exception: " << e.getMessage() << std::endl;
534  robot.setRobotState(vpRobot::STATE_STOP);
535  }
536  catch (const vpException &e) {
537  std::cout << "ViSP exception: " << e.what() << std::endl;
538  std::cout << "Stop the robot " << std::endl;
539  robot.setRobotState(vpRobot::STATE_STOP);
540  }
541 
542  return EXIT_SUCCESS;
543 }
544 #else
545 int main()
546 {
547 #if !defined(VISP_HAVE_FLYCAPTURE)
548  std::cout << "Install FLIR FlyCapture SDK." << std::endl;
549 #endif
550 #if !defined(VISP_HAVE_FLIR_PTU_SDK)
551  std::cout << "Install FLIR PTU SDK." << std::endl;
552 #endif
553 #if !defined(VISP_HAVE_PUGIXML)
554  std::cout << "pugixml built-in 3rdparty is required." << std::endl;
555 #endif
556  return EXIT_SUCCESS;
557 }
558 #endif
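For reference, the control law implemented above can be sketched as follows (standard IBVS notation; these symbols do not appear in the listing itself). With setServo(EYEINHAND_L_cVe_eJe) and the CURRENT interaction matrix, computeControlLaw() returns the pan/tilt joint velocities

\[ \dot{\mathbf{q}} = -\lambda \left( \widehat{\mathbf{L}}_s \, {}^{c}\mathbf{V}_e \, {}^{e}\mathbf{J}_e \right)^{+} (\mathbf{s} - \mathbf{s}^{*}) \]

where s = (x, y) is the normalized tag cog, s* = (0, 0) the image center, cVe the twist transform deduced from eMc, eJe the PTU Jacobian, and L_s the point-feature interaction matrix evaluated with the measured depth Z:

\[ \mathbf{L}_s = \begin{bmatrix} -1/Z & 0 & x/Z & xy & -(1+x^{2}) & y \\ 0 & -1/Z & y/Z & 1+y^{2} & -xy & -x \end{bmatrix}. \]

With --adaptive-gain, the constant gain is replaced by vpAdaptiveGain(1.5, 0.4, 30), i.e.

\[ \lambda(e) = (\lambda_0 - \lambda_{\infty}) \, e^{-\frac{\lambda'_0}{\lambda_0 - \lambda_{\infty}} e} + \lambda_{\infty}, \qquad \lambda_0 = 1.5,\; \lambda_{\infty} = 0.4,\; \lambda'_0 = 30, \]

where e is the norm of the task error.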