// Visual Servoing Platform  version 3.6.1 under development (2025-01-18)
// servoFrankaIBVS.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * tests the control law
33  * eye-in-hand control
34  * velocity computed in the camera frame
35  *
36 *****************************************************************************/
62 #include <iostream>
63 
64 #include <visp3/core/vpCameraParameters.h>
65 #include <visp3/core/vpConfig.h>
66 #include <visp3/detection/vpDetectorAprilTag.h>
67 #include <visp3/gui/vpDisplayGDI.h>
68 #include <visp3/gui/vpDisplayX.h>
69 #include <visp3/gui/vpPlot.h>
70 #include <visp3/io/vpImageIo.h>
71 #include <visp3/robot/vpRobotFranka.h>
72 #include <visp3/sensor/vpRealSense2.h>
73 #include <visp3/visual_features/vpFeatureBuilder.h>
74 #include <visp3/visual_features/vpFeaturePoint.h>
75 #include <visp3/vs/vpServo.h>
76 #include <visp3/vs/vpServoDisplay.h>
77 
78 #if defined(VISP_HAVE_REALSENSE2) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_FRANKA)
79 
80 #ifdef ENABLE_VISP_NAMESPACE
81 using namespace VISP_NAMESPACE_NAME;
82 #endif
83 
84 void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
85  std::vector<vpImagePoint> *traj_vip)
86 {
87  for (size_t i = 0; i < vip.size(); i++) {
88  if (traj_vip[i].size()) {
89  // Add the point only if distance with the previous > 1 pixel
90  if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
91  traj_vip[i].push_back(vip[i]);
92  }
93  }
94  else {
95  traj_vip[i].push_back(vip[i]);
96  }
97  }
98  for (size_t i = 0; i < vip.size(); i++) {
99  for (size_t j = 1; j < traj_vip[i].size(); j++) {
100  vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
101  }
102  }
103 }
104 
105 int main(int argc, char **argv)
106 {
107  double opt_tagSize = 0.120;
108  std::string opt_robot_ip = "192.168.1.1";
109  std::string opt_eMc_filename = "";
110  bool display_tag = true;
111  int opt_quad_decimate = 2;
112  bool opt_verbose = false;
113  bool opt_plot = false;
114  bool opt_adaptive_gain = false;
115  bool opt_task_sequencing = false;
116  double convergence_threshold = 0.00005;
117 
118  for (int i = 1; i < argc; i++) {
119  if ((std::string(argv[i]) == "--tag-size") && (i + 1 < argc)) {
120  opt_tagSize = std::stod(argv[i + 1]);
121  ++i;
122  }
123  else if ((std::string(argv[i]) == "--ip") && (i + 1 < argc)) {
124  opt_robot_ip = std::string(argv[i + 1]);
125  ++i;
126  }
127  else if ((std::string(argv[i]) == "--eMc") && (i + 1 < argc)) {
128  opt_eMc_filename = std::string(argv[i + 1]);
129  ++i;
130  }
131  else if (std::string(argv[i]) == "--verbose") {
132  opt_verbose = true;
133  }
134  else if (std::string(argv[i]) == "--plot") {
135  opt_plot = true;
136  }
137  else if (std::string(argv[i]) == "--adaptive-gain") {
138  opt_adaptive_gain = true;
139  }
140  else if (std::string(argv[i]) == "--task-sequencing") {
141  opt_task_sequencing = true;
142  }
143  else if ((std::string(argv[i]) == "--quad-decimate") && (i + 1 < argc)) {
144  opt_quad_decimate = std::stoi(argv[i + 1]);
145  ++i;
146  }
147  else if (std::string(argv[i]) == "--no-convergence-threshold") {
148  convergence_threshold = 0.;
149  }
150  else if ((std::string(argv[i]) == "--help") || (std::string(argv[i]) == "-h")) {
151  std::cout << "SYNOPSYS" << std::endl
152  << " " << argv[0]
153  << " [--ip <controller ip>]"
154  << " [--tag-size <size>]"
155  << " [--eMc <extrinsic transformation file>]"
156  << " [--quad-decimate <decimation factor>]"
157  << " [--adaptive-gain]"
158  << " [--plot]"
159  << " [--task-sequencing]"
160  << " [--no-convergence-threshold]"
161  << " [--verbose]"
162  << " [--help] [-h]\n"
163  << std::endl;
164  std::cout << "DESCRIPTION" << std::endl
165  << " Use an image-based visual-servoing scheme to position the camera in front of an Apriltag." << std::endl
166  << std::endl
167  << " --ip <controller ip>" << std::endl
168  << " Franka controller ip address" << std::endl
169  << " Default: " << opt_robot_ip << std::endl
170  << std::endl
171  << " --tag-size <size>" << std::endl
172  << " Apriltag size in [m]." << std::endl
173  << " Default: " << opt_tagSize << " [m]" << std::endl
174  << std::endl
175  << " --eMc <extrinsic transformation file>" << std::endl
176  << " File containing the homogeneous transformation matrix between" << std::endl
177  << " robot end-effector and camera frame." << std::endl
178  << std::endl
179  << " --quad-decimate <decimation factor>" << std::endl
180  << " Decimation factor used during Apriltag detection." << std::endl
181  << " Default: " << opt_quad_decimate << std::endl
182  << std::endl
183  << " --adaptive-gain" << std::endl
184  << " Flag to enable adaptive gain to speed up visual servo near convergence." << std::endl
185  << std::endl
186  << " --plot" << std::endl
187  << " Flag to enable curve plotter." << std::endl
188  << std::endl
189  << " --task-sequencing" << std::endl
190  << " Flag to enable task sequencing scheme." << std::endl
191  << std::endl
192  << " --no-convergence-threshold" << std::endl
193  << " Flag to disable convergence threshold used to stop the visual servo." << std::endl
194  << std::endl
195  << " --verbose" << std::endl
196  << " Flag to enable extra verbosity." << std::endl
197  << std::endl
198  << " --help, -h" << std::endl
199  << " Print this helper message." << std::endl
200  << std::endl;
201 
202  return EXIT_SUCCESS;
203  }
204  }
205 
206  vpRobotFranka robot;
207 
208  try {
209  robot.connect(opt_robot_ip);
210 
211  vpRealSense2 rs;
212  rs2::config config;
213  unsigned int width = 640, height = 480;
214  config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
215  config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
216  config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
217  rs.open(config);
218 
219  // Get camera extrinsics
220  vpPoseVector ePc;
221  // Set camera extrinsics default values
222  ePc[0] = 0.0337731;
223  ePc[1] = -0.00535012;
224  ePc[2] = -0.0523339;
225  ePc[3] = -0.247294;
226  ePc[4] = -0.306729;
227  ePc[5] = 1.53055;
228 
229  // If provided, read camera extrinsics from --eMc <file>
230  if (!opt_eMc_filename.empty()) {
231  ePc.loadYAML(opt_eMc_filename, ePc);
232  }
233  else {
234  std::cout << "Warning, opt_eMc_filename is empty! Use hard coded values." << std::endl;
235  }
236  vpHomogeneousMatrix eMc(ePc);
237  std::cout << "eMc:\n" << eMc << std::endl;
238 
239  // Get camera intrinsics
241  std::cout << "cam:\n" << cam << std::endl;
242 
243  vpImage<unsigned char> I(height, width);
244 
245 #if defined(VISP_HAVE_X11)
246  vpDisplayX dc(I, 10, 10, "Color image");
247 #elif defined(VISP_HAVE_GDI)
248  vpDisplayGDI dc(I, 10, 10, "Color image");
249 #endif
250 
253  // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
254  vpDetectorAprilTag detector(tagFamily);
255  detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
256  detector.setDisplayTag(display_tag);
257  detector.setAprilTagQuadDecimate(opt_quad_decimate);
258 
259  // Servo
260  vpHomogeneousMatrix cdMc, cMo, oMo;
261 
262  // Desired pose used to compute the desired features
263  vpHomogeneousMatrix cdMo(vpTranslationVector(0, 0, opt_tagSize * 3), // 3 times tag with along camera z axis
264  vpRotationMatrix({ 1, 0, 0, 0, -1, 0, 0, 0, -1 }));
265 
266  // Create visual features
267  std::vector<vpFeaturePoint> p(4), pd(4); // We use 4 points
268 
269  // Define 4 3D points corresponding to the CAD model of the Apriltag
270  std::vector<vpPoint> point(4);
271  point[0].setWorldCoordinates(-opt_tagSize / 2., -opt_tagSize / 2., 0);
272  point[1].setWorldCoordinates(opt_tagSize / 2., -opt_tagSize / 2., 0);
273  point[2].setWorldCoordinates(opt_tagSize / 2., opt_tagSize / 2., 0);
274  point[3].setWorldCoordinates(-opt_tagSize / 2., opt_tagSize / 2., 0);
275 
276  vpServo task;
277  // Add the 4 visual feature points
278  for (size_t i = 0; i < p.size(); i++) {
279  task.addFeature(p[i], pd[i]);
280  }
283 
284  if (opt_adaptive_gain) {
285  vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
286  task.setLambda(lambda);
287  }
288  else {
289  task.setLambda(0.5);
290  }
291 
292  vpPlot *plotter = nullptr;
293  int iter_plot = 0;
294 
295  if (opt_plot) {
296  plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
297  "Real time curves plotter");
298  plotter->setTitle(0, "Visual features error");
299  plotter->setTitle(1, "Camera velocities");
300  plotter->initGraph(0, 8);
301  plotter->initGraph(1, 6);
302  plotter->setLegend(0, 0, "error_feat_p1_x");
303  plotter->setLegend(0, 1, "error_feat_p1_y");
304  plotter->setLegend(0, 2, "error_feat_p2_x");
305  plotter->setLegend(0, 3, "error_feat_p2_y");
306  plotter->setLegend(0, 4, "error_feat_p3_x");
307  plotter->setLegend(0, 5, "error_feat_p3_y");
308  plotter->setLegend(0, 6, "error_feat_p4_x");
309  plotter->setLegend(0, 7, "error_feat_p4_y");
310  plotter->setLegend(1, 0, "vc_x");
311  plotter->setLegend(1, 1, "vc_y");
312  plotter->setLegend(1, 2, "vc_z");
313  plotter->setLegend(1, 3, "wc_x");
314  plotter->setLegend(1, 4, "wc_y");
315  plotter->setLegend(1, 5, "wc_z");
316  }
317 
318  bool final_quit = false;
319  bool has_converged = false;
320  bool send_velocities = false;
321  bool servo_started = false;
322  std::vector<vpImagePoint> *traj_corners = nullptr; // To memorize point trajectory
323 
324  static double t_init_servo = vpTime::measureTimeMs();
325 
326  robot.set_eMc(eMc); // Set location of the camera wrt end-effector frame
328 
329  while (!has_converged && !final_quit) {
330  double t_start = vpTime::measureTimeMs();
331 
332  rs.acquire(I);
333 
335 
336  std::vector<vpHomogeneousMatrix> cMo_vec;
337  detector.detect(I, opt_tagSize, cam, cMo_vec);
338 
339  {
340  std::stringstream ss;
341  ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
342  vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
343  }
344 
345  vpColVector v_c(6);
346 
347  // Only one tag is detected
348  if (cMo_vec.size() == 1) {
349  cMo = cMo_vec[0];
350 
351  static bool first_time = true;
352  if (first_time) {
353  // Introduce security wrt tag positioning in order to avoid PI rotation
354  std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
355  v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
356  for (size_t i = 0; i < 2; i++) {
357  v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
358  }
359  if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
360  oMo = v_oMo[0];
361  }
362  else {
363  std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
364  oMo = v_oMo[1]; // Introduce PI rotation
365  }
366 
367  // Compute the desired position of the features from the desired pose
368  for (size_t i = 0; i < point.size(); i++) {
369  vpColVector cP, p_;
370  point[i].changeFrame(cdMo * oMo, cP);
371  point[i].projection(cP, p_);
372 
373  pd[i].set_x(p_[0]);
374  pd[i].set_y(p_[1]);
375  pd[i].set_Z(cP[2]);
376  }
377  }
378 
379  // Get tag corners
380  std::vector<vpImagePoint> corners = detector.getPolygon(0);
381 
382  // Update visual features
383  for (size_t i = 0; i < corners.size(); i++) {
384  // Update the point feature from the tag corners location
385  vpFeatureBuilder::create(p[i], cam, corners[i]);
386  // Set the feature Z coordinate from the pose
387  vpColVector cP;
388  point[i].changeFrame(cMo, cP);
389 
390  p[i].set_Z(cP[2]);
391  }
392 
393  if (opt_task_sequencing) {
394  if (!servo_started) {
395  if (send_velocities) {
396  servo_started = true;
397  }
398  t_init_servo = vpTime::measureTimeMs();
399  }
400  v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
401  }
402  else {
403  v_c = task.computeControlLaw();
404  }
405 
406  // Display the current and desired feature points in the image display
407  vpServoDisplay::display(task, cam, I);
408  for (size_t i = 0; i < corners.size(); i++) {
409  std::stringstream ss;
410  ss << i;
411  // Display current point indexes
412  vpDisplay::displayText(I, corners[i] + vpImagePoint(15, 15), ss.str(), vpColor::red);
413  // Display desired point indexes
414  vpImagePoint ip;
415  vpMeterPixelConversion::convertPoint(cam, pd[i].get_x(), pd[i].get_y(), ip);
416  vpDisplay::displayText(I, ip + vpImagePoint(15, 15), ss.str(), vpColor::red);
417  }
418  if (first_time) {
419  traj_corners = new std::vector<vpImagePoint>[corners.size()];
420  }
421  // Display the trajectory of the points used as features
422  display_point_trajectory(I, corners, traj_corners);
423 
424  if (opt_plot) {
425  plotter->plot(0, iter_plot, task.getError());
426  plotter->plot(1, iter_plot, v_c);
427  iter_plot++;
428  }
429 
430  if (opt_verbose) {
431  std::cout << "v_c: " << v_c.t() << std::endl;
432  }
433 
434  double error = task.getError().sumSquare();
435  std::stringstream ss;
436  ss << "error: " << error;
437  vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
438 
439  if (opt_verbose)
440  std::cout << "error: " << error << std::endl;
441 
442  if (error < convergence_threshold) {
443  has_converged = true;
444  std::cout << "Servo task has converged" << std::endl;
445  vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
446  }
447  if (first_time) {
448  first_time = false;
449  }
450  } // end if (cMo_vec.size() == 1)
451  else {
452  v_c = 0;
453  }
454 
455  if (!send_velocities) {
456  v_c = 0;
457  }
458 
459  // Send to the robot
461 
462  {
463  std::stringstream ss;
464  ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
465  vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
466  }
467  vpDisplay::flush(I);
468 
470  if (vpDisplay::getClick(I, button, false)) {
471  switch (button) {
473  send_velocities = !send_velocities;
474  break;
475 
477  final_quit = true;
478  v_c = 0;
479  break;
480 
481  default:
482  break;
483  }
484  }
485  }
486  std::cout << "Stop the robot " << std::endl;
488 
489  if (opt_plot && plotter != nullptr) {
490  delete plotter;
491  plotter = nullptr;
492  }
493 
494  if (!final_quit) {
495  while (!final_quit) {
496  rs.acquire(I);
498 
499  vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
500  vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
501 
502  if (vpDisplay::getClick(I, false)) {
503  final_quit = true;
504  }
505 
506  vpDisplay::flush(I);
507  }
508  }
509  if (traj_corners) {
510  delete[] traj_corners;
511  }
512  }
513  catch (const vpException &e) {
514  std::cout << "ViSP exception: " << e.what() << std::endl;
515  std::cout << "Stop the robot " << std::endl;
517  return EXIT_FAILURE;
518  }
519  catch (const franka::NetworkException &e) {
520  std::cout << "Franka network exception: " << e.what() << std::endl;
521  std::cout << "Check if you are connected to the Franka robot"
522  << " or if you specified the right IP using --ip command line option set by default to 192.168.1.1. "
523  << std::endl;
524  return EXIT_FAILURE;
525  }
526  catch (const std::exception &e) {
527  std::cout << "Franka exception: " << e.what() << std::endl;
528  return EXIT_FAILURE;
529  }
530 
531  return EXIT_SUCCESS;
532 }
533 #else
/*!
 * Fallback stub compiled when a mandatory dependency is missing: report which
 * dependency has to be installed so the real example can build.
 */
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if !defined(VISP_HAVE_FRANKA)
  std::cout << "Install libfranka." << std::endl;
#endif
#if !defined(VISP_HAVE_X11) && !defined(VISP_HAVE_GDI)
  // The feature guard also requires a display backend; report that case too.
  std::cout << "Build ViSP with X11 (Linux) or GDI (Windows) display support." << std::endl;
#endif
  return EXIT_SUCCESS;
}
544 #endif
/* Doxygen cross-reference residue from the HTML extraction of this example
   (kept for reference; not part of the program):

Adaptive gain computation.
static bool loadYAML(const std::string &filename, vpArray2D< Type > &A, char *header=nullptr)
Definition: vpArray2D.h:783
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
Implementation of column vector and the associated operations.
Definition: vpColVector.h:191
double sumSquare() const
static const vpColor red
Definition: vpColor.h:217
static const vpColor green
Definition: vpColor.h:220
@ TAG_36h11
AprilTag 36h11 pattern (recommended)
Display for windows using GDI (available on any windows 32 platform).
Definition: vpDisplayGDI.h:130
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition: vpException.h:60
const char * what() const
Definition: vpException.cpp:71
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpImagePoint &t)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:82
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
unsigned int getWidth() const
Definition: vpImage.h:242
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition: vpPlot.h:112
void initGraph(unsigned int graphNum, unsigned int curveNbr)
Definition: vpPlot.cpp:203
void setLegend(unsigned int graphNum, unsigned int curveNum, const std::string &legend)
Definition: vpPlot.cpp:552
void plot(unsigned int graphNum, unsigned int curveNum, double x, double y)
Definition: vpPlot.cpp:270
void setTitle(unsigned int graphNum, const std::string &title)
Definition: vpPlot.cpp:510
Implementation of a pose vector and operations on poses.
Definition: vpPoseVector.h:203
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel) VP_OVERRIDE
@ CAMERA_FRAME
Definition: vpRobot.h:84
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition: vpRobot.h:67
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition: vpRobot.h:66
virtual vpRobotStateType setRobotState(const vpRobot::vpRobotStateType newState)
Definition: vpRobot.cpp:202
Implementation of a rotation matrix and operations on such kind of matrices.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
Definition: vpServo.cpp:380
@ EYEINHAND_CAMERA
Definition: vpServo.h:161
void addFeature(vpBasicFeature &s_cur, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
Definition: vpServo.cpp:331
void setLambda(double c)
Definition: vpServo.h:991
void setServo(const vpServoType &servo_type)
Definition: vpServo.cpp:134
vpColVector getError() const
Definition: vpServo.h:515
vpColVector computeControlLaw()
Definition: vpServo.cpp:705
@ CURRENT
Definition: vpServo.h:202
Class that consider the case of a translation vector.
VISP_EXPORT double measureTimeMs()
*/