Visual Servoing Platform  version 3.6.1 under development (2024-12-17)
servoFrankaPBVS.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Position-based visual servoing of a Franka robot equipped with a
 * RealSense camera, using an AprilTag as target.
 *
 *****************************************************************************/

#include <iostream>

#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpConfig.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpRobotFranka.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/visual_features/vpFeatureThetaU.h>
#include <visp3/visual_features/vpFeatureTranslation.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

#if defined(VISP_HAVE_REALSENSE2) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_FRANKA)

#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif

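// Helper used below: for each tracked image point (tag corners and center of
// gravity), append the new position to its trajectory only if it moved by
// more than one pixel, then draw every trajectory as a green polyline.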
void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
                              std::vector<vpImagePoint> *traj_vip)
{
  for (size_t i = 0; i < vip.size(); i++) {
    if (traj_vip[i].size()) {
      // Add the point only if distance with the previous > 1 pixel
      if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
        traj_vip[i].push_back(vip[i]);
      }
    }
    else {
      traj_vip[i].push_back(vip[i]);
    }
  }
  for (size_t i = 0; i < vip.size(); i++) {
    for (size_t j = 1; j < traj_vip[i].size(); j++) {
      vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
    }
  }
}

int main(int argc, char **argv)
{
  double opt_tagSize = 0.120;
  std::string opt_robot_ip = "192.168.1.1";
  std::string opt_eMc_filename = "";
  bool display_tag = true;
  int opt_quad_decimate = 2;
  bool opt_verbose = false;
  bool opt_plot = false;
  bool opt_adaptive_gain = false;
  bool opt_task_sequencing = false;
  double convergence_threshold_t = 0.0005; // Value in [m]
  double convergence_threshold_tu = 0.5;   // Value in [deg]

  for (int i = 1; i < argc; i++) {
    if ((std::string(argv[i]) == "--tag_size") && (i + 1 < argc)) {
      opt_tagSize = std::stod(argv[i + 1]);
      ++i;
    }
    else if ((std::string(argv[i]) == "--ip") && (i + 1 < argc)) {
      opt_robot_ip = std::string(argv[i + 1]);
      ++i;
    }
    else if ((std::string(argv[i]) == "--eMc") && (i + 1 < argc)) {
      opt_eMc_filename = std::string(argv[i + 1]);
      ++i;
    }
    else if (std::string(argv[i]) == "--verbose") {
      opt_verbose = true;
    }
    else if (std::string(argv[i]) == "--plot") {
      opt_plot = true;
    }
    else if (std::string(argv[i]) == "--adaptive_gain") {
      opt_adaptive_gain = true;
    }
    else if (std::string(argv[i]) == "--task_sequencing") {
      opt_task_sequencing = true;
    }
    else if ((std::string(argv[i]) == "--quad_decimate") && (i + 1 < argc)) {
      opt_quad_decimate = std::stoi(argv[i + 1]);
      ++i;
    }
    else if (std::string(argv[i]) == "--no-convergence-threshold") {
      convergence_threshold_t = 0.;
      convergence_threshold_tu = 0.;
    }
    else if ((std::string(argv[i]) == "--help") || (std::string(argv[i]) == "-h")) {
      std::cout << "SYNOPSIS" << std::endl
        << "  " << argv[0]
        << " [--ip <controller ip>]"
        << " [--tag_size <size>]"
        << " [--eMc <extrinsic transformation file>]"
        << " [--quad_decimate <decimation factor>]"
        << " [--adaptive_gain]"
        << " [--plot]"
        << " [--task_sequencing]"
        << " [--no-convergence-threshold]"
        << " [--verbose]"
        << " [--help] [-h]\n"
        << std::endl;
      std::cout << "DESCRIPTION" << std::endl
        << "  Use a position-based visual-servoing scheme to position the camera in front of an AprilTag." << std::endl
        << std::endl
        << "  --ip <controller ip>" << std::endl
        << "    Franka controller ip address" << std::endl
        << "    Default: " << opt_robot_ip << std::endl
        << std::endl
        << "  --tag_size <size>" << std::endl
        << "    AprilTag size in [m]." << std::endl
        << "    Default: " << opt_tagSize << " [m]" << std::endl
        << std::endl
        << "  --eMc <extrinsic transformation file>" << std::endl
        << "    File containing the homogeneous transformation matrix between" << std::endl
        << "    robot end-effector and camera frame." << std::endl
        << std::endl
        << "  --quad_decimate <decimation factor>" << std::endl
        << "    Decimation factor used during AprilTag detection." << std::endl
        << "    Default: " << opt_quad_decimate << std::endl
        << std::endl
        << "  --adaptive_gain" << std::endl
        << "    Flag to enable adaptive gain to speed up visual servo near convergence." << std::endl
        << std::endl
        << "  --plot" << std::endl
        << "    Flag to enable curve plotter." << std::endl
        << std::endl
        << "  --task_sequencing" << std::endl
        << "    Flag to enable task sequencing scheme." << std::endl
        << std::endl
        << "  --no-convergence-threshold" << std::endl
        << "    Flag to disable convergence threshold used to stop the visual servo." << std::endl
        << std::endl
        << "  --verbose" << std::endl
        << "    Flag to enable extra verbosity." << std::endl
        << std::endl
        << "  --help, -h" << std::endl
        << "    Print this helper message." << std::endl
        << std::endl;
      return EXIT_SUCCESS;
    }
  }

  vpRobotFranka robot;

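  // Both libfranka and ViSP report errors through exceptions, so the robot
  // connection and the whole servo loop are wrapped in a try / catch block
  // (see the handlers at the end of main()).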
  try {
    robot.connect(opt_robot_ip);

    vpRealSense2 rs;
    rs2::config config;
    unsigned int width = 640, height = 480;
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
    config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
    rs.open(config);

    // Get camera extrinsics
    vpPoseVector ePc;
    // Set camera extrinsics default values
    ePc[0] = 0.0337731;
    ePc[1] = -0.00535012;
    ePc[2] = -0.0523339;
    ePc[3] = -0.247294;
    ePc[4] = -0.306729;
    ePc[5] = 1.53055;

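    // The hard-coded values above describe one particular camera mounting
    // (translation in [m], theta-u rotation in [rad]); they should normally
    // be replaced by your own hand-eye calibration, passed with --eMc.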
    // If provided, read camera extrinsics from --eMc <file>
    if (!opt_eMc_filename.empty()) {
      ePc.loadYAML(opt_eMc_filename, ePc);
    }
    else {
      std::cout << "Warning: no eMc file given with --eMc. Using hard-coded extrinsic values." << std::endl;
    }
    vpHomogeneousMatrix eMc(ePc);
    std::cout << "eMc:\n" << eMc << std::endl;

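    // The color stream intrinsics are read from the camera; the AprilTag
    // detector needs them, together with the tag size, to estimate the tag
    // pose cMo.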
    // Get camera intrinsics
    vpCameraParameters cam =
      rs.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithDistortion);
    std::cout << "cam:\n" << cam << std::endl;

    vpImage<unsigned char> I(height, width);

#if defined(VISP_HAVE_X11)
    vpDisplayX dc(I, 10, 10, "Color image");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI dc(I, 10, 10, "Color image");
#endif

    vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
    vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
    // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
    vpDetectorAprilTag detector(tagFamily);
    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
    detector.setDisplayTag(display_tag);
    detector.setAprilTagQuadDecimate(opt_quad_decimate);

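    // cMo: current tag pose estimated by the detector; cdMo: desired tag
    // pose; cdMc: remaining displacement between the desired and current
    // camera frames, which the PBVS task drives to the identity; oMo:
    // optional PI flip of the tag frame, set on the first loop iteration to
    // avoid a half-turn of the camera (see below).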
    // Servo
    vpHomogeneousMatrix cdMc, cMo, oMo;

    // Desired pose to reach
    vpHomogeneousMatrix cdMo(vpTranslationVector(0, 0, opt_tagSize * 3), // 3 times tag width along camera z axis
                             vpRotationMatrix({ 1, 0, 0, 0, -1, 0, 0, 0, -1 }));
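    // That is, at convergence the camera looks at the tag from a distance of
    // three tag widths; the rotation is a PI rotation about the x axis, so
    // the tag z axis points toward the camera.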

    cdMc = cdMo * cMo.inverse();
    vpFeatureTranslation t(vpFeatureTranslation::cdMc);
    vpFeatureThetaU tu(vpFeatureThetaU::cdRc);
    t.buildFrom(cdMc);
    tu.buildFrom(cdMc);

    vpFeatureTranslation td(vpFeatureTranslation::cdMc);
    vpFeatureThetaU tud(vpFeatureThetaU::cdRc);

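    // Eye-in-hand PBVS: the task error is built from two features of cdMc,
    // the translation t and the theta-u rotation tu. The desired features td
    // and tud keep their default zero value, so the error vanishes when cdMc
    // is the identity.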
    vpServo task;
    task.addFeature(t, td);
    task.addFeature(tu, tud);
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);

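    // With --adaptive_gain the gain goes from 0.4 for a large error up to 1.5
    // near convergence (slope 30 at zero error), which speeds up the end of
    // the motion without producing large velocities when the error is big.
    // Otherwise a constant gain of 0.5 is used.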
    if (opt_adaptive_gain) {
      vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=1.5, lambda(oo)=0.4 and lambda'(0)=30
      task.setLambda(lambda);
    }
    else {
      task.setLambda(0.5);
    }

    vpPlot *plotter = nullptr;
    int iter_plot = 0;

    if (opt_plot) {
      plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
                           "Real time curves plotter");
      plotter->setTitle(0, "Visual features error");
      plotter->setTitle(1, "Camera velocities");
      plotter->initGraph(0, 6);
      plotter->initGraph(1, 6);
      plotter->setLegend(0, 0, "error_feat_tx");
      plotter->setLegend(0, 1, "error_feat_ty");
      plotter->setLegend(0, 2, "error_feat_tz");
      plotter->setLegend(0, 3, "error_feat_theta_ux");
      plotter->setLegend(0, 4, "error_feat_theta_uy");
      plotter->setLegend(0, 5, "error_feat_theta_uz");
      plotter->setLegend(1, 0, "vc_x");
      plotter->setLegend(1, 1, "vc_y");
      plotter->setLegend(1, 2, "vc_z");
      plotter->setLegend(1, 3, "wc_x");
      plotter->setLegend(1, 4, "wc_y");
      plotter->setLegend(1, 5, "wc_z");
    }

    bool final_quit = false;
    bool has_converged = false;
    bool send_velocities = false;
    bool servo_started = false;
    std::vector<vpImagePoint> *traj_vip = nullptr; // To memorize point trajectory

    static double t_init_servo = vpTime::measureTimeMs();

    robot.set_eMc(eMc); // Set location of the camera wrt end-effector frame
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

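    // Servo loop: grab an image, detect the tag, update the PBVS features
    // from cdMc, compute the camera velocity and send it to the robot, until
    // the pose error falls below the convergence thresholds or the user
    // right-clicks in the image.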
    while (!has_converged && !final_quit) {
      double t_start = vpTime::measureTimeMs();

      rs.acquire(I);

      vpDisplay::display(I);

      std::vector<vpHomogeneousMatrix> cMo_vec;
      detector.detect(I, opt_tagSize, cam, cMo_vec);

      std::stringstream ss;
      ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
      vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);

      vpColVector v_c(6);

      // Only one tag is detected
      if (cMo_vec.size() == 1) {
        cMo = cMo_vec[0];

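        // The tag can be reached with two orientations that differ by a PI
        // rotation about its z axis. On the first iteration, keep the
        // candidate desired pose requiring the smaller rotation, so that the
        // camera does not perform a half-turn around the tag.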
        static bool first_time = true;
        if (first_time) {
          // Introduce security wrt tag positioning in order to avoid PI rotation
          std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
          v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
          for (size_t i = 0; i < 2; i++) {
            v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
          }
          if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
            oMo = v_oMo[0];
          }
          else {
            std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
            oMo = v_oMo[1]; // Introduce PI rotation
          }
        }

        // Update visual features
        cdMc = cdMo * oMo * cMo.inverse();
        t.buildFrom(cdMc);
        tu.buildFrom(cdMc);

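        // With --task_sequencing the control law is smoothly ramped in from
        // zero (computeControlLaw(t)), avoiding a velocity step when the
        // operator starts the servo; t_init_servo is reset until velocities
        // are actually sent. Otherwise the full control law is applied
        // directly.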
        if (opt_task_sequencing) {
          if (!servo_started) {
            if (send_velocities) {
              servo_started = true;
            }
            t_init_servo = vpTime::measureTimeMs();
          }
          v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
        }
        else {
          v_c = task.computeControlLaw();
        }

        // Display desired and current pose features
        vpDisplay::displayFrame(I, cdMo * oMo, cam, opt_tagSize / 1.5, vpColor::yellow, 2);
        vpDisplay::displayFrame(I, cMo, cam, opt_tagSize / 2, vpColor::none, 3);
        // Get tag corners
        std::vector<vpImagePoint> vip = detector.getPolygon(0);
        // Get the tag cog corresponding to the projection of the tag frame in the image
        vip.push_back(detector.getCog(0));
        // Display the trajectory of the points
        if (first_time) {
          traj_vip = new std::vector<vpImagePoint>[vip.size()];
        }
        display_point_trajectory(I, vip, traj_vip);

        if (opt_plot) {
          plotter->plot(0, iter_plot, task.getError());
          plotter->plot(1, iter_plot, v_c);
          iter_plot++;
        }

        if (opt_verbose) {
          std::cout << "v_c: " << v_c.t() << std::endl;
        }

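        // Convergence test: error_tr is the norm of the remaining camera
        // translation in [m] and error_tu the remaining rotation angle in
        // [deg]; the servo stops once both are below their thresholds
        // (unless --no-convergence-threshold was given).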
        vpTranslationVector cd_t_c = cdMc.getTranslationVector();
        vpThetaUVector cd_tu_c = cdMc.getThetaUVector();
        double error_tr = sqrt(cd_t_c.sumSquare());
        double error_tu = vpMath::deg(sqrt(cd_tu_c.sumSquare()));

        ss.str("");
        ss << "error_t: " << error_tr;
        vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
        ss.str("");
        ss << "error_tu: " << error_tu;
        vpDisplay::displayText(I, 40, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);

        if (opt_verbose)
          std::cout << "error translation: " << error_tr << " ; error rotation: " << error_tu << std::endl;

        if (error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu) {
          has_converged = true;
          std::cout << "Servo task has converged" << std::endl;
          vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
        }

        if (first_time) {
          first_time = false;
        }
      } // end if (cMo_vec.size() == 1)
      else {
        v_c = 0;
      }

      if (!send_velocities) {
        v_c = 0;
      }

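      // The velocity twist computed by the task is expressed in the camera
      // frame; thanks to the eMc transform set above, vpRobotFranka converts
      // it into the corresponding joint motion.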
      // Send to the robot
      robot.setVelocity(vpRobot::CAMERA_FRAME, v_c);

      ss.str("");
      ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
      vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
      vpDisplay::flush(I);

      vpMouseButton::vpMouseButtonType button;
      if (vpDisplay::getClick(I, button, false)) {
        switch (button) {
        case vpMouseButton::button1:
          send_velocities = !send_velocities;
          break;

        case vpMouseButton::button3:
          final_quit = true;
          v_c = 0;
          break;

        default:
          break;
        }
      }
    }
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState(vpRobot::STATE_STOP);

    if (opt_plot && plotter != nullptr) {
      delete plotter;
      plotter = nullptr;
    }

    if (!final_quit) {
      while (!final_quit) {
        rs.acquire(I);
        vpDisplay::display(I);

        vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
        vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);

        if (vpDisplay::getClick(I, false)) {
          final_quit = true;
        }

        vpDisplay::flush(I);
      }
    }

    if (traj_vip) {
      delete[] traj_vip;
    }
  }
  catch (const vpException &e) {
    std::cout << "ViSP exception: " << e.what() << std::endl;
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState(vpRobot::STATE_STOP);
    return EXIT_FAILURE;
  }
  catch (const franka::NetworkException &e) {
    std::cout << "Franka network exception: " << e.what() << std::endl;
    std::cout << "Check if you are connected to the Franka robot"
      << " or if you specified the right IP using the --ip command line option (default: 192.168.1.1)." << std::endl;
    return EXIT_FAILURE;
  }
  catch (const std::exception &e) {
    std::cout << "Franka exception: " << e.what() << std::endl;
    return EXIT_FAILURE;
  }

  return EXIT_SUCCESS;
}
#else
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if !defined(VISP_HAVE_FRANKA)
  std::cout << "Install libfranka." << std::endl;
#endif
  return EXIT_SUCCESS;
}
#endif