servoUniversalRobotsPBVS.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Pose-based visual servoing (PBVS) of a Universal Robots arm using an
 * AprilTag target and a RealSense camera.
 *
 *****************************************************************************/

#include <iostream>

#include <visp3/core/vpCameraParameters.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpRobotUniversalRobots.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/visual_features/vpFeatureThetaU.h>
#include <visp3/visual_features/vpFeatureTranslation.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

#if defined(VISP_HAVE_REALSENSE2) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_UR_RTDE)

// Display the image-plane trajectory of the tracked tag corners and center of gravity,
// appending a new point to each trajectory only when it moved by more than one pixel.
void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
                              std::vector<vpImagePoint> *traj_vip)
{
  for (size_t i = 0; i < vip.size(); i++) {
    if (traj_vip[i].size()) {
      // Add the point only if distance with the previous > 1 pixel
      if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
        traj_vip[i].push_back(vip[i]);
      }
    }
    else {
      traj_vip[i].push_back(vip[i]);
    }
  }
  for (size_t i = 0; i < vip.size(); i++) {
    for (size_t j = 1; j < traj_vip[i].size(); j++) {
      vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
    }
  }
}

int main(int argc, char **argv)
{
  double opt_tagSize = 0.120;
  std::string opt_robot_ip = "192.168.0.100";
  std::string opt_eMc_filename = "";
  bool display_tag = true;
  int opt_quad_decimate = 2;
  bool opt_verbose = false;
  bool opt_plot = false;
  bool opt_adaptive_gain = false;
  bool opt_task_sequencing = false;
  double convergence_threshold_t = 0.0005; // Value in [m]
  double convergence_threshold_tu = 0.5;   // Value in [deg]

  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
      opt_tagSize = std::stod(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--ip" && i + 1 < argc) {
      opt_robot_ip = std::string(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--eMc" && i + 1 < argc) {
      opt_eMc_filename = std::string(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--verbose") {
      opt_verbose = true;
    }
    else if (std::string(argv[i]) == "--plot") {
      opt_plot = true;
    }
    else if (std::string(argv[i]) == "--adaptive_gain") {
      opt_adaptive_gain = true;
    }
    else if (std::string(argv[i]) == "--task_sequencing") {
      opt_task_sequencing = true;
    }
    else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
      opt_quad_decimate = std::stoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--no-convergence-threshold") {
      convergence_threshold_t = 0.;
      convergence_threshold_tu = 0.;
    }
    else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      std::cout
        << argv[0] << " [--ip <default " << opt_robot_ip << ">] [--tag_size <marker size in meter; default "
        << opt_tagSize << ">] [--eMc <eMc extrinsic file>] "
        << "[--quad_decimate <decimation; default " << opt_quad_decimate
        << ">] [--adaptive_gain] [--plot] [--task_sequencing] [--no-convergence-threshold] [--verbose] [--help] [-h]"
        << "\n";
      return EXIT_SUCCESS;
    }
  }
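
  // Example invocation (illustrative values only; the binary name follows the source file name):
  //   ./servoUniversalRobotsPBVS --ip 192.168.0.100 --tag_size 0.120 --adaptive_gain --plot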

  vpRobotUniversalRobots robot;

  try {
    robot.connect(opt_robot_ip);

    std::cout << "WARNING: This example will move the robot! "
              << "Please make sure to have the user stop button at hand!" << std::endl
              << "Press Enter to continue..." << std::endl;
    std::cin.ignore();

    /*
     * Move to a safe position
     */
    vpColVector q(6, 0);
    q[0] = -vpMath::rad(16);
    q[1] = -vpMath::rad(120);
    q[2] = vpMath::rad(120);
    q[3] = -vpMath::rad(90);
    q[4] = -vpMath::rad(90);
    q[5] = 0;
    std::cout << "Move to joint position: " << q.t() << std::endl;
    robot.setRobotState(vpRobot::STATE_POSITION_CONTROL);
    robot.setPosition(vpRobot::JOINT_STATE, q);

    vpRealSense2 rs;
    rs2::config config;
    unsigned int width = 640, height = 480;
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
    config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
    rs.open(config);
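    // Note: only the grey-level image acquired below is used by this example; the depth and
    // infrared streams are enabled here but not otherwise exploited.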

    // Get camera extrinsics
    vpPoseVector ePc;
    // Set camera extrinsics default values
    ePc[0] = -0.0312543;
    ePc[1] = -0.0584638;
    ePc[2] = 0.0309834;
    ePc[3] = -0.00506562;
    ePc[4] = -0.00293325;
    ePc[5] = 0.0117901;

    // If provided, read camera extrinsics from --eMc <file>
    if (!opt_eMc_filename.empty()) {
      ePc.loadYAML(opt_eMc_filename, ePc);
    }
    else {
      std::cout << "Warning, opt_eMc_filename is empty! Using hard-coded values."
                << "\n";
    }
    vpHomogeneousMatrix eMc(ePc);
    std::cout << "eMc:\n" << eMc << "\n";
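    // The file passed with --eMc (read above) is expected to hold the end-effector to camera
    // pose as a 6x1 pose vector (tx ty tz in meter, then a theta-u rotation in radian),
    // typically the output of a hand-eye calibration saved with vpPoseVector::saveYAML().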

    // Get camera intrinsics
    vpCameraParameters cam =
      rs.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithDistortion);
    std::cout << "cam:\n" << cam << "\n";

    vpImage<unsigned char> I(height, width);

#if defined(VISP_HAVE_X11)
    vpDisplayX dc(I, 10, 10, "Color image");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI dc(I, 10, 10, "Color image");
#endif

    vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
    vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
    // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
    vpDetectorAprilTag detector(tagFamily);
    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
    detector.setDisplayTag(display_tag);
    detector.setAprilTagQuadDecimate(opt_quad_decimate);

    // Servo
    vpHomogeneousMatrix cdMc, cMo, oMo;

    // Desired pose to reach
    vpHomogeneousMatrix cdMo(vpTranslationVector(0, 0, opt_tagSize * 3), // 3 times the tag size along the camera z axis
                             vpRotationMatrix({ 1, 0, 0, 0, -1, 0, 0, 0, -1 }));

    cdMc = cdMo * cMo.inverse();
    vpFeatureTranslation t(vpFeatureTranslation::cdMc);
    vpFeatureThetaU tu(vpFeatureThetaU::cdRc);
    t.buildFrom(cdMc);
    tu.buildFrom(cdMc);

    vpFeatureTranslation td(vpFeatureTranslation::cdMc);
    vpFeatureThetaU tud(vpFeatureThetaU::cdRc);

    vpServo task;
    task.addFeature(t, td);
    task.addFeature(tu, tud);
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);

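    // PBVS recap: the current features s = (c*t_c, theta u) are built from cdMc and the desired
    // features td, tud are zero (camera frame coinciding with the desired frame), so the task
    // error is e = s. With vpServo::CURRENT, computeControlLaw() used in the loop below returns
    //   v_c = -lambda * L_e^+ * e
    // i.e. the camera velocity, expressed in the camera frame, that drives the current camera
    // frame toward the desired one.
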
    if (opt_adaptive_gain) {
      vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=1.5, lambda(oo)=0.4 and lambda'(0)=30
      task.setLambda(lambda);
    }
    else {
      task.setLambda(0.5);
    }
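
    // For reference, the adaptive gain follows
    //   lambda(e) = (lambda_0 - lambda_inf) * exp(-(lambda'_0 / (lambda_0 - lambda_inf)) * e) + lambda_inf
    // where e is a norm of the task error: the gain tends to lambda_inf for large errors
    // (limiting the commanded velocity at the start) and reaches lambda_0 near convergence
    // (avoiding a slow final approach).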

    vpPlot *plotter = nullptr;
    int iter_plot = 0;

    if (opt_plot) {
      plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
                           "Real time curves plotter");
      plotter->setTitle(0, "Visual features error");
      plotter->setTitle(1, "Camera velocities");
      plotter->initGraph(0, 6);
      plotter->initGraph(1, 6);
      plotter->setLegend(0, 0, "error_feat_tx");
      plotter->setLegend(0, 1, "error_feat_ty");
      plotter->setLegend(0, 2, "error_feat_tz");
      plotter->setLegend(0, 3, "error_feat_theta_ux");
      plotter->setLegend(0, 4, "error_feat_theta_uy");
      plotter->setLegend(0, 5, "error_feat_theta_uz");
      plotter->setLegend(1, 0, "vc_x");
      plotter->setLegend(1, 1, "vc_y");
      plotter->setLegend(1, 2, "vc_z");
      plotter->setLegend(1, 3, "wc_x");
      plotter->setLegend(1, 4, "wc_y");
      plotter->setLegend(1, 5, "wc_z");
    }

    bool final_quit = false;
    bool has_converged = false;
    bool send_velocities = false;
    bool servo_started = false;
    std::vector<vpImagePoint> *traj_vip = nullptr; // To memorize point trajectory

    static double t_init_servo = vpTime::measureTimeMs();

    robot.set_eMc(eMc); // Set location of the camera wrt end-effector frame
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

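    // set_eMc() gives the robot class the camera pose wrt the end-effector; this is what lets
    // setVelocity(vpRobot::CAMERA_FRAME, v_c) in the loop below convert the camera-frame twist
    // v_c into the corresponding end-effector motion.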
    while (!has_converged && !final_quit) {
      double t_start = vpTime::measureTimeMs();

      rs.acquire(I);

      vpDisplay::display(I);

      std::vector<vpHomogeneousMatrix> cMo_vec;
      detector.detect(I, opt_tagSize, cam, cMo_vec);

      std::stringstream ss;
      ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
      vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);

      vpColVector v_c(6);

      // Only one tag is detected
      if (cMo_vec.size() == 1) {
        cMo = cMo_vec[0];

        static bool first_time = true;
        if (first_time) {
          // Introduce security wrt tag positioning in order to avoid PI rotation
          std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
          v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
          for (size_t i = 0; i < 2; i++) {
            v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
          }
          if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
            oMo = v_oMo[0];
          }
          else {
            std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
            oMo = v_oMo[1]; // Introduce PI rotation
          }
        }
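        // Rationale: both candidate object frames (identity and a pi rotation around the tag
        // z axis) are compared once, and the one yielding the smallest residual rotation between
        // the current and desired camera poses is kept. This prevents the servo from commanding
        // an unnecessary half-turn of the camera when the tag happens to be placed upside down.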

        // Update visual features
        cdMc = cdMo * oMo * cMo.inverse();
        t.buildFrom(cdMc);
        tu.buildFrom(cdMc);

        if (opt_task_sequencing) {
          if (!servo_started) {
            if (send_velocities) {
              servo_started = true;
            }
            t_init_servo = vpTime::measureTimeMs();
          }
          v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
        }
        else {
          v_c = task.computeControlLaw();
        }
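        // With --task_sequencing, the computeControlLaw(t) overload is used so that the commanded
        // velocity starts from zero at t = 0 and smoothly reaches the value of the classical law,
        // avoiding a velocity discontinuity when the servo is started.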

        // Display desired and current pose features
        vpDisplay::displayFrame(I, cdMo * oMo, cam, opt_tagSize / 1.5, vpColor::yellow, 2);
        vpDisplay::displayFrame(I, cMo, cam, opt_tagSize / 2, vpColor::none, 3);
        // Get tag corners
        std::vector<vpImagePoint> vip = detector.getPolygon(0);
        // Get the tag cog corresponding to the projection of the tag frame origin in the image
        vip.push_back(detector.getCog(0));
        // Display the trajectory of the points
        if (first_time) {
          traj_vip = new std::vector<vpImagePoint>[vip.size()];
        }
        display_point_trajectory(I, vip, traj_vip);

        if (opt_plot) {
          plotter->plot(0, iter_plot, task.getError());
          plotter->plot(1, iter_plot, v_c);
          iter_plot++;
        }

        if (opt_verbose) {
          std::cout << "v_c: " << v_c.t() << std::endl;
        }

        vpTranslationVector cd_t_c = cdMc.getTranslationVector();
        vpThetaUVector cd_tu_c = cdMc.getThetaUVector();
        double error_tr = sqrt(cd_t_c.sumSquare());
        double error_tu = vpMath::deg(sqrt(cd_tu_c.sumSquare()));

        ss.str("");
        ss << "error_t: " << error_tr;
        vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
        ss.str("");
        ss << "error_tu: " << error_tu;
        vpDisplay::displayText(I, 40, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);

        if (opt_verbose)
          std::cout << "error translation: " << error_tr << " ; error rotation: " << error_tu << std::endl;

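        // error_tr is the norm of the residual translation c*t_c in meter and error_tu the
        // residual rotation angle in degree; convergence is declared once both drop under the
        // thresholds (0.5 mm and 0.5 deg, unless --no-convergence-threshold is used).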
        if (error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu) {
          has_converged = true;
          std::cout << "Servo task has converged" << std::endl;
          vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
        }

        if (first_time) {
          first_time = false;
        }
      } // end if (cMo_vec.size() == 1)
      else {
        v_c = 0;
      }

      if (!send_velocities) {
        v_c = 0;
      }

      // Send to the robot
      robot.setVelocity(vpRobot::CAMERA_FRAME, v_c);

      ss.str("");
      ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
      vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
      vpDisplay::flush(I);

      vpMouseButton::vpMouseButtonType button;
      if (vpDisplay::getClick(I, button, false)) {
        switch (button) {
        case vpMouseButton::button1:
          send_velocities = !send_velocities;
          break;

        case vpMouseButton::button3:
          final_quit = true;
          v_c = 0;
          break;

        default:
          break;
        }
      }
    }
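
    // Note: the loop above is paced by rs.acquire(), i.e. roughly the 30 Hz frame rate requested
    // in the RealSense configuration, so the velocity command is refreshed at about that rate.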
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState(vpRobot::STATE_STOP);

    if (opt_plot && plotter != nullptr) {
      delete plotter;
      plotter = nullptr;
    }

    if (!final_quit) {
      while (!final_quit) {
        rs.acquire(I);
        vpDisplay::display(I);

        vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
        vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);

        if (vpDisplay::getClick(I, false)) {
          final_quit = true;
        }

        vpDisplay::flush(I);
      }
    }

    if (traj_vip) {
      delete[] traj_vip;
    }
  }
  catch (const vpException &e) {
    std::cout << "ViSP exception: " << e.what() << std::endl;
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState(vpRobot::STATE_STOP);
    return EXIT_FAILURE;
  }
  catch (const std::exception &e) {
    std::cout << "ur_rtde exception: " << e.what() << std::endl;
    return EXIT_FAILURE;
  }

  return EXIT_SUCCESS;
}
#else
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if !defined(VISP_HAVE_UR_RTDE)
  std::cout << "ViSP is not built with libur_rtde 3rd party used to control a robot from Universal Robots..."
            << std::endl;
#endif
  return EXIT_SUCCESS;
}
#endif