Visual Servoing Platform  version 3.6.1 under development (2025-02-18)
servoUniversalRobotsIBVS.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * tests the control law
33  * eye-in-hand control
34  * velocity computed in the camera frame
35  *
36 *****************************************************************************/
59 #include <iostream>
60 
61 #include <visp3/core/vpCameraParameters.h>
62 #include <visp3/core/vpConfig.h>
63 #include <visp3/detection/vpDetectorAprilTag.h>
64 #include <visp3/gui/vpDisplayFactory.h>
65 #include <visp3/gui/vpPlot.h>
66 #include <visp3/io/vpImageIo.h>
67 #include <visp3/robot/vpRobotUniversalRobots.h>
68 #include <visp3/sensor/vpRealSense2.h>
69 #include <visp3/visual_features/vpFeatureBuilder.h>
70 #include <visp3/visual_features/vpFeaturePoint.h>
71 #include <visp3/vs/vpServo.h>
72 #include <visp3/vs/vpServoDisplay.h>
73 
74 #if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_UR_RTDE)
75 
76 #ifdef ENABLE_VISP_NAMESPACE
77 using namespace VISP_NAMESPACE_NAME;
78 #endif
79 
80 void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
81  std::vector<vpImagePoint> *traj_vip)
82 {
83  for (size_t i = 0; i < vip.size(); i++) {
84  if (traj_vip[i].size()) {
85  // Add the point only if distance with the previous > 1 pixel
86  if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
87  traj_vip[i].push_back(vip[i]);
88  }
89  }
90  else {
91  traj_vip[i].push_back(vip[i]);
92  }
93  }
94  for (size_t i = 0; i < vip.size(); i++) {
95  for (size_t j = 1; j < traj_vip[i].size(); j++) {
96  vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
97  }
98  }
99 }
100 
// Eye-in-hand image-based visual servoing (IBVS): a Universal Robots arm with a
// wrist-mounted RealSense camera servos on the 4 corners of an AprilTag until
// the visual feature error drops below a convergence threshold. Camera-frame
// velocities are computed by a vpServo task; features, errors and velocities
// are displayed and optionally plotted.
// NOTE(review): this listing is a documentation dump with gaps in the original
// numbering — several statements (e.g. the `robot` object declaration, servo
// type selection, robot state switches, setVelocity call, mouse-button
// handling) are absent here. Verify against the full source file.
101 int main(int argc, char **argv)
102 {
103  double opt_tagSize = 0.120;
104  std::string opt_robot_ip = "192.168.0.100";
105  std::string opt_eMc_filename = "";
106  bool display_tag = true;
107  int opt_quad_decimate = 2;
108  bool opt_verbose = false;
109  bool opt_plot = false;
110  bool opt_adaptive_gain = false;
111  bool opt_task_sequencing = false;
112  double convergence_threshold = 0.00005;
113 
// Parse command-line options; any option taking a value reads argv[i + 1].
114  for (int i = 1; i < argc; i++) {
115  if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
116  opt_tagSize = std::stod(argv[i + 1]);
117  }
118  else if (std::string(argv[i]) == "--ip" && i + 1 < argc) {
119  opt_robot_ip = std::string(argv[i + 1]);
120  }
121  else if (std::string(argv[i]) == "--eMc" && i + 1 < argc) {
122  opt_eMc_filename = std::string(argv[i + 1]);
123  }
124  else if (std::string(argv[i]) == "--verbose") {
125  opt_verbose = true;
126  }
127  else if (std::string(argv[i]) == "--plot") {
128  opt_plot = true;
129  }
// NOTE(review): option name is misspelled ("adpative"); it matches the help
// text below, but consider renaming to "--adaptive-gain" everywhere at once.
130  else if (std::string(argv[i]) == "--adpative-gain") {
131  opt_adaptive_gain = true;
132  }
133  else if (std::string(argv[i]) == "--task-sequencing") {
134  opt_task_sequencing = true;
135  }
136  else if (std::string(argv[i]) == "--quad-decimate" && i + 1 < argc) {
137  opt_quad_decimate = std::stoi(argv[i + 1]);
138  }
139  else if (std::string(argv[i]) == "--no-convergence-threshold") {
140  convergence_threshold = 0.;
141  }
142  else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
143  std::cout
144  << argv[0] << " [--ip <default " << opt_robot_ip << ">] [--tag-size <marker size in meter; default "
145  << opt_tagSize << ">] [--eMc <eMc extrinsic file>] "
146  << "[--quad-decimate <decimation; default " << opt_quad_decimate
147  << ">] [--adpative-gain] [--plot] [--task-sequencing] [--no-convergence-threshold] [--verbose] [--help] [-h]"
148  << "\n";
149  return EXIT_SUCCESS;
150  }
151  }
152 
// NOTE(review): original line 153 (absent from this dump) declares the robot
// driver object used below — presumably `vpRobotUniversalRobots robot;`.
154 
155 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
156  std::shared_ptr<vpDisplay> display;
157 #else
158  vpDisplay *display = nullptr;
159 #endif
160  try {
161  robot.connect(opt_robot_ip);
162 
163  std::cout << "WARNING: This example will move the robot! "
164  << "Please make sure to have the user stop button at hand!" << std::endl
165  << "Press Enter to continue..." << std::endl;
166  std::cin.ignore();
167 
168  /*
169  * Move to a safe position
170  */
171  vpColVector q(6, 0);
172  q[0] = -vpMath::rad(16);
173  q[1] = -vpMath::rad(120);
174  q[2] = vpMath::rad(120);
175  q[3] = -vpMath::rad(90);
176  q[4] = -vpMath::rad(90);
177  q[5] = 0;
178  std::cout << "Move to joint position: " << q.t() << std::endl;
// NOTE(review): original line 179 (absent here) presumably switches the robot
// to vpRobot::STATE_POSITION_CONTROL before this joint-space motion.
180  robot.setPosition(vpRobot::JOINT_STATE, q);
181 
// Configure the RealSense grabber: color, depth and infrared streams at 30 Hz.
182  vpRealSense2 rs;
183  rs2::config config;
184  unsigned int width = 640, height = 480;
185  config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
186  config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
187  config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
188  rs.open(config);
189 
190  // Get camera extrinsics
191  vpPoseVector ePc;
192  // Set camera extrinsics default values
193  ePc[0] = -0.0312543;
194  ePc[1] = -0.0584638;
195  ePc[2] = 0.0309834;
196  ePc[3] = -0.00506562;
197  ePc[4] = -0.00293325;
198  ePc[5] = 0.0117901;
199 
200  // If provided, read camera extrinsics from --eMc <file>
201  if (!opt_eMc_filename.empty()) {
202  ePc.loadYAML(opt_eMc_filename, ePc);
203  }
204  else {
205  std::cout << "Warning, opt_eMc_filename is empty! Use hard coded values."
206  << "\n";
207  }
208  vpHomogeneousMatrix eMc(ePc);
209  std::cout << "eMc:\n" << eMc << "\n";
210 
211  // Get camera intrinsics
// NOTE(review): the right-hand side (original line 213) is absent from this
// dump — presumably rs.getCameraParameters(RS2_STREAM_COLOR, ...).
212  vpCameraParameters cam =
214  std::cout << "cam:\n" << cam << "\n";
215 
216  vpImage<unsigned char> I(height, width);
217 
218 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
219  display = vpDisplayFactory::createDisplay(I, 10, 10, "Color image");
220 #else
221  display = vpDisplayFactory::allocateDisplay(I, 10, 10, "Color image");
222 #endif
223 
// NOTE(review): the declarations of `tagFamily` and `poseEstimationMethod`
// (original lines 224-225) are absent from this dump.
226  // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
227  vpDetectorAprilTag detector(tagFamily);
228  detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
229  detector.setDisplayTag(display_tag);
230  detector.setAprilTagQuadDecimate(opt_quad_decimate);
231 
232  // Servo
233  vpHomogeneousMatrix cdMc, cMo, oMo;
234 
235  // Desired pose used to compute the desired features
236  vpHomogeneousMatrix cdMo(vpTranslationVector(0, 0, opt_tagSize * 3), // 3 times tag with along camera z axis
237  vpRotationMatrix({ 1, 0, 0, 0, -1, 0, 0, 0, -1 }));
238 
239  // Create visual features
240  std::vector<vpFeaturePoint> p(4), pd(4); // We use 4 points
241 
242  // Define 4 3D points corresponding to the CAD model of the Apriltag
243  std::vector<vpPoint> point(4);
244  point[0].setWorldCoordinates(-opt_tagSize / 2., -opt_tagSize / 2., 0);
245  point[1].setWorldCoordinates(opt_tagSize / 2., -opt_tagSize / 2., 0);
246  point[2].setWorldCoordinates(opt_tagSize / 2., opt_tagSize / 2., 0);
247  point[3].setWorldCoordinates(-opt_tagSize / 2., opt_tagSize / 2., 0);
248 
249  vpServo task;
250  // Add the 4 visual feature points
251  for (size_t i = 0; i < p.size(); i++) {
252  task.addFeature(p[i], pd[i]);
253  }
// NOTE(review): original lines 254-255 (absent here) presumably select the
// servo type (EYEINHAND_CAMERA) and the interaction matrix type (CURRENT).
256 
// Fixed gain by default; adaptive gain (larger far from convergence) on demand.
257  if (opt_adaptive_gain) {
258  vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=1.5, lambda(oo)=0.4 and lambda'(0)=30
259  task.setLambda(lambda);
260  }
261  else {
262  task.setLambda(0.5);
263  }
264 
265  vpPlot *plotter = nullptr;
266  int iter_plot = 0;
267 
// Optional live plots: graph 0 = 8 feature-error curves (x,y of 4 points),
// graph 1 = 6 camera velocity components (3 translational + 3 rotational).
268  if (opt_plot) {
269  plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
270  "Real time curves plotter");
271  plotter->setTitle(0, "Visual features error");
272  plotter->setTitle(1, "Camera velocities");
273  plotter->initGraph(0, 8);
274  plotter->initGraph(1, 6);
275  plotter->setLegend(0, 0, "error_feat_p1_x");
276  plotter->setLegend(0, 1, "error_feat_p1_y");
277  plotter->setLegend(0, 2, "error_feat_p2_x");
278  plotter->setLegend(0, 3, "error_feat_p2_y");
279  plotter->setLegend(0, 4, "error_feat_p3_x");
280  plotter->setLegend(0, 5, "error_feat_p3_y");
281  plotter->setLegend(0, 6, "error_feat_p4_x");
282  plotter->setLegend(0, 7, "error_feat_p4_y");
283  plotter->setLegend(1, 0, "vc_x");
284  plotter->setLegend(1, 1, "vc_y");
285  plotter->setLegend(1, 2, "vc_z");
286  plotter->setLegend(1, 3, "wc_x");
287  plotter->setLegend(1, 4, "wc_y");
288  plotter->setLegend(1, 5, "wc_z");
289  }
290 
291  bool final_quit = false;
292  bool has_converged = false;
293  bool send_velocities = false;
294  bool servo_started = false;
295  std::vector<vpImagePoint> *traj_corners = nullptr; // To memorize point trajectory
296 
297  static double t_init_servo = vpTime::measureTimeMs();
298 
299  robot.set_eMc(eMc); // Set location of the camera wrt end-effector frame
// NOTE(review): original line 300 (absent here) presumably switches the robot
// to vpRobot::STATE_VELOCITY_CONTROL before entering the servo loop.
301 
// Main servo loop: one iteration = grab image, detect tag, compute v_c, send.
302  while (!has_converged && !final_quit) {
303  double t_start = vpTime::measureTimeMs();
304 
305  rs.acquire(I);
306 
308 
309  std::vector<vpHomogeneousMatrix> cMo_vec;
310  detector.detect(I, opt_tagSize, cam, cMo_vec);
311 
312  {
313  std::stringstream ss;
314  ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
315  vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
316  }
317 
318  vpColVector v_c(6);
319 
320  // Only one tag is detected
321  if (cMo_vec.size() == 1) {
322  cMo = cMo_vec[0];
323 
324  static bool first_time = true;
325  if (first_time) {
326  // Introduce security wrt tag positioning in order to avoid PI rotation
// Two candidate object frames (identity and a PI rotation about z); keep the
// one minimizing the residual rotation angle between current and desired pose.
327  std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
328  v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
329  for (size_t i = 0; i < 2; i++) {
330  v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
331  }
332  if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
333  oMo = v_oMo[0];
334  }
335  else {
336  std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
337  oMo = v_oMo[1]; // Introduce PI rotation
338  }
339 
340  // Compute the desired position of the features from the desired pose
341  for (size_t i = 0; i < point.size(); i++) {
342  vpColVector cP, p_;
343  point[i].changeFrame(cdMo * oMo, cP);
344  point[i].projection(cP, p_);
345 
346  pd[i].set_x(p_[0]);
347  pd[i].set_y(p_[1]);
348  pd[i].set_Z(cP[2]);
349  }
350  }
351 
352  // Get tag corners
353  std::vector<vpImagePoint> corners = detector.getPolygon(0);
354 
355  // Update visual features
356  for (size_t i = 0; i < corners.size(); i++) {
357  // Update the point feature from the tag corners location
358  vpFeatureBuilder::create(p[i], cam, corners[i]);
359  // Set the feature Z coordinate from the pose
360  vpColVector cP;
361  point[i].changeFrame(cMo, cP);
362 
363  p[i].set_Z(cP[2]);
364  }
365 
// Task sequencing ramps the velocity from zero; the start time is reset until
// the operator actually enables servoing (send_velocities).
366  if (opt_task_sequencing) {
367  if (!servo_started) {
368  if (send_velocities) {
369  servo_started = true;
370  }
371  t_init_servo = vpTime::measureTimeMs();
372  }
373  v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
374  }
375  else {
376  v_c = task.computeControlLaw();
377  }
378 
379  // Display the current and desired feature points in the image display
380  vpServoDisplay::display(task, cam, I);
381  for (size_t i = 0; i < corners.size(); i++) {
382  std::stringstream ss;
383  ss << i;
384  // Display current point indexes
385  vpDisplay::displayText(I, corners[i] + vpImagePoint(15, 15), ss.str(), vpColor::red);
386  // Display desired point indexes
387  vpImagePoint ip;
388  vpMeterPixelConversion::convertPoint(cam, pd[i].get_x(), pd[i].get_y(), ip);
389  vpDisplay::displayText(I, ip + vpImagePoint(15, 15), ss.str(), vpColor::red);
390  }
391  if (first_time) {
392  traj_corners = new std::vector<vpImagePoint>[corners.size()];
393  }
394  // Display the trajectory of the points used as features
395  display_point_trajectory(I, corners, traj_corners);
396 
397  if (opt_plot) {
398  plotter->plot(0, iter_plot, task.getError());
399  plotter->plot(1, iter_plot, v_c);
400  iter_plot++;
401  }
402 
403  if (opt_verbose) {
404  std::cout << "v_c: " << v_c.t() << std::endl;
405  }
406 
407  double error = task.getError().sumSquare();
408  std::stringstream ss;
409  ss << "error: " << error;
410  vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
411 
412  if (opt_verbose)
413  std::cout << "error: " << error << std::endl;
414 
// Converged when the sum of squared feature errors falls below the threshold
// (0 when --no-convergence-threshold: the loop then only ends on user quit).
415  if (error < convergence_threshold) {
416  has_converged = true;
417  std::cout << "Servo task has converged"
418  << "\n";
419  vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
420  }
421  if (first_time) {
422  first_time = false;
423  }
424  } // end if (cMo_vec.size() == 1)
425  else {
426  v_c = 0;
427  }
428 
// Safety: keep a zero velocity until the operator enables servoing (left click).
429  if (!send_velocities) {
430  v_c = 0;
431  }
432 
433  // Send to the robot
// NOTE(review): original line 434 (absent here) presumably sends v_c with
// robot.setVelocity(vpRobot::CAMERA_FRAME, v_c).
435 
436  {
437  std::stringstream ss;
438  ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
439  vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
440  }
441  vpDisplay::flush(I);
442 
// NOTE(review): the declaration of `button` (original line 443) and the
// `case vpMouseButton::button1:` / `button3:` labels (original lines 446 and
// 450) are absent from this dump: left click toggles servoing, right quits.
444  if (vpDisplay::getClick(I, button, false)) {
445  switch (button) {
447  send_velocities = !send_velocities;
448  break;
449 
451  final_quit = true;
452  v_c = 0;
453  break;
454 
455  default:
456  break;
457  }
458  }
459  }
460  std::cout << "Stop the robot " << std::endl;
// NOTE(review): original line 461 (absent here) presumably stops the robot
// with robot.setRobotState(vpRobot::STATE_STOP).
462 
463  if (opt_plot && plotter != nullptr) {
464  delete plotter;
465  plotter = nullptr;
466  }
467 
// After convergence, keep refreshing the view until the user clicks to quit.
468  if (!final_quit) {
469  while (!final_quit) {
470  rs.acquire(I);
472 
473  vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
474  vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
475 
476  if (vpDisplay::getClick(I, false)) {
477  final_quit = true;
478  }
479 
480  vpDisplay::flush(I);
481  }
482  }
483  if (traj_corners) {
484  delete[] traj_corners;
485  }
486  }
487  catch (const vpException &e) {
488  std::cout << "ViSP exception: " << e.what() << std::endl;
489  std::cout << "Stop the robot " << std::endl;
491 #if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
492  if (display != nullptr) {
493  delete display;
494  }
495 #endif
496  return EXIT_FAILURE;
497  }
498  catch (const std::exception &e) {
499  std::cout << "ur_rtde exception: " << e.what() << std::endl;
500 #if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
501  if (display != nullptr) {
502  delete display;
503  }
504 #endif
505  return EXIT_FAILURE;
506  }
507 
508 #if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
509  if (display != nullptr) {
510  delete display;
511  }
512 #endif
513  return EXIT_SUCCESS;
514 }
515 #else
// Fallback when the example cannot be built: report every missing dependency.
// The real example (guarded above) requires librealsense2, a ViSP display
// backend and libur_rtde; previously a missing display backend produced no
// message at all, leaving the user without a hint.
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if !defined(VISP_HAVE_DISPLAY)
  std::cout << "ViSP is not build with a display capability (X11, GDI, OpenCV...) required by this example..."
            << std::endl;
#endif
#if !defined(VISP_HAVE_UR_RTDE)
  std::cout << "ViSP is not build with libur_rtde 3rd party used to control a robot from Universal Robots..."
            << std::endl;
#endif
  return EXIT_SUCCESS;
}
527 #endif
Adaptive gain computation.
static bool loadYAML(const std::string &filename, vpArray2D< Type > &A, char *header=nullptr)
Definition: vpArray2D.h:868
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
Implementation of column vector and the associated operations.
Definition: vpColVector.h:191
double sumSquare() const
static const vpColor red
Definition: vpColor.h:198
static const vpColor green
Definition: vpColor.h:201
@ TAG_36h11
AprilTag 36h11 pattern (recommended)
Class that defines generic functionalities for display.
Definition: vpDisplay.h:178
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition: vpException.h:60
const char * what() const
Definition: vpException.cpp:71
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpImagePoint &t)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:82
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
unsigned int getWidth() const
Definition: vpImage.h:242
static double rad(double deg)
Definition: vpMath.h:129
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition: vpPlot.h:112
void initGraph(unsigned int graphNum, unsigned int curveNbr)
Definition: vpPlot.cpp:203
void setLegend(unsigned int graphNum, unsigned int curveNum, const std::string &legend)
Definition: vpPlot.cpp:552
void plot(unsigned int graphNum, unsigned int curveNum, double x, double y)
Definition: vpPlot.cpp:270
void setTitle(unsigned int graphNum, const std::string &title)
Definition: vpPlot.cpp:510
Implementation of a pose vector and operations on poses.
Definition: vpPoseVector.h:203
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel) VP_OVERRIDE
@ JOINT_STATE
Definition: vpRobot.h:82
@ CAMERA_FRAME
Definition: vpRobot.h:84
@ STATE_POSITION_CONTROL
Initialize the position controller.
Definition: vpRobot.h:68
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition: vpRobot.h:67
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition: vpRobot.h:66
virtual vpRobotStateType setRobotState(const vpRobot::vpRobotStateType newState)
Definition: vpRobot.cpp:202
Implementation of a rotation matrix and operations on such kind of matrices.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
Definition: vpServo.cpp:380
@ EYEINHAND_CAMERA
Definition: vpServo.h:161
void addFeature(vpBasicFeature &s_cur, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
Definition: vpServo.cpp:331
void setLambda(double c)
Definition: vpServo.h:991
void setServo(const vpServoType &servo_type)
Definition: vpServo.cpp:134
vpColVector getError() const
Definition: vpServo.h:515
vpColVector computeControlLaw()
Definition: vpServo.cpp:705
@ CURRENT
Definition: vpServo.h:202
Class that consider the case of a translation vector.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()