// Visual Servoing Platform (ViSP) version 3.6.1 under development (2024-12-17)
// mbot-apriltag-ibvs.cpp
#include <visp3/core/vpConfig.h>

#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpMomentAreaNormalized.h>
#include <visp3/core/vpMomentBasic.h>
#include <visp3/core/vpMomentCentered.h>
#include <visp3/core/vpMomentDatabase.h>
#include <visp3/core/vpMomentGravityCenter.h>
#include <visp3/core/vpMomentGravityCenterNormalized.h>
#include <visp3/core/vpMomentObject.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/core/vpSerial.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpUnicycle.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/visual_features/vpFeatureMomentAreaNormalized.h>
#include <visp3/visual_features/vpFeatureMomentGravityCenterNormalized.h>
#include <visp3/vs/vpServo.h>

23 int main(int argc, const char **argv)
24 {
25 #if defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_V4L2)
26 #ifdef ENABLE_VISP_NAMESPACE
27  using namespace VISP_NAMESPACE_NAME;
28 #endif
29 
30  int device = 0;
32  double tagSize = 0.065;
33  float quad_decimate = 4.0;
34  int nThreads = 2;
35  std::string intrinsic_file = "";
36  std::string camera_name = "";
37  bool display_on = false;
38  bool serial_off = false;
39 #if defined(VISP_HAVE_DISPLAY)
40  bool display_tag = false;
41  bool save_image = false; // Only possible if display_on = true
42 #endif
43 
44  for (int i = 1; i < argc; i++) {
45  if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
46  tagSize = std::atof(argv[i + 1]);
47  }
48  else if (std::string(argv[i]) == "--input" && i + 1 < argc) {
49  device = std::atoi(argv[i + 1]);
50  }
51  else if (std::string(argv[i]) == "--quad-decimate" && i + 1 < argc) {
52  quad_decimate = (float)atof(argv[i + 1]);
53  }
54  else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
55  nThreads = std::atoi(argv[i + 1]);
56  }
57  else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
58  intrinsic_file = std::string(argv[i + 1]);
59  }
60  else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
61  camera_name = std::string(argv[i + 1]);
62  }
63 #if defined(VISP_HAVE_DISPLAY)
64  else if (std::string(argv[i]) == "--display-tag") {
65  display_tag = true;
66  }
67  else if (std::string(argv[i]) == "--display-on") {
68  display_on = true;
69  }
70  else if (std::string(argv[i]) == "--save-image") {
71  save_image = true;
72  }
73 #endif
74  else if (std::string(argv[i]) == "--serial-off") {
75  serial_off = true;
76  }
77  else if (std::string(argv[i]) == "--tag-family" && i + 1 < argc) {
78  tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)std::atoi(argv[i + 1]);
79  }
80  else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
81  std::cout << "Usage: " << argv[0]
82  << " [--input <camera input>] [--tag-size <tag_size in m>]"
83  " [--quad-decimate <quad_decimate>] [--nthreads <nb>]"
84  " [--intrinsic <intrinsic file>] [--camera-name <camera name>]"
85  " [--tag-family <family> (0: TAG_36h11, 1: TAG_36h10, 2: TAG_36ARTOOLKIT, 3: TAG_25h9, 4: TAG_25h7, 5: TAG_16h5)]"
86  " [--display-tag]";
87 #if defined(VISP_HAVE_DISPLAY)
88  std::cout << " [--display-on] [--save-image]";
89 #endif
90  std::cout << " [--serial-off] [--help]" << std::endl;
91  return EXIT_SUCCESS;
92  }
93  }
94 
95  // Me Auriga led ring
96  // if serial com ok: led 1 green
97  // if exception: led 1 red
98  // if tag detected: led 2 green, else led 2 red
99  // if motor left: led 3 blue
100  // if motor right: led 4 blue
101 
102  vpSerial *serial = nullptr;
103  if (!serial_off) {
104  serial = new vpSerial("/dev/ttyAMA0", 115200);
105 
106  serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
107  serial->write("LED_RING=1,0,10,0\n"); // Switch on led 1 to green: serial ok
108  }
109 
110  try {
112 
113  vpV4l2Grabber g;
114  std::ostringstream device_name;
115  device_name << "/dev/video" << device;
116  g.setDevice(device_name.str());
117  g.setScale(1);
118  g.acquire(I);
119 
120  vpDisplay *d = nullptr;
121  vpImage<vpRGBa> O;
122 #ifdef VISP_HAVE_DISPLAY
123  if (display_on) {
125  }
126 #endif
127 
128  vpCameraParameters cam;
129  cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, I.getWidth() / 2., I.getHeight() / 2.);
130 
131 #if defined(VISP_HAVE_PUGIXML)
132  vpXmlParserCamera parser;
133  if (!intrinsic_file.empty() && !camera_name.empty()) {
134  parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
135  }
136 #endif
137 
138  std::cout << "cam:\n" << cam << std::endl;
139  std::cout << "tagFamily: " << tagFamily << std::endl;
140  std::cout << "tagSize: " << tagSize << std::endl;
141 
142  vpDetectorAprilTag detector(tagFamily);
143 
144  detector.setAprilTagQuadDecimate(quad_decimate);
145  detector.setAprilTagNbThreads(nThreads);
146 #ifdef VISP_HAVE_DISPLAY
147  detector.setDisplayTag(display_tag);
148 #endif
149 
150  vpServo task;
151  vpAdaptiveGain lambda;
152  if (display_on) {
153  lambda.initStandard(2.5, 0.4, 30); // lambda(0)=2.5, lambda(oo)=0.4 and lambda'(0)=30
154  }
155  else {
156  lambda.initStandard(4, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
157  }
158 
159  vpUnicycle robot;
162  task.setLambda(lambda);
163  vpRotationMatrix cRe;
164  cRe[0][0] = 0;
165  cRe[0][1] = -1;
166  cRe[0][2] = 0;
167  cRe[1][0] = 0;
168  cRe[1][1] = 0;
169  cRe[1][2] = -1;
170  cRe[2][0] = 1;
171  cRe[2][1] = 0;
172  cRe[2][2] = 0;
173 
175  vpVelocityTwistMatrix cVe(cMe);
176  task.set_cVe(cVe);
177 
178  vpMatrix eJe(6, 2, 0);
179  eJe[0][0] = eJe[5][1] = 1.0;
180 
181  std::cout << "eJe: \n" << eJe << std::endl;
182 
183  // Desired distance to the target
184  double Z_d = 0.4;
185 
186  // Define the desired polygon corresponding the the AprilTag CLOCKWISE
187  double X[4] = { tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2. };
188  double Y[4] = { tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2. };
189  std::vector<vpPoint> vec_P, vec_P_d;
190 
191  for (int i = 0; i < 4; i++) {
192  vpPoint P_d(X[i], Y[i], 0);
193  vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
194  P_d.track(cdMo); //
195  vec_P_d.push_back(P_d);
196  }
197 
198  vpMomentObject m_obj(3), m_obj_d(3);
199  vpMomentDatabase mdb, mdb_d;
200  vpMomentBasic mb_d; // Here only to get the desired area m00
201  vpMomentGravityCenter mg, mg_d;
202  vpMomentCentered mc, mc_d;
203  vpMomentAreaNormalized man(0, Z_d),
204  man_d(0, Z_d); // Declare normalized area. Desired area parameter will be updated below with m00
205  vpMomentGravityCenterNormalized mgn, mgn_d; // Declare normalized gravity center
206 
207  // Desired moments
208  m_obj_d.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
209  m_obj_d.fromVector(vec_P_d); // Initialize the object with the points coordinates
210 
211  mb_d.linkTo(mdb_d); // Add basic moments to database
212  mg_d.linkTo(mdb_d); // Add gravity center to database
213  mc_d.linkTo(mdb_d); // Add centered moments to database
214  man_d.linkTo(mdb_d); // Add area normalized to database
215  mgn_d.linkTo(mdb_d); // Add gravity center normalized to database
216  mdb_d.updateAll(m_obj_d); // All of the moments must be updated, not just an_d
217  mg_d.compute(); // Compute gravity center moment
218  mc_d.compute(); // Compute centered moments AFTER gravity center
219 
220  double area = 0;
221  if (m_obj_d.getType() == vpMomentObject::DISCRETE)
222  area = mb_d.get(2, 0) + mb_d.get(0, 2);
223  else
224  area = mb_d.get(0, 0);
225  // Update an moment with the desired area
226  man_d.setDesiredArea(area);
227 
228  man_d.compute(); // Compute area normalized moment AFTER centered moments
229  mgn_d.compute(); // Compute gravity center normalized moment AFTER area normalized moment
230 
231  // Desired plane
232  double A = 0.0;
233  double B = 0.0;
234  double C = 1.0 / Z_d;
235 
236  // Construct area normalized features
237  vpFeatureMomentGravityCenterNormalized s_mgn(mdb, A, B, C), s_mgn_d(mdb_d, A, B, C);
238  vpFeatureMomentAreaNormalized s_man(mdb, A, B, C), s_man_d(mdb_d, A, B, C);
239 
240  // Add the features
242  task.addFeature(s_man, s_man_d);
243 
244  // Update desired gravity center normalized feature
245  s_mgn_d.update(A, B, C);
246  s_mgn_d.compute_interaction();
247  // Update desired area normalized feature
248  s_man_d.update(A, B, C);
249  s_man_d.compute_interaction();
250 
251  std::vector<double> time_vec;
252  for (;;) {
253  g.acquire(I);
254 
255 #ifdef VISP_HAVE_DISPLAY
257 #endif
258 
259  double t = vpTime::measureTimeMs();
260  std::vector<vpHomogeneousMatrix> cMo_vec;
261  detector.detect(I, tagSize, cam, cMo_vec);
262  t = vpTime::measureTimeMs() - t;
263  time_vec.push_back(t);
264 
265  {
266  std::stringstream ss;
267  ss << "Detection time: " << t << " ms";
268 #ifdef VISP_HAVE_DISPLAY
269  vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
270 #endif
271  }
272 
273  if (detector.getNbObjects() == 1) {
274  if (!serial_off) {
275  serial->write("LED_RING=2,0,10,0\n"); // Switch on led 2 to green: tag detected
276  }
277 
278  // Update current points used to compute the moments
279  std::vector<vpImagePoint> vec_ip = detector.getPolygon(0);
280  vec_P.clear();
281  for (size_t i = 0; i < vec_ip.size(); i++) { // size = 4
282  double x = 0, y = 0;
283  vpPixelMeterConversion::convertPoint(cam, vec_ip[i], x, y);
284  vpPoint P;
285  P.set_x(x);
286  P.set_y(y);
287  vec_P.push_back(P);
288  }
289 
290 #ifdef VISP_HAVE_DISPLAY
291  // Display visual features
292  vpDisplay::displayPolygon(I, vec_ip, vpColor::green, 3); // Current polygon used to compure an moment
293  vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green,
294  3); // Current polygon used to compure an moment
295  vpDisplay::displayLine(I, 0, cam.get_u0(), I.getHeight() - 1, cam.get_u0(), vpColor::red,
296  3); // Vertical line as desired x position
297 #endif
298 
299  // Current moments
300  m_obj.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
301  m_obj.fromVector(vec_P); // Initialize the object with the points coordinates
302 
303  mg.linkTo(mdb); // Add gravity center to database
304  mc.linkTo(mdb); // Add centered moments to database
305  man.linkTo(mdb); // Add area normalized to database
306  mgn.linkTo(mdb); // Add gravity center normalized to database
307  mdb.updateAll(m_obj); // All of the moments must be updated, not just an_d
308  mg.compute(); // Compute gravity center moment
309  mc.compute(); // Compute centered moments AFTER gravity center
310  man.setDesiredArea(
311  area); // Since desired area was init at 0, because unknow at construction, need to be updated here
312  man.compute(); // Compute area normalized moment AFTER centered moment
313  mgn.compute(); // Compute gravity center normalized moment AFTER area normalized moment
314 
315  s_mgn.update(A, B, C);
316  s_mgn.compute_interaction();
317  s_man.update(A, B, C);
318  s_man.compute_interaction();
319 
320  task.set_cVe(cVe);
321  task.set_eJe(eJe);
322 
323  // Compute the control law. Velocities are computed in the mobile robot reference frame
324  vpColVector v = task.computeControlLaw();
325 
326  std::cout << "Send velocity to the mbot: " << v[0] << " m/s " << vpMath::deg(v[1]) << " deg/s" << std::endl;
327 
328  task.print();
329  double radius = 0.0325;
330  double L = 0.0725;
331  double motor_left = (-v[0] - L * v[1]) / radius;
332  double motor_right = (v[0] - L * v[1]) / radius;
333  std::cout << "motor left vel: " << motor_left << " motor right vel: " << motor_right << std::endl;
334  if (!serial_off) {
335  // serial->write("LED_RING=3,0,0,10\n"); // Switch on led 3 to blue: motor left servoed
336  // serial->write("LED_RING=4,0,0,10\n"); // Switch on led 4 to blue: motor right servoed
337  }
338  std::stringstream ss;
339  double rpm_left = motor_left * 30. / M_PI;
340  double rpm_right = motor_right * 30. / M_PI;
341  ss << "MOTOR_RPM=" << vpMath::round(rpm_left) << "," << vpMath::round(rpm_right) << "\n";
342  std::cout << "Send: " << ss.str() << std::endl;
343  if (!serial_off) {
344  serial->write(ss.str());
345  }
346  }
347  else {
348  // stop the robot
349  if (!serial_off) {
350  serial->write("LED_RING=2,10,0,0\n"); // Switch on led 2 to red: tag not detected
351  // serial->write("LED_RING=3,0,0,0\n"); // Switch on led 3 to blue: motor left not servoed
352  // serial->write("LED_RING=4,0,0,0\n"); // Switch on led 4 to blue: motor right not servoed
353  serial->write("MOTOR_RPM=0,-0\n"); // Stop the robot
354  }
355  }
356 
357 #ifdef VISP_HAVE_DISPLAY
358  vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
359  vpDisplay::flush(I);
360 
361  if (display_on && save_image) {
362  vpDisplay::getImage(I, O);
363  vpImageIo::write(O, "image.png");
364  }
365  if (vpDisplay::getClick(I, false)) {
366  break;
367  }
368 #endif
369  }
370 
371 
372  if (!serial_off) {
373  serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
374  }
375 
376  std::cout << "Benchmark computation time" << std::endl;
377  std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
378  << " ; " << vpMath::getMedian(time_vec) << " ms"
379  << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;
380 
381  if (display_on) {
382  delete d;
383  }
384  if (!serial_off) {
385  delete serial;
386  }
387  }
388  catch (const vpException &e) {
389  std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
390  if (!serial_off) {
391  serial->write("LED_RING=1,10,0,0\n"); // Switch on led 1 to red
392  }
393  }
394 
395  return EXIT_SUCCESS;
396 #else
397  (void)argc;
398  (void)argv;
399 #ifndef VISP_HAVE_APRILTAG
400  std::cout << "ViSP is not build with Apriltag support" << std::endl;
401 #endif
402 #ifndef VISP_HAVE_V4L2
403  std::cout << "ViSP is not build with v4l2 support" << std::endl;
404 #endif
405  std::cout << "Install missing 3rd parties, configure and build ViSP to run this tutorial" << std::endl;
406  return EXIT_SUCCESS;
407 #endif
408 }
Adaptive gain computation.
void initStandard(double gain_at_zero, double gain_at_infinity, double slope_at_zero)
Generic class defining intrinsic camera parameters.
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
Definition: vpColVector.h:191
static const vpColor red
Definition: vpColor.h:217
static const vpColor green
Definition: vpColor.h:220
@ TAG_36h11
AprilTag 36h11 pattern (recommended)
Class that defines generic functionalities for display.
Definition: vpDisplay.h:178
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void getImage(const vpImage< unsigned char > &Is, vpImage< vpRGBa > &Id)
Definition: vpDisplay.cpp:140
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
static void displayPolygon(const vpImage< unsigned char > &I, const std::vector< vpImagePoint > &vip, const vpColor &color, unsigned int thickness=1, bool closed=true)
error that can be emitted by ViSP classes.
Definition: vpException.h:60
const char * getMessage() const
Definition: vpException.cpp:65
Functionality computation for normalized surface moment feature. Computes the interaction matrix asso...
Functionality computation for centered and normalized moment feature. Computes the interaction matrix...
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void write(const vpImage< unsigned char > &I, const std::string &filename, int backend=IO_DEFAULT_BACKEND)
Definition: vpImageIo.cpp:291
unsigned int getWidth() const
Definition: vpImage.h:242
unsigned int getHeight() const
Definition: vpImage.h:181
static double getMedian(const std::vector< double > &v)
Definition: vpMath.cpp:322
static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
Definition: vpMath.cpp:353
static int round(double x)
Definition: vpMath.h:410
static double getMean(const std::vector< double > &v)
Definition: vpMath.cpp:302
static double deg(double rad)
Definition: vpMath.h:119
Implementation of a matrix and operations on matrices.
Definition: vpMatrix.h:169
Class handling the normalized surface moment that is invariant in scale and used to estimate depth.
This class defines the 2D basic moment . This class is a wrapper for vpMomentObject which allows to u...
Definition: vpMomentBasic.h:73
const std::vector< double > & get() const
This class defines the double-indexed centered moment descriptor .
This class allows to register all vpMoments so they can access each other according to their dependen...
virtual void updateAll(vpMomentObject &object)
Class describing 2D normalized gravity center moment.
Class describing 2D gravity center moment.
Class for generic objects.
void linkTo(vpMomentDatabase &moments)
Definition: vpMoment.cpp:114
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition: vpPoint.h:79
void set_x(double x)
Set the point x coordinate in the image plane.
Definition: vpPoint.cpp:468
void set_y(double y)
Set the point y coordinate in the image plane.
Definition: vpPoint.cpp:470
Implementation of a rotation matrix and operations on such kind of matrices.
void write(const std::string &s)
Definition: vpSerial.cpp:332
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
Definition: vpServo.cpp:380
@ EYEINHAND_L_cVe_eJe
Definition: vpServo.h:168
void addFeature(vpBasicFeature &s_cur, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
Definition: vpServo.cpp:331
void set_cVe(const vpVelocityTwistMatrix &cVe_)
Definition: vpServo.h:1038
void print(const vpServo::vpServoPrintType display_level=ALL, std::ostream &os=std::cout)
Definition: vpServo.cpp:171
void setLambda(double c)
Definition: vpServo.h:986
void set_eJe(const vpMatrix &eJe_)
Definition: vpServo.h:1101
void setServo(const vpServoType &servo_type)
Definition: vpServo.cpp:134
vpColVector computeControlLaw()
Definition: vpServo.cpp:705
@ CURRENT
Definition: vpServo.h:202
Class that consider the case of a translation vector.
Generic functions for unicycle mobile robots.
Definition: vpUnicycle.h:52
Class that is a wrapper over the Video4Linux2 (V4L2) driver.
void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
void setDevice(const std::string &devname)
void acquire(vpImage< unsigned char > &I)
XML parser to load and save intrinsic camera parameters.
int parse(vpCameraParameters &cam, const std::string &filename, const std::string &camera_name, const vpCameraParameters::vpCameraParametersProjType &projModel, unsigned int image_width=0, unsigned int image_height=0, bool verbose=true)
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()