// Visual Servoing Platform version 3.6.1 under development (2024-04-25)
// servoSimuPoint2DhalfCamVelocity2.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Simulation of a 2 1/2 D visual servoing using theta U visual features.
33  *
34 *****************************************************************************/
35 
45 #include <stdio.h>
46 #include <stdlib.h>
47 
48 #include <visp3/core/vpHomogeneousMatrix.h>
49 #include <visp3/core/vpMath.h>
50 #include <visp3/core/vpPoint.h>
51 #include <visp3/io/vpParseArgv.h>
52 #include <visp3/robot/vpSimulatorCamera.h>
53 #include <visp3/visual_features/vpFeatureBuilder.h>
54 #include <visp3/visual_features/vpFeaturePoint.h>
55 #include <visp3/visual_features/vpFeatureThetaU.h>
56 #include <visp3/visual_features/vpGenericFeature.h>
57 #include <visp3/vs/vpServo.h>
58 
59 // List of allowed command line options
60 #define GETOPTARGS "h"
61 
62 void usage(const char *name, const char *badparam);
63 bool getOptions(int argc, const char **argv);
64 
/*!
 * Print command-line help for this example on stdout.
 *
 * \param name : Program name (argv[0]).
 * \param badparam : Offending parameter to report, or nullptr when the
 * help is requested explicitly.
 */
void usage(const char *name, const char *badparam)
{
  fprintf(stdout, "\n\
Simulation of a 2 1/2 D visual servoing (x,y,log Z, theta U):\n\
- eye-in-hand control law,\n\
- velocity computed in the camera frame,\n\
- without display.\n\
\n\
SYNOPSIS\n\
  %s [-h]\n",
          name);

  fprintf(stdout, "\n\
OPTIONS: Default\n\
\n\
  -h\n\
     Print the help.\n");

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
94 
104 bool getOptions(int argc, const char **argv)
105 {
106  const char *optarg_;
107  int c;
108  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg_)) > 1) {
109 
110  switch (c) {
111  case 'h':
112  usage(argv[0], nullptr);
113  return false;
114 
115  default:
116  usage(argv[0], optarg_);
117  return false;
118  }
119  }
120 
121  if ((c == 1) || (c == -1)) {
122  // standalone param or error
123  usage(argv[0], nullptr);
124  std::cerr << "ERROR: " << std::endl;
125  std::cerr << " Bad argument " << optarg_ << std::endl << std::endl;
126  return false;
127  }
128 
129  return true;
130 }
131 
/*!
 * Simulation of a 2 1/2 D visual servoing:
 * - eye-in-hand control law,
 * - velocity computed in the camera frame,
 * - without display.
 *
 * Fixes vs. the previous listing (lines lost in extraction):
 * - declare the theta-U feature `tu` before use (the code called
 *   tu.buildFrom() on an undeclared variable);
 * - configure the task with setServo()/setInteractionMatrixType(), which
 *   the surrounding comments described but the code never called;
 * - send the computed velocity to the robot with setVelocity(), without
 *   which the camera never moves and the loop cannot converge.
 */
int main(int argc, const char **argv)
{
#if (defined(VISP_HAVE_LAPACK) || defined(VISP_HAVE_EIGEN3) || defined(VISP_HAVE_OPENCV))
  try {
    // Read the command line options
    if (getOptions(argc, argv) == false) {
      return EXIT_FAILURE;
    }

    std::cout << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << " simulation of a 2 1/2 D visual servoing " << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << std::endl;

    // In this example we simulate a visual servoing task.
    // In simulation, we have to define the scene frame Ro and the
    // camera frame Rc.
    // The camera location is given by an homogeneous matrix cMo that
    // describes the position of the scene or object frame in the camera frame.

    vpServo task;

    // Sets the initial camera location, given as a size 6 vector
    // (3 translations in meter and 3 rotations in theta-U representation)
    vpPoseVector c_r_o(0.1, 0.2, 2, vpMath::rad(20), vpMath::rad(10), vpMath::rad(50));

    // This pose vector is then transformed in a 4x4 homogeneous matrix
    vpHomogeneousMatrix cMo(c_r_o);

    // We define a robot: vpSimulatorCamera implements a simple free-flying
    // camera that is just defined by its location cMo
    vpSimulatorCamera robot;

    // Compute the position of the object in the world frame
    vpHomogeneousMatrix wMc, wMo;
    robot.getPosition(wMc);
    wMo = wMc * cMo;

    // Now that the current camera position has been defined, let us define
    // the desired camera location, given by cdMo
    vpPoseVector cd_r_o(0, 0, 1, vpMath::rad(0), vpMath::rad(0), vpMath::rad(0));
    vpHomogeneousMatrix cdMo(cd_r_o);

    //----------------------------------------------------------------------
    // A 2 1/2 D visual servoing is defined by:
    // - the position of a point (x, y)
    // - the difference between this point depth and a desired depth,
    //   modeled by log(Z/Zd) to be regulated to 0
    // - the rotation cdMc that the camera has to realize

    //------------------------------------------------------------------
    // First feature (x, y)
    //------------------------------------------------------------------
    // Since we simulate, we define a 3D point that will be
    // forward-projected to obtain the current position (x, y) of the
    // reference point. A vpPoint has three main members:
    //  .oP : 3D coordinates in scene frame
    //  .cP : 3D coordinates in camera frame
    //  .p  : 2D coordinates

    // Sets the point coordinates in the world frame
    vpPoint point(0, 0, 0);
    // Computes the point coordinates in the camera frame (cP) and its
    // 2D coordinates (p)
    point.track(cMo);

    // We also define (again by forward projection) the desired position
    // of this point according to the desired camera position
    vpPoint pointd(0, 0, 0);
    pointd.track(cdMo);

    // A vpPoint is not a feature, just a "tracker" from which features are
    // built. For a point (x, y) ViSP implements the vpFeaturePoint class;
    // we now define a feature for (x, y) and one for (x*, y*)
    vpFeaturePoint p, pd;

    // Initialize the vector s=(x,y) of p from the tracker; the Z coordinate
    // is also initialized since it is used to compute the interaction matrix
    vpFeatureBuilder::create(p, point);
    vpFeatureBuilder::create(pd, pointd);

    //------------------------------------------------------------------
    // Second feature log(Z/Zd)
    //------------------------------------------------------------------
    // This visual feature is not predefined in ViSP; in such a case we use
    // the generic feature class vpGenericFeature and define ourselves:
    //  - the vector s            : .set_s(...)
    //  - the interaction matrix  : .setInteractionMatrix(...)

    // log(Z/Zd) is a size 1 vector
    vpGenericFeature logZ(1);
    // Initialized to s = log(Z/Zd); we reuse point and pointd, it's not
    // necessary to forward-project twice (it's already done)
    logZ.set_s(log(point.get_Z() / pointd.get_Z()));
    // This visual feature has to be regulated to zero

    //------------------------------------------------------------------
    // Third feature theta-U: the rotation that the camera has to realize
    //------------------------------------------------------------------
    vpHomogeneousMatrix cdMc;
    // Compute the displacement the camera has to achieve
    cdMc = cdMo * cMo.inverse();

    // From this displacement extract the rotation cdRc represented by the
    // angle theta and the rotation axis u.
    // Fix: tu was used below without being declared.
    vpFeatureThetaU tu(vpFeatureThetaU::cdRc);
    tu.buildFrom(cdMc);
    // The desired value is always zero, since s is the rotation that the
    // camera still has to realize

    //------------------------------------------------------------------
    // The task itself
    //------------------------------------------------------------------
    // Fix: these two calls were described by the comments but missing:
    // - eye-in-hand control law, robot controlled in the camera frame
    task.setServo(vpServo::EYEINHAND_CAMERA);
    // - interaction matrix computed with the current value of s
    task.setInteractionMatrixType(vpServo::CURRENT);

    // Build the task by "stacking" the visual features defined above.
    // addFeature(X, Xd) means X should be regulated to Xd;
    // addFeature(X) means X should be regulated to 0. Some features such as
    // vpFeatureThetaU MUST be regulated to zero (otherwise it results in an
    // error at execution level).
    task.addFeature(p, pd);
    task.addFeature(logZ);
    task.addFeature(tu);

    // Set the gain
    task.setLambda(1);

    // Display task information
    task.print();

    //------------------------------------------------------------------
    // And now the closed loop
    unsigned int iter = 0;
    while (iter++ < 200) {
      std::cout << "---------------------------------------------" << iter << std::endl;
      vpColVector v;

      // Get the robot position
      robot.getPosition(wMc);
      // Compute the position of the object frame in the camera frame
      cMo = wMc.inverse() * wMo;

      // Update the features
      point.track(cMo);
      vpFeatureBuilder::create(p, point);

      cdMc = cdMo * cMo.inverse();
      tu.buildFrom(cdMc);

      // There is no predefined feature for logZ: update s and explicitly
      // build the related 1x6 interaction matrix
      logZ.set_s(log(point.get_Z() / pointd.get_Z()));
      vpMatrix LlogZ(1, 6);
      LlogZ[0][0] = LlogZ[0][1] = LlogZ[0][5] = 0;
      LlogZ[0][2] = -1 / p.get_Z();
      LlogZ[0][3] = -p.get_y();
      LlogZ[0][4] = p.get_x();

      logZ.setInteractionMatrix(LlogZ);

      // Compute the control law
      v = task.computeControlLaw();

      // Send the camera velocity to the controller.
      // Fix: this call was missing, so the simulated camera never moved.
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      std::cout << "|| s - s* || = " << (task.getError()).sumSquare() << std::endl;
    }

    // Display task information
    task.print();
    // Final camera location
    std::cout << cMo << std::endl;
    return EXIT_SUCCESS;
  }
  catch (const vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
#else
  (void)argc;
  (void)argv;
  std::cout << "Cannot run this example: install Lapack, Eigen3 or OpenCV" << std::endl;
  return EXIT_SUCCESS;
#endif
}
/*
 * Doxygen cross-reference notes (extraction residue, kept as a comment):
 *
 * vpColVector          — Implementation of column vector and the associated operations. (vpColVector.h:163)
 * vpException          — Error that can be emitted by ViSP classes. (vpException.h:59)
 * vpFeatureBuilder     — static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
 * vpFeaturePoint       — 2D point visual feature composed of two cartesian parameters;
 *                        double get_x() const; double get_y() const; double get_Z() const
 * vpFeatureThetaU      — 3D visual feature from an axis/angle parametrization representing a rotation.
 * vpGenericFeature     — Defines a feature (or set of features) not implemented in ViSP as a specific class.
 * vpHomogeneousMatrix  — Homogeneous matrix and operations on such matrices;
 *                        vpHomogeneousMatrix inverse() const;
 *                        void buildFrom(const vpTranslationVector &t, const vpRotationMatrix &R)
 * vpMath               — static double rad(double deg) (vpMath.h:127)
 * vpMatrix             — Matrix and operations on matrices. (vpMatrix.h:146)
 * vpParseArgv          — static bool parse(int *argcPtr, const char **argv, vpArgvInfo *argTable, int flags)
 *                        (vpParseArgv.cpp:69)
 * vpPoint              — 3D point in the object frame with forward projection. (vpPoint.h:77)
 * vpPoseVector         — Pose vector and operations on poses. (vpPoseVector.h:189)
 * vpRobot              — void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel);
 *                        CAMERA_FRAME (vpRobot.h:82)
 * vpServo              — setInteractionMatrixType(...) (vpServo.cpp:378); EYEINHAND_CAMERA (vpServo.h:155);
 *                        addFeature(...) (vpServo.cpp:329); print(...) (vpServo.cpp:169);
 *                        setLambda(double) (vpServo.h:976); setServo(...) (vpServo.cpp:132);
 *                        getError() (vpServo.h:504); computeControlLaw() (vpServo.cpp:703);
 *                        CURRENT (vpServo.h:196)
 * vpSimulatorCamera    — The simplest robot: a free flying camera.
 */