Visual Servoing Platform version 3.0.1
servoSimuPoint2DhalfCamVelocity2.cpp
/****************************************************************************
 *
 * This file is part of the ViSP software.
 * Copyright (C) 2005 - 2017 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * ("GPL") version 2 as published by the Free Software Foundation.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Simulation of a 2 1/2 D visual servoing using theta U visual features.
 *
 * Authors:
 * Eric Marchand
 * Fabien Spindler
 *
 *****************************************************************************/

#include <stdlib.h>
#include <stdio.h>

#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/visual_features/vpFeatureThetaU.h>
#include <visp3/visual_features/vpGenericFeature.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpMath.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/core/vpPoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/robot/vpSimulatorCamera.h>

// List of allowed command line options
#define GETOPTARGS "h"

void usage(const char *name, const char *badparam);
bool getOptions(int argc, const char **argv);

void usage(const char *name, const char *badparam)
{
  fprintf(stdout, "\n\
Simulation of a 2 1/2 D visual servoing (x,y,log Z, theta U):\n\
- eye-in-hand control law,\n\
- velocity computed in the camera frame,\n\
- without display.\n\
  \n\
SYNOPSIS\n\
  %s [-h]\n", name);

  fprintf(stdout, "\n\
OPTIONS:                                               Default\n\
  \n\
  -h\n\
     Print the help.\n");

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}

bool getOptions(int argc, const char **argv)
{
  const char *optarg_;
  int c;
  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg_)) > 1) {

    switch (c) {
    case 'h': usage(argv[0], NULL); return false; break;

    default:
      usage(argv[0], optarg_);
      return false; break;
    }
  }

  if ((c == 1) || (c == -1)) {
    // standalone param or error
    usage(argv[0], NULL);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg_ << std::endl << std::endl;
    return false;
  }

  return true;
}

int
main(int argc, const char ** argv)
{
  try {
    // Read the command line options
    if (getOptions(argc, argv) == false) {
      exit (-1);
    }

    std::cout << std::endl ;
    std::cout << "-------------------------------------------------------" << std::endl ;
    std::cout << " simulation of a 2 1/2 D visual servoing " << std::endl ;
    std::cout << "-------------------------------------------------------" << std::endl ;
    std::cout << std::endl ;

    // In this example we will simulate a visual servoing task.
    // In simulation, we have to define the scene frame Ro and the
    // camera frame Rc.
    // The camera location is given by a homogeneous matrix cMo that
    // describes the position of the camera in the scene frame.

    vpServo task ;

    // Sets the initial camera location.
    // We give the camera location as a size 6 vector (3 translations in meters
    // and 3 rotations in theta U representation).
    vpPoseVector c_r_o(0.1, 0.2, 2,
                       vpMath::rad(20), vpMath::rad(10), vpMath::rad(50)) ;

    // This pose vector is then transformed into a 4x4 homogeneous matrix
    vpHomogeneousMatrix cMo(c_r_o) ;

    // We define a robot.
    // The vpSimulatorCamera implements a simple free-flying camera that is
    // just defined by its location.
    vpSimulatorCamera robot ;

    // Compute the position of the object in the world frame
    vpHomogeneousMatrix wMc, wMo;
    robot.getPosition(wMc) ;
    wMo = wMc * cMo;
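    // The object is static: wMo stays constant during the servo loop,
    // while wMc changes each time a velocity is applied to the robot.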

    // Now that the current camera position has been defined,
    // let us define the desired camera location.
    // It is given by cdMo.
    // Sets the desired camera location (the desired orientation is taken here
    // as a zero rotation, i.e. the camera frame aligned with the object frame)
    vpPoseVector cd_r_o(0, 0, 1,
                        vpMath::rad(0), vpMath::rad(0), vpMath::rad(0)) ;
    vpHomogeneousMatrix cdMo(cd_r_o) ;

    //----------------------------------------------------------------------
    // A 2 1/2 D visual servoing can be defined by:
    // - the position of a point x,y
    // - the difference between this point's depth and a desired depth,
    //   modeled by log Z/Zd, to be regulated to 0
    // - the rotation cdMc that the camera has to realize

    // Let us now define the current value of these features


    // Since we simulate, we have to define a 3D point that will be
    // forward-projected to define the current position x,y of the
    // reference point

    //------------------------------------------------------------------
    // First feature (x,y)
    //------------------------------------------------------------------
    // Let oP be this ... point.
    // The vpPoint class has three main members:
    // .oP : 3D coordinates in the scene frame
    // .cP : 3D coordinates in the camera frame
    // .p  : 2D coordinates in the image plane

    //------------------------------------------------------------------
    // Sets the point coordinates in the world frame
    vpPoint point(0, 0, 0);
    // Computes the point coordinates in the camera frame (cP) and then
    // its 2D coordinates (p)
    point.track(cMo);

    // We also define (again by forward projection) the desired position
    // of this point according to the desired camera position
    vpPoint pointd(0, 0, 0);
    pointd.track(cdMo);

    // Nevertheless, a vpPoint is not a feature; it is just a "tracker"
    // from which the features are built.
    // A feature is just defined by a vector s, a way to compute the
    // interaction matrix and the error, and, if required, a (or a vector of)
    // 3D information.

    // For a point (x,y), ViSP implements the vpFeaturePoint class.
    // We now define a feature for (x,y) (and for (x*,y*))
    vpFeaturePoint p, pd ;

    // and we initialize the vector s=(x,y) of p from the tracker point.
    // The Z coordinate in p is also initialized; it will be used to compute
    // the interaction matrix.
    vpFeatureBuilder::create(p, point) ;
    vpFeatureBuilder::create(pd, pointd) ;

    //------------------------------------------------------------------
    // Second feature: log(Z/Zd)
    // (not necessary to project twice, we reuse the point)

    // This case is interesting since this visual feature has not
    // been predefined in ViSP.
    // In such a case we have a generic feature class, vpGenericFeature.
    // We will have to define:
    //   the vector s              : .set_s(...)
    //   the interaction matrix Ls : .setInteractionMatrix(...)

    // log(Z/Zd) is then a size 1 vector logZ
    vpGenericFeature logZ(1) ;
    // initialized to s = log(Z/Zd).
    // Let us note that here we use point and pointd; it is not necessary
    // to forward-project twice (it has already been done).
    logZ.set_s(log(point.get_Z()/pointd.get_Z())) ;

    // This visual feature has to be regulated to zero

    //------------------------------------------------------------------
    // Third feature: ThetaU
    // The ThetaU feature tu represents the rotation that the camera
    // has to realize.
    // The complete displacement is then defined by cdMc:
    //------------------------------------------------------------------
    vpHomogeneousMatrix cdMc ;
    // compute the rotation that the camera has to achieve
    cdMc = cdMo * cMo.inverse() ;

    // From this displacement, we extract the rotation cdRc represented by
    // the angle theta and the rotation axis u
    vpFeatureThetaU tu(vpFeatureThetaU::cdRc) ;
    tu.buildFrom(cdMc) ;
    // This visual feature has to be regulated to zero

    // The desired rotation is always zero, since s is already the rotation
    // that the camera has to realize

    //------------------------------------------------------------------
    // Let us now define the task itself
    //------------------------------------------------------------------

    // Define the task:
    // - we want an eye-in-hand control law,
    // - the robot is controlled in the camera frame.
    // We choose to control the robot in the camera frame
    task.setServo(vpServo::EYEINHAND_CAMERA) ;
    // The interaction matrix is computed with the current value of s
    task.setInteractionMatrixType(vpServo::CURRENT) ;

    // We build the task by "stacking" the visual features
    // previously defined
    task.addFeature(p, pd) ;
    task.addFeature(logZ) ;
    task.addFeature(tu) ;
    // addFeature(X,Xd) means X should be regulated to Xd
    // addFeature(X) means that X should be regulated to 0
    // Some features such as vpFeatureThetaU MUST be regulated to zero
    // (otherwise, it will result in an error at execution time)

    // set the gain
    task.setLambda(1) ;
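    // With the CURRENT interaction matrix type and its pseudo-inverse, the
    // resulting control law is v = -lambda * L^+ * (s - s*)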

    // Display task information
    task.print() ;
    //------------------------------------------------------------------
    // And now the closed loop

    unsigned int iter = 0 ;
    // loop
    while (iter++ < 200)
    {
      std::cout << "---------------------------------------------" << iter << std::endl ;
      vpColVector v ;

      // get the robot position
      robot.getPosition(wMc) ;
      // Compute the position of the camera wrt the object frame
      cMo = wMc.inverse() * wMo;

      // update the feature
      point.track(cMo) ;
      vpFeatureBuilder::create(p, point) ;

      cdMc = cdMo * cMo.inverse() ;
      tu.buildFrom(cdMc) ;

      // There is no predefined feature for logZ, so we explicitly build
      // the related interaction matrix
      logZ.set_s(log(point.get_Z()/pointd.get_Z())) ;
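      // The time derivative of log(Z) is Zdot/Z, which for a 3D point gives
      // the row L = [0 0 -1/Z -y x 0] built below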
      vpMatrix LlogZ(1, 6) ;
      LlogZ[0][0] = LlogZ[0][1] = LlogZ[0][5] = 0 ;
      LlogZ[0][2] = -1/p.get_Z() ;
      LlogZ[0][3] = -p.get_y() ;
      LlogZ[0][4] =  p.get_x() ;

      logZ.setInteractionMatrix(LlogZ) ;

      // compute the control law
      v = task.computeControlLaw() ;
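      // v is a 6-dimensional velocity (vx, vy, vz, wx, wy, wz) expressed in
      // the camera frame, since an eye-in-hand camera-frame control law is used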

      // send the camera velocity to the controller
      robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;

      std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() << std::endl ;
    }

    // Display task information
    task.print() ;
    task.kill();
    // Final camera location
    std::cout << cMo << std::endl ;
    return 0;
  }
  catch (vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return 1;
  }
}