ViSP  2.8.0
servoSimuPoint2DCamVelocity3.cpp

Servo a point:

Compared to servoSimuPoint2DCamVelocity1.cpp, only the type of control law is modified: articular velocities are computed instead of camera velocities. This illustrates the need for updating the robot Jacobian and for initializing the velocity twist transformation matrix.

Only the X coordinate of the point feature is selected (to test the feature selection process).
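As a quick orientation before the full listing, the fragment below isolates the calls that make this example differ from servoSimuPoint2DCamVelocity1.cpp: choosing the eye-in-hand law that computes articular velocities (which requires the cVe twist matrix and the eJe Jacobian) and restricting the point feature to its X coordinate. This is only a minimal illustrative sketch written against the ViSP 2.x API; it is not part of the original example, whose complete source follows.

#include <visp/vpFeaturePoint.h>
#include <visp/vpMatrix.h>
#include <visp/vpServo.h>
#include <visp/vpSimulatorCamera.h>
#include <visp/vpVelocityTwistMatrix.h>

int main()
{
  vpServo task;
  vpSimulatorCamera robot;

  // Articular velocities are computed: this law needs the camera-to-end-effector
  // velocity twist matrix cVe and the robot Jacobian eJe.
  task.setServo(vpServo::EYEINHAND_L_cVe_eJe);

  vpVelocityTwistMatrix cVe;   // identity for this free-flying camera
  task.set_cVe(cVe);

  vpMatrix eJe;
  robot.get_eJe(eJe);          // must be refreshed at every servo iteration
  task.set_eJe(eJe);

  // Only the X coordinate of the 2D point feature enters the control law
  vpFeaturePoint p, pd;
  task.addFeature(p, pd, vpFeaturePoint::selectX());

  task.kill();                 // free the memory allocated by the task
  return 0;
}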

/****************************************************************************
*
* $Id: servoSimuPoint2DCamVelocity3.cpp 2503 2010-02-16 18:55:01Z fspindle $
*
* This file is part of the ViSP software.
* Copyright (C) 2005 - 2013 by INRIA. All rights reserved.
*
* This software is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* ("GPL") version 2 as published by the Free Software Foundation.
* See the file LICENSE.txt at the root directory of this source
* distribution for additional information about the GNU GPL.
*
* For using ViSP with software that can not be combined with the GNU
* GPL, please contact INRIA about acquiring a ViSP Professional
* Edition License.
*
* See http://www.irisa.fr/lagadic/visp/visp.html for more information.
*
* This software was developed at:
* INRIA Rennes - Bretagne Atlantique
* Campus Universitaire de Beaulieu
* 35042 Rennes Cedex
* France
* http://www.irisa.fr/lagadic
*
* If you have questions regarding the use of this file, please contact
* INRIA at visp@inria.fr
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*
*
* Description:
* Simulation of a 2D visual servoing on a point.
*
* Authors:
* Eric Marchand
* Fabien Spindler
*
*****************************************************************************/
#include <stdlib.h>
#include <stdio.h>
#include <visp/vpFeatureBuilder.h>
#include <visp/vpFeaturePoint.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpMath.h>
#include <visp/vpParseArgv.h>
#include <visp/vpServo.h>
#include <visp/vpSimulatorCamera.h>
// List of allowed command line options
#define GETOPTARGS "h"
void usage(const char *name, const char *badparam)
{
  fprintf(stdout, "\n\
Simulation of a 2D visual servoing on a point:\n\
- eye-in-hand control law,\n\
- articular velocities are computed,\n\
- without display,\n\
- only the X coordinate of the point is selected.\n\
\n\
SYNOPSIS\n\
%s [-h]\n", name);

  fprintf(stdout, "\n\
OPTIONS: Default\n\
\n\
-h\n\
Print the help.\n");

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
bool getOptions(int argc, const char **argv)
{
  const char *optarg;
  int c;
  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg)) > 1) {
    switch (c) {
    case 'h': usage(argv[0], NULL); return false; break;

    default:
      usage(argv[0], optarg);
      return false; break;
    }
  }

  if ((c == 1) || (c == -1)) {
    // standalone param or error
    usage(argv[0], NULL);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg << std::endl << std::endl;
    return false;
  }

  return true;
}
int
main(int argc, const char ** argv)
{
// Read the command line options
if (getOptions(argc, argv) == false) {
exit (-1);
}
vpServo task ;
std::cout << std::endl ;
std::cout << "-------------------------------------------------------" << std::endl ;
std::cout << " Test program for vpServo " <<std::endl ;
std::cout << " Eye-in-hand task control, articular velocity are computed" << std::endl ;
std::cout << " Simulation " << std::endl ;
std::cout << " task : servo a point " << std::endl ;
std::cout << "-------------------------------------------------------" << std::endl ;
std::cout << std::endl ;
  // Set the initial camera location with respect to the object frame
  vpHomogeneousMatrix cMo ;
  cMo[0][3] = 0.1 ;
  cMo[1][3] = 0.2 ;
  cMo[2][3] = 2 ;

  // Create a free-flying camera robot and compute the position of the
  // object in the world frame
  vpSimulatorCamera robot ;
  vpHomogeneousMatrix wMc, wMo ;
  robot.getPosition(wMc) ;
  wMo = wMc * cMo ;
  // Set the point coordinates in the world frame
  vpPoint point ;
  point.setWorldCoordinates(0, 0, 0) ;
  // Compute the point coordinates in the camera frame and its 2D coordinates
  point.track(cMo) ;

  // Set the current position of the visual feature
  vpFeaturePoint p ;
  vpFeatureBuilder::create(p, point) ; // retrieve x, y and Z of the vpPoint structure

  // Set the desired position of the visual feature
  vpFeaturePoint pd ;
  pd.buildFrom(0, 0, 1) ; // buildFrom(x, y, Z)
  // Define the task:
  // - we want an eye-in-hand control law
  // - articular velocities are computed
  task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;

  // Set the position of the camera in the end-effector frame
  vpVelocityTwistMatrix cVe ;
  task.set_cVe(cVe) ;

  // Set the Jacobian (expressed in the end-effector frame)
  vpMatrix eJe ;
  robot.get_eJe(eJe) ;
  task.set_eJe(eJe) ;

  // We want to see a point on a point; only the X coordinate of the
  // feature is selected
  task.addFeature(p, pd, vpFeaturePoint::selectX()) ;

  // Set the gain
  task.setLambda(1) ;
  // Display task information
  task.print() ;

  unsigned int iter = 0 ;
  // Servo loop
  while (iter++ < 100)
  {
    std::cout << "---------------------------------------------" << iter << std::endl ;

    // Set the Jacobian (expressed in the end-effector frame);
    // since q is modified, eJe is modified too
    robot.get_eJe(eJe) ;
    task.set_eJe(eJe) ;

    // Get the robot position
    robot.getPosition(wMc) ;
    // Compute the pose of the object frame in the camera frame
    cMo = wMc.inverse() * wMo ;

    // New point position
    point.track(cMo) ;
    vpFeatureBuilder::create(p, point) ; // retrieve x, y and Z of the vpPoint structure
    // Compute the control law (articular velocities)
    vpColVector v = task.computeControlLaw() ;

    // Send the computed velocity to the controller
    robot.setVelocity(vpRobot::ARTICULAR_FRAME, v) ;

    std::cout << "|| s - s* || = " << (task.getError()).sumSquare() << std::endl ;
  }

  // Display task information
  task.print() ;
  task.kill();
}