Visual Servoing Platform version 3.1.0
servoSimuPoint2DCamVelocity2.cpp

Servo a point:

With respect to servoSimuPoint2DCamVelocity1.cpp, only the type of control law is modified. This illustrates the need to update the robot Jacobian and to initialize the velocity twist transformation matrix.

The interaction matrix is computed as the mean of the current and desired interaction matrices.
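
The essential difference from servoSimuPoint2DCamVelocity1.cpp lies in how the vpServo task is configured. A minimal excerpt of the calls used further below in the source:

task.setServo(vpServo::EYEINHAND_L_cVe_eJe);  // control law built from cVe and eJe
task.setInteractionMatrixType(vpServo::MEAN); // mean of current and desired interaction matrices
task.set_cVe(cVe);                            // velocity twist transformation matrix
task.set_eJe(eJe);                            // robot Jacobian, updated at each iteration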

/****************************************************************************
*
* This file is part of the ViSP software.
* Copyright (C) 2005 - 2017 by Inria. All rights reserved.
*
* This software is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* See the file LICENSE.txt at the root directory of this source
* distribution for additional information about the GNU GPL.
*
* For using ViSP with software that can not be combined with the GNU
* GPL, please contact Inria about acquiring a ViSP Professional
* Edition License.
*
* See http://visp.inria.fr for more information.
*
* This software was developed at:
* Inria Rennes - Bretagne Atlantique
* Campus Universitaire de Beaulieu
* 35042 Rennes Cedex
* France
*
* If you have questions regarding the use of this file, please contact
* Inria at visp@inria.fr
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*
* Description:
* Simulation of a 2D visual servoing on a point.
*
* Authors:
* Eric Marchand
* Fabien Spindler
*
*****************************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpMath.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/robot/vpSimulatorCamera.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
// List of allowed command line options
#define GETOPTARGS "h"
void usage(const char *name, const char *badparam);
bool getOptions(int argc, const char **argv);
void usage(const char *name, const char *badparam)
{
fprintf(stdout, "\n\
Simulation of a 2D visual servoing on a point:\n\
- eye-in-hand control law,\n\
- articular velocities are computed,\n\
- without display.\n\
\n\
SYNOPSIS\n\
%s [-h]\n", name);
fprintf(stdout, "\n\
OPTIONS: Default\n\
\n\
-h\n\
Print the help.\n");
if (badparam)
fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
bool getOptions(int argc, const char **argv)
{
const char *optarg_;
int c;
while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg_)) > 1) {
switch (c) {
case 'h':
usage(argv[0], NULL);
return false;
break;
default:
usage(argv[0], optarg_);
return false;
break;
}
}
if ((c == 1) || (c == -1)) {
// standalone param or error
usage(argv[0], NULL);
std::cerr << "ERROR: " << std::endl;
std::cerr << " Bad argument " << optarg_ << std::endl << std::endl;
return false;
}
return true;
}
int main(int argc, const char **argv)
{
try {
// Read the command line options
if (getOptions(argc, argv) == false) {
exit(-1);
}
vpServo task;
vpSimulatorCamera robot;
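// vpSimulatorCamera models a free-flying camera: the end-effector frame
// coincides with the camera frame, so cVe and eJe are identity matrices here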
std::cout << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << " Test program for vpServo " << std::endl;
std::cout << " Eye-in-hand task control, articular velocity are computed" << std::endl;
std::cout << " Simulation " << std::endl;
std::cout << " task : servo a point " << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << std::endl;
// sets the initial camera location
vpHomogeneousMatrix cMo;
cMo[0][3] = 0.1;
cMo[1][3] = 0.2;
cMo[2][3] = 2;
// Compute the position of the object in the world frame
vpHomogeneousMatrix wMc, wMo;
robot.getPosition(wMc);
wMo = wMc * cMo;
// sets the point coordinates in the world frame
vpPoint point(0, 0, 0);
// computes the point coordinates in the camera frame and its 2D
// coordinates
point.track(cMo);
// sets the current position of the visual feature
vpFeaturePoint p;
vpFeatureBuilder::create(p, point); // retrieve x,y and Z of the vpPoint structure
// sets the desired position of the visual feature
vpFeaturePoint pd;
pd.buildFrom(0, 0, 1);
// define the task
// - we want an eye-in-hand control law
// - articular velocities are computed
task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
// Interaction matrix computed as the mean of the current and desired ones
task.setInteractionMatrixType(vpServo::MEAN);
// Set the position of the camera in the end-effector frame
vpVelocityTwistMatrix cVe;
robot.get_cVe(cVe);
task.set_cVe(cVe);
// Set the Jacobian (expressed in the end-effector frame)
vpMatrix eJe;
robot.get_eJe(eJe);
task.set_eJe(eJe);
// we want to see a point on a point
task.addFeature(p, pd);
// set the gain
task.setLambda(1);
// Display task information
task.print();
unsigned int iter = 0;
// loop
while (iter++ < 100) {
std::cout << "---------------------------------------------" << iter << std::endl;
// Set the Jacobian (expressed in the end-effector frame)
// since q is modified eJe is modified
robot.get_eJe(eJe);
task.set_eJe(eJe);
// get the robot position
robot.getPosition(wMc);
// Compute the position of the camera wrt the object frame
cMo = wMc.inverse() * wMo;
// new point position
point.track(cMo);
vpFeatureBuilder::create(p, point); // retrieve x,y and Z of the vpPoint structure
pd.buildFrom(0, 0, 1); // Since vpServo::MEAN interaction matrix is
// used, we need to update the desired feature at
// each iteration
// compute the control law
vpColVector v = task.computeControlLaw();
// send the camera velocity to the controller
robot.setVelocity(vpRobot::CAMERA_FRAME, v);
std::cout << "|| s - s* || = " << (task.getError()).sumSquare() << std::endl;
}
// Display task information
task.print();
task.kill();
return 0;
} catch (vpException &e) {
std::cout << "Catch a ViSP exception: " << e << std::endl;
return 1;
}
}