Visual Servoing Platform  version 3.6.1 under development (2024-03-29)
servoAfma6Points2DCamVelocityEyeToHand.cpp

Example of an eye-to-hand control law. We control here a real robot, the Afma6 (a Cartesian gantry robot with 6 degrees of freedom). The robot is controlled in the camera frame.
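For orientation before the full source, here is a minimal sketch of the task configuration this example relies on. It only uses the vpServo API that also appears in the listing below; the helper function name and its parameters are illustrative, not part of the example.

#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>

// Minimal sketch (illustrative helper, not part of the example below):
// configure an eye-to-hand task whose error is built from 2D point features
// expressed in the camera frame, and whose control law combines the
// camera-to-end-effector twist cVe with the robot Jacobian eJe.
void configureEyeToHandTask(vpServo &task, vpFeaturePoint *p, vpFeaturePoint *pd, unsigned int nbPoint)
{
  task.setServo(vpServo::EYETOHAND_L_cVe_eJe);     // eye-to-hand control law
  task.setInteractionMatrixType(vpServo::CURRENT); // interaction matrix computed at the current features
  for (unsigned int i = 0; i < nbPoint; i++)
    task.addFeature(p[i], pd[i]);                  // pair each current feature with its desired value
}

The resulting velocity is obtained with task.computeControlLaw() and, since the Jacobian eJe is involved, it is a joint velocity that can be sent to the robot with setVelocity().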

/****************************************************************************
*
* ViSP, open source Visual Servoing Platform software.
* Copyright (C) 2005 - 2023 by Inria. All rights reserved.
*
* This software is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* See the file LICENSE.txt at the root directory of this source
* distribution for additional information about the GNU GPL.
*
* For using ViSP with software that can not be combined with the GNU
* GPL, please contact Inria about acquiring a ViSP Professional
* Edition License.
*
* See https://visp.inria.fr for more information.
*
* This software was developed at:
* Inria Rennes - Bretagne Atlantique
* Campus Universitaire de Beaulieu
* 35042 Rennes Cedex
* France
*
* If you have questions regarding the use of this file, please contact
* Inria at visp@inria.fr
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*
* Description:
* tests the control law
* eye-to-hand control
* velocity computed in the camera frame
*
*****************************************************************************/
#include <cmath>   // std::fabs
#include <fstream> // std::ifstream, std::ofstream
#include <iomanip> // std::setfill, std::setw
#include <iostream>
#include <limits> // numeric_limits
#include <list>
#include <sstream> // std::stringstream
#include <stdlib.h>
#include <string>
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h> // Debug trace
#if (defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_DC1394))
#include <visp3/blob/vpDot.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpException.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpImagePoint.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpRobotAfma6.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
#define SAVE 0
#define L 0.006
#define D 0
int main()
{
try {
std::string username = vpIoTools::getUserName();
std::string logdirname = "/tmp/" + username;
if (SAVE) {
if (vpIoTools::checkDirectory(logdirname) == false) {
try {
// Create the dirname
vpIoTools::makeDirectory(logdirname);
} catch (...) {
std::cerr << std::endl << "ERROR:" << std::endl;
std::cerr << " Cannot create " << logdirname << std::endl;
return EXIT_FAILURE;
}
}
}
vpServo task;
vpCameraParameters cam;
vpImage<unsigned char> I;
int i;
// Firewire grabber used to acquire the images
// (the video mode and framerate below are typical settings; adapt them to your camera)
vp1394TwoGrabber g;
g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
g.open(I);
g.acquire(I);
#ifdef VISP_HAVE_X11
vpDisplayX display(I, 100, 100, "Current image");
#elif defined(HAVE_OPENCV_HIGHGUI)
vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
vpDisplayGTK display(I, 100, 100, "Current image");
#endif
vpDisplay::display(I);
vpDisplay::flush(I);
std::cout << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << " Test program for vpServo " << std::endl;
std::cout << " Eye-to-hand task control" << std::endl;
std::cout << " Simulation " << std::endl;
std::cout << " task : servo a point " << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << std::endl;
const int nbPoint = 7;
vpDot dot[nbPoint];
vpImagePoint cog; // center of gravity of a tracked dot
for (i = 0; i < nbPoint; i++) {
dot[i].initTracking(I);
dot[i].setGraphics(true);
dot[i].track(I);
dot[i].setGraphics(false);
}
// Compute the pose 3D model
vpPoint point[nbPoint];
point[0].setWorldCoordinates(-2 * L, D, -3 * L);
point[1].setWorldCoordinates(0, D, -3 * L);
point[2].setWorldCoordinates(2 * L, D, -3 * L);
point[3].setWorldCoordinates(-L, D, -L);
point[4].setWorldCoordinates(L, D, -L);
point[5].setWorldCoordinates(L, D, L);
point[6].setWorldCoordinates(-L, D, L);
vpRobotAfma6 robot;
// Update camera parameters
robot.getCameraParameters(cam, I);
vpHomogeneousMatrix cMo, cdMo; // current and desired object pose in the camera frame
vpPose pose;
pose.clearPoint();
for (i = 0; i < nbPoint; i++) {
cog = dot[i].getCog();
double x = 0, y = 0;
// Convert the dot center of gravity from pixels to normalized coordinates
vpPixelMeterConversion::convertPoint(cam, cog, x, y);
point[i].set_x(x);
point[i].set_y(y);
pose.addPoint(point[i]);
}
// Compute the initial pose using the Dementhon and Lagrange linear methods,
// followed by a non-linear minimization (virtual visual servoing)
pose.computePose(vpPose::DEMENTHON_LAGRANGE_VIRTUAL_VS, cMo);
std::cout << cMo << std::endl;
cMo.print();
/*------------------------------------------------------------------
-- Learning the desired position
-- or reading the desired position
------------------------------------------------------------------
*/
std::cout << " Learning 0/1 " << std::endl;
std::string name = "cdMo.dat";
int learning;
std::cin >> learning;
if (learning == 1) {
// save the object position
vpTRACE("Save the location of the object in a file cdMo.dat");
std::ofstream f(name.c_str());
cMo.save(f);
f.close();
exit(1);
}
{
vpTRACE("Loading desired location from cdMo.dat");
std::ifstream f("cdMo.dat");
cdMo.load(f);
f.close();
}
vpFeaturePoint p[nbPoint], pd[nbPoint];
// set the desired position of the point by forward projection using
// the pose cdMo
for (i = 0; i < nbPoint; i++) {
vpColVector cP, p;
point[i].changeFrame(cdMo, cP);
point[i].projection(cP, p);
pd[i].set_x(p[0]);
pd[i].set_y(p[1]);
}
//------------------------------------------------------------------
vpTRACE("define the task");
vpTRACE("\t we want an eye-in-hand control law");
vpTRACE("\t robot is controlled in the camera frame");
for (i = 0; i < nbPoint; i++) {
task.addFeature(p[i], pd[i]);
}
vpTRACE("Display task information ");
task.print();
//------------------------------------------------------------------
double convergence_threshold = 0.00; // set for example to 0.00025 to stop once the error gets small
// Wait for a user click before starting the servo loop
vpDisplay::getClick(I);
//-------------------------------------------------------------
double error = 1;
unsigned int iter = 0;
vpTRACE("\t loop");
vpColVector v; // computed robot velocity
// position of the object in the effector frame
vpHomogeneousMatrix oMcamrobot;
oMcamrobot[0][3] = -0.05;
vpImage<vpRGBa> Ic; // color snapshot of the overlayed display, used when SAVE is enabled
int it = 0;
double lambda_av = 0.1;
double alpha = 1; // 1 ;
double beta = 3; // 3 ;
std::cout << "alpha 0.7" << std::endl;
std::cin >> alpha;
std::cout << "beta 5" << std::endl;
std::cin >> beta;
std::list<vpImagePoint> Lcog; // trajectory of the tracked dots in the image
vpImagePoint ip;              // position used to overlay text in the image
while (error > convergence_threshold) {
std::cout << "---------------------------------------------" << iter++ << std::endl;
g.acquire(I);
vpDisplay::display(I);
ip.set_i(265);
ip.set_j(150);
vpDisplay::displayText(I, ip, "Eye-To-Hand Visual Servoing", vpColor::green);
ip.set_i(280);
ip.set_j(150);
vpDisplay::displayText(I, ip, "IRISA-INRIA Rennes, Lagadic project", vpColor::green);
try {
for (i = 0; i < nbPoint; i++) {
dot[i].track(I);
Lcog.push_back(dot[i].getCog());
}
} catch (...) {
vpTRACE("Error detected while tracking visual features");
robot.stopMotion();
exit(1);
}
// compute the initial pose using a non linear minimization method
pose.clearPoint();
for (i = 0; i < nbPoint; i++) {
double x = 0, y = 0;
cog = dot[i].getCog();
// Convert the measured dot position from pixels to normalized coordinates
vpPixelMeterConversion::convertPoint(cam, cog, x, y);
point[i].set_x(x);
point[i].set_y(y);
vpColVector cP;
point[i].changeFrame(cdMo, cP);
p[i].set_x(x);
p[i].set_y(y);
p[i].set_Z(cP[2]);
pose.addPoint(point[i]);
point[i].display(I, cMo, cam, vpColor::green);
point[i].display(I, cdMo, cam, vpColor::blue);
}
// Refine the current object pose cMo (needed to build cVe below)
pose.computePose(vpPose::LOWE, cMo);
vpHomogeneousMatrix cMe, camrobotMe;
robot.get_cMe(camrobotMe);
cMe = cMo * oMcamrobot * camrobotMe;
task.set_cVe(cMe);
vpMatrix eJe;
robot.get_eJe(eJe);
task.set_eJe(eJe);
// Compute the adaptative gain (speed up the convergence)
double gain;
if (iter > 2) {
if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())
gain = lambda_av;
else {
gain = alpha * exp(-beta * (task.getError()).sumSquare()) + lambda_av;
}
} else
gain = lambda_av;
if (SAVE == 1)
gain = gain / 5;
vpTRACE("%f %f %f %f %f", alpha, beta, lambda_av, (task.getError()).sumSquare(), gain);
task.setLambda(gain);
v = task.computeControlLaw();
// display points trajectory
for (std::list<vpImagePoint>::const_iterator it_cog = Lcog.begin(); it_cog != Lcog.end(); ++it_cog) {
vpDisplay::displayPoint(I, *it_cog, vpColor::red);
}
vpServoDisplay::display(task, cam, I);
vpDisplay::flush(I);
// Send the computed joint velocity to the robot
robot.setVelocity(vpRobot::ARTICULAR_FRAME, v);
error = (task.getError()).sumSquare();
std::cout << "|| s - s* || = " << error << std::endl;
if (error > 7) {
vpTRACE("Error detected while tracking visual features");
robot.stopMotion();
exit(1);
}
// display the pose
// pose.display(I,cMo,cam, 0.04, vpColor::red) ;
// display the pose
// pose.display(I,cdMo,cam, 0.04, vpColor::blue) ;
if ((SAVE == 1) && (iter % 3 == 0)) {
// Grab the overlayed display content into a color image before saving it to disk
vpDisplay::getImage(I, Ic);
std::stringstream ss;
ss << logdirname;
ss << "/image.";
ss << std::setfill('0') << std::setw(4);
ss << it++;
ss << ".ppm";
vpImageIo::write(Ic, ss.str());
}
}
// Stop the robot by sending a null velocity (the frame is irrelevant for a zero command)
v = 0;
robot.setVelocity(vpRobot::CAMERA_FRAME, v);
return EXIT_SUCCESS;
} catch (const vpException &e) {
std::cout << "Test failed with exception: " << e << std::endl;
return EXIT_FAILURE;
}
}
#else
int main()
{
std::cout << "You do not have an afma6 robot connected to your computer..." << std::endl;
return EXIT_SUCCESS;
}
#endif