Visual Servoing Platform version 3.6.1 under development (2024-11-14)
servoAfma6Points2DCamVelocityEyeToHand.cpp

Example of an eye-to-hand control law. We control here a real robot, the Afma6 robot (a Cartesian gantry robot with 6 degrees of freedom). The robot is controlled in the camera frame.
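The core of the example is the eye-to-hand configuration of the vpServo task: one 2D point feature is attached to each tracked dot, the interaction matrix is built from the current features, and the control law is mapped to joint velocities through the camera-to-end-effector twist c_V_e and the robot Jacobian e_J_e. The sketch below only illustrates this setup with the calls used in the full listing that follows; the helper function name is illustrative, and the features, twist and Jacobian are assumed to be updated elsewhere.

#include <visp3/core/vpConfig.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>

#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif

// Illustrative helper: configure an eye-to-hand task from current (s) and desired (s_d) point features
void buildEyeToHandTask(vpServo &task, vpFeaturePoint *s, vpFeaturePoint *s_d, int n)
{
  // Eye-to-hand control law, with the interaction expressed through c_V_e and e_J_e
  task.setServo(vpServo::EYETOHAND_L_cVe_eJe);
  // Build the interaction matrix from the current visual features
  task.setInteractionMatrixType(vpServo::CURRENT);
  for (int i = 0; i < n; ++i) {
    task.addFeature(s[i], s_d[i]);
  }
}

// At each iteration of the servo loop (see the listing below) the task is then updated with:
//   task.set_cVe(c_M_e);                         // camera to end-effector transformation
//   task.set_eJe(e_J_e);                         // robot Jacobian in the end-effector frame
//   vpColVector v = task.computeControlLaw();    // joint velocity
//   robot.setVelocity(vpRobot::JOINT_STATE, v);  // sent to the Afma6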

/*
* ViSP, open source Visual Servoing Platform software.
* Copyright (C) 2005 - 2024 by Inria. All rights reserved.
*
* This software is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* See the file LICENSE.txt at the root directory of this source
* distribution for additional information about the GNU GPL.
*
* For using ViSP with software that can not be combined with the GNU
* GPL, please contact Inria about acquiring a ViSP Professional
* Edition License.
*
* See https://visp.inria.fr for more information.
*
* This software was developed at:
* Inria Rennes - Bretagne Atlantique
* Campus Universitaire de Beaulieu
* 35042 Rennes Cedex
* France
*
* If you have questions regarding the use of this file, please contact
* Inria at visp@inria.fr
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*
* Description:
* tests the control law
* eye-to-hand control
* velocity computed in the camera frame
*/
#include <cstdlib>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <list>
#include <sstream>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY)
#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/blob/vpDot.h>
#include <visp3/robot/vpRobotAfma6.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
#define SAVE 0
#define L 0.006
#define D 0
int main()
{
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
try {
std::string username = vpIoTools::getUserName();
std::string logdirname = "/tmp/" + username;
if (SAVE) {
if (vpIoTools::checkDirectory(logdirname) == false) {
try {
// Create the log directory if it does not exist yet
vpIoTools::makeDirectory(logdirname);
}
catch (...) {
std::cerr << std::endl << "ERROR:" << std::endl;
std::cerr << " Cannot create " << logdirname << std::endl;
return EXIT_FAILURE;
}
}
}
// Configure and open the Realsense grabber
vpRealSense2 rs;
rs2::config config;
unsigned int width = 640, height = 480, fps = 60;
config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, fps);
config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
config.enable_stream(RS2_STREAM_INFRARED, width, height, RS2_FORMAT_Y8, fps);
rs.open(config);
vpImage<unsigned char> I; // Grey level image in which the dots are tracked
// Warm up camera
for (size_t i = 0; i < 10; ++i) {
rs.acquire(I);
}
std::shared_ptr<vpDisplay> d = vpDisplayFactory::createDisplay(I, 100, 100, "Current image");
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << " Test program for vpServo " << std::endl;
std::cout << " Eye-to-hand task control" << std::endl;
std::cout << " Simulation " << std::endl;
std::cout << " task : servo a point " << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
const int nbPoint = 7;
vpDot dot[nbPoint];
vpImagePoint cog;
// Display the image so that the user can initialize the dots to track
vpDisplay::display(I);
vpDisplay::flush(I);
for (int i = 0; i < nbPoint; ++i) {
dot[i].initTracking(I);
dot[i].setGraphics(true);
dot[i].track(I);
dot[i].setGraphics(false);
}
// 3D model of the object: coordinates of the points in the object frame
vpPoint point[nbPoint];
point[0].setWorldCoordinates(-2 * L, D, -3 * L);
point[1].setWorldCoordinates(0, D, -3 * L);
point[2].setWorldCoordinates(2 * L, D, -3 * L);
point[3].setWorldCoordinates(-L, D, -L);
point[4].setWorldCoordinates(L, D, -L);
point[5].setWorldCoordinates(L, D, L);
point[6].setWorldCoordinates(-L, D, L);
vpRobotAfma6 robot;
robot.init(vpAfma6::TOOL_INTEL_D435_CAMERA, vpCameraParameters::perspectiveProjWithoutDistortion);
// Get camera intrinsics
vpCameraParameters cam;
robot.getCameraParameters(cam, I);
vpHomogeneousMatrix c_M_o, cd_M_o;
vpPose pose;
pose.clearPoint();
for (int i = 0; i < nbPoint; ++i) {
cog = dot[i].getCog();
double x = 0, y = 0;
// Convert the dot center of gravity from pixels to normalized image plane coordinates
vpPixelMeterConversion::convertPoint(cam, cog, x, y);
point[i].set_x(x);
point[i].set_y(y);
pose.addPoint(point[i]);
}
// Compute the initial pose using the Dementhon-Lagrange method followed by
// a non linear minimization method
pose.computePose(vpPose::DEMENTHON_LAGRANGE_VIRTUAL_VS, c_M_o);
std::cout << "c_M_o: \n" << c_M_o << std::endl;
/*
* Learning or reading the desired position
*/
std::cout << "Learning (0/1)? " << std::endl;
std::string name = "cd_M_o.dat";
int learning;
std::cin >> learning;
if (learning == 1) {
// save the object position
std::cout << "Save the location of the object in a file cd_M_o.dat" << std::endl;
std::ofstream f(name.c_str());
c_M_o.save(f);
f.close();
exit(1);
}
{
std::cout << "Loading desired location from cd_M_o.dat" << std::endl;
std::ifstream f("cd_M_o.dat");
cd_M_o.load(f);
f.close();
}
vpFeaturePoint s[nbPoint], s_d[nbPoint];
// Set the desired position of the points by forward projection using the pose cd_M_o
for (int i = 0; i < nbPoint; ++i) {
vpColVector cP, p;
point[i].changeFrame(cd_M_o, cP);
point[i].projection(cP, p);
s_d[i].set_x(p[0]);
s_d[i].set_y(p[1]);
}
// Define the task
// - we want an eye-to-hand control law
// - robot is controlled in the camera frame
vpServo task;
task.setServo(vpServo::EYETOHAND_L_cVe_eJe);
task.setInteractionMatrixType(vpServo::CURRENT);
// - we want to see a point on a point
for (int i = 0; i < nbPoint; ++i) {
task.addFeature(s[i], s_d[i]);
}
// - display task information
task.print();
double convergence_threshold = 0.00; // 025 ;
double error = 1;
unsigned int iter = 0;
// position of the object in the effector frame
vpHomogeneousMatrix o_M_camrobot;
o_M_camrobot[0][3] = -0.05;
int it = 0;
std::list<vpImagePoint> Lcog;
// Switch the robot to joint velocity control
robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
bool quit = false;
while ((error > convergence_threshold) && (!quit)) {
std::cout << "---------------------------------------------" << iter++ << std::endl;
rs.acquire(I);
vpDisplay::display(I);
try {
for (int i = 0; i < nbPoint; ++i) {
dot[i].track(I);
Lcog.push_back(dot[i].getCog());
}
}
catch (...) {
std::cout << "Error detected while tracking visual features" << std::endl;
robot.stopMotion();
return EXIT_FAILURE;
}
// Compute the current pose using a non linear minimization method
pose.clearPoint();
for (int i = 0; i < nbPoint; ++i) {
double x = 0, y = 0;
cog = dot[i].getCog();
// Convert the dot center of gravity from pixels to normalized image plane coordinates
vpPixelMeterConversion::convertPoint(cam, cog, x, y);
point[i].set_x(x);
point[i].set_y(y);
vpColVector cP;
point[i].changeFrame(cd_M_o, cP);
s[i].set_x(x);
s[i].set_y(y);
s[i].set_Z(cP[2]);
pose.addPoint(point[i]);
point[i].display(I, c_M_o, cam, vpColor::green);
point[i].display(I, cd_M_o, cam, vpColor::blue);
}
pose.computePose(vpPose::LOWE, c_M_o);
// - set the camera to end-effector velocity twist matrix transformation
vpHomogeneousMatrix c_M_e, camrobot_M_e;
robot.get_cMe(camrobot_M_e);
c_M_e = c_M_o * o_M_camrobot * camrobot_M_e;
task.set_cVe(c_M_e);
// - set the Jacobian (expressed in the end-effector frame)
vpMatrix e_J_e;
robot.get_eJe(e_J_e);
task.set_eJe(e_J_e);
// - set the task adaptive gain
vpAdaptiveGain lambda_adaptive;
lambda_adaptive.initStandard(1.7, 0.3, 1.5); // lambda(0)=1.7, lambda(oo)=0.3 and lambda'(0)=1.5
task.setLambda(lambda_adaptive);
// Display points trajectory
for (std::list<vpImagePoint>::const_iterator it_cog = Lcog.begin(); it_cog != Lcog.end(); ++it_cog) {
vpDisplay::displayPoint(I, *it_cog, vpColor::green);
}
// Display the task current and desired visual features
vpServoDisplay::display(task, cam, I);
// Compute the control law and apply the joint velocity to the robot
vpColVector v = task.computeControlLaw();
robot.setVelocity(vpRobot::JOINT_STATE, v);
error = (task.getError()).sumSquare();
std::cout << "|| s - s* || = " << error << std::endl;
if (error > 7) {
std::cout << "Task error is too large: stop the robot" << std::endl;
robot.stopMotion();
return EXIT_FAILURE;
}
if ((SAVE == 1) && (iter % 3 == 0)) {
std::stringstream ss;
ss << logdirname;
ss << "/image.";
ss << std::setfill('0') << std::setw(4);
ss << it++;
ss << ".png";
vpImageIo::write(Ic, ss.str());
}
vpDisplay::displayText(I, 20, 20, "Click to quit...", vpColor::red);
vpDisplay::flush(I);
if (vpDisplay::getClick(I, false)) {
quit = true;
}
}
return EXIT_SUCCESS;
}
catch (const vpException &e) {
std::cout << "Visual servo failed with exception: " << e << std::endl;
return EXIT_FAILURE;
}
}
#else
int main()
{
std::cout << "You do not have an afma6 robot connected to your computer..." << std::endl;
return EXIT_SUCCESS;
}
#endif