Example of an eye-in-hand control law. Here we control a real robot, the Afma6 (a Cartesian robot with 6 degrees of freedom). The velocity is computed in the camera frame. The visual features are the two lines corresponding to the edges of a cylinder.
This example illustrates, on the one hand, a classical visual servoing on a cylinder and, on the other hand, the behaviour of the robot when a secondary task is added.
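Before the full listing, here is a minimal, self-contained sketch of the control law that the example builds: a vpServo task in the eye-in-hand configuration fed with two (rho, theta) line features, whose output velocity is completed by a secondary task projected onto the null space of the primary one. All numerical values below (line coordinates, plane parameters, adaptive gain, the de2dt vector) are illustrative placeholders; in the example they come from the vpMeLine trackers, the 3D cylinder model and the command line options.

#include <iostream>

#include <visp3/core/vpColVector.h>
#include <visp3/core/vpMath.h>
#include <visp3/visual_features/vpFeatureLine.h>
#include <visp3/vs/vpAdaptiveGain.h>
#include <visp3/vs/vpServo.h>

int main()
{
  // Two current and two desired line features (rho, theta). The (A, B, C, D) plane
  // attached to each line is a placeholder (plane z = 0.5 m in the camera frame);
  // in the real example it comes from projecting the 3D cylinder model.
  vpFeatureLine s_line[2], s_line_d[2];
  s_line[0].setRhoTheta(0.10, 0.1);
  s_line[1].setRhoTheta(0.12, M_PI - 0.1);
  s_line_d[0].setRhoTheta(0.15, 0.);   // first edge: vertical line at x = +rho
  s_line_d[1].setRhoTheta(0.15, M_PI); // second edge: vertical line at x = -rho
  for (int i = 0; i < 2; ++i) {
    s_line[i].setABCD(0., 0., 1., -0.5);
    s_line_d[i].setABCD(0., 0., 1., -0.5);
  }

  // Eye-in-hand task: the control law outputs a velocity expressed in the camera frame.
  vpServo task;
  task.setServo(vpServo::EYEINHAND_CAMERA);
  task.setInteractionMatrixType(vpServo::DESIRED);
  task.setLambda(vpAdaptiveGain(1.5, 0.4, 30.)); // gain at 0, gain at infinity, slope at 0
  for (int i = 0; i < 2; ++i) {
    task.addFeature(s_line[i], s_line_d[i]);
  }

  // Primary task: v = -lambda * L^+ * (s - s*)
  vpColVector v_c = task.computeControlLaw();

  // Secondary task: a desired motion de2/dt is projected onto the null space of the
  // primary task so that it does not disturb the regulation of the two lines.
  vpColVector de2dt(6, 0.);
  de2dt[0] = 0.02; // e.g. 2 cm/s along the camera x axis
  v_c += task.secondaryTask(de2dt);

  std::cout << "v_c: " << v_c.t() << std::endl;
  std::cout << "task error: " << task.getError().t() << std::endl;
  return 0;
}

The full example source follows.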
#include <iostream>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_AFMA6)
#include <visp3/core/vpImage.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/core/vpCylinder.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpMath.h>
#include <visp3/me/vpMeLine.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureLine.h>
#include <visp3/robot/vpRobotAfma6.h>
#include <visp3/vs/vpServoDisplay.h>
#include <visp3/vs/vpServo.h>
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
int main(int argc, char **argv)
{
bool opt_verbose = false;
bool opt_adaptive_gain = false;
for (int i = 1; i < argc; ++i) {
if (std::string(argv[i]) == "--verbose") {
opt_verbose = true;
}
else if (std::string(argv[i]) == "--adaptive-gain") {
opt_adaptive_gain = true;
}
else if ((std::string(argv[i]) == "--help") || (std::string(argv[i]) == "-h")) {
std::cout
<< argv[0]
<< " [--adaptive-gain]"
<< " [--verbose]"
<< " [--help] [-h]"
<< std::endl;
return EXIT_SUCCESS;
}
}
try {
std::cout << "WARNING: This example will move the robot! "
<< "Please make sure to have the user stop button at hand!" << std::endl
<< "Press Enter to continue..." << std::endl;
std::cin.ignore();
rs2::config config;
unsigned int width = 640, height = 480, fps = 60;
config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, fps);
config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
config.enable_stream(RS2_STREAM_INFRARED, width, height, RS2_FORMAT_Y8, fps);
for (size_t i = 0; i < 10; ++i) {
}
#ifdef VISP_HAVE_X11
vpDisplayX display(I, 100, 100, "Current image");
#elif defined(HAVE_OPENCV_HIGHGUI)
vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
vpDisplayGTK display(I, 100, 100, "Current image");
#endif
int nblines = 2;
std::vector<vpMeLine> line(nblines);
for (int i = 0; i < nblines; ++i) {
line[i].setMe(&me);
line[i].initTracking(I);
line[i].track(I);
}
robot.getCameraParameters(cam, I);
std::cout << "cam:\n" << cam << std::endl;
std::vector<vpFeatureLine> s_line(nblines);
for (int i = 0; i < nblines; ++i) {
}
std::vector<vpFeatureLine> s_line_d(nblines);
{
std::cout << "Desired features: " << std::endl;
std::cout << " - line 1: rho: " << s_line_d[0].getRho()
<< " theta: " << vpMath::deg(s_line_d[0].getTheta()) << "deg" << std::endl;
std::cout << " - line 2: rho: " << s_line_d[1].getRho()
<< " theta: " << vpMath::deg(s_line_d[1].getTheta()) << "deg" << std::endl;
}
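// Modify the desired features so that both cylinder edges are vertical in the image:
// theta = 0 places the first line at x = +rho, theta = M_PI places the second at x = -rho,
// i.e. the cylinder should appear upright and roughly centered at convergence.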
s_line_d[0].setRhoTheta(+fabs(s_line_d[0].getRho()), 0);
s_line_d[1].setRhoTheta(+fabs(s_line_d[1].getRho()), M_PI);
for (int i = 0; i < nblines; ++i) {
}
if (opt_adaptive_gain) {
}
else {
}
bool final_quit = false;
bool send_velocities = false;
double task_error = 1.;
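// First servo loop: regulate the two line features until the task error || s - s* || is small enough.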
while ((task_error > 0.00001) && (!final_quit)) {
std::stringstream ss;
ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
for (int i = 0; i < nblines; ++i) {
line[i].track(I);
}
if (opt_verbose) {
std::cout << "v: " << v_c.t() << std::endl;
std::cout << "\t\t || s - s* || = " << task_error << std::endl;
}
if (!send_velocities) {
v_c = 0;
}
ss.str("");
switch (button) {
case vpMouseButton::button1:
send_velocities = !send_velocities;
break;
case vpMouseButton::button3:
final_quit = true;
break;
default:
break;
}
}
}
unsigned long iter = 0;
double secondary_task_speed = 0.02;
unsigned int tempo = 1200;
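// Second servo loop: the primary task keeps the cylinder edges at their desired location
// while a cyclic secondary task moves the camera back and forth without disturbing them.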
while (!final_quit) {
std::stringstream ss;
ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
for (int i = 0; i < nblines; ++i) {
line[i].track(I);
}
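// Secondary task schedule, based on iter % tempo (assuming the usual [vx, vy, vz, wx, wy, wz]
// velocity ordering, so index 0 is a translation along the camera x axis and index 1 along y):
//   [0, 400)     : translate along +x at secondary_task_speed
//   [400, 600)   : translate along +y at secondary_task_speed
//   [600, 1000)  : translate along -x at secondary_task_speed
//   [1000, 1200) : translate along -y at secondary_task_speed
// In each phase the projected secondary task proj_e1 / proj_e2 is rescaled so that the chosen
// velocity component has magnitude secondary_task_speed before being added to v_c.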
if (iter % tempo < 400 /*&& iter%tempo >= 0*/) {
e2 = 0;
e1[0] = fabs(secondary_task_speed);
double frac = secondary_task_speed / proj_e1[0];
proj_e1 *= frac;
v_c += proj_e1;
if (iter == 199)
iter += 200;
}
if (iter % tempo < 600 && iter % tempo >= 400) {
e1 = 0;
e2[1] = fabs(secondary_task_speed);
double frac = secondary_task_speed / proj_e2[1];
proj_e2 *= frac;
v_c += proj_e2;
}
if (iter % tempo < 1000 && iter % tempo >= 600) {
e2 = 0;
e1[0] = -fabs(secondary_task_speed);
double frac = -secondary_task_speed / proj_e1[0];
proj_e1 *= frac;
v_c += proj_e1;
}
if (iter % tempo < 1200 && iter % tempo >= 1000) {
e1 = 0;
e2[1] = -fabs(secondary_task_speed);
double frac = -secondary_task_speed / proj_e2[1];
proj_e2 *= frac;
v_c += proj_e2;
}
ss.str("");
final_quit = true;
}
iter++;
}
std::cout << "Stop the robot " << std::endl;
if (!final_quit) {
while (!final_quit) {
final_quit = true;
}
}
}
}
catch (const vpException &e) {
std::cout << "ViSP exception: " << e.what() << std::endl;
std::cout << "Stop the robot " << std::endl;
return EXIT_FAILURE;
}
return EXIT_SUCCESS;
}
#else
int main()
{
std::cout << "You do not have an afma6 robot connected to your computer..." << std::endl;
return EXIT_SUCCESS;
}
#endif