Example of an eye-to-hand control law. We control here a real robot, the Afma6 (a Cartesian robot with 6 degrees of freedom). A fixed camera observes a target attached to the robot end-effector; the visual features are expressed in the camera frame and the control law drives the robot so that the target reaches a desired location in the image.
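Before the full listing, here is a minimal sketch of how the eye-to-hand task is assembled with vpServo: the task is configured as EYETOHAND_L_cVe_eJe, fed with the camera-to-end-effector twist transformation cVe and the robot Jacobian eJe, and computeControlLaw() then returns a joint velocity. The feature values, the gain and the identity cVe/eJe below are placeholders for illustration only; the real example computes them from the tracked dots and from the robot.

#include <iostream>

#include <visp3/core/vpConfig.h>
#include <visp3/core/vpColVector.h>
#include <visp3/core/vpMatrix.h>
#include <visp3/core/vpVelocityTwistMatrix.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>

int main()
{
#ifdef ENABLE_VISP_NAMESPACE
  using namespace VISP_NAMESPACE_NAME;
#endif
  vpServo task;
  task.setServo(vpServo::EYETOHAND_L_cVe_eJe); // eye-to-hand configuration
  task.setInteractionMatrixType(vpServo::CURRENT);
  task.setLambda(0.1); // constant gain (placeholder value)

  // One (current, desired) point feature pair; x, y, Z are placeholder values
  vpFeaturePoint s, s_star;
  s.buildFrom(0.1, 0.1, 0.5);
  s_star.buildFrom(0.0, 0.0, 0.5);
  task.addFeature(s, s_star);

  // Camera-to-end-effector twist and robot Jacobian; identity placeholders here,
  // computed from c_M_e and from the robot in the real example
  vpVelocityTwistMatrix cVe;
  task.set_cVe(cVe);
  vpMatrix eJe(6, 6);
  eJe.eye();
  task.set_eJe(eJe);

  // Joint velocity that would be sent to the robot with setVelocity()
  vpColVector qdot = task.computeControlLaw();
  std::cout << "qdot: " << qdot.t() << std::endl;
  return 0;
}

The complete example, which tracks the dots of the target, estimates its pose and sends the resulting joint velocities to the Afma6, is given below.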
#include <cstdlib>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <list>
#include <sstream>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY)
#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/blob/vpDot2.h>
#include <visp3/robot/vpRobotAfma6.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
#define SAVE 0
#define L 0.006
#define D 0
int main()
{
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
try {
// Directory /tmp/<username> used to save images when SAVE is enabled
std::string username;
vpIoTools::getUserName(username);
std::string logdirname = "/tmp/" + username;
if (SAVE) {
try {
// Create the output directory if it does not exist yet
if (vpIoTools::checkDirectory(logdirname) == false) {
vpIoTools::makeDirectory(logdirname);
}
}
catch (...) {
std::cerr << std::endl << "ERROR:" << std::endl;
std::cerr << " Cannot create " << logdirname << std::endl;
return EXIT_FAILURE;
}
}

// External camera observing the target attached to the robot end-effector
vpImage<unsigned char> I;
vpRealSense2 rs;
rs2::config config;
unsigned int width = 640, height = 480, fps = 60;
config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, fps);
config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
config.enable_stream(RS2_STREAM_INFRARED, width, height, RS2_FORMAT_Y8, fps);
rs.open(config);

// Warm up the camera by dropping the first frames
for (size_t i = 0; i < 10; ++i) {
rs.acquire(I);
}

// Open a window showing the camera view
std::shared_ptr<vpDisplay> display = vpDisplayFactory::createDisplay();
display->init(I, 100, 100, "Current image");
vpDisplay::display(I);
vpDisplay::flush(I);
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << " Test program for vpServo " << std::endl;
std::cout << " Eye-to-hand task control" << std::endl;
std::cout << " Simulation " << std::endl;
std::cout << " task : servo a point " << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
// Track the dots glued on the target attached to the robot end-effector
const int nbPoint = 7;
vpDot2 dot[nbPoint];
std::cout << "Click on the " << nbPoint << " dots of the target" << std::endl;
for (int i = 0; i < nbPoint; ++i) {
dot[i].setGraphics(true);
dot[i].initTracking(I);
vpDisplay::flush(I);
}

// Connect to the robot and get the camera intrinsic parameters
// (assumption: the tool type below matches your own setup)
vpRobotAfma6 robot;
robot.init(vpAfma6::TOOL_INTEL_D435_CAMERA, vpCameraParameters::perspectiveProjWithoutDistortion);
vpCameraParameters cam;
robot.getCameraParameters(cam, I);

// 3D model of the target; the coordinates below are placeholders expressed
// with the L and D defines, adapt them to the geometry of your target
vpPoint point[nbPoint];
point[0].setWorldCoordinates(-2 * L, -2 * L, D);
point[1].setWorldCoordinates(0, -2 * L, D);
point[2].setWorldCoordinates(2 * L, -2 * L, D);
point[3].setWorldCoordinates(-2 * L, 0, D);
point[4].setWorldCoordinates(2 * L, 0, D);
point[5].setWorldCoordinates(-2 * L, 2 * L, D);
point[6].setWorldCoordinates(2 * L, 2 * L, D);

// Compute the pose c_M_o of the target with respect to the camera
vpHomogeneousMatrix c_M_o;
vpPose pose;
for (int i = 0; i < nbPoint; ++i) {
double x = 0, y = 0;
vpPixelMeterConversion::convertPoint(cam, dot[i].getCog(), x, y);
point[i].set_x(x);
point[i].set_y(y);
pose.addPoint(point[i]);
}
pose.computePose(vpPose::DEMENTHON_LAGRANGE_VIRTUAL_VS, c_M_o);
std::cout << "c_M_o: \n" << c_M_o << std::endl;
std::cout << "Learning (0/1)? " << std::endl;
std::string name = "cd_M_o.dat";
int learning;
std::cin >> learning;
if (learning == 1) {
std::cout << "Save the location of the object in a file cd_M_o.dat" << std::endl;
std::ofstream f(name.c_str());
f.close();
exit(1);
}
{
std::cout << "Loading desired location from cd_M_o.dat" << std::endl;
std::ifstream f("cd_M_o.dat");
f.close();
}
// Desired visual features: projection of the target model at the desired pose
vpFeaturePoint p[nbPoint], pd[nbPoint];
for (int i = 0; i < nbPoint; ++i) {
point[i].track(cd_M_o);
vpFeatureBuilder::create(pd[i], point[i]);
}

// Eye-to-hand task: the task Jacobian is L * cVe * eJe, so the control law
// outputs a joint velocity
vpServo task;
task.setServo(vpServo::EYETOHAND_L_cVe_eJe);
task.setInteractionMatrixType(vpServo::CURRENT);

// Current visual features initialized from the tracked dots
for (int i = 0; i < nbPoint; ++i) {
vpFeatureBuilder::create(p[i], cam, dot[i]);
task.addFeature(p[i], pd[i]);
}

// Adaptive gain to speed up the convergence near the goal (indicative values)
vpAdaptiveGain lambda;
lambda.initStandard(4, 0.4, 40);
task.setLambda(lambda);

// Switch the robot to joint velocity control
robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
// Servo loop: the convergence threshold is 0, so the loop stops on a click or on error
double convergence_threshold = 0.00;
double error = 1;
unsigned int iter = 0;

// Transformations used to update c_M_e = c_M_o * o_M_camrobot * camrobot_M_e:
// the target frame is shifted by -5 cm along x from the robot tool camera,
// and camrobot_M_e comes from the robot tool calibration (assumed here)
vpHomogeneousMatrix o_M_camrobot, camrobot_M_e, c_M_e;
o_M_camrobot[0][3] = -0.05;
robot.get_cMe(camrobot_M_e);

int it = 0;
std::list<vpImagePoint> Lcog;
bool quit = false;
while ((error > convergence_threshold) && (!quit)) {
std::cout << "---------------------------------------------" << iter++ << std::endl;
try {
// Acquire a new image and track the dots
rs.acquire(I);
vpDisplay::display(I);
for (int i = 0; i < nbPoint; ++i) {
dot[i].track(I);
Lcog.push_back(dot[i].getCog());
}
}
catch (...) {
std::cout << "Error detected while tracking visual features" << std::endl;
robot.stopMotion();
return EXIT_FAILURE;
}
// Update the current features and recompute the pose of the target
pose.clearPoint();
for (int i = 0; i < nbPoint; ++i) {
double x = 0, y = 0;
vpPixelMeterConversion::convertPoint(cam, dot[i].getCog(), x, y);
point[i].set_x(x);
point[i].set_y(y);
pose.addPoint(point[i]);
p[i].set_x(x);
p[i].set_y(y);
}
pose.computePose(vpPose::DEMENTHON_LAGRANGE_VIRTUAL_VS, c_M_o);
c_M_e = c_M_o * o_M_camrobot * camrobot_M_e;
// Feed the task with the camera-to-end-effector twist transformation and the
// robot Jacobian, then compute and send the joint velocity
vpVelocityTwistMatrix c_V_e(c_M_e);
task.set_cVe(c_V_e);
vpMatrix eJe;
robot.get_eJe(eJe);
task.set_eJe(eJe);
vpColVector qdot = task.computeControlLaw();
robot.setVelocity(vpRobot::ARTICULAR_FRAME, qdot);

// Display the current and desired features and the trajectory of the dots
vpServoDisplay::display(task, cam, I);
for (std::list<vpImagePoint>::const_iterator it_cog = Lcog.begin(); it_cog != Lcog.end(); ++it_cog) {
vpDisplay::displayPoint(I, *it_cog, vpColor::blue);
}
error = (task.getError()).sumSquare();
std::cout << "|| s - s* || = " << error << std::endl;
if (error > 7) {
std::cout << "Error detected while tracking visual features" << std::endl;
robot.stopMotion();
return EXIT_FAILURE;
}
// Save the displayed image with its overlay every 3 iterations
if ((SAVE == 1) && (iter % 3 == 0)) {
std::stringstream ss;
ss << logdirname;
ss << "/image.";
ss << std::setfill('0') << std::setw(4);
ss << it++;
ss << ".png";
vpImage<vpRGBa> Ic;
vpDisplay::getImage(I, Ic);
vpImageIo::write(Ic, ss.str());
}
// Stop the loop when the user clicks in the image
vpDisplay::displayText(I, 20, 20, "Click to quit...", vpColor::red);
vpDisplay::flush(I);
if (vpDisplay::getClick(I, false)) {
quit = true;
}
}
// Stop the robot before quitting
robot.stopMotion();
return EXIT_SUCCESS;
}
catch (const vpException &e) {
std::cout << "Visual servo failed with exception: " << e << std::endl;
return EXIT_FAILURE;
}
}
#else
int main()
{
std::cout << "You do not have an afma6 robot connected to your computer..." << std::endl;
return EXIT_SUCCESS;
}
#endif