Example of an eye-in-hand control law. We control here a real robot, the Afma6 robot (a Cartesian robot with 6 degrees of freedom). The velocity is computed in the camera frame. The visual features are the image coordinates of 4 points tracked as vpDot2 blobs. The interaction matrix is computed using the current visual features.
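With this configuration, the velocity returned by vpServo::computeControlLaw() follows the classical image-based visual servoing law v = -λ L_s^+ (s - s*), where s stacks the normalized coordinates (x, y) of the four tracked points, s* contains their desired values, λ is the constant gain set with vpServo::setLambda(), and L_s^+ is the pseudo-inverse of the interaction matrix evaluated at the current features, using the point depths Z estimated from the pose of the target.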
#include <fstream>
#include <iostream>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY)
#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/blob/vpDot2.h>
#include <visp3/robot/vpRobotAfma6.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
#define L 0.06
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
/*
 * Compute the pose c_M_o of the target from its 3D model points and the
 * tracked dots. At the first call (init = true) the pose is initialised with
 * a linear method, then the previous pose is refined by virtual visual servoing.
 */
void compute_pose(std::vector<vpPoint> &point, const std::vector<vpDot2> &dot,
                  const vpCameraParameters &cam, vpHomogeneousMatrix &c_M_o, bool init)
{
  vpPose pose;
  for (size_t i = 0; i < point.size(); ++i) {
    double x = 0, y = 0;
    vpImagePoint cog = dot[i].getCog();
    // Convert the dot center of gravity from pixel to normalized coordinates
    vpPixelMeterConversion::convertPoint(cam, cog.get_u(), cog.get_v(), x, y);
    point[i].set_x(x);
    point[i].set_y(y);
    pose.addPoint(point[i]);
  }
  if (init == true) {
    // First iteration: initialise the pose with a linear method
    pose.computePose(vpPose::DEMENTHON_LAGRANGE_VIRTUAL_VS, c_M_o);
  }
  else {
    // Next iterations: refine the previous pose by virtual visual servoing
    pose.computePose(vpPose::VIRTUAL_VS, c_M_o);
  }
}
int main()
{
  // Log files are saved in /tmp/<username>
  std::string username = vpIoTools::getUserName();
  std::string logdirname = "/tmp/" + username;
  // Test if the output path exists; if not, try to create it
  if (vpIoTools::checkDirectory(logdirname) == false) {
    try {
      vpIoTools::makeDirectory(logdirname);
    }
    catch (...) {
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Cannot create " << logdirname << std::endl;
      return EXIT_FAILURE;
    }
  }
std::string logfilename = logdirname + "/log.dat";
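  // Open the log file where velocities, joint state, visual error and pose are saved at each iteration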
std::ofstream flog(logfilename.c_str());
  try {
    // Image acquired by the Realsense camera
    vpImage<unsigned char> I;
    // Open the Realsense grabber
    vpRealSense2 rs;
    rs2::config config;
    unsigned int width = 640, height = 480, fps = 60;
    config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, fps);
    config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
    config.enable_stream(RS2_STREAM_INFRARED, width, height, RS2_FORMAT_Y8, fps);
    rs.open(config);
    // Warm up the camera by acquiring a few images
    for (size_t i = 0; i < 10; ++i) {
      rs.acquire(I);
    }
    // Open a window and display the current image
    std::shared_ptr<vpDisplay> display = vpDisplayFactory::createDisplay(I, 100, 100, "Current image");
    vpDisplay::display(I);
    vpDisplay::flush(I);

    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << " Test program for vpServo " << std::endl;
    std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl;
    std::cout << " Use of the Afma6 robot " << std::endl;
    std::cout << " Interaction matrix computed with the current features " << std::endl;
    std::cout << " task : servo 4 points on a square with dimension " << L << " meters" << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::vector<vpDot2> dot(4);
    std::cout << "Click on the 4 dots clockwise starting from upper/left dot..." << std::endl;
    for (size_t i = 0; i < dot.size(); ++i) {
      // Wait for a user click on each blob and initialise its tracker
      dot[i].setGraphics(true);
      dot[i].initTracking(I);
      vpDisplay::displayCross(I, dot[i].getCog(), 10, vpColor::blue);
      vpDisplay::flush(I);
    }
    // Connect to the Afma6 robot; here we assume the end-effector carries an
    // Intel Realsense D435 camera and use the projection model with distortion
    vpRobotAfma6 robot;
    robot.init(vpAfma6::TOOL_INTEL_D435_CAMERA, vpCameraParameters::perspectiveProjWithDistortion);
    // Get the intrinsic camera parameters associated to the image resolution
    vpCameraParameters cam;
    robot.getCameraParameters(cam, I);

    // Set the current position of the visual features from the tracked dots
    std::vector<vpFeaturePoint> s(4);
    for (size_t i = 0; i < s.size(); ++i) {
      vpFeatureBuilder::create(s[i], cam, dot[i]); // initialise x, y from the dot cog
    }
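    // 3D coordinates of the four target points, expressed in an object frame
    // centered in the middle of the square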
    std::vector<vpPoint> point(4);
    point[0].setWorldCoordinates(-L, -L, 0);
    point[1].setWorldCoordinates(+L, -L, 0);
    point[2].setWorldCoordinates(+L, +L, 0);
    point[3].setWorldCoordinates(-L, +L, 0);

    // Desired pose of the object in the camera frame, used to compute the desired
    // features s* (example values: target centered, 0.5 m in front of the camera)
    vpTranslationVector c_t_o_d(0, 0, 0.5);
    vpRxyzVector c_r_o_d(vpMath::rad(0), vpMath::rad(0), vpMath::rad(0));
    vpRotationMatrix c_R_o_d(c_r_o_d);
    vpHomogeneousMatrix c_M_o;
    c_M_o.buildFrom(c_t_o_d, c_R_o_d);

    // Set the desired position of the 2D visual features by projecting the 3D
    // points with this desired pose
    std::vector<vpFeaturePoint> s_d(4);
    vpColVector cP, p;
    for (size_t i = 0; i < s_d.size(); ++i) {
      point[i].changeFrame(c_M_o, cP);
      point[i].projection(cP, p);
      s_d[i].set_x(p[0]);
      s_d[i].set_y(p[1]);
      s_d[i].set_Z(cP[2]);
    }
    // Define the task:
    // - eye-in-hand control law, velocity computed in the camera frame
    // - interaction matrix computed with the current visual features
    vpServo task;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
    // We want each current point feature to reach its desired position
    for (size_t i = 0; i < s.size(); ++i) {
      task.addFeature(s[i], s_d[i]);
    }
    task.setLambda(0.2); // constant gain of the control law (example value)
    task.print();        // display the task information

    // Initialise the velocity controller of the robot
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
std::cout << "\nHit CTRL-C to stop the loop...\n" << std::flush;
bool init_pose_from_linear_method = true;
bool quit = false;
while (!quit) {
for (size_t i = 0; i < dot.size(); ++i) {
dot[i].track(I);
}
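      // Estimate the pose c_M_o of the target from the tracked dots; the
      // linear initialisation is only used at the first iteration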
      compute_pose(point, dot, cam, c_M_o, init_pose_from_linear_method);
      if (init_pose_from_linear_method) {
        init_pose_from_linear_method = false;
      }
      for (size_t i = 0; i < dot.size(); ++i) {
        // Update the current feature x, y from the tracked dot and its depth Z
        // from the estimated pose
        vpFeatureBuilder::create(s[i], cam, dot[i]);
        point[i].changeFrame(c_M_o, cP);
        s[i].set_Z(cP[2]);
      }
      // Compute the control law: the camera velocity to apply
      vpColVector v = task.computeControlLaw();
      // Display the current and desired feature points in the image
      vpServoDisplay::display(task, cam, I);
      // Send the computed camera velocity to the robot
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      // Log the applied camera velocity, the measured joint velocities and
      // positions, the visual error (s - s*) and the estimated pose
      vpColVector qvel, q;
      robot.getVelocity(vpRobot::JOINT_STATE, qvel);
      robot.getPosition(vpRobot::JOINT_STATE, q);
      vpTranslationVector c_t_o;
      vpRotationMatrix c_R_o;
      c_M_o.extract(c_t_o);
      c_M_o.extract(c_R_o);
      vpRxyzVector c_r_o(c_R_o);
      flog << v[0] << " " << v[1] << " " << v[2] << " " << v[3] << " " << v[4] << " " << v[5] << " ";
      flog << qvel[0] << " " << qvel[1] << " " << qvel[2] << " " << qvel[3] << " " << qvel[4] << " " << qvel[5] << " ";
      flog << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << " " << q[4] << " " << q[5] << " ";
      flog << (task.getError()).t() << " ";
      flog << c_t_o[0] << " " << c_t_o[1] << " " << c_t_o[2] << " "
           << c_r_o[0] << " " << c_r_o[1] << " " << c_r_o[2] << std::endl;
      // A click in the image stops the servo loop
      vpDisplay::displayText(I, 20, 20, "Click to quit...", vpColor::red);
      if (vpDisplay::getClick(I, false)) {
        quit = true;
      }
      vpDisplay::flush(I);
    }
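    // Close the log file before leaving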
flog.close();
return EXIT_SUCCESS;
  }
  catch (const vpException &e) {
flog.close();
std::cout << "Visual servo failed with exception: " << e << std::endl;
return EXIT_FAILURE;
}
}
#else
int main()
{
std::cout << "You do not have an afma6 robot connected to your computer..." << std::endl;
return EXIT_SUCCESS;
}
#endif