Example of an eye-in-hand pose-based control law. Here we control the Afma6 robot at Inria. The velocity is computed in the camera frame. Visual features correspond to the 3D pose of the target (an AprilTag) in the camera frame.
Images are acquired with an Intel RealSense D435 camera.
Camera intrinsic parameters are retrieved from the RealSense SDK.
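As a side note, the short sketch below illustrates how such intrinsics can be queried from the SDK through the vpRealSense2 wrapper; the stream and the distortion model chosen here are only illustrative assumptions, not part of the example that follows.

#include <iostream>
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/sensor/vpRealSense2.h>

#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif

int main()
{
#if defined(VISP_HAVE_REALSENSE2)
  // Open the device with its default stream configuration
  vpRealSense2 rs;
  rs.open();

  // Query the color stream intrinsics, here modeled with distortion
  vpCameraParameters cam =
    rs.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithDistortion);
  std::cout << "cam:\n" << cam << std::endl;
#endif
  return 0;
}

In the full example below, the same kind of vpCameraParameters object is passed to the AprilTag detector for pose estimation.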
#include <cmath>
#include <iostream>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_AFMA6)
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpTime.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpRobotAfma6.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/visual_features/vpFeatureThetaU.h>
#include <visp3/visual_features/vpFeatureTranslation.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
std::vector<vpImagePoint> *traj_vip)
{
// Record the new position of each tracked point and draw its trajectory
for (size_t i = 0; i < vip.size(); ++i) {
if (traj_vip[i].size()) {
// Add the point only if it moved by more than one pixel since the last recorded position
if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
traj_vip[i].push_back(vip[i]);
}
}
else {
traj_vip[i].push_back(vip[i]);
}
}
for (size_t i = 0; i < vip.size(); ++i) {
for (size_t j = 1; j < traj_vip[i].size(); ++j) {
vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
}
}
}
int main(int argc, char **argv)
{
double opt_tagSize = 0.120;
int opt_quad_decimate = 2;
bool opt_verbose = false;
bool opt_plot = false;
bool opt_adaptive_gain = false;
bool opt_task_sequencing = false;
double opt_convergence_threshold_t = 0.0005;
double opt_convergence_threshold_tu = 0.5;
bool display_tag = true;
for (int i = 1; i < argc; ++i) {
if ((std::string(argv[i]) == "--tag-size") && (i + 1 < argc)) {
opt_tagSize = std::stod(argv[i + 1]);
++i;
}
else if (std::string(argv[i]) == "--verbose") {
opt_verbose = true;
}
else if (std::string(argv[i]) == "--plot") {
opt_plot = true;
}
else if (std::string(argv[i]) == "--adaptive-gain") {
opt_adaptive_gain = true;
}
else if (std::string(argv[i]) == "--task-sequencing") {
opt_task_sequencing = true;
}
else if ((std::string(argv[i]) == "--quad-decimate") && (i + 1 < argc)) {
opt_quad_decimate = std::stoi(argv[i + 1]);
++i;
}
else if (std::string(argv[i]) == "--no-convergence-threshold") {
opt_convergence_threshold_t = 0.;
opt_convergence_threshold_tu = 0.;
}
else if ((std::string(argv[i]) == "--help") || (std::string(argv[i]) == "-h")) {
std::cout
<< argv[0]
<< " [--tag-size <marker size in meter; default " << opt_tagSize << ">]"
<< " [--quad-decimate <decimation; default " << opt_quad_decimate << ">]"
<< " [--adaptive-gain]"
<< " [--plot]"
<< " [--task-sequencing]"
<< " [--no-convergence-threshold]"
<< " [--verbose]"
<< " [--help] [-h]"
<< std::endl;
return EXIT_SUCCESS;
}
}
try {
std::cout << "WARNING: This example will move the robot! "
<< "Please make sure to have the user stop button at hand!" << std::endl
<< "Press Enter to continue..." << std::endl;
std::cin.ignore();
vpRobotAfma6 robot;
// Configure and open the RealSense grabber (color, depth and infrared streams)
vpRealSense2 rs;
rs2::config config;
unsigned int width = 640, height = 480, fps = 60;
config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, fps);
config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
config.enable_stream(RS2_STREAM_INFRARED, width, height, RS2_FORMAT_Y8, fps);
rs.open(config);
// Warm up the camera by grabbing a few images
vpImage<unsigned char> I;
for (size_t i = 0; i < 10; ++i) {
rs.acquire(I);
}
// Camera intrinsic parameters used for pose estimation and display
vpCameraParameters cam;
robot.getCameraParameters(cam, I);
std::cout << "cam:\n" << cam << std::endl;
// Open a display window attached to the image
std::shared_ptr<vpDisplay> display = vpDisplayFactory::createDisplay(I);
// AprilTag detector set up to detect 36h11 tags and estimate their pose
vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
vpDetectorAprilTag detector(vpDetectorAprilTag::TAG_36h11);
detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
detector.setDisplayTag(display_tag);
detector.setAprilTagQuadDecimate(opt_quad_decimate);
detector.setZAlignedWithCameraAxis(true);
// Desired pose of the tag in the camera frame: tag centered, about three tag sizes
// in front of the camera (value to adapt to your setup)
vpHomogeneousMatrix cd_M_o(0, 0, opt_tagSize * 3, 0, 0, 0);
if (!detector.isZAlignedWithCameraAxis()) {
// Flip the tag frame so that its z axis is aligned with the camera z axis
vpHomogeneousMatrix oprim_M_o = { 1, 0, 0, 0,
0, -1, 0, 0,
0, 0, -1, 0,
0, 0, 0, 1 };
cd_M_o *= oprim_M_o;
}
// Current and desired visual features: 3D translation and theta-u rotation between
// the desired and the current camera frames
vpFeatureTranslation s_t(vpFeatureTranslation::cdMc), s_t_d(vpFeatureTranslation::cdMc);
vpFeatureThetaU s_tu(vpFeatureThetaU::cdRc), s_tu_d(vpFeatureThetaU::cdRc);
vpHomogeneousMatrix cd_M_c, c_M_o, o_M_o;
s_t.buildFrom(cd_M_c);
s_tu.buildFrom(cd_M_c);
// Eye-in-hand visual servoing task with velocity computed in the camera frame
vpServo task;
task.addFeature(s_t, s_t_d);
task.addFeature(s_tu, s_tu_d);
task.setServo(vpServo::EYEINHAND_CAMERA);
task.setInteractionMatrixType(vpServo::CURRENT);
if (opt_adaptive_gain) {
// vpAdaptiveGain(gain at zero, gain at infinity, slope at zero)
vpAdaptiveGain lambda(1.5, 0.4, 30.);
task.setLambda(lambda);
}
else {
task.setLambda(0.5);
}
// Optional real-time plot of the feature error and of the camera velocity
vpPlot *plotter = nullptr;
int iter_plot = 0;
if (opt_plot) {
plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10, "Real time curves plotter");
plotter->setTitle(0, "Visual features error");
plotter->setTitle(1, "Camera velocities");
plotter->initGraph(0, 6);
plotter->initGraph(1, 6);
plotter->setLegend(0, 0, "error_feat_tx");
plotter->setLegend(0, 1, "error_feat_ty");
plotter->setLegend(0, 2, "error_feat_tz");
plotter->setLegend(0, 3, "error_feat_theta_ux");
plotter->setLegend(0, 4, "error_feat_theta_uy");
plotter->setLegend(0, 5, "error_feat_theta_uz");
}
bool final_quit = false;
bool has_converged = false;
bool send_velocities = false;
bool servo_started = false;
std::vector<vpImagePoint> *traj_vip = nullptr; // Tag corner trajectories, for display only
vpColVector v_c(6); // Velocity twist applied in the camera frame
// Switch the robot to velocity control
robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
while (!has_converged && !final_quit) {
rs.acquire(I);
vpDisplay::display(I);
std::vector<vpHomogeneousMatrix> c_M_o_vec;
detector.detect(I, opt_tagSize, cam, c_M_o_vec);
std::stringstream ss;
ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
if (c_M_o_vec.size() == 1) {
c_M_o = c_M_o_vec[0];
static bool first_time = true;
if (first_time) {
std::vector<vpHomogeneousMatrix> v_o_M_o(2), v_cd_M_c(2);
v_o_M_o[1].buildFrom(0, 0, 0, 0, 0, M_PI);
for (size_t i = 0; i < 2; ++i) {
v_cd_M_c[i] = cd_M_o * v_o_M_o[i] * c_M_o.inverse();
}
if (std::fabs(v_cd_M_c[0].getThetaUVector().getTheta()) < std::fabs(v_cd_M_c[1].getThetaUVector().getTheta())) {
o_M_o = v_o_M_o[0];
}
else {
std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
o_M_o = v_o_M_o[1];
}
}
cd_M_c = cd_M_o * o_M_o * c_M_o.inverse();
// Update the current visual features from the pose error cd_M_c
s_t.buildFrom(cd_M_c);
s_tu.buildFrom(cd_M_c);
if (opt_task_sequencing) {
static double t_init_servo = vpTime::measureTimeMs();
if (!servo_started) {
if (send_velocities) {
servo_started = true;
}
t_init_servo = vpTime::measureTimeMs();
}
v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
}
else {
v_c = task.computeControlLaw();
}
std::vector<vpImagePoint> vip = detector.getPolygon(0);
vip.push_back(detector.getCog(0));
if (first_time) {
traj_vip = new std::vector<vpImagePoint>[vip.size()];
}
display_point_trajectory(I, vip, traj_vip);
if (opt_plot) {
plotter->plot(0, iter_plot, task.getError());
plotter->plot(1, iter_plot, v_c);
iter_plot++;
}
if (opt_verbose) {
std::cout << "v_c: " << v_c.t() << std::endl;
}
ss.str("");
ss << "error_t: " << error_tr;
ss.str("");
ss << "error_tu: " << error_tu;
if (opt_verbose)
std::cout << "error translation: " << error_tr << " ; error rotation: " << error_tu << std::endl;
if ((error_tr < opt_convergence_threshold_t) && (error_tu < opt_convergence_threshold_tu)) {
has_converged = true;
std::cout << "Servo task has converged" << std::endl;
;
}
if (first_time) {
first_time = false;
}
}
else {
v_c = 0;
}
if (!send_velocities) {
v_c = 0;
}
// Send the velocity twist to the robot controller, expressed in the camera frame
robot.setVelocity(vpRobot::CAMERA_FRAME, v_c);
ss.str("");
vpDisplay::flush(I);
// Left click toggles sending the velocities, right click quits
vpMouseButton::vpMouseButtonType button;
if (vpDisplay::getClick(I, button, false)) {
switch (button) {
case vpMouseButton::button1:
send_velocities = !send_velocities;
break;
case vpMouseButton::button3:
final_quit = true;
break;
default:
break;
}
}
}
std::cout << "Stop the robot " << std::endl;
if (opt_plot && plotter != nullptr) {
delete plotter;
plotter = nullptr;
}
if (!final_quit) {
while (!final_quit) {
rs.acquire(I);
vpDisplay::display(I);
vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
if (vpDisplay::getClick(I, false)) {
final_quit = true;
}
vpDisplay::flush(I);
}
}
if (traj_vip) {
delete[] traj_vip;
}
}
catch (const vpException &e) {
std::cout << "ViSP exception: " << e.what() << std::endl;
std::cout << "Stop the robot " << std::endl;
return EXIT_FAILURE;
}
return EXIT_SUCCESS;
}
#else
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if !defined(VISP_HAVE_DISPLAY)
std::cout << "ViSP is not built with display capabilities..." << std::endl;
#endif
#if !defined(VISP_HAVE_AFMA6)
std::cout << "ViSP is not built with Afma-6 robot support..." << std::endl;
#endif
return EXIT_SUCCESS;
}
#endif