56 #include <visp3/core/vpCameraParameters.h>
57 #include <visp3/detection/vpDetectorAprilTag.h>
58 #include <visp3/gui/vpDisplayGDI.h>
59 #include <visp3/gui/vpDisplayX.h>
60 #include <visp3/gui/vpPlot.h>
61 #include <visp3/io/vpImageIo.h>
62 #include <visp3/robot/vpRobotAfma6.h>
63 #include <visp3/sensor/vpRealSense2.h>
64 #include <visp3/visual_features/vpFeatureBuilder.h>
65 #include <visp3/visual_features/vpFeaturePoint.h>
66 #include <visp3/vs/vpServo.h>
67 #include <visp3/vs/vpServoDisplay.h>
69 #if defined(VISP_HAVE_REALSENSE2) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_AFMA6)
// NOTE(review): lossy excerpt of a ViSP example listing -- the leading integer
// on each line is the original file's line number; the function header (around
// original lines 70-71) and several body lines are missing from this view.
// From what is visible: display_point_trajectory() appends the current point
// positions `vip` to per-point trajectories `traj_vip` (an array of vectors,
// one per tracked point), then draws each trajectory as segments between
// consecutive points -- TODO confirm against the full source.
72 std::vector<vpImagePoint> *traj_vip)
// First pass: push the new position of every visible point onto its trajectory.
74 for (
size_t i = 0; i < vip.size(); i++) {
75 if (traj_vip[i].size()) {
78 traj_vip[i].push_back(vip[i]);
82 traj_vip[i].push_back(vip[i]);
// Second pass: draw each trajectory; j starts at 1 so each segment joins
// trajectory point j-1 to point j (presumably via vpDisplay::displayLine).
85 for (
size_t i = 0; i < vip.size(); i++) {
86 for (
size_t j = 1; j < traj_vip[i].size(); j++) {
// main() of the Afma6 + AprilTag image-based visual-servoing example.
// NOTE(review): this listing is a lossy extraction -- the leading integer on
// each line is the original file's line number, and many lines are missing.
92 int main(
int argc,
char **argv)
// Default options, overridable from the command line (parsed below).
// Side length of the AprilTag marker, in meters.
94 double opt_tagSize = 0.120;
// Draw the detected tag outline in the image display.
95 bool display_tag =
true;
// AprilTag detector image decimation factor (speed vs. accuracy trade-off).
96 int opt_quad_decimate = 2;
97 bool opt_verbose =
false;
// Open a vpPlot window with feature-error and camera-velocity curves.
98 bool opt_plot =
false;
// Use an adaptive servo gain instead of a constant one (see --adaptive_gain).
99 bool opt_adaptive_gain =
false;
// Ramp velocities at servo start (task sequencing; see --task_sequencing).
100 bool opt_task_sequencing =
false;
// Servo is declared converged when the feature error drops below this value;
// --no-convergence-threshold sets it to 0 so the loop never auto-stops.
101 double convergence_threshold = 0.00005;
// Command-line parsing: each flag either toggles a boolean option or consumes
// the next argument as its value. NOTE(review): the bodies of several branches
// (e.g. --verbose, --plot) are missing from this lossy listing.
103 for (
int i = 1; i < argc; i++) {
104 if (std::string(argv[i]) ==
"--tag_size" && i + 1 < argc) {
105 opt_tagSize = std::stod(argv[i + 1]);
107 else if (std::string(argv[i]) ==
"--verbose") {
110 else if (std::string(argv[i]) ==
"--plot") {
113 else if (std::string(argv[i]) ==
"--adaptive_gain") {
114 opt_adaptive_gain =
true;
116 else if (std::string(argv[i]) ==
"--task_sequencing") {
117 opt_task_sequencing =
true;
119 else if (std::string(argv[i]) ==
"--quad_decimate" && i + 1 < argc) {
120 opt_quad_decimate = std::stoi(argv[i + 1]);
122 else if (std::string(argv[i]) ==
"--no-convergence-threshold") {
123 convergence_threshold = 0.;
// --help / -h: print the usage string (built from the current defaults) and,
// presumably, return early -- the surrounding statements are cut from view.
125 else if (std::string(argv[i]) ==
"--help" || std::string(argv[i]) ==
"-h") {
127 << argv[0] <<
" --tag_size <marker size in meter; default " << opt_tagSize <<
">] "
128 <<
"[--quad_decimate <decimation; default " << opt_quad_decimate
129 <<
">] [--adaptive_gain] [--plot] [--task_sequencing] [--no-convergence-threshold] [--verbose] [--help] [-h]"
// Safety prompt before any motion: this example drives a real robot.
138 std::cout <<
"WARNING: This example will move the robot! "
139 <<
"Please make sure to have the user stop button at hand!" << std::endl
140 <<
"Press Enter to continue..." << std::endl;
// Move the robot to an initial joint position (q is set in lines missing from
// this listing), then configure the RealSense camera streams.
154 std::cout <<
"Move to joint position: " << q.t() << std::endl;
160 unsigned int width = 640, height = 480;
// 640x480 @ 30 fps for color, depth and infrared streams.
161 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
162 config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
163 config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
// Print the intrinsic camera parameters retrieved from the sensor.
169 std::cout <<
"cam:\n" << cam <<
"\n";
// Pick the available display backend (X11 on Unix, GDI on Windows).
173 #if defined(VISP_HAVE_X11)
175 #elif defined(VISP_HAVE_GDI)
// AprilTag detector configuration from the parsed options.
183 detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
184 detector.setDisplayTag(display_tag);
185 detector.setAprilTagQuadDecimate(opt_quad_decimate);
// Four point features: current (p) and desired (pd), one per tag corner.
195 std::vector<vpFeaturePoint> p(4), pd(4);
// 3D model of the tag: its four corners in the tag frame, centered at the
// origin with side length opt_tagSize (Z = 0 plane).
198 std::vector<vpPoint> point(4);
199 point[0].setWorldCoordinates(-opt_tagSize / 2., -opt_tagSize / 2., 0);
200 point[1].setWorldCoordinates(opt_tagSize / 2., -opt_tagSize / 2., 0);
201 point[2].setWorldCoordinates(opt_tagSize / 2., opt_tagSize / 2., 0);
202 point[3].setWorldCoordinates(-opt_tagSize / 2., opt_tagSize / 2., 0);
// Add the four point features to the servo task (task.addFeature calls are
// missing from this lossy listing), then set the servo gain.
206 for (
size_t i = 0; i < p.size(); i++) {
212 if (opt_adaptive_gain) {
// Optional real-time plotter: graph 0 shows the 8 feature-error components
// (x and y for each of the 4 corners), graph 1 the camera velocities.
// NOTE(review): raw owning pointer + `new` -- freed later; kept as-is since
// this is a verbatim excerpt of the upstream example.
220 vpPlot *plotter =
nullptr;
224 plotter =
new vpPlot(2,
static_cast<int>(250 * 2), 500,
static_cast<int>(I.
getWidth()) + 80, 10,
225 "Real time curves plotter");
226 plotter->
setTitle(0,
"Visual features error");
227 plotter->
setTitle(1,
"Camera velocities");
230 plotter->
setLegend(0, 0,
"error_feat_p1_x");
231 plotter->
setLegend(0, 1,
"error_feat_p1_y");
232 plotter->
setLegend(0, 2,
"error_feat_p2_x");
233 plotter->
setLegend(0, 3,
"error_feat_p2_y");
234 plotter->
setLegend(0, 4,
"error_feat_p3_x");
235 plotter->
setLegend(0, 5,
"error_feat_p3_y");
236 plotter->
setLegend(0, 6,
"error_feat_p4_x");
237 plotter->
setLegend(0, 7,
"error_feat_p4_y");
// Servo-loop state flags and the per-corner trajectory storage (allocated
// lazily once the first detection arrives).
246 bool final_quit =
false;
247 bool has_converged =
false;
248 bool send_velocities =
false;
249 bool servo_started =
false;
250 std::vector<vpImagePoint> *traj_corners =
nullptr;
// Main servo loop: acquire an image, detect the tag, update the visual
// features, compute and send the camera velocity until convergence or quit.
// NOTE(review): many statements are missing from this lossy listing.
256 while (!has_converged && !final_quit) {
// Detect AprilTags and estimate the tag pose(s) cMo.
263 std::vector<vpHomogeneousMatrix> cMo_vec;
264 detector.detect(I, opt_tagSize, cam, cMo_vec);
// On-screen hint: left click toggles servoing, right click quits.
267 std::stringstream ss;
268 ss <<
"Left click to " << (send_velocities ?
"stop the robot" :
"servo the robot") <<
", right click to quit.";
// Proceed only when exactly one tag is detected.
275 if (cMo_vec.size() == 1) {
278 static bool first_time =
true;
// The tag has a 180-degree symmetry: build the two candidate desired poses
// (identity and a PI rotation about z) and keep the one with the smaller
// theta-u rotation so the camera does not flip by PI.
281 std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
282 v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
283 for (
size_t i = 0; i < 2; i++) {
284 v_cdMc[i] = cdMo * v_oMo[i] * cMo.
inverse();
286 if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
290 std::cout <<
"Desired frame modified to avoid PI rotation of the camera" << std::endl;
// Desired features: project the model corners through the desired pose.
295 for (
size_t i = 0; i < point.size(); i++) {
297 point[i].changeFrame(cdMo * oMo, cP);
298 point[i].projection(cP, p_);
// Current features: use the detected tag corners and the estimated pose.
307 std::vector<vpImagePoint> corners = detector.getPolygon(0);
310 for (
size_t i = 0; i < corners.size(); i++) {
315 point[i].changeFrame(cMo, cP);
// Task sequencing: only start ramping once the user enables velocities.
320 if (opt_task_sequencing) {
321 if (!servo_started) {
322 if (send_velocities) {
323 servo_started =
true;
// Display per-corner annotations and the corner trajectories.
335 for (
size_t i = 0; i < corners.size(); i++) {
336 std::stringstream ss;
346 traj_corners =
new std::vector<vpImagePoint>[corners.size()];
349 display_point_trajectory(I, corners, traj_corners);
// Plot the computed camera velocity v_c on graph 1.
353 plotter->
plot(1, iter_plot, v_c);
358 std::cout <<
"v_c: " << v_c.t() << std::endl;
// Show and log the current feature error; declare convergence below the
// configured threshold.
362 std::stringstream ss;
363 ss <<
"error: " << error;
367 std::cout <<
"error: " << error << std::endl;
369 if (error < convergence_threshold) {
370 has_converged =
true;
371 std::cout <<
"Servo task has converged"
// Mouse handling: left click toggles sending velocities, right click quits
// (bodies cut from this view).
383 if (!send_velocities) {
391 std::stringstream ss;
401 send_velocities = !send_velocities;
// After the loop: stop the robot and, if plotting, keep the plotter window.
414 std::cout <<
"Stop the robot " << std::endl;
417 if (opt_plot && plotter !=
nullptr) {
// Idle loop so the user can inspect the final display before exiting.
423 while (!final_quit) {
// Cleanup: release the corner-trajectory array allocated in the servo loop.
438 delete[] traj_corners;
// Exception handlers: report the error and stop the robot so it is never
// left moving after a failure.
442 std::cout <<
"ViSP exception: " << e.
what() << std::endl;
443 std::cout <<
"Stop the robot " << std::endl;
447 catch (
const std::exception &e) {
448 std::cout <<
"ur_rtde exception: " << e.what() << std::endl;
// Fallback build (outside the feature guard at the top of the file): tell the
// user which optional dependency is missing instead of running the example.
457 #if !defined(VISP_HAVE_REALSENSE2)
458 std::cout <<
"Install librealsense-2.x" << std::endl;
460 #if !defined(VISP_HAVE_AFMA6)
461 std::cout <<
"ViSP is not build with Afma6 robot support..." << std::endl;
Adaptive gain computation.
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
Implementation of column vector and the associated operations.
static const vpColor green
@ TAG_36h11
AprilTag 36h11 pattern (recommended)
Display for windows using GDI (available on any windows 32 platform).
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be installed on the system.
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
Error that can be emitted by ViSP classes.
const char * what() const
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only the 2D coordinates of the point.
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
unsigned int getWidth() const
static double rad(double deg)
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
This class enables real time drawing of 2D or 3D graphics. An instance of the class opens a window which contains one or more graphs, each displaying a set of curves.
void initGraph(unsigned int graphNum, unsigned int curveNbr)
void setLegend(unsigned int graphNum, unsigned int curveNum, const std::string &legend)
void plot(unsigned int graphNum, unsigned int curveNum, double x, double y)
void setTitle(unsigned int graphNum, const std::string &title)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
Control of Irisa's gantry robot named Afma6.
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel) vp_override
@ STATE_POSITION_CONTROL
Initialize the position controller.
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
virtual vpRobotStateType setRobotState(const vpRobot::vpRobotStateType newState)
Implementation of a rotation matrix and operations on such kind of matrices.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
void addFeature(vpBasicFeature &s_cur, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
void setServo(const vpServoType &servo_type)
vpColVector getError() const
vpColVector computeControlLaw()
Class that consider the case of a translation vector.
VISP_EXPORT double measureTimeMs()