#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpConfig.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpRobotFranka.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
#if defined(VISP_HAVE_REALSENSE2) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_FRANKA)

#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
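
// Helper that accumulates and draws the trajectory of the tag corners in the image overlay:
// each corner has its own vector of image points that is extended at every servo iteration.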
void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
                              std::vector<vpImagePoint> *traj_vip)
{
  for (size_t i = 0; i < vip.size(); i++) {
    if (traj_vip[i].size()) {
      // Add the current point only if it is at least one pixel away from the previous one
      if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
        traj_vip[i].push_back(vip[i]);
      }
    }
    else {
      traj_vip[i].push_back(vip[i]);
    }
  }
  for (size_t i = 0; i < vip.size(); i++) {
    for (size_t j = 1; j < traj_vip[i].size(); j++) {
      vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
    }
  }
}
int main(int argc, char **argv)
{
  double opt_tagSize = 0.120;
  std::string opt_robot_ip = "192.168.1.1";
  std::string opt_eMc_filename = "";
  bool display_tag = true;
  int opt_quad_decimate = 2;
  bool opt_verbose = false;
  bool opt_plot = false;
  bool opt_adaptive_gain = false;
  bool opt_task_sequencing = false;
  double convergence_threshold = 0.00005;
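
  // The defaults above can be overridden from the command line; options are parsed below and
  // documented in the --help message.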
  for (int i = 1; i < argc; i++) {
    if ((std::string(argv[i]) == "--tag-size") && (i + 1 < argc)) {
      opt_tagSize = std::stod(argv[i + 1]);
      ++i;
    }
    else if ((std::string(argv[i]) == "--ip") && (i + 1 < argc)) {
      opt_robot_ip = std::string(argv[i + 1]);
      ++i;
    }
    else if ((std::string(argv[i]) == "--eMc") && (i + 1 < argc)) {
      opt_eMc_filename = std::string(argv[i + 1]);
      ++i;
    }
    else if (std::string(argv[i]) == "--verbose") {
      opt_verbose = true;
    }
    else if (std::string(argv[i]) == "--plot") {
      opt_plot = true;
    }
    else if (std::string(argv[i]) == "--adaptive-gain") {
      opt_adaptive_gain = true;
    }
    else if (std::string(argv[i]) == "--task-sequencing") {
      opt_task_sequencing = true;
    }
    else if ((std::string(argv[i]) == "--quad-decimate") && (i + 1 < argc)) {
      opt_quad_decimate = std::stoi(argv[i + 1]);
      ++i;
    }
    else if (std::string(argv[i]) == "--no-convergence-threshold") {
      convergence_threshold = 0.;
    }
    else if ((std::string(argv[i]) == "--help") || (std::string(argv[i]) == "-h")) {
      std::cout << "SYNOPSIS" << std::endl
        << "  " << argv[0]
        << " [--ip <controller ip>]"
        << " [--tag-size <size>]"
        << " [--eMc <extrinsic transformation file>]"
        << " [--quad-decimate <decimation factor>]"
        << " [--adaptive-gain]"
        << " [--plot]"
        << " [--task-sequencing]"
        << " [--no-convergence-threshold]"
        << " [--verbose]"
        << " [--help] [-h]\n"
        << std::endl;
      std::cout << "DESCRIPTION" << std::endl
        << "  Use an image-based visual-servoing scheme to position the camera in front of an AprilTag." << std::endl
        << std::endl
        << "  --ip <controller ip>" << std::endl
        << "    Franka controller IP address." << std::endl
        << "    Default: " << opt_robot_ip << std::endl
        << std::endl
        << "  --tag-size <size>" << std::endl
        << "    AprilTag size in [m]." << std::endl
        << "    Default: " << opt_tagSize << " [m]" << std::endl
        << std::endl
        << "  --eMc <extrinsic transformation file>" << std::endl
        << "    File containing the homogeneous transformation matrix between" << std::endl
        << "    robot end-effector and camera frame." << std::endl
        << std::endl
        << "  --quad-decimate <decimation factor>" << std::endl
        << "    Decimation factor used during AprilTag detection." << std::endl
        << "    Default: " << opt_quad_decimate << std::endl
        << std::endl
        << "  --adaptive-gain" << std::endl
        << "    Flag to enable adaptive gain to speed up visual servo near convergence." << std::endl
        << std::endl
        << "  --plot" << std::endl
        << "    Flag to enable curve plotter." << std::endl
        << std::endl
        << "  --task-sequencing" << std::endl
        << "    Flag to enable task sequencing scheme." << std::endl
        << std::endl
        << "  --no-convergence-threshold" << std::endl
        << "    Flag to disable convergence threshold used to stop the visual servo." << std::endl
        << std::endl
        << "  --verbose" << std::endl
        << "    Flag to enable extra verbosity." << std::endl
        << std::endl
        << "  --help, -h" << std::endl
        << "    Print this help message." << std::endl
        << std::endl;
      return EXIT_SUCCESS;
    }
  }
  vpRobotFranka robot;

  try {
    robot.connect(opt_robot_ip);
    vpRealSense2 rs;
    rs2::config config;
    unsigned int width = 640, height = 480;
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
    config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
    rs.open(config);
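
    // Only the color stream is processed by this example (it is acquired as a grey-level image
    // for tag detection); the depth and infrared streams are enabled but not used further here.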
    // Camera extrinsics: default pose of the camera frame in the end-effector frame
    vpPoseVector ePc;
    ePc[1] = -0.00535012;
    // ... (the remaining components of ePc are initialized the same way)

    // Overwrite the defaults if an extrinsic calibration file is given
    if (!opt_eMc_filename.empty()) {
      ePc.loadYAML(opt_eMc_filename, ePc);
    }
    else {
      std::cout << "Warning, opt_eMc_filename is empty! Use hard coded values." << std::endl;
    }
    vpHomogeneousMatrix eMc(ePc);
    std::cout << "eMc:\n" << eMc << std::endl;

    // Camera intrinsics, read from the RealSense device
    vpCameraParameters cam =
      rs.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithDistortion);
    std::cout << "cam:\n" << cam << std::endl;

    vpImage<unsigned char> I(height, width);
#if defined(VISP_HAVE_X11)
    vpDisplayX dc(I, 10, 10, "Color image");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI dc(I, 10, 10, "Color image");
#endif
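
    // Both display classes expose the same vpDisplay interface: vpDisplayX under X11 (Linux),
    // vpDisplayGDI under Windows.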
    vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
    vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
    vpDetectorAprilTag detector(tagFamily);
    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
    detector.setDisplayTag(display_tag);
    detector.setAprilTagQuadDecimate(opt_quad_decimate);
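
    // A quad-decimate factor greater than 1 subsamples the image during quad detection, which
    // speeds up AprilTag detection at the cost of a reduced detection range.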
    // Desired camera pose with respect to the tag: facing the tag, three tag-widths away along
    // the optical axis
    vpHomogeneousMatrix cdMo(vpTranslationVector(0, 0, opt_tagSize * 3),
                             vpRotationMatrix({ 1, 0, 0, 0, -1, 0, 0, 0, -1 }));
    vpHomogeneousMatrix cMo, oMo;

    // Current and desired visual features: one 2D point feature per tag corner
    std::vector<vpFeaturePoint> p(4), pd(4);

    // 3D model of the tag: its four corners expressed in the object frame
    std::vector<vpPoint> point(4);
    point[0].setWorldCoordinates(-opt_tagSize / 2., -opt_tagSize / 2., 0);
    point[1].setWorldCoordinates(opt_tagSize / 2., -opt_tagSize / 2., 0);
    point[2].setWorldCoordinates(opt_tagSize / 2., opt_tagSize / 2., 0);
    point[3].setWorldCoordinates(-opt_tagSize / 2., opt_tagSize / 2., 0);

    // Eye-in-hand IBVS task: the error between p and pd produces a camera velocity command
    vpServo task;
    for (size_t i = 0; i < p.size(); i++) {
      task.addFeature(p[i], pd[i]);
    }
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
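
    // Four point features give 8 scalar errors, which is enough to constrain the 6 degrees of
    // freedom of the camera velocity.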
    if (opt_adaptive_gain) {
      vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=1.5, lambda(oo)=0.4, lambda'(0)=30
      task.setLambda(lambda);
    }
    else {
      task.setLambda(0.5);
    }
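
    // The adaptive gain increases lambda as the feature error decreases, which speeds up the
    // end of the convergence without producing large velocities at the start of the servo.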
    vpPlot *plotter = nullptr;
    int iter_plot = 0;

    if (opt_plot) {
      plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
                           "Real time curves plotter");
      plotter->setTitle(0, "Visual features error");
      plotter->setTitle(1, "Camera velocities");
      plotter->initGraph(0, 8);
      plotter->initGraph(1, 6);
      plotter->setLegend(0, 0, "error_feat_p1_x");
      plotter->setLegend(0, 1, "error_feat_p1_y");
      plotter->setLegend(0, 2, "error_feat_p2_x");
      plotter->setLegend(0, 3, "error_feat_p2_y");
      plotter->setLegend(0, 4, "error_feat_p3_x");
      plotter->setLegend(0, 5, "error_feat_p3_y");
      plotter->setLegend(0, 6, "error_feat_p4_x");
      plotter->setLegend(0, 7, "error_feat_p4_y");
      // ... (legends of the six camera velocity curves set the same way on graph 1)
    }
    bool final_quit = false;
    bool has_converged = false;
    bool send_velocities = false;
    bool servo_started = false;
    std::vector<vpImagePoint> *traj_corners = nullptr; // Corner trajectories, allocated at first detection

    double t_init_servo = vpTime::measureTimeMs();

    robot.set_eMc(eMc); // Camera position with respect to the end-effector frame
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
    while (!has_converged && !final_quit) {
      double t_start = vpTime::measureTimeMs();

      rs.acquire(I);
      vpDisplay::display(I);

      std::vector<vpHomogeneousMatrix> cMo_vec;
      detector.detect(I, opt_tagSize, cam, cMo_vec);

      {
        std::stringstream ss;
        ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
        vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
      }

      vpColVector v_c(6);
      if (cMo_vec.size() == 1) {
        cMo = cMo_vec[0];

        static bool first_time = true;
        if (first_time) {
          // Compare the two possible tag orientations and keep the one that avoids a PI rotation
          // of the camera around its optical axis
          std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
          v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
          for (size_t i = 0; i < 2; i++) {
            v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
          }
          if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
            oMo = v_oMo[0];
          }
          else {
            std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
            oMo = v_oMo[1]; // Introduce a PI rotation of the tag frame
          }

          // Compute the desired features once, by projecting the tag corners from the desired pose
          for (size_t i = 0; i < point.size(); i++) {
            vpColVector cP, p_;
            point[i].changeFrame(cdMo * oMo, cP);
            point[i].projection(cP, p_);

            pd[i].set_x(p_[0]);
            pd[i].set_y(p_[1]);
            pd[i].set_Z(cP[2]);
          }
        }
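
        // The desired features pd are computed once from the desired pose cdMo; the current
        // features p are refreshed at every iteration from the detected corners below.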
        // Update the current features from the detected tag corners
        std::vector<vpImagePoint> corners = detector.getPolygon(0);
        for (size_t i = 0; i < corners.size(); i++) {
          vpFeatureBuilder::create(p[i], cam, corners[i]);
          vpColVector cP;
          point[i].changeFrame(cMo, cP); // Depth of the point, needed by the interaction matrix
          p[i].set_Z(cP[2]);
        }
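
        // Compute the camera velocity twist. With task sequencing the control law is driven by
        // the time elapsed since the servo started, so the velocity ramps up smoothly from zero.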
        if (opt_task_sequencing) {
          if (!servo_started) {
            if (send_velocities) {
              servo_started = true;
            }
            t_init_servo = vpTime::measureTimeMs();
          }
          v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
        }
        else {
          v_c = task.computeControlLaw();
        }
        // Display the current and desired features and the corner indexes
        vpServoDisplay::display(task, cam, I);
        for (size_t i = 0; i < corners.size(); i++) {
          std::stringstream ss;
          ss << i;
          vpDisplay::displayText(I, corners[i] + vpImagePoint(15, 15), ss.str(), vpColor::red);
        }
        if (first_time) {
          traj_corners = new std::vector<vpImagePoint>[corners.size()];
        }
        // Display the trajectory of the corners accumulated so far
        display_point_trajectory(I, corners, traj_corners);
        if (opt_plot) {
          plotter->plot(0, iter_plot, task.getError());
          plotter->plot(1, iter_plot, v_c);
          iter_plot++;
        }

        if (opt_verbose) {
          std::cout << "v_c: " << v_c.t() << std::endl;
        }

        double error = task.getError().sumSquare();
        std::stringstream ss;
        ss << "error: " << error;
        vpDisplay::displayText(I, 60, 20, ss.str(), vpColor::red);
        if (opt_verbose) {
          std::cout << "error: " << error << std::endl;
        }
        if (error < convergence_threshold) {
          has_converged = true;
          std::cout << "Servo task has converged" << std::endl;
        }
        if (first_time) {
          first_time = false;
        }
      } // end if (cMo_vec.size() == 1)
      else {
        v_c = 0; // No tag detected: do not move
      }

      if (!send_velocities) {
        v_c = 0;
      }

      // Send the velocity twist, expressed in the camera frame, to the robot
      robot.setVelocity(vpRobot::CAMERA_FRAME, v_c);
      {
        std::stringstream ss;
        ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
        vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
      }
      vpDisplay::flush(I);

      // A left click toggles the velocity sending, a right click requests to quit
      vpMouseButton::vpMouseButtonType button;
      if (vpDisplay::getClick(I, button, false)) {
        if (button == vpMouseButton::button1) {
          send_velocities = !send_velocities;
        }
        else if (button == vpMouseButton::button3) {
          final_quit = true;
          v_c = 0;
        }
      }
    }
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState(vpRobot::STATE_STOP);

    if (opt_plot && plotter != nullptr) {
      delete plotter;
      plotter = nullptr;
    }

    // Keep the display alive until the user clicks to quit
    while (!final_quit) {
      rs.acquire(I);
      vpDisplay::display(I);
      vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
      if (vpDisplay::getClick(I, false)) {
        final_quit = true;
      }
      vpDisplay::flush(I);
    }
    delete[] traj_corners;
  }
  catch (const vpException &e) {
    std::cout << "ViSP exception: " << e.what() << std::endl;
    std::cout << "Stop the robot " << std::endl;
    robot.setRobotState(vpRobot::STATE_STOP);
    return EXIT_FAILURE;
  }
  catch (const franka::NetworkException &e) {
    std::cout << "Franka network exception: " << e.what() << std::endl;
    std::cout << "Check that you are connected to the Franka robot"
      << " and that you specified the correct IP with the --ip option (default: 192.168.1.1)."
      << std::endl;
    return EXIT_FAILURE;
  }
  catch (const std::exception &e) {
    std::cout << "Franka exception: " << e.what() << std::endl;
    return EXIT_FAILURE;
  }

  return EXIT_SUCCESS;
}
#else
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if !defined(VISP_HAVE_FRANKA)
  std::cout << "Install libfranka." << std::endl;
#endif
  return EXIT_SUCCESS;
}
#endif