63 #include <visp3/core/vpCameraParameters.h>
64 #include <visp3/core/vpConfig.h>
65 #include <visp3/detection/vpDetectorAprilTag.h>
66 #include <visp3/gui/vpDisplayGDI.h>
67 #include <visp3/gui/vpDisplayX.h>
68 #include <visp3/gui/vpPlot.h>
69 #include <visp3/io/vpImageIo.h>
70 #include <visp3/robot/vpRobotAfma6.h>
71 #include <visp3/sensor/vpRealSense2.h>
72 #include <visp3/visual_features/vpFeatureThetaU.h>
73 #include <visp3/visual_features/vpFeatureTranslation.h>
74 #include <visp3/vs/vpServo.h>
75 #include <visp3/vs/vpServoDisplay.h>
76 #include <visp3/core/vpImageFilter.h>
77 #include <visp3/io/vpVideoWriter.h>
80 #if defined(VISP_HAVE_REALSENSE2) && ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && \
81 (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_MODULE_DNN_TRACKER)
84 #include <visp3/io/vpJsonArgumentParser.h>
85 #include <visp3/dnn_tracker/vpMegaPoseTracker.h>
87 #ifdef VISP_HAVE_NLOHMANN_JSON
88 using json = nlohmann::json;
91 #ifdef ENABLE_VISP_NAMESPACE
// NOTE(review): this file is a Doxygen/HTML extraction of a ViSP example
// (servoAfma6MegaposePBVS-style tutorial): each line has its original source
// line number fused onto it and most lines are missing. Recover the original
// .cpp before attempting to build or edit the logic.
//
// Returns a bounding box around the object of interest, used to initialize
// the MegaPose tracker; returns std::optional<vpRect> (empty when no box was
// produced). Presumably the box corners come from user mouse clicks on the
// displayed image (function name and topLeft/bottomRight usage) — confirm
// against the full source; only two interior lines of the body are visible.
95 std::optional<vpRect> detectObjectForInitMegaposeClick(
const vpImage<vpRGBa> &I)
101 if (startLabelling) {
111 vpRect bb(topLeft, bottomRight);
// NOTE(review): fragment of main() from the same garbled extraction — original
// lines 122..465 with large gaps, statements split mid-expression, and source
// line numbers fused onto the text. The comments below annotate only what the
// visible fragments show. This cannot compile as-is; restore the original file.
//
// Entry point: pose-based visual servoing (PBVS) of an Afma6 robot, with a
// RealSense D435 color stream and a MegaPose server providing object pose
// estimates, per the parser description string below.
122 int main(
int argc,
const char *argv[])
// Option defaults. convergence_threshold_t is a translation threshold
// (units presumably meters) and convergence_threshold_tu a rotation
// threshold (presumably degrees) — TODO confirm in the full source.
124 bool opt_verbose =
true;
125 bool opt_plot =
true;
126 double convergence_threshold_t = 0.0005;
127 double convergence_threshold_tu = 0.5;
// Camera stream resolution and MegaPose server connection defaults.
129 unsigned width = 640, height = 480;
130 std::string megaposeAddress =
"127.0.0.1";
131 unsigned megaposePort = 5555;
132 int refinerIterations = 1, coarseNumSamples = 1024;
133 std::string objectName =
"";
// Joint-position files: desired (servo target) and initial robot poses.
135 std::string desiredPosFile =
"desired.pos";
136 std::string initialPosFile =
"init.pos";
// Command-line/JSON argument parsing, available only with nlohmann/json.
138 #ifdef VISP_HAVE_NLOHMANN_JSON
139 vpJsonArgumentParser parser(
"Pose-based visual servoing with Megapose on an Afma6, with a Realsense D435.",
"--config",
"/");
// NOTE(review): the "initialPose" help text below says "desired pose" and
// both help strings contain the typo "contains that the" — copy/paste
// defect to fix in the original file (runtime strings; unchanged here).
141 .addArgument(
"initialPose", initialPosFile,
true,
"Path to the file that contains that the desired pose. Can be acquired with Afma6_office.")
142 .addArgument(
"desiredPose", desiredPosFile,
true,
"Path to the file that contains that the desired pose. Can be acquired with Afma6_office.")
143 .addArgument(
"object", objectName,
true,
"Name of the object to track with megapose.")
144 .addArgument(
"megapose/address", megaposeAddress,
true,
"IP address of the Megapose server.")
145 .addArgument(
"megapose/port", megaposePort,
true,
"Port on which the Megapose server listens for connections.")
146 .addArgument(
"megapose/refinerIterations", refinerIterations,
false,
"Number of Megapose refiner model iterations."
147 "A higher count may lead to better accuracy, at the cost of more processing time")
148 .addArgument(
"megapose/initialisationNumSamples", coarseNumSamples,
false,
"Number of Megapose renderings used for the initial pose estimation.");
149 parser.parse(argc, argv);
// Safety banner: this example physically moves the robot.
155 std::cout <<
"WARNING: This example will move the robot! "
156 <<
"Please make sure to have the user stop button at hand!" << std::endl
157 <<
"Press Enter to continue..." << std::endl;
// Logs accumulated across the servo loop (dumped to JSON at the end).
159 std::vector<vpColVector> velocities;
160 std::vector<vpPoseVector> error;
// Robot setup: slow positioning speed, then move to the desired pose read
// from desiredPosFile.
169 robot.setPositioningVelocity(10.0);
170 robot.readPosFile(desiredPosFile, q);
173 std::cout <<
"Move to joint position: " << q.t() << std::endl;
// RealSense configuration: RGBA color stream at width x height, 30 FPS.
179 config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, 30);
185 std::cout <<
"cam:\n" << cam << std::endl;
// MegaPose client: connect to the server with the camera intrinsics, and
// configure the number of renderings for the coarse (initial) estimate.
187 std::shared_ptr<vpMegaPose> megapose;
189 megapose = std::make_shared<vpMegaPose>(megaposeAddress, megaposePort, cam, height, width);
196 megapose->setCoarseNumSamples(coarseNumSamples);
// Validate that the requested object is known to the server.
197 const std::vector<std::string> allObjects = megapose->getObjectNames();
198 if (std::find(allObjects.begin(), allObjects.end(), objectName) == allObjects.end()) {
// Tracking runs asynchronously; results arrive through this future.
201 std::future<vpMegaPoseEstimate> trackerFuture;
// Display backend selection (X11 or GDI).
205 #if defined(VISP_HAVE_X11)
206 vpDisplayX dc(I, 10, 10,
"Color image");
207 #elif defined(VISP_HAVE_GDI)
// Initial detection: user provides a bounding box (see helper above); the
// detection is redone after moving to the initial pose.
211 std::optional<vpRect> detection;
215 detection = detectObjectForInitMegaposeClick(I);
222 robot.readPosFile(initialPosFile, q);
225 std::cout <<
"Move to joint position: " << q.t() << std::endl;
229 detection = std::nullopt;
233 detection = detectObjectForInitMegaposeClick(I);
// Blocking init of the MegaPose tracker from the bounding box; the returned
// estimate carries a confidence score.
236 auto est = megaposeTracker.init(I, *detection).get();
238 std::cout <<
"Estimate score = " << est.score << std::endl;
// Optional real-time plotter: graph 0 = feature errors (t + theta-u),
// graph 1 = camera velocities. Owned via raw pointer, deleted near the end.
260 vpPlot *plotter =
nullptr;
264 plotter =
new vpPlot(2,
static_cast<int>(250 * 2), 500,
static_cast<int>(I.
getWidth()) + 80, 10,
265 "Real time curves plotter");
266 plotter->
setTitle(0,
"Visual features error");
267 plotter->
setTitle(1,
"Camera velocities");
270 plotter->
setLegend(0, 0,
"error_feat_tx");
271 plotter->
setLegend(0, 1,
"error_feat_ty");
272 plotter->
setLegend(0, 2,
"error_feat_tz");
273 plotter->
setLegend(0, 3,
"error_feat_theta_ux");
274 plotter->
setLegend(0, 4,
"error_feat_theta_uy");
275 plotter->
setLegend(0, 5,
"error_feat_theta_uz");
// Servo-loop state flags: quit/convergence, whether velocities are actually
// sent to the robot (toggled by left click), and whether a new MegaPose
// request should be issued.
284 bool final_quit =
false;
285 bool has_converged =
false;
286 bool send_velocities =
false;
295 bool callMegapose =
true;
// Main servoing loop: poll the asynchronous MegaPose future, update the
// object pose cTo when ready, bail out on low confidence, and re-issue a
// tracking request on the latest image.
298 while (!has_converged && !final_quit) {
303 if (!callMegapose && trackerFuture.wait_for(std::chrono::milliseconds(0)) == std::future_status::ready) {
304 megaposeEstimate = trackerFuture.get();
306 cTo = megaposeEstimate.
cTo;
// Confidence gate: below 0.2 the estimate is considered unusable.
308 if (megaposeEstimate.
score < 0.2) {
310 std::cout <<
"Low confidence, exiting" << std::endl;
315 std::cout <<
"Calling megapose" << std::endl;
316 trackerFuture = megaposeTracker.track(I);
317 callMegapose =
false;
// On-screen instructions reflecting the send_velocities toggle.
320 std::stringstream ss;
321 ss <<
"Left click to " << (send_velocities ?
"stop the robot" :
"servo the robot") <<
", right click to quit.";
329 velocities.push_back(v);
// Ground-truth error from robot odometry: cdTc_true compares the desired
// camera frame against the current one computed via forward kinematics.
334 cTw = robot.get_fMc(q).inverse();
335 cdTc_true = cdTw * cTw.
inverse();
337 error.push_back(cdrc);
345 plotter->
plot(1, iter_plot, v);
350 std::cout <<
"v: " << v.t() << std::endl;
// Translation error norms: predicted (from MegaPose) vs. true (odometry).
355 double error_tr = sqrt(cd_t_c.
sumSquare());
359 double error_tr_true = sqrt(cd_t_c_true.
sumSquare());
363 ss <<
"Predicted error_t: " << error_tr <<
", True error_t:" << error_tr_true;
366 ss <<
"Predicted error_tu: " << error_tu <<
", True error_tu:" << error_tu_true;
370 std::cout <<
"error translation: " << error_tr <<
" ; error rotation: " << error_tu << std::endl;
// Convergence test: both translation and rotation errors under threshold.
372 if (error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu) {
373 has_converged =
true;
374 std::cout <<
"Servo task has converged" << std::endl;
// Left click toggles sending velocities to the robot.
393 send_velocities = !send_velocities;
406 std::cout <<
"Stop the robot " << std::endl;
// Dump the logged velocities/errors to results.json (nlohmann/json only).
409 #ifdef VISP_HAVE_NLOHMANN_JSON
412 {
"velocities", velocities},
415 std::ofstream jsonFile;
416 jsonFile.open(
"results.json");
417 jsonFile << j.dump(4);
// Plotter teardown (raw-pointer ownership; deleted in the hidden lines).
421 if (opt_plot && plotter !=
nullptr) {
427 while (!final_quit) {
// Exception handlers: stop the robot on any ViSP or std exception.
// NOTE(review): "ur_rtde exception" label looks copy/pasted from a UR robot
// example — misleading for an Afma6 program; fix upstream.
443 std::cout <<
"ViSP exception: " << e.
what() << std::endl;
444 std::cout <<
"Stop the robot " << std::endl;
448 catch (
const std::exception &e) {
449 std::cout <<
"ur_rtde exception: " << e.what() << std::endl;
// Fallback main() when build prerequisites (librealsense2, C++17, Afma6
// support) are missing: print what is needed and exit.
458 #if !defined(VISP_HAVE_REALSENSE2)
459 std::cout <<
"Install librealsense-2.x" << std::endl;
461 #if !((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L)))
462 std::cout <<
"Build ViSP with c++17 or higher compiler flag (cmake -DUSE_CXX_STANDARD=17)." << std::endl;
464 #if !defined(VISP_HAVE_AFMA6)
465 std::cout <<
"ViSP is not built with Afma-6 robot support..." << std::endl;
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
static const vpColor none
static const vpColor yellow
Display for windows using GDI (available on any windows 32 platform).
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void getImage(const vpImage< unsigned char > &Is, vpImage< vpRGBa > &Id)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
@ badValue
Used to indicate that a value is not in the allowed range.
const char * what() const
Class that defines a 3D visual feature from an axis/angle parametrization that represents the rotation...
Class that defines the translation visual feature .
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpThetaUVector getThetaUVector() const
vpHomogeneousMatrix & buildFrom(const vpTranslationVector &t, const vpRotationMatrix &R)
vpHomogeneousMatrix inverse() const
vpTranslationVector getTranslationVector() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
unsigned int getWidth() const
Command line argument parsing with support for JSON files. If a JSON file is supplied,...
static double deg(double rad)
A simplified interface to track a single object with MegaPose. This tracker works asynchronously: A c...
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
void initGraph(unsigned int graphNum, unsigned int curveNbr)
void setLegend(unsigned int graphNum, unsigned int curveNum, const std::string &legend)
void plot(unsigned int graphNum, unsigned int curveNum, double x, double y)
void setTitle(unsigned int graphNum, const std::string &title)
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
Defines a rectangle in the plane.
Control of Irisa's gantry robot named Afma6.
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel) VP_OVERRIDE
@ STATE_POSITION_CONTROL
Initialize the position controller.
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
virtual vpRobotStateType setRobotState(const vpRobot::vpRobotStateType newState)
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
void addFeature(vpBasicFeature &s_cur, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
void setServo(const vpServoType &servo_type)
vpColVector getError() const
vpColVector computeControlLaw()
Implementation of a rotation vector as axis-angle minimal representation.
Class that consider the case of a translation vector.
Class that enables to write easily a video file or a sequence of images.
void saveFrame(vpImage< vpRGBa > &I)
void setFileName(const std::string &filename)
void open(vpImage< vpRGBa > &I)
VISP_EXPORT double measureTimeMs()