Visual Servoing Platform version 3.4.0
testRealSense2_T265_images_odometry_async.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Asynchronous acquisition of images and odometry information with
 * RealSense T265 sensor and librealsense2.
 *
 *****************************************************************************/

#include <iostream>

#include <visp3/core/vpMeterPixelConversion.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/sensor/vpRealSense2.h>

#if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) && \
    (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && \
    (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))

#include <thread>
#include <functional>
#include <list>    // for std::list used to store the trajectory
#include <cstring> // for memcpy()

int main()
{
  vpHomogeneousMatrix cMw, cMw_0;
  vpHomogeneousMatrix cextMw(0, 0, 2, 0, 0, 0); // External camera view for pose visualization.
  vpColVector odo_vel, odo_acc, imu_acc, imu_vel;
  unsigned int confidence;
  vpImagePoint frame_origin;
  std::list< std::pair<unsigned int, vpImagePoint> > frame_origins; // Frame origin's history for trajectory visualization.
  unsigned int display_scale = 2;

  try {
    vpRealSense2 g;

    rs2::config config;
    config.enable_stream(RS2_STREAM_POSE, RS2_FORMAT_6DOF);
    config.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
    config.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);

    // Creating images for left and right cameras, and for visualizing trajectory.
    vpImage<unsigned char> I_left, I_right;
    vpImage<unsigned char> I_pose(300, 300, 0);

    vpCameraParameters cam(300., 300., I_pose.getWidth()/2, I_pose.getHeight()/2); // For pose visualization.

    // Define frame callback.
    // The callback is executed on a sensor thread and can be called simultaneously from multiple sensors.
    std::function<void(rs2::frame)> callback = [&](const rs2::frame &frame)
    {
      if (rs2::frameset fs = frame.as<rs2::frameset>())
      {
        // With callbacks, all synchronized streams arrive in a single frameset.
        rs2::video_frame left_frame = fs.get_fisheye_frame(1);
        size_t size = left_frame.get_width() * left_frame.get_height();
        memcpy(I_left.bitmap, left_frame.get_data(), size);

        rs2::video_frame right_frame = fs.get_fisheye_frame(2);
        size = right_frame.get_width() * right_frame.get_height();
        memcpy(I_right.bitmap, right_frame.get_data(), size);

        rs2_pose pose_data = fs.get_pose_frame().get_pose_data();

        vpTranslationVector ctw(static_cast<double>(pose_data.translation.x),
                                static_cast<double>(pose_data.translation.y),
                                static_cast<double>(pose_data.translation.z));
        vpQuaternionVector cqw(static_cast<double>(pose_data.rotation.x),
                               static_cast<double>(pose_data.rotation.y),
                               static_cast<double>(pose_data.rotation.z),
                               static_cast<double>(pose_data.rotation.w));

        cMw.buildFrom(ctw, cqw);

        odo_vel.resize(6, false);
        odo_vel[0] = static_cast<double>(pose_data.velocity.x);
        odo_vel[1] = static_cast<double>(pose_data.velocity.y);
        odo_vel[2] = static_cast<double>(pose_data.velocity.z);
        odo_vel[3] = static_cast<double>(pose_data.angular_velocity.x);
        odo_vel[4] = static_cast<double>(pose_data.angular_velocity.y);
        odo_vel[5] = static_cast<double>(pose_data.angular_velocity.z);

        odo_acc.resize(6, false);
        odo_acc[0] = static_cast<double>(pose_data.acceleration.x);
        odo_acc[1] = static_cast<double>(pose_data.acceleration.y);
        odo_acc[2] = static_cast<double>(pose_data.acceleration.z);
        odo_acc[3] = static_cast<double>(pose_data.angular_acceleration.x);
        odo_acc[4] = static_cast<double>(pose_data.angular_acceleration.y);
        odo_acc[5] = static_cast<double>(pose_data.angular_acceleration.z);

        confidence = pose_data.tracker_confidence;
      }
      else
      {
        // Streams that bypass synchronization (such as IMU, Pose, ...) will produce single frames.
        rs2_pose pose_data = frame.as<rs2::pose_frame>().get_pose_data();
        vpTranslationVector ctw(static_cast<double>(pose_data.translation.x),
                                static_cast<double>(pose_data.translation.y),
                                static_cast<double>(pose_data.translation.z));
        vpQuaternionVector cqw(static_cast<double>(pose_data.rotation.x),
                               static_cast<double>(pose_data.rotation.y),
                               static_cast<double>(pose_data.rotation.z),
                               static_cast<double>(pose_data.rotation.w));

        cMw.buildFrom(ctw, cqw);

        odo_vel.resize(6, false);
        odo_vel[0] = static_cast<double>(pose_data.velocity.x);
        odo_vel[1] = static_cast<double>(pose_data.velocity.y);
        odo_vel[2] = static_cast<double>(pose_data.velocity.z);
        odo_vel[3] = static_cast<double>(pose_data.angular_velocity.x);
        odo_vel[4] = static_cast<double>(pose_data.angular_velocity.y);
        odo_vel[5] = static_cast<double>(pose_data.angular_velocity.z);

        odo_acc.resize(6, false);
        odo_acc[0] = static_cast<double>(pose_data.acceleration.x);
        odo_acc[1] = static_cast<double>(pose_data.acceleration.y);
        odo_acc[2] = static_cast<double>(pose_data.acceleration.z);
        odo_acc[3] = static_cast<double>(pose_data.angular_acceleration.x);
        odo_acc[4] = static_cast<double>(pose_data.angular_acceleration.y);
        odo_acc[5] = static_cast<double>(pose_data.angular_acceleration.z);

        confidence = pose_data.tracker_confidence;
      }

      // Calculate the frame's origin to be projected on the image I_pose and append it to frame_origins
      vpHomogeneousMatrix cextMc = cextMw * cMw.inverse();
      vpMeterPixelConversion::convertPoint(cam, cextMc[0][3] / cextMc[2][3], cextMc[1][3] / cextMc[2][3], frame_origin);
      frame_origins.push_back(std::make_pair(confidence, frame_origin));
    };

    // Open the vpRealSense2 object with the configuration above, registering the callback to be called on each new frame.
    g.open(config, callback);

    I_left.resize(g.getIntrinsics(RS2_STREAM_FISHEYE, 1).height,
                  g.getIntrinsics(RS2_STREAM_FISHEYE, 1).width);

    I_right.resize(g.getIntrinsics(RS2_STREAM_FISHEYE, 2).height,
                   g.getIntrinsics(RS2_STREAM_FISHEYE, 2).width);

#if defined(VISP_HAVE_X11)
    vpDisplayX display_left;  // Left image
    vpDisplayX display_right; // Right image
    vpDisplayX display_pose;  // Pose visualization
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI display_left;  // Left image
    vpDisplayGDI display_right; // Right image
    vpDisplayGDI display_pose;  // Pose visualization
#endif

#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
    display_left.setDownScalingFactor(display_scale);
    display_right.setDownScalingFactor(display_scale);
    display_left.init(I_left, 10, 10, "Left image");
    display_right.init(I_right, static_cast<int>(I_left.getWidth()/display_scale) + 80, 10, "Right image");
    display_pose.init(I_pose, 10, static_cast<int>(I_left.getHeight()/display_scale) + 80, "Pose visualizer");
#endif

    vpHomogeneousMatrix cextMc_0 = cextMw * cMw_0.inverse();
    vpMeterPixelConversion::convertPoint(cam, cextMc_0[0][3] / cextMc_0[2][3], cextMc_0[1][3] / cextMc_0[2][3], frame_origin);
    frame_origins.push_back(std::make_pair(confidence, frame_origin));

    while (true) {
      // Sleep for 1 millisecond to reduce the number of iterations
      std::this_thread::sleep_for(std::chrono::milliseconds(1));

      vpDisplay::display(I_left);
      vpDisplay::display(I_right);
      vpDisplay::display(I_pose);

      vpHomogeneousMatrix cextMc = cextMw * cMw.inverse();
      vpMeterPixelConversion::convertPoint(cam, cextMc[0][3] / cextMc[2][3], cextMc[1][3] / cextMc[2][3], frame_origin);
      frame_origins.push_back(std::make_pair(confidence, frame_origin));

      vpDisplay::displayText(I_left, 15*display_scale, 15*display_scale, "Click to quit", vpColor::red);
      vpDisplay::displayText(I_right, 15*display_scale, 15*display_scale, "Click to quit", vpColor::red);
      vpDisplay::displayText(I_pose, 15, 15, "Click to quit", vpColor::red);

      vpDisplay::displayFrame(I_pose, cextMc_0, cam, 0.1, vpColor::none, 2); // First frame
      vpDisplay::displayFrame(I_pose, cextMc, cam, 0.1, vpColor::none, 2);

      // Display frame origin trajectory
      {
        std::list< std::pair<unsigned int, vpImagePoint> >::const_iterator it = frame_origins.begin();
        std::pair<unsigned int, vpImagePoint> frame_origin_pair_prev = *(it++);
        for (; it != frame_origins.end(); ++it) {
          if (vpImagePoint::distance(frame_origin_pair_prev.second, (*it).second) > 1) {
            vpDisplay::displayLine(I_pose, frame_origin_pair_prev.second, (*it).second,
                                   (*it).first == 3 ? vpColor::green : ((*it).first == 2 ? vpColor::yellow : vpColor::red), 2);
            frame_origin_pair_prev = *it;
          }
        }
      }
      if (vpDisplay::getClick(I_left, false) || vpDisplay::getClick(I_right, false) || vpDisplay::getClick(I_pose, false)) {
        break;
      }
      vpDisplay::flush(I_left);
      vpDisplay::flush(I_right);
      vpDisplay::flush(I_pose);
    }
  } catch (const vpException &e) {
    std::cerr << "RealSense error " << e.what() << std::endl;
  } catch (const std::exception &e) {
    std::cerr << e.what() << std::endl;
  }

  return EXIT_SUCCESS;
}
#else
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "You do not have the realsense2 SDK functionality enabled..." << std::endl;
  std::cout << "Tip:" << std::endl;
  std::cout << "- Install librealsense2, configure ViSP again using cmake and rebuild this example" << std::endl;
  return EXIT_SUCCESS;
#elif (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
  std::cout << "You did not build ViSP with the c++11 or higher compiler flag" << std::endl;
  std::cout << "Tip:" << std::endl;
  std::cout << "- Configure ViSP again using cmake -DUSE_CXX_STANDARD=11, and rebuild this example" << std::endl;
#elif !(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI))
  std::cout << "You don't have X11 or GDI display capabilities" << std::endl;
#elif !(RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
  std::cout << "Install librealsense version > 2.31.0" << std::endl;
#endif
  return EXIT_SUCCESS;
}
#endif