Visual Servoing Platform  version 3.6.1 under development (2024-07-27)
vpRealSense2.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * librealSense2 interface.
33  *
34 *****************************************************************************/
35 
36 #include <visp3/core/vpConfig.h>
37 
38 #if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
39 #include <cstring>
40 #include <iomanip>
41 #include <map>
42 #include <set>
43 #include <visp3/core/vpImageConvert.h>
44 #include <visp3/sensor/vpRealSense2.h>
45 
46 #define MANUAL_POINTCLOUD 1
47 
48 namespace
49 {
50 bool operator==(const rs2_extrinsics &lhs, const rs2_extrinsics &rhs)
51 {
52  for (int i = 0; i < 3; i++) {
53  for (int j = 0; j < 3; j++) {
54  if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
55  return false;
56  }
57  }
58 
59  if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
60  return false;
61  }
62  }
63 
64  return true;
65 }
66 } // namespace
67 
73  : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
74  m_points(), m_pos(), m_quat(), m_rot(), m_product_line(), m_init(false)
75 { }
76 
82 
89 {
90  auto data = m_pipe.wait_for_frames();
91  auto color_frame = data.get_color_frame();
92  getGreyFrame(color_frame, grey);
93  if (ts != nullptr) {
94  *ts = data.get_timestamp();
95  }
96 }
97 
103 void vpRealSense2::acquire(vpImage<vpRGBa> &color, double *ts)
104 {
105  auto data = m_pipe.wait_for_frames();
106  auto color_frame = data.get_color_frame();
107  getColorFrame(color_frame, color);
108  if (ts != nullptr) {
109  *ts = data.get_timestamp();
110  }
111 }
112 
123 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
124  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
125  rs2::align *const align_to, double *ts)
126 {
127  acquire(data_image, data_depth, data_pointCloud, data_infrared, nullptr, align_to, ts);
128 }
129 
192 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
193  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared1,
194  unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
195 {
196  auto data = m_pipe.wait_for_frames();
197  if (align_to != nullptr) {
198  // Infrared stream is not aligned
199  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
200 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
201  data = align_to->process(data);
202 #else
203  data = align_to->proccess(data);
204 #endif
205  }
206 
207  if (data_image != nullptr) {
208  auto color_frame = data.get_color_frame();
209  getNativeFrameData(color_frame, data_image);
210  }
211 
212  if (data_depth != nullptr || data_pointCloud != nullptr) {
213  auto depth_frame = data.get_depth_frame();
214  if (data_depth != nullptr) {
215  getNativeFrameData(depth_frame, data_depth);
216  }
217 
218  if (data_pointCloud != nullptr) {
219  getPointcloud(depth_frame, *data_pointCloud);
220  }
221  }
222 
223  if (data_infrared1 != nullptr) {
224  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
225  getNativeFrameData(infrared_frame, data_infrared1);
226  }
227 
228  if (data_infrared2 != nullptr) {
229  auto infrared_frame = data.get_infrared_frame(2);
230  getNativeFrameData(infrared_frame, data_infrared2);
231  }
232 
233  if (ts != nullptr) {
234  *ts = data.get_timestamp();
235  }
236 }
237 
238 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
260 {
261  auto data = m_pipe.wait_for_frames();
262 
263  if (left != nullptr) {
264  auto left_fisheye_frame = data.get_fisheye_frame(1);
265  unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
266  unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
267  left->resize(height, width);
268  getNativeFrameData(left_fisheye_frame, (*left).bitmap);
269  }
270 
271  if (right != nullptr) {
272  auto right_fisheye_frame = data.get_fisheye_frame(2);
273  unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
274  unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
275  right->resize(height, width);
276  getNativeFrameData(right_fisheye_frame, (*right).bitmap);
277  }
278 
279  if (ts != nullptr) {
280  *ts = data.get_timestamp();
281  }
282 }
283 
313  vpColVector *odo_vel, vpColVector *odo_acc, unsigned int *confidence, double *ts)
314 {
315  auto data = m_pipe.wait_for_frames();
316 
317  if (left != nullptr) {
318  auto left_fisheye_frame = data.get_fisheye_frame(1);
319  unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
320  unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
321  left->resize(height, width);
322  getNativeFrameData(left_fisheye_frame, (*left).bitmap);
323  }
324 
325  if (right != nullptr) {
326  auto right_fisheye_frame = data.get_fisheye_frame(2);
327  unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
328  unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
329  right->resize(height, width);
330  getNativeFrameData(right_fisheye_frame, (*right).bitmap);
331  }
332 
333  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
334  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
335 
336  if (ts != nullptr) {
337  *ts = data.get_timestamp();
338  }
339 
340  if (cMw != nullptr) {
341  m_pos[0] = static_cast<double>(pose_data.translation.x);
342  m_pos[1] = static_cast<double>(pose_data.translation.y);
343  m_pos[2] = static_cast<double>(pose_data.translation.z);
344 
345  m_quat[0] = static_cast<double>(pose_data.rotation.x);
346  m_quat[1] = static_cast<double>(pose_data.rotation.y);
347  m_quat[2] = static_cast<double>(pose_data.rotation.z);
348  m_quat[3] = static_cast<double>(pose_data.rotation.w);
349 
351  }
352 
353  if (odo_vel != nullptr) {
354  odo_vel->resize(6, false);
355  (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
356  (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
357  (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
358  (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
359  (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
360  (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
361  }
362 
363  if (odo_acc != nullptr) {
364  odo_acc->resize(6, false);
365  (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
366  (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
367  (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
368  (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
369  (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
370  (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
371  }
372 
373  if (confidence != nullptr) {
374  *confidence = pose_data.tracker_confidence;
375  }
376 }
377 
392  vpColVector *odo_vel, vpColVector *odo_acc, vpColVector *imu_vel, vpColVector *imu_acc,
393  unsigned int *confidence, double *ts)
394 {
395  auto data = m_pipe.wait_for_frames();
396 
397  if (left != nullptr) {
398  auto left_fisheye_frame = data.get_fisheye_frame(1);
399  unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
400  unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
401  left->resize(height, width);
402  getNativeFrameData(left_fisheye_frame, (*left).bitmap);
403  }
404 
405  if (right != nullptr) {
406  auto right_fisheye_frame = data.get_fisheye_frame(2);
407  unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
408  unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
409  right->resize(height, width);
410  getNativeFrameData(right_fisheye_frame, (*right).bitmap);
411  }
412 
413  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
414  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
415 
416  if (ts != nullptr) {
417  *ts = data.get_timestamp();
418  }
419 
420  if (cMw != nullptr) {
421  m_pos[0] = static_cast<double>(pose_data.translation.x);
422  m_pos[1] = static_cast<double>(pose_data.translation.y);
423  m_pos[2] = static_cast<double>(pose_data.translation.z);
424 
425  m_quat[0] = static_cast<double>(pose_data.rotation.x);
426  m_quat[1] = static_cast<double>(pose_data.rotation.y);
427  m_quat[2] = static_cast<double>(pose_data.rotation.z);
428  m_quat[3] = static_cast<double>(pose_data.rotation.w);
429 
431  }
432 
433  if (odo_vel != nullptr) {
434  odo_vel->resize(6, false);
435  (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
436  (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
437  (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
438  (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
439  (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
440  (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
441  }
442 
443  if (odo_acc != nullptr) {
444  odo_acc->resize(6, false);
445  (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
446  (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
447  (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
448  (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
449  (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
450  (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
451  }
452 
453  auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
454  auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();
455 
456  if (imu_acc != nullptr) {
457  imu_acc->resize(3, false);
458  (*imu_acc)[0] = static_cast<double>(accel_data.x);
459  (*imu_acc)[1] = static_cast<double>(accel_data.y);
460  (*imu_acc)[2] = static_cast<double>(accel_data.z);
461  }
462 
463  auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
464  auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();
465 
466  if (imu_vel != nullptr) {
467  imu_vel->resize(3, false);
468  (*imu_vel)[0] = static_cast<double>(gyro_data.x);
469  (*imu_vel)[1] = static_cast<double>(gyro_data.y);
470  (*imu_vel)[2] = static_cast<double>(gyro_data.z);
471  }
472 
473  if (confidence != nullptr) {
474  *confidence = pose_data.tracker_confidence;
475  }
476 }
477 #endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
478 
479 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
492 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
493  std::vector<vpColVector> *const data_pointCloud,
494  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
495  rs2::align *const align_to, double *ts)
496 {
497  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, nullptr, align_to, ts);
498 }
499 
514 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
515  std::vector<vpColVector> *const data_pointCloud,
516  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared1,
517  unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
518 {
519  auto data = m_pipe.wait_for_frames();
520  if (align_to != nullptr) {
521  // Infrared stream is not aligned
522  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
523 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
524  data = align_to->process(data);
525 #else
526  data = align_to->proccess(data);
527 #endif
528  }
529 
530  if (data_image != nullptr) {
531  auto color_frame = data.get_color_frame();
532  getNativeFrameData(color_frame, data_image);
533  }
534 
535  if (data_depth != nullptr || data_pointCloud != nullptr || pointcloud != nullptr) {
536  auto depth_frame = data.get_depth_frame();
537  if (data_depth != nullptr) {
538  getNativeFrameData(depth_frame, data_depth);
539  }
540 
541  if (data_pointCloud != nullptr) {
542  getPointcloud(depth_frame, *data_pointCloud);
543  }
544 
545  if (pointcloud != nullptr) {
546  getPointcloud(depth_frame, pointcloud);
547  }
548  }
549 
550  if (data_infrared1 != nullptr) {
551  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
552  getNativeFrameData(infrared_frame, data_infrared1);
553  }
554 
555  if (data_infrared2 != nullptr) {
556  auto infrared_frame = data.get_infrared_frame(2);
557  getNativeFrameData(infrared_frame, data_infrared2);
558  }
559 
560  if (ts != nullptr) {
561  *ts = data.get_timestamp();
562  }
563 }
564 
577 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
578  std::vector<vpColVector> *const data_pointCloud,
579  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared,
580  rs2::align *const align_to, double *ts)
581 {
582  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, nullptr, align_to, ts);
583 }
584 
599 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
600  std::vector<vpColVector> *const data_pointCloud,
601  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared1,
602  unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
603 {
604  auto data = m_pipe.wait_for_frames();
605  if (align_to != nullptr) {
606  // Infrared stream is not aligned
607  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
608 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
609  data = align_to->process(data);
610 #else
611  data = align_to->proccess(data);
612 #endif
613  }
614 
615  auto color_frame = data.get_color_frame();
616  if (data_image != nullptr) {
617  getNativeFrameData(color_frame, data_image);
618  }
619 
620  if (data_depth != nullptr || data_pointCloud != nullptr || pointcloud != nullptr) {
621  auto depth_frame = data.get_depth_frame();
622  if (data_depth != nullptr) {
623  getNativeFrameData(depth_frame, data_depth);
624  }
625 
626  if (data_pointCloud != nullptr) {
627  getPointcloud(depth_frame, *data_pointCloud);
628  }
629 
630  if (pointcloud != nullptr) {
631  getPointcloud(depth_frame, color_frame, pointcloud);
632  }
633  }
634 
635  if (data_infrared1 != nullptr) {
636  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
637  getNativeFrameData(infrared_frame, data_infrared1);
638  }
639 
640  if (data_infrared2 != nullptr) {
641  auto infrared_frame = data.get_infrared_frame(2);
642  getNativeFrameData(infrared_frame, data_infrared2);
643  }
644 
645  if (ts != nullptr) {
646  *ts = data.get_timestamp();
647  }
648 }
649 #endif
650 
663 {
664  if (m_init) {
665  m_pipe.stop();
666  m_init = false;
667  }
668 }
669 
681  int index) const
682 {
683  auto rs_stream = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
684  auto intrinsics = rs_stream.get_intrinsics();
685 
686  vpCameraParameters cam;
687  double u0 = intrinsics.ppx;
688  double v0 = intrinsics.ppy;
689  double px = intrinsics.fx;
690  double py = intrinsics.fy;
691 
692  switch (type) {
694  double kdu = intrinsics.coeffs[0];
695  cam.initPersProjWithDistortion(px, py, u0, v0, -kdu, kdu);
696  } break;
697 
699  std::vector<double> tmp_coefs;
700  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[0]));
701  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[1]));
702  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[2]));
703  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[3]));
704  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[4]));
705 
706  cam.initProjWithKannalaBrandtDistortion(px, py, u0, v0, tmp_coefs);
707  } break;
708 
710  default:
711  cam.initPersProjWithoutDistortion(px, py, u0, v0);
712  break;
713  }
714 
715  return cam;
716 }
717 
726 rs2_intrinsics vpRealSense2::getIntrinsics(const rs2_stream &stream, int index) const
727 {
728  auto vsp = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
729  return vsp.get_intrinsics();
730 }
731 
732 void vpRealSense2::getColorFrame(const rs2::frame &frame, vpImage<vpRGBa> &color)
733 {
734  auto vf = frame.as<rs2::video_frame>();
735  unsigned int width = (unsigned int)vf.get_width();
736  unsigned int height = (unsigned int)vf.get_height();
737  color.resize(height, width);
738 
739  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
740  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
741  reinterpret_cast<unsigned char *>(color.bitmap), width, height);
742  }
743  else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
744  memcpy(reinterpret_cast<unsigned char *>(color.bitmap),
745  const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
746  width * height * sizeof(vpRGBa));
747  }
748  else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
749  vpImageConvert::BGRToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
750  reinterpret_cast<unsigned char *>(color.bitmap), width, height);
751  }
752  else {
753  throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
754  }
755 }
756 
762 {
763  if (!m_init) { // If pipe is not yet created, create it. Otherwise, we already know depth scale.
764  rs2::pipeline *pipe = new rs2::pipeline;
765  rs2::pipeline_profile *pipelineProfile = new rs2::pipeline_profile;
766  *pipelineProfile = pipe->start();
767 
768  rs2::device dev = pipelineProfile->get_device();
769 
770  // Go over the device's sensors
771  for (rs2::sensor &sensor : dev.query_sensors()) {
772  // Check if the sensor is a depth sensor
773  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
774  m_depthScale = dpt.get_depth_scale();
775  }
776  }
777 
778  pipe->stop();
779  delete pipe;
780  delete pipelineProfile;
781  }
782 
783  return m_depthScale;
784 }
785 
786 void vpRealSense2::getGreyFrame(const rs2::frame &frame, vpImage<unsigned char> &grey)
787 {
788  auto vf = frame.as<rs2::video_frame>();
789  unsigned int width = (unsigned int)vf.get_width();
790  unsigned int height = (unsigned int)vf.get_height();
791  grey.resize(height, width);
792 
793  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
794  vpImageConvert::RGBToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
795  grey.bitmap, width, height);
796  }
797  else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
798  vpImageConvert::RGBaToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
799  grey.bitmap, width * height);
800  }
801  else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
802  vpImageConvert::BGRToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
803  grey.bitmap, width, height);
804  }
805  else {
806  throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
807  }
808 }
809 
810 void vpRealSense2::getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
811 {
812  auto vf = frame.as<rs2::video_frame>();
813  int size = vf.get_width() * vf.get_height();
814 
815  switch (frame.get_profile().format()) {
816  case RS2_FORMAT_RGB8:
817  case RS2_FORMAT_BGR8:
818  memcpy(data, frame.get_data(), size * 3);
819  break;
820 
821  case RS2_FORMAT_RGBA8:
822  case RS2_FORMAT_BGRA8:
823  memcpy(data, frame.get_data(), size * 4);
824  break;
825 
826  case RS2_FORMAT_Y16:
827  case RS2_FORMAT_Z16:
828  memcpy(data, frame.get_data(), size * 2);
829  break;
830 
831  case RS2_FORMAT_Y8:
832  memcpy(data, frame.get_data(), size);
833  break;
834 
835  default:
836  break;
837  }
838 }
839 
840 void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, std::vector<vpColVector> &pointcloud)
841 {
842  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
843  std::stringstream ss;
844  ss << "Error, depth scale <= 0: " << m_depthScale;
845  throw vpException(vpException::fatalError, ss.str());
846  }
847 
848  auto vf = depth_frame.as<rs2::video_frame>();
849  const int width = vf.get_width();
850  const int height = vf.get_height();
851  pointcloud.resize((size_t)(width * height));
852 
853  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
854  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
855 
856 // Multi-threading if OpenMP
857 // Concurrent writes at different locations are safe
858 #pragma omp parallel for schedule(dynamic)
859  for (int i = 0; i < height; i++) {
860  auto depth_pixel_index = i * width;
861 
862  for (int j = 0; j < width; j++, depth_pixel_index++) {
863  if (p_depth_frame[depth_pixel_index] == 0) {
864  pointcloud[(size_t)depth_pixel_index].resize(4, false);
865  pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
866  pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
867  pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
868  pointcloud[(size_t)depth_pixel_index][3] = 1.0;
869  continue;
870  }
871 
872  // Get the depth value of the current pixel
873  auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
874 
875  float points[3];
876  const float pixel[] = { (float)j, (float)i };
877  rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
878 
879  if (pixels_distance > m_max_Z)
880  points[0] = points[1] = points[2] = m_invalidDepthValue;
881 
882  pointcloud[(size_t)depth_pixel_index].resize(4, false);
883  pointcloud[(size_t)depth_pixel_index][0] = points[0];
884  pointcloud[(size_t)depth_pixel_index][1] = points[1];
885  pointcloud[(size_t)depth_pixel_index][2] = points[2];
886  pointcloud[(size_t)depth_pixel_index][3] = 1.0;
887  }
888  }
889 }
890 
891 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
/*!
 * Deproject a depth frame into an organized PCL XYZ point cloud
 * (width x height, row-major). Pixels with no depth, or farther than
 * m_max_Z, get coordinates set to m_invalidDepthValue.
 *
 * \param depth_frame : Input depth frame (16-bit depth, scaled by m_depthScale).
 * \param pointcloud : Output organized PCL cloud, resized to the frame dimensions.
 */
void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
{
  // A valid depth scale is mandatory to convert raw depth to meters.
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    std::stringstream ss;
    ss << "Error, depth scale <= 0: " << m_depthScale;
    throw vpException(vpException::fatalError, ss.str());
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int width = vf.get_width();
  const int height = vf.get_height();
  // Keep the cloud organized (width/height set) so consumers can index by pixel.
  pointcloud->width = (uint32_t)width;
  pointcloud->height = (uint32_t)height;
  pointcloud->resize((size_t)(width * height));

#if MANUAL_POINTCLOUD // faster to compute manually when tested
  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      // Zero raw depth means "no measurement": flag the point as invalid.
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      // Back-project the pixel to a 3D point in the depth camera frame.
      float points[3];
      const float pixel[] = { (float)j, (float)i };
      rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);

      // Points beyond the configured range are flagged as invalid.
      if (pixels_distance > m_max_Z)
        points[0] = points[1] = points[2] = m_invalidDepthValue;

      pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
      pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
      pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
    }
  }
#else
  // Alternative path: let librealsense compute the vertices, then filter by range.
  m_points = m_pointcloud.calculate(depth_frame);
  auto vertices = m_points.get_vertices();

  for (size_t i = 0; i < m_points.size(); i++) {
    if (vertices[i].z <= 0.0f || vertices[i].z > m_max_Z) {
      pointcloud->points[i].x = m_invalidDepthValue;
      pointcloud->points[i].y = m_invalidDepthValue;
      pointcloud->points[i].z = m_invalidDepthValue;
    }
    else {
      pointcloud->points[i].x = vertices[i].x;
      pointcloud->points[i].y = vertices[i].y;
      pointcloud->points[i].z = vertices[i].z;
    }
  }
#endif
}
958 
959 void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, const rs2::frame &color_frame,
960  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
961 {
962  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
963  throw vpException(vpException::fatalError, "Error, depth scale <= 0: %f", m_depthScale);
964  }
965 
966  auto vf = depth_frame.as<rs2::video_frame>();
967  const int depth_width = vf.get_width();
968  const int depth_height = vf.get_height();
969  pointcloud->width = static_cast<uint32_t>(depth_width);
970  pointcloud->height = static_cast<uint32_t>(depth_height);
971  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));
972 
973  vf = color_frame.as<rs2::video_frame>();
974  const int color_width = vf.get_width();
975  const int color_height = vf.get_height();
976 
977  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
978  const rs2_extrinsics depth2ColorExtrinsics =
979  depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
980  color_frame.get_profile().as<rs2::video_stream_profile>());
981  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
982  const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
983 
984  auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
985  const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
986  unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
987  const unsigned char *p_color_frame = static_cast<const unsigned char *>(color_frame.get_data());
988  rs2_extrinsics identity;
989  memset(identity.rotation, 0, sizeof(identity.rotation));
990  memset(identity.translation, 0, sizeof(identity.translation));
991  for (int i = 0; i < 3; i++) {
992  identity.rotation[i * 3 + i] = 1;
993  }
994  const bool registered_streams =
995  (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);
996 
997 // Multi-threading if OpenMP
998 // Concurrent writes at different locations are safe
999 #pragma omp parallel for schedule(dynamic)
1000  for (int i = 0; i < depth_height; i++) {
1001  auto depth_pixel_index = i * depth_width;
1002 
1003  for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
1004  if (p_depth_frame[depth_pixel_index] == 0) {
1005  pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
1006  pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
1007  pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;
1008 
1009  // For out of bounds color data, default to a shade of blue in order to
1010  // visually distinguish holes. This color value is same as the librealsense
1011  // out of bounds color value.
1012 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1013  unsigned int r = 96, g = 157, b = 198;
1014  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1015 
1016  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1017 #else
1018  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1019  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1020  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1021 #endif
1022  continue;
1023  }
1024 
1025  // Get the depth value of the current pixel
1026  auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
1027 
1028  float depth_point[3];
1029  const float pixel[] = { (float)j, (float)i };
1030  rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
1031 
1032  if (pixels_distance > m_max_Z) {
1033  depth_point[0] = depth_point[1] = depth_point[2] = m_invalidDepthValue;
1034  }
1035 
1036  pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
1037  pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
1038  pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];
1039 
1040  if (!registered_streams) {
1041  float color_point[3];
1042  rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
1043  float color_pixel[2];
1044  rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
1045 
1046  if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
1047  color_pixel[0] >= color_width) {
1048  // For out of bounds color data, default to a shade of blue in order to
1049  // visually distinguish holes. This color value is same as the librealsense
1050  // out of bounds color value.
1051 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1052  unsigned int r = 96, g = 157, b = 198;
1053  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1054 
1055  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1056 #else
1057  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1058  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1059  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1060 #endif
1061  }
1062  else {
1063  unsigned int i_ = (unsigned int)color_pixel[1];
1064  unsigned int j_ = (unsigned int)color_pixel[0];
1065 
1066 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1067  uint32_t rgb = 0;
1068  if (swap_rb) {
1069  rgb =
1070  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
1071  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1072  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2])
1073  << 16);
1074  }
1075  else {
1076  rgb =
1077  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1078  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1079  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
1080  }
1081 
1082  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1083 #else
1084  if (swap_rb) {
1085  pointcloud->points[(size_t)depth_pixel_index].b =
1086  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1087  pointcloud->points[(size_t)depth_pixel_index].g =
1088  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1089  pointcloud->points[(size_t)depth_pixel_index].r =
1090  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1091  }
1092  else {
1093  pointcloud->points[(size_t)depth_pixel_index].r =
1094  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1095  pointcloud->points[(size_t)depth_pixel_index].g =
1096  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1097  pointcloud->points[(size_t)depth_pixel_index].b =
1098  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1099  }
1100 #endif
1101  }
1102  }
1103  else {
1104 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1105  uint32_t rgb = 0;
1106  if (swap_rb) {
1107  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
1108  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1109  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1110  }
1111  else {
1112  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1113  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1114  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
1115  }
1116 
1117  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1118 #else
1119  if (swap_rb) {
1120  pointcloud->points[(size_t)depth_pixel_index].b =
1121  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1122  pointcloud->points[(size_t)depth_pixel_index].g =
1123  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1124  pointcloud->points[(size_t)depth_pixel_index].r =
1125  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1126  }
1127  else {
1128  pointcloud->points[(size_t)depth_pixel_index].r =
1129  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1130  pointcloud->points[(size_t)depth_pixel_index].g =
1131  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1132  pointcloud->points[(size_t)depth_pixel_index].b =
1133  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1134  }
1135 #endif
1136  }
1137  }
1138  }
1139 }
1140 
1141 #endif
1142 
1150 vpHomogeneousMatrix vpRealSense2::getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index) const
1151 {
1152  int to_index = -1;
1153 
1154  if (from_index != -1) // If we have to specify indices for streams. (Ex.: T265 device having 2 fisheyes)
1155  {
1156  if (from_index == 1) // From left => To right.
1157  to_index = 2;
1158  else if (from_index == 2) // From right => To left.
1159  to_index = 1;
1160  }
1161 
1162  auto from_stream = m_pipelineProfile.get_stream(from, from_index);
1163  auto to_stream = m_pipelineProfile.get_stream(to, to_index);
1164 
1165  rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
1166 
1168  vpRotationMatrix R;
1169  for (unsigned int i = 0; i < 3; i++) {
1170  t[i] = extrinsics.translation[i];
1171  for (unsigned int j = 0; j < 3; j++)
1172  R[i][j] = extrinsics.rotation[j * 3 + i]; // rotation is column-major order
1173  }
1174 
1175  vpHomogeneousMatrix to_M_from(t, R);
1176  return to_M_from;
1177 }
1178 
1179 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1190  double *ts)
{
  // Grab a synchronized frameset and pick the pose stream out of it.
  auto frame = m_pipe.wait_for_frames();
  auto f = frame.first_or_default(RS2_STREAM_POSE);
  auto pose_data = f.as<rs2::pose_frame>().get_pose_data();

  if (ts != nullptr)
    *ts = frame.get_timestamp(); // Timestamp of the whole frameset

  if (cMw != nullptr) {
    // Cache translation and rotation (quaternion) in the mutable members,
    // then build the homogeneous transformation from them.
    m_pos[0] = static_cast<double>(pose_data.translation.x);
    m_pos[1] = static_cast<double>(pose_data.translation.y);
    m_pos[2] = static_cast<double>(pose_data.translation.z);

    m_quat[0] = static_cast<double>(pose_data.rotation.x);
    m_quat[1] = static_cast<double>(pose_data.rotation.y);
    m_quat[2] = static_cast<double>(pose_data.rotation.z);
    m_quat[3] = static_cast<double>(pose_data.rotation.w);

    *cMw = vpHomogeneousMatrix(m_pos, m_quat);
  }

  if (odo_vel != nullptr) {
    // 6-vector: linear velocity (0..2) then angular velocity (3..5).
    odo_vel->resize(6, false);
    (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
    (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
    (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
    (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
    (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
    (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
  }

  if (odo_acc != nullptr) {
    // 6-vector: linear acceleration (0..2) then angular acceleration (3..5).
    odo_acc->resize(6, false);
    (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
    (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
    (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
    (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
    (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
    (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
  }

  // Tracker confidence as reported by the device (T265 pose stream).
  return pose_data.tracker_confidence;
}
1234 
{
  // Wait for a frameset and pick the accelerometer sample out of it.
  auto frame = m_pipe.wait_for_frames();
  auto f = frame.first_or_default(RS2_STREAM_ACCEL);
  auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();

  if (ts != nullptr)
    *ts = f.get_timestamp(); // Timestamp of the accel frame itself (not the frameset)

  if (imu_acc != nullptr) {
    imu_acc->resize(3, false); // false: no need to zero, all 3 entries are overwritten
    (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
    (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
    (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
  }
}
1271 
1292 void vpRealSense2::getIMUVelocity(vpColVector *imu_vel, double *ts)
1293 {
1294  auto frame = m_pipe.wait_for_frames();
1295  auto f = frame.first_or_default(RS2_STREAM_GYRO);
1296  auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();
1297 
1298  if (ts != nullptr)
1299  *ts = f.get_timestamp();
1300 
1301  if (imu_vel != nullptr) {
1302  imu_vel->resize(3, false);
1303  (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
1304  (*imu_vel)[1] = static_cast<double>(imu_vel_data.x);
1305  (*imu_vel)[2] = static_cast<double>(imu_vel_data.x);
1306  }
1307 }
1308 
1329 void vpRealSense2::getIMUData(vpColVector *imu_acc, vpColVector *imu_vel, double *ts)
1330 {
1331  auto data = m_pipe.wait_for_frames();
1332 
1333  if (ts != nullptr)
1334  *ts = data.get_timestamp();
1335 
1336  if (imu_acc != nullptr) {
1337  auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
1338  auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();
1339 
1340  imu_acc->resize(3, false);
1341  (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
1342  (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
1343  (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
1344  }
1345 
1346  if (imu_vel != nullptr) {
1347  auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
1348  auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();
1349 
1350  imu_vel->resize(3, false);
1351  (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
1352  (*imu_vel)[1] = static_cast<double>(imu_vel_data.y);
1353  (*imu_vel)[2] = static_cast<double>(imu_vel_data.z);
1354  }
1355 }
1356 #endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1357 
1361 bool vpRealSense2::open(const rs2::config &cfg)
1362 {
1363  if (m_init) {
1364  close();
1365  }
1366 
1367  m_pipelineProfile = m_pipe.start(cfg);
1368 
1369  rs2::device dev = m_pipelineProfile.get_device();
1370 
1371 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1372  // Query device product line D400/SR300/L500/T200
1373  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1374 #endif
1375 
1376  // Go over the device's sensors
1377  for (rs2::sensor &sensor : dev.query_sensors()) {
1378  // Check if the sensor is a depth sensor
1379  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1380  m_depthScale = dpt.get_depth_scale();
1381  }
1382  }
1383 
1384  m_init = true;
1385  return m_init;
1386 }
1387 
1394 bool vpRealSense2::open(const rs2::config &cfg, std::function<void(rs2::frame)> &callback)
1395 {
1396  if (m_init) {
1397  close();
1398  }
1399 
1400  m_pipelineProfile = m_pipe.start(cfg, callback);
1401 
1402  rs2::device dev = m_pipelineProfile.get_device();
1403 
1404 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1405  // Query device product line D400/SR300/L500/T200
1406  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1407 #endif
1408 
1409  // Go over the device's sensors
1410  for (rs2::sensor &sensor : dev.query_sensors()) {
1411  // Check if the sensor is a depth sensor
1412  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1413  m_depthScale = dpt.get_depth_scale();
1414  }
1415  }
1416 
1417  m_init = true;
1418  return m_init;
1419 }
1420 
1426 {
1427 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1428  // With previous code, example/device/framegrabber/grabRealSense2.cpp does not work with D455
1429  // Error: Frame didn't arrive within 15000
1430  // Following code from:
1431  // https://github.com/IntelRealSense/librealsense/blob/4673a37d981164af8eeb8e296e430fc1427e008d/unit-tests/live/memory/test-extrinsics.cpp#L119
1432 
1433  // Reset product line info
1434  m_product_line = "unknown";
1435 
1436  rs2::context ctx;
1437  auto list = ctx.query_devices();
1438  if (list.size() > 0) {
1439  // Only one plugged sensor supported
1440  auto dev = list.front();
1441  auto sensors = dev.query_sensors();
1442  if (dev.supports(RS2_CAMERA_INFO_PRODUCT_LINE)) {
1443  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1444  }
1445  }
1446 
1447  return m_product_line;
1448 #else
1449  return (std::string("unknown"));
1450 #endif
1451 }
1452 
1463 {
1464  std::ostringstream oss;
1465  oss << *this;
1466  return oss.str();;
1467 }
1468 
namespace
{
// Helper functions to print information about the RealSense device
// (used by operator<<(std::ostream &, const vpRealSense2 &) below).

/*
 * Pretty-print an rs2_extrinsics (3x3 rotation + translation) to the stream.
 * extrinsics.rotation is stored column-major; the [j * 3 + i] indexing prints
 * the matrix row by row.
 */
void print(const rs2_extrinsics &extrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << "Rotation Matrix:\n";

  for (auto i = 0; i < 3; ++i) {
    for (auto j = 0; j < 3; ++j) {
      ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
    }
    ss << std::endl;
  }

  ss << "\nTranslation Vector: ";
  for (size_t i = 0; i < sizeof(extrinsics.translation) / sizeof(extrinsics.translation[0]); ++i)
    ss << std::setprecision(15) << extrinsics.translation[i] << " ";

  os << ss.str() << "\n\n";
}

/*
 * Pretty-print an rs2_intrinsics (image size, principal point, focal lengths,
 * distortion model and coefficients) to the stream.
 */
void print(const rs2_intrinsics &intrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << std::left << std::setw(14) << "Width: "
     << "\t" << intrinsics.width << "\n"
     << std::left << std::setw(14) << "Height: "
     << "\t" << intrinsics.height << "\n"
     << std::left << std::setw(14) << "PPX: "
     << "\t" << std::setprecision(15) << intrinsics.ppx << "\n"
     << std::left << std::setw(14) << "PPY: "
     << "\t" << std::setprecision(15) << intrinsics.ppy << "\n"
     << std::left << std::setw(14) << "Fx: "
     << "\t" << std::setprecision(15) << intrinsics.fx << "\n"
     << std::left << std::setw(14) << "Fy: "
     << "\t" << std::setprecision(15) << intrinsics.fy << "\n"
     << std::left << std::setw(14) << "Distortion: "
     << "\t" << rs2_distortion_to_string(intrinsics.model) << "\n"
     << std::left << std::setw(14) << "Coeffs: ";

  for (size_t i = 0; i < sizeof(intrinsics.coeffs) / sizeof(intrinsics.coeffs[0]); ++i)
    ss << "\t" << std::setprecision(15) << intrinsics.coeffs[i] << " ";

  os << ss.str() << "\n\n";
}

/*
 * Fetch the intrinsics of a video stream profile, swallowing any exception:
 * some profiles do not provide intrinsics, in which case `intrinsics` is left
 * unchanged (typically the zero-initialized default, detected by the caller).
 */
void safe_get_intrinsics(const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
{
  try {
    intrinsics = profile.get_intrinsics();
  }
  catch (...) {
  }
}

/*
 * Exact comparison of two intrinsics. The exact float equality (==, memcmp on
 * coeffs) is intentional here: we only test whether two profiles report the
 * very same calibration record, not numerical closeness.
 */
bool operator==(const rs2_intrinsics &lhs, const rs2_intrinsics &rhs)
{
  return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
    lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
    !std::memcmp(lhs.coeffs, rhs.coeffs, sizeof(rhs.coeffs));
}

/*
 * Join a set of rs2_format values into a single "fmt1/fmt2/..." string
 * (no separator after the last element).
 */
std::string get_str_formats(const std::set<rs2_format> &formats)
{
  std::stringstream ss;
  for (auto format = formats.begin(); format != formats.end(); ++format) {
    ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ? "" : "/");
  }
  return ss.str();
}

/*
 * Key type grouping intrinsics by (stream, index, resolution); operator< makes
 * it usable as a std::map key.
 */
struct stream_and_resolution
{
  rs2_stream stream;
  int stream_index;
  int width;
  int height;
  std::string stream_name;

  bool operator<(const stream_and_resolution &obj) const
  {
    return (std::make_tuple(stream, stream_index, width, height) <
            std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
  }
};

/*
 * Key type identifying a stream by (type, index); operator< makes it usable as
 * a std::map key.
 */
struct stream_and_index
{
  rs2_stream stream;
  int stream_index;

  bool operator<(const stream_and_index &obj) const
  {
    return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
  }
};
} // anonymous namespace
1567 
/*
 * Dump a full human-readable report of the RealSense device attached to `rs`:
 * device info, supported options per sensor, supported stream profiles, and the
 * available intrinsics/extrinsics. Exact layout is part of the output contract.
 */
std::ostream &operator<<(std::ostream &os, const vpRealSense2 &rs)
{
  rs2::device dev = rs.m_pipelineProfile.get_device();
  os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
     << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
     << std::endl;

  // Show which options are supported by this device
  os << " Device info: \n";
  for (auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
    auto param = static_cast<rs2_camera_info>(j);
    if (dev.supports(param))
      os << " " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) << ": \t"
         << dev.get_info(param) << "\n";
  }

  os << "\n";

  // For each sensor, list the supported options with their min/max/step/default range.
  for (auto &&sensor : dev.query_sensors()) {
    os << "Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;

    os << std::setw(55) << " Supported options:" << std::setw(10) << "min" << std::setw(10) << " max" << std::setw(6)
       << " step" << std::setw(10) << " default" << std::endl;
    for (auto j = 0; j < RS2_OPTION_COUNT; ++j) {
      auto opt = static_cast<rs2_option>(j);
      if (sensor.supports(opt)) {
        auto range = sensor.get_option_range(opt);
        os << " " << std::left << std::setw(50) << opt << " : " << std::setw(5) << range.min << "... "
           << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def << "\n";
      }
    }

    os << "\n";
  }

  // For each sensor, list the stream profiles (resolution/fps/format per stream).
  for (auto &&sensor : dev.query_sensors()) {
    os << "Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) << "\n";

    os << std::setw(55) << " Supported modes:" << std::setw(10) << "stream" << std::setw(10) << " resolution"
       << std::setw(6) << " fps" << std::setw(10) << " format"
       << "\n";
    // Show which streams are supported by this device
    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        // Video streams have a resolution; other streams (IMU, pose, ...) do not.
        os << " " << profile.stream_name() << "\t " << video.width() << "x" << video.height() << "\t@ "
           << profile.fps() << "Hz\t" << profile.format() << "\n";
      }
      else {
        os << " " << profile.stream_name() << "\t@ " << profile.fps() << "Hz\t" << profile.format() << "\n";
      }
    }

    os << "\n";
  }

  // Collect one representative profile per (stream, index) and group intrinsics
  // by (stream, index, resolution); identical intrinsics accumulate the set of
  // pixel formats that share them.
  std::map<stream_and_index, rs2::stream_profile> streams;
  std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
  for (auto &&sensor : dev.query_sensors()) {
    // Intrinsics
    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        if (streams.find(stream_and_index { profile.stream_type(), profile.stream_index() }) == streams.end()) {
          streams[stream_and_index { profile.stream_type(), profile.stream_index() }] = profile;
        }

        rs2_intrinsics intrinsics {}; // zero-init: stays default if safe_get_intrinsics fails
        stream_and_resolution stream_res { profile.stream_type(), profile.stream_index(), video.width(), video.height(),
                                          profile.stream_name() };
        safe_get_intrinsics(video, intrinsics);
        auto it = std::find_if(
            (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
            [&](const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) { return intrinsics == kvp.second; });
        if (it == (intrinsics_map[stream_res]).end()) {
          (intrinsics_map[stream_res]).push_back({ {profile.format()}, intrinsics });
        }
        else {
          it->first.insert(profile.format()); // If the intrinsics are equals,
                                              // add the profile format to
                                              // format set
        }
      }
    }
  }

  // Print the grouped intrinsics; a default-constructed rs2_intrinsics means
  // the profile provided none.
  os << "Provided Intrinsic:\n";
  for (auto &kvp : intrinsics_map) {
    auto stream_res = kvp.first;
    for (auto &intrinsics : kvp.second) {
      auto formats = get_str_formats(intrinsics.first);
      os << "Intrinsic of \"" << stream_res.stream_name << "\"\t " << stream_res.width << "x" << stream_res.height
         << "\t " << formats << "\n";
      if (intrinsics.second == rs2_intrinsics {}) {
        os << "Intrinsic NOT available!\n\n";
      }
      else {
        print(intrinsics.second, os);
      }
    }
  }

  // Print Extrinsics: all ordered pairs of collected streams (including self).
  os << "\nProvided Extrinsic:\n";
  for (auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
    for (auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
      os << "Extrinsic from \"" << kvp1->second.stream_name() << "\"\t "
         << "To"
         << "\t \"" << kvp2->second.stream_name() << "\"\n";
      auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
      print(extrinsics, os);
    }
  }

  return os;
}
1706 END_VISP_NAMESPACE
1707 #elif !defined(VISP_BUILD_SHARED_LIBS)
// Work around to avoid warning: libvisp_sensor.a(vpRealSense2.cpp.o) has no symbols
// (keeps the translation unit non-empty when RealSense2 support is disabled).
void dummy_vpRealSense2() { };
1710 #endif
bool operator==(const vpArray2D< double > &A) const
Definition: vpArray2D.h:1310
friend std::ostream & operator<<(std::ostream &s, const vpArray2D< Type > &A)
Definition: vpArray2D.h:611
Generic class defining intrinsic camera parameters.
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ ProjWithKannalaBrandtDistortion
Projection with Kannala-Brandt distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
void initPersProjWithDistortion(double px, double py, double u0, double v0, double kud, double kdu)
void initProjWithKannalaBrandtDistortion(double px, double py, double u0, double v0, const std::vector< double > &distortion_coefficients)
Implementation of column vector and the associated operations.
Definition: vpColVector.h:191
void resize(unsigned int i, bool flagNullify=true)
Definition: vpColVector.h:1143
error that can be emitted by ViSP classes.
Definition: vpException.h:60
@ fatalError
Fatal error.
Definition: vpException.h:72
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void RGBToGrey(unsigned char *rgb, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false)
static void RGBaToGrey(unsigned char *rgba, unsigned char *grey, unsigned int width, unsigned int height, unsigned int nThreads=0)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
static void BGRToGrey(unsigned char *bgr, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false, unsigned int nThreads=0)
static void BGRToRGBa(unsigned char *bgr, unsigned char *rgba, unsigned int width, unsigned int height, bool flip=false)
void resize(unsigned int h, unsigned int w)
resize the image : Image initialization
Definition: vpImage.h:538
Type * bitmap
points toward the bitmap
Definition: vpImage.h:135
Definition: vpRGBa.h:65
void getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
std::string getSensorInfo()
virtual ~vpRealSense2()
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
vpQuaternionVector m_quat
Definition: vpRealSense2.h:417
rs2::pipeline m_pipe
Definition: vpRealSense2.h:412
bool open(const rs2::config &cfg=rs2::config())
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
float getDepthScale()
rs2::pointcloud m_pointcloud
Definition: vpRealSense2.h:414
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
void getPointcloud(const rs2::depth_frame &depth_frame, std::vector< vpColVector > &pointcloud)
void getColorFrame(const rs2::frame &frame, vpImage< vpRGBa > &color)
void getIMUVelocity(vpColVector *imu_vel, double *ts)
float m_depthScale
Definition: vpRealSense2.h:409
vpTranslationVector m_pos
Definition: vpRealSense2.h:416
std::string m_product_line
Definition: vpRealSense2.h:419
std::string getProductLine()
unsigned int getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc, double *ts=nullptr)
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
void getGreyFrame(const rs2::frame &frame, vpImage< unsigned char > &grey)
rs2::points m_points
Definition: vpRealSense2.h:415
float m_invalidDepthValue
Definition: vpRealSense2.h:410
rs2::pipeline_profile m_pipelineProfile
Definition: vpRealSense2.h:413
Implementation of a rotation matrix and operations on such kind of matrices.
Class that consider the case of a translation vector.