Visual Servoing Platform  version 3.6.1 under development (2024-11-14)
vpRealSense2.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * librealSense2 interface.
33  *
34 *****************************************************************************/
35 
36 #include <visp3/core/vpConfig.h>
37 
38 #if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
39 #include <cstring>
40 #include <iomanip>
41 #include <map>
42 #include <set>
43 #include <visp3/core/vpImageConvert.h>
44 #include <visp3/sensor/vpRealSense2.h>
45 
46 #define MANUAL_POINTCLOUD 1
47 
48 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
49 namespace
50 {
51 bool operator==(const rs2_extrinsics &lhs, const rs2_extrinsics &rhs)
52 {
53  for (int i = 0; i < 3; i++) {
54  for (int j = 0; j < 3; j++) {
55  if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
56  return false;
57  }
58  }
59 
60  if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
61  return false;
62  }
63  }
64 
65  return true;
66 }
67 } // namespace
68 #endif
69 
70 BEGIN_VISP_NAMESPACE
74 vpRealSense2::vpRealSense2()
75  : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
76  m_points(), m_pos(), m_quat(), m_rot(), m_product_line(), m_init(false)
77 { }
78 
83 vpRealSense2::~vpRealSense2() { close(); }
84 
90 void vpRealSense2::acquire(vpImage<unsigned char> &grey, double *ts)
91 {
92  auto data = m_pipe.wait_for_frames();
93  auto color_frame = data.get_color_frame();
94  getGreyFrame(color_frame, grey);
95  if (ts != nullptr) {
96  *ts = data.get_timestamp();
97  }
98 }
99 
105 void vpRealSense2::acquire(vpImage<vpRGBa> &color, double *ts)
106 {
107  auto data = m_pipe.wait_for_frames();
108  auto color_frame = data.get_color_frame();
109  getColorFrame(color_frame, color);
110  if (ts != nullptr) {
111  *ts = data.get_timestamp();
112  }
113 }
114 
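// Usage sketch (illustrative, not part of this file): open the pipeline and
// grab one color image with the acquire() overload above. The stream settings
// below are assumptions; any mode the attached camera supports works.
//
//   vpRealSense2 rs;
//   rs2::config cfg;
//   cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
//   rs.open(cfg);
//   vpImage<vpRGBa> I;
//   double ts;
//   rs.acquire(I, &ts); // blocks until a frameset arrives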
125 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
126  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
127  rs2::align *const align_to, double *ts)
128 {
129  acquire(data_image, data_depth, data_pointCloud, data_infrared, nullptr, align_to, ts);
130 }
131 
194 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
195  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared1,
196  unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
197 {
198  auto data = m_pipe.wait_for_frames();
199  if (align_to != nullptr) {
200  // Infrared stream is not aligned
201  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
202 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
203  data = align_to->process(data);
204 #else // librealsense <= 2.9.0 spelled this method "proccess"
205  data = align_to->proccess(data);
206 #endif
207  }
208 
209  if (data_image != nullptr) {
210  auto color_frame = data.get_color_frame();
211  getNativeFrameData(color_frame, data_image);
212  }
213 
214  if (data_depth != nullptr || data_pointCloud != nullptr) {
215  auto depth_frame = data.get_depth_frame();
216  if (data_depth != nullptr) {
217  getNativeFrameData(depth_frame, data_depth);
218  }
219 
220  if (data_pointCloud != nullptr) {
221  getPointcloud(depth_frame, *data_pointCloud);
222  }
223  }
224 
225  if (data_infrared1 != nullptr) {
226  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
227  getNativeFrameData(infrared_frame, data_infrared1);
228  }
229 
230  if (data_infrared2 != nullptr) {
231  auto infrared_frame = data.get_infrared_frame(2);
232  getNativeFrameData(infrared_frame, data_infrared2);
233  }
234 
235  if (ts != nullptr) {
236  *ts = data.get_timestamp();
237  }
238 }
239 
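// Usage sketch (assumptions: a D400-class device with color and depth enabled
// at the same 640x480 resolution): grab raw color and depth buffers with the
// depth frame warped to the color viewpoint via rs2::align.
//
//   vpImage<vpRGBa> color(480, 640);
//   vpImage<uint16_t> depth_raw(480, 640);
//   rs2::align align_to(RS2_STREAM_COLOR);
//   rs.acquire(reinterpret_cast<unsigned char *>(color.bitmap),
//              reinterpret_cast<unsigned char *>(depth_raw.bitmap),
//              nullptr, nullptr, &align_to, nullptr);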
240 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
261 void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, double *ts)
262 {
263  auto data = m_pipe.wait_for_frames();
264 
265  if (left != nullptr) {
266  auto left_fisheye_frame = data.get_fisheye_frame(1);
267  unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
268  unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
269  left->resize(height, width);
270  getNativeFrameData(left_fisheye_frame, (*left).bitmap);
271  }
272 
273  if (right != nullptr) {
274  auto right_fisheye_frame = data.get_fisheye_frame(2);
275  unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
276  unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
277  right->resize(height, width);
278  getNativeFrameData(right_fisheye_frame, (*right).bitmap);
279  }
280 
281  if (ts != nullptr) {
282  *ts = data.get_timestamp();
283  }
284 }
285 
314 void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, vpHomogeneousMatrix *cMw,
315  vpColVector *odo_vel, vpColVector *odo_acc, unsigned int *confidence, double *ts)
316 {
317  auto data = m_pipe.wait_for_frames();
318 
319  if (left != nullptr) {
320  auto left_fisheye_frame = data.get_fisheye_frame(1);
321  unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
322  unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
323  left->resize(height, width);
324  getNativeFrameData(left_fisheye_frame, (*left).bitmap);
325  }
326 
327  if (right != nullptr) {
328  auto right_fisheye_frame = data.get_fisheye_frame(2);
329  unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
330  unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
331  right->resize(height, width);
332  getNativeFrameData(right_fisheye_frame, (*right).bitmap);
333  }
334 
335  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
336  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
337 
338  if (ts != nullptr) {
339  *ts = data.get_timestamp();
340  }
341 
342  if (cMw != nullptr) {
343  m_pos[0] = static_cast<double>(pose_data.translation.x);
344  m_pos[1] = static_cast<double>(pose_data.translation.y);
345  m_pos[2] = static_cast<double>(pose_data.translation.z);
346 
347  m_quat[0] = static_cast<double>(pose_data.rotation.x);
348  m_quat[1] = static_cast<double>(pose_data.rotation.y);
349  m_quat[2] = static_cast<double>(pose_data.rotation.z);
350  m_quat[3] = static_cast<double>(pose_data.rotation.w);
351 
352  *cMw = vpHomogeneousMatrix(m_pos, m_quat);
353  }
354 
355  if (odo_vel != nullptr) {
356  odo_vel->resize(6, false);
357  (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
358  (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
359  (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
360  (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
361  (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
362  (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
363  }
364 
365  if (odo_acc != nullptr) {
366  odo_acc->resize(6, false);
367  (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
368  (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
369  (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
370  (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
371  (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
372  (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
373  }
374 
375  if (confidence != nullptr) {
376  *confidence = pose_data.tracker_confidence;
377  }
378 }
379 
393 void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, vpHomogeneousMatrix *cMw,
394  vpColVector *odo_vel, vpColVector *odo_acc, vpColVector *imu_vel, vpColVector *imu_acc,
395  unsigned int *confidence, double *ts)
396 {
397  auto data = m_pipe.wait_for_frames();
398 
399  if (left != nullptr) {
400  auto left_fisheye_frame = data.get_fisheye_frame(1);
401  unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
402  unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
403  left->resize(height, width);
404  getNativeFrameData(left_fisheye_frame, (*left).bitmap);
405  }
406 
407  if (right != nullptr) {
408  auto right_fisheye_frame = data.get_fisheye_frame(2);
409  unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
410  unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
411  right->resize(height, width);
412  getNativeFrameData(right_fisheye_frame, (*right).bitmap);
413  }
414 
415  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
416  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
417 
418  if (ts != nullptr) {
419  *ts = data.get_timestamp();
420  }
421 
422  if (cMw != nullptr) {
423  m_pos[0] = static_cast<double>(pose_data.translation.x);
424  m_pos[1] = static_cast<double>(pose_data.translation.y);
425  m_pos[2] = static_cast<double>(pose_data.translation.z);
426 
427  m_quat[0] = static_cast<double>(pose_data.rotation.x);
428  m_quat[1] = static_cast<double>(pose_data.rotation.y);
429  m_quat[2] = static_cast<double>(pose_data.rotation.z);
430  m_quat[3] = static_cast<double>(pose_data.rotation.w);
431 
432  *cMw = vpHomogeneousMatrix(m_pos, m_quat);
433  }
434 
435  if (odo_vel != nullptr) {
436  odo_vel->resize(6, false);
437  (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
438  (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
439  (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
440  (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
441  (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
442  (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
443  }
444 
445  if (odo_acc != nullptr) {
446  odo_acc->resize(6, false);
447  (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
448  (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
449  (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
450  (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
451  (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
452  (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
453  }
454 
455  auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
456  auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();
457 
458  if (imu_acc != nullptr) {
459  imu_acc->resize(3, false);
460  (*imu_acc)[0] = static_cast<double>(accel_data.x);
461  (*imu_acc)[1] = static_cast<double>(accel_data.y);
462  (*imu_acc)[2] = static_cast<double>(accel_data.z);
463  }
464 
465  auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
466  auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();
467 
468  if (imu_vel != nullptr) {
469  imu_vel->resize(3, false);
470  (*imu_vel)[0] = static_cast<double>(gyro_data.x);
471  (*imu_vel)[1] = static_cast<double>(gyro_data.y);
472  (*imu_vel)[2] = static_cast<double>(gyro_data.z);
473  }
474 
475  if (confidence != nullptr) {
476  *confidence = pose_data.tracker_confidence;
477  }
478 }
479 #endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
480 
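// Usage sketch (assumption: a T265 tracking camera): grab both fisheye images
// and the current pose estimate in one call, using the overload above.
//
//   rs2::config cfg;
//   cfg.enable_stream(RS2_STREAM_POSE, RS2_FORMAT_6DOF);
//   cfg.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
//   cfg.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);
//   rs.open(cfg);
//   vpImage<unsigned char> I_left, I_right;
//   vpHomogeneousMatrix cMw;
//   unsigned int confidence;
//   rs.acquire(&I_left, &I_right, &cMw, nullptr, nullptr, &confidence, nullptr);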
481 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
494 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
495  std::vector<vpColVector> *const data_pointCloud,
496  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
497  rs2::align *const align_to, double *ts)
498 {
499  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, nullptr, align_to, ts);
500 }
501 
516 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
517  std::vector<vpColVector> *const data_pointCloud,
518  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared1,
519  unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
520 {
521  auto data = m_pipe.wait_for_frames();
522  if (align_to != nullptr) {
523  // Infrared stream is not aligned
524  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
525 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
526  data = align_to->process(data);
527 #else
528  data = align_to->proccess(data);
529 #endif
530  }
531 
532  if (data_image != nullptr) {
533  auto color_frame = data.get_color_frame();
534  getNativeFrameData(color_frame, data_image);
535  }
536 
537  if (data_depth != nullptr || data_pointCloud != nullptr || pointcloud != nullptr) {
538  auto depth_frame = data.get_depth_frame();
539  if (data_depth != nullptr) {
540  getNativeFrameData(depth_frame, data_depth);
541  }
542 
543  if (data_pointCloud != nullptr) {
544  getPointcloud(depth_frame, *data_pointCloud);
545  }
546 
547  if (pointcloud != nullptr) {
548  getPointcloud(depth_frame, pointcloud);
549  }
550  }
551 
552  if (data_infrared1 != nullptr) {
553  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
554  getNativeFrameData(infrared_frame, data_infrared1);
555  }
556 
557  if (data_infrared2 != nullptr) {
558  auto infrared_frame = data.get_infrared_frame(2);
559  getNativeFrameData(infrared_frame, data_infrared2);
560  }
561 
562  if (ts != nullptr) {
563  *ts = data.get_timestamp();
564  }
565 }
566 
579 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
580  std::vector<vpColVector> *const data_pointCloud,
581  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared,
582  rs2::align *const align_to, double *ts)
583 {
584  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, nullptr, align_to, ts);
585 }
586 
601 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
602  std::vector<vpColVector> *const data_pointCloud,
603  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared1,
604  unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
605 {
606  auto data = m_pipe.wait_for_frames();
607  if (align_to != nullptr) {
608  // Infrared stream is not aligned
609  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
610 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
611  data = align_to->process(data);
612 #else
613  data = align_to->proccess(data);
614 #endif
615  }
616 
617  auto color_frame = data.get_color_frame();
618  if (data_image != nullptr) {
619  getNativeFrameData(color_frame, data_image);
620  }
621 
622  if (data_depth != nullptr || data_pointCloud != nullptr || pointcloud != nullptr) {
623  auto depth_frame = data.get_depth_frame();
624  if (data_depth != nullptr) {
625  getNativeFrameData(depth_frame, data_depth);
626  }
627 
628  if (data_pointCloud != nullptr) {
629  getPointcloud(depth_frame, *data_pointCloud);
630  }
631 
632  if (pointcloud != nullptr) {
633  getPointcloud(depth_frame, color_frame, pointcloud);
634  }
635  }
636 
637  if (data_infrared1 != nullptr) {
638  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
639  getNativeFrameData(infrared_frame, data_infrared1);
640  }
641 
642  if (data_infrared2 != nullptr) {
643  auto infrared_frame = data.get_infrared_frame(2);
644  getNativeFrameData(infrared_frame, data_infrared2);
645  }
646 
647  if (ts != nullptr) {
648  *ts = data.get_timestamp();
649  }
650 }
651 #endif
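// Usage sketch (assumption: ViSP built with PCL support, color and depth
// streams enabled): grab a colored point cloud directly as a
// pcl::PointCloud<pcl::PointXYZRGB>, discarding the other outputs.
//
//   vpImage<vpRGBa> color(480, 640);
//   pcl::PointCloud<pcl::PointXYZRGB>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZRGB>);
//   rs.acquire(reinterpret_cast<unsigned char *>(color.bitmap), nullptr, nullptr, pointcloud,
//              nullptr, nullptr, nullptr, nullptr);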
652 
664 void vpRealSense2::close()
665 {
666  if (m_init) {
667  m_pipe.stop();
668  m_init = false;
669  }
670 }
671 
681 vpCameraParameters vpRealSense2::getCameraParameters(const rs2_stream &stream,
682  vpCameraParameters::vpCameraParametersProjType type,
683  int index) const
684 {
685  auto rs_stream = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
686  auto intrinsics = rs_stream.get_intrinsics();
687 
688  vpCameraParameters cam;
689  double u0 = intrinsics.ppx;
690  double v0 = intrinsics.ppy;
691  double px = intrinsics.fx;
692  double py = intrinsics.fy;
693 
694  switch (type) {
695  case vpCameraParameters::perspectiveProjWithDistortion: {
696  double kdu = intrinsics.coeffs[0];
697  cam.initPersProjWithDistortion(px, py, u0, v0, -kdu, kdu);
698  } break;
699 
700  case vpCameraParameters::ProjWithKannalaBrandtDistortion: {
701  std::vector<double> tmp_coefs;
702  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[0]));
703  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[1]));
704  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[2]));
705  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[3]));
706  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[4]));
707 
708  cam.initProjWithKannalaBrandtDistortion(px, py, u0, v0, tmp_coefs);
709  } break;
710 
711  case vpCameraParameters::perspectiveProjWithoutDistortion:
712  default:
713  cam.initPersProjWithoutDistortion(px, py, u0, v0);
714  break;
715  }
716 
717  return cam;
718 }
719 
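// Usage sketch: retrieve ViSP intrinsics for the color stream. Here the
// perspective model without distortion is requested; see the switch above for
// how the other projection models are mapped.
//
//   vpCameraParameters cam =
//       rs.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);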
728 rs2_intrinsics vpRealSense2::getIntrinsics(const rs2_stream &stream, int index) const
729 {
730  auto vsp = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
731  return vsp.get_intrinsics();
732 }
733 
734 void vpRealSense2::getColorFrame(const rs2::frame &frame, vpImage<vpRGBa> &color)
735 {
736  auto vf = frame.as<rs2::video_frame>();
737  unsigned int width = (unsigned int)vf.get_width();
738  unsigned int height = (unsigned int)vf.get_height();
739  color.resize(height, width);
740 
741  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
742  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
743  reinterpret_cast<unsigned char *>(color.bitmap), width, height);
744  }
745  else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
746  memcpy(reinterpret_cast<unsigned char *>(color.bitmap),
747  const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
748  width * height * sizeof(vpRGBa));
749  }
750  else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
751  vpImageConvert::BGRToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
752  reinterpret_cast<unsigned char *>(color.bitmap), width, height);
753  }
754  else {
755  throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
756  }
757 }
758 
763 float vpRealSense2::getDepthScale()
764 {
765  if (!m_init) { // If pipe is not yet created, create it. Otherwise, we already know depth scale.
766  rs2::pipeline *pipe = new rs2::pipeline;
767  rs2::pipeline_profile *pipelineProfile = new rs2::pipeline_profile;
768  *pipelineProfile = pipe->start();
769 
770  rs2::device dev = pipelineProfile->get_device();
771 
772  // Go over the device's sensors
773  for (rs2::sensor &sensor : dev.query_sensors()) {
774  // Check if the sensor is a depth sensor
775  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
776  m_depthScale = dpt.get_depth_scale();
777  }
778  }
779 
780  pipe->stop();
781  delete pipe;
782  delete pipelineProfile;
783  }
784 
785  return m_depthScale;
786 }
787 
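// Usage sketch: a raw Z16 depth value is converted to meters by the depth
// scale returned above, e.g. for pixel (i, j) of a raw depth image:
//
//   float Z = rs.getDepthScale() * static_cast<float>(depth_raw[i][j]); // meters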
788 void vpRealSense2::getGreyFrame(const rs2::frame &frame, vpImage<unsigned char> &grey)
789 {
790  auto vf = frame.as<rs2::video_frame>();
791  unsigned int width = (unsigned int)vf.get_width();
792  unsigned int height = (unsigned int)vf.get_height();
793  grey.resize(height, width);
794 
795  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
796  vpImageConvert::RGBToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
797  grey.bitmap, width, height);
798  }
799  else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
800  vpImageConvert::RGBaToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
801  grey.bitmap, width * height);
802  }
803  else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
804  vpImageConvert::BGRToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
805  grey.bitmap, width, height);
806  }
807  else {
808  throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
809  }
810 }
811 
812 void vpRealSense2::getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
813 {
814  auto vf = frame.as<rs2::video_frame>();
815  int size = vf.get_width() * vf.get_height();
816 
817  switch (frame.get_profile().format()) {
818  case RS2_FORMAT_RGB8:
819  case RS2_FORMAT_BGR8:
820  memcpy(data, frame.get_data(), size * 3);
821  break;
822 
823  case RS2_FORMAT_RGBA8:
824  case RS2_FORMAT_BGRA8:
825  memcpy(data, frame.get_data(), size * 4);
826  break;
827 
828  case RS2_FORMAT_Y16:
829  case RS2_FORMAT_Z16:
830  memcpy(data, frame.get_data(), size * 2);
831  break;
832 
833  case RS2_FORMAT_Y8:
834  memcpy(data, frame.get_data(), size);
835  break;
836 
837  default:
838  break;
839  }
840 }
841 
842 void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, std::vector<vpColVector> &pointcloud)
843 {
844  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
845  std::stringstream ss;
846  ss << "Error, depth scale <= 0: " << m_depthScale;
847  throw vpException(vpException::fatalError, ss.str());
848  }
849 
850  auto vf = depth_frame.as<rs2::video_frame>();
851  const int width = vf.get_width();
852  const int height = vf.get_height();
853  pointcloud.resize((size_t)(width * height));
854 
855  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
856  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
857 
858 // Multi-threading if OpenMP
859 // Concurrent writes at different locations are safe
860 #pragma omp parallel for schedule(dynamic)
861  for (int i = 0; i < height; i++) {
862  auto depth_pixel_index = i * width;
863 
864  for (int j = 0; j < width; j++, depth_pixel_index++) {
865  if (p_depth_frame[depth_pixel_index] == 0) {
866  pointcloud[(size_t)depth_pixel_index].resize(4, false);
867  pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
868  pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
869  pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
870  pointcloud[(size_t)depth_pixel_index][3] = 1.0;
871  continue;
872  }
873 
874  // Get the depth value of the current pixel
875  auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
876 
877  float points[3];
878  const float pixel[] = { (float)j, (float)i };
879  rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
880 
881  if (pixels_distance > m_max_Z)
882  points[0] = points[1] = points[2] = m_invalidDepthValue;
883 
884  pointcloud[(size_t)depth_pixel_index].resize(4, false);
885  pointcloud[(size_t)depth_pixel_index][0] = points[0];
886  pointcloud[(size_t)depth_pixel_index][1] = points[1];
887  pointcloud[(size_t)depth_pixel_index][2] = points[2];
888  pointcloud[(size_t)depth_pixel_index][3] = 1.0;
889  }
890  }
891 }
892 
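// Note: for a distortion-free pinhole model, rs2_deproject_pixel_to_point()
// used above reduces to X = (u - ppx) / fx * Z and Y = (v - ppy) / fy * Z,
// with Z = m_depthScale * raw_depth; each entry of the resulting point cloud
// stores the homogeneous coordinates (X, Y, Z, 1).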
893 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
894 void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
895 {
896  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
897  std::stringstream ss;
898  ss << "Error, depth scale <= 0: " << m_depthScale;
899  throw vpException(vpException::fatalError, ss.str());
900  }
901 
902  auto vf = depth_frame.as<rs2::video_frame>();
903  const int width = vf.get_width();
904  const int height = vf.get_height();
905  pointcloud->width = (uint32_t)width;
906  pointcloud->height = (uint32_t)height;
907  pointcloud->resize((size_t)(width * height));
908 
909 #if MANUAL_POINTCLOUD // faster to compute manually when tested
910  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
911  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
912 
913  // Multi-threading if OpenMP
914  // Concurrent writes at different locations are safe
915 #pragma omp parallel for schedule(dynamic)
916  for (int i = 0; i < height; i++) {
917  auto depth_pixel_index = i * width;
918 
919  for (int j = 0; j < width; j++, depth_pixel_index++) {
920  if (p_depth_frame[depth_pixel_index] == 0) {
921  pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
922  pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
923  pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
924  continue;
925  }
926 
927  // Get the depth value of the current pixel
928  auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
929 
930  float points[3];
931  const float pixel[] = { (float)j, (float)i };
932  rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
933 
934  if (pixels_distance > m_max_Z)
935  points[0] = points[1] = points[2] = m_invalidDepthValue;
936 
937  pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
938  pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
939  pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
940  }
941  }
942 #else
943  m_points = m_pointcloud.calculate(depth_frame);
944  auto vertices = m_points.get_vertices();
945 
946  for (size_t i = 0; i < m_points.size(); i++) {
947  if (vertices[i].z <= 0.0f || vertices[i].z > m_max_Z) {
948  pointcloud->points[i].x = m_invalidDepthValue;
949  pointcloud->points[i].y = m_invalidDepthValue;
950  pointcloud->points[i].z = m_invalidDepthValue;
951  }
952  else {
953  pointcloud->points[i].x = vertices[i].x;
954  pointcloud->points[i].y = vertices[i].y;
955  pointcloud->points[i].z = vertices[i].z;
956  }
957  }
958 #endif
959 }
960 
961 void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, const rs2::frame &color_frame,
962  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
963 {
964  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
965  throw vpException(vpException::fatalError, "Error, depth scale <= 0: %f", m_depthScale);
966  }
967 
968  auto vf = depth_frame.as<rs2::video_frame>();
969  const int depth_width = vf.get_width();
970  const int depth_height = vf.get_height();
971  pointcloud->width = static_cast<uint32_t>(depth_width);
972  pointcloud->height = static_cast<uint32_t>(depth_height);
973  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));
974 
975  vf = color_frame.as<rs2::video_frame>();
976  const int color_width = vf.get_width();
977  const int color_height = vf.get_height();
978 
979  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
980  const rs2_extrinsics depth2ColorExtrinsics =
981  depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
982  color_frame.get_profile().as<rs2::video_stream_profile>());
983  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
984  const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
985 
986  auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
987  const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
988  unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
989  const unsigned char *p_color_frame = static_cast<const unsigned char *>(color_frame.get_data());
990  rs2_extrinsics identity;
991  memset(identity.rotation, 0, sizeof(identity.rotation));
992  memset(identity.translation, 0, sizeof(identity.translation));
993  for (int i = 0; i < 3; i++) {
994  identity.rotation[i * 3 + i] = 1;
995  }
996  const bool registered_streams =
997  (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);
998 
999 // Multi-threading if OpenMP
1000 // Concurrent writes at different locations are safe
1001 #pragma omp parallel for schedule(dynamic)
1002  for (int i = 0; i < depth_height; i++) {
1003  auto depth_pixel_index = i * depth_width;
1004 
1005  for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
1006  if (p_depth_frame[depth_pixel_index] == 0) {
1007  pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
1008  pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
1009  pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;
1010 
1011  // For out of bounds color data, default to a shade of blue in order to
1012  // visually distinguish holes. This color value is same as the librealsense
1013  // out of bounds color value.
1014 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1015  unsigned int r = 96, g = 157, b = 198;
1016  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1017 
1018  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1019 #else
1020  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1021  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1022  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1023 #endif
1024  continue;
1025  }
1026 
1027  // Get the depth value of the current pixel
1028  auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
1029 
1030  float depth_point[3];
1031  const float pixel[] = { (float)j, (float)i };
1032  rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
1033 
1034  if (pixels_distance > m_max_Z) {
1035  depth_point[0] = depth_point[1] = depth_point[2] = m_invalidDepthValue;
1036  }
1037 
1038  pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
1039  pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
1040  pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];
1041 
1042  if (!registered_streams) {
1043  float color_point[3];
1044  rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
1045  float color_pixel[2];
1046  rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
1047 
1048  if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
1049  color_pixel[0] >= color_width) {
1050  // For out of bounds color data, default to a shade of blue in order to
1051  // visually distinguish holes. This color value is same as the librealsense
1052  // out of bounds color value.
1053 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1054  unsigned int r = 96, g = 157, b = 198;
1055  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1056 
1057  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1058 #else
1059  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1060  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1061  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1062 #endif
1063  }
1064  else {
1065  unsigned int i_ = (unsigned int)color_pixel[1];
1066  unsigned int j_ = (unsigned int)color_pixel[0];
1067 
1068 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1069  uint32_t rgb = 0;
1070  if (swap_rb) {
1071  rgb =
1072  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
1073  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1074  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2])
1075  << 16);
1076  }
1077  else {
1078  rgb =
1079  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1080  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1081  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
1082  }
1083 
1084  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1085 #else
1086  if (swap_rb) {
1087  pointcloud->points[(size_t)depth_pixel_index].b =
1088  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1089  pointcloud->points[(size_t)depth_pixel_index].g =
1090  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1091  pointcloud->points[(size_t)depth_pixel_index].r =
1092  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1093  }
1094  else {
1095  pointcloud->points[(size_t)depth_pixel_index].r =
1096  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1097  pointcloud->points[(size_t)depth_pixel_index].g =
1098  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1099  pointcloud->points[(size_t)depth_pixel_index].b =
1100  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1101  }
1102 #endif
1103  }
1104  }
1105  else {
1106 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1107  uint32_t rgb = 0;
1108  if (swap_rb) {
1109  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
1110  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1111  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1112  }
1113  else {
1114  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1115  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1116  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
1117  }
1118 
1119  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1120 #else
1121  if (swap_rb) {
1122  pointcloud->points[(size_t)depth_pixel_index].b =
1123  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1124  pointcloud->points[(size_t)depth_pixel_index].g =
1125  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1126  pointcloud->points[(size_t)depth_pixel_index].r =
1127  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1128  }
1129  else {
1130  pointcloud->points[(size_t)depth_pixel_index].r =
1131  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1132  pointcloud->points[(size_t)depth_pixel_index].g =
1133  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1134  pointcloud->points[(size_t)depth_pixel_index].b =
1135  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1136  }
1137 #endif
1138  }
1139  }
1140  }
1141 }
1142 
1143 #endif
1144 
1152 vpHomogeneousMatrix vpRealSense2::getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index) const
1153 {
1154  int to_index = -1;
1155 
1156  if (from_index != -1) // If we have to specify indices for streams. (Ex.: T265 device having 2 fisheyes)
1157  {
1158  if (from_index == 1) // From left => To right.
1159  to_index = 2;
1160  else if (from_index == 2) // From right => To left.
1161  to_index = 1;
1162  }
1163 
1164  auto from_stream = m_pipelineProfile.get_stream(from, from_index);
1165  auto to_stream = m_pipelineProfile.get_stream(to, to_index);
1166 
1167  rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
1168 
1169  vpTranslationVector t;
1170  vpRotationMatrix R;
1171  for (unsigned int i = 0; i < 3; i++) {
1172  t[i] = extrinsics.translation[i];
1173  for (unsigned int j = 0; j < 3; j++)
1174  R[i][j] = extrinsics.rotation[j * 3 + i]; // rotation is column-major order
1175  }
1176 
1177  vpHomogeneousMatrix to_M_from(t, R);
1178  return to_M_from;
1179 }
1180 
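// Usage sketch: get the depth-to-color extrinsics as a ViSP homogeneous
// matrix. Following the convention above, getTransformation(from, to) returns
// to_M_from:
//
//   vpHomogeneousMatrix color_M_depth = rs.getTransformation(RS2_STREAM_DEPTH, RS2_STREAM_COLOR);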
1181 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1191 unsigned int vpRealSense2::getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc,
1192  double *ts)
1193 {
1194  auto frame = m_pipe.wait_for_frames();
1195  auto f = frame.first_or_default(RS2_STREAM_POSE);
1196  auto pose_data = f.as<rs2::pose_frame>().get_pose_data();
1197 
1198  if (ts != nullptr)
1199  *ts = frame.get_timestamp();
1200 
1201  if (cMw != nullptr) {
1202  m_pos[0] = static_cast<double>(pose_data.translation.x);
1203  m_pos[1] = static_cast<double>(pose_data.translation.y);
1204  m_pos[2] = static_cast<double>(pose_data.translation.z);
1205 
1206  m_quat[0] = static_cast<double>(pose_data.rotation.x);
1207  m_quat[1] = static_cast<double>(pose_data.rotation.y);
1208  m_quat[2] = static_cast<double>(pose_data.rotation.z);
1209  m_quat[3] = static_cast<double>(pose_data.rotation.w);
1210 
1211  *cMw = vpHomogeneousMatrix(m_pos, m_quat);
1212  }
1213 
1214  if (odo_vel != nullptr) {
1215  odo_vel->resize(6, false);
1216  (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
1217  (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
1218  (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
1219  (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
1220  (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
1221  (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
1222  }
1223 
1224  if (odo_acc != nullptr) {
1225  odo_acc->resize(6, false);
1226  (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
1227  (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
1228  (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
1229  (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
1230  (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
1231  (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
1232  }
1233 
1234  return pose_data.tracker_confidence;
1235 }
1236 
1257 void vpRealSense2::getIMUAcceleration(vpColVector *imu_acc, double *ts)
1258 {
1259  auto frame = m_pipe.wait_for_frames();
1260  auto f = frame.first_or_default(RS2_STREAM_ACCEL);
1261  auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();
1262 
1263  if (ts != nullptr)
1264  *ts = f.get_timestamp();
1265 
1266  if (imu_acc != nullptr) {
1267  imu_acc->resize(3, false);
1268  (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
1269  (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
1270  (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
1271  }
1272 }
1273 
1294 void vpRealSense2::getIMUVelocity(vpColVector *imu_vel, double *ts)
1295 {
1296  auto frame = m_pipe.wait_for_frames();
1297  auto f = frame.first_or_default(RS2_STREAM_GYRO);
1298  auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();
1299 
1300  if (ts != nullptr)
1301  *ts = f.get_timestamp();
1302 
1303  if (imu_vel != nullptr) {
1304  imu_vel->resize(3, false);
1305  (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
1306  (*imu_vel)[1] = static_cast<double>(imu_vel_data.y);
1307  (*imu_vel)[2] = static_cast<double>(imu_vel_data.z);
1308  }
1309 }
1310 
1331 void vpRealSense2::getIMUData(vpColVector *imu_acc, vpColVector *imu_vel, double *ts)
1332 {
1333  auto data = m_pipe.wait_for_frames();
1334 
1335  if (ts != nullptr)
1336  *ts = data.get_timestamp();
1337 
1338  if (imu_acc != nullptr) {
1339  auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
1340  auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();
1341 
1342  imu_acc->resize(3, false);
1343  (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
1344  (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
1345  (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
1346  }
1347 
1348  if (imu_vel != nullptr) {
1349  auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
1350  auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();
1351 
1352  imu_vel->resize(3, false);
1353  (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
1354  (*imu_vel)[1] = static_cast<double>(imu_vel_data.y);
1355  (*imu_vel)[2] = static_cast<double>(imu_vel_data.z);
1356  }
1357 }
1358 #endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1359 
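// Usage sketch (assumption: an IMU-equipped device such as a D435i with accel
// and gyro streams enabled): read both IMU measurements in one call.
//
//   vpColVector imu_acc, imu_vel;
//   double ts;
//   rs.getIMUData(&imu_acc, &imu_vel, &ts); // accel in m/s^2, gyro in rad/s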
1363 bool vpRealSense2::open(const rs2::config &cfg)
1364 {
1365  if (m_init) {
1366  close();
1367  }
1368 
1369  m_pipelineProfile = m_pipe.start(cfg);
1370 
1371  rs2::device dev = m_pipelineProfile.get_device();
1372 
1373 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1374  // Query device product line D400/SR300/L500/T200
1375  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1376 #endif
1377 
1378  // Go over the device's sensors
1379  for (rs2::sensor &sensor : dev.query_sensors()) {
1380  // Check if the sensor is a depth sensor
1381  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1382  m_depthScale = dpt.get_depth_scale();
1383  }
1384  }
1385 
1386  m_init = true;
1387  return m_init;
1388 }
1389 
1396 bool vpRealSense2::open(const rs2::config &cfg, std::function<void(rs2::frame)> &callback)
1397 {
1398  if (m_init) {
1399  close();
1400  }
1401 
1402  m_pipelineProfile = m_pipe.start(cfg, callback);
1403 
1404  rs2::device dev = m_pipelineProfile.get_device();
1405 
1406 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1407  // Query device product line D400/SR300/L500/T200
1408  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1409 #endif
1410 
1411  // Go over the device's sensors
1412  for (rs2::sensor &sensor : dev.query_sensors()) {
1413  // Check if the sensor is a depth sensor
1414  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1415  m_depthScale = dpt.get_depth_scale();
1416  }
1417  }
1418 
1419  m_init = true;
1420  return m_init;
1421 }
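// Usage sketch: open with a frame callback instead of the blocking
// wait_for_frames() path. The lambda body here is an assumption for
// illustration; any callable matching std::function<void(rs2::frame)> works.
//
//   std::function<void(rs2::frame)> callback = [](rs2::frame frame) {
//     if (rs2::frameset fs = frame.as<rs2::frameset>()) {
//       // process all synchronized streams in fs
//     }
//   };
//   rs.open(cfg, callback);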
1422 
1427 std::string vpRealSense2::getProductLine()
1428 {
1429 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1430  // With previous code, example/device/framegrabber/grabRealSense2.cpp does not work with D455
1431  // Error: Frame didn't arrive within 15000
1432  // Following code from:
1433  // https://github.com/IntelRealSense/librealsense/blob/4673a37d981164af8eeb8e296e430fc1427e008d/unit-tests/live/memory/test-extrinsics.cpp#L119
1434 
1435  // Reset product line info
1436  m_product_line = "unknown";
1437 
1438  rs2::context ctx;
1439  auto list = ctx.query_devices();
1440  if (list.size() > 0) {
1441  // Only one plugged sensor supported
1442  auto dev = list.front();
1443  auto sensors = dev.query_sensors();
1444  if (dev.supports(RS2_CAMERA_INFO_PRODUCT_LINE)) {
1445  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1446  }
1447  }
1448 
1449  return m_product_line;
1450 #else
1451  return (std::string("unknown"));
1452 #endif
1453 }
1454 
1464 std::string vpRealSense2::getSensorInfo()
1465 {
1466  std::ostringstream oss;
1467  oss << *this;
1468  return oss.str();
1469 }
1470 
1471 namespace
1472 {
1473 // Helper functions to print information about the RealSense device
1474 void print(const rs2_extrinsics &extrinsics, std::ostream &os)
1475 {
1476  std::stringstream ss;
1477  ss << "Rotation Matrix:\n";
1478 
1479  for (auto i = 0; i < 3; ++i) {
1480  for (auto j = 0; j < 3; ++j) {
1481  ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
1482  }
1483  ss << std::endl;
1484  }
1485 
1486  ss << "\nTranslation Vector: ";
1487  for (size_t i = 0; i < sizeof(extrinsics.translation) / sizeof(extrinsics.translation[0]); ++i)
1488  ss << std::setprecision(15) << extrinsics.translation[i] << " ";
1489 
1490  os << ss.str() << "\n\n";
1491 }
1492 
1493 void print(const rs2_intrinsics &intrinsics, std::ostream &os)
1494 {
1495  std::stringstream ss;
1496  ss << std::left << std::setw(14) << "Width: "
1497  << "\t" << intrinsics.width << "\n"
1498  << std::left << std::setw(14) << "Height: "
1499  << "\t" << intrinsics.height << "\n"
1500  << std::left << std::setw(14) << "PPX: "
1501  << "\t" << std::setprecision(15) << intrinsics.ppx << "\n"
1502  << std::left << std::setw(14) << "PPY: "
1503  << "\t" << std::setprecision(15) << intrinsics.ppy << "\n"
1504  << std::left << std::setw(14) << "Fx: "
1505  << "\t" << std::setprecision(15) << intrinsics.fx << "\n"
1506  << std::left << std::setw(14) << "Fy: "
1507  << "\t" << std::setprecision(15) << intrinsics.fy << "\n"
1508  << std::left << std::setw(14) << "Distortion: "
1509  << "\t" << rs2_distortion_to_string(intrinsics.model) << "\n"
1510  << std::left << std::setw(14) << "Coeffs: ";
1511 
1512  for (size_t i = 0; i < sizeof(intrinsics.coeffs) / sizeof(intrinsics.coeffs[0]); ++i)
1513  ss << "\t" << std::setprecision(15) << intrinsics.coeffs[i] << " ";
1514 
1515  os << ss.str() << "\n\n";
1516 }
1517 
1518 void safe_get_intrinsics(const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
1519 {
1520  try {
1521  intrinsics = profile.get_intrinsics();
1522  }
1523  catch (...) {
1524  }
1525 }
1526 
1527 bool operator==(const rs2_intrinsics &lhs, const rs2_intrinsics &rhs)
1528 {
1529  return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
1530  lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
1531  !std::memcmp(lhs.coeffs, rhs.coeffs, sizeof(rhs.coeffs));
1532 }
1533 
1534 std::string get_str_formats(const std::set<rs2_format> &formats)
1535 {
1536  std::stringstream ss;
1537  for (auto format = formats.begin(); format != formats.end(); ++format) {
1538  ss << *format << (std::next(format) == formats.end() ? "" : "/");
1539  }
1540  return ss.str();
1541 }
1542 
1543 struct stream_and_resolution
1544 {
1545  rs2_stream stream;
1546  int stream_index;
1547  int width;
1548  int height;
1549  std::string stream_name;
1550 
1551  bool operator<(const stream_and_resolution &obj) const
1552  {
1553  return (std::make_tuple(stream, stream_index, width, height) <
1554  std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
1555  }
1556 };
1557 
1558 struct stream_and_index
1559 {
1560  rs2_stream stream;
1561  int stream_index;
1562 
1563  bool operator<(const stream_and_index &obj) const
1564  {
1565  return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
1566  }
1567 };
1568 } // anonymous namespace
1569 
1594 std::ostream &operator<<(std::ostream &os, const vpRealSense2 &rs)
1595 {
1596  rs2::device dev = rs.m_pipelineProfile.get_device();
1597  os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
1598  << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
1599  << std::endl;
1600 
1601  // Show which options are supported by this device
1602  os << " Device info: \n";
1603  for (auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
1604  auto param = static_cast<rs2_camera_info>(j);
1605  if (dev.supports(param))
1606  os << " " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) << ": \t"
1607  << dev.get_info(param) << "\n";
1608  }
1609 
1610  os << "\n";
1611 
1612  for (auto &&sensor : dev.query_sensors()) {
1613  os << "Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
1614 
1615  os << std::setw(55) << " Supported options:" << std::setw(10) << "min" << std::setw(10) << " max" << std::setw(6)
1616  << " step" << std::setw(10) << " default" << std::endl;
1617  for (auto j = 0; j < RS2_OPTION_COUNT; ++j) {
1618  auto opt = static_cast<rs2_option>(j);
1619  if (sensor.supports(opt)) {
1620  auto range = sensor.get_option_range(opt);
1621  os << " " << std::left << std::setw(50) << opt << " : " << std::setw(5) << range.min << "... "
1622  << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def << "\n";
1623  }
1624  }
1625 
1626  os << "\n";
1627  }
1628 
1629  for (auto &&sensor : dev.query_sensors()) {
1630  os << "Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) << "\n";
1631 
1632  os << std::setw(55) << " Supported modes:" << std::setw(10) << "stream" << std::setw(10) << " resolution"
1633  << std::setw(6) << " fps" << std::setw(10) << " format"
1634  << "\n";
1635  // Show which streams are supported by this device
1636  for (auto &&profile : sensor.get_stream_profiles()) {
1637  if (auto video = profile.as<rs2::video_stream_profile>()) {
1638  os << " " << profile.stream_name() << "\t " << video.width() << "x" << video.height() << "\t@ "
1639  << profile.fps() << "Hz\t" << profile.format() << "\n";
1640  }
1641  else {
1642  os << " " << profile.stream_name() << "\t@ " << profile.fps() << "Hz\t" << profile.format() << "\n";
1643  }
1644  }
1645 
1646  os << "\n";
1647  }
1648 
1649  std::map<stream_and_index, rs2::stream_profile> streams;
1650  std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
1651  for (auto &&sensor : dev.query_sensors()) {
1652  // Intrinsics
1653  for (auto &&profile : sensor.get_stream_profiles()) {
1654  if (auto video = profile.as<rs2::video_stream_profile>()) {
1655  if (streams.find(stream_and_index { profile.stream_type(), profile.stream_index() }) == streams.end()) {
1656  streams[stream_and_index { profile.stream_type(), profile.stream_index() }] = profile;
1657  }
1658 
1659  rs2_intrinsics intrinsics {};
1660  stream_and_resolution stream_res { profile.stream_type(), profile.stream_index(), video.width(), video.height(),
1661  profile.stream_name() };
1662  safe_get_intrinsics(video, intrinsics);
1663  auto it = std::find_if(
1664  (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
1665  [&](const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) { return intrinsics == kvp.second; });
1666  if (it == (intrinsics_map[stream_res]).end()) {
1667  (intrinsics_map[stream_res]).push_back({ {profile.format()}, intrinsics });
1668  }
1669  else {
1670  it->first.insert(profile.format()); // If the intrinsics are equals,
1671  // add the profile format to
1672  // format set
1673  }
1674  }
1675  }
1676  }
1677 
1678  os << "Provided Intrinsic:\n";
1679  for (auto &kvp : intrinsics_map) {
1680  auto stream_res = kvp.first;
1681  for (auto &intrinsics : kvp.second) {
1682  auto formats = get_str_formats(intrinsics.first);
1683  os << "Intrinsic of \"" << stream_res.stream_name << "\"\t " << stream_res.width << "x" << stream_res.height
1684  << "\t " << formats << "\n";
1685  if (intrinsics.second == rs2_intrinsics {}) {
1686  os << "Intrinsic NOT available!\n\n";
1687  }
1688  else {
1689  print(intrinsics.second, os);
1690  }
1691  }
1692  }
1693 
1694  // Print Extrinsics
1695  os << "\nProvided Extrinsic:\n";
1696  for (auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
1697  for (auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
1698  os << "Extrinsic from \"" << kvp1->second.stream_name() << "\"\t "
1699  << "To"
1700  << "\t \"" << kvp2->second.stream_name() << "\"\n";
1701  auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
1702  print(extrinsics, os);
1703  }
1704  }
1705 
1706  return os;
1707 }
1708 END_VISP_NAMESPACE
1709 #elif !defined(VISP_BUILD_SHARED_LIBS)
1710 // Work around to avoid warning: libvisp_sensor.a(vpRealSense2.cpp.o) has symbols
1711 void dummy_vpRealSense2() { };
1712 #endif