36 #include <visp3/core/vpConfig.h>
38 #if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
43 #include <visp3/core/vpImageConvert.h>
44 #include <visp3/sensor/vpRealSense2.h>
46 #define MANUAL_POINTCLOUD 1
50 bool operator==(
const rs2_extrinsics &lhs,
const rs2_extrinsics &rhs)
52 for (
int i = 0; i < 3; i++) {
53 for (
int j = 0; j < 3; j++) {
54 if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
59 if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
72 : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
73 m_points(), m_pos(), m_quat(), m_rot(), m_product_line(), m_init(false)
90 auto data =
m_pipe.wait_for_frames();
91 auto color_frame = data.get_color_frame();
94 *ts = data.get_timestamp();
105 auto data =
m_pipe.wait_for_frames();
106 auto color_frame = data.get_color_frame();
109 *ts = data.get_timestamp();
124 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared,
125 rs2::align *
const align_to,
double *ts)
127 acquire(data_image, data_depth, data_pointCloud, data_infrared, NULL, align_to, ts);
188 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared1,
189 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
191 auto data =
m_pipe.wait_for_frames();
192 if (align_to != NULL) {
195 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
196 data = align_to->process(data);
198 data = align_to->proccess(data);
202 if (data_image != NULL) {
203 auto color_frame = data.get_color_frame();
207 if (data_depth != NULL || data_pointCloud != NULL) {
208 auto depth_frame = data.get_depth_frame();
209 if (data_depth != NULL) {
213 if (data_pointCloud != NULL) {
218 if (data_infrared1 != NULL) {
219 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
223 if (data_infrared2 != NULL) {
224 auto infrared_frame = data.get_infrared_frame(2);
229 *ts = data.get_timestamp();
233 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
256 auto data =
m_pipe.wait_for_frames();
259 auto left_fisheye_frame = data.get_fisheye_frame(1);
260 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
261 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
262 left->
resize(height, width);
267 auto right_fisheye_frame = data.get_fisheye_frame(2);
268 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
269 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
270 right->
resize(height, width);
275 *ts = data.get_timestamp();
310 auto data =
m_pipe.wait_for_frames();
313 auto left_fisheye_frame = data.get_fisheye_frame(1);
314 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
315 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
316 left->
resize(height, width);
321 auto right_fisheye_frame = data.get_fisheye_frame(2);
322 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
323 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
324 right->
resize(height, width);
328 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
329 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
332 *ts = data.get_timestamp();
336 m_pos[0] =
static_cast<double>(pose_data.translation.x);
337 m_pos[1] =
static_cast<double>(pose_data.translation.y);
338 m_pos[2] =
static_cast<double>(pose_data.translation.z);
340 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
341 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
342 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
343 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
348 if (odo_vel != NULL) {
349 odo_vel->
resize(6,
false);
350 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
351 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
352 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
353 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
354 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
355 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
358 if (odo_acc != NULL) {
359 odo_acc->
resize(6,
false);
360 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
361 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
362 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
363 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
364 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
365 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
368 if (confidence != NULL) {
369 *confidence = pose_data.tracker_confidence;
388 unsigned int *confidence,
double *ts)
390 auto data =
m_pipe.wait_for_frames();
393 auto left_fisheye_frame = data.get_fisheye_frame(1);
394 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
395 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
396 left->
resize(height, width);
401 auto right_fisheye_frame = data.get_fisheye_frame(2);
402 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
403 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
404 right->
resize(height, width);
408 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
409 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
412 *ts = data.get_timestamp();
416 m_pos[0] =
static_cast<double>(pose_data.translation.x);
417 m_pos[1] =
static_cast<double>(pose_data.translation.y);
418 m_pos[2] =
static_cast<double>(pose_data.translation.z);
420 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
421 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
422 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
423 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
428 if (odo_vel != NULL) {
429 odo_vel->
resize(6,
false);
430 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
431 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
432 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
433 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
434 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
435 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
438 if (odo_acc != NULL) {
439 odo_acc->
resize(6,
false);
440 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
441 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
442 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
443 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
444 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
445 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
448 auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
449 auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();
451 if (imu_acc != NULL) {
452 imu_acc->
resize(3,
false);
453 (*imu_acc)[0] =
static_cast<double>(accel_data.x);
454 (*imu_acc)[1] =
static_cast<double>(accel_data.y);
455 (*imu_acc)[2] =
static_cast<double>(accel_data.z);
458 auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
459 auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();
461 if (imu_vel != NULL) {
462 imu_vel->
resize(3,
false);
463 (*imu_vel)[0] =
static_cast<double>(gyro_data.x);
464 (*imu_vel)[1] =
static_cast<double>(gyro_data.y);
465 (*imu_vel)[2] =
static_cast<double>(gyro_data.z);
468 if (confidence != NULL) {
469 *confidence = pose_data.tracker_confidence;
488 std::vector<vpColVector> *
const data_pointCloud,
489 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared,
490 rs2::align *
const align_to,
double *ts)
492 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to, ts);
510 std::vector<vpColVector> *
const data_pointCloud,
511 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
512 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
514 auto data =
m_pipe.wait_for_frames();
515 if (align_to != NULL) {
518 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
519 data = align_to->process(data);
521 data = align_to->proccess(data);
525 if (data_image != NULL) {
526 auto color_frame = data.get_color_frame();
530 if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
531 auto depth_frame = data.get_depth_frame();
532 if (data_depth != NULL) {
536 if (data_pointCloud != NULL) {
540 if (pointcloud != NULL) {
545 if (data_infrared1 != NULL) {
546 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
550 if (data_infrared2 != NULL) {
551 auto infrared_frame = data.get_infrared_frame(2);
556 *ts = data.get_timestamp();
573 std::vector<vpColVector> *
const data_pointCloud,
574 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared,
575 rs2::align *
const align_to,
double *ts)
577 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to, ts);
595 std::vector<vpColVector> *
const data_pointCloud,
596 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
597 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
599 auto data =
m_pipe.wait_for_frames();
600 if (align_to != NULL) {
603 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
604 data = align_to->process(data);
606 data = align_to->proccess(data);
610 auto color_frame = data.get_color_frame();
611 if (data_image != NULL) {
615 if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
616 auto depth_frame = data.get_depth_frame();
617 if (data_depth != NULL) {
621 if (data_pointCloud != NULL) {
625 if (pointcloud != NULL) {
630 if (data_infrared1 != NULL) {
631 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
635 if (data_infrared2 != NULL) {
636 auto infrared_frame = data.get_infrared_frame(2);
641 *ts = data.get_timestamp();
678 auto rs_stream =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
679 auto intrinsics = rs_stream.get_intrinsics();
682 double u0 = intrinsics.ppx;
683 double v0 = intrinsics.ppy;
684 double px = intrinsics.fx;
685 double py = intrinsics.fy;
689 double kdu = intrinsics.coeffs[0];
694 std::vector<double> tmp_coefs;
695 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[0]));
696 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[1]));
697 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[2]));
698 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[3]));
699 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[4]));
723 auto vsp =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
724 return vsp.get_intrinsics();
729 auto vf = frame.as<rs2::video_frame>();
730 unsigned int width = (
unsigned int)vf.get_width();
731 unsigned int height = (
unsigned int)vf.get_height();
732 color.
resize(height, width);
734 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
736 reinterpret_cast<unsigned char *
>(color.
bitmap), width, height);
737 }
else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
738 memcpy(
reinterpret_cast<unsigned char *
>(color.
bitmap),
739 const_cast<unsigned char *
>(
static_cast<const unsigned char *
>(frame.get_data())),
740 width * height *
sizeof(
vpRGBa));
741 }
else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
743 reinterpret_cast<unsigned char *
>(color.
bitmap), width, height);
756 rs2::pipeline *pipe =
new rs2::pipeline;
757 rs2::pipeline_profile *pipelineProfile =
new rs2::pipeline_profile;
758 *pipelineProfile = pipe->start();
760 rs2::device dev = pipelineProfile->get_device();
763 for (rs2::sensor &sensor : dev.query_sensors()) {
765 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
772 delete pipelineProfile;
780 auto vf = frame.as<rs2::video_frame>();
781 unsigned int width = (
unsigned int)vf.get_width();
782 unsigned int height = (
unsigned int)vf.get_height();
783 grey.
resize(height, width);
785 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
787 grey.
bitmap, width, height);
788 }
else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
790 grey.
bitmap, width * height);
791 }
else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
793 grey.
bitmap, width, height);
801 auto vf = frame.as<rs2::video_frame>();
802 int size = vf.get_width() * vf.get_height();
804 switch (frame.get_profile().format()) {
805 case RS2_FORMAT_RGB8:
806 case RS2_FORMAT_BGR8:
807 memcpy(data, frame.get_data(), size * 3);
810 case RS2_FORMAT_RGBA8:
811 case RS2_FORMAT_BGRA8:
812 memcpy(data, frame.get_data(), size * 4);
817 memcpy(data, frame.get_data(), size * 2);
821 memcpy(data, frame.get_data(), size);
831 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
832 std::stringstream ss;
837 auto vf = depth_frame.as<rs2::video_frame>();
838 const int width = vf.get_width();
839 const int height = vf.get_height();
840 pointcloud.resize((
size_t)(width * height));
842 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
843 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
847 #pragma omp parallel for schedule(dynamic)
848 for (
int i = 0; i < height; i++) {
849 auto depth_pixel_index = i * width;
851 for (
int j = 0; j < width; j++, depth_pixel_index++) {
852 if (p_depth_frame[depth_pixel_index] == 0) {
853 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
857 pointcloud[(size_t)depth_pixel_index][3] = 1.0;
862 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
865 const float pixel[] = {(float)j, (
float)i};
866 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
871 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
872 pointcloud[(size_t)depth_pixel_index][0] = points[0];
873 pointcloud[(size_t)depth_pixel_index][1] = points[1];
874 pointcloud[(size_t)depth_pixel_index][2] = points[2];
875 pointcloud[(size_t)depth_pixel_index][3] = 1.0;
883 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
884 std::stringstream ss;
889 auto vf = depth_frame.as<rs2::video_frame>();
890 const int width = vf.get_width();
891 const int height = vf.get_height();
892 pointcloud->width = (uint32_t)width;
893 pointcloud->height = (uint32_t)height;
894 pointcloud->resize((
size_t)(width * height));
896 #if MANUAL_POINTCLOUD
897 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
898 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
902 #pragma omp parallel for schedule(dynamic)
903 for (
int i = 0; i < height; i++) {
904 auto depth_pixel_index = i * width;
906 for (
int j = 0; j < width; j++, depth_pixel_index++) {
907 if (p_depth_frame[depth_pixel_index] == 0) {
915 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
918 const float pixel[] = {(float)j, (
float)i};
919 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
924 pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
925 pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
926 pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
931 auto vertices =
m_points.get_vertices();
933 for (
size_t i = 0; i <
m_points.size(); i++) {
934 if (vertices[i].z <= 0.0f || vertices[i].z >
m_max_Z) {
939 pointcloud->points[i].x = vertices[i].x;
940 pointcloud->points[i].y = vertices[i].y;
941 pointcloud->points[i].z = vertices[i].z;
948 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
950 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
954 auto vf = depth_frame.as<rs2::video_frame>();
955 const int depth_width = vf.get_width();
956 const int depth_height = vf.get_height();
957 pointcloud->width =
static_cast<uint32_t
>(depth_width);
958 pointcloud->height =
static_cast<uint32_t
>(depth_height);
959 pointcloud->resize(
static_cast<uint32_t
>(depth_width * depth_height));
961 vf = color_frame.as<rs2::video_frame>();
962 const int color_width = vf.get_width();
963 const int color_height = vf.get_height();
965 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
966 const rs2_extrinsics depth2ColorExtrinsics =
967 depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
968 color_frame.get_profile().as<rs2::video_stream_profile>());
969 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
970 const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
972 auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
973 const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
974 unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
975 const unsigned char *p_color_frame =
static_cast<const unsigned char *
>(color_frame.get_data());
976 rs2_extrinsics identity;
977 memset(identity.rotation, 0,
sizeof(identity.rotation));
978 memset(identity.translation, 0,
sizeof(identity.translation));
979 for (
int i = 0; i < 3; i++) {
980 identity.rotation[i * 3 + i] = 1;
982 const bool registered_streams =
983 (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);
987 #pragma omp parallel for schedule(dynamic)
988 for (
int i = 0; i < depth_height; i++) {
989 auto depth_pixel_index = i * depth_width;
991 for (
int j = 0; j < depth_width; j++, depth_pixel_index++) {
992 if (p_depth_frame[depth_pixel_index] == 0) {
1000 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1001 unsigned int r = 96, g = 157, b = 198;
1002 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1004 pointcloud->points[(size_t)depth_pixel_index].rgb = *
reinterpret_cast<float *
>(&rgb);
1006 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1007 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1008 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1014 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
1016 float depth_point[3];
1017 const float pixel[] = {(float)j, (
float)i};
1018 rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
1020 if (pixels_distance >
m_max_Z) {
1024 pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
1025 pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
1026 pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];
1028 if (!registered_streams) {
1029 float color_point[3];
1030 rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
1031 float color_pixel[2];
1032 rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
1034 if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
1035 color_pixel[0] >= color_width) {
1039 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1040 unsigned int r = 96, g = 157, b = 198;
1041 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1043 pointcloud->points[(size_t)depth_pixel_index].rgb = *
reinterpret_cast<float *
>(&rgb);
1045 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1046 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1047 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1050 unsigned int i_ = (
unsigned int)color_pixel[1];
1051 unsigned int j_ = (
unsigned int)color_pixel[0];
1053 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1057 (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) |
1058 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1059 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2])
1063 (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1064 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1065 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2]));
1068 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1071 pointcloud->points[(size_t)depth_pixel_index].b =
1072 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1073 pointcloud->points[(size_t)depth_pixel_index].g =
1074 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1075 pointcloud->points[(size_t)depth_pixel_index].r =
1076 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1078 pointcloud->points[(size_t)depth_pixel_index].r =
1079 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1080 pointcloud->points[(size_t)depth_pixel_index].g =
1081 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1082 pointcloud->points[(size_t)depth_pixel_index].b =
1083 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1088 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1091 rgb = (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) |
1092 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1093 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1095 rgb = (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1096 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1097 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]));
1100 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1103 pointcloud->points[(size_t)depth_pixel_index].b =
1104 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1105 pointcloud->points[(size_t)depth_pixel_index].g =
1106 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1107 pointcloud->points[(size_t)depth_pixel_index].r =
1108 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
1110 pointcloud->points[(size_t)depth_pixel_index].r =
1111 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1112 pointcloud->points[(size_t)depth_pixel_index].g =
1113 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1114 pointcloud->points[(size_t)depth_pixel_index].b =
1115 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
1136 if (from_index != -1)
1138 if (from_index == 1)
1140 else if (from_index == 2)
1147 rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
1151 for (
unsigned int i = 0; i < 3; i++) {
1152 t[i] = extrinsics.translation[i];
1153 for (
unsigned int j = 0; j < 3; j++)
1154 R[i][j] = extrinsics.rotation[j * 3 + i];
1161 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1174 auto frame =
m_pipe.wait_for_frames();
1175 auto f = frame.first_or_default(RS2_STREAM_POSE);
1176 auto pose_data = f.as<rs2::pose_frame>().get_pose_data();
1179 *ts = frame.get_timestamp();
1182 m_pos[0] =
static_cast<double>(pose_data.translation.x);
1183 m_pos[1] =
static_cast<double>(pose_data.translation.y);
1184 m_pos[2] =
static_cast<double>(pose_data.translation.z);
1186 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
1187 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
1188 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
1189 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
1194 if (odo_vel != NULL) {
1195 odo_vel->
resize(6,
false);
1196 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
1197 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
1198 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
1199 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
1200 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
1201 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
1204 if (odo_acc != NULL) {
1205 odo_acc->
resize(6,
false);
1206 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
1207 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
1208 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
1209 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
1210 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
1211 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
1214 return pose_data.tracker_confidence;
1239 auto frame =
m_pipe.wait_for_frames();
1240 auto f = frame.first_or_default(RS2_STREAM_ACCEL);
1241 auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();
1244 *ts = f.get_timestamp();
1246 if (imu_acc != NULL) {
1247 imu_acc->
resize(3,
false);
1248 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1249 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1250 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
1276 auto frame =
m_pipe.wait_for_frames();
1277 auto f = frame.first_or_default(RS2_STREAM_GYRO);
1278 auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();
1281 *ts = f.get_timestamp();
1283 if (imu_vel != NULL) {
1284 imu_vel->
resize(3,
false);
1285 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
1286 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.x);
1287 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.x);
1313 auto data =
m_pipe.wait_for_frames();
1316 *ts = data.get_timestamp();
1318 if (imu_acc != NULL) {
1319 auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
1320 auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();
1322 imu_acc->
resize(3,
false);
1323 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1324 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1325 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
1328 if (imu_vel != NULL) {
1329 auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
1330 auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();
1332 imu_vel->
resize(3,
false);
1333 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
1334 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.y);
1335 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.z);
1353 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1359 for (rs2::sensor &sensor : dev.query_sensors()) {
1361 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1386 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1392 for (rs2::sensor &sensor : dev.query_sensors()) {
1394 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1409 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1411 rs2::pipeline *pipe =
new rs2::pipeline;
1412 rs2::pipeline_profile *pipelineProfile =
new rs2::pipeline_profile;
1413 *pipelineProfile = pipe->start();
1415 rs2::device dev = pipelineProfile->get_device();
1417 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1424 delete pipelineProfile;
1429 return (std::string(
"unknown"));
1436 void print(
const rs2_extrinsics &extrinsics, std::ostream &os)
1438 std::stringstream ss;
1439 ss <<
"Rotation Matrix:\n";
1441 for (
auto i = 0; i < 3; ++i) {
1442 for (
auto j = 0; j < 3; ++j) {
1443 ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
1448 ss <<
"\nTranslation Vector: ";
1449 for (
size_t i = 0; i <
sizeof(extrinsics.translation) /
sizeof(extrinsics.translation[0]); ++i)
1450 ss << std::setprecision(15) << extrinsics.translation[i] <<
" ";
1452 os << ss.str() <<
"\n\n";
1455 void print(
const rs2_intrinsics &intrinsics, std::ostream &os)
1457 std::stringstream ss;
1458 ss << std::left << std::setw(14) <<
"Width: "
1459 <<
"\t" << intrinsics.width <<
"\n"
1460 << std::left << std::setw(14) <<
"Height: "
1461 <<
"\t" << intrinsics.height <<
"\n"
1462 << std::left << std::setw(14) <<
"PPX: "
1463 <<
"\t" << std::setprecision(15) << intrinsics.ppx <<
"\n"
1464 << std::left << std::setw(14) <<
"PPY: "
1465 <<
"\t" << std::setprecision(15) << intrinsics.ppy <<
"\n"
1466 << std::left << std::setw(14) <<
"Fx: "
1467 <<
"\t" << std::setprecision(15) << intrinsics.fx <<
"\n"
1468 << std::left << std::setw(14) <<
"Fy: "
1469 <<
"\t" << std::setprecision(15) << intrinsics.fy <<
"\n"
1470 << std::left << std::setw(14) <<
"Distortion: "
1471 <<
"\t" << rs2_distortion_to_string(intrinsics.model) <<
"\n"
1472 << std::left << std::setw(14) <<
"Coeffs: ";
1474 for (
size_t i = 0; i <
sizeof(intrinsics.coeffs) /
sizeof(intrinsics.coeffs[0]); ++i)
1475 ss <<
"\t" << std::setprecision(15) << intrinsics.coeffs[i] <<
" ";
1477 os << ss.str() <<
"\n\n";
1480 void safe_get_intrinsics(
const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
1483 intrinsics = profile.get_intrinsics();
1488 bool operator==(
const rs2_intrinsics &lhs,
const rs2_intrinsics &rhs)
1490 return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
1491 lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
1492 !std::memcmp(lhs.coeffs, rhs.coeffs,
sizeof(rhs.coeffs));
1495 std::string get_str_formats(
const std::set<rs2_format> &formats)
1497 std::stringstream ss;
1498 for (
auto format = formats.begin(); format != formats.end(); ++format) {
1499 ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ?
"" :
"/");
1504 struct stream_and_resolution {
1509 std::string stream_name;
1511 bool operator<(
const stream_and_resolution &obj)
const
1513 return (std::make_tuple(stream, stream_index, width, height) <
1514 std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
1518 struct stream_and_index {
1522 bool operator<(
const stream_and_index &obj)
const
1524 return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
1552 os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
1553 << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
1557 os <<
" Device info: \n";
1558 for (
auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
1559 auto param =
static_cast<rs2_camera_info
>(j);
1560 if (dev.supports(param))
1561 os <<
" " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) <<
": \t"
1562 << dev.get_info(param) <<
"\n";
1567 for (
auto &&sensor : dev.query_sensors()) {
1568 os <<
"Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
1570 os << std::setw(55) <<
" Supported options:" << std::setw(10) <<
"min" << std::setw(10) <<
" max" << std::setw(6)
1571 <<
" step" << std::setw(10) <<
" default" << std::endl;
1572 for (
auto j = 0; j < RS2_OPTION_COUNT; ++j) {
1573 auto opt =
static_cast<rs2_option
>(j);
1574 if (sensor.supports(opt)) {
1575 auto range = sensor.get_option_range(opt);
1576 os <<
" " << std::left << std::setw(50) << opt <<
" : " << std::setw(5) << range.min <<
"... "
1577 << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def <<
"\n";
1584 for (
auto &&sensor : dev.query_sensors()) {
1585 os <<
"Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) <<
"\n";
1587 os << std::setw(55) <<
" Supported modes:" << std::setw(10) <<
"stream" << std::setw(10) <<
" resolution"
1588 << std::setw(6) <<
" fps" << std::setw(10) <<
" format"
1591 for (
auto &&profile : sensor.get_stream_profiles()) {
1592 if (
auto video = profile.as<rs2::video_stream_profile>()) {
1593 os <<
" " << profile.stream_name() <<
"\t " << video.width() <<
"x" << video.height() <<
"\t@ "
1594 << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
1596 os <<
" " << profile.stream_name() <<
"\t@ " << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
1603 std::map<stream_and_index, rs2::stream_profile> streams;
1604 std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
1605 for (
auto &&sensor : dev.query_sensors()) {
1607 for (
auto &&profile : sensor.get_stream_profiles()) {
1608 if (
auto video = profile.as<rs2::video_stream_profile>()) {
1609 if (streams.find(stream_and_index{profile.stream_type(), profile.stream_index()}) == streams.end()) {
1610 streams[stream_and_index{profile.stream_type(), profile.stream_index()}] = profile;
1613 rs2_intrinsics intrinsics{};
1614 stream_and_resolution stream_res{profile.stream_type(), profile.stream_index(), video.width(), video.height(),
1615 profile.stream_name()};
1616 safe_get_intrinsics(video, intrinsics);
1617 auto it = std::find_if(
1618 (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
1619 [&](
const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) {
return intrinsics == kvp.second; });
1620 if (it == (intrinsics_map[stream_res]).end()) {
1621 (intrinsics_map[stream_res]).push_back({{profile.format()}, intrinsics});
1623 it->first.insert(profile.format());
1631 os <<
"Provided Intrinsic:\n";
1632 for (
auto &kvp : intrinsics_map) {
1633 auto stream_res = kvp.first;
1634 for (
auto &intrinsics : kvp.second) {
1635 auto formats = get_str_formats(intrinsics.first);
1636 os <<
"Intrinsic of \"" << stream_res.stream_name <<
"\"\t " << stream_res.width <<
"x" << stream_res.height
1637 <<
"\t " << formats <<
"\n";
1638 if (intrinsics.second == rs2_intrinsics{}) {
1639 os <<
"Intrinsic NOT available!\n\n";
1641 print(intrinsics.second, os);
1647 os <<
"\nProvided Extrinsic:\n";
1648 for (
auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
1649 for (
auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
1650 os <<
"Extrinsic from \"" << kvp1->second.stream_name() <<
"\"\t "
1652 <<
"\t \"" << kvp2->second.stream_name() <<
"\"\n";
1653 auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
1654 print(extrinsics, os);
1661 #elif !defined(VISP_BUILD_SHARED_LIBS)
1664 void dummy_vpRealSense2(){};
bool operator==(const vpArray2D< double > &A) const
friend std::ostream & operator<<(std::ostream &s, const vpArray2D< Type > &A)
Generic class defining intrinsic camera parameters.
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
vpCameraParametersProjType
@ perspectiveProjWithDistortion
@ ProjWithKannalaBrandtDistortion
@ perspectiveProjWithoutDistortion
void initPersProjWithDistortion(double px, double py, double u0, double v0, double kud, double kdu)
void initProjWithKannalaBrandtDistortion(double px, double py, double u0, double v0, const std::vector< double > &distortion_coefficients)
Implementation of column vector and the associated operations.
void resize(unsigned int i, bool flagNullify=true)
Error that can be emitted by ViSP classes.
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void RGBToGrey(unsigned char *rgb, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false)
static void RGBaToGrey(unsigned char *rgba, unsigned char *grey, unsigned int width, unsigned int height, unsigned int nThreads=0)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
static void BGRToGrey(unsigned char *bgr, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false, unsigned int nThreads=0)
static void BGRToRGBa(unsigned char *bgr, unsigned char *rgba, unsigned int width, unsigned int height, bool flip=false)
void resize(unsigned int h, unsigned int w)
resize the image : Image initialization
Type * bitmap
points toward the bitmap
void acquire(vpImage< unsigned char > &grey, double *ts=NULL)
void getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
vpQuaternionVector m_quat
unsigned int getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc, double *ts=NULL)
bool open(const rs2::config &cfg=rs2::config())
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
rs2::pointcloud m_pointcloud
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
void getPointcloud(const rs2::depth_frame &depth_frame, std::vector< vpColVector > &pointcloud)
void getColorFrame(const rs2::frame &frame, vpImage< vpRGBa > &color)
void getIMUVelocity(vpColVector *imu_vel, double *ts)
vpTranslationVector m_pos
std::string m_product_line
std::string getProductLine()
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
void getGreyFrame(const rs2::frame &frame, vpImage< unsigned char > &grey)
float m_invalidDepthValue
rs2::pipeline_profile m_pipelineProfile
Implementation of a rotation matrix and operations on such kind of matrices.
Class that consider the case of a translation vector.