36 #include <visp3/core/vpConfig.h> 38 #if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) 43 #include <visp3/core/vpImageConvert.h> 44 #include <visp3/sensor/vpRealSense2.h> 46 #define MANUAL_POINTCLOUD 1 49 bool operator==(
const rs2_extrinsics &lhs,
const rs2_extrinsics &rhs)
// Element-wise equality of two librealsense extrinsics: every rotation entry
// and every translation entry must agree to within float machine epsilon.
// NOTE(review): the early "return false" branches and the final "return true"
// were elided by the extraction; only the comparison conditions are visible.
51 for (
int i = 0; i < 3; i++) {
52 for (
int j = 0; j < 3; j++) {
// The 3x3 rotation is stored as a flat float[9]; compare entry (i, j).
53 if (std::fabs(lhs.rotation[i*3 + j] - rhs.rotation[i*3 + j]) >
54 std::numeric_limits<float>::epsilon()) {
// Translation components are compared with the same epsilon tolerance.
59 if (std::fabs(lhs.translation[i] - rhs.translation[i]) >
60 std::numeric_limits<float>::epsilon()) {
73 : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
74 m_points(), m_pos(), m_quat(), m_rot(), m_product_line(), m_init(false)
91 auto data =
m_pipe.wait_for_frames();
92 auto color_frame = data.get_color_frame();
95 *ts = data.get_timestamp();
106 auto data =
m_pipe.wait_for_frames();
107 auto color_frame = data.get_color_frame();
110 *ts = data.get_timestamp();
125 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared,
126 rs2::align *
const align_to,
double *ts)
128 acquire(data_image, data_depth, data_pointCloud, data_infrared, NULL, align_to, ts);
189 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared1,
190 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
// Grab a synchronized frameset from the running pipeline (blocking call).
192 auto data =
m_pipe.wait_for_frames();
// Optionally register the streams (e.g. depth aligned to color).
193 if (align_to != NULL) {
// NOTE(review): "proccess" in the #else branch is intentional — it appears to
// be the (misspelled) method name exposed by librealsense releases older than
// the version checked by the RS2_API_VERSION guard. Confirm against the old
// librealsense headers before "correcting" it.
196 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0)) 197 data = align_to->process(data);
199 data = align_to->proccess(data);
// Each output is optional; only extract the streams the caller asked for.
203 if (data_image != NULL) {
204 auto color_frame = data.get_color_frame();
208 if (data_depth != NULL || data_pointCloud != NULL) {
209 auto depth_frame = data.get_depth_frame();
210 if (data_depth != NULL) {
214 if (data_pointCloud != NULL) {
219 if (data_infrared1 != NULL) {
220 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
224 if (data_infrared2 != NULL) {
225 auto infrared_frame = data.get_infrared_frame(2);
230 *ts = data.get_timestamp();
234 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0)) 257 auto data =
m_pipe.wait_for_frames();
260 auto left_fisheye_frame = data.get_fisheye_frame(1);
261 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
262 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
263 left->
resize(height, width);
268 auto right_fisheye_frame = data.get_fisheye_frame(2);
269 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
270 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
271 right->
resize(height, width);
276 *ts = data.get_timestamp();
311 auto data =
m_pipe.wait_for_frames();
314 auto left_fisheye_frame = data.get_fisheye_frame(1);
315 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
316 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
317 left->
resize(height, width);
322 auto right_fisheye_frame = data.get_fisheye_frame(2);
323 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
324 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
325 right->
resize(height, width);
329 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
330 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
333 *ts = data.get_timestamp();
337 m_pos[0] =
static_cast<double>(pose_data.translation.x);
338 m_pos[1] =
static_cast<double>(pose_data.translation.y);
339 m_pos[2] =
static_cast<double>(pose_data.translation.z);
341 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
342 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
343 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
344 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
349 if(odo_vel != NULL) {
350 odo_vel->
resize(6,
false);
351 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
352 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
353 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
354 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
355 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
356 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
359 if(odo_acc != NULL) {
360 odo_acc->
resize(6,
false);
361 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
362 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
363 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
364 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
365 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
366 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
369 if(confidence != NULL) {
370 *confidence = pose_data.tracker_confidence;
389 unsigned int *confidence,
double *ts)
391 auto data =
m_pipe.wait_for_frames();
394 auto left_fisheye_frame = data.get_fisheye_frame(1);
395 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
396 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
397 left->
resize(height, width);
402 auto right_fisheye_frame = data.get_fisheye_frame(2);
403 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
404 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
405 right->
resize(height, width);
409 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
410 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
413 *ts = data.get_timestamp();
417 m_pos[0] =
static_cast<double>(pose_data.translation.x);
418 m_pos[1] =
static_cast<double>(pose_data.translation.y);
419 m_pos[2] =
static_cast<double>(pose_data.translation.z);
421 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
422 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
423 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
424 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
429 if(odo_vel != NULL) {
430 odo_vel->
resize(6,
false);
431 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
432 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
433 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
434 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
435 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
436 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
439 if(odo_acc != NULL) {
440 odo_acc->
resize(6,
false);
441 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
442 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
443 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
444 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
445 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
446 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
449 auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
450 auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();
452 if(imu_acc != NULL) {
453 imu_acc->
resize(3,
false);
454 (*imu_acc)[0] =
static_cast<double>(accel_data.x);
455 (*imu_acc)[1] =
static_cast<double>(accel_data.y);
456 (*imu_acc)[2] =
static_cast<double>(accel_data.z);
459 auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
460 auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();
462 if(imu_vel != NULL) {
463 imu_vel->
resize(3,
false);
464 (*imu_vel)[0] =
static_cast<double>(gyro_data.x);
465 (*imu_vel)[1] =
static_cast<double>(gyro_data.y);
466 (*imu_vel)[2] =
static_cast<double>(gyro_data.z);
469 if (confidence != NULL) {
470 *confidence = pose_data.tracker_confidence;
473 #endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0)) 489 std::vector<vpColVector> *
const data_pointCloud,
490 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared,
491 rs2::align *
const align_to,
double *ts)
493 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to, ts);
511 std::vector<vpColVector> *
const data_pointCloud,
512 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
513 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
515 auto data =
m_pipe.wait_for_frames();
516 if (align_to != NULL) {
519 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0)) 520 data = align_to->process(data);
522 data = align_to->proccess(data);
526 if (data_image != NULL) {
527 auto color_frame = data.get_color_frame();
531 if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
532 auto depth_frame = data.get_depth_frame();
533 if (data_depth != NULL) {
537 if (data_pointCloud != NULL) {
541 if (pointcloud != NULL) {
546 if (data_infrared1 != NULL) {
547 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
551 if (data_infrared2 != NULL) {
552 auto infrared_frame = data.get_infrared_frame(2);
557 *ts = data.get_timestamp();
574 std::vector<vpColVector> *
const data_pointCloud,
575 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared,
576 rs2::align *
const align_to,
double *ts)
578 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to, ts);
596 std::vector<vpColVector> *
const data_pointCloud,
597 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
598 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
600 auto data =
m_pipe.wait_for_frames();
601 if (align_to != NULL) {
604 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0)) 605 data = align_to->process(data);
607 data = align_to->proccess(data);
611 auto color_frame = data.get_color_frame();
612 if (data_image != NULL) {
616 if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
617 auto depth_frame = data.get_depth_frame();
618 if (data_depth != NULL) {
622 if (data_pointCloud != NULL) {
626 if (pointcloud != NULL) {
631 if (data_infrared1 != NULL) {
632 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
636 if (data_infrared2 != NULL) {
637 auto infrared_frame = data.get_infrared_frame(2);
642 *ts = data.get_timestamp();
678 auto rs_stream =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
679 auto intrinsics = rs_stream.get_intrinsics();
682 double u0 = intrinsics.ppx;
683 double v0 = intrinsics.ppy;
684 double px = intrinsics.fx;
685 double py = intrinsics.fy;
691 double kdu = intrinsics.coeffs[0];
698 std::vector<double> tmp_coefs;
699 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[0]));
700 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[1]));
701 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[2]));
702 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[3]));
703 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[4]));
728 auto vsp =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
729 return vsp.get_intrinsics();
734 auto vf = frame.as<rs2::video_frame>();
735 unsigned int width = (
unsigned int)vf.get_width();
736 unsigned int height = (
unsigned int)vf.get_height();
737 color.
resize(height, width);
739 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
741 reinterpret_cast<unsigned char *
>(color.
bitmap), width, height);
742 }
else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
743 memcpy(reinterpret_cast<unsigned char *>(color.
bitmap),
744 const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
745 width * height *
sizeof(
vpRGBa));
746 }
else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
748 reinterpret_cast<unsigned char *
>(color.
bitmap), width, height);
761 rs2::pipeline *pipe =
new rs2::pipeline;
762 rs2::pipeline_profile *pipelineProfile =
new rs2::pipeline_profile;
763 *pipelineProfile = pipe->start();
765 rs2::device dev = pipelineProfile->get_device();
768 for (rs2::sensor &sensor : dev.query_sensors()) {
770 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
777 delete pipelineProfile;
785 auto vf = frame.as<rs2::video_frame>();
786 unsigned int width = (
unsigned int)vf.get_width();
787 unsigned int height = (
unsigned int)vf.get_height();
788 grey.
resize(height, width);
790 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
792 grey.
bitmap, width, height);
793 }
else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
795 grey.
bitmap, width * height);
796 }
else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
798 grey.
bitmap, width, height);
806 auto vf = frame.as<rs2::video_frame>();
807 int size = vf.get_width() * vf.get_height();
809 switch (frame.get_profile().format()) {
810 case RS2_FORMAT_RGB8:
811 case RS2_FORMAT_BGR8:
812 memcpy(data, frame.get_data(), size * 3);
815 case RS2_FORMAT_RGBA8:
816 case RS2_FORMAT_BGRA8:
817 memcpy(data, frame.get_data(), size * 4);
822 memcpy(data, frame.get_data(), size * 2);
826 memcpy(data, frame.get_data(), size);
836 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
837 std::stringstream ss;
842 auto vf = depth_frame.as<rs2::video_frame>();
843 const int width = vf.get_width();
844 const int height = vf.get_height();
845 pointcloud.resize((
size_t)(width * height));
847 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
848 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
852 #pragma omp parallel for schedule(dynamic) 853 for (
int i = 0; i < height; i++) {
854 auto depth_pixel_index = i * width;
856 for (
int j = 0; j < width; j++, depth_pixel_index++) {
857 if (p_depth_frame[depth_pixel_index] == 0) {
858 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
862 pointcloud[(size_t)depth_pixel_index][3] = 1.0;
867 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
870 const float pixel[] = {(float)j, (
float)i};
871 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
876 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
877 pointcloud[(size_t)depth_pixel_index][0] = points[0];
878 pointcloud[(size_t)depth_pixel_index][1] = points[1];
879 pointcloud[(size_t)depth_pixel_index][2] = points[2];
880 pointcloud[(size_t)depth_pixel_index][3] = 1.0;
889 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
890 std::stringstream ss;
895 auto vf = depth_frame.as<rs2::video_frame>();
896 const int width = vf.get_width();
897 const int height = vf.get_height();
898 pointcloud->width = (uint32_t)width;
899 pointcloud->height = (uint32_t)height;
900 pointcloud->resize((
size_t)(width * height));
902 #if MANUAL_POINTCLOUD // faster to compute manually when tested 903 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
904 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
908 #pragma omp parallel for schedule(dynamic) 909 for (
int i = 0; i < height; i++) {
910 auto depth_pixel_index = i * width;
912 for (
int j = 0; j < width; j++, depth_pixel_index++) {
913 if (p_depth_frame[depth_pixel_index] == 0) {
921 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
924 const float pixel[] = {(float)j, (
float)i};
925 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
930 pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
931 pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
932 pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
937 auto vertices =
m_points.get_vertices();
939 for (
size_t i = 0; i <
m_points.size(); i++) {
940 if (vertices[i].z <= 0.0f || vertices[i].z >
m_max_Z) {
945 pointcloud->points[i].x = vertices[i].x;
946 pointcloud->points[i].y = vertices[i].y;
947 pointcloud->points[i].z = vertices[i].z;
954 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
956 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
960 auto vf = depth_frame.as<rs2::video_frame>();
961 const int depth_width = vf.get_width();
962 const int depth_height = vf.get_height();
963 pointcloud->width =
static_cast<uint32_t
>(depth_width);
964 pointcloud->height =
static_cast<uint32_t
>(depth_height);
965 pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));
967 vf = color_frame.as<rs2::video_frame>();
968 const int color_width = vf.get_width();
969 const int color_height = vf.get_height();
971 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
972 const rs2_extrinsics depth2ColorExtrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().
973 get_extrinsics_to(color_frame.get_profile().as<rs2::video_stream_profile>());
974 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
975 const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
977 auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
978 const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
979 unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
980 const unsigned char *p_color_frame =
static_cast<const unsigned char *
>(color_frame.get_data());
// Build an identity extrinsics (identity rotation, zero translation) to test
// whether the depth and color streams are already registered to each other.
981 rs2_extrinsics identity;
982 memset(identity.rotation, 0,
sizeof(identity.rotation));
983 memset(identity.translation, 0,
sizeof(identity.translation));
984 for (
int i = 0; i < 3; i++) {
// Diagonal entries of the flat float[9] rotation: indices 0, 4, 8.
985 identity.rotation[i*3 + i] = 1;
// Streams count as registered when the depth->color transform is (epsilon-)
// identity — via the rs2_extrinsics operator== defined in this file — and
// both images have identical dimensions; then the per-pixel reprojection of
// depth points into the color image can be skipped.
987 const bool registered_streams = (depth2ColorExtrinsics == identity) &&
988 (color_width == depth_width) && (color_height == depth_height);
992 #pragma omp parallel for schedule(dynamic) 993 for (
int i = 0; i < depth_height; i++) {
994 auto depth_pixel_index = i * depth_width;
996 for (
int j = 0; j < depth_width; j++, depth_pixel_index++) {
997 if (p_depth_frame[depth_pixel_index] == 0) {
1005 #if PCL_VERSION_COMPARE(<, 1, 1, 0) 1006 unsigned int r = 96, g = 157, b = 198;
1007 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 | static_cast<uint32_t>(g) << 8 |
static_cast<uint32_t
>(b));
1009 pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1011 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1012 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1013 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1019 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
1021 float depth_point[3];
1022 const float pixel[] = {(float)j, (
float)i};
1023 rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
1025 if (pixels_distance >
m_max_Z) {
1029 pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
1030 pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
1031 pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];
1033 if (!registered_streams) {
1034 float color_point[3];
1035 rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
1036 float color_pixel[2];
1037 rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
1039 if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 || color_pixel[0] >= color_width) {
1043 #if PCL_VERSION_COMPARE(<, 1, 1, 0) 1044 unsigned int r = 96, g = 157, b = 198;
1045 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 | static_cast<uint32_t>(g) << 8 |
static_cast<uint32_t
>(b));
1047 pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1049 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1050 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1051 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1054 unsigned int i_ = (
unsigned int)color_pixel[1];
1055 unsigned int j_ = (
unsigned int)color_pixel[0];
1057 #if PCL_VERSION_COMPARE(<, 1, 1, 0) 1061 (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) |
1062 static_cast<uint32_t>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1063 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2]) << 16);
1065 rgb = (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1066 static_cast<uint32_t>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1067 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2]));
1070 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1073 pointcloud->points[(size_t)depth_pixel_index].b =
1074 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1075 pointcloud->points[(size_t)depth_pixel_index].g =
1076 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1077 pointcloud->points[(size_t)depth_pixel_index].r =
1078 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1080 pointcloud->points[(size_t)depth_pixel_index].r =
1081 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1082 pointcloud->points[(size_t)depth_pixel_index].g =
1083 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1084 pointcloud->points[(size_t)depth_pixel_index].b =
1085 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1090 #if PCL_VERSION_COMPARE(<, 1, 1, 0) 1094 (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) |
1095 static_cast<uint32_t>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1096 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1098 rgb = (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1099 static_cast<uint32_t>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1100 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]));
1103 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1106 pointcloud->points[(size_t)depth_pixel_index].b =
1107 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1108 pointcloud->points[(size_t)depth_pixel_index].g =
1109 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1110 pointcloud->points[(size_t)depth_pixel_index].r =
1111 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
1113 pointcloud->points[(size_t)depth_pixel_index].r =
1114 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1115 pointcloud->points[(size_t)depth_pixel_index].g =
1116 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1117 pointcloud->points[(size_t)depth_pixel_index].b =
1118 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
1138 if(from_index != -1)
1150 rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
// Copy the librealsense extrinsics into ViSP translation/rotation containers.
1154 for (
unsigned int i = 0; i < 3; i++) {
1155 t[i] = extrinsics.translation[i];
1156 for (
unsigned int j = 0; j < 3; j++)
// rs2_extrinsics stores its 3x3 rotation column-major in a flat float[9],
// so element (row i, col j) lives at index j*3 + i; this fills the
// row-major R[i][j] without transposing the rotation.
1157 R[i][j] = extrinsics.rotation[j * 3 + i];
1164 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0)) 1176 auto frame =
m_pipe.wait_for_frames();
1177 auto f = frame.first_or_default(RS2_STREAM_POSE);
1178 auto pose_data = f.as<rs2::pose_frame>().get_pose_data();
1181 *ts = frame.get_timestamp();
1185 m_pos[0] =
static_cast<double>(pose_data.translation.x);
1186 m_pos[1] =
static_cast<double>(pose_data.translation.y);
1187 m_pos[2] =
static_cast<double>(pose_data.translation.z);
1189 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
1190 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
1191 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
1192 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
1199 odo_vel->
resize(6,
false);
1200 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
1201 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
1202 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
1203 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
1204 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
1205 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
1210 odo_acc->
resize(6,
false);
1211 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
1212 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
1213 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
1214 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
1215 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
1216 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
1219 return pose_data.tracker_confidence;
1244 auto frame =
m_pipe.wait_for_frames();
1245 auto f = frame.first_or_default(RS2_STREAM_ACCEL);
1246 auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();
1249 *ts = f.get_timestamp();
1253 imu_acc->
resize(3,
false);
1254 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1255 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1256 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
1282 auto frame =
m_pipe.wait_for_frames();
1283 auto f = frame.first_or_default(RS2_STREAM_GYRO);
1284 auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();
1287 *ts = f.get_timestamp();
1291 imu_vel->
resize(3,
false);
1292 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
1293 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.x);
1294 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.x);
1320 auto data =
m_pipe.wait_for_frames();
1323 *ts = data.get_timestamp();
1327 auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
1328 auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();
1330 imu_acc->
resize(3,
false);
1331 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1332 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1333 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
1338 auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
1339 auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();
1341 imu_vel->
resize(3,
false);
1342 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
1343 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.y);
1344 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.z);
1347 #endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0)) 1362 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0)) 1368 for (rs2::sensor &sensor : dev.query_sensors()) {
1370 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1394 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0)) 1400 for (rs2::sensor &sensor : dev.query_sensors()) {
1402 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1416 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0)) 1418 rs2::pipeline *pipe =
new rs2::pipeline;
1419 rs2::pipeline_profile *pipelineProfile =
new rs2::pipeline_profile;
1420 *pipelineProfile = pipe->start();
1422 rs2::device dev = pipelineProfile->get_device();
1424 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0)) 1431 delete pipelineProfile;
1436 return (std::string(
"unknown"));
1443 void print(
const rs2_extrinsics &extrinsics, std::ostream &os)
1445 std::stringstream ss;
1446 ss <<
"Rotation Matrix:\n";
1448 for (
auto i = 0; i < 3; ++i) {
1449 for (
auto j = 0; j < 3; ++j) {
1450 ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
1455 ss <<
"\nTranslation Vector: ";
1456 for (
size_t i = 0; i <
sizeof(extrinsics.translation) /
sizeof(extrinsics.translation[0]); ++i)
1457 ss << std::setprecision(15) << extrinsics.translation[i] <<
" ";
1459 os << ss.str() <<
"\n\n";
1462 void print(
const rs2_intrinsics &intrinsics, std::ostream &os)
1464 std::stringstream ss;
1465 ss << std::left << std::setw(14) <<
"Width: " 1466 <<
"\t" << intrinsics.width <<
"\n" 1467 << std::left << std::setw(14) <<
"Height: " 1468 <<
"\t" << intrinsics.height <<
"\n" 1469 << std::left << std::setw(14) <<
"PPX: " 1470 <<
"\t" << std::setprecision(15) << intrinsics.ppx <<
"\n" 1471 << std::left << std::setw(14) <<
"PPY: " 1472 <<
"\t" << std::setprecision(15) << intrinsics.ppy <<
"\n" 1473 << std::left << std::setw(14) <<
"Fx: " 1474 <<
"\t" << std::setprecision(15) << intrinsics.fx <<
"\n" 1475 << std::left << std::setw(14) <<
"Fy: " 1476 <<
"\t" << std::setprecision(15) << intrinsics.fy <<
"\n" 1477 << std::left << std::setw(14) <<
"Distortion: " 1478 <<
"\t" << rs2_distortion_to_string(intrinsics.model) <<
"\n" 1479 << std::left << std::setw(14) <<
"Coeffs: ";
1481 for (
size_t i = 0; i <
sizeof(intrinsics.coeffs) /
sizeof(intrinsics.coeffs[0]); ++i)
1482 ss <<
"\t" << std::setprecision(15) << intrinsics.coeffs[i] <<
" ";
1484 os << ss.str() <<
"\n\n";
1487 void safe_get_intrinsics(
const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
1490 intrinsics = profile.get_intrinsics();
1495 bool operator==(
const rs2_intrinsics &lhs,
const rs2_intrinsics &rhs)
// Exact equality of two intrinsics structures: all scalar fields must match
// bit-for-bit and the distortion coefficient array must be memcmp-identical.
// NOTE(review): unlike the epsilon-tolerant rs2_extrinsics comparison above,
// exact float == is used here — presumably to deduplicate intrinsics copied
// verbatim from the device; confirm before loosening.
1497 return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
1498 lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
1499 !std::memcmp(lhs.coeffs, rhs.coeffs,
sizeof(rhs.coeffs));
1502 std::string get_str_formats(
const std::set<rs2_format> &formats)
1504 std::stringstream ss;
1505 for (
auto format = formats.begin(); format != formats.end(); ++format) {
1506 ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ?
"" :
"/");
1511 struct stream_and_resolution {
1516 std::string stream_name;
1518 bool operator<(
const stream_and_resolution &obj)
const 1520 return (std::make_tuple(stream, stream_index, width, height) <
1521 std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
1525 struct stream_and_index {
1529 bool operator<(
const stream_and_index &obj)
const 1531 return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
1559 os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
1560 << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
1564 os <<
" Device info: \n";
1565 for (
auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
1566 auto param =
static_cast<rs2_camera_info
>(j);
1567 if (dev.supports(param))
1568 os <<
" " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) <<
": \t" 1569 << dev.get_info(param) <<
"\n";
1574 for (
auto &&sensor : dev.query_sensors()) {
1575 os <<
"Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
1577 os << std::setw(55) <<
" Supported options:" << std::setw(10) <<
"min" << std::setw(10) <<
" max" << std::setw(6)
1578 <<
" step" << std::setw(10) <<
" default" << std::endl;
1579 for (
auto j = 0; j < RS2_OPTION_COUNT; ++j) {
1580 auto opt =
static_cast<rs2_option
>(j);
1581 if (sensor.supports(opt)) {
1582 auto range = sensor.get_option_range(opt);
1583 os <<
" " << std::left << std::setw(50) << opt <<
" : " << std::setw(5) << range.min <<
"... " 1584 << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def <<
"\n";
1591 for (
auto &&sensor : dev.query_sensors()) {
1592 os <<
"Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) <<
"\n";
1594 os << std::setw(55) <<
" Supported modes:" << std::setw(10) <<
"stream" << std::setw(10) <<
" resolution" 1595 << std::setw(6) <<
" fps" << std::setw(10) <<
" format" 1598 for (
auto &&profile : sensor.get_stream_profiles()) {
1599 if (
auto video = profile.as<rs2::video_stream_profile>()) {
1600 os <<
" " << profile.stream_name() <<
"\t " << video.width() <<
"x" << video.height() <<
"\t@ " 1601 << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
1603 os <<
" " << profile.stream_name() <<
"\t@ " << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
1610 std::map<stream_and_index, rs2::stream_profile> streams;
1611 std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
1612 for (
auto &&sensor : dev.query_sensors()) {
1614 for (
auto &&profile : sensor.get_stream_profiles()) {
1615 if (
auto video = profile.as<rs2::video_stream_profile>()) {
1616 if (streams.find(stream_and_index{profile.stream_type(), profile.stream_index()}) == streams.end()) {
1617 streams[stream_and_index{profile.stream_type(), profile.stream_index()}] = profile;
1620 rs2_intrinsics intrinsics{};
1621 stream_and_resolution stream_res{profile.stream_type(), profile.stream_index(), video.width(), video.height(),
1622 profile.stream_name()};
1623 safe_get_intrinsics(video, intrinsics);
1624 auto it = std::find_if(
1625 (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
1626 [&](
const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) {
return intrinsics == kvp.second; });
1627 if (it == (intrinsics_map[stream_res]).end()) {
1628 (intrinsics_map[stream_res]).push_back({{profile.format()}, intrinsics});
1630 it->first.insert(profile.format());
1638 os <<
"Provided Intrinsic:\n";
1639 for (
auto &kvp : intrinsics_map) {
1640 auto stream_res = kvp.first;
1641 for (
auto &intrinsics : kvp.second) {
1642 auto formats = get_str_formats(intrinsics.first);
1643 os <<
"Intrinsic of \"" << stream_res.stream_name <<
"\"\t " << stream_res.width <<
"x" << stream_res.height
1644 <<
"\t " << formats <<
"\n";
1645 if (intrinsics.second == rs2_intrinsics{}) {
1646 os <<
"Intrinsic NOT available!\n\n";
1648 print(intrinsics.second, os);
1654 os <<
"\nProvided Extrinsic:\n";
1655 for (
auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
1656 for (
auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
1657 os <<
"Extrinsic from \"" << kvp1->second.stream_name() <<
"\"\t " 1659 <<
"\t \"" << kvp2->second.stream_name() <<
"\"\n";
1660 auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
1661 print(extrinsics, os);
1668 #elif !defined(VISP_BUILD_SHARED_LIBS) 1671 void dummy_vpRealSense2(){};
static void BGRToRGBa(unsigned char *bgr, unsigned char *rgba, unsigned int width, unsigned int height, bool flip=false)
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
vpQuaternionVector m_quat
void resize(unsigned int h, unsigned int w)
resize the image : Image initialization
std::string getProductLine()
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
Type * bitmap
points toward the bitmap
unsigned int getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc, double *ts=NULL)
error that can be emited by ViSP classes.
void initProjWithKannalaBrandtDistortion(double px, double py, double u0, double v0, const std::vector< double > &distortion_coefficients)
void initPersProjWithDistortion(double px, double py, double u0, double v0, double kud, double kdu)
bool open(const rs2::config &cfg=rs2::config())
Implementation of a rotation matrix and operations on such kind of matrices.
static void RGBToGrey(unsigned char *rgb, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false)
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
rs2::pipeline_profile m_pipelineProfile
void getIMUVelocity(vpColVector *imu_vel, double *ts)
vpCameraParametersProjType
void getGreyFrame(const rs2::frame &frame, vpImage< unsigned char > &grey)
Generic class defining intrinsic camera parameters.
static void RGBaToGrey(unsigned char *rgba, unsigned char *grey, unsigned int width, unsigned int height, unsigned int nThreads=0)
rs2::pointcloud m_pointcloud
void getPointcloud(const rs2::depth_frame &depth_frame, std::vector< vpColVector > &pointcloud)
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
static void BGRToGrey(unsigned char *bgr, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false, unsigned int nThreads=0)
void resize(unsigned int i, bool flagNullify=true)
Implementation of column vector and the associated operations.
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
vpTranslationVector m_pos
std::string m_product_line
float m_invalidDepthValue
void getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
friend VISP_EXPORT std::ostream & operator<<(std::ostream &os, const vpRealSense2 &rs)
void acquire(vpImage< unsigned char > &grey, double *ts=NULL)
void getColorFrame(const rs2::frame &frame, vpImage< vpRGBa > &color)
Class that consider the case of a translation vector.