36 #include <visp3/core/vpConfig.h>
38 #if defined(VISP_HAVE_OCCIPITAL_STRUCTURE) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
46 #include <ST/Utilities.h>
48 #include <visp3/core/vpImageConvert.h>
49 #include <visp3/core/vpMeterPixelConversion.h>
50 #include <visp3/core/vpPoint.h>
51 #include <visp3/sensor/vpOccipitalStructure.h>
53 #define MANUAL_POINTCLOUD 1
74 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
84 *ts =
m_delegate.m_visibleFrame.arrivalTimestamp();
98 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
103 if (
m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
105 reinterpret_cast<unsigned char *
>(rgb.
bitmap),
112 reinterpret_cast<unsigned char *
>(rgb.
bitmap),
116 reinterpret_cast<unsigned char *
>(rgb.
bitmap),
121 *ts =
m_delegate.m_visibleFrame.arrivalTimestamp();
137 vpColVector *gyroscope_data,
bool undistorted,
double *ts)
139 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
142 if (rgb != NULL &&
m_delegate.m_visibleFrame.isValid()) {
144 if (
m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
146 reinterpret_cast<unsigned char *
>(rgb->
bitmap),
153 reinterpret_cast<unsigned char *
>(rgb->
bitmap),
157 reinterpret_cast<unsigned char *
>(rgb->
bitmap),
162 *ts =
m_delegate.m_visibleFrame.arrivalTimestamp();
166 memcpy((
unsigned char *)
depth->bitmap,
m_delegate.m_depthFrame.convertDepthToRgba(),
169 if (acceleration_data != NULL) {
170 ST::Acceleration accel =
m_delegate.m_accelerometerEvent.acceleration();
171 acceleration_data[0] = accel.x;
172 acceleration_data[1] = accel.y;
173 acceleration_data[2] = accel.z;
176 if (gyroscope_data != NULL) {
177 ST::RotationRate gyro_data =
m_delegate.m_gyroscopeEvent.rotationRate();
178 gyroscope_data[0] = gyro_data.x;
179 gyroscope_data[1] = gyro_data.y;
180 gyroscope_data[2] = gyro_data.z;
184 *ts =
m_delegate.m_visibleFrame.arrivalTimestamp();
199 vpColVector *gyroscope_data,
bool undistorted,
double *ts)
201 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
204 if (gray != NULL &&
m_delegate.m_visibleFrame.isValid()) {
211 *ts =
m_delegate.m_visibleFrame.arrivalTimestamp();
215 memcpy((
unsigned char *)
depth->bitmap,
m_delegate.m_depthFrame.convertDepthToRgba(),
218 if (acceleration_data != NULL) {
219 ST::Acceleration accel =
m_delegate.m_accelerometerEvent.acceleration();
220 acceleration_data[0] = accel.x;
221 acceleration_data[1] = accel.y;
222 acceleration_data[2] = accel.z;
225 if (gyroscope_data != NULL) {
226 ST::RotationRate gyro_data =
m_delegate.m_gyroscopeEvent.rotationRate();
227 gyroscope_data[0] = gyro_data.x;
228 gyroscope_data[1] = gyro_data.y;
229 gyroscope_data[2] = gyro_data.z;
233 *ts =
m_delegate.m_visibleFrame.arrivalTimestamp();
250 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared,
254 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
257 if (
m_delegate.m_depthFrame.isValid() && data_depth != NULL)
258 memcpy(data_depth,
m_delegate.m_depthFrame.depthInMillimeters(),
261 if (
m_delegate.m_visibleFrame.isValid() && data_image != NULL) {
262 if (
m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
264 reinterpret_cast<unsigned char *
>(data_image),
271 reinterpret_cast<unsigned char *
>(data_image),
275 reinterpret_cast<unsigned char *
>(data_image),
280 if (
m_delegate.m_infraredFrame.isValid() && data_infrared != NULL)
281 memcpy(data_infrared,
m_delegate.m_infraredFrame.data(),
284 if (data_pointCloud != NULL)
287 if (acceleration_data != NULL) {
288 ST::Acceleration accel =
m_delegate.m_accelerometerEvent.acceleration();
289 acceleration_data[0] = accel.x;
290 acceleration_data[1] = accel.y;
291 acceleration_data[2] = accel.z;
294 if (gyroscope_data != NULL) {
295 ST::RotationRate gyro_data =
m_delegate.m_gyroscopeEvent.rotationRate();
296 gyroscope_data[0] = gyro_data.x;
297 gyroscope_data[1] = gyro_data.y;
298 gyroscope_data[2] = gyro_data.z;
302 *ts =
m_delegate.m_visibleFrame.arrivalTimestamp();
321 std::vector<vpColVector> *
const data_pointCloud,
322 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared,
326 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
330 if (
m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
332 reinterpret_cast<unsigned char *
>(data_image),
339 reinterpret_cast<unsigned char *
>(data_image),
343 reinterpret_cast<unsigned char *
>(data_image),
348 if (
m_delegate.m_depthFrame.isValid() && data_depth != NULL)
349 memcpy(data_depth,
m_delegate.m_depthFrame.depthInMillimeters(),
352 if (
m_delegate.m_infraredFrame.isValid() && data_infrared != NULL)
353 memcpy(data_infrared,
m_delegate.m_infraredFrame.data(),
356 if (data_pointCloud != NULL)
359 if (
m_delegate.m_depthFrame.isValid() && pointcloud != NULL)
362 if (acceleration_data != NULL) {
363 ST::Acceleration accel =
m_delegate.m_accelerometerEvent.acceleration();
364 acceleration_data[0] = accel.x;
365 acceleration_data[1] = accel.y;
366 acceleration_data[2] = accel.z;
369 if (gyroscope_data != NULL) {
370 ST::RotationRate gyro_data =
m_delegate.m_gyroscopeEvent.rotationRate();
371 gyroscope_data[0] = gyro_data.x;
372 gyroscope_data[1] = gyro_data.y;
373 gyroscope_data[2] = gyro_data.z;
377 *ts =
m_delegate.m_visibleFrame.arrivalTimestamp();
395 std::vector<vpColVector> *
const data_pointCloud,
396 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
397 unsigned char *
const data_infrared,
vpColVector *acceleration_data,
398 vpColVector *gyroscope_data,
bool undistorted,
double *ts)
400 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
403 if (
m_delegate.m_depthFrame.isValid() && data_depth != NULL)
404 memcpy(data_depth,
m_delegate.m_depthFrame.depthInMillimeters(),
407 if (
m_delegate.m_visibleFrame.isValid() && data_image != NULL) {
408 if (
m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
410 reinterpret_cast<unsigned char *
>(data_image),
417 reinterpret_cast<unsigned char *
>(data_image),
421 reinterpret_cast<unsigned char *
>(data_image),
426 if (
m_delegate.m_infraredFrame.isValid() && data_infrared != NULL)
427 memcpy(data_infrared,
m_delegate.m_infraredFrame.data(),
430 if (
m_delegate.m_depthFrame.isValid() && data_pointCloud != NULL)
433 if (
m_delegate.m_depthFrame.isValid() &&
m_delegate.m_visibleFrame.isValid() && pointcloud != NULL)
436 if (acceleration_data != NULL) {
437 ST::Acceleration accel =
m_delegate.m_accelerometerEvent.acceleration();
438 acceleration_data[0] = accel.x;
439 acceleration_data[1] = accel.y;
440 acceleration_data[2] = accel.z;
443 if (gyroscope_data != NULL) {
444 ST::RotationRate gyro_data =
m_delegate.m_gyroscopeEvent.rotationRate();
445 gyroscope_data[0] = gyro_data.x;
446 gyroscope_data[1] = gyro_data.y;
447 gyroscope_data[2] = gyro_data.z;
451 *ts =
m_delegate.m_visibleFrame.arrivalTimestamp();
481 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
484 if (imu_vel != NULL) {
485 imu_vel->
resize(3,
false);
486 ST::RotationRate imu_rotationRate =
m_delegate.m_gyroscopeEvent.rotationRate();
487 (*imu_vel)[0] = imu_rotationRate.x;
488 (*imu_vel)[1] = imu_rotationRate.y;
489 (*imu_vel)[2] = imu_rotationRate.z;
493 *ts =
m_delegate.m_gyroscopeEvent.arrivalTimestamp();
521 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
524 if (imu_acc != NULL) {
525 imu_acc->
resize(3,
false);
526 ST::Acceleration imu_acceleration =
m_delegate.m_accelerometerEvent.acceleration();
527 (*imu_acc)[0] = imu_acceleration.x;
528 (*imu_acc)[1] = imu_acceleration.y;
529 (*imu_acc)[2] = imu_acceleration.z;
533 *ts =
m_delegate.m_accelerometerEvent.arrivalTimestamp();
563 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
565 double imu_vel_timestamp, imu_acc_timestamp;
567 if (imu_vel != NULL) {
568 imu_vel->
resize(3,
false);
569 ST::RotationRate imu_rotationRate =
m_delegate.m_gyroscopeEvent.rotationRate();
570 (*imu_vel)[0] = imu_rotationRate.x;
571 (*imu_vel)[1] = imu_rotationRate.y;
572 (*imu_vel)[2] = imu_rotationRate.z;
574 m_delegate.m_gyroscopeEvent.arrivalTimestamp();
577 if (imu_acc != NULL) {
578 imu_acc->
resize(3,
false);
579 ST::Acceleration imu_acceleration =
m_delegate.m_accelerometerEvent.acceleration();
580 (*imu_acc)[0] = imu_acceleration.x;
581 (*imu_acc)[1] = imu_acceleration.y;
582 (*imu_acc)[2] = imu_acceleration.z;
583 imu_acc_timestamp =
m_delegate.m_accelerometerEvent.arrivalTimestamp();
587 *ts = std::max(imu_vel_timestamp, imu_acc_timestamp);
605 std::cout <<
"Failed to initialize capture session!" << std::endl;
611 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
613 m_delegate.cv_sampleLock.wait_for(u, std::chrono::seconds(20));
616 if (var == std::cv_status::timeout) {
650 switch (stream_type) {
651 case vpOccipitalStructureStream::visible:
656 case vpOccipitalStructureStream::depth:
661 case vpOccipitalStructureStream::infrared:
679 switch (stream_type) {
680 case vpOccipitalStructureStream::visible:
685 case vpOccipitalStructureStream::depth:
690 case vpOccipitalStructureStream::infrared:
709 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
729 std::unique_lock<std::mutex> u(
m_delegate.m_sampleLock);
733 ST::Vector3f point_3D =
m_delegate.m_depthFrame.unprojectPoint(row, col);
734 return vpPoint(point_3D.x, point_3D.y, point_3D.z);
756 ST::Matrix4 v_M_d =
m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame();
758 result[0][0] = v_M_d.m00;
759 result[0][1] = v_M_d.m10;
760 result[0][2] = v_M_d.m20;
761 result[0][3] = v_M_d.m30;
762 result[1][0] = v_M_d.m01;
763 result[1][1] = v_M_d.m11;
764 result[1][2] = v_M_d.m21;
765 result[1][3] = v_M_d.m31;
766 result[2][0] = v_M_d.m02;
767 result[2][1] = v_M_d.m12;
768 result[2][2] = v_M_d.m22;
769 result[2][3] = v_M_d.m32;
773 ST::Matrix4 imu_M_d =
m_captureSession.getImuFromDepthExtrinsics().inversed();
775 result[0][0] = imu_M_d.m00;
776 result[0][1] = imu_M_d.m10;
777 result[0][2] = imu_M_d.m20;
778 result[0][3] = imu_M_d.m30;
779 result[1][0] = imu_M_d.m01;
780 result[1][1] = imu_M_d.m11;
781 result[1][2] = imu_M_d.m21;
782 result[1][3] = imu_M_d.m31;
783 result[2][0] = imu_M_d.m02;
784 result[2][1] = imu_M_d.m12;
785 result[2][2] = imu_M_d.m22;
786 result[2][3] = imu_M_d.m32;
792 ST::Matrix4 d_M_v =
m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame().inversed();
794 result[0][0] = d_M_v.m00;
795 result[0][1] = d_M_v.m10;
796 result[0][2] = d_M_v.m20;
797 result[0][3] = d_M_v.m30;
798 result[1][0] = d_M_v.m01;
799 result[1][1] = d_M_v.m11;
800 result[1][2] = d_M_v.m21;
801 result[1][3] = d_M_v.m31;
802 result[2][0] = d_M_v.m02;
803 result[2][1] = d_M_v.m12;
804 result[2][2] = d_M_v.m22;
805 result[2][3] = d_M_v.m32;
809 ST::Matrix4 imu_M_v =
m_captureSession.getImuFromVisibleExtrinsics().inversed();
811 result[0][0] = imu_M_v.m00;
812 result[0][1] = imu_M_v.m10;
813 result[0][2] = imu_M_v.m20;
814 result[0][3] = imu_M_v.m30;
815 result[1][0] = imu_M_v.m01;
816 result[1][1] = imu_M_v.m11;
817 result[1][2] = imu_M_v.m21;
818 result[1][3] = imu_M_v.m31;
819 result[2][0] = imu_M_v.m02;
820 result[2][1] = imu_M_v.m12;
821 result[2][2] = imu_M_v.m22;
822 result[2][3] = imu_M_v.m32;
833 result[0][0] = d_M_imu.m00;
834 result[0][1] = d_M_imu.m10;
835 result[0][2] = d_M_imu.m20;
836 result[0][3] = d_M_imu.m30;
837 result[1][0] = d_M_imu.m01;
838 result[1][1] = d_M_imu.m11;
839 result[1][2] = d_M_imu.m21;
840 result[1][3] = d_M_imu.m31;
841 result[2][0] = d_M_imu.m02;
842 result[2][1] = d_M_imu.m12;
843 result[2][2] = d_M_imu.m22;
844 result[2][3] = d_M_imu.m32;
850 result[0][0] = v_M_imu.m00;
851 result[0][1] = v_M_imu.m10;
852 result[0][2] = v_M_imu.m20;
853 result[0][3] = v_M_imu.m30;
854 result[1][0] = v_M_imu.m01;
855 result[1][1] = v_M_imu.m11;
856 result[1][2] = v_M_imu.m21;
857 result[1][3] = v_M_imu.m31;
858 result[2][0] = v_M_imu.m02;
859 result[2][1] = v_M_imu.m12;
860 result[2][2] = v_M_imu.m22;
861 result[2][3] = v_M_imu.m32;
878 ST::Intrinsics result;
880 switch (stream_type) {
882 result =
m_delegate.m_visibleFrame.intrinsics();
886 result =
m_delegate.m_depthFrame.intrinsics();
890 result =
m_delegate.m_infraredFrame.intrinsics();
908 m_delegate.m_depthFrame.saveImageAsPointCloudMesh(filename.c_str());
919 ST::Intrinsics cam_intrinsics;
920 switch (stream_type) {
921 case vpOccipitalStructureStream::visible:
922 cam_intrinsics =
m_delegate.m_visibleFrame.intrinsics();
925 cam_intrinsics.cy, cam_intrinsics.k1, -cam_intrinsics.k1);
929 vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);
934 case vpOccipitalStructureStream::depth:
935 cam_intrinsics =
m_delegate.m_depthFrame.intrinsics();
937 vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);
951 ST::DepthFrame last_df =
m_delegate.m_depthFrame;
952 const int width = last_df.width();
953 const int height = last_df.height();
954 pointcloud.resize((
size_t)(width * height));
956 const float *p_depth_frame =
reinterpret_cast<const float *
>(last_df.depthInMillimeters());
960 #pragma omp parallel for schedule(dynamic)
961 for (
int i = 0; i < height; i++) {
962 auto depth_pixel_index = i * width;
964 for (
int j = 0; j < width; j++, depth_pixel_index++) {
965 if (std::isnan(p_depth_frame[depth_pixel_index])) {
966 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
970 pointcloud[(size_t)depth_pixel_index][3] = 1.0;
975 auto pixels_distance = p_depth_frame[depth_pixel_index];
977 ST::Vector3f point_3D = last_df.unprojectPoint(i, j);
979 if (pixels_distance >
m_maxZ)
982 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
983 pointcloud[(size_t)depth_pixel_index][0] = point_3D.x * 0.001;
984 pointcloud[(
size_t)depth_pixel_index][1] = point_3D.y * 0.001;
985 pointcloud[(size_t)depth_pixel_index][2] = point_3D.z * 0.001;
986 pointcloud[(
size_t)depth_pixel_index][3] = 1.0;
994 ST::DepthFrame last_df =
m_delegate.m_depthFrame;
995 const int width = last_df.width();
996 const int height = last_df.height();
997 pointcloud->width = (uint32_t)width;
998 pointcloud->height = (uint32_t)height;
999 pointcloud->resize((
size_t)(width * height));
1001 #if MANUAL_POINTCLOUD
1002 const float *p_depth_frame =
reinterpret_cast<const float *
>(last_df.depthInMillimeters());
1006 #pragma omp parallel for schedule(dynamic)
1007 for (
int i = 0; i < height; i++) {
1008 auto depth_pixel_index = i * width;
1010 for (
int j = 0; j < width; j++, depth_pixel_index++) {
1011 if (p_depth_frame[depth_pixel_index] == 0) {
1019 auto pixels_distance = p_depth_frame[depth_pixel_index];
1022 ST::Vector3f point_3D = last_df.unprojectPoint(i, j);
1024 if (pixels_distance >
m_maxZ)
1027 pointcloud->points[(size_t)(depth_pixel_index)].x = point_3D.x * 0.001;
1028 pointcloud->points[(size_t)(depth_pixel_index)].y = point_3D.y * 0.001;
1029 pointcloud->points[(size_t)(depth_pixel_index)].z = point_3D.z * 0.001;
1052 ST::DepthFrame last_df =
m_delegate.m_depthFrame;
1053 ST::ColorFrame last_cf =
m_delegate.m_visibleFrame;
1055 const int depth_width = last_df.width();
1056 const int depth_height = last_df.height();
1057 pointcloud->width =
static_cast<uint32_t
>(depth_width);
1058 pointcloud->height =
static_cast<uint32_t
>(depth_height);
1059 pointcloud->resize(
static_cast<uint32_t
>(depth_width * depth_height));
1061 const int color_width = last_cf.width();
1062 const int color_height = last_cf.height();
1064 const float *p_depth_frame =
reinterpret_cast<const float *
>(last_df.depthInMillimeters());
1065 const ST::Matrix4 depth2ColorExtrinsics = last_df.visibleCameraPoseInDepthCoordinateFrame();
1067 const bool swap_rb =
false;
1068 unsigned int nb_color_pixel = 3;
1069 const uint8_t *p_color_frame =
static_cast<const uint8_t *
>(last_cf.rgbData());
1071 const bool registered_streams = last_df.isRegisteredTo(last_cf);
1075 #pragma omp parallel for schedule(dynamic)
1076 for (
int i = 0; i < depth_height; i++) {
1077 auto depth_pixel_index = i * depth_width;
1079 for (
int j = 0; j < depth_width; j++, depth_pixel_index++) {
1080 if (std::isnan(p_depth_frame[depth_pixel_index])) {
1086 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1087 unsigned int r = 0, g = 0, b = 0;
1088 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1090 pointcloud->points[(size_t)depth_pixel_index].rgb = *
reinterpret_cast<float *
>(&rgb);
1092 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)0;
1093 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)0;
1094 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)0;
1100 auto pixels_distance = p_depth_frame[depth_pixel_index];
1102 ST::Vector3f depth_point_3D = last_df.unprojectPoint(i, j);
1104 if (pixels_distance >
m_maxZ)
1107 pointcloud->points[(size_t)depth_pixel_index].x = depth_point_3D.x * 0.001;
1108 pointcloud->points[(
size_t)depth_pixel_index].y = depth_point_3D.y * 0.001;
1109 pointcloud->points[(size_t)depth_pixel_index].z = depth_point_3D.z * 0.001;
1111 if (!registered_streams) {
1113 ST::Vector3f color_point_3D = depth2ColorExtrinsics * depth_point_3D;
1115 ST::Vector2f color_pixel;
1116 if (color_point_3D.z != 0) {
1117 double x, y, pixel_x, pixel_y;
1118 x =
static_cast<double>(color_point_3D.y / color_point_3D.z);
1119 y =
static_cast<double>(color_point_3D.x / color_point_3D.z);
1121 color_pixel.x = pixel_x;
1122 color_pixel.y = pixel_y;
1125 if (color_pixel.y < 0 || color_pixel.y >= color_height || color_pixel.x < 0 || color_pixel.x >= color_width) {
1128 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1129 unsigned int r = 0, g = 0, b = 0;
1130 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1132 pointcloud->points[(size_t)depth_pixel_index].rgb = *
reinterpret_cast<float *
>(&rgb);
1134 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)0;
1135 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)0;
1136 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)0;
1139 unsigned int i_ = (
unsigned int)color_pixel.x;
1140 unsigned int j_ = (
unsigned int)color_pixel.y;
1142 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1146 (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) |
1147 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1148 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2])
1152 (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1153 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1154 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2]));
1157 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1160 pointcloud->points[(size_t)depth_pixel_index].b =
1161 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1162 pointcloud->points[(size_t)depth_pixel_index].g =
1163 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1164 pointcloud->points[(size_t)depth_pixel_index].r =
1165 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1167 pointcloud->points[(size_t)depth_pixel_index].r =
1168 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1169 pointcloud->points[(size_t)depth_pixel_index].g =
1170 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1171 pointcloud->points[(size_t)depth_pixel_index].b =
1172 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1177 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1180 rgb = (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) |
1181 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1182 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1184 rgb = (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1185 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1186 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]));
1189 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1192 pointcloud->points[(size_t)depth_pixel_index].b =
1193 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1194 pointcloud->points[(size_t)depth_pixel_index].g =
1195 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1196 pointcloud->points[(size_t)depth_pixel_index].r =
1197 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
1199 pointcloud->points[(size_t)depth_pixel_index].r =
1200 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1201 pointcloud->points[(size_t)depth_pixel_index].g =
1202 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1203 pointcloud->points[(size_t)depth_pixel_index].b =
1204 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
Generic class defining intrinsic camera parameters.
vpCameraParametersProjType
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of a column vector and the associated operations.
void resize(unsigned int i, bool flagNullify=true)
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void GreyToRGBa(unsigned char *grey, unsigned char *rgba, unsigned int width, unsigned int height)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
Type * bitmap
pointer to the bitmap data
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts=NULL)
vpPoint unprojectPoint(int row, int col)
unsigned int getHeight(vpOccipitalStructureStream stream_type)
vpCameraParameters getCameraParameters(const vpOccipitalStructureStream stream_type, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithoutDistortion)
vpCameraParameters m_visible_camera_parameters
vpCameraParameters m_depth_camera_parameters
void saveDepthImageAsPointCloudMesh(std::string &filename)
float getDepth(int x, int y)
ST::Intrinsics getIntrinsics(const vpOccipitalStructureStream stream_type) const
void getColoredPointcloud(pcl::PointCloud< pcl::PointXYZRGB >::Ptr &pointcloud)
ST::CaptureSession m_captureSession
float m_invalidDepthValue
void acquire(vpImage< unsigned char > &gray, bool undistorted=false, double *ts=NULL)
void getPointcloud(std::vector< vpColVector > &pointcloud)
unsigned int getWidth(vpOccipitalStructureStream stream_type)
ST::CaptureSessionSettings m_captureSessionSettings
SessionDelegate m_delegate
vpOccipitalStructureStream
@ infrared
Infrared stream.
vpHomogeneousMatrix getTransform(const vpOccipitalStructureStream from, const vpOccipitalStructureStream to)
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
void getIMUVelocity(vpColVector *imu_vel, double *ts)
bool open(const ST::CaptureSessionSettings &settings)
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...