Visual Servoing Platform  version 3.5.1 under development (2022-12-04)
vpOccipitalStructure.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * libStructure interface.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/core/vpConfig.h>
37 
38 #if defined(VISP_HAVE_OCCIPITAL_STRUCTURE) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
39 #include <cstring>
40 #include <functional>
41 #include <iomanip>
42 #include <map>
43 #include <set>
44 #include <thread>
45 
46 #include <ST/Utilities.h>
47 
48 #include <visp3/core/vpImageConvert.h>
49 #include <visp3/core/vpMeterPixelConversion.h>
50 #include <visp3/core/vpPoint.h>
51 #include <visp3/sensor/vpOccipitalStructure.h>
52 
53 #define MANUAL_POINTCLOUD 1
54 
/*!
 * Default constructor. Invalid depth is reported as 0, and points farther than
 * 15000 mm (15 m) are clipped from the point clouds.
 */
vpOccipitalStructure::vpOccipitalStructure() : m_invalidDepthValue(0.0f), m_maxZ(15000.0f) {}
59 
65 
72 void vpOccipitalStructure::acquire(vpImage<unsigned char> &gray, bool undistorted, double *ts)
73 {
74  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
75  m_delegate.cv_sampleLock.wait(u);
76 
77  if (m_delegate.m_visibleFrame.isValid()) {
78  if (!undistorted)
79  memcpy(gray.bitmap, m_delegate.m_visibleFrame.yData(), m_delegate.m_visibleFrame.ySize());
80  else
81  memcpy(gray.bitmap, m_delegate.m_visibleFrame.undistorted().yData(), m_delegate.m_visibleFrame.ySize());
82 
83  if (ts != NULL)
84  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
85  }
86 
87  u.unlock();
88 }
89 
96 void vpOccipitalStructure::acquire(vpImage<vpRGBa> &rgb, bool undistorted, double *ts)
97 {
98  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
99  m_delegate.cv_sampleLock.wait(u);
100 
101  if (m_delegate.m_visibleFrame.isValid()) {
102  // Detecting if it's a Color Structure Core device.
103  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
104  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
105  reinterpret_cast<unsigned char *>(rgb.bitmap),
106  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
107 
108  else // If it's a monochrome Structure Core device.
109  {
110  if (!undistorted)
111  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
112  reinterpret_cast<unsigned char *>(rgb.bitmap),
113  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
114  else
115  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
116  reinterpret_cast<unsigned char *>(rgb.bitmap),
117  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
118  }
119 
120  if (ts != NULL)
121  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
122  }
123 
124  u.unlock();
125 }
126 
137  vpColVector *gyroscope_data, bool undistorted, double *ts)
138 {
139  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
140  m_delegate.cv_sampleLock.wait(u);
141 
142  if (rgb != NULL && m_delegate.m_visibleFrame.isValid()) {
143  // Detecting if it's a Color Structure Core device.
144  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
145  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
146  reinterpret_cast<unsigned char *>(rgb->bitmap),
147  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
148 
149  else // If it's a monochrome Structure Core device.
150  {
151  if (!undistorted)
152  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
153  reinterpret_cast<unsigned char *>(rgb->bitmap),
154  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
155  else
156  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
157  reinterpret_cast<unsigned char *>(rgb->bitmap),
158  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
159  }
160 
161  if (ts != NULL)
162  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
163  }
164 
165  if (depth != NULL && m_delegate.m_depthFrame.isValid())
166  memcpy((unsigned char *)depth->bitmap, m_delegate.m_depthFrame.convertDepthToRgba(),
167  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * 4);
168 
169  if (acceleration_data != NULL) {
170  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
171  acceleration_data[0] = accel.x;
172  acceleration_data[1] = accel.y;
173  acceleration_data[2] = accel.z;
174  }
175 
176  if (gyroscope_data != NULL) {
177  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
178  gyroscope_data[0] = gyro_data.x;
179  gyroscope_data[1] = gyro_data.y;
180  gyroscope_data[2] = gyro_data.z;
181  }
182 
183  if (ts != NULL) // By default, frames are synchronized.
184  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
185 
186  u.unlock();
187 }
188 
199  vpColVector *gyroscope_data, bool undistorted, double *ts)
200 {
201  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
202  m_delegate.cv_sampleLock.wait(u);
203 
204  if (gray != NULL && m_delegate.m_visibleFrame.isValid()) {
205  if (!undistorted)
206  memcpy(gray->bitmap, m_delegate.m_visibleFrame.yData(), m_delegate.m_visibleFrame.ySize());
207  else
208  memcpy(gray->bitmap, m_delegate.m_visibleFrame.undistorted().yData(), m_delegate.m_visibleFrame.ySize());
209 
210  if (ts != NULL)
211  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
212  }
213 
214  if (depth != NULL && m_delegate.m_depthFrame.isValid())
215  memcpy((unsigned char *)depth->bitmap, m_delegate.m_depthFrame.convertDepthToRgba(),
216  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * 4);
217 
218  if (acceleration_data != NULL) {
219  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
220  acceleration_data[0] = accel.x;
221  acceleration_data[1] = accel.y;
222  acceleration_data[2] = accel.z;
223  }
224 
225  if (gyroscope_data != NULL) {
226  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
227  gyroscope_data[0] = gyro_data.x;
228  gyroscope_data[1] = gyro_data.y;
229  gyroscope_data[2] = gyro_data.z;
230  }
231 
232  if (ts != NULL) // By default, frames are synchronized.
233  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
234 
235  u.unlock();
236 }
237 
249 void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
250  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
251  vpColVector *acceleration_data, vpColVector *gyroscope_data, bool undistorted,
252  double *ts)
253 {
254  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
255  m_delegate.cv_sampleLock.wait(u);
256 
257  if (m_delegate.m_depthFrame.isValid() && data_depth != NULL)
258  memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
259  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));
260 
261  if (m_delegate.m_visibleFrame.isValid() && data_image != NULL) {
262  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
263  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
264  reinterpret_cast<unsigned char *>(data_image),
265  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
266 
267  else // If it's a monochrome Structure Core device.
268  {
269  if (!undistorted)
270  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
271  reinterpret_cast<unsigned char *>(data_image),
272  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
273  else
274  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
275  reinterpret_cast<unsigned char *>(data_image),
276  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
277  }
278  }
279 
280  if (m_delegate.m_infraredFrame.isValid() && data_infrared != NULL)
281  memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
282  m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));
283 
284  if (data_pointCloud != NULL)
285  getPointcloud(*data_pointCloud);
286 
287  if (acceleration_data != NULL) {
288  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
289  acceleration_data[0] = accel.x;
290  acceleration_data[1] = accel.y;
291  acceleration_data[2] = accel.z;
292  }
293 
294  if (gyroscope_data != NULL) {
295  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
296  gyroscope_data[0] = gyro_data.x;
297  gyroscope_data[1] = gyro_data.y;
298  gyroscope_data[2] = gyro_data.z;
299  }
300 
301  if (ts != NULL) // By default, frames are synchronized.
302  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
303 
304  u.unlock();
305 }
306 
307 #ifdef VISP_HAVE_PCL
320 void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
321  std::vector<vpColVector> *const data_pointCloud,
322  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
323  vpColVector *acceleration_data, vpColVector *gyroscope_data, bool undistorted,
324  double *ts)
325 {
326  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
327  m_delegate.cv_sampleLock.wait(u);
328 
329  if (m_delegate.m_visibleFrame.isValid()) {
330  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
331  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
332  reinterpret_cast<unsigned char *>(data_image),
333  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
334 
335  else // If it's a monochrome Structure Core device.
336  {
337  if (!undistorted)
338  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
339  reinterpret_cast<unsigned char *>(data_image),
340  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
341  else
342  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
343  reinterpret_cast<unsigned char *>(data_image),
344  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
345  }
346  }
347 
348  if (m_delegate.m_depthFrame.isValid() && data_depth != NULL)
349  memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
350  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));
351 
352  if (m_delegate.m_infraredFrame.isValid() && data_infrared != NULL)
353  memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
354  m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));
355 
356  if (data_pointCloud != NULL)
357  getPointcloud(*data_pointCloud);
358 
359  if (m_delegate.m_depthFrame.isValid() && pointcloud != NULL)
360  getPointcloud(pointcloud);
361 
362  if (acceleration_data != NULL) {
363  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
364  acceleration_data[0] = accel.x;
365  acceleration_data[1] = accel.y;
366  acceleration_data[2] = accel.z;
367  }
368 
369  if (gyroscope_data != NULL) {
370  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
371  gyroscope_data[0] = gyro_data.x;
372  gyroscope_data[1] = gyro_data.y;
373  gyroscope_data[2] = gyro_data.z;
374  }
375 
376  if (ts != NULL) // By default, frames are synchronized.
377  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
378 
379  u.unlock();
380 }
381 
394 void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
395  std::vector<vpColVector> *const data_pointCloud,
396  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
397  unsigned char *const data_infrared, vpColVector *acceleration_data,
398  vpColVector *gyroscope_data, bool undistorted, double *ts)
399 {
400  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
401  m_delegate.cv_sampleLock.wait(u);
402 
403  if (m_delegate.m_depthFrame.isValid() && data_depth != NULL)
404  memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
405  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));
406 
407  if (m_delegate.m_visibleFrame.isValid() && data_image != NULL) {
408  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
409  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
410  reinterpret_cast<unsigned char *>(data_image),
411  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
412 
413  else // If it's a monochrome Structure Core device.
414  {
415  if (!undistorted)
416  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
417  reinterpret_cast<unsigned char *>(data_image),
418  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
419  else
420  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
421  reinterpret_cast<unsigned char *>(data_image),
422  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
423  }
424  }
425 
426  if (m_delegate.m_infraredFrame.isValid() && data_infrared != NULL)
427  memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
428  m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));
429 
430  if (m_delegate.m_depthFrame.isValid() && data_pointCloud != NULL)
431  getPointcloud(*data_pointCloud);
432 
433  if (m_delegate.m_depthFrame.isValid() && m_delegate.m_visibleFrame.isValid() && pointcloud != NULL)
434  getColoredPointcloud(pointcloud);
435 
436  if (acceleration_data != NULL) {
437  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
438  acceleration_data[0] = accel.x;
439  acceleration_data[1] = accel.y;
440  acceleration_data[2] = accel.z;
441  }
442 
443  if (gyroscope_data != NULL) {
444  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
445  gyroscope_data[0] = gyro_data.x;
446  gyroscope_data[1] = gyro_data.y;
447  gyroscope_data[2] = gyro_data.z;
448  }
449 
450  if (ts != NULL) // By default, frames are synchronized.
451  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
452 
453  u.unlock();
454 }
455 
456 #endif
457 
480 {
481  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
482  m_delegate.cv_sampleLock.wait(u);
483 
484  if (imu_vel != NULL) {
485  imu_vel->resize(3, false);
486  ST::RotationRate imu_rotationRate = m_delegate.m_gyroscopeEvent.rotationRate();
487  (*imu_vel)[0] = imu_rotationRate.x;
488  (*imu_vel)[1] = imu_rotationRate.y;
489  (*imu_vel)[2] = imu_rotationRate.z;
490  }
491 
492  if (ts != NULL)
493  *ts = m_delegate.m_gyroscopeEvent.arrivalTimestamp();
494 
495  u.unlock();
496 }
497 
520 {
521  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
522  m_delegate.cv_sampleLock.wait(u);
523 
524  if (imu_acc != NULL) {
525  imu_acc->resize(3, false);
526  ST::Acceleration imu_acceleration = m_delegate.m_accelerometerEvent.acceleration();
527  (*imu_acc)[0] = imu_acceleration.x;
528  (*imu_acc)[1] = imu_acceleration.y;
529  (*imu_acc)[2] = imu_acceleration.z;
530  }
531 
532  if (ts != NULL)
533  *ts = m_delegate.m_accelerometerEvent.arrivalTimestamp();
534 
535  u.unlock();
536 }
537 
561 void vpOccipitalStructure::getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
562 {
563  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
564  m_delegate.cv_sampleLock.wait(u);
565  double imu_vel_timestamp, imu_acc_timestamp;
566 
567  if (imu_vel != NULL) {
568  imu_vel->resize(3, false);
569  ST::RotationRate imu_rotationRate = m_delegate.m_gyroscopeEvent.rotationRate();
570  (*imu_vel)[0] = imu_rotationRate.x;
571  (*imu_vel)[1] = imu_rotationRate.y;
572  (*imu_vel)[2] = imu_rotationRate.z;
573  imu_vel_timestamp =
574  m_delegate.m_gyroscopeEvent.arrivalTimestamp(); // Relative to an unspecified epoch. (see documentation).
575  }
576 
577  if (imu_acc != NULL) {
578  imu_acc->resize(3, false);
579  ST::Acceleration imu_acceleration = m_delegate.m_accelerometerEvent.acceleration();
580  (*imu_acc)[0] = imu_acceleration.x;
581  (*imu_acc)[1] = imu_acceleration.y;
582  (*imu_acc)[2] = imu_acceleration.z;
583  imu_acc_timestamp = m_delegate.m_accelerometerEvent.arrivalTimestamp();
584  }
585 
586  if (ts != NULL)
587  *ts = std::max(imu_vel_timestamp, imu_acc_timestamp);
588 
589  u.unlock();
590 }
591 
596 bool vpOccipitalStructure::open(const ST::CaptureSessionSettings &settings)
597 {
598  if (m_init) {
599  close();
600  }
601 
602  m_captureSession.setDelegate(&m_delegate);
603 
604  if (!m_captureSession.startMonitoring(settings)) {
605  std::cout << "Failed to initialize capture session!" << std::endl;
606  return false;
607  }
608 
609  // This will lock the open() function until the CaptureSession
610  // recieves the first frame or a timeout.
611  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
612  std::cv_status var =
613  m_delegate.cv_sampleLock.wait_for(u, std::chrono::seconds(20)); // Make sure a device is connected.
614 
615  // In case the device is not connected, open() should return false.
616  if (var == std::cv_status::timeout) {
617  m_init = false;
618  return m_init;
619  }
620 
621  m_init = true;
622  m_captureSessionSettings = settings;
623  return m_init;
624 }
625 
637 {
638  if (m_init) {
639  m_captureSession.stopStreaming();
640  m_init = false;
641  }
642 }
643 
649 {
650  switch (stream_type) {
651  case vpOccipitalStructureStream::visible:
652  if (m_delegate.m_visibleFrame.isValid())
653  return m_delegate.m_visibleFrame.width();
654  break;
655 
656  case vpOccipitalStructureStream::depth:
657  if (m_delegate.m_depthFrame.isValid())
658  return m_delegate.m_depthFrame.width();
659  break;
660 
661  case vpOccipitalStructureStream::infrared:
662  if (m_delegate.m_infraredFrame.isValid())
663  return m_delegate.m_infraredFrame.width();
664  break;
665 
666  default:
667  break;
668  }
669 
670  return 0;
671 }
672 
678 {
679  switch (stream_type) {
680  case vpOccipitalStructureStream::visible:
681  if (m_delegate.m_visibleFrame.isValid())
682  return m_delegate.m_visibleFrame.height();
683  break;
684 
685  case vpOccipitalStructureStream::depth:
686  if (m_delegate.m_depthFrame.isValid())
687  return m_delegate.m_depthFrame.height();
688  break;
689 
690  case vpOccipitalStructureStream::infrared:
691  if (m_delegate.m_infraredFrame.isValid())
692  return m_delegate.m_infraredFrame.height();
693  break;
694 
695  default:
696  break;
697  }
698 
699  return 0;
700 }
701 
708 {
709  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
710  m_delegate.cv_sampleLock.wait(u);
711 
712  if (m_delegate.m_depthFrame.isValid())
713  return m_delegate.m_depthFrame(x, y);
714 
715  else
716  return 0.;
717 
718  u.unlock();
719 }
720 
726 // Return vpColVector
728 {
729  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
730  m_delegate.cv_sampleLock.wait(u);
731 
732  if (m_delegate.m_depthFrame.isValid()) {
733  ST::Vector3f point_3D = m_delegate.m_depthFrame.unprojectPoint(row, col);
734  return vpPoint(point_3D.x, point_3D.y, point_3D.z);
735  }
736 
737  else // Should be returning invalid vpPoint.
738  return vpPoint(0., 0., 0.);
739 
740  u.unlock();
741 }
742 
750 {
751  vpHomogeneousMatrix result;
752 
753  switch (from) {
755  if (to == vpOccipitalStructure::visible) {
756  ST::Matrix4 v_M_d = m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame();
757 
758  result[0][0] = v_M_d.m00;
759  result[0][1] = v_M_d.m10;
760  result[0][2] = v_M_d.m20;
761  result[0][3] = v_M_d.m30;
762  result[1][0] = v_M_d.m01;
763  result[1][1] = v_M_d.m11;
764  result[1][2] = v_M_d.m21;
765  result[1][3] = v_M_d.m31;
766  result[2][0] = v_M_d.m02;
767  result[2][1] = v_M_d.m12;
768  result[2][2] = v_M_d.m22;
769  result[2][3] = v_M_d.m32;
770  }
771 
772  if (to == vpOccipitalStructure::imu) {
773  ST::Matrix4 imu_M_d = m_captureSession.getImuFromDepthExtrinsics().inversed();
774 
775  result[0][0] = imu_M_d.m00;
776  result[0][1] = imu_M_d.m10;
777  result[0][2] = imu_M_d.m20;
778  result[0][3] = imu_M_d.m30;
779  result[1][0] = imu_M_d.m01;
780  result[1][1] = imu_M_d.m11;
781  result[1][2] = imu_M_d.m21;
782  result[1][3] = imu_M_d.m31;
783  result[2][0] = imu_M_d.m02;
784  result[2][1] = imu_M_d.m12;
785  result[2][2] = imu_M_d.m22;
786  result[2][3] = imu_M_d.m32;
787  }
788  break;
789 
791  if (to == vpOccipitalStructure::depth) {
792  ST::Matrix4 d_M_v = m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame().inversed();
793 
794  result[0][0] = d_M_v.m00;
795  result[0][1] = d_M_v.m10;
796  result[0][2] = d_M_v.m20;
797  result[0][3] = d_M_v.m30;
798  result[1][0] = d_M_v.m01;
799  result[1][1] = d_M_v.m11;
800  result[1][2] = d_M_v.m21;
801  result[1][3] = d_M_v.m31;
802  result[2][0] = d_M_v.m02;
803  result[2][1] = d_M_v.m12;
804  result[2][2] = d_M_v.m22;
805  result[2][3] = d_M_v.m32;
806  }
807 
808  if (to == vpOccipitalStructure::imu) {
809  ST::Matrix4 imu_M_v = m_captureSession.getImuFromVisibleExtrinsics().inversed();
810 
811  result[0][0] = imu_M_v.m00;
812  result[0][1] = imu_M_v.m10;
813  result[0][2] = imu_M_v.m20;
814  result[0][3] = imu_M_v.m30;
815  result[1][0] = imu_M_v.m01;
816  result[1][1] = imu_M_v.m11;
817  result[1][2] = imu_M_v.m21;
818  result[1][3] = imu_M_v.m31;
819  result[2][0] = imu_M_v.m02;
820  result[2][1] = imu_M_v.m12;
821  result[2][2] = imu_M_v.m22;
822  result[2][3] = imu_M_v.m32;
823  }
824  break;
825 
827  break;
828 
830  if (to == vpOccipitalStructure::depth) {
831  ST::Matrix4 d_M_imu = m_captureSession.getImuFromDepthExtrinsics();
832 
833  result[0][0] = d_M_imu.m00;
834  result[0][1] = d_M_imu.m10;
835  result[0][2] = d_M_imu.m20;
836  result[0][3] = d_M_imu.m30;
837  result[1][0] = d_M_imu.m01;
838  result[1][1] = d_M_imu.m11;
839  result[1][2] = d_M_imu.m21;
840  result[1][3] = d_M_imu.m31;
841  result[2][0] = d_M_imu.m02;
842  result[2][1] = d_M_imu.m12;
843  result[2][2] = d_M_imu.m22;
844  result[2][3] = d_M_imu.m32;
845  }
846 
847  if (to == vpOccipitalStructure::visible) {
848  ST::Matrix4 v_M_imu = m_captureSession.getImuFromVisibleExtrinsics();
849 
850  result[0][0] = v_M_imu.m00;
851  result[0][1] = v_M_imu.m10;
852  result[0][2] = v_M_imu.m20;
853  result[0][3] = v_M_imu.m30;
854  result[1][0] = v_M_imu.m01;
855  result[1][1] = v_M_imu.m11;
856  result[1][2] = v_M_imu.m21;
857  result[1][3] = v_M_imu.m31;
858  result[2][0] = v_M_imu.m02;
859  result[2][1] = v_M_imu.m12;
860  result[2][2] = v_M_imu.m22;
861  result[2][3] = v_M_imu.m32;
862  }
863  break;
864 
865  default:
866  break;
867  }
868 
869  return result;
870 }
871 
877 {
878  ST::Intrinsics result;
879 
880  switch (stream_type) {
882  result = m_delegate.m_visibleFrame.intrinsics();
883  break;
884 
886  result = m_delegate.m_depthFrame.intrinsics();
887  break;
888 
890  result = m_delegate.m_infraredFrame.intrinsics();
891  break;
892 
893  default:
894  // Deal with exceptions.
895  break;
896  }
897 
898  return result;
899 }
900 
907 {
908  m_delegate.m_depthFrame.saveImageAsPointCloudMesh(filename.c_str());
909 }
910 
918 {
919  ST::Intrinsics cam_intrinsics;
920  switch (stream_type) {
921  case vpOccipitalStructureStream::visible:
922  cam_intrinsics = m_delegate.m_visibleFrame.intrinsics();
924  m_visible_camera_parameters = vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx,
925  cam_intrinsics.cy, cam_intrinsics.k1, -cam_intrinsics.k1);
926 
929  vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);
930 
932  break;
933 
934  case vpOccipitalStructureStream::depth:
935  cam_intrinsics = m_delegate.m_depthFrame.intrinsics();
937  vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);
938 
940  break;
941 
942  default: // Should throw exception for not having this type of extrinsic transforms.
943  return vpCameraParameters();
944  break;
945  }
946 }
947 
948 // Protected functions.
/*!
 * Fill \e pointcloud from the latest depth frame: one homogeneous 4-vector
 * (X, Y, Z, 1) in meters per depth pixel, stored row-major.
 * Pixels whose depth is NaN, or farther than m_maxZ (millimeters), are set to
 * m_invalidDepthValue.
 * NOTE(review): invalid depth is detected here with std::isnan(), while the
 * PCL overload below compares against 0 — confirm both encodings are used by
 * the SDK.
 */
void vpOccipitalStructure::getPointcloud(std::vector<vpColVector> &pointcloud)
{
  // Local handle on the last received depth frame.
  ST::DepthFrame last_df = m_delegate.m_depthFrame;
  const int width = last_df.width();
  const int height = last_df.height();
  pointcloud.resize((size_t)(width * height));

  const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());

// Multi-threading if OpenMP
// Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      // Invalid depth: emit an invalid but still homogeneous point.
      if (std::isnan(p_depth_frame[depth_pixel_index])) {
        pointcloud[(size_t)depth_pixel_index].resize(4, false);
        pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][3] = 1.0;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = p_depth_frame[depth_pixel_index];

      // Back-project the pixel to a 3D point in the depth frame (millimeters).
      ST::Vector3f point_3D = last_df.unprojectPoint(i, j);

      // Clip points beyond the maximum allowed range.
      if (pixels_distance > m_maxZ)
        point_3D.x = point_3D.y = point_3D.z = m_invalidDepthValue;

      // Convert millimeters to meters.
      pointcloud[(size_t)depth_pixel_index].resize(4, false);
      pointcloud[(size_t)depth_pixel_index][0] = point_3D.x * 0.001;
      pointcloud[(size_t)depth_pixel_index][1] = point_3D.y * 0.001;
      pointcloud[(size_t)depth_pixel_index][2] = point_3D.z * 0.001;
      pointcloud[(size_t)depth_pixel_index][3] = 1.0;
    }
  }
}
990 
991 #ifdef VISP_HAVE_PCL
/*!
 * Fill a PCL XYZ point cloud from the latest depth frame (coordinates in
 * meters). Pixels whose depth is 0, or farther than m_maxZ (millimeters),
 * are set to m_invalidDepthValue.
 * NOTE(review): invalid depth is detected here with `== 0`, while the
 * std::vector overload above uses std::isnan() — confirm both encodings are
 * used by the SDK.
 */
void vpOccipitalStructure::getPointcloud(pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
{
  // Local handle on the last received depth frame.
  ST::DepthFrame last_df = m_delegate.m_depthFrame;
  const int width = last_df.width();
  const int height = last_df.height();
  pointcloud->width = (uint32_t)width;
  pointcloud->height = (uint32_t)height;
  pointcloud->resize((size_t)(width * height));

#if MANUAL_POINTCLOUD // faster to compute manually when tested
  const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      // Zero depth marks an invalid measurement: emit an invalid point.
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = p_depth_frame[depth_pixel_index];

      // Get 3D coordinates in depth frame. (in mm)
      ST::Vector3f point_3D = last_df.unprojectPoint(i, j);

      // Clip points beyond the maximum allowed range.
      if (pixels_distance > m_maxZ)
        point_3D.x = point_3D.y = point_3D.z = m_invalidDepthValue;

      // Convert millimeters to meters.
      pointcloud->points[(size_t)(depth_pixel_index)].x = point_3D.x * 0.001;
      pointcloud->points[(size_t)(depth_pixel_index)].y = point_3D.y * 0.001;
      pointcloud->points[(size_t)(depth_pixel_index)].z = point_3D.z * 0.001;
    }
  }
// #else
//   m_points = m_pointcloud.calculate(depth_frame);
//   auto vertices = m_points.get_vertices();

//   for (size_t i = 0; i < m_points.size(); i++) {
//     if (vertices[i].z <= 0.0f || vertices[i].z > m_maxZ) {
//       pointcloud->points[i].x = m_invalidDepthValue;
//       pointcloud->points[i].y = m_invalidDepthValue;
//       pointcloud->points[i].z = m_invalidDepthValue;
//     } else {
//       pointcloud->points[i].x = vertices[i].x;
//       pointcloud->points[i].y = vertices[i].y;
//       pointcloud->points[i].z = vertices[i].z;
//     }
//   }
#endif
}
1049 
1050 void vpOccipitalStructure::getColoredPointcloud(pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
1051 {
1052  ST::DepthFrame last_df = m_delegate.m_depthFrame;
1053  ST::ColorFrame last_cf = m_delegate.m_visibleFrame;
1054 
1055  const int depth_width = last_df.width();
1056  const int depth_height = last_df.height();
1057  pointcloud->width = static_cast<uint32_t>(depth_width);
1058  pointcloud->height = static_cast<uint32_t>(depth_height);
1059  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));
1060 
1061  const int color_width = last_cf.width();
1062  const int color_height = last_cf.height();
1063 
1064  const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());
1065  const ST::Matrix4 depth2ColorExtrinsics = last_df.visibleCameraPoseInDepthCoordinateFrame(); // c_M_d
1066 
1067  const bool swap_rb = false;
1068  unsigned int nb_color_pixel = 3; // RGB image.
1069  const uint8_t *p_color_frame = static_cast<const uint8_t *>(last_cf.rgbData());
1070 
1071  const bool registered_streams = last_df.isRegisteredTo(last_cf);
1072 
1073  // Multi-threading if OpenMP
1074  // Concurrent writes at different locations are safe
1075 #pragma omp parallel for schedule(dynamic)
1076  for (int i = 0; i < depth_height; i++) {
1077  auto depth_pixel_index = i * depth_width;
1078 
1079  for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
1080  if (std::isnan(p_depth_frame[depth_pixel_index])) {
1081  pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
1082  pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
1083  pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;
1084 
1085  // For out of bounds color data, default to black.
1086 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1087  unsigned int r = 0, g = 0, b = 0;
1088  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1089 
1090  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1091 #else
1092  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)0;
1093  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)0;
1094  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)0;
1095 #endif
1096  continue;
1097  }
1098 
1099  // Get the depth value of the current pixel
1100  auto pixels_distance = p_depth_frame[depth_pixel_index];
1101 
1102  ST::Vector3f depth_point_3D = last_df.unprojectPoint(i, j);
1103 
1104  if (pixels_distance > m_maxZ)
1105  depth_point_3D.x = depth_point_3D.y = depth_point_3D.z = m_invalidDepthValue;
1106 
1107  pointcloud->points[(size_t)depth_pixel_index].x = depth_point_3D.x * 0.001;
1108  pointcloud->points[(size_t)depth_pixel_index].y = depth_point_3D.y * 0.001;
1109  pointcloud->points[(size_t)depth_pixel_index].z = depth_point_3D.z * 0.001;
1110 
1111  if (!registered_streams) {
1112 
1113  ST::Vector3f color_point_3D = depth2ColorExtrinsics * depth_point_3D;
1114 
1115  ST::Vector2f color_pixel;
1116  if (color_point_3D.z != 0) {
1117  double x, y, pixel_x, pixel_y;
1118  x = static_cast<double>(color_point_3D.y / color_point_3D.z);
1119  y = static_cast<double>(color_point_3D.x / color_point_3D.z);
1121  color_pixel.x = pixel_x;
1122  color_pixel.y = pixel_y;
1123  }
1124 
1125  if (color_pixel.y < 0 || color_pixel.y >= color_height || color_pixel.x < 0 || color_pixel.x >= color_width) {
1126  // For out of bounds color data, default to a shade of blue in order to
1127  // visually distinguish holes.
1128 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1129  unsigned int r = 0, g = 0, b = 0;
1130  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1131 
1132  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1133 #else
1134  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)0;
1135  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)0;
1136  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)0;
1137 #endif
1138  } else {
1139  unsigned int i_ = (unsigned int)color_pixel.x;
1140  unsigned int j_ = (unsigned int)color_pixel.y;
1141 
1142 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1143  uint32_t rgb = 0;
1144  if (swap_rb) {
1145  rgb =
1146  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
1147  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1148  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2])
1149  << 16);
1150  } else {
1151  rgb =
1152  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1153  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1154  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
1155  }
1156 
1157  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1158 #else
1159  if (swap_rb) {
1160  pointcloud->points[(size_t)depth_pixel_index].b =
1161  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1162  pointcloud->points[(size_t)depth_pixel_index].g =
1163  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1164  pointcloud->points[(size_t)depth_pixel_index].r =
1165  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1166  } else {
1167  pointcloud->points[(size_t)depth_pixel_index].r =
1168  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1169  pointcloud->points[(size_t)depth_pixel_index].g =
1170  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1171  pointcloud->points[(size_t)depth_pixel_index].b =
1172  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1173  }
1174 #endif
1175  }
1176  } else {
1177 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1178  uint32_t rgb = 0;
1179  if (swap_rb) {
1180  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
1181  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1182  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1183  } else {
1184  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1185  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1186  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
1187  }
1188 
1189  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1190 #else
1191  if (swap_rb) {
1192  pointcloud->points[(size_t)depth_pixel_index].b =
1193  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1194  pointcloud->points[(size_t)depth_pixel_index].g =
1195  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1196  pointcloud->points[(size_t)depth_pixel_index].r =
1197  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1198  } else {
1199  pointcloud->points[(size_t)depth_pixel_index].r =
1200  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1201  pointcloud->points[(size_t)depth_pixel_index].g =
1202  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1203  pointcloud->points[(size_t)depth_pixel_index].b =
1204  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1205  }
1206 #endif
1207  }
1208  }
1209  }
1210 }
1211 
1212 #endif
1213 
1214 #endif
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
Definition: vpColVector.h:131
void resize(unsigned int i, bool flagNullify=true)
Definition: vpColVector.h:314
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void GreyToRGBa(unsigned char *grey, unsigned char *rgba, unsigned int width, unsigned int height)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
Type * bitmap
points toward the bitmap
Definition: vpImage.h:143
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts=NULL)
vpPoint unprojectPoint(int row, int col)
unsigned int getHeight(vpOccipitalStructureStream stream_type)
vpCameraParameters getCameraParameters(const vpOccipitalStructureStream stream_type, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithoutDistortion)
vpCameraParameters m_visible_camera_parameters
vpCameraParameters m_depth_camera_parameters
void saveDepthImageAsPointCloudMesh(std::string &filename)
float getDepth(int x, int y)
ST::Intrinsics getIntrinsics(const vpOccipitalStructureStream stream_type) const
void getColoredPointcloud(pcl::PointCloud< pcl::PointXYZRGB >::Ptr &pointcloud)
ST::CaptureSession m_captureSession
void acquire(vpImage< unsigned char > &gray, bool undistorted=false, double *ts=NULL)
void getPointcloud(std::vector< vpColVector > &pointcloud)
unsigned int getWidth(vpOccipitalStructureStream stream_type)
ST::CaptureSessionSettings m_captureSessionSettings
@ infrared
Infrared stream.
vpHomogeneousMatrix getTransform(const vpOccipitalStructureStream from, const vpOccipitalStructureStream to)
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
void getIMUVelocity(vpColVector *imu_vel, double *ts)
bool open(const ST::CaptureSessionSettings &settings)
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the camera frame and in the 2D image plane by perspective projection.
Definition: vpPoint.h:82