Visual Servoing Platform  version 3.5.1 under development (2022-12-01)
vpMbtFaceDepthDense.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Manage depth dense features for a particular face.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/core/vpCPUFeatures.h>
37 #include <visp3/mbt/vpMbtFaceDepthDense.h>
38 
39 #ifdef VISP_HAVE_PCL
40 #include <pcl/common/point_tests.h>
41 #endif
42 
43 #if defined __SSE2__ || defined _M_X64 || (defined _M_IX86_FP && _M_IX86_FP >= 2)
44 #include <emmintrin.h>
45 #define VISP_HAVE_SSE2 1
46 #endif
47 
48 // https://stackoverflow.com/a/40765925
49 #if !defined(__FMA__) && defined(__AVX2__)
50  #define __FMA__ 1
51 #endif
52 
53 #if defined _WIN32 && defined(_M_ARM64)
54 #define _ARM64_DISTINCT_NEON_TYPES
55 #include <Intrin.h>
56 #include <arm_neon.h>
57 #define VISP_HAVE_NEON 1
58 #elif (defined(__ARM_NEON__) || defined (__ARM_NEON)) && defined(__aarch64__)
59 #include <arm_neon.h>
60 #define VISP_HAVE_NEON 1
61 #endif
62 
63 #define USE_SIMD_CODE 1
64 
65 #if VISP_HAVE_SSE2 && USE_SIMD_CODE
66 #define USE_SSE 1
67 #else
68 #define USE_SSE 0
69 #endif
70 
71 #if VISP_HAVE_NEON && USE_SIMD_CODE
72 #define USE_NEON 1
73 #else
74 #define USE_NEON 0
75 #endif
76 
77 #if (VISP_HAVE_OPENCV_VERSION >= 0x040101 || (VISP_HAVE_OPENCV_VERSION < 0x040000 && VISP_HAVE_OPENCV_VERSION >= 0x030407)) && USE_SIMD_CODE
78 #define USE_OPENCV_HAL 1
79 #include <opencv2/core/simd_intrinsics.hpp>
80 #include <opencv2/core/hal/intrin.hpp>
81 #endif
82 
83 #if !USE_OPENCV_HAL && (USE_SSE || USE_NEON)
84 #include <cstdint>
85 
86 namespace
87 {
88 #if USE_SSE
89 inline void v_load_deinterleave(const uint64_t *ptr, __m128i& a, __m128i& b, __m128i& c)
90 {
91  __m128i t0 = _mm_loadu_si128((const __m128i*)ptr); // a0, b0
92  __m128i t1 = _mm_loadu_si128((const __m128i*)(ptr + 2)); // c0, a1
93  __m128i t2 = _mm_loadu_si128((const __m128i*)(ptr + 4)); // b1, c1
94 
95  t1 = _mm_shuffle_epi32(t1, 0x4e); // a1, c0
96 
97  a = _mm_unpacklo_epi64(t0, t1);
98  b = _mm_unpacklo_epi64(_mm_unpackhi_epi64(t0, t0), t2);
99  c = _mm_unpackhi_epi64(t1, t2);
100 }
101 
102 inline void v_load_deinterleave(const double* ptr, __m128d& a0, __m128d& b0, __m128d& c0)
103 {
104  __m128i a1, b1, c1;
105  v_load_deinterleave((const uint64_t*)ptr, a1, b1, c1);
106  a0 = _mm_castsi128_pd(a1);
107  b0 = _mm_castsi128_pd(b1);
108  c0 = _mm_castsi128_pd(c1);
109 }
110 
111 inline __m128d v_combine_low(const __m128d& a, const __m128d& b)
112 {
113  __m128i a1 = _mm_castpd_si128(a), b1 = _mm_castpd_si128(b);
114  return _mm_castsi128_pd(_mm_unpacklo_epi64(a1, b1));
115 }
116 
117 inline __m128d v_combine_high(const __m128d& a, const __m128d& b)
118 {
119  __m128i a1 = _mm_castpd_si128(a), b1 = _mm_castpd_si128(b);
120  return _mm_castsi128_pd(_mm_unpackhi_epi64(a1, b1));
121 }
122 
123 inline __m128d v_fma(const __m128d& a, const __m128d& b, const __m128d& c)
124 {
125 #if __FMA__
126  return _mm_fmadd_pd(a, b, c);
127 #else
128  return _mm_add_pd(_mm_mul_pd(a, b), c);
129 #endif
130 }
131 #else
132 inline void v_load_deinterleave(const double* ptr, float64x2_t& a0, float64x2_t& b0, float64x2_t& c0)
133 {
134  float64x2x3_t v = vld3q_f64(ptr);
135  a0 = v.val[0];
136  b0 = v.val[1];
137  c0 = v.val[2];
138 }
139 
140 inline float64x2_t v_combine_low(const float64x2_t& a, const float64x2_t& b)
141 {
142  return vcombine_f64(vget_low_f64(a), vget_low_f64(b));
143 }
144 
145 inline float64x2_t v_combine_high(const float64x2_t& a, const float64x2_t& b)
146 {
147  return vcombine_f64(vget_high_f64(a), vget_high_f64(b));
148 }
149 
150 inline float64x2_t v_fma(const float64x2_t& a, const float64x2_t& b, const float64x2_t& c)
151 {
152  return vfmaq_f64(c, a, b);
153 }
154 #endif
155 }
156 #endif // !USE_OPENCV_HAL && (USE_SSE || USE_NEON)
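The helpers above re-implement, for plain SSE2 and NEON, the small subset of OpenCV's universal intrinsics (v_load_deinterleave, v_combine_low/high, v_fma) needed by the SIMD branch of computeInteractionMatrixAndResidu further down. The following sketch is illustrative only (it is not part of ViSP) and assumes an SSE2-capable target; it shows how the deinterleave splits two packed X/Y/Z points into separate x, y and z registers.

// check_deinterleave.cpp - illustrative only, assumes SSE2 (compile e.g. with -msse2)
#include <cstdio>
#include <emmintrin.h>

int main()
{
  // Two 3D points stored interleaved, as in m_pointCloudFace: x0 y0 z0 x1 y1 z1
  double pts[6] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0};

  __m128i t0 = _mm_loadu_si128((const __m128i *)pts);       // x0 y0
  __m128i t1 = _mm_loadu_si128((const __m128i *)(pts + 2)); // z0 x1
  __m128i t2 = _mm_loadu_si128((const __m128i *)(pts + 4)); // y1 z1
  t1 = _mm_shuffle_epi32(t1, 0x4e);                         // swap 64-bit halves: x1 z0

  __m128d vx = _mm_castsi128_pd(_mm_unpacklo_epi64(t0, t1));                         // x0 x1
  __m128d vy = _mm_castsi128_pd(_mm_unpacklo_epi64(_mm_unpackhi_epi64(t0, t0), t2)); // y0 y1
  __m128d vz = _mm_castsi128_pd(_mm_unpackhi_epi64(t1, t2));                         // z0 z1

  double x[2], y[2], z[2];
  _mm_storeu_pd(x, vx);
  _mm_storeu_pd(y, vy);
  _mm_storeu_pd(z, vz);
  printf("x: %g %g  y: %g %g  z: %g %g\n", x[0], x[1], y[0], y[1], z[0], z[1]);
  return 0;
}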
157 
158 vpMbtFaceDepthDense::vpMbtFaceDepthDense()
159  : m_cam(), m_clippingFlag(vpPolygon3D::NO_CLIPPING), m_distFarClip(100), m_distNearClip(0.001), m_hiddenFace(NULL),
160  m_planeObject(), m_polygon(NULL), m_useScanLine(false),
161  m_depthDenseFilteringMethod(DEPTH_OCCUPANCY_RATIO_FILTERING), m_depthDenseFilteringMaxDist(3.0),
162  m_depthDenseFilteringMinDist(0.8), m_depthDenseFilteringOccupancyRatio(0.3), m_isTrackedDepthDenseFace(true),
163  m_isVisible(false), m_listOfFaceLines(), m_planeCamera(), m_pointCloudFace(), m_polygonLines()
164 {
165 }
166 
167 vpMbtFaceDepthDense::~vpMbtFaceDepthDense()
168 {
169  for (size_t i = 0; i < m_listOfFaceLines.size(); i++) {
170  delete m_listOfFaceLines[i];
171  }
172 }
173 
188 void vpMbtFaceDepthDense::addLine(vpPoint &P1, vpPoint &P2, vpMbHiddenFaces<vpMbtPolygon> *const faces,
189  vpUniRand &rand_gen, int polygon, std::string name)
190 {
191  // Build a PolygonLine to be able to easily display the lines model
192  PolygonLine polygon_line;
193 
194  // Add polygon
195  polygon_line.m_poly.setNbPoint(2);
196  polygon_line.m_poly.addPoint(0, P1);
197  polygon_line.m_poly.addPoint(1, P2);
198 
199  polygon_line.m_poly.setClipping(m_clippingFlag);
200  polygon_line.m_poly.setNearClippingDistance(m_distNearClip);
201  polygon_line.m_poly.setFarClippingDistance(m_distFarClip);
202 
203  polygon_line.m_p1 = &polygon_line.m_poly.p[0];
204  polygon_line.m_p2 = &polygon_line.m_poly.p[1];
205 
206  m_polygonLines.push_back(polygon_line);
207 
208  // suppress line already in the model
209  bool already_here = false;
211 
212  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
213  ++it) {
214  l = *it;
215  if ((samePoint(*(l->p1), P1) && samePoint(*(l->p2), P2)) || (samePoint(*(l->p1), P2) && samePoint(*(l->p2), P1))) {
216  already_here = true;
217  l->addPolygon(polygon);
218  l->hiddenface = faces;
219  l->useScanLine = m_useScanLine;
220  }
221  }
222 
223  if (!already_here) {
224  l = new vpMbtDistanceLine;
225 
226  l->setCameraParameters(m_cam);
227  l->buildFrom(P1, P2, rand_gen);
228  l->addPolygon(polygon);
229  l->hiddenface = faces;
230  l->useScanLine = m_useScanLine;
231 
232  l->setIndex((unsigned int)m_listOfFaceLines.size());
233  l->setName(name);
234 
235  if (m_clippingFlag != vpPolygon3D::NO_CLIPPING)
236  l->getPolygon().setClipping(m_clippingFlag);
237 
238  if ((m_clippingFlag & vpPolygon3D::NEAR_CLIPPING) == vpPolygon3D::NEAR_CLIPPING)
239  l->getPolygon().setNearClippingDistance(m_distNearClip);
240 
241  if ((m_clippingFlag & vpPolygon3D::FAR_CLIPPING) == vpPolygon3D::FAR_CLIPPING)
242  l->getPolygon().setFarClippingDistance(m_distFarClip);
243 
244  m_listOfFaceLines.push_back(l);
245  }
246 }
247 
248 #ifdef VISP_HAVE_PCL
249 bool vpMbtFaceDepthDense::computeDesiredFeatures(const vpHomogeneousMatrix &cMo,
250  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
251  unsigned int stepX, unsigned int stepY
252 #if DEBUG_DISPLAY_DEPTH_DENSE
253  ,
254  vpImage<unsigned char> &debugImage,
255  std::vector<std::vector<vpImagePoint> > &roiPts_vec
256 #endif
257  ,
258  const vpImage<bool> *mask)
259 {
260  unsigned int width = point_cloud->width, height = point_cloud->height;
261  m_pointCloudFace.clear();
262 
263  if (point_cloud->width == 0 || point_cloud->height == 0)
264  return false;
265 
266  std::vector<vpImagePoint> roiPts;
267  double distanceToFace;
268  computeROI(cMo, width, height, roiPts
269 #if DEBUG_DISPLAY_DEPTH_DENSE
270  ,
271  roiPts_vec
272 #endif
273  ,
274  distanceToFace);
275 
276  if (roiPts.size() <= 2) {
277 #ifndef NDEBUG
278  std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
279 #endif
280  return false;
281  }
282 
283  if (((m_depthDenseFilteringMethod & DEPTH_DISTANCE_TO_FACE_FILTERING) && distanceToFace > m_depthDenseFilteringMaxDist) ||
284  ((m_depthDenseFilteringMethod & DEPTH_DISTANCE_TO_FACE_FILTERING) && distanceToFace < m_depthDenseFilteringMinDist)) {
285  return false;
286  }
287 
288  vpPolygon polygon_2d(roiPts);
289  vpRect bb = polygon_2d.getBoundingBox();
290 
291  unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
292  unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
293  unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
294  unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
295 
296  bb.setTop(top);
297  bb.setBottom(bottom);
298  bb.setLeft(left);
299  bb.setRight(right);
300 
301  if (bb.getHeight() < 0 || bb.getWidth() < 0) {
302  return false;
303  }
304 
305  m_pointCloudFace.reserve((size_t)(bb.getWidth() * bb.getHeight()));
306 
307  int totalTheoreticalPoints = 0, totalPoints = 0;
308  for (unsigned int i = top; i < bottom; i += stepY) {
309  for (unsigned int j = left; j < right; j += stepX) {
310  if ((m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
311  j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
312  m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
313  : polygon_2d.isInside(vpImagePoint(i, j)))) {
314  totalTheoreticalPoints++;
315 
316  if (vpMeTracker::inMask(mask, i, j) && pcl::isFinite((*point_cloud)(j, i)) && (*point_cloud)(j, i).z > 0) {
317  totalPoints++;
318 
319  m_pointCloudFace.push_back((*point_cloud)(j, i).x);
320  m_pointCloudFace.push_back((*point_cloud)(j, i).y);
321  m_pointCloudFace.push_back((*point_cloud)(j, i).z);
322 
323 #if DEBUG_DISPLAY_DEPTH_DENSE
324  debugImage[i][j] = 255;
325 #endif
326  }
327  }
328  }
329  }
330 
331  if (totalPoints == 0 || ((m_depthDenseFilteringMethod & DEPTH_OCCUPANCY_RATIO_FILTERING) &&
332  totalPoints / (double)totalTheoreticalPoints < m_depthDenseFilteringOccupancyRatio)) {
333  return false;
334  }
335 
336  return true;
337 }
338 #endif
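The PCL overload above expects an organized point cloud (width x height, expressed in the camera frame) and indexes it as (*point_cloud)(j, i), i.e. column first, row second. A minimal sketch of how such a cloud can be laid out; makeOrganizedCloud() is a hypothetical helper, not part of ViSP.

// Illustrative only: building a small organized cloud the way the PCL overload above expects it.
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>

pcl::PointCloud<pcl::PointXYZ>::Ptr makeOrganizedCloud(unsigned int width, unsigned int height)
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
  cloud->width = width;   // number of columns
  cloud->height = height; // number of rows (height > 1 means "organized")
  cloud->is_dense = false;
  cloud->points.resize((size_t)width * height);

  for (unsigned int i = 0; i < height; i++) {   // i = row, as in the loops above
    for (unsigned int j = 0; j < width; j++) {  // j = column
      pcl::PointXYZ &p = (*cloud)(j, i);        // note the (column, row) access order
      p.x = 0.f; p.y = 0.f; p.z = 1.f;          // dummy 3D point in the camera frame
    }
  }
  return cloud;
}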
339 
340 bool vpMbtFaceDepthDense::computeDesiredFeatures(const vpHomogeneousMatrix &cMo, unsigned int width,
341  unsigned int height, const std::vector<vpColVector> &point_cloud,
342  unsigned int stepX, unsigned int stepY
343 #if DEBUG_DISPLAY_DEPTH_DENSE
344  ,
345  vpImage<unsigned char> &debugImage,
346  std::vector<std::vector<vpImagePoint> > &roiPts_vec
347 #endif
348  ,
349  const vpImage<bool> *mask)
350 {
351  m_pointCloudFace.clear();
352 
353  if (width == 0 || height == 0)
354  return false;
355 
356  std::vector<vpImagePoint> roiPts;
357  double distanceToFace;
358  computeROI(cMo, width, height, roiPts
359 #if DEBUG_DISPLAY_DEPTH_DENSE
360  ,
361  roiPts_vec
362 #endif
363  ,
364  distanceToFace);
365 
366  if (roiPts.size() <= 2) {
367 #ifndef NDEBUG
368  std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
369 #endif
370  return false;
371  }
372 
373  if (((m_depthDenseFilteringMethod & DEPTH_DISTANCE_TO_FACE_FILTERING) && distanceToFace > m_depthDenseFilteringMaxDist) ||
374  ((m_depthDenseFilteringMethod & DEPTH_DISTANCE_TO_FACE_FILTERING) && distanceToFace < m_depthDenseFilteringMinDist)) {
375  return false;
376  }
377 
378  vpPolygon polygon_2d(roiPts);
379  vpRect bb = polygon_2d.getBoundingBox();
380 
381  unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
382  unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
383  unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
384  unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
385 
386  bb.setTop(top);
387  bb.setBottom(bottom);
388  bb.setLeft(left);
389  bb.setRight(right);
390 
391  m_pointCloudFace.reserve((size_t)(bb.getWidth() * bb.getHeight()));
392 
393  int totalTheoreticalPoints = 0, totalPoints = 0;
394  for (unsigned int i = top; i < bottom; i += stepY) {
395  for (unsigned int j = left; j < right; j += stepX) {
396  if ((m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
397  j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
398  m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
399  : polygon_2d.isInside(vpImagePoint(i, j)))) {
400  totalTheoreticalPoints++;
401 
402  if (vpMeTracker::inMask(mask, i, j) && point_cloud[i * width + j][2] > 0) {
403  totalPoints++;
404 
405  m_pointCloudFace.push_back(point_cloud[i * width + j][0]);
406  m_pointCloudFace.push_back(point_cloud[i * width + j][1]);
407  m_pointCloudFace.push_back(point_cloud[i * width + j][2]);
408 
409 #if DEBUG_DISPLAY_DEPTH_DENSE
410  debugImage[i][j] = 255;
411 #endif
412  }
413  }
414  }
415  }
416 
417  if (totalPoints == 0 || ((m_depthDenseFilteringMethod & DEPTH_OCCUPANCY_RATIO_FILTERING) &&
418  totalPoints / (double)totalTheoreticalPoints < m_depthDenseFilteringOccupancyRatio)) {
419  return false;
420  }
421 
422  return true;
423 }
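The non-PCL overload above consumes a flattened, row-major cloud: element i * width + j is a 3-vector (X, Y, Z) in meters expressed in the camera frame, and entries with Z <= 0 are skipped as invalid. A minimal sketch of how such a cloud can be built from a depth map by pinhole back-projection; buildPointCloud() is a hypothetical helper and the model assumes no lens distortion.

// Illustrative only: filling the flattened point cloud consumed by the overload above.
#include <vector>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpColVector.h>
#include <visp3/core/vpImage.h>

std::vector<vpColVector> buildPointCloud(const vpImage<float> &depthMap, const vpCameraParameters &cam)
{
  unsigned int width = depthMap.getWidth(), height = depthMap.getHeight();
  std::vector<vpColVector> point_cloud(width * height, vpColVector(3, 0.0)); // Z = 0 marks invalid pixels

  for (unsigned int i = 0; i < height; i++) {
    for (unsigned int j = 0; j < width; j++) {
      double Z = depthMap[i][j]; // depth along the optical axis, in meters
      if (Z > 0) {
        double x = (j - cam.get_u0()) / cam.get_px(); // normalized image coordinates
        double y = (i - cam.get_v0()) / cam.get_py();
        vpColVector &pt = point_cloud[i * width + j]; // row-major indexing, as used above
        pt[0] = x * Z;
        pt[1] = y * Z;
        pt[2] = Z;
      }
    }
  }
  return point_cloud;
}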
424 
426 
427 void vpMbtFaceDepthDense::computeVisibilityDisplay()
428 {
429  // Compute lines visibility, only for display
430  vpMbtDistanceLine *line;
431  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
432  ++it) {
433  line = *it;
434  bool isvisible = false;
435 
436  for (std::list<int>::const_iterator itindex = line->Lindex_polygon.begin(); itindex != line->Lindex_polygon.end();
437  ++itindex) {
438  int index = *itindex;
439  if (index == -1) {
440  isvisible = true;
441  } else {
442  if (line->hiddenface->isVisible((unsigned int)index)) {
443  isvisible = true;
444  }
445  }
446  }
447 
448  // If the line does not belong to any face, it is always visible
449  if (line->Lindex_polygon.empty())
450  isvisible = true; // Not sure that this can occur
451 
452  if (isvisible) {
453  line->setVisible(true);
454  } else {
455  line->setVisible(false);
456  }
457  }
458 }
459 
460 void vpMbtFaceDepthDense::computeInteractionMatrixAndResidu(const vpHomogeneousMatrix &cMo, vpMatrix &L,
461  vpColVector &error)
462 {
463  if (m_pointCloudFace.empty()) {
464  L.resize(0, 0);
465  error.resize(0);
466  return;
467  }
468 
469  L.resize(getNbFeatures(), 6, false, false);
470  error.resize(getNbFeatures(), false);
471 
472  // Transform the plane equation for the current pose
473  m_planeCamera = m_planeObject;
474  m_planeCamera.changeFrame(cMo);
475 
476  double nx = m_planeCamera.getA();
477  double ny = m_planeCamera.getB();
478  double nz = m_planeCamera.getC();
479  double D = m_planeCamera.getD();
480 
481  bool useSIMD = vpCPUFeatures::checkSSE2() || vpCPUFeatures::checkNeon();
482 #if USE_OPENCV_HAL
483  useSIMD = true;
484 #endif
485 #if !USE_SSE && !USE_NEON && !USE_OPENCV_HAL
486  useSIMD = false;
487 #endif
488 
489  if (useSIMD) {
490 #if USE_SSE || USE_NEON || USE_OPENCV_HAL
491  size_t cpt = 0;
492  if (getNbFeatures() >= 2) {
493  double *ptr_point_cloud = &m_pointCloudFace[0];
494  double *ptr_L = L.data;
495  double *ptr_error = error.data;
496 
497 #if USE_OPENCV_HAL
498  const cv::v_float64x2 vnx = cv::v_setall_f64(nx);
499  const cv::v_float64x2 vny = cv::v_setall_f64(ny);
500  const cv::v_float64x2 vnz = cv::v_setall_f64(nz);
501  const cv::v_float64x2 vd = cv::v_setall_f64(D);
502 #elif USE_SSE
503  const __m128d vnx = _mm_set1_pd(nx);
504  const __m128d vny = _mm_set1_pd(ny);
505  const __m128d vnz = _mm_set1_pd(nz);
506  const __m128d vd = _mm_set1_pd(D);
507 #else
508  const float64x2_t vnx = vdupq_n_f64(nx);
509  const float64x2_t vny = vdupq_n_f64(ny);
510  const float64x2_t vnz = vdupq_n_f64(nz);
511  const float64x2_t vd = vdupq_n_f64(D);
512 #endif
513 
514  for (; cpt <= m_pointCloudFace.size() - 6; cpt += 6, ptr_point_cloud += 6) {
515 #if USE_OPENCV_HAL
516  cv::v_float64x2 vx, vy, vz;
517  cv::v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
518 
519  cv::v_float64x2 va1 = vnz*vy - vny*vz;
520  cv::v_float64x2 va2 = vnx*vz - vnz*vx;
521  cv::v_float64x2 va3 = vny*vx - vnx*vy;
522 
523  cv::v_float64x2 vnxy = cv::v_combine_low(vnx, vny);
524  cv::v_store(ptr_L, vnxy);
525  ptr_L += 2;
526  vnxy = cv::v_combine_low(vnz, va1);
527  cv::v_store(ptr_L, vnxy);
528  ptr_L += 2;
529  vnxy = cv::v_combine_low(va2, va3);
530  cv::v_store(ptr_L, vnxy);
531  ptr_L += 2;
532 
533  vnxy = cv::v_combine_high(vnx, vny);
534  cv::v_store(ptr_L, vnxy);
535  ptr_L += 2;
536  vnxy = cv::v_combine_high(vnz, va1);
537  cv::v_store(ptr_L, vnxy);
538  ptr_L += 2;
539  vnxy = cv::v_combine_high(va2, va3);
540  cv::v_store(ptr_L, vnxy);
541  ptr_L += 2;
542 
543  cv::v_float64x2 verr = vd + cv::v_muladd(vnx, vx, cv::v_muladd(vny, vy, vnz*vz));
544  cv::v_store(ptr_error, verr);
545  ptr_error += 2;
546 #elif USE_SSE
547  __m128d vx, vy, vz;
548  v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
549 
550  __m128d va1 = _mm_sub_pd(_mm_mul_pd(vnz, vy), _mm_mul_pd(vny, vz));
551  __m128d va2 = _mm_sub_pd(_mm_mul_pd(vnx, vz), _mm_mul_pd(vnz, vx));
552  __m128d va3 = _mm_sub_pd(_mm_mul_pd(vny, vx), _mm_mul_pd(vnx, vy));
553 
554  __m128d vnxy = v_combine_low(vnx, vny);
555  _mm_storeu_pd(ptr_L, vnxy);
556  ptr_L += 2;
557  vnxy = v_combine_low(vnz, va1);
558  _mm_storeu_pd(ptr_L, vnxy);
559  ptr_L += 2;
560  vnxy = v_combine_low(va2, va3);
561  _mm_storeu_pd(ptr_L, vnxy);
562  ptr_L += 2;
563 
564  vnxy = v_combine_high(vnx, vny);
565  _mm_storeu_pd(ptr_L, vnxy);
566  ptr_L += 2;
567  vnxy = v_combine_high(vnz, va1);
568  _mm_storeu_pd(ptr_L, vnxy);
569  ptr_L += 2;
570  vnxy = v_combine_high(va2, va3);
571  _mm_storeu_pd(ptr_L, vnxy);
572  ptr_L += 2;
573 
574  const __m128d verror = _mm_add_pd(vd, v_fma(vnx, vx, v_fma(vny, vy, _mm_mul_pd(vnz, vz))));
575  _mm_storeu_pd(ptr_error, verror);
576  ptr_error += 2;
577 #else
578  float64x2_t vx, vy, vz;
579  v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
580 
581  float64x2_t va1 = vsubq_f64(vmulq_f64(vnz, vy), vmulq_f64(vny, vz));
582  float64x2_t va2 = vsubq_f64(vmulq_f64(vnx, vz), vmulq_f64(vnz, vx));
583  float64x2_t va3 = vsubq_f64(vmulq_f64(vny, vx), vmulq_f64(vnx, vy));
584 
585  float64x2_t vnxy = v_combine_low(vnx, vny);
586  vst1q_f64(ptr_L, vnxy);
587  ptr_L += 2;
588  vnxy = v_combine_low(vnz, va1);
589  vst1q_f64(ptr_L, vnxy);
590  ptr_L += 2;
591  vnxy = v_combine_low(va2, va3);
592  vst1q_f64(ptr_L, vnxy);
593  ptr_L += 2;
594 
595  vnxy = v_combine_high(vnx, vny);
596  vst1q_f64(ptr_L, vnxy);
597  ptr_L += 2;
598  vnxy = v_combine_high(vnz, va1);
599  vst1q_f64(ptr_L, vnxy);
600  ptr_L += 2;
601  vnxy = v_combine_high(va2, va3);
602  vst1q_f64(ptr_L, vnxy);
603  ptr_L += 2;
604 
605  const float64x2_t verror = vaddq_f64(vd, v_fma(vnx, vx, v_fma(vny, vy, vmulq_f64(vnz, vz))));
606  vst1q_f64(ptr_error, verror);
607  ptr_error += 2;
608 #endif
609  }
610  }
611 
612  for (; cpt < m_pointCloudFace.size(); cpt += 3) {
613  double x = m_pointCloudFace[cpt];
614  double y = m_pointCloudFace[cpt + 1];
615  double z = m_pointCloudFace[cpt + 2];
616 
617  double _a1 = (nz * y) - (ny * z);
618  double _a2 = (nx * z) - (nz * x);
619  double _a3 = (ny * x) - (nx * y);
620 
621  // L
622  L[(unsigned int)(cpt / 3)][0] = nx;
623  L[(unsigned int)(cpt / 3)][1] = ny;
624  L[(unsigned int)(cpt / 3)][2] = nz;
625  L[(unsigned int)(cpt / 3)][3] = _a1;
626  L[(unsigned int)(cpt / 3)][4] = _a2;
627  L[(unsigned int)(cpt / 3)][5] = _a3;
628 
629  vpColVector normal(3);
630  normal[0] = nx;
631  normal[1] = ny;
632  normal[2] = nz;
633 
634  vpColVector pt(3);
635  pt[0] = x;
636  pt[1] = y;
637  pt[2] = z;
638 
639  // Error
640  error[(unsigned int)(cpt / 3)] = D + (normal.t() * pt);
641  }
642 #endif
643  } else {
644  vpColVector normal(3);
645  normal[0] = nx;
646  normal[1] = ny;
647  normal[2] = nz;
648  vpColVector pt(3);
649 
650  unsigned int idx = 0;
651  for (size_t i = 0; i < m_pointCloudFace.size(); i += 3, idx++) {
652  double x = m_pointCloudFace[i];
653  double y = m_pointCloudFace[i + 1];
654  double z = m_pointCloudFace[i + 2];
655 
656  double _a1 = (nz * y) - (ny * z);
657  double _a2 = (nx * z) - (nz * x);
658  double _a3 = (ny * x) - (nx * y);
659 
660  // L
661  L[idx][0] = nx;
662  L[idx][1] = ny;
663  L[idx][2] = nz;
664  L[idx][3] = _a1;
665  L[idx][4] = _a2;
666  L[idx][5] = _a3;
667 
668  pt[0] = x;
669  pt[1] = y;
670  pt[2] = z;
671  // Error
672  error[idx] = D + (normal.t() * pt);
673  }
674  }
675 }
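For each retained 3D point X = (x, y, z), the residual is e = D + n . X (the signed point-to-plane distance when n is a unit normal) and the corresponding interaction-matrix row is [n^T, (X x n)^T]; the SSE, NEON and OpenCV-HAL branches above only vectorize these scalar formulas two points at a time. A scalar reference, useful for cross-checking one row against the SIMD output; computeRowAndResidual() is an illustrative helper, not part of ViSP.

// Illustrative scalar reference for one point, matching the non-SIMD branch above.
void computeRowAndResidual(double nx, double ny, double nz, double D,
                           double x, double y, double z,
                           double L_row[6], double &error)
{
  L_row[0] = nx;
  L_row[1] = ny;
  L_row[2] = nz;
  L_row[3] = nz * y - ny * z;           // (X x n), x component
  L_row[4] = nx * z - nz * x;           // (X x n), y component
  L_row[5] = ny * x - nx * y;           // (X x n), z component
  error = D + nx * x + ny * y + nz * z; // signed point-to-plane residual
}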
676 
677 void vpMbtFaceDepthDense::computeROI(const vpHomogeneousMatrix &cMo, unsigned int width, unsigned int height,
678  std::vector<vpImagePoint> &roiPts
679 #if DEBUG_DISPLAY_DEPTH_DENSE
680  ,
681  std::vector<std::vector<vpImagePoint> > &roiPts_vec
682 #endif
683  ,
684  double &distanceToFace)
685 {
686  if (m_useScanLine || m_clippingFlag > 2)
687  m_cam.computeFov(width, height);
688 
689  if (m_useScanLine) {
690  for (std::vector<PolygonLine>::iterator it = m_polygonLines.begin(); it != m_polygonLines.end(); ++it) {
691  it->m_p1->changeFrame(cMo);
692  it->m_p2->changeFrame(cMo);
693 
694  vpImagePoint ip1, ip2;
695 
696  it->m_poly.changeFrame(cMo);
697  it->m_poly.computePolygonClipped(m_cam);
698 
699  if (it->m_poly.polyClipped.size() == 2 &&
700  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::NEAR_CLIPPING) == 0) &&
701  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::FAR_CLIPPING) == 0) &&
702  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::DOWN_CLIPPING) == 0) &&
703  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::UP_CLIPPING) == 0) &&
704  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::LEFT_CLIPPING) == 0) &&
705  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::RIGHT_CLIPPING) == 0)) {
706 
707  std::vector<std::pair<vpPoint, vpPoint> > linesLst;
708  m_hiddenFace->computeScanLineQuery(it->m_poly.polyClipped[0].first, it->m_poly.polyClipped[1].first, linesLst,
709  true);
710 
711  vpPoint faceCentroid;
712 
713  for (unsigned int i = 0; i < linesLst.size(); i++) {
714  linesLst[i].first.project();
715  linesLst[i].second.project();
716 
717  vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].first.get_x(), linesLst[i].first.get_y(), ip1);
718  vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].second.get_x(), linesLst[i].second.get_y(), ip2);
719 
720  it->m_imPt1 = ip1;
721  it->m_imPt2 = ip2;
722 
723  roiPts.push_back(ip1);
724  roiPts.push_back(ip2);
725 
726  faceCentroid.set_X(faceCentroid.get_X() + linesLst[i].first.get_X() + linesLst[i].second.get_X());
727  faceCentroid.set_Y(faceCentroid.get_Y() + linesLst[i].first.get_Y() + linesLst[i].second.get_Y());
728  faceCentroid.set_Z(faceCentroid.get_Z() + linesLst[i].first.get_Z() + linesLst[i].second.get_Z());
729 
730 #if DEBUG_DISPLAY_DEPTH_DENSE
731  std::vector<vpImagePoint> roiPts_;
732  roiPts_.push_back(ip1);
733  roiPts_.push_back(ip2);
734  roiPts_vec.push_back(roiPts_);
735 #endif
736  }
737 
738  if (linesLst.empty()) {
739  distanceToFace = std::numeric_limits<double>::max();
740  } else {
741  faceCentroid.set_X(faceCentroid.get_X() / (2 * linesLst.size()));
742  faceCentroid.set_Y(faceCentroid.get_Y() / (2 * linesLst.size()));
743  faceCentroid.set_Z(faceCentroid.get_Z() / (2 * linesLst.size()));
744 
745  distanceToFace =
746  sqrt(faceCentroid.get_X() * faceCentroid.get_X() + faceCentroid.get_Y() * faceCentroid.get_Y() +
747  faceCentroid.get_Z() * faceCentroid.get_Z());
748  }
749  }
750  }
751  } else {
752  // Get polygon clipped
753  m_polygon->getRoiClipped(m_cam, roiPts, cMo);
754 
755  // Get 3D polygon clipped
756  std::vector<vpPoint> polygonsClipped;
757  m_polygon->getPolygonClipped(polygonsClipped);
758 
759  if (polygonsClipped.empty()) {
760  distanceToFace = std::numeric_limits<double>::max();
761  } else {
762  vpPoint faceCentroid;
763 
764  for (size_t i = 0; i < polygonsClipped.size(); i++) {
765  faceCentroid.set_X(faceCentroid.get_X() + polygonsClipped[i].get_X());
766  faceCentroid.set_Y(faceCentroid.get_Y() + polygonsClipped[i].get_Y());
767  faceCentroid.set_Z(faceCentroid.get_Z() + polygonsClipped[i].get_Z());
768  }
769 
770  faceCentroid.set_X(faceCentroid.get_X() / polygonsClipped.size());
771  faceCentroid.set_Y(faceCentroid.get_Y() / polygonsClipped.size());
772  faceCentroid.set_Z(faceCentroid.get_Z() / polygonsClipped.size());
773 
774  distanceToFace = sqrt(faceCentroid.get_X() * faceCentroid.get_X() + faceCentroid.get_Y() * faceCentroid.get_Y() +
775  faceCentroid.get_Z() * faceCentroid.get_Z());
776  }
777 
778 #if DEBUG_DISPLAY_DEPTH_DENSE
779  roiPts_vec.push_back(roiPts);
780 #endif
781  }
782 }
783 
784 void vpMbtFaceDepthDense::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
785  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
786  bool displayFullModel)
787 {
788  std::vector<std::vector<double> > models =
789  getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
790 
791  for (size_t i = 0; i < models.size(); i++) {
792  vpImagePoint ip1(models[i][1], models[i][2]);
793  vpImagePoint ip2(models[i][3], models[i][4]);
794  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
795  }
796 }
797 
798 void vpMbtFaceDepthDense::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
799  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
800  bool displayFullModel)
801 {
802  std::vector<std::vector<double> > models =
803  getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
804 
805  for (size_t i = 0; i < models.size(); i++) {
806  vpImagePoint ip1(models[i][1], models[i][2]);
807  vpImagePoint ip2(models[i][3], models[i][4]);
808  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
809  }
810 }
811 
812 void vpMbtFaceDepthDense::displayFeature(const vpImage<unsigned char> & /*I*/, const vpHomogeneousMatrix & /*cMo*/,
813  const vpCameraParameters & /*cam*/, const double /*scale*/,
814  const unsigned int /*thickness*/)
815 {
816 }
817 
818 void vpMbtFaceDepthDense::displayFeature(const vpImage<vpRGBa> & /*I*/, const vpHomogeneousMatrix & /*cMo*/,
819  const vpCameraParameters & /*cam*/, const double /*scale*/,
820  const unsigned int /*thickness*/)
821 {
822 }
823 
835 std::vector<std::vector<double> > vpMbtFaceDepthDense::getModelForDisplay(unsigned int width, unsigned int height,
836  const vpHomogeneousMatrix &cMo,
837  const vpCameraParameters &cam,
838  bool displayFullModel)
839 {
840  std::vector<std::vector<double> > models;
841 
842  if ((m_polygon->isVisible() && m_isTrackedDepthDenseFace) || displayFullModel) {
843  computeVisibilityDisplay();
844 
845  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
846  ++it) {
847  vpMbtDistanceLine *line = *it;
848  std::vector<std::vector<double> > lineModels =
849  line->getModelForDisplay(width, height, cMo, cam, displayFullModel);
850  models.insert(models.end(), lineModels.begin(), lineModels.end());
851  }
852  }
853 
854  return models;
855 }
856 
866 bool vpMbtFaceDepthDense::samePoint(const vpPoint &P1, const vpPoint &P2) const
867 {
868  double dx = fabs(P1.get_oX() - P2.get_oX());
869  double dy = fabs(P1.get_oY() - P2.get_oY());
870  double dz = fabs(P1.get_oZ() - P2.get_oZ());
871 
872  if (dx <= std::numeric_limits<double>::epsilon() && dy <= std::numeric_limits<double>::epsilon() &&
873  dz <= std::numeric_limits<double>::epsilon())
874  return true;
875  else
876  return false;
877 }
878 
879 void vpMbtFaceDepthDense::setCameraParameters(const vpCameraParameters &camera)
880 {
881  m_cam = camera;
882 
883  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
884  ++it) {
885  (*it)->setCameraParameters(camera);
886  }
887 }
888 
889 void vpMbtFaceDepthDense::setScanLineVisibilityTest(bool v)
890 {
891  m_useScanLine = v;
892 
893  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
894  ++it) {
895  (*it)->useScanLine = v;
896  }
897 }