Visual Servoing Platform  version 3.2.0 under development (2019-01-22)
AROgre.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Implementation of a simple augmented reality application using the vpAROgre
 * class.
 *
 * Authors:
 * Bertrand Delabarre
 *
 *****************************************************************************/

#include <iostream>
#include <visp3/core/vpConfig.h>

//#if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_OPENCV) ||
// defined(VISP_HAVE_GDI) || defined(VISP_HAVE_D3D9) || defined(VISP_HAVE_GTK)
//|| (defined(VISP_HAVE_X11) && ! defined(APPLE)))
#if defined(VISP_HAVE_OGRE) && \
    (defined(VISP_HAVE_OPENCV) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_D3D9) || defined(VISP_HAVE_GTK) || \
     (defined(VISP_HAVE_X11) && !(defined(__APPLE__) && defined(__MACH__))))
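// This example is built only when Ogre3D is available together with at least
// one display capability: OpenCV, GDI, Direct3D, GTK, or X11 (X11 being
// excluded on Apple platforms, see below).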

//#if defined(VISP_HAVE_X11) && ! defined(APPLE)
#if defined(VISP_HAVE_X11) && !(defined(__APPLE__) && defined(__MACH__))
// Including vpDisplayX.h produces an error on OSX: ‘typedef int Cursor’
// /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
// declaration as ‘typedef XID Cursor’. That is why it should not be
// used on APPLE platforms.
#include <visp3/gui/vpDisplayX.h>
#endif
#include <visp3/ar/vpAROgre.h>
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpDebug.h>
#include <visp3/core/vpImagePoint.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/gui/vpDisplayD3D.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/vision/vpPose.h>

// List of allowed command line options
#define GETOPTARGS "ci:p:h"
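// getopt-style option string: a letter followed by ':' (here 'i' and 'p')
// expects a value, while 'c' and 'h' are simple flags.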

void usage(const char *name, const char *badparam, std::string ipath, std::string ppath)
{
  fprintf(stdout, "\n\
Test augmented reality using the vpAROgre class.\n\
\n\
SYNOPSIS\n\
  %s [-i <test image path>] [-p <personal image path>]\n\
     [-c] [-h]\n", name);

  fprintf(stdout, "\n\
OPTIONS:                                          Default\n\
  -i <input image path>                           %s\n\
     Set image input path.\n\
     From this path read images \n\
     \"mire-2/image.%%04d.pgm\". These \n\
     images come from ViSP-images-x.y.z.tar.gz available \n\
     on the ViSP website.\n\
     Setting the VISP_INPUT_IMAGE_PATH environment\n\
     variable produces the same behaviour as using\n\
     this option.\n\
 \n\
  -p <personal image path>                        %s\n\
     Specify a personal sequence containing images \n\
     to process.\n\
     By image sequence, we mean one file per image.\n\
     The following image file formats are supported:\n\
     PNM (PGM P5, PPM P6). The format is selected by\n\
     analysing the filename extension.\n\
     Example : \"/Temp/ViSP-images/cube/image.%%04d.pgm\"\n\
     %%04d is for the image numbering.\n\
\n\
  -c\n\
     Disable the mouse click. Useful to automate the \n\
     execution of this program without human intervention.\n\
\n\
  -h\n\
     Print the help.\n", ipath.c_str(), ppath.c_str());

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
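// Typical invocations (illustrative; the executable name and paths depend on
// your build and data setup):
//   AROgre -i /path/to/ViSP-images        read the default "mire-2" sequence
//   AROgre -p /tmp/myseq/image.%04d.pgm   process a personal image sequence
//   AROgre -c                             run without waiting for mouse clicks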
bool getOptions(int argc, const char **argv, std::string &ipath, std::string &ppath, bool &click_allowed)
{
  const char *optarg;
  int c;
  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg)) > 1) {

    switch (c) {
    case 'c':
      click_allowed = false;
      break;
    case 'i':
      ipath = optarg;
      break;
    case 'p':
      ppath = optarg;
      break;
    case 'h':
      usage(argv[0], NULL, ipath, ppath);
      return false;
      break;

    default:
      usage(argv[0], optarg, ipath, ppath);
      return false;
      break;
    }
  }

  if ((c == 1) || (c == -1)) {
    // standalone param or error
    usage(argv[0], NULL, ipath, ppath);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg << std::endl << std::endl;
    return false;
  }

  return true;
}

#ifndef DOXYGEN_SHOULD_SKIP_THIS

class vpAROgreExample : public vpAROgre
{
public:
  // The constructor doesn't change here
  vpAROgreExample(const vpCameraParameters &mcam = vpCameraParameters(), unsigned int width = 640,
                  unsigned int height = 480, const char *resourcePath = NULL)
    : vpAROgre(mcam, width, height)
  {
    // Direction vectors
    if (resourcePath)
      mResourcePath = resourcePath;
    std::cout << "mResourcePath: " << mResourcePath << std::endl;
    vecDevant = Ogre::Vector3(0, -1, 0);
    robot = NULL;
    mAnimationState = NULL;
  }

protected:
  // Attributes
  // Vector to move
  Ogre::Vector3 vecDevant;
  // Animation attribute
  Ogre::AnimationState *mAnimationState;
  // The entity representing the robot
  Ogre::Entity *robot;

  // Our scene will just be a plane
  void createScene()
  {
    // Lights
    mSceneMgr->setAmbientLight(Ogre::ColourValue((float)0.6, (float)0.6, (float)0.6)); // Default value of lighting
    Ogre::Light *light = mSceneMgr->createLight();
    light->setDiffuseColour(1.0, 1.0, 1.0);  // scaled RGB values
    light->setSpecularColour(1.0, 1.0, 1.0); // scaled RGB values
    // Point light
    light->setPosition(-5, -5, 10);
    light->setType(Ogre::Light::LT_POINT);
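    // Attenuation parameters: range, then constant, linear and quadratic factors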
    light->setAttenuation((Ogre::Real)100, (Ogre::Real)1.0, (Ogre::Real)0.045, (Ogre::Real)0.0075);
    // Shadows
    light->setCastShadows(true);

    // Create the Entity
    robot = mSceneMgr->createEntity("Robot", "robot.mesh");
    // Attach robot to scene graph
    Ogre::SceneNode *RobotNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Robot");
    RobotNode->attachObject(robot);
    RobotNode->scale((Ogre::Real)0.001, (Ogre::Real)0.001, (Ogre::Real)0.001);
    RobotNode->pitch(Ogre::Degree(90));
    RobotNode->yaw(Ogre::Degree(-90));
    robot->setCastShadows(true);
    mSceneMgr->setShadowTechnique(Ogre::SHADOWTYPE_STENCIL_MODULATIVE);

    // Add an animation
    // Select the animation to play
    mAnimationState = robot->getAnimationState("Idle");
    // Start over when finished
    mAnimationState->setLoop(true);
    // Animation enabled
    mAnimationState->setEnabled(true);

    // Add a ground
    Ogre::Plane plan;
    plan.d = 0;
    plan.normal = Ogre::Vector3::UNIT_Z;
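    // Create a 0.22 x 0.16 ground plane tessellated into 10 x 10 segments,
    // with normals and one set of texture coordinates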
    Ogre::MeshManager::getSingleton().createPlane("sol", Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, plan,
                                                  (Ogre::Real)0.22, (Ogre::Real)0.16, 10, 10, true, 1, 1, 1);
    Ogre::Entity *ent = mSceneMgr->createEntity("Entitesol", "sol");
    Ogre::SceneNode *PlaneNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Entitesol");
    PlaneNode->attachObject(ent);
    ent->setMaterialName("Examples/GrassFloor");
  }

  bool customframeEnded(const Ogre::FrameEvent &evt)
  {
    // Update the animation
    // To advance it, we add the time elapsed since the last frame
    mAnimationState->addTime(evt.timeSinceLastFrame);
    return true;
  }

#ifdef VISP_HAVE_OIS
  bool processInputEvent(const Ogre::FrameEvent & /*evt*/)
  {
    mKeyboard->capture();
    Ogre::Matrix3 rotmy;
    double angle = -M_PI / 8;
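    // vecDevant is the robot's forward direction in the ground plane; the
    // left/right keys below build rotmy, a rotation about the vertical Z axis
    // by +/- angle, and apply it to both vecDevant and the "Robot" scene node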
    if (mKeyboard->isKeyDown(OIS::KC_ESCAPE))
      return false;

    // Event telling that we will have to move, setting the animation to
    // "Walk"; if false, the animation goes back to "Idle"
    bool event = false;
    // Check entries
    if (mKeyboard->isKeyDown(OIS::KC_Z) || mKeyboard->isKeyDown(OIS::KC_UP)) {
      mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition() +
                                                    (Ogre::Real)0.003 * vecDevant);
      event = true;
    }
    if (mKeyboard->isKeyDown(OIS::KC_S) || mKeyboard->isKeyDown(OIS::KC_DOWN)) {
      mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition() -
                                                    (Ogre::Real)0.003 * vecDevant);
      event = true;
    }
    if (mKeyboard->isKeyDown(OIS::KC_Q) || mKeyboard->isKeyDown(OIS::KC_LEFT)) {
      rotmy = Ogre::Matrix3((Ogre::Real)cos(-angle), (Ogre::Real)sin(-angle), 0, (Ogre::Real)(-sin(-angle)),
                            (Ogre::Real)cos(-angle), 0, 0, 0, 1);
      vecDevant = vecDevant * rotmy;
      mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)(-angle)));
      event = true;
    }
    if (mKeyboard->isKeyDown(OIS::KC_D) || mKeyboard->isKeyDown(OIS::KC_RIGHT)) {
      rotmy = Ogre::Matrix3((Ogre::Real)cos(angle), (Ogre::Real)sin(angle), 0, (Ogre::Real)(-sin(angle)),
                            (Ogre::Real)cos(angle), 0, 0, 0, 1);
      vecDevant = vecDevant * rotmy;
      mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)angle));
      event = true;
    }

    // Play the right animation
    if (event) {
      mAnimationState = robot->getAnimationState("Walk");
    } else
      mAnimationState = robot->getAnimationState("Idle");

    // Start over when finished
    mAnimationState->setLoop(true);
    // Animation enabled
    mAnimationState->setEnabled(true);

    return true;
  }
#endif
};

void computeInitialPose(vpCameraParameters *mcam, vpImage<unsigned char> &I, vpPose *mPose, vpDot2 *md,
                        vpImagePoint *mcog, vpHomogeneousMatrix *cMo, vpPoint *mP, const bool &opt_click_allowed)
{
  // ---------------------------------------------------
  // Code inspired from ViSP example of camera pose
  // ----------------------------------------------------
  bool opt_display = true;

//#if defined(VISP_HAVE_X11) && ! defined(APPLE)
#if defined(VISP_HAVE_X11) && !(defined(__APPLE__) && defined(__MACH__))
  // produces an error on OSX: ‘typedef int Cursor’
  // /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
  // declaration as ‘typedef XID Cursor’. That's why it should not be
  // used on APPLE platforms
  vpDisplayX display;
#elif defined VISP_HAVE_GTK
  vpDisplayGTK display;
#elif defined VISP_HAVE_GDI
  vpDisplayGDI display;
#elif defined VISP_HAVE_OPENCV
  vpDisplayOpenCV display;
#elif defined VISP_HAVE_D3D9
  vpDisplayD3D display;
#endif
  for (unsigned int i = 0; i < 4; i++) {
    if (opt_display) {
      md[i].setGraphics(true);
    } else {
      md[i].setGraphics(false);
    }
  }

  if (opt_display) {
    try {
      // Display size is automatically defined by the image (I) size
      display.init(I, 100, 100, "Preliminary Pose Calculation");
      // Display the image
      // The image class has a member that specifies a pointer toward
      // the display that has been initialized in the display declaration,
      // therefore it is no longer necessary to make a reference to the
      // display variable.
      vpDisplay::display(I);
      // Flush the display
      vpDisplay::flush(I);

    } catch (...) {
      vpERROR_TRACE("Error while displaying the image");
      return;
    }
  }

  std::cout << "*************************************************************"
               "***********************"
            << std::endl;
  std::cout << "*************************** Preliminary Pose Calculation "
               "***************************"
            << std::endl;
  std::cout << "****************************** Click on the 4 dots "
               "*******************************"
            << std::endl;
  std::cout << "********Dot1 : (-x,-y,0), Dot2 : (x,-y,0), Dot3 : (x,y,0), "
               "Dot4 : (-x,y,0)**********"
            << std::endl;
  std::cout << "*************************************************************"
               "***********************"
            << std::endl;

  try {
    vpImagePoint ip[4];
    if (!opt_click_allowed) {
      ip[0].set_i(265);
      ip[0].set_j(93);
      ip[1].set_i(248);
      ip[1].set_j(242);
      ip[2].set_i(166);
      ip[2].set_j(215);
      ip[3].set_i(178);
      ip[3].set_j(85);
    }
    for (unsigned int i = 0; i < 4; i++) {
      // by using setGraphics, we request to see the edges of the dot
      // in red on the screen.
      // It uses the overlay image plane.
      // The default of this setting is that it is time consuming

      md[i].setGraphics(true);
      md[i].setGrayLevelPrecision(0.7);
      md[i].setSizePrecision(0.5);

      for (unsigned int j = 0; j < i; j++)
        md[j].display(I);

      // flush the display buffer
      vpDisplay::flush(I);
      try {
        if (opt_click_allowed)
          md[i].initTracking(I);
        else
          md[i].initTracking(I, ip[i]);
      } catch (...) {
      }

      mcog[i] = md[i].getCog();
      // an exception is thrown by the track method if
      // - the dot is lost
      // - the number of pixels is too small
      // - too many pixels are detected (this is usual when a "big"
      //   specularity occurs; the threshold can be modified using the
      //   setNbMaxPoint(int) method)
      if (opt_display) {
        md[i].display(I);
        // flush the display buffer
        vpDisplay::flush(I);
      }
    }
  } catch (const vpException &e) {
    vpERROR_TRACE("Error while tracking dots");
    vpCTRACE << e;
    return;
  }

  if (opt_display) {
    // display a red cross (size 10) in the image at the dot center
    // of gravity location
    //
    // WARNING
    // in the vpDisplay class members, when pixel coordinates
    // are considered the first element is the row index and the second
    // is the column index:
    // vpDisplay::displayCross(Image, row index, column index, size, color)
    // therefore u and v are inverted w.r.t. the vpDot specification
    // Alternatively, to avoid this problem another set of members has
    // been defined in the vpDisplay class.
    // If the method name is postfixed with _uv the specification is:
    // vpDisplay::displayCross_uv(Image, column index, row index, size,
    // color)

    for (unsigned int i = 0; i < 4; i++)
      vpDisplay::displayCross(I, mcog[i], 10, vpColor::red);

    // flush the X11 buffer
    vpDisplay::flush(I);
  }

  // --------------------------------------------------------
  // Now we will compute the pose
  // --------------------------------------------------------

  // the list of points is cleared (if that was not done before)
  mPose->clearPoint();

  // we set the 3D point coordinates (in meters!) in the object/world frame
  double l = 0.06;
  double L = 0.07;
  mP[0].setWorldCoordinates(-L, -l, 0); // (X,Y,Z)
  mP[1].setWorldCoordinates(L, -l, 0);
  mP[2].setWorldCoordinates(L, l, 0);
  mP[3].setWorldCoordinates(-L, l, 0);

  // pixel -> meter conversion
  for (unsigned int i = 0; i < 4; i++) {
    // u[i], v[i] are expressed in pixels
    // the conversion to meters is achieved using
    // x = (u-u0)/px
    // y = (v-v0)/py
    // where px, py, u0, v0 are the intrinsic camera parameters
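    // For instance, with px = 592 and u0 at the image center, a dot lying
    // 100 pixels to the right of the center maps to x = 100 / 592 ≈ 0.169
    // in the normalized image plane (illustrative values)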
    double x = 0, y = 0;
    vpPixelMeterConversion::convertPoint(*mcam, mcog[i], x, y);
    mP[i].set_x(x);
    mP[i].set_y(y);
  }

  // The pose structure is built; we put the set of points into the point list
  // here both 2D and 3D world coordinates are known
  for (unsigned int i = 0; i < 4; i++) {
    mPose->addPoint(mP[i]); // and added to the pose computation point list
  }

  // compute the initial pose using a linear method (here Lagrange) followed
  // by a non-linear minimisation method

  // Pose by Lagrange: it provides an initialization of the pose
  mPose->computePose(vpPose::LAGRANGE, *cMo);
  // the pose is now refined using the virtual visual servoing approach
  // Warning: cMo needs to be initialized otherwise it may diverge
  mPose->computePose(vpPose::VIRTUAL_VS, *cMo);

  // Display briefly just to have a glimpse at the ViSP pose
  // while(cpt<500){
  if (opt_display) {
    // Display the computed pose
    mPose->display(I, *cMo, *mcam, 0.05, vpColor::red);
    vpDisplay::flush(I);
    vpTime::wait(800);
  }
}

#endif

int main(int argc, const char **argv)
{
  try {
    std::string env_ipath;
    std::string opt_ipath;
    std::string ipath;
    std::string opt_ppath;
    std::string dirname;
    std::string filename;
    bool opt_click_allowed = true;

    // Get the visp-images-data package path or VISP_INPUT_IMAGE_PATH
    // environment variable value
    env_ipath = vpIoTools::getViSPImagesDataPath();

    // Set the default input path
    if (!env_ipath.empty())
      ipath = env_ipath;

    // Read the command line options
    if (getOptions(argc, argv, opt_ipath, opt_ppath, opt_click_allowed) == false) {
      exit(-1);
    }

    // Get the option values
    if (!opt_ipath.empty())
      ipath = opt_ipath;

    // Compare ipath and env_ipath. If they differ, we take into account
    // the input path coming from the command line option
    if (!opt_ipath.empty() && !env_ipath.empty() && opt_ppath.empty()) {
      if (ipath != env_ipath) {
        std::cout << std::endl << "WARNING: " << std::endl;
        std::cout << "  Since -i <visp image path=" << ipath << "> "
                  << "  is different from VISP_IMAGE_PATH=" << env_ipath << std::endl
                  << "  we skip the environment variable." << std::endl;
      }
    }

    // Test if an input path is set
    if (opt_ipath.empty() && env_ipath.empty() && opt_ppath.empty()) {
      usage(argv[0], NULL, ipath, opt_ppath);
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH " << std::endl
                << "  environment variable to specify the location of the " << std::endl
                << "  image path where test images are located." << std::endl
                << "  Use -p <personal image path> option if you want to " << std::endl
                << "  use personal images." << std::endl
                << std::endl;

      exit(-1);
    }

    std::ostringstream s;

    if (opt_ppath.empty()) {
      // Set the path location of the image sequence
      dirname = vpIoTools::createFilePath(ipath, "mire-2");

      // Build the name of the image file

      s.setf(std::ios::right, std::ios::adjustfield);
      s << "image.%04d.pgm";
      filename = vpIoTools::createFilePath(dirname, s.str());
    } else {
      filename = opt_ppath;
    }

    // We will read a sequence of images
    vpVideoReader grabber;
    grabber.setFirstFrameIndex(1);
    grabber.setFileName(filename.c_str());
    // Grey level image associated to a display in the initial pose
    // computation
    vpImage<unsigned char> Idisplay;
    // Grey level image to track points
    vpImage<unsigned char> I;
    // RGBa image to get background
    vpImage<vpRGBa> IC;
    // Homogeneous matrix representing the pose of the object in the camera frame
    vpHomogeneousMatrix cMo;

    // Variables used for pose computation purposes
    vpPose mPose;
    vpDot2 md[4];
    vpImagePoint mcog[4];
    vpPoint mP[4];

    // Camera parameters we got from calibration
    // Keep u0 and v0 as center of the screen
    vpCameraParameters mcam;

    // Read the PGM image named "filename" on the disk, and put the
    // bitmap into the image structure I. I is initialized to the
    // correct size
    //
    // exception readPGM may throw various exceptions if, for example,
    // the file does not exist, or if the memory cannot be allocated
    try {
      vpCTRACE << "Load: " << filename << std::endl;
      grabber.open(Idisplay);
      grabber.acquire(Idisplay);
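      // vpCameraParameters(px, py, u0, v0): focal lengths in pixels and
      // principal point, here taken at the image center (coarse calibration values)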
      vpCameraParameters mcamTmp(592, 570, grabber.getWidth() / 2, grabber.getHeight() / 2);
      // Compute the initial pose of the camera
      computeInitialPose(&mcamTmp, Idisplay, &mPose, md, mcog, &cMo, mP, opt_click_allowed);
      // Close the framegrabber
      grabber.close();

      // Associate the grabber to the RGBa image
      grabber.open(IC);
      mcam.init(mcamTmp);
    } catch (...) {
      // an exception is thrown if an exception from readPGM has been caught
      // here this will result in the end of the program
      // Note that another error message has been printed from readPGM
      // to give more information about the error
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Cannot read " << filename << std::endl;
      std::cerr << "  Check your -i " << ipath << " option " << std::endl
                << "  or VISP_INPUT_IMAGE_PATH environment variable." << std::endl;
      exit(-1);
    }

    // Create a vpAROgre object with color background
    vpAROgreExample ogre(mcam, (unsigned int)grabber.getWidth(), (unsigned int)grabber.getHeight());
    // Initialize it
    ogre.init(IC);

    double t0 = vpTime::measureTimeMs();

    // Rendering loop
    while (ogre.continueRendering() && !grabber.end()) {
      // Acquire a frame
      grabber.acquire(IC);

      // Convert it to a grey level image for tracking purpose
      vpImageConvert::convert(IC, I);

      // kill the point list
      mPose.clearPoint();

      // track the dots
      for (int i = 0; i < 4; i++) {
        // track the point
        md[i].track(I, mcog[i]);
        md[i].setGrayLevelPrecision(0.90);
        // pixel -> meter conversion
        {
          double x = 0, y = 0;
          vpPixelMeterConversion::convertPoint(mcam, mcog[i], x, y);
          mP[i].set_x(x);
          mP[i].set_y(y);
        }

        // and add it to the pose computation point list
        mPose.addPoint(mP[i]);
      }
      // the pose structure has been updated

      // the pose is now updated using the virtual visual servoing approach
      // Dementhon or Lagrange is no longer necessary, the pose at the
      // previous iteration is sufficient
      mPose.computePose(vpPose::VIRTUAL_VS, cMo);

      // Display with Ogre
      ogre.display(IC, cMo);

      // Wait so that the video does not go too fast
      double t1 = vpTime::measureTimeMs();
      std::cout << "\r> " << 1000 / (t1 - t0) << " fps";
      t0 = t1;
    }
    // Close the grabber
    grabber.close();

    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return EXIT_FAILURE;
  } catch (Ogre::Exception &e) {
    std::cout << "Catch an Ogre exception: " << e.getDescription() << std::endl;
    return EXIT_FAILURE;
  } catch (...) {
    std::cout << "Catch an exception " << std::endl;
    return EXIT_FAILURE;
  }
}
#else // VISP_HAVE_OGRE && VISP_HAVE_DISPLAY
int main()
{
#if (!(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GTK) || defined(VISP_HAVE_GDI)))
  std::cout << "You do not have X11, or GTK, or GDI (Graphical Device Interface) functionalities to display images..." << std::endl;
  std::cout << "Tip if you are on a unix-like system:" << std::endl;
  std::cout << "- Install X11, configure again ViSP using cmake and build again this example" << std::endl;
  std::cout << "Tip if you are on a windows-like system:" << std::endl;
  std::cout << "- Install GDI, configure again ViSP using cmake and build again this example" << std::endl;
#else
  std::cout << "You do not have Ogre functionalities" << std::endl;
  std::cout << "Tip:" << std::endl;
  std::cout << "- Install Ogre3D, configure again ViSP using cmake and build again this example" << std::endl;
#endif
  return EXIT_SUCCESS;
}
#endif