ViSP  2.6.2
AROgre.cpp
/****************************************************************************
 *
 * $Id: AROgre.cpp 3619 2012-03-09 17:28:57Z fspindle $
 *
 * This file is part of the ViSP software.
 * Copyright (C) 2005 - 2012 by INRIA. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * ("GPL") version 2 as published by the Free Software Foundation.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact INRIA about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://www.irisa.fr/lagadic/visp/visp.html for more information.
 *
 * This software was developed at:
 * INRIA Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 * http://www.irisa.fr/lagadic
 *
 * If you have questions regarding the use of this file, please contact
 * INRIA at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * Description:
 * Implementation of a simple augmented reality application using the vpAROgre
 * class.
 *
 * Authors:
 * Bertrand Delabarre
 *
 *****************************************************************************/

#include <visp/vpConfig.h>
#include <iostream>

#ifdef VISP_HAVE_OGRE

#if defined(VISP_HAVE_X11) && ! defined(APPLE)
// produce an error on OSX: ‘typedef int Cursor’
// /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
// declaration as ‘typedef XID Cursor’. That's why it should not be
// used on APPLE platforms
#  include <visp/vpDisplayX.h>
#endif
#include <visp/vpDisplayGTK.h>
#include <visp/vpDisplayGDI.h>
#include <visp/vpDisplayOpenCV.h>
#include <visp/vpImageConvert.h>
#include <visp/vpPose.h>
#include <visp/vpPoint.h>
#include <visp/vpImagePoint.h>
#include <visp/vpDot2.h>
#include <visp/vpPixelMeterConversion.h>
#include <visp/vpVideoReader.h>
#include <visp/vpParseArgv.h>
#include <visp/vpIoTools.h>
#include <visp/vpDebug.h>
#include <visp/vpAROgre.h>

// List of allowed command line options
#define GETOPTARGS "ci:p:h"
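// In this getopt-style option string a trailing ':' means the option expects a
// value: -i and -p take an argument, while -c and -h are simple flags.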

void usage(const char *name, const char *badparam, std::string ipath, std::string ppath)
{
  fprintf(stdout, "\n\
Test augmented reality using the vpAROgre class.\n\
\n\
SYNOPSIS\n\
  %s [-i <test image path>] [-p <personal image path>]\n\
     [-c] [-h]\n", name);

  fprintf(stdout, "\n\
OPTIONS:                                               Default\n\
  -i <input image path>                                %s\n\
     Set image input path.\n\
     From this path read images\n\
     \"ViSP-images/mire-2/image.%%04d.pgm\". These\n\
     images come from ViSP-images-x.y.z.tar.gz available\n\
     on the ViSP website.\n\
     Setting the VISP_INPUT_IMAGE_PATH environment\n\
     variable produces the same behaviour as using\n\
     this option.\n\
\n\
  -p <personal image path>                             %s\n\
     Specify a personal sequence containing images\n\
     to process.\n\
     By image sequence, we mean one file per image.\n\
     The following image file formats are supported:\n\
     PNM (PGM P5, PPM P6). The format is selected by\n\
     analysing the filename extension.\n\
     Example: \"/Temp/ViSP-images/cube/image.%%04d.pgm\"\n\
     %%04d is for the image numbering.\n\
\n\
  -c\n\
     Disable the mouse click. Useful to automate the\n\
     execution of this program without human intervention.\n\
\n\
  -h\n\
     Print the help.\n",
          ipath.c_str(), ppath.c_str());

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
bool getOptions(int argc, const char **argv, std::string &ipath,
                std::string &ppath, bool &click_allowed)
{
  const char *optarg;
  int c;
  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg)) > 1) {

    switch (c) {
    case 'c': click_allowed = false; break;
    case 'i': ipath = optarg; break;
    case 'p': ppath = optarg; break;
    case 'h': usage(argv[0], NULL, ipath, ppath);
      return false; break;

    default:
      usage(argv[0], optarg, ipath, ppath);
      return false; break;
    }
  }

  if ((c == 1) || (c == -1)) {
    // standalone param or error
    usage(argv[0], NULL, ipath, ppath);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg << std::endl << std::endl;
    return false;
  }

  return true;
}


#ifndef DOXYGEN_SHOULD_SKIP_THIS

class vpAROgreExample : public vpAROgre
{
public:
  // The constructor doesn't change here
  vpAROgreExample(const vpCameraParameters &mcam = vpCameraParameters(),
                  unsigned int width = 640, unsigned int height = 480,
                  const char *resourcePath=NULL)
    : vpAROgre(mcam, width, height){
    if (resourcePath) mResourcePath = resourcePath;
    std::cout << "mResourcePath: " << mResourcePath << std::endl;
    // Direction vector used to move the robot
    vecDevant = Ogre::Vector3(0,-1,0);
  }

protected :

  // Attributes
  // Vector to move
  Ogre::Vector3 vecDevant;
  // Animation attribute
  Ogre::AnimationState * mAnimationState;
  // The entity representing the robot
  Ogre::Entity* robot;

  // Our scene will just be a plane
  void createScene()
  {
    // Lights
    mSceneMgr->setAmbientLight(Ogre::ColourValue((float)0.6,(float)0.6,(float)0.6)); // Default value of lighting
    Ogre::Light * light = mSceneMgr->createLight();
    light->setDiffuseColour(1.0,1.0,1.0); // scaled RGB values
    light->setSpecularColour(1.0,1.0,1.0); // scaled RGB values
    // Point light
    light->setPosition(-5, -5, 10);
    light->setType(Ogre::Light::LT_POINT);
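    // Note: Ogre::Light::setAttenuation below takes (range, constant, linear,
    // quadratic) coefficients controlling how the light fades with distance.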
    light->setAttenuation((Ogre::Real)100, (Ogre::Real)1.0, (Ogre::Real)0.045, (Ogre::Real)0.0075);
    // Shadows
    light->setCastShadows(true);

    // Create the Entity
    robot = mSceneMgr->createEntity("Robot", "robot.mesh");
    // Attach robot to scene graph
    Ogre::SceneNode* RobotNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Robot");
    RobotNode->attachObject(robot);
    RobotNode->scale((Ogre::Real)0.001,(Ogre::Real)0.001,(Ogre::Real)0.001);
    RobotNode->pitch(Ogre::Degree(90));
    RobotNode->yaw(Ogre::Degree(-90));
    robot->setCastShadows(true);
    mSceneMgr->setShadowTechnique(Ogre::SHADOWTYPE_STENCIL_MODULATIVE);

    // Add an animation
    // Select the right animation
    mAnimationState = robot->getAnimationState( "Idle" );
    // Start over when finished
    mAnimationState->setLoop( true );
    // Animation enabled
    mAnimationState->setEnabled( true );

    // Add a ground
    Ogre::Plane plan;
    plan.d = 0;
    plan.normal = Ogre::Vector3::UNIT_Z;
    Ogre::MeshManager::getSingleton().createPlane("sol",Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, plan, (Ogre::Real)0.22, (Ogre::Real)0.16, 10, 10, true, 1, 1, 1);
    Ogre::Entity* ent = mSceneMgr->createEntity("Entitesol", "sol");
    Ogre::SceneNode* PlaneNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Entitesol");
    PlaneNode->attachObject(ent);
    ent->setMaterialName("Examples/GrassFloor");
  }

  bool customframeEnded(const Ogre::FrameEvent& evt) {
    // Update the animation: to make it advance, we add to it the time elapsed
    // since the last frame
    mAnimationState->addTime( evt.timeSinceLastFrame );
    return true;
  }

#ifdef VISP_HAVE_OIS
  bool processInputEvent(const Ogre::FrameEvent& /*evt*/) {
    mKeyboard->capture();
    Ogre::Matrix3 rotmy;
    double angle = -M_PI/8;
    if(mKeyboard->isKeyDown(OIS::KC_ESCAPE))
      return false;

    // Flag telling that the robot has to move; if true the animation is set
    // to "Walk", otherwise it goes back to "Idle"
    bool event = false;
    // Check the keyboard entries
    if(mKeyboard->isKeyDown(OIS::KC_Z) || mKeyboard->isKeyDown(OIS::KC_UP)){
      mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition()+(Ogre::Real)0.003*vecDevant);
      event = true;
    }
    if(mKeyboard->isKeyDown(OIS::KC_S) || mKeyboard->isKeyDown(OIS::KC_DOWN)){
      mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition()-(Ogre::Real)0.003*vecDevant);
      event = true;
    }
    if(mKeyboard->isKeyDown(OIS::KC_Q) || mKeyboard->isKeyDown(OIS::KC_LEFT)){
      rotmy = Ogre::Matrix3((Ogre::Real)cos(-angle), (Ogre::Real)sin(-angle), 0,
                            (Ogre::Real)(-sin(-angle)), (Ogre::Real)cos(-angle), 0,
                            0, 0, 1);
      vecDevant = vecDevant*rotmy;
      mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)(-angle)));
      event = true;
    }
    if(mKeyboard->isKeyDown(OIS::KC_D) || mKeyboard->isKeyDown(OIS::KC_RIGHT)){
      rotmy = Ogre::Matrix3((Ogre::Real)cos(angle), (Ogre::Real)sin(angle), 0,
                            (Ogre::Real)(-sin(angle)), (Ogre::Real)cos(angle), 0,
                            0, 0, 1);
      vecDevant = vecDevant*rotmy;
      mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)angle));
      event = true;
    }
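    // Note on the two rotation cases above: rotmy is a rotation of +/- angle
    // about the Z axis, and vecDevant (the robot's forward direction) is
    // updated as a row vector (vecDevant * rotmy) so that the translations
    // applied on the Z/UP and S/DOWN keys follow the robot's new heading.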

    // Play the right animation
    if(event){
      mAnimationState = robot->getAnimationState("Walk");
    }
    else mAnimationState = robot->getAnimationState( "Idle" );

    // Start over when finished
    mAnimationState->setLoop( true );
    // Animation enabled
    mAnimationState->setEnabled( true );

    return true;
  }
#endif
};
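// The class above only overrides the three vpAROgre hooks that this example
// needs: createScene() builds the Ogre scene (robot mesh, light and ground
// plane), customframeEnded() advances the current animation at each rendered
// frame, and processInputEvent() (when OIS is available) moves the robot from
// the keyboard.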

void computeInitialPose(vpCameraParameters *mcam, vpImage<unsigned char> &I,
                        vpPose * mPose, vpDot2 *md, vpImagePoint *mcog,
                        vpHomogeneousMatrix *cmo, vpPoint *mP,
                        const bool &opt_click_allowed)
{
  // ---------------------------------------------------
  // Code inspired from ViSP example of camera pose
  // ----------------------------------------------------
  bool opt_display = true;

#if defined(VISP_HAVE_X11) && ! defined(APPLE)
  // produce an error on OSX: ‘typedef int Cursor’
  // /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
  // declaration as ‘typedef XID Cursor’. That's why it should not be
  // used on APPLE platforms
  vpDisplayX display;
#elif defined VISP_HAVE_GTK
  vpDisplayGTK display;
#elif defined VISP_HAVE_GDI
  vpDisplayGDI display;
#elif defined VISP_HAVE_OPENCV
  vpDisplayOpenCV display;
#endif

  for (unsigned int i=0 ; i < 4 ; i++)
  {
    if (opt_display) {
      md[i].setGraphics(true) ;
    }
    else {
      md[i].setGraphics(false) ;
    }
  }

  if (opt_display) {
    try{
      // Display size is automatically defined by the image (I) size
      display.init(I, 100, 100, "Preliminary Pose Calculation");
      // Display the image.
      // The image class has a member that specifies a pointer toward
      // the display that has been initialized in the display declaration,
      // therefore it is no longer necessary to make a reference to the
      // display variable.
      vpDisplay::display(I) ;
      // Flush the display
      vpDisplay::flush(I) ;

    }
    catch(...)
    {
      vpERROR_TRACE("Error while displaying the image") ;
      return ;
    }
  }

  std::cout<<"************************************************************************************"<<std::endl;
  std::cout<<"*************************** Preliminary Pose Calculation ***************************"<<std::endl;
  std::cout<<"******************************* Click on the 4 dots ********************************"<<std::endl;
  std::cout<<"******** Dot1 : (-x,-y,0), Dot2 : (x,-y,0), Dot3 : (x,y,0), Dot4 : (-x,y,0) ********"<<std::endl;
  std::cout<<"************************************************************************************"<<std::endl;

  try{
    vpImagePoint ip[4];
    if (! opt_click_allowed) {
      ip[0].set_i( 265 );
      ip[0].set_j( 93 );
      ip[1].set_i( 248 );
      ip[1].set_j( 242 );
      ip[2].set_i( 166 );
      ip[2].set_j( 215 );
      ip[3].set_i( 178 );
      ip[3].set_j( 85 );
    }
    for(unsigned int i=0;i<4;i++) {
      // By using setGraphics, we request to see the edges of the dot
      // in red on the screen.
      // It uses the overlay image plane.
      // This is disabled by default since it is time consuming.

      md[i].setGraphics(true) ;
      md[i].setGrayLevelPrecision(0.7);
      md[i].setSizePrecision(0.5);

      for(unsigned int j = 0;j<i;j++)
        md[j].display(I) ;

      // flush the display buffer
      vpDisplay::flush(I);
      try{
        if (opt_click_allowed)
          md[i].initTracking(I);
        else
          md[i].initTracking(I, ip[i]);
      }
      catch(...){
      }

      mcog[i] = md[i].getCog();
      // An exception is thrown by the track method if
      // - the dot is lost
      // - the number of pixels is too small
      // - too many pixels are detected (this is usual when a "big" specularity
      //   occurs). The threshold can be modified using the
      //   setNbMaxPoint(int) method
      if (opt_display) {
        md[i].display(I) ;
        // flush the display buffer
        vpDisplay::flush(I) ;
      }
    }
  }
  catch(vpException e){
    vpERROR_TRACE("Error while tracking dots") ;
    vpCTRACE << e;
    return;
  }

  if (opt_display)
  {
    // Display a red cross (size 10) in the image at the dot center
    // of gravity location.
    //
    // WARNING
    // In the vpDisplay class members, when pixel coordinates
    // are considered, the first element is the row index and the second
    // is the column index:
    // vpDisplay::displayCross(Image, row index, column index, size, color)
    // therefore u and v are inverted with respect to the vpDot specification.
    // Alternatively, to avoid this problem, another set of members has
    // been defined in the vpDisplay class.
    // If the method name is postfixed with _uv the specification is:
    // vpDisplay::displayCross_uv(Image, column index, row index, size, color)

    for (unsigned int i=0 ; i < 4 ; i++)
      vpDisplay::displayCross(I, mcog[i], 10, vpColor::red) ;

    // flush the display buffer
    vpDisplay::flush(I) ;
  }

  // --------------------------------------------------------
  // Now we will compute the pose
  // --------------------------------------------------------

  // The list of points is cleared (in case it was not empty)
  mPose->clearPoint() ;

  // We set the 3D point coordinates (in meters!) in the object/world frame
  double l=0.06 ;
  double L=0.07 ;
  mP[0].setWorldCoordinates(-L,-l, 0 ) ; // (X,Y,Z)
  mP[1].setWorldCoordinates( L,-l, 0 ) ;
  mP[2].setWorldCoordinates( L, l, 0 ) ;
  mP[3].setWorldCoordinates(-L, l, 0 ) ;

  // Pixel -> meter conversion
  for (unsigned int i=0 ; i < 4 ; i++)
  {
    // u[i], v[i] are expressed in pixels.
    // The conversion to meters is achieved using
    //   x = (u-u0)/px
    //   y = (v-v0)/py
    // where px, py, u0, v0 are the intrinsic camera parameters
    double x=0, y=0;
    vpPixelMeterConversion::convertPoint(*mcam, mcog[i], x, y) ;
    mP[i].set_x(x) ;
    mP[i].set_y(y) ;
  }
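  // Illustrative example of the conversion above (numbers are hypothetical):
  // with u0 = 320 and px = 592, a dot detected at u = 350 would give
  // x = (350 - 320) / 592 ≈ 0.0507, a dimensionless coordinate in the
  // normalized image plane.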


  // The pose structure is built: we put in the point list the set of points
  // for which both 2D and 3D world coordinates are now known
  for (unsigned int i=0 ; i < 4 ; i++)
  {
    mPose->addPoint(mP[i]) ; // and added to the pose computation point list
  }

  // Compute the initial pose using a linear method followed by a non linear
  // minimisation method

  // Pose by Lagrange: it provides an initialization of the pose
  mPose->computePose(vpPose::LAGRANGE, *cmo) ;
  // The pose is now refined using the virtual visual servoing approach.
  // Warning: cMo needs to be initialized otherwise it may diverge
  mPose->computePose(vpPose::VIRTUAL_VS, *cmo) ;

  // Display briefly just to have a glimpse at the ViSP pose
  //	while(cpt<500){
  if( opt_display ){
    // Display the computed pose
    mPose->display(I, *cmo, *mcam, 0.05, vpColor::red) ;
    vpDisplay::flush(I) ;
    vpTime::wait(800);
  }
}

#endif

int main(int argc, const char **argv)
{
  std::string env_ipath;
  std::string opt_ipath;
  std::string ipath;
  std::string opt_ppath;
  std::string dirname;
  std::string filename;
  bool opt_click_allowed = true;

  // Get the VISP_INPUT_IMAGE_PATH environment variable value
  char *ptenv = getenv("VISP_INPUT_IMAGE_PATH");
  if (ptenv != NULL)
    env_ipath = ptenv;

  // Set the default input path
  if (! env_ipath.empty())
    ipath = env_ipath;


  // Read the command line options
  if (getOptions(argc, argv, opt_ipath, opt_ppath, opt_click_allowed) == false) {
    exit (-1);
  }

  // Get the option values
  if (!opt_ipath.empty())
    ipath = opt_ipath;

  // Compare ipath and env_ipath. If they differ, we take into account
  // the input path coming from the command line option
  if (!opt_ipath.empty() && !env_ipath.empty() && opt_ppath.empty()) {
    if (ipath != env_ipath) {
      std::cout << std::endl
                << "WARNING: " << std::endl;
      std::cout << "  Since -i <visp image path=" << ipath << "> "
                << "  is different from VISP_INPUT_IMAGE_PATH=" << env_ipath << std::endl
                << "  we skip the environment variable." << std::endl;
    }
  }

  // Test if an input path is set
  if (opt_ipath.empty() && env_ipath.empty() && opt_ppath.empty() ){
    usage(argv[0], NULL, ipath, opt_ppath);
    std::cerr << std::endl
              << "ERROR:" << std::endl;
    std::cerr << "  Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH "
              << std::endl
              << "  environment variable to specify the location of the " << std::endl
              << "  image path where test images are located." << std::endl
              << "  Use -p <personal image path> option if you want to " << std::endl
              << "  use personal images." << std::endl
              << std::endl;

    exit(-1);
  }

  std::ostringstream s;

  if (opt_ppath.empty()){
    // Set the path location of the image sequence
    dirname = ipath + vpIoTools::path("/ViSP-images/mire-2/");

    // Build the name of the image file

    s.setf(std::ios::right, std::ios::adjustfield);
    s << "image.%04d.pgm";
    filename = dirname + s.str();
  }
  else {
    filename = opt_ppath;
  }

  // We will read a sequence of images
  vpVideoReader grabber;
  grabber.setFirstFrameIndex(1);
  grabber.setFileName(filename.c_str());
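  // vpVideoReader interprets the printf-like "%04d" pattern in the file name
  // as the image index, starting here at frame 1, so "image.%04d.pgm" expands
  // to image.0001.pgm, image.0002.pgm, and so on.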
  // Grey level image associated to a display in the initial pose computation
  vpImage<unsigned char> Idisplay;
  // Grey level image to track points
  vpImage<unsigned char> I;
  // RGBa image to get background
  vpImage<vpRGBa> IC;
  // Homogeneous matrix representing the pose of the object frame in the
  // camera frame (cMo)
  vpHomogeneousMatrix cmo;

  // Variables used for pose computation purposes
  vpPose mPose;
  vpDot2 md[4];
  vpImagePoint mcog[4];
  vpPoint mP[4];

  // Camera parameters we got from calibration
  // Keep u0 and v0 as center of the screen
  vpCameraParameters mcam;

  // Read the PGM image named "filename" on the disk, and put the
  // bitmap into the image structure I. I is initialized to the
  // correct size
  //
  // The reading may throw various exceptions if, for example,
  // the file does not exist, or if the memory cannot be allocated
  try{
    vpCTRACE << "Load: " << filename << std::endl;
    grabber.open(Idisplay);
    grabber.acquire(Idisplay);
    vpCameraParameters mcamTmp(592, 570, grabber.getWidth()/2, grabber.getHeight()/2);
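    // The vpCameraParameters constructor used above takes (px, py, u0, v0):
    // the focal length expressed in pixels along u and v, and the principal
    // point, assumed here to lie at the image centre.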
    // Compute the initial pose of the camera
    computeInitialPose(&mcamTmp, Idisplay, &mPose, md, mcog, &cmo, mP,
                       opt_click_allowed);
    // Close the framegrabber
    grabber.close();

    // Associate the grabber to the RGBa image
    grabber.open(IC);
    mcam.init(mcamTmp);
  }
  catch(...)
  {
    // An exception is thrown if the reading failed.
    // Here this will result in the end of the program.
    // Note that another error message has been printed by the reader
    // to give more information about the error
    std::cerr << std::endl
              << "ERROR:" << std::endl;
    std::cerr << "  Cannot read " << filename << std::endl;
    std::cerr << "  Check your -i " << ipath << " option " << std::endl
              << "  or VISP_INPUT_IMAGE_PATH environment variable."
              << std::endl;
    exit(-1);
  }

  // Create a vpAROgre object with color background
  vpAROgreExample ogre(mcam, (unsigned int)grabber.getWidth(), (unsigned int)grabber.getHeight());
  // Initialize it
  ogre.init(IC);

  try
  {
    // Rendering loop
    while(ogre.continueRendering()){
      // Acquire a frame
      grabber.acquire(IC);

      // Convert it to a grey level image for tracking purpose
      vpImageConvert::convert(IC, I);

      // Update pose calculation
      try{
        // Clear the point list
        mPose.clearPoint() ;

        // Track the dots
        for (int i=0 ; i < 4 ; i++)
        {
          // track the point
          md[i].track(I, mcog[i]) ;
          md[i].setGrayLevelPrecision(0.90);
          // pixel -> meter conversion
          {
            double x=0, y=0;
            vpPixelMeterConversion::convertPoint(mcam, mcog[i], x, y) ;
            mP[i].set_x(x) ;
            mP[i].set_y(y) ;
          }

          // and add it to the pose computation point list
          mPose.addPoint(mP[i]) ;
        }
        // The pose structure has been updated.

        // The pose is now updated using the virtual visual servoing approach:
        // Dementhon or Lagrange is no longer necessary, the pose estimated at
        // the previous iteration is a sufficient initialization
        mPose.computePose(vpPose::VIRTUAL_VS, cmo);
      }
      catch(...){
        vpERROR_TRACE("Error in tracking loop") ;
        return 1;
      }

      // Display with Ogre
      ogre.display(IC, cmo);

      // Wait so that the video does not go too fast
      vpTime::wait(15);
    }
    // Close the grabber
    grabber.close();
  }
  catch (Ogre::Exception& e)
  {
    std::cerr << "Exception:\n";
    std::cerr << e.getFullDescription().c_str() << "\n";
    return 1;
  }
  catch (...)
  {
    std::cerr << "Exception: " << "\n";
    return 1;
  }

  return EXIT_SUCCESS;
}
#else // VISP_HAVE_OGRE
int
main()
{
  std::cout << "You should install Ogre3D to run this example..." << std::endl;
}
#endif