ViSP  2.8.0
AROgre.cpp
1 /****************************************************************************
2  *
3  * $Id: AROgre.cpp 4305 2013-07-05 13:23:47Z fspindle $
4  *
5  * This file is part of the ViSP software.
6  * Copyright (C) 2005 - 2013 by INRIA. All rights reserved.
7  *
8  * This software is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU General Public License
10  * ("GPL") version 2 as published by the Free Software Foundation.
11  * See the file LICENSE.txt at the root directory of this source
12  * distribution for additional information about the GNU GPL.
13  *
14  * For using ViSP with software that can not be combined with the GNU
15  * GPL, please contact INRIA about acquiring a ViSP Professional
16  * Edition License.
17  *
18  * See http://www.irisa.fr/lagadic/visp/visp.html for more information.
19  *
20  * This software was developed at:
21  * INRIA Rennes - Bretagne Atlantique
22  * Campus Universitaire de Beaulieu
23  * 35042 Rennes Cedex
24  * France
25  * http://www.irisa.fr/lagadic
26  *
27  * If you have questions regarding the use of this file, please contact
28  * INRIA at visp@inria.fr
29  *
30  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
31  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
32  *
33  *
34  * Description:
35  * Implementation of a simple augmented reality application using the vpAROgre
36  * class.
37  *
38  * Authors:
39  * Bertrand Delabarre
40  *
41  *****************************************************************************/
42 
50 #include <visp/vpConfig.h>
51 #include <iostream>
52 
53 //#if defined(VISP_HAVE_OGRE) && defined(VISP_HAVE_DISPLAY)
54 #if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_OPENCV) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_D3D9) || defined(VISP_HAVE_GTK))
55 
56 #if defined(VISP_HAVE_X11) && ! defined(APPLE)
57 // Including vpDisplayX.h produces an error on OSX: ‘typedef int Cursor’
58 // /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
59 // declaration as ‘typedef XID Cursor’. That is why it should not be
60 // used on APPLE platforms
61 # include <visp/vpDisplayX.h>
62 #endif
63 #include <visp/vpDisplayGTK.h>
64 #include <visp/vpDisplayGDI.h>
65 #include <visp/vpDisplayOpenCV.h>
66 #include <visp/vpDisplayD3D.h>
67 #include <visp/vpPose.h>
68 #include <visp/vpPoint.h>
69 #include <visp/vpImagePoint.h>
70 #include <visp/vpDot2.h>
71 #include <visp/vpPixelMeterConversion.h>
72 #include <visp/vpVideoReader.h>
73 #include <visp/vpParseArgv.h>
74 #include <visp/vpIoTools.h>
75 #include <visp/vpDebug.h>
76 #include <visp/vpAROgre.h>
77 
78 // List of allowed command line options
79 #define GETOPTARGS "ci:p:h"
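// In this getopt-style option string, a letter followed by ':' expects an
// argument ("i:" and "p:"), while a bare letter ("c", "h") is a simple flag.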
80 
92 void usage(const char *name, const char *badparam, std::string ipath, std::string ppath)
93 {
94  fprintf(stdout, "\n\
95 Test augmented reality using the vpAROgre class.\n\
96 \n\
97 SYNOPSIS\n\
98  %s [-i <test image path>] [-p <personal image path>]\n\
99  [-c] [-h]\n", name);
100 
101  fprintf(stdout, "\n\
102 OPTIONS: Default\n\
103  -i <input image path> %s\n\
104  Set image input path.\n\
105  From this path read images \n\
106  \"ViSP-images/mire-2/image.%%04d.pgm\". These \n\
107  images come from ViSP-images-x.y.z.tar.gz available \n\
108  on the ViSP website.\n\
109  Setting the VISP_INPUT_IMAGE_PATH environment\n\
110  variable produces the same behaviour as using\n\
111  this option.\n\
112  \n\
113  -p <personal image path> %s\n\
114  Specify a personal sequence containing images \n\
115  to process.\n\
116  By image sequence, we mean one file per image.\n\
117  The following image file formats are supported: PNM (PGM P5,\n\
118  PPM P6). The format is selected by analysing \n\
119  the filename extension.\n\
120  Example : \"/Temp/ViSP-images/cube/image.%%04d.pgm\"\n\
121  %%04d is for the image numbering.\n\
122 \n\
123  -c\n\
124  Disable the mouse click. Useful to automate the \n\
125  execution of this program without human intervention.\n\
126 \n\
127  -h\n\
128  Print the help.\n",
129  ipath.c_str(), ppath.c_str());
130 
131  if (badparam)
132  fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
133 }
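// Typical invocations (the paths below are placeholders, not shipped defaults):
//   ./AROgre -i /path/to/ViSP-images          use the standard test sequence
//   ./AROgre -p /tmp/cube/image.%04d.pgm      use a personal image sequence
//   ./AROgre -c                               run without waiting for mouse clicks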
147 bool getOptions(int argc, const char **argv, std::string &ipath,
148  std::string &ppath, bool &click_allowed)
149 {
150  const char *optarg;
151  int c;
152  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg)) > 1) {
153 
154  switch (c) {
155  case 'c': click_allowed = false; break;
156  case 'i': ipath = optarg; break;
157  case 'p': ppath = optarg; break;
158  case 'h': usage(argv[0], NULL, ipath, ppath);
159  return false; break;
160 
161  default:
162  usage(argv[0], optarg, ipath, ppath);
163  return false; break;
164  }
165  }
166 
167  if ((c == 1) || (c == -1)) {
168  // standalone param or error
169  usage(argv[0], NULL, ipath, ppath);
170  std::cerr << "ERROR: " << std::endl;
171  std::cerr << " Bad argument " << optarg << std::endl << std::endl;
172  return false;
173  }
174 
175  return true;
176 }
177 
178 
179 #ifndef DOXYGEN_SHOULD_SKIP_THIS
180 
181 class vpAROgreExample : public vpAROgre
182 {
183 public:
184  // The constructor doesn't change here
185  vpAROgreExample(const vpCameraParameters &mcam = vpCameraParameters(),
186  unsigned int width = 640, unsigned int height = 480,
187  const char *resourcePath=NULL)
188  : vpAROgre(mcam, width, height){
189  // Direction vectors
190  if (resourcePath) mResourcePath = resourcePath;
191  std::cout << "mResourcePath: " << mResourcePath<< std::endl;
192  vecDevant = Ogre::Vector3(0,-1,0);
193  }
194 
195 protected :
196 
197  // Attributes
198  // Forward direction vector used to move the robot
199  Ogre::Vector3 vecDevant;
200  // Animation attribute
201  Ogre::AnimationState * mAnimationState;
202  // The entity representing the robot
203  Ogre::Entity* robot;
204 
205  // Our scene will just be a plane
206  void createScene()
207  {
208  // Lights
209  mSceneMgr->setAmbientLight(Ogre::ColourValue((float)0.6,(float)0.6,(float)0.6)); // Default ambient lighting value
210  Ogre::Light * light = mSceneMgr->createLight();
211  light->setDiffuseColour(1.0,1.0,1.0); // scaled RGB values
212  light->setSpecularColour(1.0,1.0,1.0); // scaled RGB values
213  // Point light
214  light->setPosition(-5, -5, 10);
215  light->setType(Ogre::Light::LT_POINT);
216  light->setAttenuation((Ogre::Real)100, (Ogre::Real)1.0, (Ogre::Real)0.045, (Ogre::Real)0.0075);
217  // Shadows
218  light->setCastShadows(true);
219 
220  // Create the Entity
221  robot = mSceneMgr->createEntity("Robot", "robot.mesh");
222  // Attach robot to scene graph
223  Ogre::SceneNode* RobotNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Robot");
224  RobotNode->attachObject(robot);
225  RobotNode->scale((Ogre::Real)0.001,(Ogre::Real)0.001,(Ogre::Real)0.001);
226  RobotNode->pitch(Ogre::Degree(90));
227  RobotNode->yaw(Ogre::Degree(-90));
228  robot->setCastShadows(true);
229  mSceneMgr->setShadowTechnique(Ogre::SHADOWTYPE_STENCIL_MODULATIVE);
230 
231  // Add an animation
232  // Set the good animation
233  mAnimationState = robot->getAnimationState( "Idle" );
234  // Start over when finished
235  mAnimationState->setLoop( true );
236  // Animation enabled
237  mAnimationState->setEnabled( true );
238 
239  // Add a ground
240  Ogre::Plane plan;
241  plan.d = 0;
242  plan.normal = Ogre::Vector3::UNIT_Z;
243  Ogre::MeshManager::getSingleton().createPlane("sol",Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, plan, (Ogre::Real)0.22, (Ogre::Real)0.16, 10, 10, true, 1, 1, 1);
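// In Ogre::MeshManager::createPlane() the arguments after the plane are its
// width and height (here 0.22 m x 0.16 m), the number of x/y segments, whether
// normals are generated, the number of texture coordinate sets, and the u/v
// tiling factors.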
244  Ogre::Entity* ent = mSceneMgr->createEntity("Entitesol", "sol");
245  Ogre::SceneNode* PlaneNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Entitesol");
246  PlaneNode->attachObject(ent);
247  ent->setMaterialName("Examples/GrassFloor");
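// Note: "robot.mesh" and the "Examples/GrassFloor" material are expected to come
// from the standard Ogre sample media, reachable through the resource paths
// declared in resources.cfg (see mResourcePath above).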
248  }
249 
250  bool customframeEnded(const Ogre::FrameEvent& evt) {
251  // Update animation
252  // To move, we add it the time since last frame
253  mAnimationState->addTime( evt.timeSinceLastFrame );
254  return true;
255  }
256 
257 #ifdef VISP_HAVE_OIS
258  bool processInputEvent(const Ogre::FrameEvent& /*evt*/) {
259  mKeyboard->capture();
260  Ogre::Matrix3 rotmy;
261  double angle = -M_PI/8;
262  if(mKeyboard->isKeyDown(OIS::KC_ESCAPE))
263  return false;
264 
265  // Event telling that we will have to move, setting the animation to "Walk"; if false, the animation goes back to "Idle"
266  bool event = false;
267  // Check entries
268  if(mKeyboard->isKeyDown(OIS::KC_Z) || mKeyboard->isKeyDown(OIS::KC_UP)){
269  mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition()+(Ogre::Real)0.003*vecDevant);
270  event = true;
271  }
272  if(mKeyboard->isKeyDown(OIS::KC_S) || mKeyboard->isKeyDown(OIS::KC_DOWN)){
273  mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition()-(Ogre::Real)0.003*vecDevant);
274  event = true;
275  }
276  if(mKeyboard->isKeyDown(OIS::KC_Q) || mKeyboard->isKeyDown(OIS::KC_LEFT)){
277  rotmy = Ogre::Matrix3((Ogre::Real)cos(-angle), (Ogre::Real)sin(-angle), 0,
278  (Ogre::Real)(-sin(-angle)), (Ogre::Real)cos(-angle),0,
279  0,0,1);
280  vecDevant=vecDevant*rotmy;
281  mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)(-angle)));
282  event = true;
283  }
284  if(mKeyboard->isKeyDown(OIS::KC_D) || mKeyboard->isKeyDown(OIS::KC_RIGHT)){
285  rotmy = Ogre::Matrix3((Ogre::Real)cos(angle), (Ogre::Real)sin(angle), 0,
286  (Ogre::Real)(-sin(angle)), (Ogre::Real)cos(angle),0,
287  0,0,1);
288  vecDevant=vecDevant*rotmy;
289  mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)angle));
290  event = true;
291  }
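// Both blocks above build a rotation matrix about the Z axis by ±M_PI/8 and
// apply it to the row vector vecDevant (vecDevant * rotmy), while the same
// angle is applied as a yaw to the "Robot" scene node, so the robot keeps
// walking along its own forward direction after turning.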
292 
293  // Play the right animation
294  if(event){
295  mAnimationState = robot->getAnimationState("Walk");
296  }
297  else mAnimationState = robot->getAnimationState( "Idle" );
298 
299  // Start over when finished
300  mAnimationState->setLoop( true );
301  // Animation enabled
302  mAnimationState->setEnabled( true );
303 
304  return true;
305  }
306 #endif
307 };
308 
313 void computeInitialPose(vpCameraParameters *mcam, vpImage<unsigned char> &I,
314  vpPose * mPose, vpDot2 *md, vpImagePoint *mcog,
315  vpHomogeneousMatrix *cmo, vpPoint *mP,
316  const bool &opt_click_allowed)
317 {
318  // ---------------------------------------------------
319  // Code inspired from ViSP example of camera pose
320  // ----------------------------------------------------
321  bool opt_display = true;
322 
323 #if defined(VISP_HAVE_X11) && ! defined(APPLE)
324  // Using vpDisplayX produces an error on OSX: ‘typedef int Cursor’
325  // /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
326  // declaration as ‘typedef XID Cursor’. That is why it should not be
327  // used on APPLE platforms
328  vpDisplayX display;
329 #elif defined VISP_HAVE_GTK
330  vpDisplayGTK display;
331 #elif defined VISP_HAVE_GDI
332  vpDisplayGDI display;
333 #elif defined VISP_HAVE_OPENCV
334  vpDisplayOpenCV display;
335 #elif defined VISP_HAVE_D3D9
336  vpDisplayD3D display;
337 #endif
338 
339  for (unsigned int i=0 ; i < 4 ; i++)
340  {
341  if (opt_display) {
342  md[i].setGraphics(true) ;
343  }
344  else {
345  md[i].setGraphics(false) ;
346  }
347  }
348 
349  if (opt_display) {
350  try{
351  // Display size is automatically defined by the image (I) size
352  display.init(I,100,100,"Preliminary Pose Calculation");
353  // display the image
354  // The image class has a member that specifies a pointer toward
355  // the display that has been initialized in the display declaration,
356  // therefore it is no longer necessary to make a reference to the
357  // display variable.
358  vpDisplay::display(I) ;
359  //Flush the display
360  vpDisplay::flush(I) ;
361 
362  }
363  catch(...)
364  {
365  vpERROR_TRACE("Error while displaying the image") ;
366  return ;
367  }
368  }
369 
370  std::cout<<"************************************************************************************"<<std::endl;
371  std::cout<<"*************************** Preliminary Pose Calculation ***************************"<<std::endl;
372  std::cout<<"****************************** Click on the 4 dots *******************************"<<std::endl;
373  std::cout<<"********Dot1 : (-x,-y,0), Dot2 : (x,-y,0), Dot3 : (x,y,0), Dot4 : (-x,y,0)**********"<<std::endl;
374  std::cout<<"************************************************************************************"<<std::endl;
375 
376  try{
377  vpImagePoint ip[4];
378  if (! opt_click_allowed) {
379  ip[0].set_i( 265 );
380  ip[0].set_j( 93 );
381  ip[1].set_i( 248 );
382  ip[1].set_j( 242 );
383  ip[2].set_i( 166 );
384  ip[2].set_j( 215 );
385  ip[3].set_i( 178 );
386  ip[3].set_j( 85 );
387  }
388  for(unsigned int i=0;i<4;i++) {
389  // by using setGraphics, we request to see the edges of the dot
390  // in red on the screen.
391  // It uses the overlay image plane.
392  // The drawback of this setting is that it is time consuming
393 
394  md[i].setGraphics(true) ;
395  md[i].setGrayLevelPrecision(0.7);
396  md[i].setSizePrecision(0.5);
397 
398  for(unsigned int j = 0;j<i;j++)
399  md[j].display(I) ;
400 
401  // flush the display buffer
402  vpDisplay::flush(I);
403  try{
404  if (opt_click_allowed)
405  md[i].initTracking(I);
406  else
407  md[i].initTracking(I, ip[i]);
408  }
409  catch(...){
410  }
411 
412  mcog[i] = md[i].getCog();
413  // an exception is thrown by the track method if
414  // - the dot is lost
415  // - the number of pixels is too small
416  // - too many pixels are detected (this is usual when a "big" specularity
417  // occurs). The threshold can be modified using the
418  // setNbMaxPoint(int) method
419  if (opt_display) {
420  md[i].display(I) ;
421  // flush the display buffer
422  vpDisplay::flush(I) ;
423  }
424  }
425  }
426  catch(vpException e){
427  vpERROR_TRACE("Error while tracking dots") ;
428  vpCTRACE << e;
429  return;
430  }
431 
432  if (opt_display)
433  {
434  // display a red cross (size 10) in the image at the dot center
435  // of gravity location
436  //
437  // WARNING
438  // in the vpDisplay class members, when pixel coordinates
439  // are considered, the first element is the row index and the second
440  // is the column index:
441  // vpDisplay::displayCross(Image, row index, column index, size, color)
442  // therefore u and v are inverted wrt the vpDot specification
443  // Alternatively, to avoid this problem another set of members has
444  // been defined in the vpDisplay class.
445  // If the method name is postfixed with _uv the specification is:
446  // vpDisplay::displayCross_uv(Image, column index, row index, size, color)
447 
448  for (unsigned int i=0 ; i < 4 ; i++)
449  vpDisplay::displayCross(I, mcog[i], 10, vpColor::red) ;
450 
451  // flush the X11 buffer
452  vpDisplay::flush(I) ;
453  }
454 
455  // --------------------------------------------------------
456  // Now we will compute the pose
457  // --------------------------------------------------------
458 
459  // the list of points is cleared (in case that was not done before)
460  mPose->clearPoint() ;
461 
462  // we set the 3D point coordinates (in meters!) in the object/world frame
463  double l=0.06 ;
464  double L=0.07 ;
465  mP[0].setWorldCoordinates(-L,-l, 0 ) ; // (X,Y,Z)
466  mP[1].setWorldCoordinates(L,-l, 0 ) ;
467  mP[2].setWorldCoordinates(L,l, 0 ) ;
468  mP[3].setWorldCoordinates(-L,l, 0 ) ;
469 
470  // pixel-> meter conversion
471  for (unsigned int i=0 ; i < 4 ; i++)
472  {
473  // u[i], v[i] are expressed in pixels
474  // conversion to meters is achieved using
475  // x = (u-u0)/px
476  // y = (v-v0)/py
477  // where px, py, u0, v0 are the intrinsic camera parameters
478  double x=0, y=0;
479  vpPixelMeterConversion::convertPoint(*mcam, mcog[i], x,y) ;
480  mP[i].set_x(x) ;
481  mP[i].set_y(y) ;
482  }
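// As an illustration (not values from this program), with u0 = 160, v0 = 120
// and px = py = 600, a dot whose centre of gravity is at pixel (u, v) = (220, 150)
// maps to x = (220 - 160) / 600 = 0.1 and y = (150 - 120) / 600 = 0.05 in
// normalized metric coordinates.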
483 
484 
485  // The pose structure is built; we fill the point list with the set of points
486  // whose 2D and 3D world coordinates are both known
487  for (unsigned int i=0 ; i < 4 ; i++)
488  {
489  mPose->addPoint(mP[i]) ; // and added to the pose computation point list
490  }
491 
492  // compute the initial pose using the Lagrange method followed by a non-linear
493  // minimisation method
494 
495  // Pose by Lagrange: it provides an initialization of the pose
496  mPose->computePose(vpPose::LAGRANGE, *cmo) ;
497  // the pose is now refined using the virtual visual servoing approach
498  // Warning: cMo needs to be initialized otherwise it may diverge
499  mPose->computePose(vpPose::VIRTUAL_VS, *cmo) ;
500 
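// For a quick sanity check, the estimated pose can be inspected at this point,
// for example (sketch): std::cout << *cmo << std::endl; or the translation part
// can be retrieved with vpTranslationVector t; cmo->extract(t);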
501  // Display briefly just to have a glimpse at the ViSP pose
502  // while(cpt<500){
503  if( opt_display ){
504  // Display the computed pose
505  mPose->display(I,*cmo,*mcam, 0.05, vpColor::red) ;
506  vpDisplay::flush(I) ;
507  vpTime::wait(800);
508  }
509 }
510 
511 #endif
512 
513 int main(int argc, const char **argv)
514 {
515  std::string env_ipath;
516  std::string opt_ipath;
517  std::string ipath;
518  std::string opt_ppath;
519  std::string dirname;
520  std::string filename;
521  bool opt_click_allowed = true;
522 
523  // Get the VISP_INPUT_IMAGE_PATH environment variable value
524  char *ptenv = getenv("VISP_INPUT_IMAGE_PATH");
525  if (ptenv != NULL)
526  env_ipath = ptenv;
527 
528  // Set the default input path
529  if (! env_ipath.empty())
530  ipath = env_ipath;
531 
532 
533  // Read the command line options
534  if (getOptions(argc, argv, opt_ipath, opt_ppath, opt_click_allowed) == false) {
535  exit (-1);
536  }
537 
538  // Get the option values
539  if (!opt_ipath.empty())
540  ipath = opt_ipath;
541 
542  // Compare ipath and env_ipath. If they differ, we take into account
543  // the input path coming from the command line option
544  if (!opt_ipath.empty() && !env_ipath.empty() && opt_ppath.empty()) {
545  if (ipath != env_ipath) {
546  std::cout << std::endl
547  << "WARNING: " << std::endl;
548  std::cout << " Since -i <visp image path=" << ipath << "> "
549  << " is different from VISP_IMAGE_PATH=" << env_ipath << std::endl
550  << " we skip the environment variable." << std::endl;
551  }
552  }
553 
554  // Test if an input path is set
555  if (opt_ipath.empty() && env_ipath.empty() && opt_ppath.empty() ){
556  usage(argv[0], NULL, ipath, opt_ppath);
557  std::cerr << std::endl
558  << "ERROR:" << std::endl;
559  std::cerr << " Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH "
560  << std::endl
561  << " environment variable to specify the location of the " << std::endl
562  << " image path where test images are located." << std::endl
563  << " Use -p <personal image path> option if you want to "<<std::endl
564  << " use personal images." << std::endl
565  << std::endl;
566 
567  exit(-1);
568  }
569 
570  std::ostringstream s;
571 
572  if (opt_ppath.empty()){
573  // Set the path location of the image sequence
574  dirname = ipath + vpIoTools::path("/ViSP-images/mire-2/");
575 
576  // Build the name of the image file
577 
578  s.setf(std::ios::right, std::ios::adjustfield);
579  s << "image.%04d.pgm";
580  filename = dirname + s.str();
581  }
582  else {
583  filename = opt_ppath;
584  }
585 
586  //We will read a sequence of images
587  vpVideoReader grabber;
588  grabber.setFirstFrameIndex(1);
589  grabber.setFileName(filename.c_str());
590  // Grey level image associated to a display in the initial pose computation
591  vpImage<unsigned char> Idisplay;
592  // Grey level image to track points
593  vpImage<unsigned char> I;
594  // RGBa image to get background
595  vpImage<vpRGBa> IC;
596  // Homogeneous matrix representing the pose cMo of the object in the camera frame
597  vpHomogeneousMatrix cmo;
598 
599  // Variables used for pose computation purposes
600  vpPose mPose;
601  vpDot2 md[4];
602  vpImagePoint mcog[4];
603  vpPoint mP[4];
604 
605  // CameraParameters we got from calibration
606  // Keep u0 and v0 as center of the screen
607  vpCameraParameters mcam;
608 
609  // Open the PGM image sequence named "filename" on the disk, and put the
610  // first frame into the image structure Idisplay. Idisplay is initialized
611  // to the correct size
612  //
613  // the reader may throw various exceptions if, for example,
614  // the file does not exist, or if the memory cannot be allocated
615  try{
616  vpCTRACE << "Load: " << filename << std::endl;
617  grabber.open(Idisplay);
618  grabber.acquire(Idisplay);
619  vpCameraParameters mcamTmp(592,570,grabber.getWidth()/2,grabber.getHeight()/2);
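// vpCameraParameters(px, py, u0, v0): pixel focal lengths of 592 and 570, with
// the principal point placed at the image centre (see the comment above about
// keeping u0 and v0 as the centre of the screen).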
620  // Compute the initial pose of the camera
621  computeInitialPose(&mcamTmp, Idisplay, &mPose, md, mcog, &cmo, mP,
622  opt_click_allowed);
623  // Close the framegrabber
624  grabber.close();
625 
626  // Associate the grabber to the RGBa image
627  grabber.open(IC);
628  mcam.init(mcamTmp);
629  }
630  catch(...)
631  {
632  // an exception is thrown if the image sequence could not be read;
633  // here this will result in the end of the program
634  // Note that another error message may have been printed by the reader
635  // to give more information about the error
636  std::cerr << std::endl
637  << "ERROR:" << std::endl;
638  std::cerr << " Cannot read " << filename << std::endl;
639  std::cerr << " Check your -i " << ipath << " option " << std::endl
640  << " or VISP_INPUT_IMAGE_PATH environment variable."
641  << std::endl;
642  exit(-1);
643  }
644 
645  // Create a vpAROgre object with color background
646  vpAROgreExample ogre(mcam, (unsigned int)grabber.getWidth(), (unsigned int)grabber.getHeight());
647  // Initialize it
648  ogre.init(IC);
649 
650  try
651  {
652  double t0 = vpTime::measureTimeMs();
653 
654  // Rendering loop
655  while(ogre.continueRendering() && !grabber.end()){
656  // Acquire a frame
657  grabber.acquire(IC);
658 
659  // Convert it to a grey level image for tracking purposes
660  vpImageConvert::convert(IC, I);
661 
662  // Update pose calculation
663  try{
664  // kill the point list
665  mPose.clearPoint() ;
666 
667  // track the dot
668  for (int i=0 ; i < 4 ; i++)
669  {
670  // track the point
671  md[i].track(I, mcog[i]) ;
672  md[i].setGrayLevelPrecision(0.90);
673  // pixel->meter conversion
674  {
675  double x=0, y=0;
676  vpPixelMeterConversion::convertPoint(mcam, mcog[i], x, y) ;
677  mP[i].set_x(x) ;
678  mP[i].set_y(y) ;
679  }
680 
681  // and added to the pose computation point list
682  mPose.addPoint(mP[i]) ;
683  }
684  // the pose structure has been updated
685 
686  // the pose is now updated using the virtual visual servoing approach
687  // Dementhon or Lagrange is no longer necessary, the pose at the
688  // previous iteration is sufficient
689  mPose.computePose(vpPose::VIRTUAL_VS, cmo);
690  }
691  catch(...){
692  vpERROR_TRACE("Error in tracking loop") ;
693  return 1;
694  }
695 
696  // Display with ogre
697  ogre.display(IC,cmo);
698 
699  // Compute and display the instantaneous frame rate
700  double t1 = vpTime::measureTimeMs();
701  std::cout << "\r> " << 1000 / (t1 - t0) << " fps" ;
702  t0 = t1;
703  }
704  // Close the grabber
705  grabber.close();
706  }
707  catch (Ogre::Exception& e)
708  {
709  std::cerr << "Exception:\n";
710  std::cerr << e.getFullDescription().c_str() << "\n";
711  return 1;
712  }
713  catch (...)
714  {
715  std::cerr << "Exception: " << "\n";
716  return 1;
717  }
718 
719  return EXIT_SUCCESS;
720 }
721 #else // VISP_HAVE_OGRE && VISP_HAVE_DISPLAY
722 int
723 main()
724 {
725  std::cout << "You should install Ogre3D to run this example..." << std::endl;
726 }
727 #endif