ViSP  2.9.0
AROgre.cpp
1 /****************************************************************************
2  *
3  * $Id: AROgre.cpp 4604 2014-01-21 14:15:23Z fspindle $
4  *
5  * This file is part of the ViSP software.
6  * Copyright (C) 2005 - 2014 by INRIA. All rights reserved.
7  *
8  * This software is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU General Public License
10  * ("GPL") version 2 as published by the Free Software Foundation.
11  * See the file LICENSE.txt at the root directory of this source
12  * distribution for additional information about the GNU GPL.
13  *
14  * For using ViSP with software that can not be combined with the GNU
15  * GPL, please contact INRIA about acquiring a ViSP Professional
16  * Edition License.
17  *
18  * See http://www.irisa.fr/lagadic/visp/visp.html for more information.
19  *
20  * This software was developed at:
21  * INRIA Rennes - Bretagne Atlantique
22  * Campus Universitaire de Beaulieu
23  * 35042 Rennes Cedex
24  * France
25  * http://www.irisa.fr/lagadic
26  *
27  * If you have questions regarding the use of this file, please contact
28  * INRIA at visp@inria.fr
29  *
30  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
31  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
32  *
33  *
34  * Description:
35  * Implementation of a simple augmented reality application using the vpAROgre
36  * class.
37  *
38  * Authors:
39  * Bertrand Delabarre
40  *
41  *****************************************************************************/
42 
50 #include <visp/vpConfig.h>
51 #include <iostream>
52 
53 //#if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_OPENCV) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_D3D9) || defined(VISP_HAVE_GTK) || (defined(VISP_HAVE_X11) && ! defined(APPLE)))
54 #if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_OPENCV) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_D3D9) || defined(VISP_HAVE_GTK) || (defined(VISP_HAVE_X11) && ! (defined(__APPLE__) && defined(__MACH__))))
55 
56 //#if defined(VISP_HAVE_X11) && ! defined(APPLE)
57 #if defined(VISP_HAVE_X11) && ! (defined(__APPLE__) && defined(__MACH__))
58 // produce an error on OSX: ‘typedef int Cursor’
59 // /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
60 // declaration as ‘typedef XID Cursor’. That's why it should not be
61 // used on APPLE platforms
62 # include <visp/vpDisplayX.h>
63 #endif
#include <visp/vpDisplayGTK.h>
#include <visp/vpDisplayGDI.h>
#include <visp/vpDisplayOpenCV.h>
#include <visp/vpDisplayD3D.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpImageConvert.h>
#include <visp/vpPose.h>
#include <visp/vpPoint.h>
#include <visp/vpImagePoint.h>
#include <visp/vpDot2.h>
#include <visp/vpPixelMeterConversion.h>
#include <visp/vpVideoReader.h>
#include <visp/vpParseArgv.h>
#include <visp/vpIoTools.h>
#include <visp/vpDebug.h>
#include <visp/vpAROgre.h>
78 
79 // List of allowed command line options
80 #define GETOPTARGS "ci:p:h"
81 
/*!
  Print the program options on stdout.

  \param name : Program name (argv[0]).
  \param badparam : Bad parameter name to report, or NULL if none.
  \param ipath : Current input image path (printed as the default of -i).
  \param ppath : Current personal image path (printed as the default of -p).
*/
void usage(const char *name, const char *badparam, std::string ipath, std::string ppath)
{
 fprintf(stdout, "\n\
Test augmented reality using the vpAROgre class.\n\
\n\
SYNOPSIS\n\
 %s [-i <test image path>] [-p <personal image path>]\n\
 [-c] [-h]\n", name);

 fprintf(stdout, "\n\
OPTIONS: Default\n\
 -i <input image path> %s\n\
 Set image input path.\n\
 From this path read images \n\
 \"ViSP-images/mire-2/image.%%04d.pgm\". These \n\
 images come from ViSP-images-x.y.z.tar.gz available \n\
 on the ViSP website.\n\
 Setting the VISP_INPUT_IMAGE_PATH environment\n\
 variable produces the same behaviour than using\n\
 this option.\n\
 \n\
 -p <personal image path> %s\n\
 Specify a personal sequence containing images \n\
 to process.\n\
 By image sequence, we mean one file per image.\n\
 The following image file formats PNM (PGM P5, PPM P6)\n\
 are supported. The format is selected by analysing \n\
 the filename extension.\n\
 Example : \"/Temp/ViSP-images/cube/image.%%04d.pgm\"\n\
 %%04d is for the image numbering.\n\
\n\
 -c\n\
 Disable the mouse click. Useful to automaze the \n\
 execution of this program without humain intervention.\n\
\n\
 -h\n\
 Print the help.\n",
 ipath.c_str(), ppath.c_str());

 if (badparam)
 fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
148 bool getOptions(int argc, const char **argv, std::string &ipath,
149  std::string &ppath, bool &click_allowed)
150 {
151  const char *optarg;
152  int c;
153  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg)) > 1) {
154 
155  switch (c) {
156  case 'c': click_allowed = false; break;
157  case 'i': ipath = optarg; break;
158  case 'p': ppath = optarg; break;
159  case 'h': usage(argv[0], NULL, ipath, ppath);
160  return false; break;
161 
162  default:
163  usage(argv[0], optarg, ipath, ppath);
164  return false; break;
165  }
166  }
167 
168  if ((c == 1) || (c == -1)) {
169  // standalone param or error
170  usage(argv[0], NULL, ipath, ppath);
171  std::cerr << "ERROR: " << std::endl;
172  std::cerr << " Bad argument " << optarg << std::endl << std::endl;
173  return false;
174  }
175 
176  return true;
177 }
178 
179 
180 #ifndef DOXYGEN_SHOULD_SKIP_THIS
181 
/*!
  Augmented-reality viewer specialized for this example: extends vpAROgre with
  a simple scene (an animated robot mesh standing on a textured ground plane)
  and, when OIS is available, keyboard control to walk the robot around.
*/
class vpAROgreExample : public vpAROgre
{
public:
  // The constructor doesn't change here: it forwards the camera parameters
  // and window size to vpAROgre and initializes the example's own members.
  vpAROgreExample(const vpCameraParameters &mcam = vpCameraParameters(),
                  unsigned int width = 640, unsigned int height = 480,
                  const char *resourcePath=NULL)
    : vpAROgre(mcam, width, height){
    // Direction vectors: the robot initially faces the -Y direction
    if (resourcePath) mResourcePath = resourcePath;
    std::cout << "mResourcePath: " << mResourcePath<< std::endl;
    vecDevant = Ogre::Vector3(0,-1,0);
    robot = NULL;
    mAnimationState = NULL;
  }

protected :

  // Attributes
  // Vector to move: current forward ("devant") direction of the robot
  Ogre::Vector3 vecDevant;
  // Animation attribute: the currently playing animation ("Idle" or "Walk")
  Ogre::AnimationState * mAnimationState;
  // The entity representing the robot
  Ogre::Entity* robot;

  // Our scene will just be a plane with an animated robot standing on it
  void createScene()
  {
    // Lights
    mSceneMgr->setAmbientLight(Ogre::ColourValue((float)0.6,(float)0.6,(float)0.6)); // Default value of lighting
    Ogre::Light * light = mSceneMgr->createLight();
    light->setDiffuseColour(1.0,1.0,1.0); // scaled RGB values
    light->setSpecularColour(1.0,1.0,1.0); // scaled RGB values
    // Point light
    light->setPosition(-5, -5, 10);
    light->setType(Ogre::Light::LT_POINT);
    light->setAttenuation((Ogre::Real)100, (Ogre::Real)1.0, (Ogre::Real)0.045, (Ogre::Real)0.0075);
    // Shadows
    light->setCastShadows(true);

    // Create the Entity
    robot = mSceneMgr->createEntity("Robot", "robot.mesh");
    // Attach robot to scene graph
    Ogre::SceneNode* RobotNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Robot");
    RobotNode->attachObject(robot);
    // Scale the mesh down to the (metric) scene and orient it upright
    RobotNode->scale((Ogre::Real)0.001,(Ogre::Real)0.001,(Ogre::Real)0.001);
    RobotNode->pitch(Ogre::Degree(90));
    RobotNode->yaw(Ogre::Degree(-90));
    robot->setCastShadows(true);
    mSceneMgr->setShadowTechnique(Ogre::SHADOWTYPE_STENCIL_MODULATIVE);

    // Add an animation
    // Set the good animation
    mAnimationState = robot->getAnimationState( "Idle" );
    // Start over when finished
    mAnimationState->setLoop( true );
    // Animation enabled
    mAnimationState->setEnabled( true );

    // Add a ground: a 0.22 x 0.16 plane in Z=0, textured as grass
    Ogre::Plane plan;
    plan.d = 0;
    plan.normal = Ogre::Vector3::UNIT_Z;
    Ogre::MeshManager::getSingleton().createPlane("sol",Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, plan, (Ogre::Real)0.22, (Ogre::Real)0.16, 10, 10, true, 1, 1, 1);
    Ogre::Entity* ent = mSceneMgr->createEntity("Entitesol", "sol");
    Ogre::SceneNode* PlaneNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Entitesol");
    PlaneNode->attachObject(ent);
    ent->setMaterialName("Examples/GrassFloor");
  }

  // Advance the current animation by the elapsed frame time.
  bool customframeEnded(const Ogre::FrameEvent& evt) {
    // Update animation
    // To move, we add it the time since last frame
    mAnimationState->addTime( evt.timeSinceLastFrame );
    return true;
  }

#ifdef VISP_HAVE_OIS
  // Poll the keyboard and move/rotate the robot accordingly.
  // Returns false (stop rendering) when Escape is pressed.
  // NOTE(review): Z/Q together with the arrow keys suggests AZERTY key
  // bindings — confirm before remapping.
  bool processInputEvent(const Ogre::FrameEvent& /*evt*/) {
    mKeyboard->capture();
    Ogre::Matrix3 rotmy;
    double angle = -M_PI/8;
    if(mKeyboard->isKeyDown(OIS::KC_ESCAPE))
      return false;

    // Event telling that we will have to move, setting the animation to "walk", if false, animation goes to "Idle"
    bool event = false;
    // Check entries
    if(mKeyboard->isKeyDown(OIS::KC_Z) || mKeyboard->isKeyDown(OIS::KC_UP)){
      // Step forward along the current heading
      mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition()+(Ogre::Real)0.003*vecDevant);
      event = true;
    }
    if(mKeyboard->isKeyDown(OIS::KC_S) || mKeyboard->isKeyDown(OIS::KC_DOWN)){
      // Step backward
      mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition()-(Ogre::Real)0.003*vecDevant);
      event = true;
    }
    if(mKeyboard->isKeyDown(OIS::KC_Q) || mKeyboard->isKeyDown(OIS::KC_LEFT)){
      // Turn left: rotate the heading vector and yaw the scene node
      rotmy = Ogre::Matrix3((Ogre::Real)cos(-angle), (Ogre::Real)sin(-angle), 0,
                            (Ogre::Real)(-sin(-angle)), (Ogre::Real)cos(-angle),0,
                            0,0,1);
      vecDevant=vecDevant*rotmy;
      mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)(-angle)));
      event = true;
    }
    if(mKeyboard->isKeyDown(OIS::KC_D) || mKeyboard->isKeyDown(OIS::KC_RIGHT)){
      // Turn right
      rotmy = Ogre::Matrix3((Ogre::Real)cos(angle), (Ogre::Real)sin(angle), 0,
                            (Ogre::Real)(-sin(angle)), (Ogre::Real)cos(angle),0,
                            0,0,1);
      vecDevant=vecDevant*rotmy;
      mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)angle));
      event = true;
    }

    // Play the right animation
    if(event){
      mAnimationState = robot->getAnimationState("Walk");
    }
    else mAnimationState = robot->getAnimationState( "Idle" );

    // Start over when finished
    mAnimationState->setLoop( true );
    // Animation enabled
    mAnimationState->setEnabled( true );

    return true;
  }
#endif
};
311 
316 void computeInitialPose(vpCameraParameters *mcam, vpImage<unsigned char> &I,
317  vpPose * mPose, vpDot2 *md, vpImagePoint *mcog,
318  vpHomogeneousMatrix *cMo, vpPoint *mP,
319  const bool &opt_click_allowed)
320 {
321  // ---------------------------------------------------
322  // Code inspired from ViSP example of camera pose
323  // ----------------------------------------------------
324  bool opt_display = true;
325 
326 //#if defined(VISP_HAVE_X11) && ! defined(APPLE)
327 #if defined(VISP_HAVE_X11) && ! (defined(__APPLE__) && defined(__MACH__))
328  // produce an error on OSX: ‘typedef int Cursor’
329  // /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
330  // declaration as ‘typedef XID Cursor’. That's why it should not be
331  // used on APPLE platforms
332  vpDisplayX display;
333 #elif defined VISP_HAVE_GTK
334  vpDisplayGTK display;
335 #elif defined VISP_HAVE_GDI
336  vpDisplayGDI display;
337 #elif defined VISP_HAVE_OPENCV
338  vpDisplayOpenCV display;
339 #elif defined VISP_HAVE_D3D9
340  vpDisplayD3D display;
341 #endif
342  for (unsigned int i=0 ; i < 4 ; i++)
343  {
344  if (opt_display) {
345  md[i].setGraphics(true) ;
346  }
347  else {
348  md[i].setGraphics(false) ;
349  }
350  }
351 
352  if (opt_display) {
353  try{
354  // Display size is automatically defined by the image (I) size
355  display.init(I,100,100,"Preliminary Pose Calculation");
356  // display the image
357  // The image class has a member that specify a pointer toward
358  // the display that has been initialized in the display declaration
359  // therefore is is no longuer necessary to make a reference to the
360  // display variable.
361  vpDisplay::display(I) ;
362  //Flush the display
363  vpDisplay::flush(I) ;
364 
365  }
366  catch(...)
367  {
368  vpERROR_TRACE("Error while displaying the image") ;
369  return ;
370  }
371  }
372 
373  std::cout<<"************************************************************************************"<<std::endl;
374  std::cout<<"*************************** Preliminary Pose Calculation ***************************"<<std::endl;
375  std::cout<<"****************************** Click on the 4 dots *******************************"<<std::endl;
376  std::cout<<"********Dot1 : (-x,-y,0), Dot2 : (x,-y,0), Dot3 : (x,y,0), Dot4 : (-x,y,0)**********"<<std::endl;
377  std::cout<<"************************************************************************************"<<std::endl;
378 
379  try{
380  vpImagePoint ip[4];
381  if (! opt_click_allowed) {
382  ip[0].set_i( 265 );
383  ip[0].set_j( 93 );
384  ip[1].set_i( 248 );
385  ip[1].set_j( 242 );
386  ip[2].set_i( 166 );
387  ip[2].set_j( 215 );
388  ip[3].set_i( 178 );
389  ip[3].set_j( 85 );
390  }
391  for(unsigned int i=0;i<4;i++) {
392  // by using setGraphics, we request to see the edges of the dot
393  // in red on the screen.
394  // It uses the overlay image plane.
395  // The default of this setting is that it is time consumming
396 
397  md[i].setGraphics(true) ;
398  md[i].setGrayLevelPrecision(0.7);
399  md[i].setSizePrecision(0.5);
400 
401  for(unsigned int j = 0;j<i;j++)
402  md[j].display(I) ;
403 
404  // flush the display buffer
405  vpDisplay::flush(I);
406  try{
407  if (opt_click_allowed)
408  md[i].initTracking(I);
409  else
410  md[i].initTracking(I, ip[i]);
411  }
412  catch(...){
413  }
414 
415  mcog[i] = md[i].getCog();
416  // an expcetion is thrown by the track method if
417  // - dot is lost
418  // - the number of pixel is too small
419  // - too many pixels are detected (this is usual when a "big" specularity
420  // occurs. The threshold can be modified using the
421  // setNbMaxPoint(int) method
422  if (opt_display) {
423  md[i].display(I) ;
424  // flush the display buffer
425  vpDisplay::flush(I) ;
426  }
427  }
428  }
429  catch(vpException e){
430  vpERROR_TRACE("Error while tracking dots") ;
431  vpCTRACE << e;
432  return;
433  }
434 
435  if (opt_display)
436  {
437  // display a red cross (size 10) in the image at the dot center
438  // of gravity location
439  //
440  // WARNING
441  // in the vpDisplay class member's when pixel coordinates
442  // are considered the first element is the row index and the second
443  // is the column index:
444  // vpDisplay::displayCross(Image, row index, column index, size, color)
445  // therefore u and v are inverted wrt to the vpDot specification
446  // Alternatively, to avoid this problem another set of member have
447  // been defined in the vpDisplay class.
448  // If the method name is postfixe with _uv the specification is :
449  // vpDisplay::displayCross_uv(Image, column index, row index, size, color)
450 
451  for (unsigned int i=0 ; i < 4 ; i++)
452  vpDisplay::displayCross(I, mcog[i], 10, vpColor::red) ;
453 
454  // flush the X11 buffer
455  vpDisplay::flush(I) ;
456  }
457 
458  // --------------------------------------------------------
459  // Now we will compute the pose
460  // --------------------------------------------------------
461 
462  // the list of point is cleared (if that's not done before)
463  mPose->clearPoint() ;
464 
465  // we set the 3D points coordinates (in meter !) in the object/world frame
466  double l=0.06 ;
467  double L=0.07 ;
468  mP[0].setWorldCoordinates(-L,-l, 0 ) ; // (X,Y,Z)
469  mP[1].setWorldCoordinates(L,-l, 0 ) ;
470  mP[2].setWorldCoordinates(L,l, 0 ) ;
471  mP[3].setWorldCoordinates(-L,l, 0 ) ;
472 
473  // pixel-> meter conversion
474  for (unsigned int i=0 ; i < 4 ; i++)
475  {
476  // u[i]. v[i] are expressed in pixel
477  // conversion in meter is achieved using
478  // x = (u-u0)/px
479  // y = (v-v0)/py
480  // where px, py, u0, v0 are the intrinsic camera parameters
481  double x=0, y=0;
482  vpPixelMeterConversion::convertPoint(*mcam, mcog[i], x,y) ;
483  mP[i].set_x(x) ;
484  mP[i].set_y(y) ;
485  }
486 
487 
488  // The pose structure is build, we put in the point list the set of point
489  // here both 2D and 3D world coordinates are known
490  for (unsigned int i=0 ; i < 4 ; i++)
491  {
492  mPose->addPoint(mP[i]) ; // and added to the pose computation point list
493  }
494 
495  // compute the initial pose using Dementhon method followed by a non linear
496  // minimisation method
497 
498  // Pose by Lagrange it provides an initialization of the pose
499  mPose->computePose(vpPose::LAGRANGE, *cMo) ;
500  // the pose is now refined using the virtual visual servoing approach
501  // Warning: cMo needs to be initialized otherwise it may diverge
502  mPose->computePose(vpPose::VIRTUAL_VS, *cMo) ;
503 
504  // Display breifly just to have a glimpse a the ViSP pose
505  // while(cpt<500){
506  if( opt_display ){
507  // Display the computed pose
508  mPose->display(I,*cMo,*mcam, 0.05, vpColor::red) ;
509  vpDisplay::flush(I) ;
510  vpTime::wait(800);
511  }
512 }
513 
514 #endif
515 
516 int main(int argc, const char **argv)
517 {
518  try {
519  std::string env_ipath;
520  std::string opt_ipath;
521  std::string ipath;
522  std::string opt_ppath;
523  std::string dirname;
524  std::string filename;
525  bool opt_click_allowed = true;
526 
527  // Get the VISP_IMAGE_PATH environment variable value
528  char *ptenv = getenv("VISP_INPUT_IMAGE_PATH");
529  if (ptenv != NULL)
530  env_ipath = ptenv;
531 
532  // Set the default input path
533  if (! env_ipath.empty())
534  ipath = env_ipath;
535 
536 
537  // Read the command line options
538  if (getOptions(argc, argv, opt_ipath, opt_ppath, opt_click_allowed) == false) {
539  exit (-1);
540  }
541 
542  // Get the option values
543  if (!opt_ipath.empty())
544  ipath = opt_ipath;
545 
546  // Compare ipath and env_ipath. If they differ, we take into account
547  // the input path comming from the command line option
548  if (!opt_ipath.empty() && !env_ipath.empty() && opt_ppath.empty()) {
549  if (ipath != env_ipath) {
550  std::cout << std::endl
551  << "WARNING: " << std::endl;
552  std::cout << " Since -i <visp image path=" << ipath << "> "
553  << " is different from VISP_IMAGE_PATH=" << env_ipath << std::endl
554  << " we skip the environment variable." << std::endl;
555  }
556  }
557 
558  // Test if an input path is set
559  if (opt_ipath.empty() && env_ipath.empty() && opt_ppath.empty() ){
560  usage(argv[0], NULL, ipath, opt_ppath);
561  std::cerr << std::endl
562  << "ERROR:" << std::endl;
563  std::cerr << " Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH "
564  << std::endl
565  << " environment variable to specify the location of the " << std::endl
566  << " image path where test images are located." << std::endl
567  << " Use -p <personal image path> option if you want to "<<std::endl
568  << " use personal images." << std::endl
569  << std::endl;
570 
571  exit(-1);
572  }
573 
574  std::ostringstream s;
575 
576  if (opt_ppath.empty()){
577  // Set the path location of the image sequence
578  dirname = ipath + vpIoTools::path("/ViSP-images/mire-2/");
579 
580  // Build the name of the image file
581 
582  s.setf(std::ios::right, std::ios::adjustfield);
583  s << "image.%04d.pgm";
584  filename = dirname + s.str();
585  }
586  else {
587  filename = opt_ppath;
588  }
589 
590  //We will read a sequence of images
591  vpVideoReader grabber;
592  grabber.setFirstFrameIndex(1);
593  grabber.setFileName(filename.c_str());
594  // Grey level image associated to a display in the initial pose computation
595  vpImage<unsigned char> Idisplay;
596  // Grey level image to track points
598  // RGBa image to get background
599  vpImage<vpRGBa> IC;
600  // Matrix representing camera parameters
602 
603  // Variables used for pose computation purposes
604  vpPose mPose;
605  vpDot2 md[4];
606  vpImagePoint mcog[4];
607  vpPoint mP[4];
608 
609  // CameraParameters we got from calibration
610  // Keep u0 and v0 as center of the screen
611  vpCameraParameters mcam;
612 
613  // Read the PGM image named "filename" on the disk, and put the
614  // bitmap into the image structure I. I is initialized to the
615  // correct size
616  //
617  // exception readPGM may throw various exception if, for example,
618  // the file does not exist, or if the memory cannot be allocated
619  try{
620  vpCTRACE << "Load: " << filename << std::endl;
621  grabber.open(Idisplay);
622  grabber.acquire(Idisplay);
623  vpCameraParameters mcamTmp(592,570,grabber.getWidth()/2,grabber.getHeight()/2);
624  // Compute the initial pose of the camera
625  computeInitialPose(&mcamTmp, Idisplay, &mPose, md, mcog, &cMo, mP,
626  opt_click_allowed);
627  // Close the framegrabber
628  grabber.close();
629 
630  // Associate the grabber to the RGBa image
631  grabber.open(IC);
632  mcam.init(mcamTmp);
633  }
634  catch(...)
635  {
636  // an exception is thrown if an exception from readPGM has been caught
637  // here this will result in the end of the program
638  // Note that another error message has been printed from readPGM
639  // to give more information about the error
640  std::cerr << std::endl
641  << "ERROR:" << std::endl;
642  std::cerr << " Cannot read " << filename << std::endl;
643  std::cerr << " Check your -i " << ipath << " option " << std::endl
644  << " or VISP_INPUT_IMAGE_PATH environment variable."
645  << std::endl;
646  exit(-1);
647  }
648 
649  // Create a vpRAOgre object with color background
650  vpAROgreExample ogre(mcam, (unsigned int)grabber.getWidth(), (unsigned int)grabber.getHeight());
651  // Initialize it
652  ogre.init(IC);
653 
654  double t0 = vpTime::measureTimeMs();
655 
656  // Rendering loop
657  while(ogre.continueRendering() && !grabber.end()) {
658  // Acquire a frame
659  grabber.acquire(IC);
660 
661  // Convert it to a grey level image for tracking purpose
663 
664  // kill the point list
665  mPose.clearPoint() ;
666 
667  // track the dot
668  for (int i=0 ; i < 4 ; i++)
669  {
670  // track the point
671  md[i].track(I, mcog[i]) ;
672  md[i].setGrayLevelPrecision(0.90);
673  // pixel->meter conversion
674  {
675  double x=0, y=0;
676  vpPixelMeterConversion::convertPoint(mcam, mcog[i], x, y) ;
677  mP[i].set_x(x) ;
678  mP[i].set_y(y) ;
679  }
680 
681  // and added to the pose computation point list
682  mPose.addPoint(mP[i]) ;
683  }
684  // the pose structure has been updated
685 
686  // the pose is now updated using the virtual visual servoing approach
687  // Dementhon or lagrange is no longuer necessary, pose at the
688  // previous iteration is sufficient
689  mPose.computePose(vpPose::VIRTUAL_VS, cMo);
690 
691  // Display with ogre
692  ogre.display(IC,cMo);
693 
694  // Wait so that the video does not go too fast
695  double t1 = vpTime::measureTimeMs();
696  std::cout << "\r> " << 1000 / (t1 - t0) << " fps" ;
697  t0 = t1;
698  }
699  // Close the grabber
700  grabber.close();
701 
702  return 0;
703  }
704  catch(vpException e) {
705  std::cout << "Catch a ViSP exception: " << e << std::endl;
706  return 1;
707  }
708  catch(...) {
709  std::cout << "Catch an exception " << std::endl;
710  return 1;
711  }
712 }
713 #else // VISP_HAVE_OGRE && VISP_HAVE_DISPLAY
// Fallback entry point built when Ogre3D and/or a display library is missing.
int main()
{
  std::cout << "You should install Ogre3D or a display (GTK or OpenCV...) to run this example..." << std::endl;
  return 0; // explicit, though main() returns 0 implicitly
}
719 #endif
void init()
basic initialization with the default parameters
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
The class provides a data structure for the homogeneous matrices as well as a set of operations on th...
#define vpERROR_TRACE
Definition: vpDebug.h:395
virtual bool customframeEnded(const Ogre::FrameEvent &evt)
Definition: vpAROgre.cpp:542
Display for windows using GDI (available on any windows 32 platform).
Definition: vpDisplayGDI.h:132
Define the X11 console to display images.
Definition: vpDisplayX.h:152
Class that enables to manipulate easily a video file or a sequence of images. As it inherits from the...
error that can be emitted by ViSP classes.
Definition: vpException.h:76
static std::string path(const char *pathname)
Definition: vpIoTools.cpp:715
void set_x(const double x)
Set the point x coordinate in the image plane.
Definition: vpPoint.h:194
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
Point coordinates conversion from pixel coordinates to normalized coordinates in meter...
Implementation of an augmented reality viewer.
Definition: vpAROgre.h:90
static double measureTimeMs()
Definition: vpTime.cpp:86
static int wait(double t0, double t)
Definition: vpTime.cpp:149
This tracker is meant to track a blob (connex pixels with same gray level) on a vpImage.
Definition: vpDot2.h:127
void track(const vpImage< unsigned char > &I)
Definition: vpDot2.cpp:465
static void flush(const vpImage< unsigned char > &I)
Definition: vpDisplay.cpp:1994
static bool parse(int *argcPtr, const char **argv, vpArgvInfo *argTable, int flags)
Definition: vpParseArgv.cpp:79
static const vpColor red
Definition: vpColor.h:167
Class that defines what is a point.
Definition: vpPoint.h:65
#define vpCTRACE
Definition: vpDebug.h:341
vpImagePoint getCog() const
Definition: vpDot2.h:163
void open(vpImage< vpRGBa > &I)
Display for windows using Direct3D.
Definition: vpDisplayD3D.h:109
void setGrayLevelPrecision(const double &grayLevelPrecision)
Definition: vpDot2.cpp:788
void set_i(const double ii)
Definition: vpImagePoint.h:158
static void display(vpImage< unsigned char > &I, vpHomogeneousMatrix &cMo, vpCameraParameters &cam, double size, vpColor col=vpColor::none)
Definition: vpPose.cpp:586
unsigned int getWidth() const
Return the number of columns in the image.
virtual bool processInputEvent(const Ogre::FrameEvent &)
Definition: vpAROgre.h:283
static void display(const vpImage< unsigned char > &I)
Definition: vpDisplay.cpp:206
The vpDisplayOpenCV allows to display image using the opencv library.
virtual void displayCross(const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)=0
void display(const vpImage< unsigned char > &I, vpColor color=vpColor::red, unsigned int thickness=1) const
Definition: vpDot2.cpp:223
Class used for pose computation from N points (pose from point only).
Definition: vpPose.h:78
Generic class defining intrinsic camera parameters.
void set_y(const double y)
Set the point y coordinate in the image plane.
Definition: vpPoint.h:196
The vpDisplayGTK allows to display image using the GTK+ library version 1.2.
Definition: vpDisplayGTK.h:145
void acquire(vpImage< vpRGBa > &I)
void setFileName(const char *filename)
void init(vpImage< unsigned char > &I, int winx=-1, int winy=-1, const char *title=NULL)
void set_j(const double jj)
Definition: vpImagePoint.h:169
void setSizePrecision(const double &sizePrecision)
Definition: vpDot2.cpp:818
virtual void createScene(void)
Definition: vpAROgre.h:267
void initTracking(const vpImage< unsigned char > &I, unsigned int size=0)
Definition: vpDot2.cpp:266
void setFirstFrameIndex(const long first_frame)
void computePose(vpPoseMethodType methode, vpHomogeneousMatrix &cMo)
compute the pose for a given method
Definition: vpPose.cpp:386
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:92
void addPoint(const vpPoint &P)
Add a new point in this array.
Definition: vpPose.cpp:155
unsigned int getHeight() const
Return the number of rows in the image.
void setGraphics(const bool activate)
Definition: vpDot2.h:312
void setWorldCoordinates(const double ox, const double oy, const double oz)
Set the point world coordinates. We mean here the coordinates of the point in the object frame...
Definition: vpPoint.cpp:74
void clearPoint()
suppress all the point in the array of point
Definition: vpPose.cpp:133