// Visual Servoing Platform version 3.0.0
// AROgre.cpp
1 /****************************************************************************
2  *
3  * This file is part of the ViSP software.
4  * Copyright (C) 2005 - 2015 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU General Public License
8  * ("GPL") version 2 as published by the Free Software Foundation.
9  * See the file LICENSE.txt at the root directory of this source
10  * distribution for additional information about the GNU GPL.
11  *
12  * For using ViSP with software that can not be combined with the GNU
13  * GPL, please contact Inria about acquiring a ViSP Professional
14  * Edition License.
15  *
16  * See http://visp.inria.fr for more information.
17  *
18  * This software was developed at:
19  * Inria Rennes - Bretagne Atlantique
20  * Campus Universitaire de Beaulieu
21  * 35042 Rennes Cedex
22  * France
23  *
24  * If you have questions regarding the use of this file, please contact
25  * Inria at visp@inria.fr
26  *
27  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29  *
30  * Description:
31  * Implementation of a simple augmented reality application using the vpAROgre
32  * class.
33  *
34  * Authors:
35  * Bertrand Delabarre
36  *
37  *****************************************************************************/
38 
#include <visp3/core/vpConfig.h>
#include <iostream>

//#if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_OPENCV) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_D3D9) || defined(VISP_HAVE_GTK) || (defined(VISP_HAVE_X11) && ! defined(APPLE)))
#if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_OPENCV) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_D3D9) || defined(VISP_HAVE_GTK) || (defined(VISP_HAVE_X11) && ! (defined(__APPLE__) && defined(__MACH__))))

//#if defined(VISP_HAVE_X11) && ! defined(APPLE)
#if defined(VISP_HAVE_X11) && ! (defined(__APPLE__) && defined(__MACH__))
// produce an error on OSX: ‘typedef int Cursor’
// /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
// declaration as ‘typedef XID Cursor’. That's why it should not be
// used on APPLE platforms
#  include <visp3/gui/vpDisplayX.h>
#endif
#include <visp3/ar/vpAROgre.h>
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpDebug.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpImagePoint.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/gui/vpDisplayD3D.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/vision/vpPose.h>

// List of allowed command line options
#define GETOPTARGS "ci:p:h"
77 
/*!
  Print the program usage on stdout.

  \param name : Program name (argv[0]).
  \param badparam : Name of the offending parameter; when non NULL an error
  message mentioning it is appended after the usage text.
  \param ipath : Default input image path shown in the help.
  \param ppath : Default personal image path shown in the help.
*/
void usage(const char *name, const char *badparam, std::string ipath, std::string ppath)
{
  fprintf(stdout, "\n\
Test augmented reality using the vpAROgre class.\n\
\n\
SYNOPSIS\n\
  %s [-i <test image path>] [-p <personal image path>]\n\
     [-c] [-h]\n", name);

  fprintf(stdout, "\n\
OPTIONS:                                               Default\n\
  -i <input image path>                                %s\n\
     Set image input path.\n\
     From this path read images \n\
     \"ViSP-images/mire-2/image.%%04d.pgm\". These \n\
     images come from ViSP-images-x.y.z.tar.gz available \n\
     on the ViSP website.\n\
     Setting the VISP_INPUT_IMAGE_PATH environment\n\
     variable produces the same behaviour as using\n\
     this option.\n\
 \n\
  -p <personal image path>                             %s\n\
     Specify a personal sequence containing images \n\
     to process.\n\
     By image sequence, we mean one file per image.\n\
     The following image file formats PNM (PGM P5, PPM P6)\n\
     are supported. The format is selected by analysing \n\
     the filename extension.\n\
     Example : \"/Temp/ViSP-images/cube/image.%%04d.pgm\"\n\
     %%04d is for the image numbering.\n\
\n\
  -c\n\
     Disable the mouse click. Useful to automate the \n\
     execution of this program without human intervention.\n\
\n\
  -h\n\
     Print the help.\n",
          ipath.c_str(), ppath.c_str());

  if (badparam)
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
144 bool getOptions(int argc, const char **argv, std::string &ipath,
145  std::string &ppath, bool &click_allowed)
146 {
147  const char *optarg;
148  int c;
149  while ((c = vpParseArgv::parse(argc, argv, GETOPTARGS, &optarg)) > 1) {
150 
151  switch (c) {
152  case 'c': click_allowed = false; break;
153  case 'i': ipath = optarg; break;
154  case 'p': ppath = optarg; break;
155  case 'h': usage(argv[0], NULL, ipath, ppath);
156  return false; break;
157 
158  default:
159  usage(argv[0], optarg, ipath, ppath);
160  return false; break;
161  }
162  }
163 
164  if ((c == 1) || (c == -1)) {
165  // standalone param or error
166  usage(argv[0], NULL, ipath, ppath);
167  std::cerr << "ERROR: " << std::endl;
168  std::cerr << " Bad argument " << optarg << std::endl << std::endl;
169  return false;
170  }
171 
172  return true;
173 }
174 
175 
176 #ifndef DOXYGEN_SHOULD_SKIP_THIS
177 
178 class vpAROgreExample : public vpAROgre
179 {
180 public:
181  // The constructor doesn't change here
182  vpAROgreExample(const vpCameraParameters &mcam = vpCameraParameters(),
183  unsigned int width = 640, unsigned int height = 480,
184  const char *resourcePath=NULL)
185  : vpAROgre(mcam, width, height){
186  // Direction vectors
187  if (resourcePath) mResourcePath = resourcePath;
188  std::cout << "mResourcePath: " << mResourcePath<< std::endl;
189  vecDevant = Ogre::Vector3(0,-1,0);
190  robot = NULL;
191  mAnimationState = NULL;
192  }
193 
194 protected :
195 
196  // Attributes
197  // Vector to move
198  Ogre::Vector3 vecDevant;
199  // Animation attribute
200  Ogre::AnimationState * mAnimationState;
201  // The entity representing the robot
202  Ogre::Entity* robot;
203 
204  // Our scene will just be a plane
205  void createScene()
206  {
207  // Lumieres
208  mSceneMgr->setAmbientLight(Ogre::ColourValue((float)0.6,(float)0.6,(float)0.6)); // Default value of lightning
209  Ogre::Light * light = mSceneMgr->createLight();
210  light->setDiffuseColour(1.0,1.0,1.0); // scaled RGB values
211  light->setSpecularColour(1.0,1.0,1.0); // scaled RGB values
212  // Lumiere ponctuelle
213  light->setPosition(-5, -5, 10);
214  light->setType(Ogre::Light::LT_POINT);
215  light->setAttenuation((Ogre::Real)100, (Ogre::Real)1.0, (Ogre::Real)0.045, (Ogre::Real)0.0075);
216  //Ombres
217  light->setCastShadows(true);
218 
219  // Create the Entity
220  robot = mSceneMgr->createEntity("Robot", "robot.mesh");
221  // Attach robot to scene graph
222  Ogre::SceneNode* RobotNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Robot");
223  RobotNode->attachObject(robot);
224  RobotNode->scale((Ogre::Real)0.001,(Ogre::Real)0.001,(Ogre::Real)0.001);
225  RobotNode->pitch(Ogre::Degree(90));
226  RobotNode->yaw(Ogre::Degree(-90));
227  robot->setCastShadows(true);
228  mSceneMgr->setShadowTechnique(Ogre::SHADOWTYPE_STENCIL_MODULATIVE);
229 
230  // Add an animation
231  // Set the good animation
232  mAnimationState = robot->getAnimationState( "Idle" );
233  // Start over when finished
234  mAnimationState->setLoop( true );
235  // Animation enabled
236  mAnimationState->setEnabled( true );
237 
238  // Add a ground
239  Ogre::Plane plan;
240  plan.d = 0;
241  plan.normal = Ogre::Vector3::UNIT_Z;
242  Ogre::MeshManager::getSingleton().createPlane("sol",Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, plan, (Ogre::Real)0.22, (Ogre::Real)0.16, 10, 10, true, 1, 1, 1);
243  Ogre::Entity* ent = mSceneMgr->createEntity("Entitesol", "sol");
244  Ogre::SceneNode* PlaneNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Entitesol");
245  PlaneNode->attachObject(ent);
246  ent->setMaterialName("Examples/GrassFloor");
247  }
248 
249  bool customframeEnded(const Ogre::FrameEvent& evt) {
250  // Update animation
251  // To move, we add it the time since last frame
252  mAnimationState->addTime( evt.timeSinceLastFrame );
253  return true;
254  }
255 
256 #ifdef VISP_HAVE_OIS
257  bool processInputEvent(const Ogre::FrameEvent& /*evt*/) {
258  mKeyboard->capture();
259  Ogre::Matrix3 rotmy;
260  double angle = -M_PI/8;
261  if(mKeyboard->isKeyDown(OIS::KC_ESCAPE))
262  return false;
263 
264  // Event telling that we will have to move, setting the animation to "walk", if false, annimation goes to "Idle"
265  bool event = false;
266  // Check entries
267  if(mKeyboard->isKeyDown(OIS::KC_Z) || mKeyboard->isKeyDown(OIS::KC_UP)){
268  mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition()+(Ogre::Real)0.003*vecDevant);
269  event = true;
270  }
271  if(mKeyboard->isKeyDown(OIS::KC_S) || mKeyboard->isKeyDown(OIS::KC_DOWN)){
272  mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition()-(Ogre::Real)0.003*vecDevant);
273  event = true;
274  }
275  if(mKeyboard->isKeyDown(OIS::KC_Q) || mKeyboard->isKeyDown(OIS::KC_LEFT)){
276  rotmy = Ogre::Matrix3((Ogre::Real)cos(-angle), (Ogre::Real)sin(-angle), 0,
277  (Ogre::Real)(-sin(-angle)), (Ogre::Real)cos(-angle),0,
278  0,0,1);
279  vecDevant=vecDevant*rotmy;
280  mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)(-angle)));
281  event = true;
282  }
283  if(mKeyboard->isKeyDown(OIS::KC_D) || mKeyboard->isKeyDown(OIS::KC_RIGHT)){
284  rotmy = Ogre::Matrix3((Ogre::Real)cos(angle), (Ogre::Real)sin(angle), 0,
285  (Ogre::Real)(-sin(angle)), (Ogre::Real)cos(angle),0,
286  0,0,1);
287  vecDevant=vecDevant*rotmy;
288  mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)angle));
289  event = true;
290  }
291 
292  // Play the right animation
293  if(event){
294  mAnimationState = robot->getAnimationState("Walk");
295  }
296  else mAnimationState = robot->getAnimationState( "Idle" );
297 
298  // Start over when finished
299  mAnimationState->setLoop( true );
300  // Animation enabled
301  mAnimationState->setEnabled( true );
302 
303  return true;
304  }
305 #endif
306 };
307 
312 void computeInitialPose(vpCameraParameters *mcam, vpImage<unsigned char> &I,
313  vpPose * mPose, vpDot2 *md, vpImagePoint *mcog,
314  vpHomogeneousMatrix *cMo, vpPoint *mP,
315  const bool &opt_click_allowed)
316 {
317  // ---------------------------------------------------
318  // Code inspired from ViSP example of camera pose
319  // ----------------------------------------------------
320  bool opt_display = true;
321 
322 //#if defined(VISP_HAVE_X11) && ! defined(APPLE)
323 #if defined(VISP_HAVE_X11) && ! (defined(__APPLE__) && defined(__MACH__))
324  // produce an error on OSX: ‘typedef int Cursor’
325  // /usr/X11R6/include/X11/X.h:108: error: ‘Cursor’ has a previous
326  // declaration as ‘typedef XID Cursor’. That's why it should not be
327  // used on APPLE platforms
328  vpDisplayX display;
329 #elif defined VISP_HAVE_GTK
330  vpDisplayGTK display;
331 #elif defined VISP_HAVE_GDI
332  vpDisplayGDI display;
333 #elif defined VISP_HAVE_OPENCV
334  vpDisplayOpenCV display;
335 #elif defined VISP_HAVE_D3D9
336  vpDisplayD3D display;
337 #endif
338  for (unsigned int i=0 ; i < 4 ; i++)
339  {
340  if (opt_display) {
341  md[i].setGraphics(true) ;
342  }
343  else {
344  md[i].setGraphics(false) ;
345  }
346  }
347 
348  if (opt_display) {
349  try{
350  // Display size is automatically defined by the image (I) size
351  display.init(I,100,100,"Preliminary Pose Calculation");
352  // display the image
353  // The image class has a member that specify a pointer toward
354  // the display that has been initialized in the display declaration
355  // therefore is is no longuer necessary to make a reference to the
356  // display variable.
357  vpDisplay::display(I) ;
358  //Flush the display
359  vpDisplay::flush(I) ;
360 
361  }
362  catch(...)
363  {
364  vpERROR_TRACE("Error while displaying the image") ;
365  return ;
366  }
367  }
368 
369  std::cout<<"************************************************************************************"<<std::endl;
370  std::cout<<"*************************** Preliminary Pose Calculation ***************************"<<std::endl;
371  std::cout<<"****************************** Click on the 4 dots *******************************"<<std::endl;
372  std::cout<<"********Dot1 : (-x,-y,0), Dot2 : (x,-y,0), Dot3 : (x,y,0), Dot4 : (-x,y,0)**********"<<std::endl;
373  std::cout<<"************************************************************************************"<<std::endl;
374 
375  try{
376  vpImagePoint ip[4];
377  if (! opt_click_allowed) {
378  ip[0].set_i( 265 );
379  ip[0].set_j( 93 );
380  ip[1].set_i( 248 );
381  ip[1].set_j( 242 );
382  ip[2].set_i( 166 );
383  ip[2].set_j( 215 );
384  ip[3].set_i( 178 );
385  ip[3].set_j( 85 );
386  }
387  for(unsigned int i=0;i<4;i++) {
388  // by using setGraphics, we request to see the edges of the dot
389  // in red on the screen.
390  // It uses the overlay image plane.
391  // The default of this setting is that it is time consumming
392 
393  md[i].setGraphics(true) ;
394  md[i].setGrayLevelPrecision(0.7);
395  md[i].setSizePrecision(0.5);
396 
397  for(unsigned int j = 0;j<i;j++)
398  md[j].display(I) ;
399 
400  // flush the display buffer
401  vpDisplay::flush(I);
402  try{
403  if (opt_click_allowed)
404  md[i].initTracking(I);
405  else
406  md[i].initTracking(I, ip[i]);
407  }
408  catch(...){
409  }
410 
411  mcog[i] = md[i].getCog();
412  // an expcetion is thrown by the track method if
413  // - dot is lost
414  // - the number of pixel is too small
415  // - too many pixels are detected (this is usual when a "big" specularity
416  // occurs. The threshold can be modified using the
417  // setNbMaxPoint(int) method
418  if (opt_display) {
419  md[i].display(I) ;
420  // flush the display buffer
421  vpDisplay::flush(I) ;
422  }
423  }
424  }
425  catch(vpException e){
426  vpERROR_TRACE("Error while tracking dots") ;
427  vpCTRACE << e;
428  return;
429  }
430 
431  if (opt_display)
432  {
433  // display a red cross (size 10) in the image at the dot center
434  // of gravity location
435  //
436  // WARNING
437  // in the vpDisplay class member's when pixel coordinates
438  // are considered the first element is the row index and the second
439  // is the column index:
440  // vpDisplay::displayCross(Image, row index, column index, size, color)
441  // therefore u and v are inverted wrt to the vpDot specification
442  // Alternatively, to avoid this problem another set of member have
443  // been defined in the vpDisplay class.
444  // If the method name is postfixe with _uv the specification is :
445  // vpDisplay::displayCross_uv(Image, column index, row index, size, color)
446 
447  for (unsigned int i=0 ; i < 4 ; i++)
448  vpDisplay::displayCross(I, mcog[i], 10, vpColor::red) ;
449 
450  // flush the X11 buffer
451  vpDisplay::flush(I) ;
452  }
453 
454  // --------------------------------------------------------
455  // Now we will compute the pose
456  // --------------------------------------------------------
457 
458  // the list of point is cleared (if that's not done before)
459  mPose->clearPoint() ;
460 
461  // we set the 3D points coordinates (in meter !) in the object/world frame
462  double l=0.06 ;
463  double L=0.07 ;
464  mP[0].setWorldCoordinates(-L,-l, 0 ) ; // (X,Y,Z)
465  mP[1].setWorldCoordinates(L,-l, 0 ) ;
466  mP[2].setWorldCoordinates(L,l, 0 ) ;
467  mP[3].setWorldCoordinates(-L,l, 0 ) ;
468 
469  // pixel-> meter conversion
470  for (unsigned int i=0 ; i < 4 ; i++)
471  {
472  // u[i]. v[i] are expressed in pixel
473  // conversion in meter is achieved using
474  // x = (u-u0)/px
475  // y = (v-v0)/py
476  // where px, py, u0, v0 are the intrinsic camera parameters
477  double x=0, y=0;
478  vpPixelMeterConversion::convertPoint(*mcam, mcog[i], x,y) ;
479  mP[i].set_x(x) ;
480  mP[i].set_y(y) ;
481  }
482 
483 
484  // The pose structure is build, we put in the point list the set of point
485  // here both 2D and 3D world coordinates are known
486  for (unsigned int i=0 ; i < 4 ; i++)
487  {
488  mPose->addPoint(mP[i]) ; // and added to the pose computation point list
489  }
490 
491  // compute the initial pose using Dementhon method followed by a non linear
492  // minimisation method
493 
494  // Pose by Lagrange it provides an initialization of the pose
495  mPose->computePose(vpPose::LAGRANGE, *cMo) ;
496  // the pose is now refined using the virtual visual servoing approach
497  // Warning: cMo needs to be initialized otherwise it may diverge
498  mPose->computePose(vpPose::VIRTUAL_VS, *cMo) ;
499 
500  // Display breifly just to have a glimpse a the ViSP pose
501  // while(cpt<500){
502  if( opt_display ){
503  // Display the computed pose
504  mPose->display(I,*cMo,*mcam, 0.05, vpColor::red) ;
505  vpDisplay::flush(I) ;
506  vpTime::wait(800);
507  }
508 }
509 
510 #endif
511 
512 int main(int argc, const char **argv)
513 {
514  try {
515  std::string env_ipath;
516  std::string opt_ipath;
517  std::string ipath;
518  std::string opt_ppath;
519  std::string dirname;
520  std::string filename;
521  bool opt_click_allowed = true;
522 
523  // Get the visp-images-data package path or VISP_INPUT_IMAGE_PATH environment variable value
524  env_ipath = vpIoTools::getViSPImagesDataPath();
525 
526  // Set the default input path
527  if (! env_ipath.empty())
528  ipath = env_ipath;
529 
530  // Read the command line options
531  if (getOptions(argc, argv, opt_ipath, opt_ppath, opt_click_allowed) == false) {
532  exit (-1);
533  }
534 
535  // Get the option values
536  if (!opt_ipath.empty())
537  ipath = opt_ipath;
538 
539  // Compare ipath and env_ipath. If they differ, we take into account
540  // the input path comming from the command line option
541  if (!opt_ipath.empty() && !env_ipath.empty() && opt_ppath.empty()) {
542  if (ipath != env_ipath) {
543  std::cout << std::endl
544  << "WARNING: " << std::endl;
545  std::cout << " Since -i <visp image path=" << ipath << "> "
546  << " is different from VISP_IMAGE_PATH=" << env_ipath << std::endl
547  << " we skip the environment variable." << std::endl;
548  }
549  }
550 
551  // Test if an input path is set
552  if (opt_ipath.empty() && env_ipath.empty() && opt_ppath.empty() ){
553  usage(argv[0], NULL, ipath, opt_ppath);
554  std::cerr << std::endl
555  << "ERROR:" << std::endl;
556  std::cerr << " Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH "
557  << std::endl
558  << " environment variable to specify the location of the " << std::endl
559  << " image path where test images are located." << std::endl
560  << " Use -p <personal image path> option if you want to "<<std::endl
561  << " use personal images." << std::endl
562  << std::endl;
563 
564  exit(-1);
565  }
566 
567  std::ostringstream s;
568 
569  if (opt_ppath.empty()){
570  // Set the path location of the image sequence
571  dirname = vpIoTools::createFilePath(ipath, "ViSP-images/mire-2");
572 
573  // Build the name of the image file
574 
575  s.setf(std::ios::right, std::ios::adjustfield);
576  s << "image.%04d.pgm";
577  filename = vpIoTools::createFilePath(dirname, s.str());
578  }
579  else {
580  filename = opt_ppath;
581  }
582 
583  //We will read a sequence of images
584  vpVideoReader grabber;
585  grabber.setFirstFrameIndex(1);
586  grabber.setFileName(filename.c_str());
587  // Grey level image associated to a display in the initial pose computation
588  vpImage<unsigned char> Idisplay;
589  // Grey level image to track points
591  // RGBa image to get background
592  vpImage<vpRGBa> IC;
593  // Matrix representing camera parameters
595 
596  // Variables used for pose computation purposes
597  vpPose mPose;
598  vpDot2 md[4];
599  vpImagePoint mcog[4];
600  vpPoint mP[4];
601 
602  // CameraParameters we got from calibration
603  // Keep u0 and v0 as center of the screen
604  vpCameraParameters mcam;
605 
606  // Read the PGM image named "filename" on the disk, and put the
607  // bitmap into the image structure I. I is initialized to the
608  // correct size
609  //
610  // exception readPGM may throw various exception if, for example,
611  // the file does not exist, or if the memory cannot be allocated
612  try{
613  vpCTRACE << "Load: " << filename << std::endl;
614  grabber.open(Idisplay);
615  grabber.acquire(Idisplay);
616  vpCameraParameters mcamTmp(592,570,grabber.getWidth()/2,grabber.getHeight()/2);
617  // Compute the initial pose of the camera
618  computeInitialPose(&mcamTmp, Idisplay, &mPose, md, mcog, &cMo, mP,
619  opt_click_allowed);
620  // Close the framegrabber
621  grabber.close();
622 
623  // Associate the grabber to the RGBa image
624  grabber.open(IC);
625  mcam.init(mcamTmp);
626  }
627  catch(...)
628  {
629  // an exception is thrown if an exception from readPGM has been caught
630  // here this will result in the end of the program
631  // Note that another error message has been printed from readPGM
632  // to give more information about the error
633  std::cerr << std::endl
634  << "ERROR:" << std::endl;
635  std::cerr << " Cannot read " << filename << std::endl;
636  std::cerr << " Check your -i " << ipath << " option " << std::endl
637  << " or VISP_INPUT_IMAGE_PATH environment variable."
638  << std::endl;
639  exit(-1);
640  }
641 
642  // Create a vpRAOgre object with color background
643  vpAROgreExample ogre(mcam, (unsigned int)grabber.getWidth(), (unsigned int)grabber.getHeight());
644  // Initialize it
645  ogre.init(IC);
646 
647  double t0 = vpTime::measureTimeMs();
648 
649  // Rendering loop
650  while(ogre.continueRendering() && !grabber.end()) {
651  // Acquire a frame
652  grabber.acquire(IC);
653 
654  // Convert it to a grey level image for tracking purpose
656 
657  // kill the point list
658  mPose.clearPoint() ;
659 
660  // track the dot
661  for (int i=0 ; i < 4 ; i++)
662  {
663  // track the point
664  md[i].track(I, mcog[i]) ;
665  md[i].setGrayLevelPrecision(0.90);
666  // pixel->meter conversion
667  {
668  double x=0, y=0;
669  vpPixelMeterConversion::convertPoint(mcam, mcog[i], x, y) ;
670  mP[i].set_x(x) ;
671  mP[i].set_y(y) ;
672  }
673 
674  // and added to the pose computation point list
675  mPose.addPoint(mP[i]) ;
676  }
677  // the pose structure has been updated
678 
679  // the pose is now updated using the virtual visual servoing approach
680  // Dementhon or lagrange is no longuer necessary, pose at the
681  // previous iteration is sufficient
682  mPose.computePose(vpPose::VIRTUAL_VS, cMo);
683 
684  // Display with ogre
685  ogre.display(IC,cMo);
686 
687  // Wait so that the video does not go too fast
688  double t1 = vpTime::measureTimeMs();
689  std::cout << "\r> " << 1000 / (t1 - t0) << " fps" ;
690  t0 = t1;
691  }
692  // Close the grabber
693  grabber.close();
694 
695  return 0;
696  }
697  catch(vpException e) {
698  std::cout << "Catch a ViSP exception: " << e << std::endl;
699  return 1;
700  }
701  catch(Ogre::Exception e) {
702  std::cout << "Catch an Ogre exception: " << e.getDescription() << std::endl;
703  return 1;
704  }
705  catch(...) {
706  std::cout << "Catch an exception " << std::endl;
707  return 1;
708  }
709 }
710 #else // VISP_HAVE_OGRE && VISP_HAVE_DISPLAY
711 int
712 main()
713 {
714  std::cout << "You should install Ogre3D or a display (GTK or OpenCV...) to run this example..." << std::endl;
715 }
716 #endif
VISP_EXPORT int wait(double t0, double t)
Definition: vpTime.cpp:150
void init()
basic initialization with the default parameters
static std::string getViSPImagesDataPath()
Definition: vpIoTools.cpp:1091
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Implementation of an homogeneous matrix and operations on such kind of matrices.
virtual bool customframeEnded(const Ogre::FrameEvent &evt)
Definition: vpAROgre.cpp:555
#define vpERROR_TRACE
Definition: vpDebug.h:391
Display for windows using GDI (available on any windows 32 platform).
Definition: vpDisplayGDI.h:128
Define the X11 console to display images.
Definition: vpDisplayX.h:148
Class that enables to manipulate easily a video file or a sequence of images. As it inherits from the...
error that can be emited by ViSP classes.
Definition: vpException.h:73
void set_x(const double x)
Set the point x coordinate in the image plane.
Definition: vpPoint.cpp:496
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
Point coordinates conversion from pixel coordinates to normalized coordinates in meter...
Implementation of an augmented reality viewer.
Definition: vpAROgre.h:86
This tracker is meant to track a blob (connex pixels with same gray level) on a vpImage.
Definition: vpDot2.h:124
void track(const vpImage< unsigned char > &I)
Definition: vpDot2.cpp:461
static void flush(const vpImage< unsigned char > &I)
Definition: vpDisplay.cpp:2233
static bool parse(int *argcPtr, const char **argv, vpArgvInfo *argTable, int flags)
Definition: vpParseArgv.cpp:76
static const vpColor red
Definition: vpColor.h:163
Class that defines what is a point.
Definition: vpPoint.h:59
vpImagePoint getCog() const
Definition: vpDot2.h:160
void open(vpImage< vpRGBa > &I)
Display for windows using Direct3D.
Definition: vpDisplayD3D.h:105
void setGrayLevelPrecision(const double &grayLevelPrecision)
Definition: vpDot2.cpp:784
void set_i(const double ii)
Definition: vpImagePoint.h:154
static void display(vpImage< unsigned char > &I, vpHomogeneousMatrix &cMo, vpCameraParameters &cam, double size, vpColor col=vpColor::none)
Definition: vpPose.cpp:585
unsigned int getWidth() const
Return the number of columns in the image.
virtual bool processInputEvent(const Ogre::FrameEvent &)
Definition: vpAROgre.h:312
static std::string createFilePath(const std::string &parent, const std::string child)
Definition: vpIoTools.cpp:1265
bool computePose(vpPoseMethodType methode, vpHomogeneousMatrix &cMo, bool(*func)(vpHomogeneousMatrix *)=NULL)
compute the pose for a given method
Definition: vpPose.cpp:382
static void display(const vpImage< unsigned char > &I)
Definition: vpDisplay.cpp:206
The vpDisplayOpenCV allows to display image using the opencv library.
virtual void displayCross(const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)=0
void display(const vpImage< unsigned char > &I, vpColor color=vpColor::red, unsigned int thickness=1) const
Definition: vpDot2.cpp:219
Class used for pose computation from N points (pose from point only).
Definition: vpPose.h:74
Generic class defining intrinsic camera parameters.
void set_y(const double y)
Set the point y coordinate in the image plane.
Definition: vpPoint.cpp:498
The vpDisplayGTK allows to display image using the GTK+ library version 1.2.
Definition: vpDisplayGTK.h:141
void acquire(vpImage< vpRGBa > &I)
void setFileName(const char *filename)
void init(vpImage< unsigned char > &I, int winx=-1, int winy=-1, const char *title=NULL)
VISP_EXPORT double measureTimeMs()
Definition: vpTime.cpp:93
void set_j(const double jj)
Definition: vpImagePoint.h:165
void setSizePrecision(const double &sizePrecision)
Definition: vpDot2.cpp:814
void setWorldCoordinates(const double oX, const double oY, const double oZ)
Definition: vpPoint.cpp:111
#define vpCTRACE
Definition: vpDebug.h:337
virtual void createScene(void)
Definition: vpAROgre.h:296
void initTracking(const vpImage< unsigned char > &I, unsigned int size=0)
Definition: vpDot2.cpp:262
void setFirstFrameIndex(const long first_frame)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:88
void addPoint(const vpPoint &P)
Add a new point in this array.
Definition: vpPose.cpp:151
unsigned int getHeight() const
Return the number of rows in the image.
void setGraphics(const bool activate)
Definition: vpDot2.h:309
void clearPoint()
suppress all the point in the array of point
Definition: vpPose.cpp:129