/*
 * Visual Servoing Platform, version 3.2.0 under development (2019-01-22)
 * tutorial-ibvs-4pts-ogre-tracking.cpp
 */
#include <visp3/core/vpConfig.h>
#ifdef VISP_HAVE_MODULE_AR
#include <visp3/ar/vpAROgre.h>
#endif
#include <visp3/blob/vpDot2.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/robot/vpSimulatorCamera.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

16 void display_trajectory(const vpImage<unsigned char> &I, const std::vector<vpDot2> &dot, unsigned int thickness);
17 #if defined(VISP_HAVE_OGRE)
18 void ogre_get_render_image(vpAROgre &ogre, const vpImage<unsigned char> &background, const vpHomogeneousMatrix &cMo,
20 #endif
21 
22 void display_trajectory(const vpImage<unsigned char> &I, const std::vector<vpDot2> &dot, unsigned int thickness)
23 {
24  static std::vector<vpImagePoint> traj[4];
25  for (unsigned int i = 0; i < 4; i++) {
26  traj[i].push_back(dot[i].getCog());
27  }
28  for (unsigned int i = 0; i < 4; i++) {
29  for (unsigned int j = 1; j < traj[i].size(); j++) {
30  vpDisplay::displayLine(I, traj[i][j - 1], traj[i][j], vpColor::green, thickness);
31  }
32  }
33 }
34 
35 #if defined(VISP_HAVE_OGRE)
36 void ogre_get_render_image(vpAROgre &ogre, const vpImage<unsigned char> &background, const vpHomogeneousMatrix &cMo,
38 {
39  static vpImage<vpRGBa> Irender; // Image from ogre scene rendering
40  ogre.display(background, cMo);
41  ogre.getRenderingOutput(Irender, cMo);
42 
43  vpImageConvert::convert(Irender, I);
44  // Due to the light that was added to the scene, we need to threshold the
45  // image
46  vpImageTools::binarise(I, (unsigned char)254, (unsigned char)255, (unsigned char)0, (unsigned char)255,
47  (unsigned char)255);
48 }
49 #endif
50 
51 int main()
52 {
53 #if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
54  try {
55  unsigned int thickness = 3;
56 
57  vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
58  vpHomogeneousMatrix cMo(0.15, -0.1, 1., vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));
59 
60  // Color image used as background texture.
61  vpImage<unsigned char> background(480, 640, 255);
62 
63  // Parameters of our camera
64  vpCameraParameters cam(840, 840, background.getWidth() / 2, background.getHeight() / 2);
65 
66  // Define the target as 4 points
67  std::vector<vpPoint> point;
68  point.push_back(vpPoint(-0.1, -0.1, 0));
69  point.push_back(vpPoint(0.1, -0.1, 0));
70  point.push_back(vpPoint(0.1, 0.1, 0));
71  point.push_back(vpPoint(-0.1, 0.1, 0));
72 
73  // Our object
74  // A simulator with the camera parameters defined above,
75  // and the background image size
76  vpAROgre ogre;
77  ogre.setCameraParameters(cam);
78  ogre.setShowConfigDialog(false);
79  ogre.addResource("./"); // Add the path to the Sphere.mesh resource
80  ogre.init(background, false, true);
81  // ogre.setWindowPosition(680, 400);
82 
83  // Create the scene that contains 4 spheres
84  // Sphere.mesh contains a sphere with 1 meter radius
85  std::vector<std::string> name(4);
86  for (unsigned int i = 0; i < 4; i++) {
87  std::ostringstream s;
88  s << "Sphere" << i;
89  name[i] = s.str();
90  ogre.load(name[i], "Sphere.mesh");
91  ogre.setScale(name[i], 0.02f, 0.02f,
92  0.02f); // Rescale the sphere to 2 cm radius
93  // Set the position of each sphere in the object frame
94  ogre.setPosition(name[i], vpTranslationVector(point[i].get_oX(), point[i].get_oY(), point[i].get_oZ()));
95  ogre.setRotation(name[i], vpRotationMatrix(M_PI / 2, 0, 0));
96  }
97 
98  // Add an optional point light source
99  Ogre::Light *light = ogre.getSceneManager()->createLight();
100  light->setDiffuseColour(1, 1, 1); // scaled RGB values
101  light->setSpecularColour(1, 1, 1); // scaled RGB values
102  light->setPosition((Ogre::Real)cdMo[0][3], (Ogre::Real)cdMo[1][3], (Ogre::Real)(-cdMo[2][3]));
103  light->setType(Ogre::Light::LT_POINT);
104 
105  vpServo task;
108  task.setLambda(0.5);
109 
110  // Image used for the image processing
112 
113  // Render the scene at the desired position
114  ogre_get_render_image(ogre, background, cdMo, I);
115 
116 // Display the image in which we will do the tracking
117 #if defined(VISP_HAVE_X11)
118  vpDisplayX d(I, 0, 0, "Camera view at desired position");
119 #elif defined(VISP_HAVE_GDI)
120  vpDisplayGDI d(I, 0, 0, "Camera view at desired position");
121 #elif defined(VISP_HAVE_OPENCV)
122  vpDisplayOpenCV d(I, 0, 0, "Camera view at desired position");
123 #else
124  std::cout << "No image viewer is available..." << std::endl;
125 #endif
126 
128  vpDisplay::displayText(I, 10, 10, "Click in the 4 dots to learn their positions", vpColor::red);
129  vpDisplay::flush(I);
130 
131  std::vector<vpDot2> dot(4);
132  vpFeaturePoint p[4], pd[4];
133 
134  for (unsigned int i = 0; i < 4; i++) {
135  // Compute the desired feature at the desired position
136  dot[i].setGraphics(true);
137  dot[i].setGraphicsThickness(thickness);
138  dot[i].initTracking(I);
139  vpDisplay::flush(I);
140  vpFeatureBuilder::create(pd[i], cam, dot[i].getCog());
141  }
142 
143  // Render the scene at the initial position
144  ogre_get_render_image(ogre, background, cMo, I);
145 
147  vpDisplay::setTitle(I, "Current camera view");
148  vpDisplay::displayText(I, 10, 10, "Click in the 4 dots to initialise the tracking and start the servo",
149  vpColor::red);
150  vpDisplay::flush(I);
151 
152  for (unsigned int i = 0; i < 4; i++) {
153  // We notice that if we project the scene at a given pose, the pose
154  // estimated from the rendered image differs a little. That's why we
155  // cannot simply compute the desired feature from the desired pose using
156  // the next two lines. We will rather compute the desired position of
157  // the features from a learning stage. point[i].project(cdMo);
158  // vpFeatureBuilder::create(pd[i], point[i]);
159 
160  // Compute the current feature at the initial position
161  dot[i].setGraphics(true);
162  dot[i].initTracking(I);
163  vpDisplay::flush(I);
164  vpFeatureBuilder::create(p[i], cam, dot[i].getCog());
165  }
166 
167  for (unsigned int i = 0; i < 4; i++) {
168  // Set the feature Z coordinate from the pose
169  vpColVector cP;
170  point[i].changeFrame(cMo, cP);
171  p[i].set_Z(cP[2]);
172 
173  task.addFeature(p[i], pd[i]);
174  }
175 
176  vpHomogeneousMatrix wMc, wMo;
177  vpSimulatorCamera robot;
178  robot.setSamplingTime(0.040);
179  robot.getPosition(wMc);
180  wMo = wMc * cMo;
181 
182  for (;;) {
183  // From the camera position in the world frame we retrieve the object
184  // position
185  robot.getPosition(wMc);
186  cMo = wMc.inverse() * wMo;
187 
188  // Update the scene from the new camera position
189  ogre_get_render_image(ogre, background, cMo, I);
190 
192 
193  for (unsigned int i = 0; i < 4; i++) {
194  dot[i].track(I);
195  vpFeatureBuilder::create(p[i], cam, dot[i].getCog());
196  }
197 
198  for (unsigned int i = 0; i < 4; i++) {
199  // Set the feature Z coordinate from the pose
200  vpColVector cP;
201  point[i].changeFrame(cMo, cP);
202  p[i].set_Z(cP[2]);
203  }
204 
205  vpColVector v = task.computeControlLaw();
206 
207  display_trajectory(I, dot, thickness);
208  vpServoDisplay::display(task, cam, I, vpColor::green, vpColor::red, thickness + 2);
210 
211  vpDisplay::flush(I);
212  if (vpDisplay::getClick(I, false))
213  break;
214 
215  vpTime::wait(robot.getSamplingTime() * 1000);
216  }
217  task.kill();
218  } catch (const vpException &e) {
219  std::cout << "Catch a ViSP exception: " << e << std::endl;
220  } catch (...) {
221  std::cout << "Catch an exception " << std::endl;
222  return 1;
223  }
224 #endif
225 }
/* -- Doxygen hover-text fragments captured along with the page extraction; kept verbatim for reference --
static void binarise(vpImage< Type > &I, Type threshold1, Type threshold2, Type value1, Type value2, Type value3, const bool useLUT=true)
Definition: vpImageTools.h:430
void setRotation(const std::string &sceneName, const vpRotationMatrix &wRo)
Definition: vpAROgre.cpp:695
VISP_EXPORT int wait(double t0, double t)
Definition: vpTime.cpp:150
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel)
unsigned int getWidth() const
Definition: vpImage.h:239
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
double getSamplingTime() const
Implementation of an homogeneous matrix and operations on such kind of matrices.
Class that defines the simplest robot: a free flying camera.
Display for windows using GDI (available on any windows 32 platform).
Definition: vpDisplayGDI.h:129
void setShowConfigDialog(const bool showConfigDialog)
Definition: vpAROgre.h:258
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be instal...
Definition: vpDisplayX.h:151
void addFeature(vpBasicFeature &s, vpBasicFeature &s_star, const unsigned int select=vpBasicFeature::FEATURE_ALL)
Definition: vpServo.cpp:497
error that can be emited by ViSP classes.
Definition: vpException.h:71
Class that defines a 2D point visual feature which is composed by two parameters that are the cartes...
Implementation of an augmented reality viewer using Ogre3D 3rd party.
Definition: vpAROgre.h:90
static const vpColor green
Definition: vpColor.h:183
static void flush(const vpImage< unsigned char > &I)
Ogre::SceneManager * getSceneManager()
Definition: vpAROgre.h:163
virtual void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMw)
Definition: vpAROgre.cpp:618
static const vpColor red
Definition: vpColor.h:180
Class that defines what is a point.
Definition: vpPoint.h:58
Implementation of a rotation matrix and operations on such kind of matrices.
virtual void setSamplingTime(const double &delta_t)
void kill()
Definition: vpServo.cpp:192
vpColVector computeControlLaw()
Definition: vpServo.cpp:935
virtual void init(vpImage< unsigned char > &I, bool bufferedKeys=false, bool hidden=false)
Definition: vpAROgre.cpp:115
static void display(const vpImage< unsigned char > &I)
void setCameraParameters(const vpCameraParameters &cameraP)
Definition: vpAROgre.cpp:652
The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenC...
void setScale(const std::string &sceneName, const float factorx, const float factory, const float factorz)
Definition: vpAROgre.cpp:759
Generic class defining intrinsic camera parameters.
void setLambda(double c)
Definition: vpServo.h:406
void getRenderingOutput(vpImage< vpRGBa > &I, const vpHomogeneousMatrix &cMo)
Definition: vpAROgre.cpp:1037
vpHomogeneousMatrix getPosition() const
void load(const std::string &entityName, const std::string &model)
Definition: vpAROgre.cpp:659
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
Definition: vpServo.cpp:574
static double rad(double deg)
Definition: vpMath.h:102
Implementation of column vector and the associated operations.
Definition: vpColVector.h:72
void setPosition(const std::string &sceneName, const vpTranslationVector &wTo)
Definition: vpAROgre.cpp:672
vpHomogeneousMatrix inverse() const
void addResource(const std::string &resourceLocation)
Definition: vpAROgre.h:126
unsigned int getHeight() const
Definition: vpImage.h:178
void set_Z(const double Z)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1)
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
static void setTitle(const vpImage< unsigned char > &I, const std::string &windowtitle)
void setServo(const vpServoType &servo_type)
Definition: vpServo.cpp:223
Class that consider the case of a translation vector.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
*/