Visual Servoing Platform  version 3.6.1 under development (2025-02-13)
tutorial-ibvs-4pts-ogre-tracking.cpp
#include <visp3/core/vpConfig.h>
#ifdef VISP_HAVE_MODULE_AR
#include <visp3/ar/vpAROgre.h>
#endif
#include <visp3/blob/vpDot2.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/robot/vpSimulatorCamera.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

#if defined(ENABLE_VISP_NAMESPACE)
using namespace VISP_NAMESPACE_NAME;
#endif

void display_trajectory(const vpImage<unsigned char> &I, const std::vector<vpDot2> &dot, unsigned int thickness);
#if defined(VISP_HAVE_OGRE)
void ogre_get_render_image(vpAROgre &ogre, const vpImage<unsigned char> &background, const vpHomogeneousMatrix &cMo,
                           vpImage<unsigned char> &I);
#endif

void display_trajectory(const vpImage<unsigned char> &I, const std::vector<vpDot2> &dot, unsigned int thickness)
{
  static std::vector<vpImagePoint> traj[4];
  for (unsigned int i = 0; i < 4; i++) {
    traj[i].push_back(dot[i].getCog());
  }
  for (unsigned int i = 0; i < 4; i++) {
    for (unsigned int j = 1; j < traj[i].size(); j++) {
      vpDisplay::displayLine(I, traj[i][j - 1], traj[i][j], vpColor::green, thickness);
    }
  }
}

#if defined(VISP_HAVE_OGRE)
void ogre_get_render_image(vpAROgre &ogre, const vpImage<unsigned char> &background, const vpHomogeneousMatrix &cMo,
                           vpImage<unsigned char> &I)
{
  static vpImage<vpRGBa> Irender; // Image from ogre scene rendering
  ogre.display(background, cMo);
  ogre.getRenderingOutput(Irender, cMo);

  vpImageConvert::convert(Irender, I);
  // Due to the light that was added to the scene, we need to threshold the image
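  // so that the rendered spheres appear as uniform dark blobs on a white
  // background, which makes them easy for the vpDot2 blob tracker to follow.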
  vpImageTools::binarise(I, (unsigned char)254, (unsigned char)255, (unsigned char)0, (unsigned char)255,
                         (unsigned char)255);
}
#endif

int main()
{
#if defined(VISP_HAVE_OGRE) && defined(VISP_HAVE_DISPLAY)
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
  std::shared_ptr<vpDisplay> display;
#else
  vpDisplay *display = nullptr;
#endif

  try {
    unsigned int thickness = 3;

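    // Desired camera pose cdMo and initial camera pose cMo, both expressed
    // with respect to the object frame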
    vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
    vpHomogeneousMatrix cMo(0.15, -0.1, 1., vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));

    // Grayscale image used as background texture (initialized to white)
    vpImage<unsigned char> background(480, 640, 255);

    // Parameters of our camera
    vpCameraParameters cam(840, 840, background.getWidth() / 2, background.getHeight() / 2);

    // Define the target as 4 points
    std::vector<vpPoint> point;
    point.push_back(vpPoint(-0.1, -0.1, 0));
    point.push_back(vpPoint(0.1, -0.1, 0));
    point.push_back(vpPoint(0.1, 0.1, 0));
    point.push_back(vpPoint(-0.1, 0.1, 0));
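    // The 4 points define a 20 cm x 20 cm square centered on the object frame
    // origin, in the plane Z = 0 of the object frame.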

    // Our Ogre-based AR renderer, initialized with the camera parameters
    // defined above and the background image size
    vpAROgre ogre;
    ogre.setCameraParameters(cam);
    ogre.setShowConfigDialog(false);
    ogre.addResource("./"); // Add the path to the Sphere.mesh resource
    ogre.init(background, false, true);
    // ogre.setWindowPosition(680, 400);

    // Create the scene that contains 4 spheres
    // Sphere.mesh contains a sphere with 1 meter radius
    std::vector<std::string> name(4);
    for (unsigned int i = 0; i < 4; i++) {
      std::ostringstream s;
      s << "Sphere" << i;
      name[i] = s.str();
      ogre.load(name[i], "Sphere.mesh");
      ogre.setMaterial(name[i], "UniformBlue");
      ogre.setScale(name[i], 0.02f, 0.02f, 0.02f); // Rescale the sphere to 2 cm radius
      // Set the position of each sphere in the object frame
      ogre.setPosition(name[i], vpTranslationVector(point[i].get_oX(), point[i].get_oY(), point[i].get_oZ()));
      ogre.setRotation(name[i], vpRotationMatrix(M_PI / 2, 0, 0));
    }

    // Add an optional point light source
    Ogre::Light *light = ogre.getSceneManager()->createLight();
    light->setDiffuseColour(1., 1., 1.);  // scaled RGB values
    light->setSpecularColour(1., 1., 1.); // scaled RGB values
    light->setType(Ogre::Light::LT_POINT);
#if (VISP_HAVE_OGRE_VERSION < (1 << 16 | 10 << 8 | 0))
    light->setPosition((Ogre::Real)cdMo[0][3], (Ogre::Real)cdMo[1][3], (Ogre::Real)(-cdMo[2][3]));
#else
    Ogre::SceneNode *spotLightNode = ogre.getSceneManager()->getRootSceneNode()->createChildSceneNode();
    spotLightNode->attachObject(light);
    spotLightNode->setPosition((Ogre::Real)cdMo[0][3], (Ogre::Real)cdMo[1][3], (Ogre::Real)(-cdMo[2][3]));
#endif

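    // Build an eye-in-hand IBVS task that regulates the error between the
    // current point features p and the desired features pd with the control
    // law v = -lambda * L^+ * (p - pd), where the interaction matrix L is
    // evaluated from the current features and lambda is the gain.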
    vpServo task;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(0.5);

    // Image used for the image processing
    vpImage<unsigned char> I;

    // Render the scene at the desired position
    ogre_get_render_image(ogre, background, cdMo, I);

    // Display the image in which we will do the tracking
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
    display = vpDisplayFactory::createDisplay(I, 0, 0, "Camera view at desired position");
#else
    display = vpDisplayFactory::allocateDisplay(I, 0, 0, "Camera view at desired position");
#endif

    vpDisplay::display(I);
    vpDisplay::displayText(I, 10, 10, "Click in the 4 dots to learn their positions", vpColor::red);
    vpDisplay::flush(I);

    std::vector<vpDot2> dot(4);
    vpFeaturePoint p[4], pd[4];

    for (unsigned int i = 0; i < 4; i++) {
      // Compute the desired feature at the desired position
      dot[i].setGraphics(true);
      dot[i].setGraphicsThickness(thickness);
      dot[i].initTracking(I);
      vpDisplay::flush(I);
      vpFeatureBuilder::create(pd[i], cam, dot[i].getCog());
    }
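    // After this learning stage, pd[i] holds the normalized image-plane
    // coordinates (x, y) of each dot at the desired camera pose: these are
    // the desired features s* of the servo task.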

    // Render the scene at the initial position
    ogre_get_render_image(ogre, background, cMo, I);

    vpDisplay::display(I);
    vpDisplay::setTitle(I, "Current camera view");
    vpDisplay::displayText(I, 10, 10, "Click in the 4 dots to initialise the tracking and start the servo",
                           vpColor::red);
    vpDisplay::flush(I);

    for (unsigned int i = 0; i < 4; i++) {
      // We notice that if we project the scene at a given pose, the pose
      // estimated from the rendered image differs a little. That's why we
      // cannot simply compute the desired feature from the desired pose using
      // the next two lines. We will rather compute the desired position of
      // the features from a learning stage:
      //   point[i].project(cdMo);
      //   vpFeatureBuilder::create(pd[i], point[i]);

      // Compute the current feature at the initial position
      dot[i].setGraphics(true);
      dot[i].initTracking(I);
      vpDisplay::flush(I);
      vpFeatureBuilder::create(p[i], cam, dot[i].getCog());
    }

    for (unsigned int i = 0; i < 4; i++) {
      // Set the feature Z coordinate from the pose
      vpColVector cP;
      point[i].changeFrame(cMo, cP);
      p[i].set_Z(cP[2]);

      task.addFeature(p[i], pd[i]);
    }
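    // Each current feature p[i] is now paired with its desired value pd[i];
    // the depth Z set above is needed to evaluate the interaction matrix of a
    // 2D point feature from the current measurements.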

    vpHomogeneousMatrix wMc, wMo;
    vpSimulatorCamera robot;
    robot.setSamplingTime(0.040);
    robot.getPosition(wMc);
    wMo = wMc * cMo;

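    // Servo loop: at each iteration the free-flying camera integrates the
    // velocity sent at the previous iteration, so wMc changes while the
    // object pose wMo stays constant; cMo is recomputed, the scene is
    // re-rendered, the dots are tracked and a new velocity is computed.
    // A mouse click in the image stops the loop.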
    for (;;) {
      // From the new camera position in the world frame, compute the pose of
      // the object in the camera frame
      robot.getPosition(wMc);
      cMo = wMc.inverse() * wMo;

      // Update the scene from the new camera position
      ogre_get_render_image(ogre, background, cMo, I);

      vpDisplay::display(I);

      for (unsigned int i = 0; i < 4; i++) {
        dot[i].track(I);
        vpFeatureBuilder::create(p[i], cam, dot[i].getCog());
      }

      for (unsigned int i = 0; i < 4; i++) {
        // Set the feature Z coordinate from the pose
        vpColVector cP;
        point[i].changeFrame(cMo, cP);
        p[i].set_Z(cP[2]);
      }

      vpColVector v = task.computeControlLaw();

      display_trajectory(I, dot, thickness);
      vpServoDisplay::display(task, cam, I, vpColor::green, vpColor::red, thickness + 2);
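      // Apply the computed 6-dof velocity twist, expressed in the camera
      // frame, to the simulated free-flying camera.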
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      vpDisplay::flush(I);
      if (vpDisplay::getClick(I, false))
        break;

      vpTime::wait(robot.getSamplingTime() * 1000);
    }
  }
  catch (const vpException &e) {
    std::cout << "Caught a ViSP exception: " << e << std::endl;
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
    if (display != nullptr) {
      delete display;
    }
#endif
    return EXIT_FAILURE;
  }
  catch (...) {
    std::cout << "Caught an unknown exception" << std::endl;
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
    if (display != nullptr) {
      delete display;
    }
#endif
    return EXIT_FAILURE;
  }
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
  if (display != nullptr) {
    delete display;
  }
#endif
  return EXIT_SUCCESS;
#endif
}