#include <iostream>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_PANDA3D) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_MODULE_IO)
#include <visp3/core/vpException.h>
#include <visp3/core/vpExponentialMap.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayD3D.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/ar/vpPanda3DRGBRenderer.h>
#include <visp3/ar/vpPanda3DGeometryRenderer.h>
#include <visp3/ar/vpPanda3DRendererSet.h>
#include <visp3/ar/vpPanda3DCommonFilters.h>
#ifdef ENABLE_VISP_NAMESPACE
#endif
// Body of displayNormals -- the function signature is not visible in this
// extraction; presumably it takes a float normals image (components in
// [-1, 1]) and an RGB display image of the same size. TODO confirm against
// the full tutorial source.
// Maps each normal component from [-1, 1] to a displayable byte in [0, 255]
// with (v + 1) * 127.5, independently for R, G and B.
{
#if defined(_OPENMP)
// Each pixel is converted independently, so the loop parallelizes trivially.
#pragma omp parallel for
#endif
for (
unsigned int i = 0; i < normalsImage.
getSize(); ++i) {
normalDisplayImage.
bitmap[i].
R =
static_cast<unsigned char>((normalsImage.
bitmap[i].R + 1.0) * 127.5f);
normalDisplayImage.
bitmap[i].
G =
static_cast<unsigned char>((normalsImage.
bitmap[i].G + 1.0) * 127.5f);
normalDisplayImage.
bitmap[i].
B =
static_cast<unsigned char>((normalsImage.
bitmap[i].B + 1.0) * 127.5f);
}
}
// Body of displayDepth -- the function signature is not visible in this
// extraction; presumably it takes the float depth image, the grayscale
// display image and the near/far clipping distances nearV/farV. TODO confirm
// against the full tutorial source.
// Linearly rescales depth from [nearV, farV] to a byte in [0, 255]; values
// in front of the near plane are clamped to 0.
{
#if defined(_OPENMP)
// Each pixel is converted independently, so the loop parallelizes trivially.
#pragma omp parallel for
#endif
for (
unsigned int i = 0; i < depthImage.
getSize(); ++i) {
// NOTE(review): val is not clamped above 1, so a depth beyond farV would
// overflow the byte cast -- presumably farV bounds the scene; verify.
float val = std::max(0.f, (depthImage.
bitmap[i] - nearV) / (farV - nearV));
depthDisplayImage.
bitmap[i] =
static_cast<unsigned char>(val * 255.f);
}
}
// Body of displayLightDifference -- the function signature is not visible in
// this extraction; presumably it takes the full color render, the
// diffuse-only render and the grayscale output image. TODO confirm against
// the full tutorial source.
// Computes a per-pixel luma for both renders (0.299/0.587/0.114 weights,
// i.e. the BT.601 luma coefficients) and stores the absolute difference:
// the non-diffuse (specular/ambient) lighting contribution.
{
#if defined(_OPENMP)
// Each pixel is independent, so the loop parallelizes trivially.
#pragma omp parallel for
#endif
for (
unsigned int i = 0; i < colorImage.
getSize(); ++i) {
// Luma of the full render.
float I1 = 0.299 * colorImage.
bitmap[i].
R + 0.587 * colorImage.
bitmap[i].
G + 0.114 * colorImage.
bitmap[i].
B;
// Luma of the diffuse-only render.
float I2 = 0.299 * colorDiffuseOnly.
bitmap[i].
R + 0.587 * colorDiffuseOnly.
bitmap[i].
G + 0.114 * colorDiffuseOnly.
bitmap[i].
B;
lightDifference.
bitmap[i] =
static_cast<unsigned char>(round(abs(I1 - I2)));
}
}
// Body of displayCanny -- the function signature is not visible in this
// extraction; presumably it takes the raw Canny render (gradient data in
// R/G, orientation in B) and the binary edge display image. TODO confirm
// against the full tutorial source.
{
#if defined(_OPENMP)
#pragma omp parallel for
#endif
// First pass: binarize the raw Canny output into a 0/255 edge mask, a
// pixel being an edge when its gradient magnitude (R^2 + G^2) is non-zero.
// NOTE(review): "px" is used but never declared in the visible code -- a
// line such as "vpRGBf px = cannyRawData.bitmap[i];" appears to be missing
// from this extraction.
for (
unsigned int i = 0; i < cannyRawData.
getSize(); ++i) {
canny.
bitmap[i] = 255 * (px.
R * px.
R + px.
G * px.
G > 0);
}
// Second pass: on an 8x8 subsampled grid, compute the endpoint of a short
// segment along the edge orientation stored in the B channel.
for (
unsigned int i = 0; i < canny.
getHeight(); i += 8) {
for (
unsigned int j = 0; j < canny.
getWidth(); j += 8) {
bool valid = (pow(cannyRawData[i][j].R, 2.f) + pow(cannyRawData[i][j].G, 2.f)) > 0;
if (!valid) continue;
float angle = cannyRawData[i][j].B;
// NOTE(review): x and y are computed but unused in the visible code --
// presumably they feed a vpDisplay::displayArrow call that is missing
// from this extraction.
unsigned x = j + 10 * cos(angle);
unsigned y = i + 10 * sin(angle);
}
}
}
// Tutorial entry point: builds a Panda3D renderer set (object/camera
// normals, depth, RGB, optional diffuse-only and Canny sub-renderers),
// renders the scene and shows each output in its own display window.
// NOTE(review): this chunk is an incomplete extraction of the original
// tutorial -- several declarations and statements are missing (argTable
// field initializers, the vpParseArgv::parse call, renderParams, renderer,
// the image buffers, the lights, the timing timestamps). The comments below
// flag the gaps; confirm against the full tutorial source before editing.
int main(int argc, const char **argv)
{
// Command-line toggles, all off by default.
bool stepByStep = false;
bool debug = false;
bool showLightContrib = false;
bool showCanny = false;
char *modelPathCstr = nullptr;
char *backgroundPathCstr = nullptr;
// Argument table for vpParseArgv. NOTE(review): only the trailing help
// string of each entry survives in this extraction; the option names,
// ARGV_* kinds and destination pointers are missing. Also "Show frames
// step by step." appears three times -- the last two entries presumably
// describe other flags (light contribution / canny); verify.
vpParseArgv::vpArgvInfo argTable[] =
{
"Path to the model to load."},
"Path to the background image to load for the rgb renderer."},
"Show frames step by step."},
"Show frames step by step."},
"Show frames step by step."},
"Show Opengl/Panda3D debug message."},
"Print the help."},
// NOTE(review): the vpParseArgv::parse call and the enclosing failure
// branch preceding this early exit are missing from this extraction.
return (false);
}
// Reset any previous PStats (Panda3D profiling client) connection before
// reconnecting.
if (PStatClient::is_connected()) {
PStatClient::disconnect();
}
// Empty host / negative port: connect with the client's default target.
std::string host = "";
int port = -1;
if (!PStatClient::connect(host, port)) {
std::cout << "Could not connect to PStat server." << std::endl;
}
// Resolve the model path, falling back to the bundled Suzanne mesh.
std::string modelPath;
if (modelPathCstr) {
modelPath = modelPathCstr;
}
else {
modelPath = "data/suzanne.bam";
}
// Optional background image for the RGB renderer (empty means none).
std::string backgroundPath;
if (backgroundPathCstr) {
backgroundPath = backgroundPathCstr;
}
const std::string objectName = "object";
double factor = 1.0;
// NOTE(review): renderParams and renderer are used from here on, but their
// declarations are not visible in this extraction.
unsigned h = renderParams.getImageHeight(), w = renderParams.getImageWidth();
renderer.setRenderParameters(renderParams);
renderer.setVerticalSyncEnabled(false);
// Fail fast instead of continuing after a Panda3D error.
renderer.setAbortOnPandaError(true);
if (debug) {
renderer.enableDebugLog();
}
// Sub-renderers: object-space normals, camera-space normals, full RGB and
// a diffuse-only RGB pass (used for the light-contribution view).
std::shared_ptr<vpPanda3DGeometryRenderer> geometryRenderer = std::make_shared<vpPanda3DGeometryRenderer>(vpPanda3DGeometryRenderer::vpRenderType::OBJECT_NORMALS);
std::shared_ptr<vpPanda3DGeometryRenderer> cameraRenderer = std::make_shared<vpPanda3DGeometryRenderer>(vpPanda3DGeometryRenderer::vpRenderType::CAMERA_NORMALS);
std::shared_ptr<vpPanda3DRGBRenderer> rgbRenderer = std::make_shared<vpPanda3DRGBRenderer>();
std::shared_ptr<vpPanda3DRGBRenderer> rgbDiffuseRenderer = std::make_shared<vpPanda3DRGBRenderer>(false);
// Post-processing chain: grayscale conversion feeding a Canny edge filter.
std::shared_ptr<vpPanda3DLuminanceFilter> grayscaleFilter = std::make_shared<vpPanda3DLuminanceFilter>("toGrayscale", rgbRenderer, false);
std::shared_ptr<vpPanda3DCanny> cannyFilter = std::make_shared<vpPanda3DCanny>("canny", grayscaleFilter, true, 10.f);
renderer.addSubRenderer(geometryRenderer);
renderer.addSubRenderer(cameraRenderer);
renderer.addSubRenderer(rgbRenderer);
if (showLightContrib) {
renderer.addSubRenderer(rgbDiffuseRenderer);
}
if (showCanny) {
renderer.addSubRenderer(grayscaleFilter);
renderer.addSubRenderer(cannyFilter);
}
std::cout << "Initializing Panda3D rendering framework" << std::endl;
renderer.initFramework();
// Load the object into the scene and add the lights.
// NOTE(review): alight/plight/dlight are not declared in the visible code.
NodePath object = renderer.loadObject(objectName, modelPath);
renderer.addNodeToScene(object);
renderer.addLight(alight);
renderer.addLight(plight);
renderer.addLight(dlight);
if (!backgroundPath.empty()) {
rgbRenderer->setBackgroundImage(background);
}
rgbRenderer->printStructure();
std::cout << "Creating display and data images" << std::endl;
// Pick the first available display backend.
// NOTE(review): all branches except X11 lost their "using DisplayCls = ..."
// line in this extraction; as shown, the chain would not compile on
// non-X11 platforms.
#if defined(VISP_HAVE_GTK)
#elif defined(VISP_HAVE_X11)
using DisplayCls = vpDisplayX;
#elif defined(HAVE_OPENCV_HIGHGUI)
#elif defined(VISP_HAVE_GDI)
#elif defined(VISP_HAVE_D3D9)
#endif
// Window layout: a 2x2 grid of views, with optional extra columns for the
// light-contribution and Canny windows.
unsigned int padding = 80;
DisplayCls dNormals(normalDisplayImage, 0, 0, "normals in object space");
DisplayCls dNormalsCamera(cameraNormalDisplayImage, 0, h + padding, "normals in camera space");
DisplayCls dDepth(depthDisplayImage, w + padding, 0, "depth");
DisplayCls dColor(colorImage, w + padding, h + padding, "color");
DisplayCls dImageDiff;
if (showLightContrib) {
dImageDiff.init(lightDifference, w * 2 + padding, 0, "Specular/reflectance contribution");
}
DisplayCls dCanny;
if (showCanny) {
dCanny.init(cannyImage, w * 2 + padding, h + padding, "Canny");
}
renderer.renderFrame();
bool end = false;
std::vector<double> renderTime, fetchTime, displayTime;
// Main loop: refit clipping planes, render, fetch outputs, display them.
while (!end) {
float nearV = 0, farV = 0;
// Fit the clipping planes tightly around the object every frame.
geometryRenderer->computeNearAndFarPlanesFromNode(objectName, nearV, farV, true);
renderParams.setClippingDistance(nearV, farV);
renderer.setRenderParameters(renderParams);
renderer.renderFrame();
// NOTE(review): the getRender calls fetching normalsImage, depthImage,
// colorImage, etc. are missing from this extraction.
if (showLightContrib) {
renderer.getRenderer<
vpPanda3DRGBRenderer>(rgbDiffuseRenderer->getName())->getRender(colorDiffuseOnly);
}
if (showCanny) {
// NOTE(review): the canny getRender call is missing here.
}
// Convert the raw render outputs into displayable images.
displayNormals(normalsImage, normalDisplayImage);
displayNormals(cameraNormalsImage, cameraNormalDisplayImage);
displayDepth(depthImage, depthDisplayImage, nearV, farV);
if (showLightContrib) {
displayLightDifference(colorImage, colorDiffuseOnly, lightDifference);
}
if (showCanny) {
displayCanny(cannyRawData, cannyImage);
}
if (stepByStep) {
// NOTE(review): the step-by-step hint text / display calls are missing.
}
// NOTE(review): this unconditional "end = true" truncates the loop after
// one iteration as shown -- the original presumably sets "end" from a
// keyboard/click event instead; confirm against the full source.
end = true;
}
// Accumulate per-frame timings.
// NOTE(review): beforeRender/beforeFetch/beforeConvert/endDisplay
// timestamps are not declared in the visible code (brace structure here is
// also unbalanced due to the missing lines).
renderTime.push_back(beforeFetch - beforeRender);
fetchTime.push_back(beforeConvert - beforeFetch);
displayTime.push_back(endDisplay - beforeConvert);
std::string s;
if (stepByStep) {
// Block until the user presses space before advancing.
// NOTE(review): the keyboard-event polling that fills "s" is missing, so
// as shown this would spin forever once stepByStep is set.
bool next = false;
while (!next) {
if (s == " ") {
next = true;
}
}
}
// Advance the object pose for the next frame.
// NOTE(review): afterAll, wTo and oToo are not declared in the visible code.
const double delta = (afterAll - beforeRender) / 1000.0;
renderer.setNodePose(objectName, wTo * oToo);
}
// Print timing statistics when at least one frame was rendered.
// NOTE(review): the statistics-printing body is missing from this extraction.
if (renderTime.size() > 0) {
}
return 0;
}
#else
int main()
{
  // Fallback entry point: built when ViSP lacks Panda3D, a display backend,
  // or the io module, so the tutorial cannot run. Report why and fail.
  const char *reason = "Recompile ViSP with Panda3D as a third party to run this tutorial";
  std::cerr << reason << std::endl;
  return EXIT_FAILURE;
}
#endif
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
static const vpColor green
Display for windows using Direct3D 3rd party. Thus to enable this class Direct3D should be installed....
Display for windows using GDI (available on any windows 32 platform).
The vpDisplayGTK allows to display image using the GTK 3rd party library. Thus to enable this class G...
The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenC...
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static bool getKeyboardEvent(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void flush(const vpImage< unsigned char > &I)
static void displayArrow(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color=vpColor::white, unsigned int w=4, unsigned int h=2, unsigned int thickness=1)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
static vpHomogeneousMatrix direct(const vpColVector &v)
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void read(vpImage< unsigned char > &I, const std::string &filename, int backend=IO_DEFAULT_BACKEND)
Definition of the vpImage class member functions.
unsigned int getWidth() const
unsigned int getSize() const
Type * bitmap
points toward the bitmap
unsigned int getHeight() const
static double rad(double deg)
static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
static double getMean(const std::vector< double > &v)
Class representing an ambient light.
Implementation of canny filtering, using Sobel kernels.
Class representing a directional light.
Renderer that outputs object geometric information.
Class representing a Point Light.
Implementation of a traditional RGB renderer in Panda3D.
Rendering parameters for a panda3D simulation.
Class that rendering multiple datatypes, in a single pass. A RendererSet contains multiple subrendere...
static bool parse(int *argcPtr, const char **argv, vpArgvInfo *argTable, int flags)
@ ARGV_NO_DEFAULTS
No default options like -help.
@ ARGV_NO_LEFTOVERS
Print an error message if an option is not in the argument list.
@ ARGV_STRING
Argument is associated to a char * string.
@ ARGV_CONSTANT_BOOL
Stand alone argument associated to a bool var that is set to true.
@ ARGV_END
End of the argument list.
@ ARGV_HELP
Argument is for help displaying.
unsigned char B
Blue component.
unsigned char R
Red component.
unsigned char G
Green component.
VISP_EXPORT double measureTimeMs()