#include <iostream>
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpTime.h>
#include <visp3/detection/vpDetectorFace.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#if defined(HAVE_OPENCV_OBJDETECT) && defined(HAVE_OPENCV_HIGHGUI) && defined(HAVE_OPENCV_IMGPROC) \
&& defined(HAVE_OPENCV_VIDEOIO) && defined(VISP_HAVE_THREADS)
#include <thread>
#include <mutex>
#include <opencv2/videoio.hpp>
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
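// Capture state shared between the capture, display and detection threads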
typedef enum { capture_waiting, capture_started, capture_stopped } t_CaptureState;
#if defined(VISP_HAVE_V4L2)
void captureFunction(vpV4l2Grabber &cap, std::mutex &mutex_capture, vpImage<unsigned char> &frame,
t_CaptureState &capture_state)
#elif defined(HAVE_OPENCV_VIDEOIO)
void captureFunction(cv::VideoCapture &cap, std::mutex &mutex_capture, cv::Mat &frame, t_CaptureState &capture_state)
#endif
{
#if defined(VISP_HAVE_V4L2)
vpImage<unsigned char> frame_;
cap.open(frame_);
#elif defined(HAVE_OPENCV_VIDEOIO)
cv::Mat frame_;
#endif
bool stop_capture_ = false;
// Capture frames until the display thread requests a stop (30 s safety timeout)
double start_time = vpTime::measureTimeSecond();
while ((vpTime::measureTimeSecond() - start_time) < 30 && !stop_capture_) {
#if defined(VISP_HAVE_V4L2)
cap.acquire(frame_); // get a new frame from the camera
#elif defined(HAVE_OPENCV_VIDEOIO)
cap >> frame_; // get a new frame from the camera
#endif
// Update the shared frame and capture state
{
std::lock_guard<std::mutex> lock(mutex_capture);
if (capture_state == capture_stopped)
stop_capture_ = true;
else
capture_state = capture_started;
frame = frame_;
}
}
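// Notify the other threads that the capture is over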
{
std::lock_guard<std::mutex> lock(mutex_capture);
capture_state = capture_stopped;
}
std::cout << "End of capture thread" << std::endl;
}
#if defined(VISP_HAVE_V4L2)
void displayFunction(std::mutex &mutex_capture, std::mutex &mutex_face,
vpImage<unsigned char> &frame, t_CaptureState &capture_state,
vpRect &face_bbox,
bool &face_available)
#elif defined(HAVE_OPENCV_VIDEOIO)
void displayFunction(std::mutex &mutex_capture, std::mutex &mutex_face, cv::Mat &frame, t_CaptureState &capture_state,
vpRect &face_bbox,
bool &face_available)
#endif
{
vpImage<unsigned char> I_; // ViSP image used by the display
t_CaptureState capture_state_;
bool display_initialized_ = false;
bool face_available_ = false;
vpRect face_bbox_;
#if defined(VISP_HAVE_X11)
vpDisplayX *d_ = nullptr;
#elif defined(VISP_HAVE_GDI)
vpDisplayGDI *d_ = nullptr;
#endif
do {
mutex_capture.lock();
capture_state_ = capture_state;
mutex_capture.unlock();
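// Check if a new frame is available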
if (capture_state_ == capture_started) {
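// Get the frame and convert it to a ViSP image used by the display class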
{
std::lock_guard<std::mutex> lock(mutex_capture);
#if defined(VISP_HAVE_V4L2)
I_ = frame;
#elif defined(VISP_HAVE_OPENCV)
vpImageConvert::convert(frame, I_);
#endif
}
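// Initialize the display with the first available frame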
if (!display_initialized_) {
#if defined(VISP_HAVE_X11)
d_ = new vpDisplayX(I_);
display_initialized_ = true;
#elif defined(VISP_HAVE_GDI)
d_ = new vpDisplayGDI(I_);
display_initialized_ = true;
#endif
}
// Display the acquired image
vpDisplay::display(I_);
// Retrieve the latest detection result from the detection thread
{
std::lock_guard<std::mutex> lock(mutex_face);
face_available_ = face_available;
face_bbox_ = face_bbox;
}
if (face_available_) {
// Access the face bounding box to display it
vpDisplay::displayRectangle(I_, face_bbox_, vpColor::green, false, 4);
face_available_ = false;
}
// Trigger the end of acquisition with a mouse click
vpDisplay::displayText(I_, 10, 10, "Click to exit...", vpColor::red);
if (vpDisplay::getClick(I_, false)) {
std::lock_guard<std::mutex> lock(mutex_capture);
capture_state = capture_stopped;
}
// Update the display
vpDisplay::flush(I_);
}
else {
vpTime::wait(2); // No frame available yet: sleep 2 ms to avoid busy waiting
}
} while (capture_state_ != capture_stopped);
#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
delete d_;
#endif
std::cout << "End of display thread" << std::endl;
}
#if defined(VISP_HAVE_V4L2)
void detectionFunction(std::mutex &mutex_capture, std::mutex &mutex_face,
vpImage<unsigned char> &frame, t_CaptureState &capture_state,
vpRect &face_bbox, std::string &face_cascade_name,
bool &face_available)
#elif defined(HAVE_OPENCV_VIDEOIO)
void detectionFunction(std::mutex &mutex_capture, std::mutex &mutex_face, cv::Mat &frame, t_CaptureState &capture_state,
vpRect &face_bbox, std::string &face_cascade_name,
bool &face_available)
#endif
{
// Instantiate the face detector and load the Haar cascade file
vpDetectorFace face_detector_;
face_detector_.setCascadeClassifierFile(face_cascade_name);

t_CaptureState capture_state_;
#if defined(VISP_HAVE_V4L2)
vpImage<unsigned char> frame_;
#elif defined(VISP_HAVE_OPENCV)
cv::Mat frame_;
#endif
do {
mutex_capture.lock();
capture_state_ = capture_state;
mutex_capture.unlock();
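// Check if a new frame is available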
if (capture_state_ == capture_started) {
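// Backup the frame so that detection runs on a local copy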
{
std::lock_guard<std::mutex> lock(mutex_capture);
frame_ = frame;
}
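// Run the face detector on the current frame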
bool face_found_ = face_detector_.detect(frame_);
if (face_found_) {
std::lock_guard<std::mutex> lock(mutex_face);
face_available = true;
face_bbox = face_detector_.getBBox(0);
}
}
else {
vpTime::wait(2); // No frame available yet: sleep 2 ms to avoid busy waiting
}
} while (capture_state_ != capture_stopped);
std::cout << "End of face detection thread" << std::endl;
}
int main(int argc, const char *argv[])
{
std::string opt_face_cascade_name = "./haarcascade_frontalface_alt.xml";
unsigned int opt_device = 0;
unsigned int opt_scale = 2;
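// opt_scale is the subsampling factor applied to the camera images; set it to 1 to keep full resolution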
for (int i = 1; i < argc; i++) {
if (std::string(argv[i]) == "--haar" && i + 1 < argc)
opt_face_cascade_name = std::string(argv[i + 1]);
else if (std::string(argv[i]) == "--device" && i + 1 < argc)
opt_device = (unsigned int)atoi(argv[i + 1]);
else if (std::string(argv[i]) == "--scale" && i + 1 < argc)
opt_scale = (unsigned int)atoi(argv[i + 1]);
else if (std::string(argv[i]) == "--help") {
std::cout << "Usage: " << argv[0]
<< " [--haar <haarcascade xml filename>] [--device <camera "
"device>] [--scale <subsampling factor>] [--help]"
<< std::endl;
return EXIT_SUCCESS;
}
}
// Instantiate the frame grabber
#if defined(VISP_HAVE_V4L2)
vpV4l2Grabber cap;
std::ostringstream device;
device << "/dev/video" << opt_device;
cap.setDevice(device.str());
cap.setScale(opt_scale);
vpImage<unsigned char> frame;
#elif defined(HAVE_OPENCV_VIDEOIO)
cv::Mat frame;
cv::VideoCapture cap;
cap.open(opt_device);
#if (VISP_HAVE_OPENCV_VERSION >= 0x030000)
int width = (int)cap.get(cv::CAP_PROP_FRAME_WIDTH);
int height = (int)cap.get(cv::CAP_PROP_FRAME_HEIGHT);
cap.set(cv::CAP_PROP_FRAME_WIDTH, width / opt_scale);
cap.set(cv::CAP_PROP_FRAME_HEIGHT, height / opt_scale);
#else
int width = cap.get(CV_CAP_PROP_FRAME_WIDTH);
int height = cap.get(CV_CAP_PROP_FRAME_HEIGHT);
cap.set(CV_CAP_PROP_FRAME_WIDTH, width / opt_scale);
cap.set(CV_CAP_PROP_FRAME_HEIGHT, height / opt_scale);
#endif
#endif
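// Synchronization primitives shared by the three threads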
std::mutex mutex_capture;
std::mutex mutex_face;
t_CaptureState capture_state = capture_waiting;
bool face_available = false;
vpRect face_bbox;
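// Start the capture, display and detection threads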
std::thread thread_capture(&captureFunction, std::ref(cap), std::ref(mutex_capture), std::ref(frame), std::ref(capture_state));
std::thread thread_display(&displayFunction, std::ref(mutex_capture), std::ref(mutex_face), std::ref(frame),
std::ref(capture_state), std::ref(face_bbox), std::ref(face_available));
std::thread thread_detection(&detectionFunction, std::ref(mutex_capture), std::ref(mutex_face), std::ref(frame),
std::ref(capture_state), std::ref(face_bbox), std::ref(opt_face_cascade_name), std::ref(face_available));
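// Wait until the threads end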
thread_capture.join();
thread_display.join();
thread_detection.join();
return EXIT_SUCCESS;
}
#else
int main()
{
#ifndef VISP_HAVE_OPENCV
std::cout << "You should install OpenCV to make this example working..." << std::endl;
#elif !defined(_WIN32) && (defined(__unix__) || defined(__unix) || (defined(__APPLE__) && defined(__MACH__)))
std::cout << "You should enable pthread usage and rebuild ViSP..." << std::endl;
#else
std::cout << "Multi-threading seems not supported on this platform" << std::endl;
#endif
return EXIT_SUCCESS;
}
#endif