/*
 * ViSP — Visual Servoing Platform, version 3.6.0
 * Tutorial source: tutorial-ibvs-4pts-ogre-tracking.cpp
 */
#include <visp3/core/vpConfig.h>

#ifdef VISP_HAVE_MODULE_AR
#include <visp3/ar/vpAROgre.h>
#endif

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpImageTools.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/robot/vpSimulatorCamera.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
// Draw the accumulated trajectory of the 4 tracked blobs on image I.
void display_trajectory(const vpImage<unsigned char> &I, const std::vector<vpDot2> &dot, unsigned int thickness);

#if defined(VISP_HAVE_OGRE)
// Render the Ogre scene at pose cMo over the background and return the
// binarised grey-level result in I.
// NOTE: the scraped source truncated this prototype; the output image
// parameter and terminating ';' are restored here to match the definition.
void ogre_get_render_image(vpAROgre &ogre, const vpImage<unsigned char> &background, const vpHomogeneousMatrix &cMo,
                           vpImage<unsigned char> &I);
#endif
void display_trajectory(const vpImage<unsigned char> &I, const std::vector<vpDot2> &dot, unsigned int thickness)
{
static std::vector<vpImagePoint> traj[4];
for (unsigned int i = 0; i < 4; i++) {
traj[i].push_back(dot[i].getCog());
}
for (unsigned int i = 0; i < 4; i++) {
for (unsigned int j = 1; j < traj[i].size(); j++) {
vpDisplay::displayLine(I, traj[i][j - 1], traj[i][j], vpColor::green, thickness);
}
}
}
#if defined(VISP_HAVE_OGRE)
/*!
 * Render the Ogre scene at pose \e cMo over \e background and return the
 * grey-level result, thresholded to a binary image, in \e I.
 *
 * \param ogre       The Ogre augmented-reality renderer.
 * \param background Grey-level image used as background texture.
 * \param cMo        Camera-to-object pose at which the scene is rendered.
 * \param I          Output image used afterwards for the blob tracking.
 *
 * Fixes vs the scraped source: the truncated signature is completed with the
 * output parameter \e I, and the rendered RGBa buffer is converted into \e I
 * (the scrape dropped the vpImageConvert::convert call, leaving \e I unset).
 */
void ogre_get_render_image(vpAROgre &ogre, const vpImage<unsigned char> &background, const vpHomogeneousMatrix &cMo,
                           vpImage<unsigned char> &I)
{
  static vpImage<vpRGBa> Irender; // Image from ogre scene rendering
  ogre.display(background, cMo);
  ogre.getRenderingOutput(Irender, cMo);

  // Convert the color rendering into the grey-level processing image.
  vpImageConvert::convert(Irender, I);

  // Due to the light that was added to the scene, we need to threshold the
  // image
  vpImageTools::binarise(I, (unsigned char)254, (unsigned char)255, (unsigned char)0, (unsigned char)255,
                         (unsigned char)255);
}
#endif
/*!
 * Image-based visual servoing on 4 points, with the target simulated as 4
 * spheres rendered by Ogre3D and tracked as blobs with vpDot2.
 *
 * Fixes vs the scraped source: declarations of I, robot, wMc, wMo and cP were
 * missing; the servo task was never configured (setServo /
 * setInteractionMatrixType); the control law was never computed nor applied
 * (computeControlLaw / setVelocity); display()/flush() calls and the
 * truncated displayText() arguments are restored. All restored calls appear
 * in the page's own API cross-reference, so this follows the original
 * tutorial.
 */
int main()
{
#if defined(VISP_HAVE_OGRE) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
  try {
    unsigned int thickness = 3;

    // Desired and initial camera-to-object poses.
    vpHomogeneousMatrix cdMo(0, 0, 0.75, 0, 0, 0);
    vpHomogeneousMatrix cMo(0.15, -0.1, 1., vpMath::rad(10), vpMath::rad(-10), vpMath::rad(50));

    // Color image used as background texture.
    vpImage<unsigned char> background(480, 640, 255);

    // Parameters of our camera
    vpCameraParameters cam(840, 840, background.getWidth() / 2, background.getHeight() / 2);

    // Define the target as 4 points
    std::vector<vpPoint> point;
    point.push_back(vpPoint(-0.1, -0.1, 0));
    point.push_back(vpPoint(0.1, -0.1, 0));
    point.push_back(vpPoint(0.1, 0.1, 0));
    point.push_back(vpPoint(-0.1, 0.1, 0));

    // Our object: a simulator with the camera parameters defined above,
    // and the background image size
    vpAROgre ogre;
    ogre.setCameraParameters(cam); // render with the same intrinsics used for servoing
    ogre.setShowConfigDialog(false);
    ogre.addResource("./"); // Add the path to the Sphere.mesh resource
    ogre.init(background, false, true);
    // ogre.setWindowPosition(680, 400);

    // Create the scene that contains 4 spheres
    // Sphere.mesh contains a sphere with 1 meter radius
    std::vector<std::string> name(4);
    for (unsigned int i = 0; i < 4; i++) {
      std::ostringstream s;
      s << "Sphere" << i;
      name[i] = s.str();
      ogre.load(name[i], "Sphere.mesh");
      ogre.setScale(name[i], 0.02f, 0.02f,
                    0.02f); // Rescale the sphere to 2 cm radius
      // Set the position of each sphere in the object frame
      ogre.setPosition(name[i], vpTranslationVector(point[i].get_oX(), point[i].get_oY(), point[i].get_oZ()));
      ogre.setRotation(name[i], vpRotationMatrix(M_PI / 2, 0, 0));
    }

    // Add an optional point light source
    Ogre::Light *light = ogre.getSceneManager()->createLight();
    light->setDiffuseColour(1, 1, 1);  // scaled RGB values
    light->setSpecularColour(1, 1, 1); // scaled RGB values
    light->setPosition((Ogre::Real)cdMo[0][3], (Ogre::Real)cdMo[1][3], (Ogre::Real)(-cdMo[2][3]));
    light->setType(Ogre::Light::LT_POINT);

    // Eye-in-hand servo minimising the error between current and desired
    // image-point features, using the current interaction matrix.
    vpServo task;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(0.5);

    // Image used for the image processing
    vpImage<unsigned char> I;

    // Render the scene at the desired position
    ogre_get_render_image(ogre, background, cdMo, I);

    // Display the image in which we will do the tracking
#if defined(VISP_HAVE_X11)
    vpDisplayX d(I, 0, 0, "Camera view at desired position");
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI d(I, 0, 0, "Camera view at desired position");
#elif defined(HAVE_OPENCV_HIGHGUI)
    vpDisplayOpenCV d(I, 0, 0, "Camera view at desired position");
#else
    std::cout << "No image viewer is available..." << std::endl;
#endif

    vpDisplay::display(I);
    vpDisplay::displayText(I, 10, 10, "Click in the 4 dots to learn their positions", vpColor::red);
    vpDisplay::flush(I);

    std::vector<vpDot2> dot(4);
    vpFeaturePoint p[4], pd[4];

    for (unsigned int i = 0; i < 4; i++) {
      // Compute the desired feature at the desired position
      dot[i].setGraphics(true);
      dot[i].setGraphicsThickness(thickness);
      dot[i].initTracking(I); // waits for a user click in the blob
      vpDisplay::flush(I);
      vpFeatureBuilder::create(pd[i], cam, dot[i].getCog());
    }

    // Render the scene at the initial position
    ogre_get_render_image(ogre, background, cMo, I);

    vpDisplay::display(I);
    vpDisplay::setTitle(I, "Current camera view");
    vpDisplay::displayText(I, 10, 10, "Click in the 4 dots to initialise the tracking and start the servo",
                           vpColor::red);
    vpDisplay::flush(I);

    for (unsigned int i = 0; i < 4; i++) {
      // We notice that if we project the scene at a given pose, the pose
      // estimated from the rendered image differs a little. That's why we
      // cannot simply compute the desired feature from the desired pose using
      // the next two lines. We will rather compute the desired position of
      // the features from a learning stage.
      // point[i].project(cdMo);
      // vpFeatureBuilder::create(pd[i], point[i]);

      // Compute the current feature at the initial position
      dot[i].setGraphics(true);
      dot[i].initTracking(I);
      vpDisplay::flush(I);
      vpFeatureBuilder::create(p[i], cam, dot[i].getCog());
    }

    for (unsigned int i = 0; i < 4; i++) {
      // Set the feature Z coordinate from the pose
      vpColVector cP;
      point[i].changeFrame(cMo, cP);
      p[i].set_Z(cP[2]);

      task.addFeature(p[i], pd[i]);
    }

    // Free-flying camera simulator that integrates the servo velocities.
    vpSimulatorCamera robot;
    robot.setSamplingTime(0.040);

    // The object pose in the world frame is constant; deduce it from the
    // initial camera position.
    vpHomogeneousMatrix wMc, wMo;
    robot.getPosition(wMc);
    wMo = wMc * cMo;

    for (;;) {
      // From the camera position in the world frame we retrieve the object
      // position
      robot.getPosition(wMc);
      cMo = wMc.inverse() * wMo;

      // Update the scene from the new camera position
      ogre_get_render_image(ogre, background, cMo, I);

      vpDisplay::display(I);

      for (unsigned int i = 0; i < 4; i++) {
        dot[i].track(I);
        vpFeatureBuilder::create(p[i], cam, dot[i].getCog());
      }

      for (unsigned int i = 0; i < 4; i++) {
        // Set the feature Z coordinate from the pose
        vpColVector cP;
        point[i].changeFrame(cMo, cP);
        p[i].set_Z(cP[2]);
      }

      // Compute the camera velocity and apply it to the simulated robot.
      vpColVector v = task.computeControlLaw();
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      display_trajectory(I, dot, thickness);
      vpServoDisplay::display(task, cam, I, vpColor::green, vpColor::red, thickness + 2);
      vpDisplay::flush(I);

      // A mouse click stops the servo loop.
      if (vpDisplay::getClick(I, false))
        break;

      // Pace the loop at the robot sampling period (seconds -> ms).
      vpTime::wait(robot.getSamplingTime() * 1000);
    }
  } catch (const vpException &e) {
    std::cout << "Catch a ViSP exception: " << e << std::endl;
    return EXIT_FAILURE;
  } catch (...) {
    std::cout << "Catch an exception " << std::endl;
    return EXIT_FAILURE;
  }
#endif
  return EXIT_SUCCESS;
}
/* ---------------------------------------------------------------------------
   Doxygen cross-reference extract (generated API summary from the tutorial's
   documentation page — kept for reference, not part of the tutorial code):

   Implementation of an augmented reality viewer using Ogre3D 3rd party.
   Definition vpAROgre.h:96
   void setCameraParameters(const vpCameraParameters &cameraP)   [vpAROgre.cpp:656]
   Ogre::SceneManager * getSceneManager()                        [vpAROgre.h:163]
   void setShowConfigDialog(bool showConfigDialog)               [vpAROgre.h:258]
   void getRenderingOutput(vpImage< vpRGBa > &I, const vpHomogeneousMatrix &cMo)
   void setRotation(const std::string &sceneName, const vpRotationMatrix &wRo)  [vpAROgre.cpp:699]
   void addResource(const std::string &resourceLocation)         [vpAROgre.h:126]
   virtual void init(vpImage< unsigned char > &I, bool bufferedKeys=false, bool hidden=false)  [vpAROgre.cpp:115]
   virtual void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMw)     [vpAROgre.cpp:622]
   void load(const std::string &entityName, const std::string &model)                          [vpAROgre.cpp:663]
   void setPosition(const std::string &sceneName, const vpTranslationVector &wTo)              [vpAROgre.cpp:676]
   void setScale(const std::string &sceneName, float factorx, float factory, float factorz)    [vpAROgre.cpp:763]

   vpCameraParameters — generic class defining intrinsic camera parameters.
   vpColVector — implementation of column vector and the associated operations.
   vpColor — static const vpColor red [vpColor.h:211]; static const vpColor green [vpColor.h:214]

   vpDisplayGDI — display for windows using GDI (available on any windows 32 platform).
   vpDisplayOpenCV — allows to display an image using the OpenCV library.
   vpDisplayX — use the X11 console to display images on unix-like OS.  [vpDisplayX.h:132]

   vpDisplay:
     static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
     static void display(const vpImage< unsigned char > &I)
     static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1,
                             const vpImagePoint &ip2, const vpColor &color,
                             unsigned int thickness=1, bool segment=true)
     static void setTitle(const vpImage< unsigned char > &I, const std::string &windowtitle)
     static void flush(const vpImage< unsigned char > &I)
     static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip,
                             const std::string &s, const vpColor &color)

   vpException — error that can be emitted by ViSP classes. [vpException.h:59]

   vpFeatureBuilder:
     static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
   vpFeaturePoint — 2D point visual feature composed of two cartesian parameters; void set_Z(double Z)

   vpHomogeneousMatrix — implementation of an homogeneous matrix; vpHomogeneousMatrix inverse() const

   vpImageConvert:
     static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
   vpImageTools:
     static void binarise(vpImage< Type > &I, Type threshold1, Type threshold2,
                          Type value1, Type value2, Type value3, bool useLUT=true)

   vpImage [vpImage.h:135]:
     unsigned int getWidth() const  [vpImage.h:242]
     unsigned int getHeight() const [vpImage.h:184]

   vpMath — static double rad(double deg) [vpMath.h:116]
   vpPoint — 3D point in the object frame, allows forward projection. [vpPoint.h:77]

   vpRobot:
     void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel)
     CAMERA_FRAME [vpRobot.h:80]
   vpRotationMatrix — implementation of a rotation matrix and associated operations.

   vpServoDisplay:
     static void display(const vpServo &s, const vpCameraParameters &cam,
                         const vpImage< unsigned char > &I,
                         vpColor currentColor=vpColor::green,
                         vpColor desiredColor=vpColor::red, unsigned int thickness=1)
   vpServo:
     void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType,
                                   const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)  [vpServo.cpp:564]
     EYEINHAND_CAMERA [vpServo.h:151]
     void setLambda(double c) [vpServo.h:403]
     void setServo(const vpServoType &servo_type) [vpServo.cpp:210]
     vpColVector computeControlLaw() [vpServo.cpp:930]
     CURRENT [vpServo.h:179]
     void addFeature(vpBasicFeature &s, vpBasicFeature &s_star,
                     unsigned int select=vpBasicFeature::FEATURE_ALL)  [vpServo.cpp:487]

   vpSimulatorCamera — the simplest robot: a free flying camera.
   vpTranslationVector — the case of a translation vector.
   vpTime — VISP_EXPORT int wait(double t0, double t)
--------------------------------------------------------------------------- */