Visual Servoing Platform version 3.6.0
Loading...
Searching...
No Matches
servoAfma62DhalfCamVelocity.cpp

Example of an eye-in-hand control law. Here we control a real robot, the Afma6 (a cartesian robot with 6 degrees of freedom). The velocity is computed in the camera frame. The visual features are built from four tracked lines: the x and y coordinates of the rectangle center, log(Z/Z*) (the current depth relative to the desired depth), and the theta-u rotations.

/****************************************************************************
*
* ViSP, open source Visual Servoing Platform software.
* Copyright (C) 2005 - 2023 by Inria. All rights reserved.
*
* This software is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* See the file LICENSE.txt at the root directory of this source
* distribution for additional information about the GNU GPL.
*
* For using ViSP with software that can not be combined with the GNU
* GPL, please contact Inria about acquiring a ViSP Professional
* Edition License.
*
* See https://visp.inria.fr for more information.
*
* This software was developed at:
* Inria Rennes - Bretagne Atlantique
* Campus Universitaire de Beaulieu
* 35042 Rennes Cedex
* France
*
* If you have questions regarding the use of this file, please contact
* Inria at visp@inria.fr
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*
* Description:
* tests the control law
* eye-in-hand control
* velocity computed in the camera frame
*
*****************************************************************************/
#include <cmath> // std::fabs
#include <limits> // numeric_limits
#include <stdlib.h>
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h> // Debug trace
#if (defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_DC1394))
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpImagePoint.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpLine.h>
#include <visp3/core/vpMath.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureDepth.h>
#include <visp3/visual_features/vpFeatureLine.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/visual_features/vpGenericFeature.h>
#include <visp3/vs/vpServo.h>
#include <visp3/robot/vpRobotAfma6.h>
// Exception
#include <visp3/core/vpException.h>
#include <visp3/vs/vpServoDisplay.h>
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpPoint.h>
int main()
{
// NOTE(review): this listing looks doxygen-extracted; the declarations of
// several variables used below (g, I, cam, cMo, cdMc, logZ, tu, p, pd, v, ip)
// appear to be elided from this view -- confirm against the complete example.
try {
// Open the firewire grabber and acquire a first image into I.
g.open(I);
g.acquire(I);
// Pick a display backend depending on what is available at build time.
#ifdef VISP_HAVE_X11
vpDisplayX display(I, 100, 100, "Current image");
#elif defined(HAVE_OPENCV_HIGHGUI)
vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
vpDisplayGTK display(I, 100, 100, "Current image");
#endif
vpServo task;       // the visual-servoing task (features, gain, control law)
vpRobotAfma6 robot; // interface to the real Afma6 cartesian robot
// robot.move("zero.pos") ;
// Update camera parameters
robot.getCameraParameters(cam, I);
std::cout << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << " Test program for vpServo " << std::endl;
std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl;
std::cout << " Simulation " << std::endl;
std::cout << " task : servo a line " << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << std::endl;
int nbline = 4;  // number of tracked lines (the square's edges)
int nbpoint = 4; // number of corner points deduced from the lines
vpTRACE("sets the desired position of the visual feature ");
// NOTE(review): variable-length arrays (size is a non-const variable) are a
// compiler extension, not standard C++ -- consider a fixed-size array.
vpPoint pointd[nbpoint]; // position of the fours corners
vpPoint pointcd; // position of the center of the square
double L = 0.05; // half side length of the square target -- presumably meters
pointd[0].setWorldCoordinates(L, -L, 0);
pointd[1].setWorldCoordinates(L, L, 0);
pointd[2].setWorldCoordinates(-L, L, 0);
pointd[3].setWorldCoordinates(-L, -L, 0);
// The coordinates in the object frame of the point used as a feature ie
// the center of the square
pointcd.setWorldCoordinates(0, 0, 0);
// The desired homogeneous matrix: camera 0.4 m in front of the target,
// rotated 10 degrees about the optical axis.
vpHomogeneousMatrix cMod(0, 0, 0.4, 0, 0, vpMath::rad(10));
// Project the desired corners and the center into the desired camera frame.
pointd[0].project(cMod);
pointd[1].project(cMod);
pointd[2].project(cMod);
pointd[3].project(cMod);
pointcd.project(cMod);
vpTRACE("Initialization of the tracking");
vpMeLine line[nbline];  // moving-edge trackers, one per square edge
vpPoint point[nbpoint]; // current corner points
int i;
// Moving-edge settings shared by the four line trackers.
vpMe me;
me.setRange(10);      // pixel search range on both sides of the edge
me.setThreshold(15);  // likelihood threshold
me.setSampleStep(10); // step between sampled points along each line
// Initialize the tracking. Define the four lines to track
for (i = 0; i < nbline; i++) {
line[i].setMe(&me);
line[i].initTracking(I);
line[i].track(I);
}
// Compute the position of the four corners. The goal is to
// compute the pose
for (i = 0; i < nbline; i++) {
double x = 0, y = 0;
// Corner i is the intersection of line i with the next line (modulo nbline).
// NOTE(review): the conversion from the image point `ip` to the normalized
// coordinates (x, y) appears elided from this listing -- as shown, x and y
// stay 0. Confirm against the complete example source.
if (!vpMeLine::intersection(line[i % nbline], line[(i + 1) % nbline], ip)) {
return EXIT_FAILURE;
}
point[i].set_x(x);
point[i].set_y(y);
}
// Compute the pose cMo
vpPose pose;
pose.clearPoint();
// Object-frame coordinates of the four corners (same model as the desired pose).
point[0].setWorldCoordinates(L, -L, 0);
point[1].setWorldCoordinates(L, L, 0);
point[2].setWorldCoordinates(-L, L, 0);
point[3].setWorldCoordinates(-L, -L, 0);
for (i = 0; i < nbline; i++) {
pose.addPoint(point[i]); // and added to the pose computation point list
}
// Pose by Dementhon or Lagrange provides an initialization of the non linear virtual visual-servoing pose estimation
// NOTE(review): the actual pose.computePose(..., cMo) call appears elided here.
vpTRACE("sets the current position of the visual feature ");
// The first features are the position in the camera frame x and y of the
// square center
vpPoint pointc; // The current position of the center of the square
// The center is taken as the midpoint of the diagonal joining corners 0 and 2.
double xc = (point[0].get_x() + point[2].get_x()) / 2;
double yc = (point[0].get_y() + point[2].get_y()) / 2;
pointc.set_x(xc);
pointc.set_y(yc);
pointc.project(cMo);
// The second feature is the depth of the current square center relative
// to the depth of the desired square center.
logZ.buildFrom(pointc.get_x(), pointc.get_y(), pointc.get_Z(), log(pointc.get_Z() / pointcd.get_Z()));
// The last three features are the rotations thetau between the current
// pose and the desired pose.
cdMc = cMod * cMo.inverse();
tu.buildFrom(cdMc);
vpTRACE("define the task");
vpTRACE("\t we want an eye-in-hand control law");
vpTRACE("\t robot is controlled in the camera frame");
vpTRACE("\t we want to see a point on a point..");
std::cout << std::endl;
// Stack the features: 2D point (x, y), log depth ratio, and theta-u rotation.
task.addFeature(p, pd);
task.addFeature(logZ);
task.addFeature(tu);
vpTRACE("\t set the gain");
task.setLambda(0.2);
vpTRACE("Display task information ");
task.print();
unsigned int iter = 0;
vpTRACE("\t loop");
// Adaptive-gain parameters: gain = alpha * exp(-beta * ||e||^2) + lambda_av,
// so the gain increases as the task error ||e|| decreases.
double lambda_av = 0.05;
double alpha = 0.05;
double beta = 3;
// Servo loop: runs until interrupted or an exception is thrown.
for (;;) {
std::cout << "---------------------------------------------" << iter << std::endl;
try {
g.acquire(I); // grab a new image
pose.clearPoint();
// Track the lines and find the current position of the corners
for (i = 0; i < nbline; i++) {
line[i].track(I);
line[i].display(I, vpColor::green);
double x = 0, y = 0;
// NOTE(review): as above, the ip -> (x, y) conversion appears elided.
if (!vpMeLine::intersection(line[i % nbline], line[(i + 1) % nbline], ip)) {
return EXIT_FAILURE;
}
point[i].set_x(x);
point[i].set_y(y);
pose.addPoint(point[i]);
}
// Compute the pose
// NOTE(review): the pose.computePose(..., cMo) call appears elided here too.
// Update the two first features x and y (position of the square
// center)
xc = (point[0].get_x() + point[2].get_x()) / 2;
yc = (point[0].get_y() + point[2].get_y()) / 2;
pointc.set_x(xc);
pointc.set_y(yc);
pointc.project(cMo);
// Print the current and the desired position of the center of the
// square Print the desired position of the four corners
p.display(cam, I, vpColor::green);
pd.display(cam, I, vpColor::red);
for (i = 0; i < nbpoint; i++)
pointd[i].display(I, cam, vpColor::red);
// Update the second feature
logZ.buildFrom(pointc.get_x(), pointc.get_y(), pointc.get_Z(), log(pointc.get_Z() / pointcd.get_Z()));
// Update the last three features
cdMc = cMod * cMo.inverse();
tu.buildFrom(cdMc);
// Adaptive gain
double gain;
{
// Fall back to the constant gain when alpha is (numerically) zero.
if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())
gain = lambda_av;
else {
gain = alpha * exp(-beta * (task.getError()).sumSquare()) + lambda_av;
}
}
task.setLambda(gain);
// Compute the camera-frame velocity from the stacked features.
v = task.computeControlLaw();
std::cout << v.sumSquare() << std::endl;
// Safety check: reject a large first command (likely a bad initialization)
// by zeroing the velocity and stopping the robot.
if (iter == 0)
if (v.sumSquare() > 0.5) {
v = 0;
robot.stopMotion();
}
// NOTE(review): the robot.setVelocity(vpRobot::CAMERA_FRAME, v) call that
// actually moves the robot appears elided from this listing.
}
catch (...) {
// On any tracking/servoing failure: stop the robot before exiting.
v = 0;
robot.stopMotion();
exit(1);
}
vpTRACE("\t\t || s - s* || = %f ", (task.getError()).sumSquare());
iter++;
}
vpTRACE("Display task information ");
task.print();
return EXIT_SUCCESS;
}
catch (const vpException &e) {
// Any ViSP exception (grabber, robot, tracking) lands here.
std::cout << "Test failed with exception: " << e << std::endl;
return EXIT_FAILURE;
}
}
#else
int main()
{
// Fallback build: no Afma6 robot and/or libdc1394 support was detected at
// configure time, so there is nothing to run -- report it and exit cleanly.
const char *message = "You do not have an afma6 robot connected to your computer...";
std::cout << message << std::endl;
return EXIT_SUCCESS;
}
#endif
Class for firewire ieee1394 video devices using libdc1394-2.x api.
void acquire(vpImage< unsigned char > &I)
void setVideoMode(vp1394TwoVideoModeType videomode)
void setFramerate(vp1394TwoFramerateType fps)
void open(vpImage< unsigned char > &I)
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
double sumSquare() const
static const vpColor red
Definition vpColor.h:211
static const vpColor green
Definition vpColor.h:214
The vpDisplayGTK allows to display image using the GTK 3rd party library. Thus to enable this class G...
The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenC...
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be instal...
Definition vpDisplayX.h:132
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void flush(const vpImage< unsigned char > &I)
error that can be emitted by ViSP classes.
Definition vpException.h:59
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Class that defines a 3D point visual feature which is composed by one parameters that is that defin...
void buildFrom(double x, double y, double Z, double LogZoverZstar)
Class that defines a 2D point visual feature which is composed by two parameters that are the cartes...
void display(const vpCameraParameters &cam, const vpImage< unsigned char > &I, const vpColor &color=vpColor::green, unsigned int thickness=1) const
Class that defines a 3D visual feature from a axis/angle parametrization that represent the rotatio...
void buildFrom(vpThetaUVector &tu)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:135
static double rad(double deg)
Definition vpMath.h:116
Class that tracks in an image a line moving edges.
Definition vpMeLine.h:148
void display(const vpImage< unsigned char > &I, const vpColor &color, unsigned int thickness=1)
Definition vpMeLine.cpp:181
void track(const vpImage< unsigned char > &I)
Definition vpMeLine.cpp:649
static bool intersection(const vpMeLine &line1, const vpMeLine &line2, vpImagePoint &ip)
Definition vpMeLine.cpp:838
void initTracking(const vpImage< unsigned char > &I)
Definition vpMeLine.cpp:186
void setMe(vpMe *p_me)
Definition vpMe.h:122
void setSampleStep(const double &s)
Definition vpMe.h:390
void setRange(const unsigned int &r)
Definition vpMe.h:383
void setLikelihoodThresholdType(const vpLikelihoodThresholdType likelihood_threshold_type)
Definition vpMe.h:445
void setPointsToTrack(const int &n)
Definition vpMe.h:376
@ NORMALIZED_THRESHOLD
Easy-to-use normalized likelihood threshold corresponding to the minimal luminance contrast to consid...
Definition vpMe.h:132
void setThreshold(const double &t)
Definition vpMe.h:435
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:77
void set_x(double x)
Set the point x coordinate in the image plane.
Definition vpPoint.cpp:508
double get_y() const
Get the point y coordinate in the image plane.
Definition vpPoint.cpp:469
double get_x() const
Get the point x coordinate in the image plane.
Definition vpPoint.cpp:467
double get_Z() const
Get the point cZ coordinate in the camera frame.
Definition vpPoint.cpp:453
void setWorldCoordinates(double oX, double oY, double oZ)
Definition vpPoint.cpp:110
void set_y(double y)
Set the point y coordinate in the image plane.
Definition vpPoint.cpp:510
Class used for pose computation from N points (pose from point only). Some of the algorithms implemen...
Definition vpPose.h:81
void addPoint(const vpPoint &P)
Definition vpPose.cpp:140
@ DEMENTHON_LAGRANGE_VIRTUAL_VS
Definition vpPose.h:102
@ VIRTUAL_VS
Definition vpPose.h:96
void clearPoint()
Definition vpPose.cpp:125
bool computePose(vpPoseMethodType method, vpHomogeneousMatrix &cMo, bool(*func)(const vpHomogeneousMatrix &)=NULL)
Definition vpPose.cpp:469
Control of Irisa's gantry robot named Afma6.
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel)
@ CAMERA_FRAME
Definition vpRobot.h:80
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
virtual vpRobotStateType setRobotState(const vpRobot::vpRobotStateType newState)
Definition vpRobot.cpp:198
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
Definition vpServo.cpp:564
@ EYEINHAND_CAMERA
Definition vpServo.h:151
void print(const vpServo::vpServoPrintType display_level=ALL, std::ostream &os=std::cout)
Definition vpServo.cpp:299
void setLambda(double c)
Definition vpServo.h:403
void setServo(const vpServoType &servo_type)
Definition vpServo.cpp:210
vpColVector getError() const
Definition vpServo.h:276
@ PSEUDO_INVERSE
Definition vpServo.h:199
vpColVector computeControlLaw()
Definition vpServo.cpp:930
@ CURRENT
Definition vpServo.h:179
void addFeature(vpBasicFeature &s, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
Definition vpServo.cpp:487
#define vpTRACE
Definition vpDebug.h:411