Visual Servoing Platform version 3.6.0
servoViper850FourPoints2DArtVelocityLs_des.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Tests the control law
 * eye-in-hand control
 * velocity computed in the articular frame
 *
 *****************************************************************************/
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h> // Debug trace

#include <fstream>
#include <iostream>
#include <sstream>
#include <stdio.h>
#include <stdlib.h>
#if (defined(VISP_HAVE_VIPER850) && defined(VISP_HAVE_DC1394))

#include <visp3/blob/vpDot.h> // vpDot tracker used below
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpPoint.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/robot/vpRobotViper850.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>

int main()
{
  // Log file creation in /tmp/$USERNAME/log.dat
  // This file contains by line:
  // - the 6 computed joint velocities (rad/s) to achieve the task
  // - the 6 measured joint velocities (rad/s)
  // - the 6 measured joint positions (rad)
  // - the 8 values of s - s*
  std::string username;
  // Get the user login name
  vpIoTools::getUserName(username);

  // Create a log filename to save velocities...
  std::string logdirname;
  logdirname = "/tmp/" + username;

  // Test if the output path exists. If not, try to create it
  if (vpIoTools::checkDirectory(logdirname) == false) {
    try {
      // Create the dirname
      vpIoTools::makeDirectory(logdirname);
    } catch (...) {
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Cannot create " << logdirname << std::endl;
      return EXIT_FAILURE;
    }
  }
  std::string logfilename;
  logfilename = logdirname + "/log.dat";

  // Open the log file name
  std::ofstream flog(logfilename.c_str());

  try {
// Define the square CAD model
// Square dimension
// #define L 0.075
#define L 0.05
// Distance between the camera and the square at the desired
// position after visual servoing convergence
#define D 0.5

    vpRobotViper850 robot;
    // Load the end-effector to camera frame transformation obtained
    // using a camera intrinsic model with distortion
    vpCameraParameters::vpCameraParametersProjType projModel = vpCameraParameters::perspectiveProjWithDistortion;
    robot.init(vpViper850::TOOL_PTGREY_FLEA2_CAMERA, projModel);

    vpServo task;

    vpImage<unsigned char> I;
    int i;

    bool reset = false;
    vp1394TwoGrabber g(reset);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
    g.open(I);

    g.acquire(I);

#ifdef VISP_HAVE_X11
    vpDisplayX display(I, 100, 100, "Current image");
#elif defined(HAVE_OPENCV_HIGHGUI)
    vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
    vpDisplayGTK display(I, 100, 100, "Current image");
#endif
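    // Note: only one display object is constructed, depending on which GUI
    // third-party library (X11, OpenCV highgui or GTK) was detected when ViSP
    // was built. If none of them is available the image is simply not
    // displayed; the static vpDisplay calls used below then have no effect.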

    vpDisplay::display(I);
    vpDisplay::flush(I);

    std::cout << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << " Test program for vpServo " << std::endl;
    std::cout << " Eye-in-hand task control, velocity computed in the joint space" << std::endl;
    std::cout << " Use of the Viper850 robot " << std::endl;
    std::cout << " task : servo 4 points on a square with dimension " << L << " meters" << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << std::endl;

    vpDot dot[4];
    vpImagePoint cog;

    std::cout << "Click on the 4 dots clockwise starting from upper/left dot..." << std::endl;

    for (i = 0; i < 4; i++) {
      dot[i].setGraphics(true);
      dot[i].initTracking(I);
      cog = dot[i].getCog();
      vpDisplay::displayCross(I, cog, 10, vpColor::blue);
      vpDisplay::flush(I);
    }

    vpCameraParameters cam;

    // Update camera parameters
    robot.getCameraParameters(cam, I);

    cam.printParameters();

    // Sets the current position of the visual feature
    vpFeaturePoint p[4];
    for (i = 0; i < 4; i++)
      vpFeatureBuilder::create(p[i], cam, dot[i]); // retrieve x,y and Z of the vpPoint structure

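    // Note: vpFeatureBuilder::create() uses the camera intrinsic parameters
    // retrieved above to convert each dot center of gravity from pixels to the
    // normalized image-plane coordinates (x, y) of the feature. This overload
    // does not estimate the depth Z of the current points, which is not a
    // problem here since the interaction matrix is evaluated at the desired
    // features (see the task configuration below).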
    // Sets the desired position of the visual feature
    vpFeaturePoint pd[4];

    pd[0].buildFrom(-L, -L, D);
    pd[1].buildFrom(L, -L, D);
    pd[2].buildFrom(L, L, D);
    pd[3].buildFrom(-L, L, D);
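    // vpFeaturePoint::buildFrom(x, y, Z) takes the normalized image-plane
    // coordinates x = X/Z and y = Y/Z together with the depth Z. The desired
    // configuration is therefore the four dots forming a centered square at
    // coordinates (+/-L, +/-L), every point lying at distance D from the
    // camera, i.e. the square facing the camera after convergence.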

    // We want to see a point on a point
    std::cout << std::endl;
    for (i = 0; i < 4; i++)
      task.addFeature(p[i], pd[i]);

    // Set the proportional gain
    task.setLambda(0.4);
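    // Lambda is the proportional gain of the control law
    //   qdot = -lambda * L^+ * cVe * eJe * (s - s*)
    // applied in the servo loop below: a larger gain makes the error decrease
    // faster but can produce oscillations or saturate the joint velocities on
    // the real robot.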

    // Display task information
    task.print();

    // Define the task
    // - we want an eye-in-hand control law
    // - articular velocities are computed
    task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
    task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
    task.print();
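    // With vpServo::EYEINHAND_L_cVe_eJe, computeControlLaw() returns joint
    // velocities: the interaction matrix L is combined with the twist
    // transformation cVe and the robot Jacobian eJe provided below. With
    // vpServo::DESIRED the interaction matrix is evaluated at the desired
    // features s*, which is what the "Ls_des" suffix of this example refers
    // to.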

    vpVelocityTwistMatrix cVe;
    robot.get_cVe(cVe);
    task.set_cVe(cVe);
    task.print();

    // Set the Jacobian (expressed in the end-effector frame)
    vpMatrix eJe;
    robot.get_eJe(eJe);
    task.set_eJe(eJe);
    task.print();
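    // cVe maps velocities expressed in the end-effector frame to the camera
    // frame; it is constant because the camera is rigidly mounted on the
    // end-effector, so setting it once is enough. The Jacobian eJe depends on
    // the joint configuration and is refreshed at each iteration of the servo
    // loop below.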

    // Initialise the velocity control of the robot
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

    std::cout << "\nHit CTRL-C to stop the loop...\n" << std::flush;
    for (;;) {
      // Acquire a new image from the camera
      g.acquire(I);

      // Display this image
      vpDisplay::display(I);

      try {
        // For each point...
        for (i = 0; i < 4; i++) {
          // Achieve the tracking of the dot in the image
          dot[i].track(I);
          // Display a green cross at the center of gravity position in the
          // image
          cog = dot[i].getCog();
          vpDisplay::displayCross(I, cog, 10, vpColor::green);
        }
      } catch (...) {
        flog.close(); // Close the log file
        vpTRACE("Error detected while tracking visual features");
        robot.stopMotion();
        exit(1);
      }

      // Update the point feature from the dot location
      for (i = 0; i < 4; i++)
        vpFeatureBuilder::create(p[i], cam, dot[i]);

      // Get the Jacobian of the robot
      robot.get_eJe(eJe);
      // Update this Jacobian in the task structure. It will be used to
      // compute the velocity skew (as an articular velocity) qdot = -lambda *
      // L^+ * cVe * eJe * (s-s*)
      task.set_eJe(eJe);

      vpColVector v;
      // Compute the visual servoing skew vector
      v = task.computeControlLaw();

      // Display the current and desired feature points in the image display
      vpServoDisplay::display(task, cam, I);
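      // vpServoDisplay::display() overlays the current features (green by
      // default) and the desired features (red by default) on the image,
      // which makes the convergence easy to monitor.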

      // Apply the computed joint velocities to the robot
      robot.setVelocity(vpRobot::ARTICULAR_FRAME, v);

      // Save the velocities applied to the robot in the log file. Since all
      // the Viper850 joints are revolute, v[0] to v[5] are joint rotation
      // velocities expressed in rad/s
      flog << v[0] << " " << v[1] << " " << v[2] << " " << v[3] << " " << v[4] << " " << v[5] << " ";

      // Get the measured joint velocities of the robot
      vpColVector qvel;
      robot.getVelocity(vpRobot::ARTICULAR_FRAME, qvel);
      // Save the measured joint velocities qvel[0] to qvel[5] (rad/s) in the
      // log file
      flog << qvel[0] << " " << qvel[1] << " " << qvel[2] << " " << qvel[3] << " " << qvel[4] << " " << qvel[5] << " ";

      // Get the measured joint positions of the robot
      vpColVector q;
      robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
      // Save the measured joint positions q[0] to q[5] (rad) in the log file
      flog << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << " " << q[4] << " " << q[5] << " ";

      // Save the feature error (s-s*) for the 4 feature points. For each
      // feature point, we have 2 errors (along x and y axis). This error is
      // expressed in meters in the camera frame
      flog << (task.getError()).t() << std::endl;
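      // Each iteration appends one line of 6 + 6 + 6 + 8 = 26 values to
      // /tmp/$USERNAME/log.dat, matching the layout documented at the top of
      // main().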

      // Flush the display
      vpDisplay::flush(I);

      // std::cout << "|| s - s* || = " << ( task.getError() ).sumSquare() <<
      // std::endl;
    }

    std::cout << "Display task information: " << std::endl;
    task.print();
    flog.close(); // Close the log file
    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    flog.close(); // Close the log file
    std::cout << "Caught an exception: " << e.getMessage() << std::endl;
    return EXIT_FAILURE;
  }
}

#else
int main()
{
  std::cout << "You do not have a Viper 850 robot connected to your computer..." << std::endl;
  return EXIT_SUCCESS;
}
#endif