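/*
 * VisionControl.cpp
 *
 * ROS node that docks a mobile robot to a docking platform using visual
 * servoing on an ArUco marker. The node grabs frames from a camera (local
 * device or an IP stream), detects the marker with the aruco library,
 * publishes the estimated marker pose on /marker_pose, and closes the loop
 * in camCB() with a PID controller that publishes velocity commands on
 * /base_controller/command. Between docking trials the robot is sent to a
 * random pose and new PID gains are sampled (see GenerateRandomVal()).
 */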
#include <iostream>
#include <fstream>
#include <sstream>
#include <array>
#include <math.h>
#include <unistd.h>
#include <cstdlib>
#include <stdio.h> /* printf, scanf, puts, NULL */
#include <stdlib.h> /* srand, rand */
#include <time.h> /* time */
#include <aruco/aruco.h>
#include <aruco/cvdrawingutils.h>
#include "ros/ros.h"
#include <tf/transform_broadcaster.h>
#include <tf/transform_listener.h>
#include <tf/tf.h>
#include <geometry_msgs/Pose.h>
#include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/Twist.h>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include "opencv2/core/core_c.h"
#include "opencv2/core/core.hpp"
#include "opencv2/flann/miniflann.hpp"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/video/video.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/ml/ml.hpp"
#include "opencv2/highgui/highgui_c.h"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/contrib/contrib.hpp"
#include <VisionControl.h>
using namespace cv;
using namespace aruco;
using namespace std;
float PID4Docking::TheMarkerSize = .1; // Default marker size
int PID4Docking::Thresh1_min = 10;
int PID4Docking::Thresh2_min = 10;
int PID4Docking::Thresh1_max = 300;
int PID4Docking::Thresh2_max = 300;
const string PID4Docking::trackbarWindowName = "Trackbars";
int PID4Docking::ThePyrDownLevel = 0;
bool PID4Docking::update_images = true;
bool PID4Docking::found = false;
bool PID4Docking::Go2RandomPose = false;
int PID4Docking::ThresParam1 = 0;
int PID4Docking::ThresParam2 = 0;
// ---- CONTROL PARAMETERS ------ //
double PID4Docking::prev_errorX, PID4Docking::curr_errorX, PID4Docking::int_errorX, PID4Docking::diffX;
double PID4Docking::p_termX = 0;
double PID4Docking::i_termX = 0;
double PID4Docking::d_termX = 0;
double PID4Docking::prev_errorY, PID4Docking::curr_errorY, PID4Docking::int_errorY, PID4Docking::diffY;
double PID4Docking::p_termY = 0;
double PID4Docking::i_termY = 0;
double PID4Docking::d_termY = 0;
double PID4Docking::prev_errorYAW,PID4Docking::curr_errorYAW,PID4Docking::int_errorYAW,PID4Docking::diffYAW;
double PID4Docking::p_termYAW = 0;
double PID4Docking::i_termYAW = 0;
double PID4Docking::d_termYAW = 0;
double PID4Docking::control_signalX, PID4Docking::control_signalY, PID4Docking::control_signalYAW;
// ---- CONTROL PARAMETERS ------ //
// ---- Ref. Values for Android Camera ---- //
//const double PID4Docking::RefPose[4] = {-.0957, 0.00638817 /* Y_ref*/ , 0.308857 /* X_ref*/ , 0.17 /* theta_ref*/};
// ---- Ref. Values for Logitech Camera ---- //
const double PID4Docking::RefPose[4] = {-.0957, -0.0311278 /* Y_ref*/ , 0.219607 /* X_ref*/ , -0.616442 /* theta_ref*/};
// ---------------- PID gains---------------- //
double PID4Docking::Kp_y = .48; //.55
double PID4Docking::Ki_y = 0 ;//.002
double PID4Docking::Kd_y = 0; //.1
double PID4Docking::Kp_theta = .2; // .34 * Kp_y; .35 * Kp_y (this gain can't be set below a certain value, since the left joint of the robot would hit the docking platform!)
double PID4Docking::Ki_theta = 0; //* Ki_y; // .15 * Ki_y
double PID4Docking::Kd_theta = 0; //* Kd_y; // .0008
// ---------------- PID gains---------------- //
double PID4Docking::TT_S,PID4Docking::TT_E;
// random pose initialized
const double PID4Docking::y_up = .3;
const double PID4Docking::y_dwn = -.1;
const double PID4Docking::theta_dwn = -.7 /*-RefPose[3]*/;
const double PID4Docking::theta_up = .7 /*RefPose[3]*/;
double PID4Docking::x_new,PID4Docking::y_new,PID4Docking::theta_new;
double PID4Docking::dock_started,PID4Docking::dock_finished,PID4Docking::docking_duration;
double PID4Docking::speed_reducer_X = 1;
double PID4Docking::speed_reducer_Y = 1;
double PID4Docking::speed_reducer_theta = 1;
// ------ offsets X, Y, theta for Docking ---------
double PID4Docking::Pz_eps = .001;
double PID4Docking::Py_eps = .002;
double PID4Docking::A_eps = (CV_PI/180) * 5; // 5 deg.
double PID4Docking::safety_margin_X = .13; // safety margin along the X axis in the docking process: 13 cm
// ------ offsets X, Y, theta for Undocking ---------
double PID4Docking::x_thresh_undock = .02;
double PID4Docking::y_thresh_undock = .015;
double PID4Docking::theta_thresh_undock = (CV_PI/180) * 3;
double PID4Docking::docking_counter = 1;
PID4Docking::PID4Docking()
{
keepMoving = true;
/* initialize random seed: */
srand (time(NULL));
x_rand_SM = RefPose[2] + .55; // 55 cm spreading domain in the x-axis while moving towards the random pose
// Publish pose message and buffer up to 100 messages
MarPose_pub = node_vis.advertise<geometry_msgs::PoseStamped>("/marker_pose", 100);
commandPub = node_cont.advertise<geometry_msgs::Twist>("/base_controller/command",1);
MarPose_Sub = node_vis.subscribe("/marker_pose",1,&PID4Docking::camCB,this);
}
PID4Docking::~PID4Docking()
{
}
Mat PID4Docking::getCurrentImage()
{
return img;
}
void PID4Docking::cvTackBarEvents(int value,void* ptr)
{
PID4Docking* ic = (PID4Docking*)(ptr);
ic-> myhandler(value);
}
void PID4Docking::myhandler(int value)
{
if (Thresh1_min<3) Thresh1_min=3;
if (Thresh1_min%2!=1) Thresh1_min++;
if (Thresh2_min<1) Thresh2_min=1;
ThresParam1 = Thresh1_min;
ThresParam2 = Thresh2_min;
MDetector.setThresholdParams(ThresParam1,ThresParam2);
// Recompute
MDetector.detect(TheInputImage,TheMarkers,TheCameraParameters);
// TheInputImageCopy is the output image for TheInputImage
TheInputImage.copyTo(TheInputImageCopy);
for (unsigned int i=0;i<TheMarkers.size();i++)
{
TheMarkers[i].draw(TheInputImageCopy,Scalar(205,0,0),1);
}
//imshow("INPUT IMAGE",TheInputImageCopy);
//imshow("THRESHOLD IMAGE",MDetector.getThresholdedImage());
}
void PID4Docking::createTrackbars()
{
namedWindow(trackbarWindowName, 0);
createTrackbar("ThresParam 1", trackbarWindowName, &Thresh1_min, Thresh1_max, cvTackBarEvents, this);
createTrackbar("ThresParam 2", trackbarWindowName, &Thresh2_min, Thresh2_max, cvTackBarEvents, this);
}
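// Convert an incoming sensor_msgs/Image (BGR8) into the OpenCV image returned by getCurrentImage().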
void PID4Docking::imageCb(const sensor_msgs::ImageConstPtr& msg)
{
cv_bridge::CvImagePtr cv_ptr;
try
{
cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
}
catch (cv_bridge::Exception& e)
{
ROS_ERROR("cv_bridge exception: %s", e.what());
return; // cv_ptr is invalid here, do not dereference it below
}
img = cv_ptr->image;
}
bool PID4Docking::readArguments ( int argc,char **argv )
{
if (argc<2) {
cerr<< "Invalid number of arguments!\n" <<endl;
cerr<< "Usage: (in.avi|live|copy) [intrinsics.yml] [marker's width/height]" <<endl;
return false;
}
TheInputVideo=argv[1];
if (argc>=3)
TheIntrinsicFile=argv[2];
if (argc>=4)
TheMarkerSize=atof(argv[3]);
if (argc==3)
cerr<< "NOTE: You need makersize to see 3d info!" <<endl;
return true;
}
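// Main vision loop: open the video source (local device 0; an IP-stream URL is
// available but commented out), load camera intrinsics if given, then detect the
// marker in every frame, publish its pose as a PoseStamped on /marker_pose
// (plus the /camera and /marker TF frames), and optionally render the detection
// and thresholded images until 'x'/ESC is pressed or the marker is lost.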
void PID4Docking::ProgStart(int argc,char** argv)
{
// Show images; press "SPACE" to disable image
// rendering to save CPU time
if (readArguments(argc,argv)==false)
{
cerr<<"check the authenticity of your file again!"<<endl;
node_vis.shutdown();
}
//createTrackbars();
// IP address for raw3_lund
const std::string vsa = "http://192.168.0.101:8080/video?x.mjpeg";
// -- publishing video stream with Android Camera--
//TheVideoCapturer.open(vsa);
TheVideoCapturer.open(0);
// Check video is open
if (!TheVideoCapturer.isOpened())
{
cerr<<"Could not open video!!"<<endl;
node_vis.shutdown();
}
dock_started = ros::Time::now().toSec();
// Read first image to get the dimensions
TheVideoCapturer>>TheInputImage;
// Read camera parameters if passed
if (TheIntrinsicFile!="") {
TheCameraParameters.readFromXMLFile(TheIntrinsicFile);
TheCameraParameters.resize(TheInputImage.size());
}
// Configure other parameters
if (ThePyrDownLevel>0)
{
MDetector.pyrDown(ThePyrDownLevel);
}
MDetector.setCornerRefinementMethod(MarkerDetector::LINES);
char key=0;
int index=0;
//ros::Rate rate(10);
// Capture until press ESC or until the end of the video
while ((key != 'x') && (key!=27) && TheVideoCapturer.grab() && node_vis.ok() && keepMoving)
{
TT_S = ros::Time::now().toSec();
/* --- For the purpose of showing the time ---
Mat frame;
TheVideoCapturer >> frame; // get a new frame from camera
imshow("video stream",frame);
waitKey(30); // 30 ms */
if (TheVideoCapturer.retrieve(TheInputImage))
{
// Detection of markers in the image passed
MDetector.detect(TheInputImage,TheMarkers,TheCameraParameters,TheMarkerSize);
TheInputImage.copyTo(TheInputImageCopy);
geometry_msgs::PoseStamped msg;
float x_t, y_t, z_t;
float roll,yaw,pitch;
float rollE,yawE,pitchE;
if (TheMarkers.size()>0)
{
found = true;
//ROS_INFO("MARKER FOUND!!! ... \n");
}else
{
found = false;
keepMoving = false;
ROS_INFO_STREAM("Marker is lost, successful docking trials : " << (docking_counter - 1) << "\n");
//RandomPose(x_new,y_new,theta_new);
//move2docking(-control_signalX, -control_signalY, control_signalYAW);
}
if (node_vis.ok() && found)
{
y_t = -TheMarkers[0].Tvec.at<Vec3f>(0,0)[0];
x_t = TheMarkers[0].Tvec.at<Vec3f>(0,0)[1];
z_t = TheMarkers[0].Tvec.at<Vec3f>(0,0)[2];
Mat R(3,3,cv::DataType<float>::type);
// You need to apply cv::Rodrigues() in order to obtain angles w.r.t. the camera coordinates
Rodrigues(TheMarkers[0].Rvec,R);
// ----------- Euler angle -----------------//
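// Extracting roll via asin() yields two candidate solutions (roll1 and
// roll2 = pi - roll1); pitch and yaw are recovered with atan2() for each
// candidate, and the solution with the smaller total rotation is kept below.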
float roll1 = -asin(R.at<float>(2,0));
float roll2 = CV_PI - roll1;
float pitch1 = atan2(R.at<float>(2,1) / cos(roll1), R.at<float>(2,2) / cos(roll1));
float pitch2 = atan2(R.at<float>(2,1) / cos(roll2), R.at<float>(2,2) / cos(roll2));
float yaw2 = atan2(R.at<float>(1,0) / cos(roll2), R.at<float>(0,0) / cos(roll2));
float yaw1 = atan2(R.at<float>(1,0) / cos(roll1), R.at<float>(0,0) / cos(roll1));
//choose one solution to return
//for example the "shortest" rotation
if ((abs(roll1) + abs(pitch1) + abs(yaw1)) <= (abs(roll2) + abs(pitch2) + abs(yaw2)))
{
rollE = roll1;
pitchE= pitch1;
yawE = yaw1;
} else
{
rollE = roll2;
pitchE = pitch2;
yawE = yaw2;
}
// ----------- Euler angle -----------------//
pitch = -atan2(R.at<float>(2,0), R.at<float>(2,1));
yaw = acos(R.at<float>(2,2));
roll = -atan2(R.at<float>(0,2), R.at<float>(1,2));
// Marker rotation should be initially zero (just for convenience)
float p_off = CV_PI;
float r_off = CV_PI/2;
float y_off = CV_PI/2;
// Camera Frame ---
transformCAM.setOrigin( tf::Vector3(.25, 0, .5) );
transformCAM.setRotation( tf::Quaternion(0, 0, 0 , 1) );
CAMbr.sendTransform(tf::StampedTransform(transformCAM,ros::Time::now(),"/base_link","/camera"));
// Publish TF message including the offsets
tf::Quaternion quat = tf::createQuaternionFromRPY(roll-r_off, pitch+p_off, yaw-y_off);
broadcaster.sendTransform(tf::StampedTransform(tf::Transform(quat,tf::Vector3(x_t, y_t, z_t)),ros::Time::now(),"/camera","/marker"));
// Now publish the pose message, remember the offsets
msg.header.frame_id = "/camera";
msg.pose.position.x = x_t;
msg.pose.position.y = y_t;
msg.pose.position.z = z_t;
geometry_msgs::Quaternion p_quat = tf::createQuaternionMsgFromRollPitchYaw(roll-r_off, pitch+p_off, yaw-y_off);
msg.pose.orientation = p_quat;
MarPose_pub.publish(msg);
}
/*// Print other rectangles that contain no valid markers
for (unsigned int i=0;i<MDetector.getCandidates().size();i++)
{
Marker m( MDetector.getCandidates()[i],10);
m.draw(TheInputImageCopy,Scalar(0,255,0),2);
}*/
for (unsigned int i=0;i<TheMarkers.size();i++)
{
int currentMarID = TheMarkers[i].id;
TheMarkers[i].draw(TheInputImageCopy,Scalar(0,255,0),2) ;
CvDrawingUtils::draw3dCube(TheInputImageCopy,TheMarkers[i],TheCameraParameters);
CvDrawingUtils::draw3dAxis(TheInputImageCopy,TheMarkers[i],TheCameraParameters);
//Marker ID to string
stringstream marker_id_string;
marker_id_string << "marker_ " << currentMarID;
}
if (update_images)
{
imshow("INPUT IMAGE",TheInputImageCopy);
imshow("THRESHOLD IMAGE",MDetector.getThresholdedImage());
}
}else
{
printf("retrieve failed\n");
}
key=cv::waitKey(30);
// If space is hit, don't render the image.
if (key == ' ')
{
update_images = !update_images;
}
ros::spinOnce();
TT_E = ros::Time::now().toSec();
ROS_INFO_STREAM(" visualization while loop duration = " << (TT_E - TT_S) <<" \n");
}
}
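// Pose feedback callback: stores the latest marker pose in camPose, then either
// drives the PID docking sequence towards RefPose or, when Go2RandomPose is set,
// sends the robot to a new random pose via RandomPose().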
void PID4Docking::camCB(const geometry_msgs::PoseStamped::ConstPtr& CamFB) // subscriber
{
camPose[0] = CamFB->pose.position.x; // not important!!!
camPose[1] = CamFB->pose.position.y; // y pose
camPose[2] = CamFB->pose.position.z; // x_rob
camPose[3] = CamFB->pose.orientation.x; // theta orientation
// in Marker coordinate sys.
// z => X robot (thrust)
// y => -Y robot (left - right)
// x => Z robot (NOT applicable in our case!)
// correspondingly ...
// roll in Marker coordinate => yaw in Robot coordinate
ROS_INFO_STREAM("--------- PID gains in trial no. " << docking_counter << " : ---------\n");
ROS_INFO_STREAM(" Kp = " << Kp_y << " , Ki = " << Ki_y << " , Kd = " << Kd_y << "\n");
ROS_INFO_STREAM(" --------------------- Pose estimation ------------------ \n");
ROS_INFO_STREAM(" X_mar = " << camPose[2] << " vs X_ref = " << RefPose[2] << " \n");
ROS_INFO_STREAM(" Y_mar = " << camPose[1] << " vs Y_ref = " << RefPose[1] << " \n");
ROS_INFO_STREAM(" theta_mar = " << camPose[3] << " rad. =~ " << (180/CV_PI) * camPose[3] << " deg. \n");
ROS_INFO_STREAM(" theta_ref = " << RefPose[3] << " rad. =~ " << (180/CV_PI) * RefPose[3] << " deg. \n");
ROS_INFO_STREAM("------------------------------------------------------ \n ");
if(Go2RandomPose == false)
{
ROS_INFO_STREAM("---------- MOVING TOWARDS DOCKING PLATFORM --------- \n ");
if (
(abs(RefPose[2] - camPose[2]) <= Pz_eps) //&& // Z
//(abs(RefPose[1] - camPose[1]) <= Py_eps) && // Y
//(abs(RefPose[3] - camPose[3]) <= A_eps) // Yaw
)
{
dock_finished = ros::Time::now().toSec();
docking_duration = dock_finished - dock_started;
ROS_INFO_STREAM("docking No. " << docking_counter << " is finished in "<< docking_duration <<" sec, moving to new Random Pose\n");
keepMoving = false;
GenerateRandomVal();
docking_counter ++;
speed_reducer_X = 1;
speed_reducer_Y = 1;
speed_reducer_theta = 1;
//Go2RandomPose = true;
// to make sure that y & theta are within the threshold...
} else if (abs(RefPose[2] - camPose[2]) <= safety_margin_X)
{
if(
(abs(RefPose[1] - camPose[1]) > Py_eps) ||
(abs(abs(RefPose[3]) - abs(camPose[3])) > A_eps)
)
{
ROS_INFO_STREAM(" delta_X < " << safety_margin_X << " m., Fixing Y or theta. \n ");
speed_reducer_Y = 1;
speed_reducer_theta = 1;
Controller(RefPose[2], RefPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.1);
} else if(
(abs(RefPose[1] - camPose[1]) <= Py_eps) &&
(abs(abs(RefPose[3]) - abs(camPose[3])) <= A_eps)
)
{
ROS_INFO("y & theta fixed successfully, MOVING STRAIGHT AHEAD ... \n");
speed_reducer_X = .08;
Controller(RefPose[2], camPose[2], RefPose[1], RefPose[1], RefPose[3], RefPose[3],.1);
}
}else
{
speed_reducer_X = 1;
Controller(RefPose[2], camPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.1);
}
} else
{
ROS_INFO("---------- MOVING TOWARDS RANDOM POSE ---------\n");
RandomPose(x_new,y_new,theta_new);
}
}
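// PID controller for the three controlled quantities. X currently uses a constant
// approach speed (its PID code is commented out), while Y and yaw use discrete PID
// terms of the form u = Kp*e + Ki*sum(e*dt) + Kd*(e - e_prev)/dt. dt is the sampling
// period passed by the caller (0.1 s in camCB). The resulting velocities are scaled
// by the speed_reducer_* factors and sent to dock().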
void PID4Docking::Controller(double RefX, double MarPoseX, double RefY, double MarPoseY, double RefYAW, double MarPoseYAW, double dt)
{
ROS_INFO_STREAM("--------------------- Controller started ----------------------\n ");
// -----------------X--------------------- //
if(abs(RefX - MarPoseX) > Pz_eps)
{
/*// e(t) = setpoint - actual value;
curr_errorX = RefX - MarPoseX;
// Integrated error
int_errorX += curr_errorX * dt;
// differentiation
diffX = ((curr_errorX - prev_errorX) / dt);
// scaling
p_termX = Pos_Px * curr_errorX;
i_termX = Pos_Ix * int_errorX;
d_termX = Pos_Dx * diffX;
// control signal
control_signalX = p_termX + i_termX + d_termX;
prev_errorX = curr_errorX;*/
control_signalX = speed_reducer_X * 0.1;
} else
{
control_signalX = 5e-5;
}
// -----------------Y--------------------- //
if((RefY - MarPoseY) < -Py_eps || (RefY - MarPoseY) > Py_eps)
{
// e(t) = setpoint - actual value;
curr_errorY = RefY - MarPoseY;
// Integrated error
int_errorY += curr_errorY * dt;
/*
// -- windup guard --
if (int_error < )
{}
else if ()
{}*/
// differentiation
diffY = ((curr_errorY - prev_errorY) / dt);
// scaling
p_termY = Kp_y * curr_errorY;
i_termY = Ki_y * int_errorY;
d_termY = Kd_y * diffY;
ROS_INFO_STREAM("pY = " << p_termY << ", iY = " << i_termY << " dY = " << d_termY<< " \n");
// control signal
control_signalY = p_termY + i_termY + d_termY;
// robot & marker coordinates conversion
control_signalY = - speed_reducer_Y * control_signalY;
prev_errorY = curr_errorY;
} else if ((RefY - MarPoseY) <= Py_eps && (RefY - MarPoseY) >= -Py_eps)
{
control_signalY = 0;
}
// -------------------YAW--------------------------//
if(abs(abs(RefYAW) - abs(MarPoseYAW)) > A_eps)
{
// e(t) = setpoint - actual value;
curr_errorYAW = abs(RefYAW) - abs(MarPoseYAW);
// Integrated error
int_errorYAW += curr_errorYAW * dt;
// differentiation
diffYAW = ((curr_errorYAW - prev_errorYAW) / dt);
//ROS_INFO_STREAM(" adjusting orientation! \n");
// scaling
p_termYAW = Kp_theta * curr_errorYAW;
i_termYAW = Ki_theta * int_errorYAW;
d_termYAW = Kd_theta * diffYAW;
ROS_INFO_STREAM("p_theta = " << p_termYAW << ", i_theta = " << i_termYAW << " d_theta = " << d_termYAW << " \n");
// control signal
control_signalYAW = p_termYAW + i_termYAW + d_termYAW;
if(MarPoseYAW > 0 && abs(RefYAW) >= abs(MarPoseYAW))
{
ROS_INFO("abs(RefYAW) >= abs(orientation) && orientation > 0 => CCW rotation\n"); // correct
control_signalYAW = speed_reducer_theta * control_signalYAW;
} else if (MarPoseYAW > 0 && abs(RefYAW) < abs(MarPoseYAW))
{
ROS_INFO("abs(RefYAW) < abs(orientation) && orientation > 0 => CCW rotation\n"); // correct ?
control_signalYAW = - speed_reducer_theta * control_signalYAW;
} else if (MarPoseYAW < 0 && abs(RefYAW) >= abs(MarPoseYAW))
{
ROS_INFO("abs(RefYAW) >= abs(orientation) && orientation < 0 => CW rotation \n"); // correct
control_signalYAW = - speed_reducer_theta * control_signalYAW;
} else if (MarPoseYAW < 0 && abs(RefYAW) < abs(MarPoseYAW))
{
ROS_INFO("abs(RefYAW) < abs(orientation) && orientation < 0 => CCW rotation \n"); // correct ?
control_signalYAW = - speed_reducer_theta * control_signalYAW;
} else
{
ROS_INFO("New Condition should be added! \n");
keepMoving = false;
}
// save the current error as the previous one
// for the next iteration.
prev_errorYAW = curr_errorYAW;
} else
{
control_signalYAW = 0;
}
//ROS_INFO_STREAM("Control signalX = " << control_signalX <<" . \n");
//ROS_INFO_STREAM("Control signalY = " << control_signalY << ". \n");
//ROS_INFO_STREAM("Control signalYAW = "<< control_signalYAW <<". \n");
ROS_INFO_STREAM(" ---------------------- Controller ended ----------------------- \n");
dock(control_signalX, control_signalY, control_signalYAW);
}
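// Publish the commanded velocities (linear x/y and angular z) to the base controller.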
void PID4Docking::dock(double VelX, double VelY, double omegaZ)
{
ROS_INFO(".... REAL .... !");
geometry_msgs::Twist msg;
msg.linear.x = VelX;
msg.linear.y = VelY;
msg.angular.z = omegaZ;
commandPub.publish(msg);
ROS_INFO_STREAM(" Current speed of robot: \n" << msg << ".\n");
}
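// Alternative command path that publishes estimated velocities; when all estimates
// are zero a tiny linear velocity is substituted. Currently only referenced from
// commented-out code in this file.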
void PID4Docking::move2docking(double VelX_est, double VelY_est, double omegaZ_est)
{
ROS_INFO_STREAM(" Zmar = " << camPose[2] << " m. \n");
ROS_INFO_STREAM(" Zref = " << RefPose[2] << " m. \n");
ROS_INFO_STREAM(" Ymar = " << camPose[1] << " m. \n");
ROS_INFO_STREAM(" Yref = " << RefPose[1] << " m. \n");
ROS_INFO_STREAM(" rollmar = " << camPose[3] << " rad. \n");
ROS_INFO_STREAM(" rollref = " << RefPose[3] << " rad. \n");
ROS_INFO(".... ESTIMATION .... !\n");
geometry_msgs::Twist msg;
if (VelX_est == 0 && VelY_est == 0 && omegaZ_est == 0)
{
VelX_est = .0001;
VelY_est = .0001;
omegaZ_est = 0;
}
msg.linear.x = VelX_est;
msg.linear.y = VelY_est;
msg.angular.z = omegaZ_est;
commandPub.publish(msg);
ROS_INFO_STREAM(" Current ESTIMATED speed of robot: \n" << msg << ".\n");
}
// ---- Controller part -------- END ------
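// Sample new PID gains for the Y controller and a new target pose for the next
// docking trial (the x target is currently fixed at 1.1 m; y and theta are drawn
// uniformly from [y_dwn, y_up] and [theta_dwn, theta_up] for later Q-learning use).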
void PID4Docking::GenerateRandomVal()
{
// ---------------- PID gains ------------------
Kp_y = ((double) rand() / (RAND_MAX)) * (.76 - .4) + .4; // .4 < Kp < .76
Ki_y = ((double) rand() / (RAND_MAX)) * .006; // 0 < Ki < .006
Kd_y = ((double) rand() / (RAND_MAX)) * .02; // 0 < Kd < .02
// ------------------ Generating Random Pose ------------------
//x_new = ((double) rand() / (RAND_MAX)) * (1.1 - x_rand_SM) + x_rand_SM;
x_new = 1.1;
y_new = ((double) rand() / (RAND_MAX)) * (y_up - y_dwn) + y_dwn; // will be used for Q_Learning
theta_new = ((double) rand() / (RAND_MAX)) * (theta_up - theta_dwn) + theta_dwn; // will be used for Q_Learning
}
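// Undocking / repositioning state machine: first move backward (or forward) until
// the X error is within x_thresh_undock, then correct Y and theta with small lateral
// and angular velocities until the robot reaches the requested random pose, at which
// point Go2RandomPose is cleared and the next docking trial can start.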
void PID4Docking::RandomPose(double X_rand, double Y_rand, double theta_rand)
{
ROS_INFO_STREAM(" Xr = " << X_rand << ", Yr = " << Y_rand << ", Thetar = " << theta_rand << " rad ~ " << theta_rand * (180/CV_PI) << " deg\n");
ROS_INFO_STREAM(" -------------------------------------------------------------- \n");
double vel_x = 0, vel_y = 0, omega_z = 0; // default to zero so axes not handled below command no motion
geometry_msgs::Twist msg_new;
// CCW ==>> w > 0 , CW ==>> w < 0
// Leaving docking station, moving towards x-axis SM
if (X_rand - camPose[2] > x_thresh_undock)
{
ROS_INFO_STREAM(" Adjusting X, moving backward ... \n");
vel_x = -.04;
} else if (X_rand - camPose[2] < -x_thresh_undock)
{
ROS_INFO_STREAM(" Adjusting X, moving forward ... \n");
vel_x = .04;
}else if (abs(X_rand - camPose[2]) <= x_thresh_undock)
{
ROS_INFO(" X-axis is fixed, adjusting Y & theta - axes ... \n");
if ((camPose[1] - Y_rand > y_thresh_undock) && (theta_rand > 0))
{
if(abs(abs(camPose[3]) - abs(theta_rand)) > theta_thresh_undock)
{
ROS_INFO("moving 2 left side & CW rot. \n");
vel_y = .03;
omega_z = -.01;
} else
{
ROS_INFO("CW rot. is fixed, only moving 2 left side ...\n");
vel_y = .03;
}
} else if ((camPose[1] - Y_rand < -y_thresh_undock) && (theta_rand < 0))
{
if(abs(abs(camPose[3]) - abs(theta_rand)) > theta_thresh_undock)
{
ROS_INFO("moving 2 right side & CCW rot. \n");
vel_y = -.03;
omega_z = .01;
}else
{
ROS_INFO("CCW rot. is fixed, only moving 2 right side ... \n");
vel_y = -.03;
}
}else if (abs(camPose[1] - Y_rand) <= y_thresh_undock)
{
ROS_INFO(" Y-axis is fixed, adjusting theta-axis ... ! \n");
if (abs(abs(camPose[3]) - abs(theta_rand)) <= theta_thresh_undock)
{
ROS_INFO(" Robot is in a new random Pose! \n");
//keepMoving = false;
Go2RandomPose = false;
} else
{
if(theta_rand > 0)
{
ROS_INFO_STREAM(" theta > 0 => Rob rot. is CW(-) \n");
omega_z = -.01;
} else
{
ROS_INFO_STREAM(" theta < 0 => Rob rot. is CCW(+) \n");
omega_z = .01;
}
}
} else if ((camPose[1] - Y_rand > y_thresh_undock) && (theta_rand < 0))
{
if(abs(abs(camPose[3]) - abs(theta_rand)) > theta_thresh_undock)
{
ROS_INFO("moving 2 left side & CCW rot., chance of losing marker \n");
vel_y = .03;
omega_z = .01;
} else
{
ROS_INFO("CCW rot. is fixed, only moving 2 left side ... \n");
vel_y = .03;
}
} else if ((camPose[1] - Y_rand < -y_thresh_undock) && (theta_rand > 0))
{
if(abs(abs(camPose[3]) - abs(theta_rand)) > theta_thresh_undock)
{
ROS_INFO("moving 2 right side & CW rot., chance of losing marker \n");
vel_y = -.03;
omega_z = -.01;
} else
{
ROS_INFO("CW rot. is fixed, only moving 2 right side ... \n");
vel_y = -.03;
}
} else
{
ROS_INFO(" New condition should be added! \n");
keepMoving = false;
}
}
msg_new.linear.x = vel_x;
msg_new.linear.y = vel_y;
msg_new.angular.z = omega_z;
commandPub.publish(msg_new);
}