diff --git a/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/CXX.includecache b/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/CXX.includecache
index 9d5c4b4f54e39af0a4c6395865fab0378e5c7641..53b38064fa981f6926abff84da47d9e3fab95cba 100644
--- a/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/CXX.includecache
+++ b/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/CXX.includecache
@@ -77,6 +77,14 @@ math.h
 -
 unistd.h
 -
+cstdlib
+-
+stdio.h
+-
+stdlib.h
+-
+time.h
+-
 aruco/aruco.h
 -
 aruco/cvdrawingutils.h
diff --git a/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/src/VisionControl.cpp.o b/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/src/VisionControl.cpp.o
index 43c9b17bac21609f92a7a1ab97bc00810fafe819..7efb3d17610d7be99fd096bd5a6375caa4075901 100644
Binary files a/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/src/VisionControl.cpp.o and b/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/src/VisionControl.cpp.o differ
diff --git a/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/src/run_vis_cont.cpp.o b/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/src/run_vis_cont.cpp.o
index d7c41697c316a8dcc0b0bf2b5ea8c3c1f512581a..b8f39251084244c1fc7416e7445031982fe36083 100644
Binary files a/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/src/run_vis_cont.cpp.o and b/MobileRobot/AugReaMarker/CamMark/build/camtomar/CMakeFiles/camtomar.dir/src/run_vis_cont.cpp.o differ
diff --git a/MobileRobot/AugReaMarker/CamMark/camtomar/include/VisionControl.h b/MobileRobot/AugReaMarker/CamMark/camtomar/include/VisionControl.h
index e684e703ed84da9be8a5ef25aa95c4a50046a586..20512a5a867f304c32ee97ac8b01af91f79bedbc 100644
--- a/MobileRobot/AugReaMarker/CamMark/camtomar/include/VisionControl.h
+++ b/MobileRobot/AugReaMarker/CamMark/camtomar/include/VisionControl.h
@@ -36,20 +36,18 @@ using namespace cv;
 using namespace aruco;
 using namespace std;
 
-class ImageConverter
+class VisCont
 {
 private:
 
-        ros::NodeHandle nh_;
-        image_transport::ImageTransport it_;
-        image_transport::Subscriber image_sub_;
-        ros::Publisher commandPub;
-        
-        ros::Subscriber Sub;
-	ros::Subscriber marSub;
-	
+        ros::NodeHandle node_vis;
+	ros::NodeHandle node_cont;	
+
+	ros::Publisher commandPub;
+       
+        ros::Subscriber MarPose_Sub;
         ros::Publisher MarPose_pub;
-        ros::Publisher CamPose_pub;
+
         Mat img;
                 
         string TheInputVideo;
@@ -85,7 +83,6 @@ private:
         static int ThePyrDownLevel;
         static float TheMarkerSize;
         
-        void ContStart();
         static bool update_images,found,Go2RandomPose;
         
         bool readArguments ( int argc,char **argv );
@@ -101,9 +98,9 @@ private:
         
 public:
 
-  ImageConverter();
+  VisCont();
     
-  ~ImageConverter();
+  ~VisCont();
   
   Mat getCurrentImage();
 
@@ -122,6 +119,8 @@ public:
   void GenerateRandomVal();
   void RandomPose(double X_rand, double Y_rand, double theta_rand);
 
+        void ContStart();
+
          static double Kp_x,Ki_x,Kd_x;
 	 
 	static double Kp_theta,Ki_theta,Kd_theta;
@@ -198,6 +197,8 @@ public:
 
 	static double docking_counter;
 	static double dock_started,dock_finished,docking_duration;
+	
+	static double TT_S,TT_E;
 
         static double zeroMin,zeroMax;
 	static double Py_eps,Pz_eps,A_eps;
diff --git a/MobileRobot/AugReaMarker/CamMark/camtomar/include/VisionControl.h~ b/MobileRobot/AugReaMarker/CamMark/camtomar/include/VisionControl.h~
index c462fe231036bb552297d535bf8f615b56c316a0..e684e703ed84da9be8a5ef25aa95c4a50046a586 100644
--- a/MobileRobot/AugReaMarker/CamMark/camtomar/include/VisionControl.h~
+++ b/MobileRobot/AugReaMarker/CamMark/camtomar/include/VisionControl.h~
@@ -175,8 +175,8 @@ public:
 	static double alpha;
 	static double R_step;
 	
-	static double R[5][5];
-	static double Q[5][5];
+	static double R[7][7];
+	static double Q[7][7];
 
 	static double Q_curr_state,Q_next_state;
 	static double reward;
diff --git a/MobileRobot/AugReaMarker/CamMark/camtomar/src/VisionControl.cpp b/MobileRobot/AugReaMarker/CamMark/camtomar/src/VisionControl.cpp
index 55dde4fd7b05f4f63d978e727d53c3230035cfcb..59dd582ce29efafff9311b3dff96006d49f5a02a 100644
--- a/MobileRobot/AugReaMarker/CamMark/camtomar/src/VisionControl.cpp
+++ b/MobileRobot/AugReaMarker/CamMark/camtomar/src/VisionControl.cpp
@@ -46,163 +46,160 @@ using namespace cv;
 using namespace aruco;
 using namespace std;
 
-float ImageConverter::TheMarkerSize = .1; // Default marker size
+float VisCont::TheMarkerSize = .1; // Default marker size
 
-int ImageConverter::Thresh1_min = 10;
-int ImageConverter::Thresh2_min = 10;
+int VisCont::Thresh1_min = 10;
+int VisCont::Thresh2_min = 10;
 
-int ImageConverter::Thresh1_max = 300;
-int ImageConverter::Thresh2_max = 300;
+int VisCont::Thresh1_max = 300;
+int VisCont::Thresh2_max = 300;
 
-const string ImageConverter::trackbarWindowName = "Trackbars";
+const string VisCont::trackbarWindowName = "Trackbars";
 
-int ImageConverter::ThePyrDownLevel = 0;
+int VisCont::ThePyrDownLevel = 0;
 
-bool ImageConverter::update_images = true;
+bool VisCont::update_images = true;
 
-bool ImageConverter::found = false;
-bool ImageConverter::Go2RandomPose = false;
+bool VisCont::found = false;
+bool VisCont::Go2RandomPose = false;
 
-int ImageConverter::ThresParam1 = 0;
-int ImageConverter::ThresParam2 = 0;
+int VisCont::ThresParam1 = 0;
+int VisCont::ThresParam2 = 0;
 
 // ---- CONTROLL PARAMETERS ------ //
-double ImageConverter::prev_errorX, ImageConverter::curr_errorX, ImageConverter::int_errorX, ImageConverter::diffX;
+double VisCont::prev_errorX, VisCont::curr_errorX, VisCont::int_errorX, VisCont::diffX;
 
-double ImageConverter::p_termX = 0;
-double ImageConverter::i_termX = 0;
-double ImageConverter::d_termX = 0;
+double VisCont::p_termX = 0;
+double VisCont::i_termX = 0;
+double VisCont::d_termX = 0;
 
-double ImageConverter::prev_errorY, ImageConverter::curr_errorY, ImageConverter::int_errorY, ImageConverter::diffY;
+double VisCont::prev_errorY, VisCont::curr_errorY, VisCont::int_errorY, VisCont::diffY;
 
-double ImageConverter::p_termY = 0;
-double ImageConverter::i_termY = 0;
-double ImageConverter::d_termY = 0;
+double VisCont::p_termY = 0;
+double VisCont::i_termY = 0;
+double VisCont::d_termY = 0;
 
-double ImageConverter::prev_errorYAW,ImageConverter::curr_errorYAW,ImageConverter::int_errorYAW,ImageConverter::diffYAW;
+double VisCont::prev_errorYAW,VisCont::curr_errorYAW,VisCont::int_errorYAW,VisCont::diffYAW;
 
-double ImageConverter::p_termYAW = 0;
-double ImageConverter::i_termYAW = 0;
-double ImageConverter::d_termYAW = 0;
+double VisCont::p_termYAW = 0;
+double VisCont::i_termYAW = 0;
+double VisCont::d_termYAW = 0;
 
-double ImageConverter::control_signalX, ImageConverter::control_signalY, ImageConverter::control_signalYAW;
+double VisCont::control_signalX, VisCont::control_signalY, VisCont::control_signalYAW;
 // ---- CONTROLL PARAMETERS ------ //
 
 
-// ---- Ref. Values ---- //
-const double ImageConverter::RefPose[4] = {-.0957, 0.00638817 /* Y_ref*/ ,  0.308857 /* X_ref*/ , 0.17 /* theta_ref*/}; 
+// ---- Ref. Values for Android Camera ---- //
+//const double VisCont::RefPose[4] = {-.0957, 0.00638817 /* Y_ref*/ ,  0.308857 /* X_ref*/ , 0.17 /* theta_ref*/}; 
+
+
+// ---- Ref. Values for Logitech Camera ---- //
+const double VisCont::RefPose[4] = {-.0957, -0.0251325 /* Y_ref*/ ,  0.267882 /* X_ref*/ , -0.608046 /* theta_ref*/}; 
+
+
 
 // ----------------  PID gains---------------- //
-double ImageConverter::Kp_y = .4;
-double ImageConverter::Ki_y = .0042;
-double ImageConverter::Kd_y = 0;
+double VisCont::Kp_y = .4;
+double VisCont::Ki_y = .0042;
+double VisCont::Kd_y = 0;
 
-double ImageConverter::Kp_theta = .15 * Kp_y;
-double ImageConverter::Ki_theta = (8/14) * Ki_y;
-double ImageConverter::Kd_theta = .15 * Kd_y;
+double VisCont::Kp_theta = .15 * Kp_y;
+double VisCont::Ki_theta = (8/13) * Ki_y;
+double VisCont::Kd_theta = .15 * Kd_y;
 // ----------------  PID gains---------------- //
 
+
+double VisCont::TT_S,VisCont::TT_E;
 // random pose initialized
-const double ImageConverter::y_up = .3; 
-const double ImageConverter::y_dwn = -.1; 
-const double ImageConverter::theta_dwn = -.7 /*-RefPose[3]*/; 
-const double ImageConverter::theta_up = .7 /*RefPose[3]*/;
+const double VisCont::y_up = .3; 
+const double VisCont::y_dwn = -.1; 
+const double VisCont::theta_dwn = -.7 /*-RefPose[3]*/; 
+const double VisCont::theta_up = .7 /*RefPose[3]*/;
 
-double ImageConverter::x_new,ImageConverter::y_new,ImageConverter::theta_new;
+double VisCont::x_new,VisCont::y_new,VisCont::theta_new;
 
 // ----------------  Q_LEARNING PARAMETERS ---------------- //
 
-double ImageConverter::gamma = .8;
-double ImageConverter::alpha = .1;
-double ImageConverter::R_step = 200;
+double VisCont::gamma = .8;
+double VisCont::alpha = .1;
+double VisCont::R_step = 200;
 
-const int ImageConverter::row = 5/*(y_up - y_dwn) * 10000*/;
-const int ImageConverter::col = 5/*(theta_up - theta_dwn) * 1000*/;
+const int VisCont::row = 5/*(y_up - y_dwn) * 10000*/;
+const int VisCont::col = 5/*(theta_up - theta_dwn) * 1000*/;
 
-double ImageConverter::Q[7][7] = {0};
-double ImageConverter::R[7][7] = {0};
+double VisCont::Q[7][7] = {0};
+double VisCont::R[7][7] = {0};
 
-double ImageConverter::Q_next_state;
+double VisCont::Q_next_state;
 
-int ImageConverter::counter = 1;
-int ImageConverter::Time_Reward;
-double ImageConverter::sample;
+int VisCont::counter = 1;
+int VisCont::Time_Reward;
+double VisCont::sample;
 
-int ImageConverter::R_indx_i = 0;
-int ImageConverter::R_indx_j = 2;
+int VisCont::R_indx_i = 0;
+int VisCont::R_indx_j = 2;
 
-int ImageConverter::P_indx_i = 1;
-int ImageConverter::P_indx_j = 1;
+int VisCont::P_indx_i = 1;
+int VisCont::P_indx_j = 1;
 // ----------------  Q_LEARNING PARAMETERS ---------------- //
 
-double ImageConverter::dock_started,ImageConverter::dock_finished,ImageConverter::docking_duration;
-double ImageConverter::zeroMax = .0000000000000000001;
-double ImageConverter::zeroMin = -.0000000000000000001;
+double VisCont::dock_started,VisCont::dock_finished,VisCont::docking_duration;
+double VisCont::zeroMax = .0000000000000000001;
+double VisCont::zeroMin = -.0000000000000000001;
 
-double ImageConverter::speed_reducer = 1;
+double VisCont::speed_reducer = 1;
 
 // ------ offsets X, Y, theta for Docking ---------
-double ImageConverter::Pz_eps = .001;
-double ImageConverter::Py_eps = .002;
-double ImageConverter::A_eps = (CV_PI/180) * 3; // 1 deg.
+double VisCont::Pz_eps = .001;
+double VisCont::Py_eps = .002;
+double VisCont::A_eps = (CV_PI/180) * 3; // 3 deg.
 
-double ImageConverter::safety_margin_X = .12; // safety margin X axis in docking process : 12 cm
+double VisCont::safety_margin_X = .15; // safety margin X axis in docking process : 15 cm
 
 // ------ offsets X, Y, theta for Undocking ---------
-double ImageConverter::x_thresh_undock = .02;
-double ImageConverter::y_thresh_undock = .015;
-double ImageConverter::theta_thresh_undock = (CV_PI/180) * 3;
+double VisCont::x_thresh_undock = .02;
+double VisCont::y_thresh_undock = .015;
+double VisCont::theta_thresh_undock = (CV_PI/180) * 3;
 
 
-double ImageConverter::docking_counter = 1;
- ImageConverter::ImageConverter() : 
-                                it_(nh_)
-  {
-    // subscribe to input video feed and publish output video feed
-    //image_sub_ = it_.subscribe("/ar_follow/image", 1, &ImageConverter::imageCb, this);
+double VisCont::docking_counter = 1;
+
+VisCont::VisCont()
+{	
+	keepMoving = true;    
     
 	/* initialize random seed: */
-  	srand (time(NULL));
+  	//srand (time(NULL));
 
-/*// Ref. Values
-RefPose[0] = -.0957;
-
-RefPose[1] = -0.0193378;
-RefPose[2] = 0.306532;
-RefPose[3] = 0.702702;*/
 
 	R[R_indx_i][R_indx_j] = 50; // reward
         R[P_indx_i][P_indx_j] = -60; // punishment
 
-x_rand_SM = RefPose[2] + .55; // 55 cm spreading domain in the x-axis while moving towards the random pose
+	x_rand_SM = RefPose[2] + .55; // 55 cm spreading domain in the x-axis while moving towards the random pose
 
-     //marSub = nh_.subscribe("/Marker_pose",1,&ImageConverter::marCB,this);
-     
-    // Publish pose message and buffer up to 100 messages
-    MarPose_pub = nh_.advertise<geometry_msgs::PoseStamped>("/marker_pose", 100);
+    	// Publish pose message and buffer up to 100 messages
+    	MarPose_pub = node_vis.advertise<geometry_msgs::PoseStamped>("/marker_pose", 100);
+    	commandPub = node_cont.advertise<geometry_msgs::Twist>("/base_controller/command",1);
     
-    keepMoving = true;
-    commandPub = nh_.advertise<geometry_msgs::Twist>("/base_controller/command",1);
-    Sub = nh_.subscribe("/marker_pose",1,&ImageConverter::camCB,this);
-  }
+    	MarPose_Sub = node_vis.subscribe("/marker_pose",1,&VisCont::camCB,this);
+}
 
-  ImageConverter::~ImageConverter()
+  VisCont::~VisCont()
   {
-   //nh_.shutdown();
-   //destroyWindow("Preview");
+	
   }
-  Mat ImageConverter::getCurrentImage() 
+  Mat VisCont::getCurrentImage() 
   {
         return img;
   }
 
-void ImageConverter::cvTackBarEvents(int value,void* ptr)
+void VisCont::cvTackBarEvents(int value,void* ptr)
 {
-    ImageConverter* ic = (ImageConverter*)(ptr);
+    VisCont* ic = (VisCont*)(ptr);
     ic-> myhandler(value);
 }
 
-void ImageConverter::myhandler(int value)
+void VisCont::myhandler(int value)
 {
         if (Thresh1_min<3) Thresh1_min=3;
     
@@ -225,17 +222,17 @@ void ImageConverter::myhandler(int value)
         TheMarkers[i].draw(TheInputImageCopy,Scalar(205,0,0),1);
     }
 	
-    imshow("INPUT IMAGE",TheInputImageCopy);
-    imshow("THRESHOLD IMAGE",MDetector.getThresholdedImage());
+    //imshow("INPUT IMAGE",TheInputImageCopy);
+    //imshow("THRESHOLD IMAGE",MDetector.getThresholdedImage());
 }
-void ImageConverter::createTrackbars()
+void VisCont::createTrackbars()
 {    
 	namedWindow(trackbarWindowName, 0);
 	createTrackbar("ThresParam 1", trackbarWindowName, &Thresh1_min, Thresh1_max, cvTackBarEvents, this);
 	createTrackbar("ThresParam 2", trackbarWindowName, &Thresh2_min, Thresh2_max, cvTackBarEvents, this);
 	
 }
-void ImageConverter::imageCb(const sensor_msgs::ImageConstPtr& msg)
+void VisCont::imageCb(const sensor_msgs::ImageConstPtr& msg)
 {
     cv_bridge::CvImagePtr cv_ptr;
     try
@@ -250,7 +247,7 @@ void ImageConverter::imageCb(const sensor_msgs::ImageConstPtr& msg)
     img = cv_ptr->image;
     
 }
-bool ImageConverter::readArguments ( int argc,char **argv )
+bool VisCont::readArguments ( int argc,char **argv )
 {
     if (argc<2) {
         cerr<< "Invalid number of arguments!\n" <<endl;
@@ -267,7 +264,7 @@ bool ImageConverter::readArguments ( int argc,char **argv )
     return true;
 }
 
-void ImageConverter::ProgStart(int argc,char** argv)
+void VisCont::ProgStart(int argc,char** argv)
 {
 	// Show images, press "SPACE" to diable image
         // rendering to save CPU time
@@ -275,23 +272,23 @@ void ImageConverter::ProgStart(int argc,char** argv)
 	if (readArguments(argc,argv)==false)
 	{
 		cerr<<"check the authenticity of your file again!"<<endl;
-		nh_.shutdown();
+		node_vis.shutdown();
 	}
-	createTrackbars();
+	//createTrackbars();
 	
 	// IP address for raw3_lund    
         const std::string vsa = "http://192.168.0.101:8080/video?x.mjpeg";
-
         // -- publishing video stream with Android Camera--
-        TheVideoCapturer.open(vsa);
+        
+	//TheVideoCapturer.open(vsa);
 
-	//TheVideoCapturer.open(0);
+	TheVideoCapturer.open(0);
 	
 	// Check video is open
 	if (!TheVideoCapturer.isOpened())
 	{
 		cerr<<"Could not open video!!"<<endl;
-		nh_.shutdown();
+		node_vis.shutdown();
 	}
 	dock_started = ros::Time::now().toSec();
 	// Read first image to get the dimensions
@@ -313,56 +310,60 @@ void ImageConverter::ProgStart(int argc,char** argv)
 	
 	char key=0;
 	int index=0;
+
+	//ros::Rate rate(10);
 	// Capture until press ESC or until the end of the video
-	while ((key != 'x') && (key!=27) && TheVideoCapturer.grab() && ros::ok() && keepMoving){
-                ros::spinOnce();
+	while ((key != 'x') && (key!=27) && TheVideoCapturer.grab() && node_vis.ok() && keepMoving)
+	{
+		TT_S = ros::Time::now().toSec();
 
-		if (TheVideoCapturer.retrieve(TheInputImage))
-		{
-                        // Detection of markers in the image passed
-			MDetector.detect(TheInputImage,TheMarkers,TheCameraParameters,TheMarkerSize);
-			
-                        TheInputImage.copyTo(TheInputImageCopy);
+	/* --- For the purpose of showing the time ---
+	Mat frame;
+        TheVideoCapturer >> frame; // get a new frame from camera
+	imshow("video stream",frame);
+	waitKey(30); // 30 ms */
 
-			geometry_msgs::PoseStamped msg;
 
-			float x_t, y_t, z_t;
-			
-			float roll,yaw,pitch;
-			float rollE,yawE,pitchE;
-			
-			if (TheMarkers.size()>0)
-			{
-			        found = true;
-			        //ROS_INFO("MARKER FOUND!!! ... \n");
-			}else
-			{
-			        found = false;
-				keepMoving = false;
-				ROS_INFO_STREAM("Marker is lost, successful docking trials : " << (docking_counter - 1) << "\n");					
-				//RandomPose(x_new,y_new,theta_new);			
-				//move2docking(-control_signalX, -control_signalY, control_signalYAW);
-			}
+
+
+		
+if (TheVideoCapturer.retrieve(TheInputImage))
+{
+
+// Detection of markers in the image passed
+		MDetector.detect(TheInputImage,TheMarkers,TheCameraParameters,TheMarkerSize);
+		TheInputImage.copyTo(TheInputImageCopy);
+		geometry_msgs::PoseStamped msg;
+                
+                float x_t, y_t, z_t;
+		float roll,yaw,pitch;
+		float rollE,yawE,pitchE;
 			
-			if (ros::ok() && found)
-			{
-			        y_t = -TheMarkers[0].Tvec.at<Vec3f>(0,0)[0];
-				x_t = TheMarkers[0].Tvec.at<Vec3f>(0,0)[1];
-				z_t = TheMarkers[0].Tvec.at<Vec3f>(0,0)[2];
+		if (TheMarkers.size()>0)
+		{
+		        found = true;
+		        //ROS_INFO("MARKER FOUND!!! ... \n");
+		}else
+		{
+		        found = false;
+			keepMoving = false;
+			ROS_INFO_STREAM("Marker is lost, successful docking trials : " << (docking_counter - 1) << "\n");					
+		        //RandomPose(x_new,y_new,theta_new);			
+			//move2docking(-control_signalX, -control_signalY, control_signalYAW);
+		}
 			
-				//printf("\n X = %4.2f Y = %4.2f Z = %4.2f\n",x_t,y_t,z_t);
-				
-				cv::Mat R(3,3,cv::DataType<float>::type);
-				// You need to apply cv::Rodrigues() in order to obatain angles wrt to camera coords
-				cv::Rodrigues(TheMarkers[0].Rvec,R);
-                                
-                                
-                                //cout << "\nThe Marker =  " << TheMarkers[0] << endl;
-                                
-                                //cout << "\nR =  " << R << endl;
-                                
+		if (node_vis.ok() && found)
+		{
+		        y_t = -TheMarkers[0].Tvec.at<Vec3f>(0,0)[0];
+			x_t = TheMarkers[0].Tvec.at<Vec3f>(0,0)[1];
+			z_t = TheMarkers[0].Tvec.at<Vec3f>(0,0)[2];
+		
+			cv::Mat R(3,3,cv::DataType<float>::type);
+
+			// You need to apply cv::Rodrigues() in order to obtain angles wrt to camera coords
+			cv::Rodrigues(TheMarkers[0].Rvec,R);
+
                                 // ----------- Euler angle -----------------//
-                                
                                 float roll1 = -asin(R.at<float>(2,0));
                                 float roll2 = CV_PI - roll1;
 
@@ -385,39 +386,27 @@ void ImageConverter::ProgStart(int argc,char** argv)
                                         rollE = roll2;
                                         pitchE = pitch2;
 		                        yawE = yaw2;
-                                }                   
-                                
+                                }
                                 // ----------- Euler angle -----------------//
                                 
-				pitch   = -atan2(R.at<float>(2,0), R.at<float>(2,1));
-				yaw     = acos(R.at<float>(2,2));
-				roll    = -atan2(R.at<float>(0,2), R.at<float>(1,2));
+			pitch   = -atan2(R.at<float>(2,0), R.at<float>(2,1));
+			yaw     = acos(R.at<float>(2,2));
+			roll    = -atan2(R.at<float>(0,2), R.at<float>(1,2));
 			
 			// Marker rotation should be initially zero (just for convenience)
 			float p_off = CV_PI;
 			float r_off = CV_PI/2;
 			float y_off = CV_PI/2;
-
-			// See: http://en.wikipedia.org/wiki/Flight_dynamics
-			
-			        
-			        ros::Time time = ros::Time::now();
-			        // Camera Frame ---
-                                transformCAM.setOrigin( tf::Vector3(.25, 0, .5) );
-                                transformCAM.setRotation( tf::Quaternion(0, 0, 0 , 1) );
-                                CAMbr.sendTransform(tf::StampedTransform(transformCAM, 
-                                                                        (ros::Time::now()/* - ros::Duration(35)*/),
-                                                                        "/base_link",
-                                                                        "/camera"));
-                                       
-                                       
-                                // Publish TF message including the offsets
-				tf::Quaternion quat = tf::createQuaternionFromRPY(roll-r_off, pitch+p_off, yaw-y_off);
-				broadcaster.sendTransform(tf::StampedTransform(tf::Transform(quat, 
-				                                               tf::Vector3(x_t, y_t, z_t)), 
-				                                               (ros::Time::now()/* - ros::Duration(35)*/),
-				                                                "/camera", 
-				                                                "/marker"));
+                        
+                        // Camera Frame ---
+                        transformCAM.setOrigin( tf::Vector3(.25, 0, .5) );
+                        transformCAM.setRotation( tf::Quaternion(0, 0, 0 , 1) );
+                        
+                        CAMbr.sendTransform(tf::StampedTransform(transformCAM,ros::Time::now(),"/base_link","/camera"));
+                        
+                        // Publish TF message including the offsets
+			tf::Quaternion quat = tf::createQuaternionFromRPY(roll-r_off, pitch+p_off, yaw-y_off);
+			broadcaster.sendTransform(tf::StampedTransform(tf::Transform(quat,tf::Vector3(x_t, y_t, z_t)),ros::Time::now(),"/camera","/marker"));
 				
 				// Now publish the pose message, remember the offsets
 				msg.header.frame_id = "/camera";
@@ -429,15 +418,15 @@ void ImageConverter::ProgStart(int argc,char** argv)
 				msg.pose.orientation = p_quat;
 				
 				MarPose_pub.publish(msg);
-				ros::spinOnce();
 			} 
+			
 			/*// Print other rectangles that contains no valid markers
 			 for (unsigned int i=0;i<MDetector.getCandidates().size();i++) 
 			{
 				Marker m( MDetector.getCandidates()[i],10);
 				m.draw(TheInputImageCopy,Scalar(0,255,0),2);
 			}*/
-			
+					
 			for (unsigned int i=0;i<TheMarkers.size();i++)
 				{
 					int currentMarID = TheMarkers[i].id;
@@ -456,39 +445,34 @@ void ImageConverter::ProgStart(int argc,char** argv)
 				imshow("INPUT IMAGE",TheInputImageCopy);
 				imshow("THRESHOLD IMAGE",MDetector.getThresholdedImage());
 			}
-		}else
-	        {
-		      printf("retrieve failed\n");
-		}
+                
+}else
+{
+        printf("retrieve failed\n");
+}
 
-		key=cv::waitKey(1);
+key=cv::waitKey(30);
+// If space is hit, don't render the image.
 
-                // If space is hit, don't render the image.
-		if (key == ' '){
-			update_images = !update_images;
-		}
-	}
+if (key == ' ')
+{
+	update_images = !update_images;
 }
 
-// ---- Controller part ----------- START -------
+		ros::spinOnce();
+		//rate.sleep();
+TT_E = ros::Time::now().toSec();
+ROS_INFO_STREAM("start = "<< TT_S << " , end = " << TT_E << " , duration = " << (TT_E - TT_S) <<" \n");
 
-void ImageConverter::ContStart()
-{
-        ros::Rate rate(100);
-        
-        while (ros::ok() && keepMoving)
-        {
-                ros::spinOnce();
-                rate.sleep();
-        }
+	}
+		
 
 }
-
-void ImageConverter::camCB(const geometry_msgs::PoseStamped::ConstPtr& CamFB) // subscriber
+void VisCont::camCB(const geometry_msgs::PoseStamped::ConstPtr& CamFB) // subscriber
 {
-camPose[0] = CamFB->pose.position.x;
+camPose[0] = CamFB->pose.position.x; // not important!!!
 camPose[1] = CamFB->pose.position.y; // y pose
-camPose[2] = CamFB->pose.position.z;
+camPose[2] = CamFB->pose.position.z; // x_rob
 camPose[3] = CamFB->pose.orientation.x; //  theta orientation
         
         // in Marker coordinate sys. 
@@ -539,7 +523,7 @@ camPose[3] = CamFB->pose.orientation.x; //  theta orientation
 			)
 			{	
 				ROS_INFO_STREAM(" delta_X < " << safety_margin_X << " m. , Fixing Y or theta. \n ");     			
-				Controller(RefPose[2], RefPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.01);
+				Controller(RefPose[2], RefPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.1);
 			} else if(
 				(abs(RefPose[1] - camPose[1]) <= Py_eps) && 
 				(abs(RefPose[3] - abs(camPose[3])) <= A_eps)				
@@ -547,12 +531,12 @@ camPose[3] = CamFB->pose.orientation.x; //  theta orientation
 			{
 				ROS_INFO("y & theta fixed successfully, MOVING STRAIGHT AHEAD ... \n");
 				speed_reducer = .1;
-				Controller(RefPose[2], camPose[2], RefPose[1], RefPose[1], RefPose[3], RefPose[3],.01);
+				Controller(RefPose[2], camPose[2], RefPose[1], RefPose[1], RefPose[3], RefPose[3],.1);
 			}
 		}else
         	{
-                	Controller(RefPose[2], camPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.01);
-			Q_Learning(camPose[1],camPose[3]); // performing Q_Learning...
+                	Controller(RefPose[2], camPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.1);
+			//Q_Learning(camPose[1],camPose[3]); // performing Q_Learning...
 			//print_R();
 
 		}
@@ -563,7 +547,7 @@ camPose[3] = CamFB->pose.orientation.x; //  theta orientation
 	}
 }
 
-void ImageConverter::Controller(double RefX, double MarPoseX, double RefY, double MarPoseY, double RefYAW, double MarPoseYAW, double dt)
+void VisCont::Controller(double RefX, double MarPoseX, double RefY, double MarPoseY, double RefYAW, double MarPoseYAW, double dt)
 {
 	ROS_INFO_STREAM("--------------------- Controller started ----------------------\n "); 
         
@@ -682,7 +666,7 @@ void ImageConverter::Controller(double RefX, double MarPoseX, double RefY, doubl
 	dock(control_signalX, control_signalY, control_signalYAW);
 }
 
-void ImageConverter::dock(double VelX, double VelY, double omegaZ)
+void VisCont::dock(double VelX, double VelY, double omegaZ)
 {
         ROS_INFO(".... REAL .... !");
         geometry_msgs::Twist msg;
@@ -696,7 +680,7 @@ void ImageConverter::dock(double VelX, double VelY, double omegaZ)
 	ROS_INFO_STREAM(" Current speed of robot: \n" << msg << ".\n");
 }
 
-void ImageConverter::move2docking(double VelX_est, double VelY_est, double omegaZ_est)
+void VisCont::move2docking(double VelX_est, double VelY_est, double omegaZ_est)
 {
 	
         ROS_INFO_STREAM(" Zmar = " << camPose[2] << " m. \n");
@@ -728,7 +712,7 @@ void ImageConverter::move2docking(double VelX_est, double VelY_est, double omega
 }
 // ---- Controller part -------- END ------
 
-void ImageConverter::GenerateRandomVal()
+void VisCont::GenerateRandomVal()
 {
 
 	// ---------------- PID gains ------------------
@@ -743,7 +727,7 @@ void ImageConverter::GenerateRandomVal()
 	theta_new = ((double) rand() / (RAND_MAX)) * (theta_up - theta_dwn) + theta_dwn; // will be used for Q_Learning
 }
 
-void ImageConverter::RandomPose(double X_rand, double Y_rand, double theta_rand)
+void VisCont::RandomPose(double X_rand, double Y_rand, double theta_rand)
 {
 	ROS_INFO_STREAM(" Xr = " << X_rand << ", Yr = " << Y_rand << ", Thetar = " << theta_rand << " rad ~ " << theta_rand * (180/CV_PI) << " deg\n");
 	ROS_INFO_STREAM(" -------------------------------------------------------------- \n");
@@ -847,7 +831,7 @@ commandPub.publish(msg_new);
 	
 }
 
-void ImageConverter::Q_Learning(double y, double theta)
+void VisCont::Q_Learning(double y, double theta)
 {
 
 /*	while ( iterations <= it_ ) 
@@ -958,7 +942,7 @@ void ImageConverter::Q_Learning(double y, double theta)
 }
 
 
-void ImageConverter::print_R()
+void VisCont::print_R()
 {
          cout << " R = \n";
         for(int i = 0; i <= (row - 1); i++)
@@ -977,7 +961,7 @@ void ImageConverter::print_R()
 }
 
 
-void ImageConverter::print_Q()
+void VisCont::print_Q()
 {
          cout << " Q = \n";
         for(int i = 0; i <= (row - 1); i++)
@@ -995,3 +979,4 @@ void ImageConverter::print_Q()
         cout << "\n";
 }
 
+
diff --git a/MobileRobot/AugReaMarker/CamMark/camtomar/src/VisionControl.cpp~ b/MobileRobot/AugReaMarker/CamMark/camtomar/src/VisionControl.cpp~
index 53517b4fc5a186534675ce45b39f7d23eb701f32..460125a29663b96bca7c6c5f4b0fdcef5d06bd9d 100644
--- a/MobileRobot/AugReaMarker/CamMark/camtomar/src/VisionControl.cpp~
+++ b/MobileRobot/AugReaMarker/CamMark/camtomar/src/VisionControl.cpp~
@@ -46,165 +46,154 @@ using namespace cv;
 using namespace aruco;
 using namespace std;
 
-float ImageConverter::TheMarkerSize = .1; // Default marker size
+float VisCont::TheMarkerSize = .1; // Default marker size
 
-int ImageConverter::Thresh1_min = 10;
-int ImageConverter::Thresh2_min = 10;
+int VisCont::Thresh1_min = 10;
+int VisCont::Thresh2_min = 10;
 
-int ImageConverter::Thresh1_max = 300;
-int ImageConverter::Thresh2_max = 300;
+int VisCont::Thresh1_max = 300;
+int VisCont::Thresh2_max = 300;
 
-const string ImageConverter::trackbarWindowName = "Trackbars";
+const string VisCont::trackbarWindowName = "Trackbars";
 
-int ImageConverter::ThePyrDownLevel = 0;
+int VisCont::ThePyrDownLevel = 0;
 
-bool ImageConverter::update_images = true;
+bool VisCont::update_images = true;
 
-bool ImageConverter::found = false;
-bool ImageConverter::Go2RandomPose = false;
+bool VisCont::found = false;
+bool VisCont::Go2RandomPose = false;
 
-int ImageConverter::ThresParam1 = 0;
-int ImageConverter::ThresParam2 = 0;
+int VisCont::ThresParam1 = 0;
+int VisCont::ThresParam2 = 0;
 
 // ---- CONTROLL PARAMETERS ------ //
-double ImageConverter::prev_errorX, ImageConverter::curr_errorX, ImageConverter::int_errorX, ImageConverter::diffX;
+double VisCont::prev_errorX, VisCont::curr_errorX, VisCont::int_errorX, VisCont::diffX;
 
-double ImageConverter::p_termX = 0;
-double ImageConverter::i_termX = 0;
-double ImageConverter::d_termX = 0;
+double VisCont::p_termX = 0;
+double VisCont::i_termX = 0;
+double VisCont::d_termX = 0;
 
-double ImageConverter::prev_errorY, ImageConverter::curr_errorY, ImageConverter::int_errorY, ImageConverter::diffY;
+double VisCont::prev_errorY, VisCont::curr_errorY, VisCont::int_errorY, VisCont::diffY;
 
-double ImageConverter::p_termY = 0;
-double ImageConverter::i_termY = 0;
-double ImageConverter::d_termY = 0;
+double VisCont::p_termY = 0;
+double VisCont::i_termY = 0;
+double VisCont::d_termY = 0;
 
-double ImageConverter::prev_errorYAW,ImageConverter::curr_errorYAW,ImageConverter::int_errorYAW,ImageConverter::diffYAW;
+double VisCont::prev_errorYAW,VisCont::curr_errorYAW,VisCont::int_errorYAW,VisCont::diffYAW;
 
-double ImageConverter::p_termYAW = 0;
-double ImageConverter::i_termYAW = 0;
-double ImageConverter::d_termYAW = 0;
+double VisCont::p_termYAW = 0;
+double VisCont::i_termYAW = 0;
+double VisCont::d_termYAW = 0;
 
-double ImageConverter::control_signalX, ImageConverter::control_signalY, ImageConverter::control_signalYAW;
+double VisCont::control_signalX, VisCont::control_signalY, VisCont::control_signalYAW;
 // ---- CONTROLL PARAMETERS ------ //
 
 
 // ---- Ref. Values ---- //
-const double ImageConverter::RefPose[4] = {-.0957, -0.0605845 /* Y_ref*/ ,  0.303369 /* X_ref*/ , 0.586541 /* theta_ref*/}; 
+const double VisCont::RefPose[4] = {-.0957, 0.00638817 /* Y_ref*/ ,  0.308857 /* X_ref*/ , 0.17 /* theta_ref*/}; 
 
 // ----------------  PID gains---------------- //
-double ImageConverter::Kp_y = .4;
-double ImageConverter::Ki_y = .0042;
-double ImageConverter::Kd_y = 0;
+double VisCont::Kp_y = .4;
+double VisCont::Ki_y = .0042;
+double VisCont::Kd_y = 0;
 
-double ImageConverter::Kp_theta = .2 * Kp_y;
-double ImageConverter::Ki_theta = (8/15) * Ki_y;
-double ImageConverter::Kd_theta = .2 * Kd_y;
+double VisCont::Kp_theta = .15 * Kp_y;
+double VisCont::Ki_theta = (8/13) * Ki_y;
+double VisCont::Kd_theta = .15 * Kd_y;
 // ----------------  PID gains---------------- //
 
-// random pose initialized
 
-const double ImageConverter::y_up = .3; 
-const double ImageConverter::y_dwn = -.1; 
-const double ImageConverter::theta_dwn = -.3*RefPose[3]; 
-const double ImageConverter::theta_up = .3*RefPose[3];
+double VisCont::TT_S,VisCont::TT_E;
+// random pose initialized
+const double VisCont::y_up = .3; 
+const double VisCont::y_dwn = -.1; 
+const double VisCont::theta_dwn = -.7 /*-RefPose[3]*/; 
+const double VisCont::theta_up = .7 /*RefPose[3]*/;
 
-double ImageConverter::x_new,ImageConverter::y_new,ImageConverter::theta_new;
+double VisCont::x_new,VisCont::y_new,VisCont::theta_new;
 
 // ----------------  Q_LEARNING PARAMETERS ---------------- //
 
-double ImageConverter::gamma = .8;
-double ImageConverter::alpha = .1;
-double ImageConverter::R_step = 200;
-
-const int ImageConverter::row = 5/*(y_up - y_dwn) * 10000*/;
-const int ImageConverter::col = 5/*(theta_up - theta_dwn) * 1000*/;
+double VisCont::gamma = .8;
+double VisCont::alpha = .1;
+double VisCont::R_step = 200;
 
-double ImageConverter::Q[5][5] = {0};
-double ImageConverter::R[5][5] = {0};
+const int VisCont::row = 5/*(y_up - y_dwn) * 10000*/;
+const int VisCont::col = 5/*(theta_up - theta_dwn) * 1000*/;
 
-double ImageConverter::Q_next_state;
+double VisCont::Q[7][7] = {0};
+double VisCont::R[7][7] = {0};
 
-int ImageConverter::counter = 1;
-int ImageConverter::Time_Reward;
-double ImageConverter::sample;
+double VisCont::Q_next_state;
 
+int VisCont::counter = 1;
+int VisCont::Time_Reward;
+double VisCont::sample;
 
-int ImageConverter::R_indx_i = 0;
-int ImageConverter::R_indx_j = 2;
+int VisCont::R_indx_i = 0;
+int VisCont::R_indx_j = 2;
 
-int ImageConverter::P_indx_i = 1;
-int ImageConverter::P_indx_j = 1;
+int VisCont::P_indx_i = 1;
+int VisCont::P_indx_j = 1;
 // ----------------  Q_LEARNING PARAMETERS ---------------- //
 
-double ImageConverter::dock_started,ImageConverter::dock_finished,ImageConverter::docking_duration;
-double ImageConverter::zeroMax = .0000000000000000001;
-double ImageConverter::zeroMin = -.0000000000000000001;
+double VisCont::dock_started,VisCont::dock_finished,VisCont::docking_duration;
+double VisCont::zeroMax = .0000000000000000001;
+double VisCont::zeroMin = -.0000000000000000001;
 
-double ImageConverter::speed_reducer = 1;
+double VisCont::speed_reducer = 1;
 
 // ------ offsets X, Y, theta for Docking ---------
-double ImageConverter::Pz_eps = .001;
-double ImageConverter::Py_eps = .002;
-double ImageConverter::A_eps = (CV_PI/180) * .5; // 1 deg.
+double VisCont::Pz_eps = .001;
+double VisCont::Py_eps = .002;
+double VisCont::A_eps = (CV_PI/180) * 3; // 1 deg.
 
-double ImageConverter::safety_margin_X = .15; // safety margin X axis in docking process : 15 cm
+double VisCont::safety_margin_X = .15; // safety margin X axis in docking process : 12 cm
 
 // ------ offsets X, Y, theta for Undocking ---------
-double ImageConverter::x_thresh_undock = .02;
-double ImageConverter::y_thresh_undock = .015;
-double ImageConverter::theta_thresh_undock = (CV_PI/180) * 3;
+double VisCont::x_thresh_undock = .02;
+double VisCont::y_thresh_undock = .015;
+double VisCont::theta_thresh_undock = (CV_PI/180) * 3;
 
 
-double ImageConverter::docking_counter = 1;
- ImageConverter::ImageConverter() : 
-                                it_(nh_)
-  {
-    // subscribe to input video feed and publish output video feed
-    //image_sub_ = it_.subscribe("/ar_follow/image", 1, &ImageConverter::imageCb, this);
+double VisCont::docking_counter = 1;
+
+VisCont::VisCont()
+{	
+	keepMoving = true;    
     
 	/* initialize random seed: */
-  	srand (time(NULL));
-
-/*// Ref. Values
-RefPose[0] = -.0957;
+  	//srand (time(NULL));
 
-RefPose[1] = -0.0193378;
-RefPose[2] = 0.306532;
-RefPose[3] = 0.702702;*/
 
 	R[R_indx_i][R_indx_j] = 50; // reward
         R[P_indx_i][P_indx_j] = -60; // punishment
 
-x_rand_SM = RefPose[2] + .55; // 55 cm spreading domain in the x-axis while moving towards the random pose
+	x_rand_SM = RefPose[2] + .55; // 55 cm spreading domain in the x-axis while moving towards the random pose
 
-     //marSub = nh_.subscribe("/Marker_pose",1,&ImageConverter::marCB,this);
-     
-    // Publish pose message and buffer up to 100 messages
-    MarPose_pub = nh_.advertise<geometry_msgs::PoseStamped>("/marker_pose", 100);
+    	// Publish pose message and buffer up to 100 messages
+    	MarPose_pub = node_vis.advertise<geometry_msgs::PoseStamped>("/marker_pose", 100);
+    	commandPub = node_cont.advertise<geometry_msgs::Twist>("/base_controller/command",1);
     
-    keepMoving = true;
-    commandPub = nh_.advertise<geometry_msgs::Twist>("/base_controller/command",1);
-    Sub = nh_.subscribe("/marker_pose",1,&ImageConverter::camCB,this);
-  }
+    	MarPose_Sub = node_vis.subscribe("/marker_pose",1,&VisCont::camCB,this);
+}
 
-  ImageConverter::~ImageConverter()
+  VisCont::~VisCont()
   {
-   //nh_.shutdown();
-   //destroyWindow("Preview");
+	
   }
-  Mat ImageConverter::getCurrentImage() 
+  Mat VisCont::getCurrentImage() 
   {
         return img;
   }
 
-void ImageConverter::cvTackBarEvents(int value,void* ptr)
+void VisCont::cvTackBarEvents(int value,void* ptr)
 {
-    ImageConverter* ic = (ImageConverter*)(ptr);
+    VisCont* ic = (VisCont*)(ptr);
     ic-> myhandler(value);
 }
 
-void ImageConverter::myhandler(int value)
+void VisCont::myhandler(int value)
 {
         if (Thresh1_min<3) Thresh1_min=3;
     
@@ -227,17 +216,17 @@ void ImageConverter::myhandler(int value)
         TheMarkers[i].draw(TheInputImageCopy,Scalar(205,0,0),1);
     }
 	
-    imshow("INPUT IMAGE",TheInputImageCopy);
-    imshow("THRESHOLD IMAGE",MDetector.getThresholdedImage());
+    //imshow("INPUT IMAGE",TheInputImageCopy);
+    //imshow("THRESHOLD IMAGE",MDetector.getThresholdedImage());
 }
-void ImageConverter::createTrackbars()
+void VisCont::createTrackbars()
 {    
 	namedWindow(trackbarWindowName, 0);
 	createTrackbar("ThresParam 1", trackbarWindowName, &Thresh1_min, Thresh1_max, cvTackBarEvents, this);
 	createTrackbar("ThresParam 2", trackbarWindowName, &Thresh2_min, Thresh2_max, cvTackBarEvents, this);
 	
 }
-void ImageConverter::imageCb(const sensor_msgs::ImageConstPtr& msg)
+void VisCont::imageCb(const sensor_msgs::ImageConstPtr& msg)
 {
     cv_bridge::CvImagePtr cv_ptr;
     try
@@ -252,7 +241,7 @@ void ImageConverter::imageCb(const sensor_msgs::ImageConstPtr& msg)
     img = cv_ptr->image;
     
 }
-bool ImageConverter::readArguments ( int argc,char **argv )
+bool VisCont::readArguments ( int argc,char **argv )
 {
     if (argc<2) {
         cerr<< "Invalid number of arguments!\n" <<endl;
@@ -269,7 +258,7 @@ bool ImageConverter::readArguments ( int argc,char **argv )
     return true;
 }
 
-void ImageConverter::ProgStart(int argc,char** argv)
+void VisCont::ProgStart(int argc,char** argv)
 {
 	// Show images, press "SPACE" to diable image
         // rendering to save CPU time
@@ -277,9 +266,9 @@ void ImageConverter::ProgStart(int argc,char** argv)
 	if (readArguments(argc,argv)==false)
 	{
 		cerr<<"check the authenticity of your file again!"<<endl;
-		nh_.shutdown();
+		node_vis.shutdown();
 	}
-	createTrackbars();
+	//createTrackbars();
 	
 	// IP address for raw3_lund    
         const std::string vsa = "http://192.168.0.101:8080/video?x.mjpeg";
@@ -287,13 +276,13 @@ void ImageConverter::ProgStart(int argc,char** argv)
         // -- publishing video stream with Android Camera--
         TheVideoCapturer.open(vsa);
 
-	//TheVideoCapturer.open(0);
+	//TheVideoCapturer.open(1);
 	
 	// Check video is open
 	if (!TheVideoCapturer.isOpened())
 	{
 		cerr<<"Could not open video!!"<<endl;
-		nh_.shutdown();
+		node_vis.shutdown();
 	}
 	dock_started = ros::Time::now().toSec();
 	// Read first image to get the dimensions
@@ -315,185 +304,51 @@ void ImageConverter::ProgStart(int argc,char** argv)
 	
 	char key=0;
 	int index=0;
+
+//TT_S = ros::Time::now().toSec();
+	//ros::Rate rate(10);
 	// Capture until press ESC or until the end of the video
-	while ((key != 'x') && (key!=27) && TheVideoCapturer.grab() && ros::ok() && keepMoving){
-                ros::spinOnce();
+	while ((key != 'x') && (key!=27) && TheVideoCapturer.grab() && node_vis.ok() && keepMoving)
+	{
+		
+  
+TT_S = ros::Time::now().toSec();
 
-		if (TheVideoCapturer.retrieve(TheInputImage))
-		{
-                        // Detection of markers in the image passed
-			MDetector.detect(TheInputImage,TheMarkers,TheCameraParameters,TheMarkerSize);
-			
-                        TheInputImage.copyTo(TheInputImageCopy);
-
-			geometry_msgs::PoseStamped msg;
-
-			float x_t, y_t, z_t;
-			
-			float roll,yaw,pitch;
-			float rollE,yawE,pitchE;
-			
-			if (TheMarkers.size()>0)
-			{
-			        found = true;
-			        //ROS_INFO("MARKER FOUND!!! ... \n");
-			}else
-			{
-			        found = false;
-				keepMoving = false;
-				ROS_INFO_STREAM("Marker is lost, successful docking trials : " << (docking_counter - 1) << "\n");					
-				//RandomPose(x_new,y_new,theta_new);			
-				//move2docking(-control_signalX, -control_signalY, control_signalYAW);
-			}
-			
-			if (ros::ok() && found)
-			{
-			        y_t = -TheMarkers[0].Tvec.at<Vec3f>(0,0)[0];
-				x_t = TheMarkers[0].Tvec.at<Vec3f>(0,0)[1];
-				z_t = TheMarkers[0].Tvec.at<Vec3f>(0,0)[2];
-			
-				//printf("\n X = %4.2f Y = %4.2f Z = %4.2f\n",x_t,y_t,z_t);
-				
-				cv::Mat R(3,3,cv::DataType<float>::type);
-				// You need to apply cv::Rodrigues() in order to obatain angles wrt to camera coords
-				cv::Rodrigues(TheMarkers[0].Rvec,R);
-                                
-                                
-                                //cout << "\nThe Marker =  " << TheMarkers[0] << endl;
-                                
-                                //cout << "\nR =  " << R << endl;
-                                
-                                // ----------- Euler angle -----------------//
-                                
-                                float roll1 = -asin(R.at<float>(2,0));
-                                float roll2 = CV_PI - roll1;
-
-                                float pitch1 = atan2(R.at<float>(2,1) / cos(roll1), R.at<float>(2,2) / cos(roll1));
-                                float pitch2 = atan2(R.at<float>(2,1) / cos(roll2), R.at<float>(2,2) / cos(roll2));
-
-                                float yaw2 = atan2(R.at<float>(1,0) / cos(roll2), R.at<float>(0,0) / cos(roll2));
-                                float yaw1 = atan2(R.at<float>(1,0) / cos(roll1), R.at<float>(0,0) / cos(roll1));
-
-                                //choose one solution to return
-                                //for example the "shortest" rotation
-                                if ((abs(roll1) + abs(pitch1) + abs(yaw1)) <= (abs(roll2) + abs(pitch2) + abs(yaw2)))
-                                {
-                                        rollE = roll1;
-                                        pitchE= pitch1;
-		                        yawE = yaw1;
-                                } else 
-        
-                                {
-                                        rollE = roll2;
-                                        pitchE = pitch2;
-		                        yawE = yaw2;
-                                }                   
-                                
-                                // ----------- Euler angle -----------------//
-                                
-				pitch   = -atan2(R.at<float>(2,0), R.at<float>(2,1));
-				yaw     = acos(R.at<float>(2,2));
-				roll    = -atan2(R.at<float>(0,2), R.at<float>(1,2));
-			
-			// Marker rotation should be initially zero (just for convenience)
-			float p_off = CV_PI;
-			float r_off = CV_PI/2;
-			float y_off = CV_PI/2;
-
-			// See: http://en.wikipedia.org/wiki/Flight_dynamics
-			
-			        
-			        ros::Time time = ros::Time::now();
-			        // Camera Frame ---
-                                transformCAM.setOrigin( tf::Vector3(.25, 0, .5) );
-                                transformCAM.setRotation( tf::Quaternion(0, 0, 0 , 1) );
-                                CAMbr.sendTransform(tf::StampedTransform(transformCAM, 
-                                                                        (ros::Time::now()/* - ros::Duration(35)*/),
-                                                                        "/base_link",
-                                                                        "/camera"));
-                                       
-                                       
-                                // Publish TF message including the offsets
-				tf::Quaternion quat = tf::createQuaternionFromRPY(roll-r_off, pitch+p_off, yaw-y_off);
-				broadcaster.sendTransform(tf::StampedTransform(tf::Transform(quat, 
-				                                               tf::Vector3(x_t, y_t, z_t)), 
-				                                               (ros::Time::now()/* - ros::Duration(35)*/),
-				                                                "/camera", 
-				                                                "/marker"));
-				
-				// Now publish the pose message, remember the offsets
-				msg.header.frame_id = "/camera";
-				msg.pose.position.x = x_t;
-				msg.pose.position.y = y_t;
-				msg.pose.position.z = z_t;
-				
-				geometry_msgs::Quaternion p_quat = tf::createQuaternionMsgFromRollPitchYaw(roll-r_off, pitch+p_off, yaw-y_off);
-				msg.pose.orientation = p_quat;
-				
-				MarPose_pub.publish(msg);
-				ros::spinOnce();
-			} 
-			/*// Print other rectangles that contains no valid markers
-			 for (unsigned int i=0;i<MDetector.getCandidates().size();i++) 
-			{
-				Marker m( MDetector.getCandidates()[i],10);
-				m.draw(TheInputImageCopy,Scalar(0,255,0),2);
-			}*/
-			
-			for (unsigned int i=0;i<TheMarkers.size();i++)
-				{
-					int currentMarID = TheMarkers[i].id;
-					TheMarkers[i].draw(TheInputImageCopy,Scalar(0,255,0),2)	;
-				
-					CvDrawingUtils::draw3dCube(TheInputImageCopy,TheMarkers[i],TheCameraParameters);
-					CvDrawingUtils::draw3dAxis(TheInputImageCopy,TheMarkers[i],TheCameraParameters);
-
-					//Marker ID to string
-    					stringstream marker_id_string;
-    					marker_id_string << "marker_ " << currentMarID;
-				}
+	Mat frame;
+        TheVideoCapturer >> frame; // get a new frame from camera
+	imshow("video stream",frame);
 
-			if (update_images)
-			{
-				imshow("INPUT IMAGE",TheInputImageCopy);
-				imshow("THRESHOLD IMAGE",MDetector.getThresholdedImage());
-			}
-		}else
-	        {
-		      printf("retrieve failed\n");
-		}
+	waitKey(30); // 30 ms 
 
-		key=cv::waitKey(1);
 
-                // If space is hit, don't render the image.
-		if (key == ' '){
-			update_images = !update_images;
-		}
-	
-        //++msec;
-        //usleep(100000);
+		ros::spinOnce();
+		//rate.sleep();
+TT_E = ros::Time::now().toSec();
+ROS_INFO_STREAM("start = "<< TT_S << " , end = " << TT_E << " , duration = " << (TT_E - TT_S) <<" \n");
+
 	}
-}
+		
 
+}
 // ---- Controller part ----------- START -------
 
-void ImageConverter::ContStart()
+/*void VisCont::ContStart()
 {
         ros::Rate rate(100);
-        
-        while (ros::ok() && keepMoving)
+       
+        while (node_cont.ok() && keepMoving)
         {
-                ros::spinOnce();
+		ros::spinOnce();
                 rate.sleep();
         }
 
-}
+}*/
 
-void ImageConverter::camCB(const geometry_msgs::PoseStamped::ConstPtr& CamFB) // subscriber
+void VisCont::camCB(const geometry_msgs::PoseStamped::ConstPtr& CamFB) // subscriber
 {
-camPose[0] = CamFB->pose.position.x;
+camPose[0] = CamFB->pose.position.x; // not important!!!
 camPose[1] = CamFB->pose.position.y; // y pose
-camPose[2] = CamFB->pose.position.z;
+camPose[2] = CamFB->pose.position.z; // x_rob
 camPose[3] = CamFB->pose.orientation.x; //  theta orientation
         
         // in Marker coordinate sys. 
@@ -544,7 +399,7 @@ camPose[3] = CamFB->pose.orientation.x; //  theta orientation
 			)
 			{	
 				ROS_INFO_STREAM(" delta_X < " << safety_margin_X << " m. , Fixing Y or theta. \n ");     			
-				Controller(RefPose[2], RefPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.01);
+				Controller(RefPose[2], RefPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.1);
 			} else if(
 				(abs(RefPose[1] - camPose[1]) <= Py_eps) && 
 				(abs(RefPose[3] - abs(camPose[3])) <= A_eps)				
@@ -552,12 +407,12 @@ camPose[3] = CamFB->pose.orientation.x; //  theta orientation
 			{
 				ROS_INFO("y & theta fixed successfully, MOVING STRAIGHT AHEAD ... \n");
 				speed_reducer = .1;
-				Controller(RefPose[2], camPose[2], RefPose[1], RefPose[1], RefPose[3], RefPose[3],.01);
+				Controller(RefPose[2], camPose[2], RefPose[1], RefPose[1], RefPose[3], RefPose[3],.1);
 			}
 		}else
         	{
-                	Controller(RefPose[2], camPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.01);
-			Q_Learning(camPose[1],camPose[3]); // performing Q_Learning...
+                	Controller(RefPose[2], camPose[2], RefPose[1], camPose[1], RefPose[3], camPose[3],.1);
+			//Q_Learning(camPose[1],camPose[3]); // performing Q_Learning...
 			//print_R();
 
 		}
@@ -568,7 +423,7 @@ camPose[3] = CamFB->pose.orientation.x; //  theta orientation
 	}
 }
 
-void ImageConverter::Controller(double RefX, double MarPoseX, double RefY, double MarPoseY, double RefYAW, double MarPoseYAW, double dt)
+void VisCont::Controller(double RefX, double MarPoseX, double RefY, double MarPoseY, double RefYAW, double MarPoseYAW, double dt)
 {
 	ROS_INFO_STREAM("--------------------- Controller started ----------------------\n "); 
         
@@ -687,7 +542,7 @@ void ImageConverter::Controller(double RefX, double MarPoseX, double RefY, doubl
 	dock(control_signalX, control_signalY, control_signalYAW);
 }
 
-void ImageConverter::dock(double VelX, double VelY, double omegaZ)
+void VisCont::dock(double VelX, double VelY, double omegaZ)
 {
         ROS_INFO(".... REAL .... !");
         geometry_msgs::Twist msg;
@@ -701,7 +556,7 @@ void ImageConverter::dock(double VelX, double VelY, double omegaZ)
 	ROS_INFO_STREAM(" Current speed of robot: \n" << msg << ".\n");
 }
 
-void ImageConverter::move2docking(double VelX_est, double VelY_est, double omegaZ_est)
+void VisCont::move2docking(double VelX_est, double VelY_est, double omegaZ_est)
 {
 	
         ROS_INFO_STREAM(" Zmar = " << camPose[2] << " m. \n");
@@ -733,7 +588,7 @@ void ImageConverter::move2docking(double VelX_est, double VelY_est, double omega
 }
 // ---- Controller part -------- END ------
 
-void ImageConverter::GenerateRandomVal()
+void VisCont::GenerateRandomVal()
 {
 
 	// ---------------- PID gains ------------------
@@ -742,12 +597,13 @@ void ImageConverter::GenerateRandomVal()
 	Kd_y = ((double) rand() / (RAND_MAX)) * .02;			// 0 < Kd < .01
 	
 	// ------------------ Generating Random Pose ------------------
-	x_new = ((double) rand() / (RAND_MAX)) * (1.1 - x_rand_SM) + x_rand_SM;
+	//x_new = ((double) rand() / (RAND_MAX)) * (1.1 - x_rand_SM) + x_rand_SM;
+	x_new = 1.1;
 	y_new = ((double) rand() / (RAND_MAX)) * (y_up - y_dwn) + y_dwn; // will be used for Q_Learning
 	theta_new = ((double) rand() / (RAND_MAX)) * (theta_up - theta_dwn) + theta_dwn; // will be used for Q_Learning
 }
 
-void ImageConverter::RandomPose(double X_rand, double Y_rand, double theta_rand)
+void VisCont::RandomPose(double X_rand, double Y_rand, double theta_rand)
 {
 	ROS_INFO_STREAM(" Xr = " << X_rand << ", Yr = " << Y_rand << ", Thetar = " << theta_rand << " rad ~ " << theta_rand * (180/CV_PI) << " deg\n");
 	ROS_INFO_STREAM(" -------------------------------------------------------------- \n");
@@ -851,7 +707,7 @@ commandPub.publish(msg_new);
 	
 }
 
-void ImageConverter::Q_Learning(double y, double theta)
+void VisCont::Q_Learning(double y, double theta)
 {
 
 /*	while ( iterations <= it_ ) 
@@ -873,6 +729,9 @@ void ImageConverter::Q_Learning(double y, double theta)
                 {
                         reward = R[y_new][theta_new];
                         Q_next_state = Q[y_new + 1][theta_new];
+
+
+
                         
                         sample = reward + gamma * Q_next_state;
                         Q[y_new][theta_new] = ((1 - alpha) * Q[y_new][theta_new]) + (alpha * sample);
@@ -959,7 +818,7 @@ void ImageConverter::Q_Learning(double y, double theta)
 }
 
 
-void ImageConverter::print_R()
+void VisCont::print_R()
 {
          cout << " R = \n";
         for(int i = 0; i <= (row - 1); i++)
@@ -978,7 +837,7 @@ void ImageConverter::print_R()
 }
 
 
-void ImageConverter::print_Q()
+void VisCont::print_Q()
 {
          cout << " Q = \n";
         for(int i = 0; i <= (row - 1); i++)
@@ -996,3 +855,4 @@ void ImageConverter::print_Q()
         cout << "\n";
 }
 
+
diff --git a/MobileRobot/AugReaMarker/CamMark/camtomar/src/run_vis_cont.cpp b/MobileRobot/AugReaMarker/CamMark/camtomar/src/run_vis_cont.cpp
index 7b5d84cba9a43009e8c84b7d46ce4b6955fbac5b..6c52b0ba8d82cd384dd013e32eaa19b16f643d63 100644
--- a/MobileRobot/AugReaMarker/CamMark/camtomar/src/run_vis_cont.cpp
+++ b/MobileRobot/AugReaMarker/CamMark/camtomar/src/run_vis_cont.cpp
@@ -3,11 +3,12 @@
 
 int main(int argc, char** argv)
 {
-
-       ros::init(argc, argv, "aruco_tf_publisher");
+      ros::init(argc, argv, "aruco_tf_publisher");
         
-        ImageConverter imgConv;
+        VisCont imgConv;
         imgConv.ProgStart(argc,argv);
+	
+	
 
         return 0;
 
diff --git a/MobileRobot/AugReaMarker/CamMark/camtomar/src/run_vis_cont.cpp~ b/MobileRobot/AugReaMarker/CamMark/camtomar/src/run_vis_cont.cpp~
new file mode 100644
index 0000000000000000000000000000000000000000..6c52b0ba8d82cd384dd013e32eaa19b16f643d63
--- /dev/null
+++ b/MobileRobot/AugReaMarker/CamMark/camtomar/src/run_vis_cont.cpp~
@@ -0,0 +1,16 @@
+#include <iostream>
+#include<VisionControl.h>
+
+int main(int argc, char** argv)
+{
+      ros::init(argc, argv, "aruco_tf_publisher");
+        
+        VisCont imgConv;
+        imgConv.ProgStart(argc,argv);
+	
+	
+
+        return 0;
+
+
+}
diff --git a/MobileRobot/AugReaMarker/CamMark/devel/lib/camtomar/camtomar b/MobileRobot/AugReaMarker/CamMark/devel/lib/camtomar/camtomar
index 7b58988013f83c3cf787e374b63f5cdef835d3a7..fac918e04fa288649b96c9072b44c3165a94d00f 100755
Binary files a/MobileRobot/AugReaMarker/CamMark/devel/lib/camtomar/camtomar and b/MobileRobot/AugReaMarker/CamMark/devel/lib/camtomar/camtomar differ
diff --git a/MobileRobot/CameraCalibration/data/camera_conf.xml b/MobileRobot/CameraCalibration/data/camera_conf.xml
index 2335bf2543682d1ab8adad3bcb608abe70995fc2..5bed2131c110ee15f68af56342826e6375d3d347 100644
--- a/MobileRobot/CameraCalibration/data/camera_conf.xml
+++ b/MobileRobot/CameraCalibration/data/camera_conf.xml
@@ -13,10 +13,11 @@
   
   <!-- The input to use for calibration. 
                 To use an input camera -> give the ID of the camera, like "1"
+                To use an input camera with IP address -> give the ID of the camera, like "http://192.168.0.101:8080/video?x.mjpeg"
                 To use an input video  -> give the path of the input video, like "/tmp/x.avi"
                 To use an image list   -> give the path to the XML or YAML file containing the list of the images, like "/tmp/circles_list.xml"
   -->
-  <Input>"http://192.168.0.101:8080/video?x.mjpeg"</Input>
+  <Input>"0"</Input>
   <!-- <Input>"images/CameraCalibraation/VID5/VID5.xml"</Input> -->
   
   <!--  If true (non-zero) we flip the input images around the horizontal axis.-->
@@ -36,7 +37,7 @@
   <Calibrate_FixPrincipalPointAtTheCenter> 1 </Calibrate_FixPrincipalPointAtTheCenter>
   
   <!-- The name of the output log file. -->
-  <Write_outputFileName>"Xperia_calibrated_data.yml"</Write_outputFileName>
+  <Write_outputFileName>"Logitech_C920_Calibration_file.yml"</Write_outputFileName>
   <!-- If true (non-zero) we write to the output file the feature points.-->
   <Write_DetectedFeaturePoints>1</Write_DetectedFeaturePoints>
   <!-- If true (non-zero) we write to the output file the extrinsic camera parameters.-->
diff --git a/MobileRobot/CameraCalibration/data/camera_conf.xml~ b/MobileRobot/CameraCalibration/data/camera_conf.xml~
index e3f89e0618a067e015ab6707467315e7f7e7f2d0..5bed2131c110ee15f68af56342826e6375d3d347 100644
--- a/MobileRobot/CameraCalibration/data/camera_conf.xml~
+++ b/MobileRobot/CameraCalibration/data/camera_conf.xml~
@@ -13,10 +13,11 @@
   
   <!-- The input to use for calibration. 
                 To use an input camera -> give the ID of the camera, like "1"
+                To use an input camera with IP address -> give the ID of the camera, like "http://192.168.0.101:8080/video?x.mjpeg"
                 To use an input video  -> give the path of the input video, like "/tmp/x.avi"
                 To use an image list   -> give the path to the XML or YAML file containing the list of the images, like "/tmp/circles_list.xml"
   -->
-  <Input>"http://192.168.0.101:8080/video?x.mjpeg"</Input>
+  <Input>"0"</Input>
   <!-- <Input>"images/CameraCalibraation/VID5/VID5.xml"</Input> -->
   
   <!--  If true (non-zero) we flip the input images around the horizontal axis.-->
@@ -36,7 +37,7 @@
   <Calibrate_FixPrincipalPointAtTheCenter> 1 </Calibrate_FixPrincipalPointAtTheCenter>
   
   <!-- The name of the output log file. -->
-  <Write_outputFileName>"Android_calibrated_data.yml"</Write_outputFileName>
+  <Write_outputFileName>"Logitech_C920_Calibration_file.yml"</Write_outputFileName>
   <!-- If true (non-zero) we write to the output file the feature points.-->
   <Write_DetectedFeaturePoints>1</Write_DetectedFeaturePoints>
   <!-- If true (non-zero) we write to the output file the extrinsic camera parameters.-->
diff --git a/MobileRobot/Controller/PID/dock/include/dock.h b/MobileRobot/Controller/PID/dock/include/dock.h
index 931efca5846d8e134142a0ab6f976851e7a7b65b..367d72eb3c27695f130fe3432641c513c292a616 100644
--- a/MobileRobot/Controller/PID/dock/include/dock.h
+++ b/MobileRobot/Controller/PID/dock/include/dock.h
@@ -3,6 +3,8 @@
 #include <actionlib/client/simple_action_client.h>
 #include <geometry_msgs/PoseStamped.h>
 
+#include <nav_msgs/Odometry.h>
+
 typedef actionlib::SimpleActionClient<move_base_msgs::MoveBaseAction> Client;
 
 
@@ -19,7 +21,7 @@ public:
         static double FORWARD_SPEED_MPS;
         
         static double refVal;
-        
+        static double robPose;
         static double prev_error;
         
         static double int_error;
@@ -45,6 +47,8 @@ private:
         
         ros::NodeHandle node;
 	ros::Publisher commandPub;
+	ros::Publisher OdomPub;
+
 	ros::Subscriber PosSub;
 	
         Client ac;
@@ -53,7 +57,7 @@ private:
         
         void dock(double speed);
         
-        void RobCurrPose(const geometry_msgs::PoseStamped::ConstPtr& feedback);
+        void RobCurrPose(const nav_msgs::Odometry::ConstPtr& feedback);
         
         void Controller(double Reference, double RobPose, double dt);
 
diff --git a/MobileRobot/Controller/PID/dock/include/dock.h~ b/MobileRobot/Controller/PID/dock/include/dock.h~
index 25549c61d8e910ab47b71ef11c68fbf1dc2ab257..931efca5846d8e134142a0ab6f976851e7a7b65b 100644
--- a/MobileRobot/Controller/PID/dock/include/dock.h~
+++ b/MobileRobot/Controller/PID/dock/include/dock.h~
@@ -18,8 +18,6 @@ public:
         
         static double FORWARD_SPEED_MPS;
         
-        static double robpose;
-        
         static double refVal;
         
         static double prev_error;
diff --git a/MobileRobot/Controller/PID/dock/src/dock.cpp b/MobileRobot/Controller/PID/dock/src/dock.cpp
index cd75ca610f6bfa95bbe26c8db02b2792496c34b2..949cab5c15670542ed132b987a0d958ebdabff1c 100644
--- a/MobileRobot/Controller/PID/dock/src/dock.cpp
+++ b/MobileRobot/Controller/PID/dock/src/dock.cpp
@@ -6,10 +6,27 @@
 
 #include <geometry_msgs/Twist.h>
 #include <geometry_msgs/PoseStamped.h>
+#include <nav_msgs/Odometry.h>
 
-double RobotDocking::refVal = .81; // add a ref. value 
 
-double RobotDocking::FORWARD_SPEED_MPS = .05;
+#include "opencv2/core/core_c.h"
+#include "opencv2/core/core.hpp"
+#include "opencv2/flann/miniflann.hpp"
+#include "opencv2/imgproc/imgproc_c.h"
+#include "opencv2/imgproc/imgproc.hpp"
+#include "opencv2/video/video.hpp"
+#include "opencv2/features2d/features2d.hpp"
+#include "opencv2/objdetect/objdetect.hpp"
+#include "opencv2/calib3d/calib3d.hpp"
+#include "opencv2/ml/ml.hpp"
+#include "opencv2/highgui/highgui_c.h"
+#include "opencv2/highgui/highgui.hpp"
+#include "opencv2/contrib/contrib.hpp"
+
+double RobotDocking::refVal = .52/*(CV_PI/180) * 45*/; // 1 deg. for ref. value 
+double RobotDocking::robPose /*= (CV_PI/180) * 1*/; // 1 deg.; // Robot Current Position 
+
+double RobotDocking::FORWARD_SPEED_MPS = .01;
 
 double RobotDocking::prev_error = 0;
 double RobotDocking::curr_error = 0;
@@ -20,8 +37,8 @@ double RobotDocking::p_term = 0;
 double RobotDocking::i_term = 0;
 double RobotDocking::d_term = 0;
 
-double RobotDocking::prop_gain = 0.1;
-double RobotDocking::integ_gain = 0.1;
+double RobotDocking::prop_gain = 0.0001;
+double RobotDocking::integ_gain = 0.01;
 double RobotDocking::deriv_gain = 0;
 
 double RobotDocking::control_signal = 0;
@@ -32,9 +49,11 @@ RobotDocking::RobotDocking() : ac("Robot_move", true)
         keepMoving = true;
         
         commandPub = node.advertise<geometry_msgs::Twist>("/base_controller/command",1);
-        
+        OdomPub = node.advertise<nav_msgs::Odometry>("/base_controller/odometry",1);
+
         //PosSub = node.subscribe("/visualize_pose_ekf",1,&RobotDocking::RobCurrPose,this);
-        
+        PosSub = node.subscribe("/base_controller/odometry",1,&RobotDocking::RobCurrPose,this);
+	
 }
 
 
@@ -44,7 +63,9 @@ void RobotDocking::ProgStart()
         
         while (ros::ok() && keepMoving)
         {
-                dock(FORWARD_SPEED_MPS);
+                //dock(FORWARD_SPEED_MPS);
+		Controller(refVal,robPose,.01);
+
                 ros::spinOnce();
                 rate.sleep();
         }
@@ -81,42 +102,64 @@ void RobotDocking::Controller(double Reference, double RobPose, double dt)
         
         // save the current error as the previous one
         // for the next iteration.
-        prev_error = curr_error;        
+        prev_error = curr_error;
+
+	//ROS_INFO_STREAM("RAW Control signalYAW = "<< control_signal <<". \n");
+	ROS_INFO_STREAM(" ------------------------------------------------------------- \n");	
+	
         dock(control_signal);
 }
 
 void RobotDocking::dock(double speed)
 {
-        geometry_msgs::Twist msg;
-	msg.linear.x = speed;
-	//msg.angular.z = speed;
-	commandPub.publish(msg);
+
+
+		geometry_msgs::Twist msg;
+		msg.linear.x = speed;
+		//msg.angular.z = speed;
+		commandPub.publish(msg);
+	
+		ROS_INFO_STREAM(" Current speed of robot: " << msg << " m/s \n");
+
+        
+	/*if(robPose >= refVal) // it moves forever.. cuz robot pose is a cte value that never achieved!!!!
+	{
+		ROS_INFO(" Dock is completed! \n ");
+                keepMoving = false;	
+	} else
+	{
+		geometry_msgs::Twist msg;
+		msg.linear.x = speed;
+		//msg.angular.z = speed;
+		commandPub.publish(msg);
 	
-	ROS_INFO_STREAM(" Current speed of robot: " << speed << " m/s \n");
+		ROS_INFO_STREAM(" Current speed of robot: " << msg << " m/s \n");
+
+	}*/
 }
 
-void RobotDocking::RobCurrPose(const geometry_msgs::PoseStamped::ConstPtr& feedback)
+void RobotDocking::RobCurrPose(const nav_msgs::Odometry::ConstPtr& feedback)
 {
         
         
-        ROS_INFO_STREAM(" X = " << feedback->pose.position.x << " m. \n");
-        ROS_INFO_STREAM(" Y = " << feedback->pose.position.y << " m. \n");
+        ROS_INFO_STREAM(" X = " << feedback->pose.pose.position.x << " m. \n");
+        /*ROS_INFO_STREAM(" Y = " << feedback->pose.position.y << " m. \n");
         ROS_INFO_STREAM(" Z = " << feedback->pose.position.z << " m. \n");
         
         ROS_INFO_STREAM(" Xorien = " << feedback->pose.orientation.x << " rad. \n");
         ROS_INFO_STREAM(" Yorien = " << feedback->pose.orientation.y << " rad. \n");
         ROS_INFO_STREAM(" Zorien = " << feedback->pose.orientation.z << " rad. \n");
         ROS_INFO_STREAM(" Worien = " << feedback->pose.orientation.w << " rad. \n");
+       */ 
         
-        
-        if (feedback->pose.orientation.z >= refVal)
+        if (feedback->pose.pose.position.x >= refVal)
         {
                 ROS_INFO(" Dock is completed! \n ");
                 keepMoving = false;
         }
         else
         {
-                Controller(refVal,feedback->pose.orientation.z,.01);
+                Controller(refVal,feedback->pose.pose.position.x,.01);
                 //ROS_INFO(" Calculating control signal! \n ");
         }
         
diff --git a/MobileRobot/Controller/PID/dock/src/dock.cpp~ b/MobileRobot/Controller/PID/dock/src/dock.cpp~
index 5743eb43e77a6168a17121cdba1a38610c562f20..cd75ca610f6bfa95bbe26c8db02b2792496c34b2 100644
--- a/MobileRobot/Controller/PID/dock/src/dock.cpp~
+++ b/MobileRobot/Controller/PID/dock/src/dock.cpp~
@@ -35,7 +35,6 @@ RobotDocking::RobotDocking() : ac("Robot_move", true)
         
         //PosSub = node.subscribe("/visualize_pose_ekf",1,&RobotDocking::RobCurrPose,this);
         
-
 }
 
 
diff --git a/MobileRobot/Machine_Learning/Practice/Q_Learning b/MobileRobot/Machine_Learning/Practice/Q_Learning
index a34fa75521dd5cbdb82b56dffb762afb74eb9a12..0d0d3255a39c1b2f6342705481ca0d3e9dd01f59 100755
Binary files a/MobileRobot/Machine_Learning/Practice/Q_Learning and b/MobileRobot/Machine_Learning/Practice/Q_Learning differ
diff --git a/MobileRobot/Machine_Learning/Practice/Q_learning.cpp b/MobileRobot/Machine_Learning/Practice/Q_learning.cpp
index 3a38fd7fabdfc2a33c70a37ae63a9e3266ce6a1e..1e1d2a7d33b295f262fa8b6d7be2612f7a414524 100644
--- a/MobileRobot/Machine_Learning/Practice/Q_learning.cpp
+++ b/MobileRobot/Machine_Learning/Practice/Q_learning.cpp
@@ -6,8 +6,8 @@
 
 using namespace std;
 
- const int row = 13;
- const int col = 13;
+ const int row = 7;
+ const int col = 7;
 
 
 double gamma = .8;
@@ -27,8 +27,8 @@ double Q_curr_state = Q[i][j];
 
 double reward;
 
-int R_indx_i = row - row;
-int R_indx_j = .5 * col;
+int R_indx_i = 5/*row - row*/;
+int R_indx_j = 3/*.5 * col*/;
 
 int P_indx_i = row - 2;
 int P_indx_j = col - 1;
diff --git a/MobileRobot/Machine_Learning/Practice/Q_learning.cpp~ b/MobileRobot/Machine_Learning/Practice/Q_learning.cpp~
index 856b3be48d3211c1465c450abb9c3f74023238a3..4ed5d34df2d66f0c3415b5cc36b061a63ffa6709 100644
--- a/MobileRobot/Machine_Learning/Practice/Q_learning.cpp~
+++ b/MobileRobot/Machine_Learning/Practice/Q_learning.cpp~
@@ -6,8 +6,8 @@
 
 using namespace std;
 
- const int row = 48;
- const int col = 13;
+ const int row = 7;
+ const int col = 7;
 
 
 double gamma = .8;
diff --git a/MobileRobot/MobRobGUI/MobileRobotGUI/MobileRobotGUI.pro.user b/MobileRobot/MobRobGUI/MobileRobotGUI/MobileRobotGUI.pro.user
index bed1f31b92abf10e6267b5bedbc981f0b97baefa..50590e2161f7926e56376487afc394af5a3d0a10 100644
--- a/MobileRobot/MobRobGUI/MobileRobotGUI/MobileRobotGUI.pro.user
+++ b/MobileRobot/MobRobGUI/MobileRobotGUI/MobileRobotGUI.pro.user
@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!DOCTYPE QtCreatorProject>
-<!-- Written by QtCreator 3.5.1, 2016-01-12T21:53:35. -->
+<!-- Written by QtCreator 3.5.1, 2016-04-13T19:04:29. -->
 <qtcreator>
  <data>
   <variable>EnvironmentId</variable>
diff --git a/MobileRobot/ObjectTrack/objTracking.cpp b/MobileRobot/ObjectTrack/objTracking.cpp
index 70dc0433505b6c0289d868bdce6e542de37020d2..32bc8fead4d3594281fa661a8c215539b4fbd2e0 100644
--- a/MobileRobot/ObjectTrack/objTracking.cpp
+++ b/MobileRobot/ObjectTrack/objTracking.cpp
@@ -291,9 +291,7 @@ const std::string vsa = "http://192.168.0.101:8080/video?x.mjpeg";
         //image will not appear without this waitKey() command
         waitKey(30);
 
-    }
-    
-    
+    }    
     
     
     
diff --git a/MobileRobot/ObjectTrack/objTracking.cpp~ b/MobileRobot/ObjectTrack/objTracking.cpp~
index 9259c3ca678fee51a5fc51e9e39ccaa9511424c3..d7f901d0356091ce2602bd50accf0dceac630119 100644
--- a/MobileRobot/ObjectTrack/objTracking.cpp~
+++ b/MobileRobot/ObjectTrack/objTracking.cpp~
@@ -254,8 +254,6 @@ const std::string vsa = "http://192.168.0.101:8080/video?x.mjpeg";
         std::cout << "Error opening video stream or file" << std::endl;
         return -1;
     }
-  
-    
     // open the Video Stream IP camera...
     capture.open(vsa);
 */
@@ -299,5 +297,48 @@ const std::string vsa = "http://192.168.0.101:8080/video?x.mjpeg";
     
     
     
+    
+    
+    
+    
+    
+    
+    
+    
+    {
+
+if (TheVideoCapturer.retrieve(TheInputImage))
+{
+                
+}else
+{
+        printf("retrieve failed\n");
+}
+
+key=cv::waitKey(30);
+// If space is hit, don't render the image.
+
+if (key == ' ')
+{
+	update_images = !update_images;
+}
+    
+    
+    }
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
     return 0;
 }