AUV-Coop-Assembly
Master's thesis in Robotics Engineering. Cooperative peg-in-hole assembly with two underwater vehicles guided by vision.
detector.cpp
#include "header/detector.h"

Detector::Detector(){} // private constructor, not meant to be used


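/**
 * Detector::findSquare
 * Finds all the (approximately) square contours in the image via _findSquares,
 * then orders the four corners of each detected square
 * (top-left, top-right, bottom-right, bottom-left) into found4CornersVector.
 * Returns 0.
 */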
int Detector::findSquare(cv::Mat &image, std::vector<std::vector<cv::Point>> *found4CornersVector,
                         int threshLevels, int cannyThresh){

  std::vector<std::vector<cv::Point> > squares;

  //cv::cvtColor(image, image, cv::COLOR_GRAY2BGR); //not a real color conversion, only channel replication

  Detector::_findSquares(image, squares, threshLevels, cannyThresh);

  //Detector::drawSquares(image, squares, "asdasd");

  found4CornersVector->resize(squares.size());
  Detector::orderAngles(squares, found4CornersVector);

  return 0;

}

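/**
 * Detector::drawSquares
 * Debug helper: draws each detected square as a green polyline, marks its four
 * corners with colored dots, shows the result in a window and waits for a key press.
 */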
void Detector::drawSquares( cv::Mat image, const std::vector<std::vector<cv::Point> > squares,
                            const char* wndname)
{

  using namespace std;
  using namespace cv;


  for( size_t i = 0; i < squares.size(); i++ )
  {
    const Point* p = &squares[i][0];
    int n = (int)squares[i].size();
    // std::cout << "number of point:" << n << "\n";
    polylines(image, &p, &n, 1, true, Scalar(0,255,0), 2, LINE_AA);
    circle(image, squares[i][0], 5, Scalar(0,0,0), FILLED);
    circle(image, squares[i][1], 5, Scalar(255,255,0), FILLED);
    circle(image, squares[i][2], 5, Scalar(0,255,255), FILLED);
    circle(image, squares[i][3], 5, Scalar(255,255,255), FILLED);

  }
  imshow(wndname, image);
  waitKey(0);
}


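/**
 * Detector::_findSquares
 * Core square detection (adapted from the OpenCV "squares" sample): the image is
 * smoothed by pyramid down/up-scaling, then each color channel is binarized with
 * Canny (level 0) or with increasing intensity thresholds (levels 1..N-1);
 * contours are approximated to polygons and kept as squares when they have
 * 4 vertices, a large enough area, are convex and have near 90 degree angles.
 */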
void Detector::_findSquares( const cv::Mat& image, std::vector<std::vector<cv::Point> >& squares,
                             int N, int thresh)
{

  using namespace std;
  using namespace cv;

  squares.clear();
  Mat pyr, timg, gray0(image.size(), CV_8U), gray;
  // down-scale and upscale the image to filter out the noise
  pyrDown(image, pyr, Size(image.cols/2, image.rows/2));
  pyrUp(pyr, timg, image.size());
  vector<vector<Point> > contours;

  int channels = image.channels();
  // find squares in every color plane of the image
  for( int c = 0; c < channels; c++ )
  {
    int ch[] = {c, 0};
    mixChannels(&timg, 1, &gray0, 1, ch, 1);
    // try several threshold levels
    for( int l = 0; l < N; l++ )
    {
      // hack: use Canny instead of zero threshold level.
      // Canny helps to catch squares with gradient shading
      if( l == 0 )
      {
        // apply Canny. Take the upper threshold
        // and set the lower to 0 (which forces edges merging)
        Canny(gray0, gray, 0, thresh, 5);
        // dilate canny output to remove potential
        // holes between edge segments
        dilate(gray, gray, Mat(), Point(-1,-1));
      }
      else
      {
        // apply threshold if l!=0:
        //   tgray(x,y) = gray(x,y) < (l+1)*255/N ? 255 : 0
        gray = gray0 >= (l+1)*255/N;
      }
      // find contours and store them all as a list
      findContours(gray, contours, RETR_LIST, CHAIN_APPROX_SIMPLE);
      vector<Point> approx;
      // test each contour
      for( size_t i = 0; i < contours.size(); i++ )
      {
        // approximate contour with accuracy proportional
        // to the contour perimeter
        approxPolyDP(contours[i], approx, arcLength(contours[i], true)*0.02, true);
        // square contours should have 4 vertices after approximation,
        // relatively large area (to filter out noisy contours)
        // and be convex.
        // Note: the absolute value of the area is used because
        // the area may be positive or negative, in accordance with the
        // contour orientation
        if( approx.size() == 4 &&
            fabs(contourArea(approx)) > 1000 &&
            isContourConvex(approx) )
        {
          double maxCosine = 0;
          for( int j = 2; j < 5; j++ )
          {
            // find the maximum cosine of the angle between joint edges
            double cosine = fabs(angle(approx[j%4], approx[j-2], approx[j-1]));
            maxCosine = MAX(maxCosine, cosine);
          }
          // if the cosines of all angles are small
          // (all angles are ~90 degrees) then write the quadrangle
          // vertices to the resultant sequence
          if( maxCosine < 0.3 )
            squares.push_back(approx);
        }
      }
    }
  }
}



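/**
 * Detector::angle
 * Returns the cosine of the angle between the vectors pt0->pt1 and pt0->pt2
 * (helper for the ~90 degree corner test above; the 1e-10 term avoids
 * division by zero for degenerate vectors).
 */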
double Detector::angle( cv::Point pt1, cv::Point pt2, cv::Point pt0 )
{
  double dx1 = pt1.x - pt0.x;
  double dy1 = pt1.y - pt0.y;
  double dx2 = pt2.x - pt0.x;
  double dy2 = pt2.y - pt0.y;
  return (dx1*dx2 + dy1*dy2)/sqrt((dx1*dx1 + dy1*dy1)*(dx2*dx2 + dy2*dy2) + 1e-10);
}

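/**
 * Detector::orderAngles (vector version)
 * Applies the single-square corner ordering below to every detected square;
 * returns -1 if a square does not have exactly 4 corners, 0 otherwise.
 */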
int Detector::orderAngles(std::vector<std::vector<cv::Point>> angles, std::vector<std::vector<cv::Point>> *orderedAngles){
  for (int i=0; i< angles.size(); i++){
    if (angles.at(i).size() != 4){
      std::cerr << "[DETECTOR] orderAngles Angles must be 4\n";
      return -1;
    }
    orderAngles(angles.at(i), &(orderedAngles->at(i)));
  }
  return 0;
}

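/**
 * Detector::orderAngles
 * Orders the 4 corners of a quadrilateral with respect to its center
 * (intersection of the diagonals): index 0 = top-left, 1 = top-right,
 * 2 = bottom-right, 3 = bottom-left, in image coordinates (y grows downwards).
 */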
int Detector::orderAngles(std::vector<cv::Point> angles, std::vector<cv::Point> *orderedAngles){

  if (angles.size() != 4){
    std::cerr << "[DETECTOR] orderAngles Angles must be 4\n";
    return -1;
  }


  cv::Point center = getCenter(angles);
  orderedAngles->resize(4);
  for (int i=0; i<4; i++){

    if (angles.at(i).x < center.x){
      if (angles.at(i).y < center.y){ //top left corner
        orderedAngles->at(0) = angles.at(i);
      } else { // bottom left corner
        orderedAngles->at(3) = angles.at(i);
      }

    } else {
      if (angles.at(i).y < center.y){ //top right corner
        orderedAngles->at(1) = angles.at(i);

      } else { // bottom right corner
        orderedAngles->at(2) = angles.at(i);
      }
    }
  }

  return 0;
}

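/**
 * Detector::getCenter
 * Returns the intersection of the two diagonals of the quadrilateral
 * (segment points[0]-points[2] and segment points[1]-points[3]),
 * used as the center for the corner ordering above.
 */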
cv::Point Detector::getCenter(std::vector<cv::Point> points){

  cv::Point A = points.at(0);
  cv::Point B = points.at(2);
  cv::Point C = points.at(3);
  cv::Point D = points.at(1);

  // Line AB represented as a1x + b1y = c1
  double a1 = B.y - A.y;
  double b1 = A.x - B.x;
  double c1 = a1*(A.x) + b1*(A.y);

  // Line CD represented as a2x + b2y = c2
  double a2 = D.y - C.y;
  double b2 = C.x - D.x;
  double c2 = a2*(C.x) + b2*(C.y);

  double determinant = a1*b2 - a2*b1;

  if (determinant == 0)
  {
    // diagonals are parallel: the intersection is not defined
  }

  double x = (b2*c1 - b1*c2)/determinant;
  double y = (a1*c2 - a2*c1)/determinant;
  return cv::Point(x, y);

}


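/**
 * Detector::templateMatching (multi-template version)
 * Runs the single-template matching below once for every template in templVector,
 * storing one set of 4 corners and one score per template.
 */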
int Detector::templateMatching(cv::Mat img, std::vector<cv::Mat> templVector,
                               std::vector<std::vector<cv::Point>> *found4CornersVector, std::vector<double> *bestValues,
                               int templ_method, std::vector<double> scaleFactors, bool showDisplay){

  // make sure the output vectors can hold one result per template
  found4CornersVector->resize(templVector.size());
  bestValues->resize(templVector.size());

  for (int i=0; i< templVector.size(); i++){

    Detector::templateMatching(img, templVector.at(i), &(found4CornersVector->at(i)), &(bestValues->at(i)),
                               templ_method, scaleFactors, showDisplay);

  }

  return 0;
}


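/**
 * Detector::templateMatching
 * Multi-scale template matching: the input image is resized by every factor in
 * scaleFactors (a default list between 0.75 and 0.1 is used when none is given),
 * cv::matchTemplate is run at each scale, and the best score over all evaluated
 * scales selects the match. The bounding box of the match is scaled back to the
 * original image size and returned as 4 corners (top-left, top-right,
 * bottom-right, bottom-left) in found4Corners.
 */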
int Detector::templateMatching(cv::Mat img, cv::Mat templ,
                               std::vector<cv::Point> *found4Corners, double* bestValue,
                               int templ_method,
                               std::vector<double> scaleFactors, bool showDisplay){


  if (img.channels() == 1){ // GRAY image
    cv::cvtColor(img, img, cv::COLOR_GRAY2BGR); // not a real color conversion, but the algorithms need 3 channels
  }

  const char* image_window;
  cv::Mat img_display;
  if(showDisplay){
    image_window = "Template Matching result";
    img_display = img.clone();
    cv::namedWindow( image_window, cv::WINDOW_AUTOSIZE );
  }

  if (scaleFactors.size() == 0){
    scaleFactors = {0.75, 0.6, 0.65, 0.5,
                    0.48, 0.45, 0.42, 0.4, 0.38, 0.38, 0.32, 0.3,
                    0.28, 0.25, 0.24, 0.23, 0.22, 0.21, 0.20, 0.19, 0.18, 0.15, 0.1};
  }


  std::vector<double> minMaxValue(scaleFactors.size()),
      scaleUpY(scaleFactors.size()),
      scaleUpX(scaleFactors.size());

  std::vector<cv::Point> bestMatch(scaleFactors.size());
  cv::Mat imgScaled = img.clone();

  // lastScale is the number of scaling factors actually evaluated: if the loop
  // breaks early it is reduced, so only the values computed so far are compared
  int lastScale = scaleFactors.size();
  for (int i=0; i<scaleFactors.size(); i++){

    cv::resize(img, imgScaled, cv::Size(), scaleFactors[i], scaleFactors[i]);
    //std::cout << "[DETECTOR] img size y, x: " << img.rows << " " << img.cols << "\n";
    //std::cout << "scaled img y, x: " << imgScaled.rows << " " << imgScaled.cols << "\n";
    // the actual scale can be approximated, so computing scaleUp this way
    // is better than using 1/scaleFactors[i]
    scaleUpY[i] = ((double)img.rows) / ((double)imgScaled.rows);
    scaleUpX[i] = ((double)img.cols) / ((double)imgScaled.cols);

    // if the scaled image is smaller than the template, stop the loop
    if (imgScaled.rows < templ.rows || imgScaled.cols < templ.cols){
      //std::cout << "[DETECTOR] template bigger\n";
      lastScale = i; // values are valid only up to index i-1
      break;
    }


    Detector::MatchingMethod(templ_method, imgScaled, templ, &(bestMatch[i]), &(minMaxValue[i]));


//    std::cout << "scale factor UPX and UPY " << scaleUpX << " " << scaleUpY << "\n";
//    std::cout << "bestMatch.x: " << bestMatch[i].x << "\n";
//    std::cout << "bestMatch.y: " << bestMatch[i].y << "\n";
//    std::cout << "topLeft.x: " << topLeft.x << "\n";
//    std::cout << "topLeft.y: " << topLeft.y << "\n";
//    std::cout << "bottomRi.x: " << bottomRight.x << "\n";
//    std::cout << "bottomRi.y: " << bottomRight.y << "\n";
//    std::cout << "templ row col " << templ.rows << " " << templ.cols << "\n\n";


//    cv::rectangle( imgScaled, bestMatch[i],
//                   cv::Point( bestMatch[i].x + templ.cols , bestMatch[i].y + templ.rows ),
//                   cv::Scalar::all(0), 1, 8, 0 );
//    std::string boh = "asdasda" + std::to_string(i);
//    cv::imshow( boh, imgScaled );
//    cv::Mat otherMat;
//    cv::resize (imgScaled, otherMat, cv::Size(), scaleUpX[i], scaleUpY[i]);
//    cv::imshow( "boh", otherMat );
//    cv::waitKey();


  }

  int indexBest;
  if( templ_method == cv::TM_SQDIFF || templ_method == cv::TM_SQDIFF_NORMED ){
    // for the SQDIFF methods the best match is the minimum value
    indexBest = std::distance(minMaxValue.begin(),
                              std::min_element(minMaxValue.begin(),
                                               minMaxValue.begin() + lastScale));

  } else{
    // for the other methods the best match is the maximum value
    indexBest = std::distance(minMaxValue.begin(),
                              std::max_element(minMaxValue.begin(),
                                               minMaxValue.begin() + lastScale));
  }

  std::cout << "[DETECTOR][TEMPLATE_MATCHING] BEST ITERATION: scaling factor " << scaleFactors[indexBest]
            << "\n \t with value: " << minMaxValue.at(indexBest) << "\n";

  cv::Point topLeft, bottomRight;
  topLeft.x = (int)(bestMatch[indexBest].x * scaleUpX[indexBest]);
  topLeft.y = (int)(bestMatch[indexBest].y * scaleUpY[indexBest]);
  bottomRight.x = (int)( (bestMatch[indexBest].x + templ.cols) * scaleUpX[indexBest]);
  bottomRight.y = (int)( (bestMatch[indexBest].y + templ.rows) * scaleUpY[indexBest]);

  if(showDisplay){

    cv::rectangle( img_display, topLeft, bottomRight,
                   cv::Scalar(0,255,0), 1, 8, 0);
    cv::imshow( image_window, img_display);
    cv::waitKey(0);
  }

  found4Corners->resize(4);
  found4Corners->at(0) = topLeft;
  found4Corners->at(1) = cv::Point(bottomRight.x, topLeft.y);
  found4Corners->at(2) = bottomRight;
  found4Corners->at(3) = cv::Point(topLeft.x, bottomRight.y);

  if (bestValue != NULL){
    *bestValue = minMaxValue.at(indexBest);
  }

  return 0;

}


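/**
 * Detector::MatchingMethod
 * Thin wrapper around cv::matchTemplate + cv::minMaxLoc: returns the location
 * and score of the best match (the minimum for the SQDIFF methods,
 * the maximum otherwise).
 */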
void Detector::MatchingMethod(int match_method, cv::Mat img, cv::Mat templ,
                              cv::Point *bestMatch, double *minMaxVal){
  using namespace std;
  using namespace cv;

  Mat result;

  int result_cols = img.cols - templ.cols + 1;
  int result_rows = img.rows - templ.rows + 1;

  result.create( result_rows, result_cols, CV_32FC1 );

  matchTemplate( img, templ, result, match_method);

  double minVal; double maxVal; Point minLoc; Point maxLoc;
  minMaxLoc( result, &minVal, &maxVal, &minLoc, &maxLoc);
  if( match_method == TM_SQDIFF || match_method == TM_SQDIFF_NORMED ) {
    *bestMatch = minLoc;
    *minMaxVal = minVal;
  } else {
    *bestMatch = maxLoc;
    *minMaxVal = maxVal;
  }

  return;
}

Declarations (detector.h):

static int findSquare(cv::Mat &image, std::vector<std::vector<cv::Point>> *found4CornersVector, int threshLevels = 11, int cannyThresh = 50)
    Detector::findSquare finds all the squares in the image, exploiting OpenCV functions. In practice it is a blob...

static void drawSquares(cv::Mat image, const std::vector<std::vector<cv::Point>> squares, const char *wndname = "Square Detection Demo")
    Detector::drawSquares draws the detected squares on the image.

static int templateMatching(cv::Mat img, std::vector<cv::Mat> templVector, std::vector<std::vector<cv::Point>> *found4CornersVector, std::vector<double> *bestValues, int templ_method = cv::TM_SQDIFF, std::vector<double> scaleFactors = std::vector<double>(), bool showDisplay = true)
    Detector::templateMatching: see the single-template function; this overload is used when multiple templates are to be matched...
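
A minimal usage sketch (not part of the repository) of how these static helpers might be called. The image and template paths, and the surrounding main(), are placeholders; the default thresholds listed in the declarations above and the TM_SQDIFF method are assumed.

// Hypothetical usage sketch: file paths and main() are illustrative only.
#include <opencv2/opencv.hpp>
#include <vector>
#include "header/detector.h"

int main(){
  cv::Mat scene = cv::imread("scene.png");          // placeholder path
  cv::Mat templ = cv::imread("hole_template.png");  // placeholder path

  // Square detection: one vector of 4 ordered corners per detected square
  std::vector<std::vector<cv::Point>> squares;
  Detector::findSquare(scene, &squares);            // default thresholds assumed

  // Multi-scale template matching for a single template
  std::vector<cv::Point> corners;
  double score = 0.0;
  Detector::templateMatching(scene, templ, &corners, &score,
                             cv::TM_SQDIFF, std::vector<double>(), false);

  // Debug visualization of the detected squares
  Detector::drawSquares(scene, squares);
  return 0;
}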