00001 #include "Robots/SeaBeeIII/StraightEdgeFinder.H" 00002 00003 #include "Component/ModelParam.H" 00004 #include "Component/ModelOptionDef.H" 00005 #include "Image/OpenCVUtil.H" 00006 #include "Raster/Raster.H" 00007 #include "Image/Pixels.H" 00008 00009 #include "Image/Image.H" 00010 #include "Image/Pixels.H" 00011 #include "Raster/Raster.H" 00012 #include "Image/CutPaste.H" 00013 #include "Image/OpenCVUtil.H" 00014 00015 #include "BeoSub/IsolateColor.H" 00016 #include "Image/DrawOps.H" 00017 #include "Image/ColorOps.H" 00018 00019 #include "GUI/XWinManaged.H" 00020 00021 #include "SeaBee/PipeRecognizer.H" 00022 #include "BeoSub/ColorSegmenter.H" 00023 #include "SeaBee/VisionRecognizer.H" 00024 00025 #include "Image/ColorOps.H" 00026 00027 00028 #include "Media/MediaOpts.H" 00029 00030 #include "VFAT/segmentImageMerge.H" 00031 00032 #include "Util/Types.H" 00033 #include "Util/log.H" 00034 00035 00036 #ifndef STRAIGHT_EDGE_FINDER_C 00037 #define STRAIGHT_EDGE_FINDER_C 00038 00039 using namespace std; 00040 00041 #define MIN_CENTER_DIST 15 00042 #define MIN_AREA 150 00043 #define CORNER_TOLERANCE 4 00044 00045 #define IMG_WIDTH 320 00046 #define IMG_HEIGHT 240 00047 int thresh = 42; 00048 double angle_thresh = 0.3; 00049 00050 00051 // ###################################################################### 00052 StraightEdgeFinder::StraightEdgeFinder 00053 ( OptionManager& mgr, 00054 const std::string& descrName, 00055 const std::string& tagName) : 00056 VisionBrainComponentI(mgr, descrName, tagName) 00057 { 00058 00059 itsWidth = IMG_WIDTH; 00060 itsHeight = IMG_HEIGHT; 00061 itsDispImg.resize(2*itsWidth, 2*itsHeight); 00062 00063 00064 /*int wi = itsWidth/4; 00065 int hi = itsHeight/4; 00066 00067 segmenter = new segmentImageTrackMC<float,unsigned int, 4> (itsWidth*itsHeight); 00068 00069 segmenter->SITsetFrame(&wi,&hi); 00070 00071 segmenter->SITsetCircleColor(0,255,0); 00072 segmenter->SITsetBoxColor(255,255,0,0,255,255); 00073 segmenter->SITsetUseSmoothing(false,10); 00074 00075 00076 segmenter->SITtoggleCandidateBandPass(false); 00077 segmenter->SITtoggleColorAdaptation(false);*/ 00078 } 00079 00080 00081 // ###################################################################### 00082 StraightEdgeFinder::~StraightEdgeFinder() 00083 { 00084 } 00085 00086 // ###################################################################### 00087 void StraightEdgeFinder::registerTopics() 00088 { 00089 LINFO("Registering StraightEdge Message"); 00090 this->registerPublisher("StraightEdgeMessageTopic"); 00091 registerVisionTopics(); 00092 } 00093 00094 00095 00096 // ###################################################################### 00097 void StraightEdgeFinder::updateFrame(Image<PixRGB<byte> > img, std::string cameraId) 00098 { 00099 bool isFwdCamera = false; 00100 00101 if(cameraId == "FwdCamera") 00102 isFwdCamera = true; 00103 00104 LINFO("Image Received: %d", itsFrameCount); 00105 00106 itsDispImg.clear(); 00107 inplacePaste(itsDispImg, img, Point2D<int>(0, 0)); 00108 00109 uint w = itsWidth; uint h = itsHeight; 00110 rutz::shared_ptr<Image< PixRGB<byte> > > 00111 outputImg(new Image<PixRGB<byte> >(w,h, ZEROS)); 00112 00113 //get all of the orange pixels in the image 00114 rutz::shared_ptr<Image<byte> > orangeIsoImage; 00115 orangeIsoImage.reset(new Image<byte>(w,h, ZEROS)); 00116 orangeIsoImage->resize(w,h); 00117 float res = isolateOrange(img, *orangeIsoImage); 00118 LINFO("result: %f", res); 00119 00120 inplacePaste(itsDispImg, toRGB(*orangeIsoImage), Point2D<int>(w,0)); 00121 00122 //get all the orange lines in the image 00123 std::vector<LineSegment2D> pipelines = 00124 getPipeLocation 00125 (orangeIsoImage, outputImg, StraightEdgeFinder::HOUGH); 00126 00127 int minY = -1; //minimum midpoint y coordinate found 00128 int followLineIndex = -1; //index of pipeline with minimum y coordinate 00129 00130 //iterates through pipelines and finds the topmost one in the image 00131 for(uint i = 0; i < pipelines.size(); i++) 00132 { 00133 if(pipelines[i].isValid()) 00134 { 00135 LineSegment2D pipeline = pipelines[i]; 00136 Point2D<int> midpoint = (pipeline.point1() + pipeline.point2())/2; 00137 00138 if(midpoint.j < minY || minY == -1) 00139 { 00140 minY = midpoint.j; 00141 followLineIndex = i; 00142 } 00143 } 00144 } 00145 00146 //if we found a pipeline 00147 if(followLineIndex != -1) 00148 { 00149 LineSegment2D followLine = pipelines[followLineIndex]; 00150 Point2D<int> midpoint = (followLine.point1() + followLine.point2())/2; 00151 00152 Point2D<int> projPoint; 00153 projPoint.i = (int)(midpoint.i+30*cos(followLine.angle())); 00154 projPoint.j = (int)(midpoint.j+30*sin(followLine.angle())); 00155 00156 drawLine(*outputImg, midpoint, projPoint, 00157 PixRGB <byte> (255, 255,0), 3); 00158 00159 inplacePaste(itsDispImg, *outputImg, Point2D<int>(0,h)); 00160 00161 // publish the result 00162 RobotSimEvents::StraightEdgeMessagePtr msg = 00163 new RobotSimEvents::StraightEdgeMessage; 00164 00165 ImageIceMod::LineIce line; 00166 line.pt1.i = midpoint.i; 00167 line.pt1.j = midpoint.j; 00168 line.pt2.i = projPoint.i; 00169 line.pt2.j = projPoint.j; 00170 00171 float radAngle,normalAngle; 00172 radAngle = followLine.angle(); 00173 normalAngle = normalizeAngle(radAngle); 00174 00175 LINFO("angle in rads: %1.6f | normalized deg: %1.6f",radAngle,normalAngle); 00176 00177 line.angle = normalAngle; 00178 msg->line = line; 00179 msg->isFwdCamera = true; 00180 00181 LINFO("SEMessage: line: [%4d,%4d][%4d,%4d]: %10.6f", 00182 line.pt1.i, line.pt1.j, 00183 line.pt2.i, line.pt2.j, line.angle); 00184 this->publish("StraightEdgeMessageTopic", msg); 00185 } 00186 00187 itsOfs->writeRGB(itsDispImg, "Straight Edge Finder Image", 00188 FrameInfo("Straight Edge Finder Image", SRC_POS)); 00189 00190 itsOfs->updateNext(); 00191 } 00192 00193 // ###################################################################### 00194 std::vector<LineSegment2D> StraightEdgeFinder::getPipeLocation 00195 (rutz::shared_ptr<Image<byte> > colorSegmentedImage, 00196 rutz::shared_ptr<Image<PixRGB <byte> > > outputImage, 00197 PipeRecognizeMethod method) 00198 { 00199 if(!colorSegmentedImage->initialized()) 00200 return std::vector<LineSegment2D>(); 00201 00202 // Image<byte> lum = luminance(*colorSegmentedImage); 00203 Image<byte> lum = *colorSegmentedImage; 00204 00205 switch(method) 00206 { 00207 case HOUGH: 00208 return calculateHoughTransform(lum, 00209 outputImage); 00210 break; 00211 00212 default: 00213 LERROR("Invalid pipe recognizer method specified"); 00214 return std::vector<LineSegment2D>(); 00215 } 00216 } 00217 00218 // ###################################################################### 00219 std::vector<LineSegment2D> StraightEdgeFinder::calculateHoughTransform 00220 (Image<byte>& colorSegmentedImage, 00221 rutz::shared_ptr<Image<PixRGB<byte> > > outputImage) 00222 { 00223 #ifndef HAVE_OPENCV 00224 LFATAL("OpenCV must be installed in order to use this function"); 00225 #else 00226 // Do edge detection (canny) on the image. 00227 IplImage cannyImage = getCannyImage( colorSegmentedImage ); 00228 00229 // Clear output image and set it equal to canny image. 00230 // outputImage->clear(); 00231 //rutz::shared_ptr<Image<PixRGB<byte> > > temp 00232 //(new Image<PixRGB<byte> > ( toRGB( ipl2gray( &cannyImage ) ) ); 00233 // Cannot convert directly to RGB 00234 //since cannyImage has only 1 channel (black and white). 00235 // temp.resize(outputImage->getDims()); 00236 // *outputImage += temp; 00237 00238 // Do Hough transform. 00239 std::vector <LineSegment2D> lineSegments = getHoughLines( cannyImage ); 00240 00241 // Loop through hough lines and draw them to the screen. 00242 for(uint i = 0; i < lineSegments.size(); i++ ) 00243 { 00244 Point2D<int> pt1 = lineSegments[i].point1(); 00245 Point2D<int> pt2 = lineSegments[i].point2(); 00246 00247 if(pt1.isValid() && pt2.isValid()) 00248 { 00249 //draw line segment in output image 00250 drawLine(*outputImage, pt1, pt2, PixRGB<byte>(255,0,0)); 00251 } 00252 } 00253 00254 std::vector <LineSegment2D> prunedHoughLines = 00255 pruneHoughLines( lineSegments ); 00256 00257 return prunedHoughLines; 00258 00259 #endif // HAVE_OPENCV 00260 } 00261 00262 // ###################################################################### 00263 00264 uint StraightEdgeFinder::calculateLineBestFit 00265 (Image<byte> &colorSegmentedImage, 00266 Image<PixRGB <byte> > &outputImage, 00267 Point2D<int> &pipeCenter, 00268 double &pipeAngle) 00269 {return 0;} 00270 00271 uint StraightEdgeFinder::calculateContours 00272 (Image<byte> &colorSegmentedImage, 00273 Image<PixRGB <byte> > &outputImage, 00274 Point2D<int> &pipeCenter, 00275 double &pipeAngle) 00276 {return 0;} 00277 00278 // double PipeRecognizer::getOrangePixels(Image<byte> &cameraImage, 00279 // double &avgX, 00280 // double &avgY, 00281 // double &sumX, 00282 // double &sumY) 00283 // { 00284 // Timer tim(1000000); 00285 00286 // std::vector <Point2D<int> > edgePoints; 00287 // uint w = cameraImage.getWidth(); 00288 // uint h = cameraImage.getHeight(); 00289 00290 // Image<byte> (*colorSegmentedImage)(w,h, ZEROS); 00291 00292 // (*colorSegmentedImage) = cameraImage; 00293 00294 // avgX = 0.0; 00295 // avgY = 0.0; 00296 // sumX = 0.0; 00297 // sumY = 0.0; 00298 00299 // //Isolate the orange pixels in the image 00300 // tim.reset(); 00301 00302 // // isolateOrange(cameraImage, orangeIsoImage); //, fnb=0; 00303 00304 00305 // //find all the white edge pixels in the image and store them 00306 // for(int y = 0; y < orangeIsoImage.getHeight(); y++) 00307 // { 00308 // for(int x = 0; x < orangeIsoImage.getWidth(); x++) 00309 // { 00310 // if(orangeIsoImage.getVal(x,y) == 255) 00311 // { 00312 // // convert the x,y position of the pixel to an x,y position where 00313 // // the center of the image is the origin as opposed to the top left corner 00314 // // and store the pixel 00315 // edgePoints.push_back(Point2D<int>(x, y)); 00316 00317 // sumX += x; 00318 // sumY += y; 00319 // } 00320 // } 00321 // } 00322 00323 // avgX = sumX/edgePoints.size(); 00324 // avgY = sumY/edgePoints.size(); 00325 00326 // return getSlope(orangeIsoImage, edgePoints, avgX, avgY, sumX, sumY); 00327 // } 00328 00329 // double PipeRecognizer::getSlope(Image<byte> &cameraImage, 00330 // std::vector <Point2D<int> > &points, 00331 // double avgX, 00332 // double avgY, 00333 // double sumX, 00334 // doubley sumY) 00335 // { 00336 // double top = 0.0; 00337 // double bottom = 0.0; 00338 // double top2 = 0.0; 00339 // double bottom2 = 0.0; 00340 // double return_value = 0.0; 00341 // double return_value2 = 0.0; 00342 00343 // int x = 0; 00344 // int y = 0; 00345 00346 // /* loop through all the points in the picture and generate a slope 00347 // by finding the line of best fit*/ 00348 // for(uint i = 0; i < points.size(); i++) 00349 // { 00350 // x = points[i].i; 00351 // y = points[i].j; 00352 00353 // top += (x - avgX) * (y - avgY); 00354 // bottom += (x - avgX) * (x - avgX); 00355 00356 // int tx = x- cameraImage.getWidth()/2; 00357 // int ty = y- cameraImage.getHeight()/2; 00358 // x = ty +cameraImage.getHeight()/2; 00359 // y = -tx + cameraImage.getWidth()/2; 00360 00361 // top2 += (x - avgX) * (y - avgY); 00362 // bottom2 += (x - avgX) * (x - avgX); 00363 00364 // } 00365 00366 // if( bottom != 0.0 ) 00367 // return_value = atan2(top,bottom); 00368 // else 00369 // return_value = 1.62; //if the bottom is zero, we have a vertical line, 00370 // //so we want to return pi/2 00371 00372 // if( bottom2 != 0.0 ) 00373 // return_value2 = (atan2(top2,bottom2)+3.14159/2); 00374 // else 00375 // return_value2 = (1.62+3.14159/2); 00376 00377 00378 // double e1 = 0.0; 00379 // double e2 = 0.0; 00380 // for(uint i = 0; i < points.size(); i++) 00381 // { 00382 00383 // x = points[i].i; 00384 // y = points[i].j; 00385 00386 // e1 =pow(x/bottom*top+avgY-y,2); 00387 00388 // int tx = x- cameraImage.getWidth()/2; 00389 // int ty = y- cameraImage.getHeight()/2; 00390 // x = ty +cameraImage.getHeight()/2; 00391 // y = -tx + cameraImage.getWidth()/2; 00392 00393 00394 // e2 =pow(x/bottom2*top2+avgY-y,2); 00395 // } 00396 00397 00398 // if(e1<e2) 00399 // return return_value; 00400 // return return_value2; 00401 // } 00402 00403 00404 // ###################################################################### 00405 std::vector<LineSegment2D> StraightEdgeFinder::getHoughLines 00406 ( IplImage cannyImage ) 00407 { 00408 #ifndef HAVE_OPENCV 00409 LFATAL("OpenCV must be installed in order to use this function"); 00410 #else 00411 // Storage for use in hough transform. 00412 CvMemStorage* storage = cvCreateMemStorage(0); 00413 00414 // Perform hough transform and store hough lines. 00415 CvSeq* cvLines = cvHoughLines2(&cannyImage, storage, CV_HOUGH_PROBABILISTIC, 1, CV_PI/180, 30, 20, 10); 00416 00417 00418 // Storage for hough line segments. 00419 std::vector <LineSegment2D> lineSegments; 00420 00421 // Loop through hough lines, store them as line segments, and draw lines in output image. 00422 for(int i = 0; i < cvLines->total; i++ ) 00423 { 00424 // Get a line. 00425 CvPoint* line = (CvPoint*)cvGetSeqElem(cvLines,i); 00426 00427 // Get line end points. 00428 Point2D<int> pt1 = Point2D<int>(line[0].x,line[0].y); 00429 Point2D<int> pt2 = Point2D<int>(line[1].x,line[1].y); 00430 00431 // Create line segment from endpoints and store. 00432 lineSegments.push_back(LineSegment2D(pt1,pt2)); 00433 } 00434 cvReleaseMemStorage( &storage ); 00435 00436 return lineSegments; 00437 #endif // HAVE_OPENCV 00438 } 00439 00440 00441 // ###################################################################### 00442 IplImage StraightEdgeFinder::getCannyImage( Image<byte> colorSegmentedImage ) 00443 { 00444 #ifndef HAVE_OPENCV 00445 LFATAL("OpenCV must be installed in order to use this function"); 00446 #else 00447 // Find edges of segmented image using canny. 00448 IplImage *edge = cvCreateImage( cvGetSize( img2ipl( colorSegmentedImage ) ), 8, 1 ); 00449 cvCanny( img2ipl( luminance( colorSegmentedImage ) ), edge, 100, 150, 3 );//150,200,3 00450 00451 return *edge; 00452 #endif // HAVE_OPENCV 00453 } 00454 00455 00456 // ###################################################################### 00457 std::vector<LineSegment2D> StraightEdgeFinder::pruneHoughLines 00458 (const std::vector<LineSegment2D> lineSegments) 00459 { 00460 uint numLines = lineSegments.size(); 00461 if(numLines == 0) { LDEBUG("No hough lines to prune"); } 00462 00463 std::vector< std::vector<LineSegment2D> > pipeLines; 00464 00465 //Go through all the lines 00466 for(uint r = 0; r < numLines; r++) 00467 { 00468 int lnIndex = -1; 00469 00470 //check to see if the current lines fits into a bucket 00471 for(uint c = 0; c < pipeLines.size(); c++) 00472 { 00473 LineSegment2D pipeLine = pipeLines[c][0]; 00474 00475 if(pipeLine.isValid() && lineSegments[r].angleBetween(pipeLine) < 5*(M_PI/180))//convert 5 degrees to radians 00476 { 00477 lnIndex = c; 00478 break; 00479 } 00480 } 00481 00482 //if the line fits into a pre-existing bucket, add it to the bucket 00483 if( lnIndex > 0 ) 00484 { 00485 pipeLines[lnIndex].push_back(lineSegments[r]); 00486 //average the old bucket's value with the new line added 00487 //so as to create a moving bucket 00488 Point2D<int> newPt1 = 00489 Point2D<int>(((lineSegments[r].point1().i + pipeLines[lnIndex][0].point1().i)/2), 00490 ((lineSegments[r].point1().j + pipeLines[lnIndex][0].point1().j)/2)); 00491 00492 Point2D<int> newPt2 = Point2D<int>(((lineSegments[r].point2().i + pipeLines[lnIndex][0].point2().i)/2), 00493 ((lineSegments[r].point2().j + pipeLines[lnIndex][0].point2().j)/2)); 00494 00495 pipeLines[lnIndex][0] = LineSegment2D(newPt1,newPt2); 00496 00497 } 00498 //otherwise, create a new bucket 00499 else 00500 { 00501 std::vector<LineSegment2D> newCntrLines; 00502 newCntrLines.push_back(lineSegments[r]); 00503 pipeLines.push_back(newCntrLines); 00504 } 00505 } 00506 00507 std::vector<LineSegment2D> centerPipeLines; 00508 00509 uint pipeLineSize = pipeLines.size(); 00510 00511 for(uint c = 0; c < pipeLineSize; c++) 00512 { 00513 centerPipeLines.push_back(pipeLines[c][0]); 00514 } 00515 // std::vector<LineSegment2D> centerPipeLines; 00516 00517 // Point2D<int> two = Point2D<int>(2,2); 00518 00519 // for(uint c = 0; c < pipeLines.size(); c++) 00520 // { 00521 // if(pipeLines[c].size() == 2) 00522 // { 00523 // Point2D<int> endPoint1 = Point2D<int>((pipeLines[c][0].point1()+pipeLines[c][1].point1())/two); 00524 // Point2D<int> endPoint2 = Point2D<int>((pipeLines[c][0].point2()+pipeLines[c][1].point2())/two); 00525 00526 // centerPipeLines.push_back(LineSegment2D(endPoint1,endPoint2)); 00527 // } 00528 // } 00529 00530 return centerPipeLines; 00531 } 00532 00533 // //true test with frame 3498 00534 // float StraightEdgeFinder::isolateOrange 00535 // (Image< PixRGB<byte> > &inputImage, Image<byte> &outputImage) 00536 // { 00537 // Image< PixRGB<byte> > tempImage(inputImage); 00538 // Image<PixH2SV2<float> > h2svImage(tempImage); 00539 00540 // readConfig colorConf; 00541 // colorConf.openFile("colortrack.conf", false); 00542 00543 // int orangeCount = 0; 00544 00545 // Image<PixRGB<byte> >::iterator iptr = inputImage.beginw(); 00546 // Image<byte>::iterator optr = outputImage.beginw(); 00547 // Image<PixRGB<byte> >::iterator stop = inputImage.endw(); 00548 00549 // float tR = colorConf.getItemValueF("ORANGE_stdR");//70.0; 00550 // float tG = colorConf.getItemValueF("ORANGE_stdG");//200.0; 00551 // float tB = colorConf.getItemValueF("ORANGE_stdB");//128.0; 00552 00553 // float R = colorConf.getItemValueF("ORANGE_R");//255; 00554 // float G = colorConf.getItemValueF("ORANGE_G");//198; 00555 // float B = colorConf.getItemValueF("ORANGE_B");//0; 00556 00557 00558 // //for average 00559 // float totalHue = 0.0; 00560 // int numTotal = 0; 00561 00562 // //orange as a hue 00563 // float pure_orange_hue = 60*(((G/255)-(B/255))/((R/255)-(B/255)))+0; 00564 // float orange_hue = 60*(((tB/255)-(tR/255))/((tG/255) - (tR/255)))+120; 00565 // //orange saturation (of HSL) 00566 // float orange_sat = ((200.0/255.0)-(70/255.0))/(2.0-(270.0/255.0));//using tR,tG,tB, R,B,G gives '1' 00567 00568 // std::cout<<"orange hue is: "<<orange_hue<<std::endl; 00569 // std::cout<<"orange saturation(purity) is: "<<orange_sat<<std::endl; 00570 // std::cout<<"orange HSV saturation is: "<<(1.0-70.0/200.0)<<std::endl; 00571 // // LINFO("orange values (RGB):(std RGB): %f, %f, %f: %f, %f, %f", R, G, B, tR, tG, tB); 00572 00573 // while (iptr != stop) 00574 // { 00575 // float hue = 0.0; 00576 // float s = 0.0; //saturation 00577 // float avgR = (*iptr).red(); 00578 // float avgG = (*iptr).green(); 00579 // float avgB = (*iptr).blue(); 00580 // float r = avgR/255; 00581 // float g = avgG/255; 00582 // float b = avgB/255; 00583 00584 00585 00586 00587 // //do conversion to HSV to find the hue 00588 // float max = 0; 00589 // float min = 1; 00590 // //find max 00591 // if(r > max) { max = r;} 00592 // if(g > max) { max = g;} 00593 // if(b > max) { max = b;} 00594 // //find min 00595 // if(r < min){min = r;} 00596 // if(g < min){min = g;} 00597 // if(b < min){min = b;} 00598 00599 // //do conversion to find hue 00600 // if(max == min) {hue = 0.0;} 00601 // else if(max == r && g >= b) {hue = 60.0*((g-b)/(max - min)) + 0.0;} 00602 // else if(max == r && g < b) {hue = 60.0*((g-b)/(max - min)) + 360.0;} 00603 // else if(max == g) {hue = 60.0*((b-r)/(max-min))+120.0;} 00604 // else if(max == b) {hue = 60.0*((r-g)/(max-min))+240.0;} 00605 00606 00607 // //for average calculation 00608 // totalHue += hue; 00609 // numTotal++; 00610 00611 // //find saturation 00612 // if(max){s = max;} 00613 // if(max != 0){s = 1 - min/max;} 00614 // //std::cout<<" "<<hue; 00615 // if(hue == orange_hue)//result:get spects here and there 00616 // { 00617 // //(*optr) = (byte)255; // orange 00618 // //orangeCount++; 00619 // } 00620 // if(hue == pure_orange_hue)//result:nothing 00621 // { 00622 // //(*optr) = (byte)255; // orange 00623 // //orangeCount++; 00624 // } 00625 // //to reason these numbers 145 is about the value of orange hue 00626 // //pretty good but with spects, "s != 1" gets rid of specs, but also takes out some of the pipe 00627 // //value of 120 to 145 seems best 00628 // //using 130 as min makes it less accurate 00629 // //using a higher max does not seem to make a difference 00630 // //probably because of the colors involved here 00631 // if(!(120<hue && hue<146) && 00632 // s != 1) 00633 // { 00634 // //std::cout<<" "<<s; 00635 // (*optr) = (byte)255; // orange 00636 // orangeCount++; 00637 // } 00638 00639 // //float avg = (avgR+avgG+avgB)/3.0; 00640 // //float sigma = pow(avgR - avg, 2.) + pow(avgG - avg, 2.) + pow(avgB - avg, 2.); 00641 // //float stdDev = sqrt( (1./3.) * sigma ); 00642 00643 // //result: pretty good but is confused by highlights 00644 // if (avgR > R - tR && avgR < R + tR && 00645 // avgG > G - tG && avgG < G + tG && 00646 // avgB > B - tB && avgB < B + tB ) 00647 // { 00648 // (*optr) = (byte)255; // orange 00649 // orangeCount++; 00650 // } 00651 // // else 00652 // // { 00653 // // //if(outputImage.coordsOk(i,j)){ 00654 // // //(*optr) = (byte)0; //not orange 00655 // // //} 00656 // // } 00657 // iptr++; optr++; 00658 00659 // } 00660 00661 // //display image to compare to what we get with the color segmenter 00662 00663 // Image<PixRGB<byte> > Aux; 00664 // Aux.resize(100,450,true); 00665 00666 // /******************************************************************/ 00667 // // SEGMENT IMAGE ON EACH INPUT FRAME 00668 00669 // segmenter->SITtrackImageAny(h2svImage,&inputImage,&Aux,true); 00670 00671 // /* Retrieve and Draw all our output images */ 00672 // Image<byte> temp = quickInterpolate(segmenter->SITreturnCandidateImage(),4); 00673 // //display for now for testing purposes 00674 // //wini->drawImage(display); 00675 // //wino->drawImage(temp); 00676 00677 // std::cout<<"average hue was "<<totalHue/numTotal<<std::endl; 00678 // return float(orangeCount)/float( (inputImage.getHeight() * inputImage.getWidth())); 00679 // } 00680 00681 00682 //true test with frame 3498 00683 float StraightEdgeFinder::isolateOrange3(Image< PixRGB<byte> > &inputImage, Image<byte> &outputImage) 00684 { 00685 00686 readConfig colorConf; 00687 colorConf.openFile("colortrack.conf", false); 00688 00689 int orangeCount = 0; 00690 00691 Image<PixRGB<byte> >::iterator iptr = inputImage.beginw(); 00692 Image<byte>::iterator optr = outputImage.beginw(); 00693 Image<PixRGB<byte> >::iterator stop = inputImage.endw(); 00694 00695 // float tR = colorConf.getItemValueF("ORANGE_stdR");//70.0; 00696 // float tG = colorConf.getItemValueF("ORANGE_stdG");//200.0; 00697 // float tB = colorConf.getItemValueF("ORANGE_stdB");//128.0; 00698 00699 // float R = colorConf.getItemValueF("ORANGE_R");//255; 00700 // float G = colorConf.getItemValueF("ORANGE_G");//198; 00701 // float B = colorConf.getItemValueF("ORANGE_B");//0; 00702 00703 // seem to be unused values 00704 float tR = 70.0; 00705 float tG = 200.0; 00706 float tB = 128.0; 00707 00708 float R = 255; 00709 float G = 198; 00710 float B = 0; 00711 00712 00713 00714 00715 //for average 00716 float totalHue = 0.0; 00717 int numTotal = 0; 00718 00719 //orange as a hue 00720 float pure_orange_hue = 60*(((G/255)-(B/255))/((R/255)-(B/255)))+0; 00721 float orange_hue = 60*(((tB/255)-(tR/255))/((tG/255) - (tR/255)))+120; 00722 //orange saturation (of HSL) 00723 float orange_sat = ((200.0/255.0)-(70/255.0))/(2.0-(270.0/255.0));//using tR,tG,tB, R,B,G gives '1' 00724 std::cout<<"orange hue is: "<<orange_hue<<std::endl; 00725 std::cout<<"orange saturation(purity) is: "<<orange_sat<<std::endl; 00726 std::cout<<"orange HSV saturation is: "<<(1.0-70.0/200.0)<<std::endl; 00727 // LINFO("orange values (RGB):(std RGB): %f, %f, %f: %f, %f, %f", R, G, B, tR, tG, tB); 00728 while (iptr != stop) 00729 { 00730 float hue = 0.0; 00731 float s = 0.0; //saturation 00732 float avgR = (*iptr).red(); 00733 float avgG = (*iptr).green(); 00734 float avgB = (*iptr).blue(); 00735 float r = avgR/255; 00736 float g = avgG/255; 00737 float b = avgB/255; 00738 00739 00740 00741 00742 //do conversion to HSV to find the hue 00743 float max = 0; 00744 float min = 1; 00745 //find max 00746 if(r > max) { max = r;} 00747 if(g > max) { max = g;} 00748 if(b > max) { max = b;} 00749 //find min 00750 if(r < min){min = r;} 00751 if(g < min){min = g;} 00752 if(b < min){min = b;} 00753 00754 //do conversion to find hue 00755 if(max == min) {hue = 0.0;} 00756 else if(max == r && g >= b) {hue = 60.0*((g-b)/(max - min)) + 0.0;} 00757 else if(max == r && g < b) {hue = 60.0*((g-b)/(max - min)) + 360.0;} 00758 else if(max == g) {hue = 60.0*((b-r)/(max-min))+120.0;} 00759 else if(max == b) {hue = 60.0*((r-g)/(max-min))+240.0;} 00760 00761 00762 //for average calculation 00763 totalHue += hue; 00764 numTotal++; 00765 00766 //find saturation 00767 if(max){s = max;} 00768 if(max != 0){s = 1 - min/max;} 00769 //std::cout<<" "<<hue; 00770 if(hue == orange_hue)//result:get spects here and there 00771 { 00772 //(*optr) = (byte)255; // orange 00773 //orangeCount++; 00774 } 00775 if(hue == pure_orange_hue)//result:nothing 00776 { 00777 //(*optr) = (byte)255; // orange 00778 //orangeCount++; 00779 } 00780 //to reason these numbers 145 is about the value of orange hue 00781 //pretty good but with spects, "s != 1" gets rid of specs, but also takes out some of the pipe 00782 //value of 120 to 145 seems best 00783 //using 130 as min makes it less accurate 00784 //using a higher max does not seem to make a difference 00785 //probably because of the colors involved here 00786 if(!(120 <= hue && hue <= 145) && 00787 s != 1) 00788 { 00789 //std::cout<<" "<<s; 00790 (*optr) = (byte)255; // orange 00791 orangeCount++; 00792 } 00793 00794 //float avg = (avgR+avgG+avgB)/3.0; 00795 //float sigma = pow(avgR - avg, 2.) + pow(avgG - avg, 2.) + pow(avgB - avg, 2.); 00796 //float stdDev = sqrt( (1./3.) * sigma ); 00797 00798 //result: pretty good but is confused by highlights 00799 if (avgR > R - tR && avgR < R + tR && 00800 avgG > G - tG && avgG < G + tG && 00801 avgB > B - tB && avgB < B + tB ) 00802 { 00803 (*optr) = (byte)255; // orange 00804 orangeCount++; 00805 } 00806 // else 00807 // { 00808 // //if(outputImage.coordsOk(i,j)){ 00809 // //(*optr) = (byte)0; //not orange 00810 // //} 00811 // } 00812 00813 iptr++; optr++; 00814 } 00815 00816 std::cout<<"average hue was "<<totalHue/numTotal<<std::endl; 00817 return float(orangeCount)/float( (inputImage.getHeight() * inputImage.getWidth())); 00818 } 00819 00820 00821 //true test with frame 3498 00822 float StraightEdgeFinder::isolateOrange4(Image< PixRGB<byte> > &inputImage, Image<byte> &outputImage) 00823 { 00824 00825 // XWindow wini(Dims(width, height), 0, 0, "test-input window"); 00826 // XWindow wino(Dims(width/4, height/4), 0, 0, "test-output window 2"); 00827 // XWindow winAux(Dims(500, 450), 0, 0, "Channel levels"); 00828 // Timer tim; 00829 Image< PixRGB<byte> > display; 00830 00831 uint width = itsWidth; uint height = itsHeight; 00832 Image< PixRGB<byte> > ima = inputImage; 00833 Image< PixRGB<float> > fima; 00834 00835 //Image< PixRGB<byte> > display; 00836 // uint64 t[NAVG]; unsigned int frame = 0; 00837 Image<PixH2SV2<float> > H2SVimage; 00838 00839 /****************************************************************************/ 00840 /* create 2 trackers that are bound together (e.g. 2 trackers in the same 00841 camera input image 00842 */ 00843 /* 00844 //! Mean color to track (ideal color) 00845 std::vector<float> color(3,0.0F); 00846 color[0] = 20.0F; color[1] = 0.25F; color[2] = 156.0F; 00847 00848 //! +/- tollerance value on mean for track 00849 std::vector<float> std(3,0.0F); 00850 std[0] = 30.0F; std[1] = 0.30F; std[2] = 60.0F; 00851 00852 //! normalizer over color values (highest value possible) 00853 std::vector<float> norm(3,0.0F); 00854 norm[0] = 360.0F; norm[1] = 1.0F; norm[2] = 255.0F; 00855 00856 //! how many standard deviations out to adapt, higher means less bias 00857 std::vector<float> adapt(3,0.0F); 00858 adapt[0] = 3.5F; adapt[1] = 3.5F; adapt[2] = 3.5F; 00859 00860 //! highest value for color adaptation possible (hard boundary) 00861 std::vector<float> upperBound(3,0.0F); 00862 upperBound[0] = 50.0F; upperBound[1] = 0.4F ; upperBound[2] = 180.0F; 00863 00864 //! lowest value for color adaptation possible (hard boundary) 00865 std::vector<float> lowerBound(3,0.0F); 00866 lowerBound[0] = 12.0F; lowerBound[1] = 0.1F; lowerBound[2] = 120.0F; 00867 */ 00868 /****************************************************************************/ 00869 // //! Mean color to track (ideal color for red feducial) 00870 // std::vector<float> color(3,0.0F); 00871 // color[0] = 10.0F; color[1] = 0.80F; color[2] = 156.0F; 00872 00873 // //! +/- tollerance value on mean for track 00874 // std::vector<float> std(3,0.0F); 00875 // std[0] = 30.0F; std[1] = 0.30F; std[2] = 60.0F; 00876 00877 // //! normalizer over color values (highest value possible) 00878 // std::vector<float> norm(3,0.0F); 00879 // norm[0] = 360.0F; norm[1] = 1.0F; norm[2] = 255.0F; 00880 00881 // //! how many standard deviations out to adapt, higher means less bias 00882 // std::vector<float> adapt(3,0.0F); 00883 // adapt[0] = 3.5F; adapt[1] = 3.5F; adapt[2] = 3.5F; 00884 00885 // //! highest value for color adaptation possible (hard boundry) 00886 // std::vector<float> upperBound(3,0.0F); 00887 // upperBound[0] = 50.0F; upperBound[1] = 1.0F ; upperBound[2] = 255.0F; 00888 00889 // //! lowest value for color adaptation possible (hard boundry) 00890 // std::vector<float> lowerBound(3,0.0F); 00891 // lowerBound[0] = 0.0F; lowerBound[1] = 0.1F; lowerBound[2] = 10.0F; 00892 00893 /****************************************************************************/ 00894 //! Mean color to track (ideal color for blue feducial) 00895 00896 00897 std::vector<float> color(3,0.0F); 00898 //"PINK" 00899 color[0] = 0.0F; color[1] = 0.88F; color[2] = 180.0F; 00900 //BLUE 00901 //color[0] = 250.0F; color[1] = 0.50F; color[2] = 156.0F; 00902 00903 //! +/- tollerance value on mean for track 00904 std::vector<float> std(3,0.0F); 00905 std[0] = 60.0F; std[1] = 0.30F; std[2] = 60.0F; 00906 00907 //! normalizer over color values (highest value possible) 00908 std::vector<float> norm(3,0.0F); 00909 norm[0] = 360.0F; norm[1] = 1.0F; norm[2] = 255.0F; 00910 00911 //! how many standard deviations out to adapt, higher means less bias 00912 std::vector<float> adapt(3,0.0F); 00913 adapt[0] = 3.5F; adapt[1] = 3.5F; adapt[2] = 3.5F; 00914 00915 //! highest value for color adaptation possible (hard boundry) 00916 std::vector<float> upperBound(3,0.0F); 00917 upperBound[0] = 360.0F; upperBound[1] = 1.0F ; upperBound[2] = 255.0F; 00918 00919 //! lowest value for color adaptation possible (hard boundry) 00920 std::vector<float> lowerBound(3,0.0F); 00921 lowerBound[0] = 200.0F; lowerBound[1] = 0.1F; lowerBound[2] = 10.0F; 00922 00923 00924 /****************************************************************************/ 00925 //! extracted signature 00926 00927 // signature extracted for Nathan's mahogany shirt 00928 00929 // H1 - H2 - S - V 00930 // std::vector<float> color(4,0.0F); 00931 // color[0] = 0.350962; color[1] = 0.645527; color[2] = 0.313523; color[3] = 0.720654; 00932 00933 // //! +/- tollerance value on mean for track 00934 // std::vector<float> std(4,0.0F); 00935 // std[0] = 0.339556; std[1] = 0.368726; std[2] = 0.609608; std[3] = 0.34012; 00936 00937 // //! normalizer over color values (highest value possible) 00938 // std::vector<float> norm(4,0.0F); 00939 // norm[0] = 1.0F; norm[1] = 1.0F; norm[2] = 1.0F; norm[3] = 1.0F; 00940 00941 // //! how many standard deviations out to adapt, higher means less bias 00942 // std::vector<float> adapt(4,0.0F); 00943 // adapt[0] = 3.5F; adapt[1] = 3.5F; adapt[2] = 3.5F; adapt[3] = 3.5F; 00944 00945 // //! highest value for color adaptation possible (hard boundary) 00946 // std::vector<float> upperBound(4,0.0F); 00947 // upperBound[0] = color[0] + 0.45F; upperBound[1] = color[1] + 0.45F; 00948 // upperBound[2] = color[2] + 0.55F; upperBound[3] = color[3] + 0.55F; 00949 00950 // //! lowest value for color adaptation possible (hard boundary) 00951 // std::vector<float> lowerBound(4,0.0F); 00952 // lowerBound[0] = color[0] - 0.45F; lowerBound[1] = color[1] - 0.45F; 00953 // lowerBound[2] = color[2] - 0.55F; lowerBound[3] = color[3] - 0.55F; 00954 00955 00956 //int zero = 0; 00957 int wi = width/4; 00958 int hi = height/4; 00959 00960 segmentImageTrackMC<float,unsigned int, 4> segmenter(wi*hi); 00961 00962 segmenter.SITsetTrackColor(&color,&std,&norm,&adapt,&upperBound,&lowerBound); 00963 00964 /* This limits the area of consideration to an area smaller than 00965 the image size. That is, it creates a boundery in the image 00966 outside of which it will not consider pixes (i.e. a frame) 00967 */ 00968 segmenter.SITsetFrame(&wi,&hi); 00969 00970 00971 /* Set display colors for output of tracking. Strictly asthetic */ 00972 segmenter.SITsetCircleColor(255,255,0); 00973 segmenter.SITsetBoxColor(255,255,0,0,255,255); 00974 segmenter.SITsetUseSmoothing(true,10); 00975 //unsigned long counter = 0; 00976 00977 Image<PixRGB<byte> > Aux; 00978 Aux.resize(100,450,true); 00979 00980 /* Take in the image and color segment it */ 00981 H2SVimage = ima; 00982 display = ima; 00983 00984 /******************************************************************/ 00985 // SEGMENT IMAGE ON EACH INPUT FRAME 00986 00987 segmenter.SITtrackImageAny(H2SVimage,&display,&Aux,true); 00988 00989 /* Retrieve and Draw all our output images */ 00990 Image<byte> temp = segmenter.SITreturnCandidateImage(); 00991 00992 00993 outputImage = temp; 00994 00995 inplacePaste(itsDispImg, toRGB(temp), Point2D<int>(itsWidth,itsHeight)); 00996 00997 //wini.drawImage(display); 00998 //wino.drawImage(temp); 00999 //winAux.drawImage(Aux); 01000 01001 /******************************************************************/ 01002 // Uncomment these lines to write each frame to the hard drive 01003 /******************************************************************/ 01004 /* 01005 LINFO("COUNT %d",counter); 01006 // TRACKER DISPLAY 01007 Raster::WriteRGB(display,sformat("out.display.%d.ppm",counter)); 01008 // BOOL CANDIDATE MAP 01009 Raster::WriteRGB(temp,sformat("out.temp.%d.ppm",counter)); 01010 // ADAPTIVE THRESHOLDING BARS 01011 Raster::WriteRGB(Aux,sformat("out.Aux.%d.ppm",counter)); 01012 // BLOB ID MAP 01013 Image<byte> blobs = segmenter.SITreturnBlobMap(); 01014 inplaceNormalize(blobs, 0,255); 01015 Raster::WriteRGB(blobs,sformat("out.Blobs.%d.ppm",counter)); 01016 counter++; 01017 */ 01018 /******************************************************************/ 01019 01020 return 0.0; 01021 } 01022 01023 01024 float StraightEdgeFinder::isolateOrange5 01025 (Image< PixRGB<byte> > &inputImage, Image<byte> &outputImage) 01026 { 01027 // variables for segmenting and tracking 01028 int width = itsWidth, height = itsHeight; 01029 // float delay = 0; 01030 float H,S,V,Hs,Ss,Vs; 01031 float LOTcount = 0; 01032 01033 // timer initialization 01034 Image< PixRGB<byte> > ima = inputImage; 01035 Image< PixRGB<float> > fima; 01036 01037 // configure segmenter and tracker 01038 segmentImage segment(HSV); 01039 segmentImageTrack track(1000, &segment); 01040 H = 10; Hs = 200; 01041 S = .70; Ss = .20; 01042 V = 150; Vs = 250; 01043 segment.setHue(H,Hs,0); 01044 segment.setSat(S,Ss,0); 01045 segment.setVal(V,Vs,0); 01046 segment.setHSVavg(15); 01047 segment.setFrame(0,0,width/4,height/4,width/4,height/4); 01048 01049 // decimate image to 1/4 size 01050 fima = decXY(ima); 01051 fima = decXY(fima); 01052 01053 // segment image 01054 segment.segment(fima); 01055 Image<byte> outputI = segment.returnNormalizedCandidates(); 01056 segment.calcMassCenter(); 01057 track.track(0); 01058 01059 for(int i = 0; i < segment.numberBlobs(); i++) 01060 { 01061 if(track.isCandidate(i) == true) 01062 { 01063 segment.getHSVvalueMean(i,&H,&S,&V,&Hs,&Ss,&Vs); 01064 // int tt = segment.getYmin(i); int bb = segment.getYmax(i); 01065 // int ll = segment.getXmin(i); int rr = segment.getXmax(i); 01066 // if((bb != tt) && (ll != rr)) 01067 // drawRect(ima, Rectangle::tlbrI(tt*4,ll*4,bb*4,rr*4), 01068 // PixRGB<byte>(255,255,0),1); 01069 // drawCircle(ima, Point2D<int>((int)segment.getCenterX(i)*4 01070 // ,(int)segment.getCenterY(i)*4) 01071 // ,(int)sqrt((double)segment.getMass(i)), 01072 // PixRGB<byte>(0,0,255),2); 01073 // drawCircle(ima, Point2D<int>((int)segment.getCenterX(i)*4 01074 // ,(int)segment.getCenterY(i)*4) 01075 // ,2,PixRGB<byte>(255,0,0),2); 01076 } 01077 if(track.returnLOT() == true) 01078 { 01079 if(LOTcount == 2) 01080 { 01081 H = 200; Hs = 20; 01082 S = .70; Ss = .20; 01083 V = 150; Vs = 150; 01084 LOTcount = 0; 01085 } 01086 else 01087 { 01088 LOTcount++; 01089 } 01090 } 01091 segment.setHue(H,(Hs*3),0); 01092 segment.setSat(S,(Ss*3),0); 01093 segment.setVal(V,(Vs*3),0); 01094 } 01095 // drawCircle(ima, Point2D<int>((int)track.getObjectX()*4 01096 // ,(int)track.getObjectY()*4) 01097 // ,2,PixRGB<byte>(0,255,0)); 01098 01099 // if(camPause.get() > delay) 01100 // { 01101 // LINFO( "Object mass: %d", track.getMass() ); 01102 // int modi = (int)track.getObjectX()*8; 01103 // int modj = 480-((int)track.getObjectY()*8); 01104 // if(modi > 0 && modi < 640 && modj > 0 && modj < 480) 01105 // { 01106 // if(!track.returnLOT() && 01107 // track.getMass() < 2400 && track.getMass() > 30 ) 01108 // { 01109 // /* // send speed and steer command to Board B 01110 // if( car->getSpeed() < 0.18 ) 01111 // car->setSpeed( car->getSpeed() + 0.01 ); 01112 // car->setSteering( 1.0f * 1/320 * ( modi - 320 ) ); 01113 // */ 01114 // LINFO( "Steering to %f", 1.0f * 1/320 * ( modi - 320 ) ); 01115 // } 01116 // else 01117 // { 01118 // /* // send speed and steer command to Board B 01119 // car->setSpeed( 0.0 ); 01120 // car->setSteering( 0.0 ); 01121 // */ 01122 // LINFO("Loss of Track, stopping"); 01123 // } 01124 // } 01125 // } 01126 01127 // // display segment image if option was specified 01128 // ofs->writeRGB(ima, "input"); 01129 // ofs->writeGray(outputI, "normalizedCandidates"); 01130 01131 01132 outputImage = outputI; 01133 01134 itsDispImg.clear(); 01135 // inplacePaste(itsDispImg, inputImage, Point2D<int>(itsWidth,0)); 01136 inplacePaste(itsDispImg, toRGB(outputI), Point2D<int>(itsWidth,itsHeight)); 01137 01138 01139 return 1.0; 01140 } 01141 01142 01143 float StraightEdgeFinder::normalizeAngle(float angleInRads) { 01144 float angleInDegs; 01145 01146 angleInDegs = angleInRads * 180 / M_PI; 01147 if (angleInDegs < 0) angleInDegs += 90; 01148 else angleInDegs -= 90; 01149 01150 return angleInDegs; 01151 } 01152 01153 01154 #endif 01155 01156 // ###################################################################### 01157 /* So things look consistent in everyone's emacs... */ 01158 /* Local Variables: */ 01159 /* indent-tabs-mode: nil */ 01160 /* End: */