irobot-followColor.C

Go to the documentation of this file.
00001 /*!@file Beobot/irobot-followColor.C Test color segment following */
00002 
00003 // //////////////////////////////////////////////////////////////////// //
00004 // The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
00005 // University of Southern California (USC) and the iLab at USC.         //
00006 // See http://iLab.usc.edu for information about this project.          //
00007 // //////////////////////////////////////////////////////////////////// //
00008 // Major portions of the iLab Neuromorphic Vision Toolkit are protected //
00009 // under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
00010 // in Visual Environments, and Applications'' by Christof Koch and      //
00011 // Laurent Itti, California Institute of Technology, 2001 (patent       //
00012 // pending; application number 09/912,225 filed July 23, 2001; see      //
00013 // http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
00014 // //////////////////////////////////////////////////////////////////// //
00015 // This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
00016 //                                                                      //
00017 // The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
00018 // redistribute it and/or modify it under the terms of the GNU General  //
00019 // Public License as published by the Free Software Foundation; either  //
00020 // version 2 of the License, or (at your option) any later version.     //
00021 //                                                                      //
00022 // The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
00023 // that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
00024 // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
00025 // PURPOSE.  See the GNU General Public License for more details.       //
00026 //                                                                      //
00027 // You should have received a copy of the GNU General Public License    //
00028 // along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
00029 // to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
00030 // Boston, MA 02111-1307 USA.                                           //
00031 // //////////////////////////////////////////////////////////////////// //
00032 //
00033 // Primary maintainer for this file:  T. Moulard <thomas.moulard@gmail.com>
00034 // $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Beobot/irobot-followColor.C $
00035 // $Id: irobot-followColor.C 13993 2010-09-20 04:54:23Z itti $
00036 //
00037 
00038 
00039 
00040 // model manager
00041 #include "Component/ModelManager.H"
00042 #include "Util/log.H"
00043 #include "rutz/shared_ptr.h"
00044 
00045 // for images and display
00046 #include "Raster/Raster.H"
00047 #include "Image/Image.H"
00048 #include "Image/PixelsTypes.H"
00049 #include "Transport/FrameIstream.H"
00050 #include "GUI/XWinManaged.H"
00051 #include "GUI/XWindow.H"
00052 
00053 // Frame grabber
00054 #include "Devices/FrameGrabberConfigurator.H"
00055 #include "Devices/DeviceOpts.H"
00056 #include "Media/FrameSeries.H"
00057 
00058 // for color segmentation
00059 #include "Util/Timer.H"
00060 #include "Util/Types.H"
00061 #include "Util/log.H"
00062 #include "VFAT/segmentImageTrackMC.H"
00063 #include <cstdio>
00064 #include <cstdlib>
00065 #include <signal.h>
00066 
00067 // for image manipulation
00068 #include "Image/CutPaste.H"     // for inplacePaste()
00069 
00070 // number of frames over which framerate info is averaged:
00071 #define NAVG 20
00072 
00073 #if HAVE_LIBSERIAL && HAVE_LIBIROBOT_CREATE
00074 
// Main-loop control flag, cleared from the terminate() signal handler.
// volatile sig_atomic_t is the only object type the C/C++ standards
// guarantee can be safely written from an asynchronous signal handler;
// a plain bool write from a handler is undefined behavior and may be
// optimized away by the compiler.
static volatile sig_atomic_t goforever = 1;
00076 
00077 // for Robot controller
00078 #include <SerialStream.h>
00079 #include <irobot-create.hh>
00080 
00081 // ######################################################################
00082 void terminate(int s)
00083 { LERROR("*** INTERRUPT ***"); goforever = false; exit(1); }
00084 
00085 // ######################################################################
00086 //! Receive signals from master node and performs requested actions
00087 int main(const int argc, const char **argv)
00088 {
00089   using namespace iRobot;
00090   using namespace LibSerial;
00091 
00092   MYLOGVERB = LOG_INFO;
00093 
00094   // instantiate a model manager
00095   ModelManager manager( "Following Color Segments " );
00096 
00097   nub::ref<InputFrameSeries> ifs(new InputFrameSeries(manager));
00098   manager.addSubComponent(ifs);
00099 
00100   manager.setOptionValString(&OPT_FrameGrabberMode, "RGB24");
00101   manager.setOptionValString(&OPT_FrameGrabberDims, "320x240");
00102   manager.setOptionValString(&OPT_FrameGrabberFPS, "30");
00103 
00104   manager.exportOptions(MC_RECURSE);
00105 
00106   // parse command-line
00107   if( manager.parseCommandLine( argc, argv, "", 0, 0 ) == false ) return(1);
00108 
00109   manager.start();
00110 
00111   // get a frame grabber
00112   Dims imageDims = ifs->peekDims();
00113   uint width  = imageDims.w();
00114   uint height = imageDims.h();
00115 
00116 
00117   // initialize the motor controller
00118   SerialStream stream(std::string("/dev/rfcomm1"), std::ios::in | std::ios::out);
00119 
00120   // display window
00121   XWindow wini(Dims(width, height), 0, 0, "test-input window");
00122   XWindow wino(Dims(width/4, height/4), 650, 0, "test-output window 2");
00123   XWindow winAux(Dims(500, 450), 1000, 0, "Channel levels");
00124   Image< PixRGB<byte> > ima; Image< PixRGB<float> > fima;
00125   Image< PixRGB<byte> > display;
00126   Image<PixH2SV2<float> > H2SVimage;
00127 
00128   // ######################################################################
00129   //! extracted color signature
00130 
00131   // H1 - H2 - S - V
00132   std::vector<float> color(4,0.0F);
00133 
00134   //BLUE
00135   //  color[0] = 0.350962; color[1] = 0.645527;
00136   //color[2] = 0.313523; color[3] = 0.720654;
00137 
00138   // YELLOW
00139   //color[0] = 0.8; color[1] = 0.44;
00140   //color[2] = 0.65; color[3] = 0.82;
00141 
00142   // RED
00143   color[0] = 0.75; color[1] = 0.87;
00144   color[2] = 0.48; color[3] = 0.70;
00145 
00146   //! +/- tolerance value on mean for track
00147   std::vector<float> std(4,0.0F);
00148   std[0] = 0.339556; std[1] = 0.368726;
00149   std[2] = 0.609608; std[3] = 0.34012;
00150 
00151   //! normalizer over color values (highest value possible)
00152   std::vector<float> norm(4,0.0F);
00153   norm[0] = 1.0F; norm[1] = 1.0F;
00154   norm[2] = 1.0F; norm[3] = 1.0F;
00155 
00156   //! how many standard deviations out to adapt, higher means less bias
00157   std::vector<float> adapt(4,0.0F);
00158   adapt[0] = 3.5F; adapt[1] = 3.5F;
00159   adapt[2] = 3.5F; adapt[3] = 3.5F;
00160 
00161   //! highest value for color adaptation possible (hard boundary)
00162   std::vector<float> upperBound(4,0.0F);
00163   upperBound[0] = color[0] + 0.45F; upperBound[1] = color[1] + 0.45F;
00164   upperBound[2] = color[2] + 0.55F; upperBound[3] = color[3] + 0.55F;
00165 
00166   //! lowest value for color adaptation possible (hard boundary)
00167   std::vector<float> lowerBound(4,0.0F);
00168   lowerBound[0] = color[0] - 0.45F; lowerBound[1] = color[1] - 0.45F;
00169   lowerBound[2] = color[2] - 0.55F; lowerBound[3] = color[3] - 0.55F;
00170 
00171   int wi = width/4;  int hi = height/4;
00172   segmentImageTrackMC<float,unsigned int, 4> segmenter(wi*hi);
00173   segmenter.SITsetTrackColor(&color,&std,&norm,&adapt,&upperBound,&lowerBound);
00174 
00175   // Limit area of consideration to within the image
00176   segmenter.SITsetFrame(&wi,&hi);
00177 
00178   // Set display colors for output of tracking. Strictly asthetic
00179   segmenter.SITsetCircleColor(0,255,0);
00180   segmenter.SITsetBoxColor(255,255,0,0,255,255);
00181   segmenter.SITsetUseSmoothing(true,10);
00182   // ######################################################################
00183 
00184   // catch signals and redirect them to terminate for clean exit:
00185   signal(SIGHUP, terminate); signal(SIGINT, terminate);
00186   signal(SIGQUIT, terminate); signal(SIGTERM, terminate);
00187   signal(SIGALRM, terminate);
00188 
00189   // instantiate a robot controller
00190   Create robot(stream);
00191 
00192   // Swith to full mode.
00193   robot.sendFullCommand();
00194 
00195   // Let's stream some sensors.
00196   Create::sensorPackets_t sensors;
00197   sensors.push_back(Create::SENSOR_BUMPS_WHEELS_DROPS);
00198   sensors.push_back(Create::SENSOR_WALL);
00199   sensors.push_back(Create::SENSOR_BUTTONS);
00200   robot.sendStreamCommand(sensors);
00201 
00202   // Let's turn!
00203   int speed = 200;
00204   //int ledColor = Create::LED_COLOR_GREEN;
00205   robot.sendDriveCommand(speed, Create::DRIVE_INPLACE_CLOCKWISE);
00206   robot.sendLedCommand(Create::LED_PLAY, 0, 0);
00207 
00208   // timer initialization
00209   Timer tim; Timer camPause;       // to pause the move command
00210   camPause.reset();
00211   //uint64 t[NAVG];
00212   uint fNum = 0;
00213 
00214   // get ready for main loop:
00215   while (!robot.playButton () && goforever)
00216     {
00217       // performace monitor
00218       tim.reset();
00219 
00220       // process robot input
00221       if (robot.bumpLeft () || robot.bumpRight ())
00222       {
00223 
00224         std::cout << "Bump !" << std::endl;
00225 
00226         // take a picture
00227 
00228 
00229       }
00230 
00231 
00232       //   if (robot.wall ())        std::cout << "Wall !" << std::endl;
00233 
00234      if (robot.advanceButton ())
00235         {
00236 
00237         }
00238 
00239       // get, convert, and display image
00240       ima = ifs->readRGB();
00241       //uint64 t0 = tim.get();  // to measure display time
00242 
00243       // segment image on each input frame
00244       Image<PixRGB<byte> > Aux; Aux.resize(100,450,true);
00245       H2SVimage = ima;    // NOTE: this line takes 50ms.
00246       display = ima;
00247       segmenter.SITtrackImageAny(H2SVimage,&display,&Aux,true);
00248 
00249       // Retrieve and Draw all our output images
00250       Image<byte> temp = segmenter.SITreturnCandidateImage();
00251       wini.drawImage(display);
00252       wino.drawImage(temp);
00253 
00254       // write in all the info
00255       Image<PixRGB<byte> > dispAux(500,450, ZEROS);
00256       inplacePaste(dispAux, Aux, Point2D<int>(0, 0));
00257 
00258       // if we find the tracked object
00259       float st,sp;
00260       if(true || !segmenter.SITreturnLOT())
00261         {
00262           int x, y, m; unsigned int minX, maxX, minY, maxY;
00263           segmenter.SITgetBlobPosition(x,y);
00264           segmenter.SITgetBlobWeight(m);
00265           segmenter.SITgetMinMaxBoundry(&minX, &maxX, &minY, &maxY);
00266           //LINFO("x = %d y = %d m = %d", x, y, m);
00267           std::string ntext(sformat("x = %5d y = %5d m = %6d", x, y, m));
00268           writeText(dispAux, Point2D<int>(100, 0), ntext.c_str());
00269 
00270 
00271           LDEBUG("[%3d %3d %3d %3d] -> %d",
00272                  minX, maxX, minY, maxY,
00273                  (maxX - minX) * (maxY - minY));
00274 
00275           int nBlobs = segmenter.SITnumberBlobs();
00276           for(int i = 0; i < nBlobs; i++)
00277             {
00278               int bx = segmenter.SITgetBlobPosX(i);
00279               int by = segmenter.SITgetBlobPosY(i);
00280               int bm = segmenter.SITgetBlobMass(i);
00281               LDEBUG("bx = %d by = %d bm = %d", bx, by, bm);
00282             }
00283 
00284           // steer is controlled by blob x-coor
00285           st = 0.0f;
00286           if(x < 120)      st = -0.8f;
00287           else if(x > 200) st =  0.8f;
00288           else             st = float((x - 120.0)/80.0*1.6 - 0.8);
00289 
00290           // speed is controlled by size of blob
00291           if(m <= 200)
00292             { sp = 0.3f; st = 0.0f; }
00293           if(m > 200 && m < 2000)
00294             sp = (m - 200.0) / 1800.0 * 0.4;
00295           else if(m >= 2000 && m < 6000)
00296             sp = (8000.0 - m) / 6000.0f * 0.4;
00297           else if(m >= 6000 && m < 10000)
00298             { sp = (6000.0 - m)/4000.0 * 0.3; st = 0.0; }
00299           else  { sp = 0.3f; st = 0.0f; }
00300 
00301 //           speed = -1 * speed;
00302 //           ledColor += 10;
00303 //           if (ledColor > 255)
00304 //             ledColor = 0;
00305 
00306 //           robot.sendDriveCommand (speed, Create::DRIVE_INPLACE_CLOCKWISE);
00307 //           if (speed < 0)
00308 //             robot.sendLedCommand (Create::LED_PLAY,
00309 //                                   ledColor,
00310 //                                   Create::LED_INTENSITY_FULL);
00311 //           else
00312 //             robot.sendLedCommand (Create::LED_ADVANCE,
00313 //                                   ledColor,
00314 //                                   Create::LED_INTENSITY_FULL);
00315 
00316 
00317         }
00318       else
00319         {
00320           std::string ntext("lost of track - will look for it");
00321           writeText(dispAux, Point2D<int>(100, 80), ntext.c_str());
00322           //LINFO("lost of track - will look for it");
00323 
00324           st = 0.0;
00325           sp = 0.4;
00326         }
00327 
00328 
00329       // move robot
00330       std::string ntext3(sformat("REAL sp = %f st = %f",
00331                                  sp * 200.0, st * 2000.0));
00332       writeText(dispAux, Point2D<int>(100, 120), ntext3.c_str());
00333       try
00334         {
00335           short radius = 0;
00336           if (st < 0)
00337             radius = 2000. + (st * 2000.0);
00338           else if (st > 0)
00339             radius = -2000. + (st * 2000.0);
00340           robot.sendDriveCommand (sp * 200.0, radius);
00341         }
00342       catch (...) {}
00343 
00344       std::string ntext2(sformat("st = %f sp = %f", st, sp));
00345       writeText(dispAux, Point2D<int>(100, 40), ntext2.c_str());
00346       //LINFO("st = %f sp = %f gr = %f", st, sp, gr);
00347 
00348       winAux.drawImage(dispAux);
00349 
00350       // You can add more commands here.
00351       usleep(20 * 1000);
00352 
00353   LINFO("I cannot work without LibSerial or libirobot-create");
00354 
00355       fNum++;
00356     }
00357 
00358   robot.sendDriveCommand (0, Create::DRIVE_STRAIGHT);
00359 
00360 }
00361 
00362 #else
00363 
//! Fallback main: built when LibSerial / libirobot-create are unavailable.
/*! Reports why the program cannot run and returns a non-zero exit
    status. The diagnostic was previously commented out, so such builds
    failed completely silently; fprintf is used (cstdio is included
    unconditionally at the top of this file) rather than LINFO so the
    message appears regardless of logging configuration. */
int main(const int argc, const char **argv)
{
  fprintf(stderr, "I cannot work without LibSerial or libirobot-create\n");
  return 1;
}
00369 
00370 #endif
00371 
00372 // ######################################################################
00373 /* So things look consistent in everyone's emacs... */
00374 /* Local Variables: */
00375 /* indent-tabs-mode: nil */
00376 /* End: */
Generated on Sun May 8 08:40:12 2011 for iLab Neuromorphic Vision Toolkit by  doxygen 1.6.3