DownwardVision.C

/*!@file BeoSub/BeeBrain/DownwardVision.C Downward Vision Agent         */
// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
// University of Southern California (USC) and the iLab at USC.         //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: Michael Montalbo <montalbo@usc.edu>
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/BeoSub/BeeBrain/DownwardVision.C $
// $Id: DownwardVision.C 9412 2008-03-10 23:10:15Z farhan $
//
//////////////////////////////////////////////////////////////////////////

#include "BeoSub/BeeBrain/DownwardVision.H"

// ######################################################################
DownwardVisionAgent::DownwardVisionAgent(std::string name) : SensorAgent(name)
{
//   pipeRecognizer = BeoSubPipe();
  itsCrossRecognizer.reset(new BeoSubCross());
//   binRecognizer = BeoSubPipe();

}

// ######################################################################
DownwardVisionAgent::DownwardVisionAgent
( std::string name,
  rutz::shared_ptr<AgentManagerB> amb) : SensorAgent(name)
{
  itsAgentManager = amb;
  itsCrossRecognizer.reset(new BeoSubCross());
//   pipeRecognizer = BeoSubPipe();
//   binRecognizer = BeoSubPipe();

}

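// ######################################################################
// Usage sketch (comment only, not compiled as part of this file): the
// two-argument constructor above is the one the rest of BeeBrain appears
// to rely on, since pickAndExecuteAnAction() dereferences itsAgentManager.
// The manager construction and the polling loop below are assumptions for
// illustration, not APIs defined in this file:
//
//   rutz::shared_ptr<AgentManagerB> amb( /* ... */ );  // supplies images, collects results
//   rutz::shared_ptr<DownwardVisionAgent>
//     dv(new DownwardVisionAgent("DownwardVisionAgent", amb));
//
//   while(running)                    // scheduler loop, typically driven
//     dv->pickAndExecuteAnAction();   // from the agent's own thread
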
// ######################################################################
// Scheduler: fetch the current camera frame and service any pending jobs
bool DownwardVisionAgent::pickAndExecuteAnAction()
{
  bool lookedForObject = false;

  itsCameraImage = itsAgentManager->getCurrentImage();

  if(!itsJobs.empty())
    {
      // first clean out any jobs that are to be ignored
      cleanJobs();

      for(itsJobsItr = itsJobs.begin(); itsJobsItr != itsJobs.end(); ++itsJobsItr)
        {
          rutz::shared_ptr<OceanObject> currentOceanObject = (*itsJobsItr)->oceanObject;
          Job* currentJob = *itsJobsItr;

          if(currentJob->status != IGNORE)
            {
              // look for the object based on its type
              if(currentOceanObject->getType() == OceanObject::PIPE)
                {
                  lookForPipe(currentJob);
                }
              else if(currentOceanObject->getType() == OceanObject::CROSS)
                {
                  lookForCross(currentJob);
                }
              else if(currentOceanObject->getType() == OceanObject::BIN)
                {
                  lookForBin(currentJob);
                }

              lookedForObject = true;
            }
        }

      return lookedForObject;
    }
  else
    {
      return false;
    }
}

// ######################################################################
// Actions
void DownwardVisionAgent::lookForPipe(Job* j)
{
  if(j->status == NOT_STARTED) { j->status = IN_PROGRESS; }

  DataTypes jobDataType = j->dataType;
  rutz::shared_ptr<OceanObject> jobOceanObject = j->oceanObject;

  bool isFound = false;

  // NOTE: the pipe recognizer calls below are commented out, so this
  // agent currently reports the pipe as found without computing its
  // position or orientation
//   std::vector<LineSegment2D> lines = getHoughLines(itsCameraImage, outputImage);

  if(jobDataType == POSITION)
    {
      Do("Looking for pipe center");
//       uint stalePointCount = 0;
//       Point2D<int> projPoint = pipeRecognizer->getPipeProjPoint(lines, stalePointCount);
//       if(projPoint.isValid() && stalePointCount < 10)
//         {
          isFound = true;
//           jobOceanObject->setPosition(Point3D(projPoint.i, projPoint.j, -1));
//         }
    }
  else if(jobDataType == ORIENTATION)
    {
      Do("Looking for pipe orientation");
//       uint staleAngleCount = 0;
//       Angle pipeOrientation = pipeRecognizer->getPipeDir(lines, staleAngleCount);
//       if(pipeOrientation.getVal() < 361 && staleAngleCount < 10)
//         {
          isFound = true;
//           jobOceanObject->setOrientation(pipeOrientation);
//         }
    }

  oceanObjectUpdate(jobOceanObject, jobDataType, isFound);
}

// ######################################################################
void DownwardVisionAgent::lookForBin(Job* j)
{
  if(j->status == NOT_STARTED) { j->status = IN_PROGRESS; }

  DataTypes jobDataType = j->dataType;
  rutz::shared_ptr<OceanObject> jobOceanObject = j->oceanObject;

  bool isFound = false;

  // NOTE: the bin recognizer calls below are commented out, so this
  // agent currently reports the bin as found without computing its
  // position
//   std::vector<LineSegment2D> lines = getHoughLines(itsCameraImage, outputImage);

  if(jobDataType == POSITION)
    {
      Do("Looking for bin center");
//       Point2D<int> binCenter = binRecognizer->getBinCenter(lines);
//       if(binCenter.isValid())
//         {
          isFound = true;
//           jobOceanObject->setPosition(Point3D(binCenter.i, binCenter.j, -1));
//         }
    }

  oceanObjectUpdate(jobOceanObject, jobDataType, isFound);
}

// ######################################################################
void DownwardVisionAgent::lookForCross(Job* j)
{
  j->status = IN_PROGRESS;
  DataTypes jobDataType = j->dataType;
  rutz::shared_ptr<OceanObject> jobOceanObject = j->oceanObject;

  Image<PixRGB<byte> > outputImage;

  bool isFound = false;

  std::vector<LineSegment2D> centerPointLines;

  std::vector<LineSegment2D> lines =
    itsCrossRecognizer->getHoughLines(itsCameraImage, outputImage);

  if(jobDataType == POSITION)
    {
      Do("Looking for cross center");
      uint stalePointCount = 0;
      Point2D<int> crossCenter =
        itsCrossRecognizer->getCrossCenter(lines,
                                           centerPointLines,
                                           stalePointCount);

      if(crossCenter.isValid() && stalePointCount < 10)
        {
          Image<PixRGB<byte> > crossOverlayImage = itsCameraImage;

          PixRGB<byte> crossColor;

          // color-code the overlay by staleness; note that the enclosing
          // test already requires stalePointCount < 10, so as written the
          // red branch is never taken and the cross is always drawn green
          if(stalePointCount <= 20)
            {
              crossColor = PixRGB<byte>(0, 255, 0);
            }
          else
            {
              crossColor = PixRGB<byte>(255, 0, 0);
            }

          drawCrossOR(crossOverlayImage,
                      crossCenter,
                      crossColor,
                      20, 5, 0);

          isFound = true;
          itsAgentManager->drawImage(crossOverlayImage,
                                     Point2D<int>(crossOverlayImage.getWidth(), 0));
          jobOceanObject->setPosition(Point3D(crossCenter.i, crossCenter.j, -1));
          //jobOceanObject->setPosition(Point3D(123,23, -1));
          itsAgentManager->pushResult((CommandType)(SEARCH_OCEAN_OBJECT_CMD),
                                      jobDataType,
                                      jobOceanObject);
        }
    }
  else if(jobDataType == ORIENTATION)
    {
      Do("Looking for cross orientation");
      // NOTE: as in lookForPipe(), the orientation and mass recognizer
      // calls below are commented out, so isFound is set unconditionally
//       uint staleAngleCount = 0;
//       Angle crossOrientation = crossRecognizer->getCrossDir(lines, staleAngleCount);
//       if(crossOrientation.getVal() < 361 && staleAngleCount < 10)
//         {
          isFound = true;
//           jobOceanObject->setOrientation(crossOrientation);
//         }
    }
  else if(jobDataType == MASS)
    {
      Do("Looking for cross mass");
//       float crossMass = crossRecognizer->getCrossMass();
//       if(crossMass >= 0)
//         {
          isFound = true;
//           jobOceanObject->setMass(crossMass);
//         }
    }

  oceanObjectUpdate(jobOceanObject, jobDataType, isFound);
}

// ######################################################################
// Report only status *changes* to the agent manager: an object that was
// NOT_FOUND and is now detected becomes FOUND, and an object that was
// FOUND and is no longer detected becomes LOST.
void DownwardVisionAgent::oceanObjectUpdate
( rutz::shared_ptr<OceanObject> o,
  DataTypes dataType,
  bool isFound)
{
  if(isFound)
    {
      if(o->getStatus() == OceanObject::NOT_FOUND)
        {
          o->setStatus(OceanObject::FOUND);
          itsAgentManager->pushResult((CommandType)(OCEAN_OBJECT_STATUS), dataType, o);
        }
    }
  else
    {
      if(o->getStatus() == OceanObject::FOUND)
        {
          o->setStatus(OceanObject::LOST);
          itsAgentManager->pushResult((CommandType)(OCEAN_OBJECT_STATUS), dataType, o);
        }
    }
}
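
// ######################################################################
// Example of the transitions oceanObjectUpdate() produces (comment-only
// sketch for illustration; it assumes the member is reachable from the
// calling code, and `dv`/`obj` are hypothetical names):
//
//   // obj starts with status OceanObject::NOT_FOUND
//   dv->oceanObjectUpdate(obj, POSITION, false); // no result pushed
//   dv->oceanObjectUpdate(obj, POSITION, true);  // pushes FOUND status
//   dv->oceanObjectUpdate(obj, POSITION, true);  // no result (already FOUND)
//   dv->oceanObjectUpdate(obj, POSITION, false); // pushes LOST status
//   dv->oceanObjectUpdate(obj, POSITION, true);  // no result: only NOT_FOUND
//                                                // is promoted back to FOUND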

// ######################################################################
/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */