GSlocalizer.H

Go to the documentation of this file.
00001 /*!@file Beobot/GSlocalizer.H takes in salient object and  gist vector
00002   to localize in the map it has. It also takes in command to go to a
00003   target location                                                       */
00004 // //////////////////////////////////////////////////////////////////// //
00005 // The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
00006 // University of Southern California (USC) and the iLab at USC.         //
00007 // See http://iLab.usc.edu for information about this project.          //
00008 // //////////////////////////////////////////////////////////////////// //
00009 // Major portions of the iLab Neuromorphic Vision Toolkit are protected //
00010 // under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
00011 // in Visual Environments, and Applications'' by Christof Koch and      //
00012 // Laurent Itti, California Institute of Technology, 2001 (patent       //
00013 // pending; application number 09/912,225 filed July 23, 2001; see      //
00014 // http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
00015 // //////////////////////////////////////////////////////////////////// //
00016 // This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
00017 //                                                                      //
00018 // The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
00019 // redistribute it and/or modify it under the terms of the GNU General  //
00020 // Public License as published by the Free Software Foundation; either  //
00021 // version 2 of the License, or (at your option) any later version.     //
00022 //                                                                      //
00023 // The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
00024 // that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
00025 // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
00026 // PURPOSE.  See the GNU General Public License for more details.       //
00027 //                                                                      //
00028 // You should have received a copy of the GNU General Public License    //
00029 // along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
00030 // to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
00031 // Boston, MA 02111-1307 USA.                                           //
00032 // //////////////////////////////////////////////////////////////////// //
00033 //
00034 // Primary maintainer for this file: Christian Siagian <siagian@usc.edu>
00035 // $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Beobot/GSlocalizer.H $
00036 // $Id: GSlocalizer.H 10794 2009-02-08 06:21:09Z itti $
00037 //
00038 
00039 // ######################################################################
00040 
00041 
00042 #ifndef BEOBOT_GSLOCALIZER_H_DEFINED
00043 #define BEOBOT_GSLOCALIZER_H_DEFINED
00044 
00045 #define NUM_GSL_THREAD      16
00046 #define N_OBJECT_BLOCK      40
00047 
00048 #include "Beowulf/Beowulf.H"
00049 #include "Beobot/beobot-GSnav-def.H"
00050 
00051 #include "Component/ModelComponent.H"
00052 #include "Beobot/Landmark.H"
00053 #include "SIFT/Histogram.H"
00054 #include "Beobot/GSparticle.H"
00055 #include "Beobot/Environment.H"
00056 #include "Util/Timer.H"
00057 
00058 #include <list>
00059 #include <pthread.h>
00060 
00061 
00062 // ######################################################################
00063 
00064 struct GSlocJobData
00065 {
00066   GSlocJobData() { };
00067 
00068   GSlocJobData(const int inObjNum,
00069                const int inSegNum,
00070                const int inLmkNum,
00071                const int inVOstartNum,
00072                const int inVOendNum) :
00073     objNum(inObjNum),
00074     segNum(inSegNum),
00075     lmkNum(inLmkNum),
00076     voStartNum(inVOstartNum),
00077     voEndNum(inVOendNum)
00078   {
00079     pVal = 0.0;
00080     segVal = 0.0;
00081     salVal = 0.0;
00082     locVal = 0.0;
00083   }
00084 
00085   int objNum;
00086   int segNum;
00087   int lmkNum;
00088   int voStartNum;
00089   int voEndNum;
00090 
00091   //! priority value of the job
00092   float pVal;
00093   float segVal;
00094   float salVal;
00095   float locVal;
00096 
00097   bool operator < (const GSlocJobData& rhs)
00098   {
00099     return pVal < rhs.pVal;
00100   }
00101 
00102 };
00103 
00104 
00105 // ######################################################################
00106 //! Thread with localizer, Object database, search priority list
00107 //! takes in salient object and gist vector as input
00108 class GSlocalizer : public ModelComponent
00109 {
00110 public:
00111 
00112   // ######################################################################
00113   /*! @name Constructors and Destructors */
00114   //@{
00115 
00116   //! Constructor
00117   GSlocalizer(OptionManager& mgr,
00118               const std::string& descrName = "Gist and Saliency localizer",
00119               const std::string& tagName = "GSlocalizer");
00120 
00121   //! Destructor
00122   virtual ~GSlocalizer();
00123 
00124   //! set the prefix of file to save data  - has to be done
00125   void setSavePrefix(std::string prefix);
00126 
00127   //! set the environment - has to be done
00128   void setEnvironment(rutz::shared_ptr<Environment> env);
00129 
00130   //@}
00131 
00132   // ######################################################################
00133   //! @name Access functions
00134   //@{
00135 
00136   //! get number of objects compared in the search
00137   uint getNumObjectSearch(uint index);
00138 
00139   //! get the environment information
00140   rutz::shared_ptr<Environment> getEnvironment();
00141 
00142   //! set the window to display results
00143   void setWindow(rutz::shared_ptr<XWinManaged> inputWin);
00144 
00145   //! set beowulf access
00146   void setBeoWulf(nub::soft_ref<Beowulf> beo);
00147 
00148   //! get the input image
00149   Image<PixRGB<byte> > getInputImage();
00150 
00151   //! get the number of objects inputted
00152   uint getNumInputObject();
00153 
00154   //! get the visual object that we try to match
00155   rutz::shared_ptr<VisualObject> getInputVO(uint index);
00156 
00157   //! get the input gist
00158   Image<double> getInputGist();
00159 
00160   //! get the visual object match for the found object
00161   rutz::shared_ptr<VisualObjectMatch> getVOmatch(uint index);
00162 
00163   //! get the segment number of the object match found
00164   uint getSegmentNumberMatch(uint index);
00165 
00166   //! get the length traveled of the object match found
00167   float getLengthTraveledMatch(uint index);
00168 
00169   //! get the object offset of the visual object
00170   //! that we try to match
00171   Point2D<int> getInputObjOffset(uint index);
00172 
00173   //! get the last input frame number
00174   int getInputFnum();
00175 
00176   //! get the last input frame number where search is started
00177   int getSearchInputFnum();
00178 
00179   //! get the segment histogram from the segment classifier
00180   rutz::shared_ptr<Histogram> getSegmentHistogram();
00181 
00182   //! get our geographical location
00183   Point2D<int> getLocation();
00184 
00185   //! get our segment location
00186   uint getSegmentLocation();
00187 
00188   //! get the length traveled within the segment
00189   float getSegmentLengthTraveled();
00190 
00191   //! set ground truth
00192   void setGroundTruth(uint snum, float ltrav);
00193 
00194   //! get ground truth
00195   void getGroundTruth(uint &snum, float &ltrav);
00196 
00197   //@}
00198 
00199   // ######################################################################
00200   /*! @name member functions */
00201   //@{
00202 
00203   //! initialize the localization particles
00204   void initParticles(std::string belFName = std::string(""));
00205 
00206   //! get the belief particles (usually for recovering crashes)
00207   std::vector<GSparticle> getBeliefParticles();
00208 
00209   //! check if the serach is finished
00210   bool outputReady();
00211 
00212   //! return the result of the matching search
00213   bool isMatchFound(uint index);
00214 
00215   //! input the image, visual object and gist feature for search
00216   //! also add the odometry change
00217   void input(Image<PixRGB<byte> > ima,
00218              std::vector<rutz::shared_ptr<VisualObject> > inputVO,
00219              std::vector<Point2D<int> > inputObjOffset,
00220              int inputFnum, Image<double> cgist,
00221              float dx = -1.0F, float dy = -1.0F);
00222 
00223   //! the belief histogram for segment only localization
00224   rutz::shared_ptr<Histogram> getSegmentBeliefHistogram();
00225 
00226   //! For internal thread use: Compute a conspicuity map from an image
00227   void threadCompute();
00228 
00229   //! stop search by cleaning up the queue
00230   //! NOTE: this is a hard stop (blocking operation)
00231   //!       may take time (500ms, or even longer)
00232   void stopSearch();
00233 
00234   //! stop search by flipping a stop-search bit
00235   //! NOTE: this is a soft/non-blocking operation
00236   void stopSearch2();
00237 
00238   //! update belief using the input just processed
00239   //! update our likely location
00240   void updateBelief();
00241 
00242   //! move the object from the previous location
00243   void actionUpdateBelief();
00244 
00245   //! update belief using the segment prediction
00246   void segmentUpdateBelief();
00247 
00248   //! update belief using all the objects found
00249   void objectUpdateBelief();
00250 
00251   //! update belief using object 'index'
00252   void objectUpdateBelief(uint index);
00253 
00254   //! set the most likely location
00255   void setLocation();
00256 
00257   //! get the belief image (it is put on top of a map)
00258   Image<PixRGB<byte> > getBeliefImage(uint w, uint h, int &scale);
00259 
00260   //! get the match image
00261   Image<PixRGB<byte> > getMatchImage(uint index, Dims d);
00262 
00263   //! get motor signal
00264   /*! motor signal can be used (using PID, for example) to obtain
00265     motor command. Motor signal is a delta signal (in image coordinate)
00266     of where the robot should go to get to the goal state.
00267    */
00268   Point2D<int> getMotorSignal();
00269 
00270   //@}
00271 
00272 protected:
00273   void start1(); //!< get started
00274   void stop2();  //!< get stopped
00275 
00276 private:
00277 
00278   // ######################################################################
00279   /*! @name private functions */
00280   //@{
00281 
00282   //! set the segment and object search priority for landmark DB
00283   void setSearchPriority();
00284 
00285   //! add the search order preference randomly
00286   void addRandomPriority();
00287 
00288   //! add the search order preference based on segment
00289   void addSegmentPriority();
00290 
00291   //! add the search order preference based on saliency match
00292   void addSaliencyPriority();
00293 
00294   //! add the search order preference based on current belief location
00295   void addLocationPriority();
00296 
00297   //! get the match
00298   GSlocJobData getMatch(uint index);
00299 
00300   //@}
00301 
00302   //!  file prefix to save data
00303   std::string itsSavePrefix;
00304 
00305   //! localization particles for beliefs
00306   std::vector<GSparticle> itsBeliefParticles;
00307   std::vector<Point2D<int> > itsBeliefLocations;
00308 
00309   //! all the environment information
00310   rutz::shared_ptr<Environment> itsEnvironment;
00311 
00312   //! from its environment: topological map
00313   rutz::shared_ptr<TopologicalMap> itsTopologicalMap;
00314 
00315   //! from its environment: visual landmark database
00316   rutz::shared_ptr<LandmarkDB> itsLandmarkDB;
00317 
00318   //! the input image, visual objects, and gist
00319   Image<PixRGB<byte> > itsInputImage;
00320   std::vector<rutz::shared_ptr<VisualObject> > itsInputVO;
00321   std::vector<bool> itsVOKeypointsComputed;
00322   std::vector<Point2D<int> > itsInputObjOffset;
00323   Image<double> itsInputGist;
00324   int itsInputFnum;
00325   int itsSearchInputFnum;
00326 
00327   //! ground truth information - default to (0,0.0)
00328   uint  itsSnumGT;
00329   float itsLtravGT;
00330 
00331   //! the current robot movement
00332   float itsRobotDx;
00333   float itsRobotDy;
00334 
00335   //! segment histogram from the classifier
00336   rutz::shared_ptr<Histogram> itsSegmentHistogram;
00337 
00338   //! result of search
00339   std::vector<rutz::shared_ptr<VisualObjectMatch> > itsVOmatch;
00340   std::vector<GSlocJobData> itsLmkMatch;
00341   std::vector<uint> itsSegNumMatch;
00342   std::vector<float> itsLenTravMatch;
00343   std::vector<bool> itsMatchFound;
00344   std::vector<uint> itsNumObjectSearch;
00345 
00346   //! resulting geographic location
00347   uint itsSegmentLocation;
00348   float itsSegmentLengthTraveled;
00349   Point2D<int> itsLocation;
00350 
00351   //! job queue and number of jobs to do
00352   //! Note: they are on jobLock
00353   std::list<GSlocJobData> itsJobQueue;
00354   bool itsIsQueueSorted;         //!< note if the queue is sorted
00355   uint itsNumJobsProcessed;      //!< number of jobs that has been processed
00356   uint itsLastSuccessfulJob;     //!< job index last found
00357   uint itsNumObjectFound;        //!< number of objects found
00358   uint itsNumJobs;               //!< original number of jobs
00359   bool itsStopSearch;            //!< stop search request
00360 
00361   uint itsNumWorking;            //!< the number of threads that are working
00362 
00363   //! master node to send to
00364   nub::soft_ref<Beowulf> itsBeowulf;
00365 
00366   //! segment histogram from the belief particles
00367   rutz::shared_ptr<Histogram> itsSegmentBeliefHistogram;
00368 
00369   //! especially for input
00370   bool itsOutputReady2;
00371 
00372   //! thread stuff
00373   pthread_t *worker;
00374   pthread_mutex_t jobLock;       //!< locking jobQueue
00375   pthread_mutex_t fnumLock;      //!< locking frame number
00376   pthread_mutex_t or2Lock;       //!< locking itsOutputReady2
00377   pthread_mutex_t stopSearchLock;//!< locking stop search
00378   pthread_mutex_t resLock;       //!< locking results
00379   pthread_mutex_t workLock;      //!< locking number of working threads
00380   pthread_mutex_t particleLock;  //!< locking belief particles
00381   pthread_cond_t jobCond;
00382   uint numWorkers;
00383 
00384   rutz::shared_ptr<XWinManaged> itsWin;
00385 
00386   rutz::shared_ptr<Timer> itsTimer;
00387 };
00388 
00389 #endif
00390 
00391 // ######################################################################
00392 /* So things look consistent in everyone's emacs... */
00393 /* Local Variables: */
00394 /* indent-tabs-mode: nil */
00395 /* End: */
Generated on Sun May 8 08:04:29 2011 for iLab Neuromorphic Vision Toolkit by  doxygen 1.6.3