FoeDetector.H

/*!@file Robots2/Beobot2/Navigation/FOE_Navigation/FoeDetector.H
   detects the focus of expansion (FOE) in an input image sequence */
// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
// University of Southern California (USC) and the iLab at USC.         //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: Christian Siagian <siagian@usc.edu>
// $HeadURL: svn://ilab.usc.edu/trunk/saliency/src/Robots/Beobot2/Navigation/FOE_Navigation/FoeDetector.H
// $Id: $
//

#ifndef ROBOTS_BEOBOT2_NAVIGATION_FOENAVIGATION_FOEDETECTOR_H
#define ROBOTS_BEOBOT2_NAVIGATION_FOENAVIGATION_FOEDETECTOR_H

#include "Component/ModelComponent.H"
#include "Component/ModelParam.H"

#include "Image/Image.H"
#include "Image/Layout.H"

#include "Robots/Beobot2/Navigation/FOE_Navigation/SpatioTemporalEnergy.H"
#include "Robots/Beobot2/Navigation/FOE_Navigation/MiddleTemporal.H"
#include "Robots/Beobot2/Navigation/FOE_Navigation/MotionOps.H"
#include "Raster/Raster.H"

#define FOE_METHOD_TEMPLATE    10000
#define FOE_METHOD_AVERAGE     10001

class FoeDetector: public ModelComponent
{
public:

  FoeDetector(OptionManager& mgr,
              const std::string& descrName = "FoeDetector",
              const std::string& tagName = "FoeDetector");

  void reset(uint numPyrLevel, uint numDirs, uint numSpeeds);

  ~FoeDetector();

  //! Various ways to obtain the focus of expansion (FOE) estimate,
  //! either as a point or as a probability map.
  //! Note: if a new image or new features are passed in, the FOE is
  //! recomputed from them; otherwise the current estimate is returned
  //! without further computation.

  //! compute and return the FOE location given an input image
  Point2D<int> getFoe(Image<byte> lum,
                      uint method = FOE_METHOD_TEMPLATE,
                      bool tempFilter = true);

  //! compute and return the FOE map given an input image
  Image<float> getFoeMap(Image<byte> lum,
                         uint method = FOE_METHOD_TEMPLATE,
                         bool tempFilter = true);
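
  // Illustrative sketch (not part of the original header): a minimal
  // per-frame loop using the image-based interface above, assuming "fd" is
  // an already-constructed FoeDetector on which reset() has been called
  // (see the usage sketch at the end of this file). The frame source
  // "haveFrames"/"grabFrame" is hypothetical.
  //
  //   while (haveFrames())
  //   {
  //     Image<byte> lum = grabFrame();      // hypothetical luminance source
  //     Point2D<int> foe = fd.getFoe(lum, FOE_METHOD_TEMPLATE, true);
  //     Image<float> map = fd.getFoeMap();  // cached map, no recomputation
  //   }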

  //! compute and return the FOE location given a set of correspondences
  Point2D<int> getFoe
  ( rutz::shared_ptr<OpticalFlow> flow,
    uint method = FOE_METHOD_TEMPLATE,
    bool tempFilter = true);

  //! compute and return the FOE map given a set of correspondences
  Image<float> getFoeMap
  ( rutz::shared_ptr<OpticalFlow> flow,
    uint method = FOE_METHOD_TEMPLATE,
    bool tempFilter = true);
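
  // Illustrative sketch (not part of the original header): the same queries
  // driven by precomputed correspondences rather than raw images. How the
  // OpticalFlow object is built (e.g. via routines from MotionOps.H) is left
  // open; "computeFlow" below is a hypothetical placeholder.
  //
  //   rutz::shared_ptr<OpticalFlow> flow = computeFlow(prevLum, currLum);
  //   Point2D<int> foe = fd.getFoe   (flow, FOE_METHOD_AVERAGE, false);
  //   Image<float> map = fd.getFoeMap(flow, FOE_METHOD_AVERAGE, false);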

  //! compute and return the FOE location given MT features
  //! (computed by MiddleTemporal.C & .H)
  Point2D<int> getFoe
  ( std::vector <Image<float> > mtFeatures,
    std::vector <Image<float> > mtOptimalShift,
    uint method = FOE_METHOD_TEMPLATE,
    bool tempFilter = true);

  //! compute and return the FOE map given MT features
  //! (computed by MiddleTemporal.C & .H)
  Image<float> getFoeMap
  ( std::vector <Image<float> > mtFeatures,
    std::vector <Image<float> > mtOptimalShift,
    uint method = FOE_METHOD_TEMPLATE,
    bool tempFilter = true);

  //! return the current information without recomputation
  Point2D<int> getFoe();
  Image<float> getFoeMap();

  //! manually set (hard-code) the observer motion
  void setObserverRotation(uint dir, float speed);
  void setObserverRotation(float di, float dj);

  //! return a displayable result of the FOE computation
  Layout<byte> getMTfeaturesDisplay(Image<byte> image);

private:

  //! internal functions for the different types of FOE computation
  Point2D<int> getFoeTemplate(Image<byte> lum);
  Point2D<int> getFoeAvg(Image<byte> lum);

  //! temporally filter the FOE location as new images arrive
  Point2D<int> temporalFilterFoeComputation();

  //! reset the direction weights for the FOE calculation
  //! at the MST level
  void resetDirectionWeights(uint width, uint height);

  //! compute the visual-cortex (V1) features related to motion;
  //! used only when the input is a series of images, and skipped
  //! when the input is MT features or correspondences
  void computeV1features();

  //! detect whether the observer is stationary
  //! FIXXX_NOTE: needs more work
  float detectObserverStationarity();

  //! detect observer rotation to correct the FOE templates
  //! FIXXX_NOTE: needs more work
  void detectObserverRotation();
  float maxMean(Image<float> image);

  //! correct the FOE templates to account for observer rotation
  //! FIXXX_NOTE: needs more work
  void correctForObserverRotation();

  //! compute the focus of expansion from the MT features
  //! using the Perrone (1992) template method
  Point2D<int> computeFoeTemplate();
  float computeFoeTemplateValue(uint foeI, uint foeJ);
  Point2D<int> computeFoeTemplate(rutz::shared_ptr<OpticalFlow> flow);
  float computeFoeTemplateValue
  (uint foeI, uint foeJ, rutz::shared_ptr<OpticalFlow> flow);

  //! return the direction weight
  float getDirectionWeight
  (Point2D<float> pt, Point2D<float> foe, float length, float mangle);
  float getDirectionWeight2(uint quad, uint dir);

  //! compute the focus of expansion from the MT features
  //! using the Bonn (1994) vertical and horizontal averaging method
  Point2D<int> computeFoeAverage();
  Point2D<int> computeFoeAverage(rutz::shared_ptr<OpticalFlow> flow);

  //! printing/displaying procedures for debugging
  void print(Image<float> img,
             uint si, uint ei, uint sj, uint ej, bool stop);
  void display(Image<float> img, std::string info);
  void display(ImageSet<float> imgs, std::string info);

  //! the various directional pyrbuilders
  std::vector<std::vector
              <rutz::shared_ptr<SpatioTemporalEnergyPyrBuilder<float> > > >
  itsSpatioTemporalPyrBuilders;

  //! its Middle Temporal (MT) module
  rutz::shared_ptr<MiddleTemporal> itsMT;

  std::vector<std::vector<std::vector<Image<float> > > > itsDirWeights;

  uint  itsNumPyrLevels;
  uint  itsNumDirs;
  uint  itsNumSpeeds;
  //uint  itsNumDFrames; // 1 pix in between, 2, 4

  //! current image holder
  Image<byte>  itsCurrentImage;

  //! raw motion energy for each direction
  std::vector<std::vector<ImageSet<float> > > itsRawSpatioTemporalEnergy;
  std::vector<ImageSet<float> > itsSpatioTemporalEnergy;
  //std::vector<ImageSet<float> > itsSpatioTemporalEnergyOptimalShift;

  //! filtered features, collapsed to the number of directions;
  //! these already include:
  //!   lateral inhibition
  //!   center-surround opponencies
  std::vector <Image<float> > itsMTfeatures;
  std::vector <Image<float> > itsMToptimalShift;

  //! map of the likelihood
  //! that a coordinate location is the focus of expansion
  Image<float> itsFoeMap;
  std::vector<Image<float> > itsRecentFoeMaps;
  int itsCurrentFoeMapIndex;

  //! most likely focus of expansion location
  Point2D<int> itsFoe;

  //! estimated current observer motion;
  //! can be (or has been) used to adjust the FOE
  uint  itsCurrentRotMotionDir;
  float itsCurrentRotMotionSpeed;
  float itsCurrentRotMotionDi;
  float itsCurrentRotMotionDj;

  //! debugging window
  rutz::shared_ptr<XWinManaged> itsWin;
};
#endif
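
// Usage sketch (illustrative; not part of the original file): typical iLab
// ModelComponent wiring, assuming the usual ModelManager / nub::ref idiom
// used elsewhere in the toolkit. The frame source ("haveFrames"/"grabFrame")
// and the reset() arguments are hypothetical.
//
//   #include "Component/ModelManager.H"
//   #include "Robots/Beobot2/Navigation/FOE_Navigation/FoeDetector.H"
//
//   int main(int argc, const char** argv)
//   {
//     ModelManager manager("FOE detection example");
//     nub::ref<FoeDetector> fd(new FoeDetector(manager));
//     manager.addSubComponent(fd);
//     if (manager.parseCommandLine(argc, argv, "", 0, 0) == false) return 1;
//     manager.start();
//
//     fd->reset(3, 8, 1);                  // pyramid levels, directions, speeds
//     while (haveFrames())
//     {
//       Image<byte> lum = grabFrame();
//       Point2D<int> foe = fd->getFoe(lum);
//       Image<float> foeMap = fd->getFoeMap();
//     }
//
//     manager.stop();
//     return 0;
//   }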

// ######################################################################
/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */