/*!@file Neuro/AttentionGuidanceMap.H Class declarations for attentional guidance map class */

// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
// University of Southern California (USC) and the iLab at USC.         //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE. See the GNU General Public License for more details.        //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: Laurent Itti <itti@usc.edu>
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Neuro/AttentionGuidanceMap.H $
// $Id: AttentionGuidanceMap.H 14677 2011-04-04 19:37:18Z dberg $
//

#ifndef ATTENTIONGUIDANCEMAP_H_DEFINED
#define ATTENTIONGUIDANCEMAP_H_DEFINED

#include "Component/ModelComponent.H"
#include "Component/ModelParam.H"
#include "Image/Image.H"
#include "Image/Range.H"
#include "Neuro/NeuroSimEvents.H"
#include "Simulation/SimModule.H"
#include "Simulation/SimEvents.H"

#ifdef INVT_USE_CPPOX // we need C++0x features for this to work
#include "ModelNeuron/SupColliculusModule.H"
#include "ModelNeuron/NeuralFieldModule.H"
class SCInterface;
class StructurePlot;
#endif

class FrameOstream;
class ModelManager;

// ######################################################################
//! The attentional guidance map base class
// ######################################################################
/*! This is a 2D attentional guidance, or priority, map. It is just a
    base class with virtual function definitions; various
    implementations are available below. The AGM is a topographic map
    that represents the priority of spatial locations.
    Priority is the combination of bottom-up or stimulus-driven
    processing with top-down or goal-directed processing. Top-down
    might also include higher-order elements of visual processing such
    as object recognition. Smaller values indicate that a location is
    of low priority, while larger values indicate higher priority. */
class AttentionGuidanceMap : public SimModule
{
public:
  // ######################################################################
  //! @name Constructor, destructor, and reset
  //@{

  //! Uninitialized constructor
  /*! The map will be resized and initialized the first time an input
      arrives (via inputBU() or inputTD()). */
  AttentionGuidanceMap(OptionManager& mgr,
                       const std::string& descrName = "Attention Guidance Map",
                       const std::string& tagName = "AttentionGuidanceMap");

  //! Destructor
  virtual ~AttentionGuidanceMap();

  //! Reset to initial state just after construction
  virtual void reset() = 0;

  //@}

protected:
  //! Callback for when a new saliency map is available
  SIMCALLBACK_DECLARE(AttentionGuidanceMap, SimEventSaliencyMapOutput);

  //! Callback for when a new task relevance map is available
  SIMCALLBACK_DECLARE(AttentionGuidanceMap, SimEventTaskRelevanceMapOutput);

  //! Callback on every clock tick
  SIMCALLBACK_DECLARE(AttentionGuidanceMap, SimEventClockTick);

  //! Callback for every time we should save our outputs
  SIMCALLBACK_DECLARE(AttentionGuidanceMap, SimEventSaveOutput);

  //! Save our internals when saveResults() is called?
  OModelParam<bool> itsSaveResults;

  //! Set new bottom-up input
  /*! This should initialize and resize the map if it is currently
      uninitialized (e.g., just after construction or reset()). */
  virtual void inputBU(const Image<float>& in) = 0;

  //! Set new top-down input
  /*! This should initialize and resize the map if it is currently
      uninitialized (e.g., just after construction or reset()). */
  virtual void inputTD(const Image<float>& in) = 0;

  //! Return all our values as an Image<float>
  virtual Image<float> getV() const = 0;

  //! Run on every time step
  virtual void doClockTick(SimEventQueue& q);

  //! Save results
  /*! A default implementation is provided, which just calls getV()
      and saves that map with a file name prefix of "AGM".
      @param sinfo contains the FrameOstream that will figure out the
      file name/format and save our results. */
  virtual void save1(const ModelComponentSaveInfo& sinfo);

private:
  Image<float> itsOutputCache;

  // forbid assignment and copy-construction:
  AttentionGuidanceMap& operator=(const AttentionGuidanceMap& sm);
  AttentionGuidanceMap(const AttentionGuidanceMap& sm);
};

// ######################################################################
//! AttentionGuidanceMap configurator
// ######################################################################
/*! This will export the --agm-type=XX command-line option and will
    instantiate an AGM of the desired type as the option gets assigned a
    value. As this happens, new options may become available on the
    command line. To see them, use --help AFTER you have chosen the type
    to use. The current AGM may be retrieved using getAGM().
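
    A minimal usage sketch (hedged: the enclosing component and the
    variable names below are illustrative only, not part of this
    header; mgr is the OptionManager in scope while the model is being
    built):

    \code
    // during model construction, in some ModelComponent:
    nub::ref<AttentionGuidanceMapConfigurator>
      agmc(new AttentionGuidanceMapConfigurator(mgr));
    addSubComponent(agmc);                 // exports --agm-type=XX

    // later, in start() of the component that needs the AGM:
    nub::ref<AttentionGuidanceMap> agm = agmc->getAGM();
    \endcode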
*/
class AttentionGuidanceMapConfigurator : public ModelComponent
{
public:
  //! Constructor
  AttentionGuidanceMapConfigurator(OptionManager& mgr,
                                   const std::string& descrName =
                                   "Attention Guidance Map Configurator",
                                   const std::string& tagName =
                                   "AttentionGuidanceMapConfigurator");

  //! Destructor
  virtual ~AttentionGuidanceMapConfigurator();

  //! Get the chosen AGM
  /*! You should call this during start() of the ModelComponent that
      needs the AGM. */
  nub::ref<AttentionGuidanceMap> getAGM() const;

protected:
  OModelParam<std::string> itsAGMtype; //!< type of map

  //! Intercept people changing our ModelParam
  /*! See ModelComponent.H; as parsing the command-line or reading a
      config file sets our name, we'll also here instantiate a
      controller of the proper type (and export its options). */
  virtual void paramChanged(ModelParamBase* const param,
                            const bool valueChanged,
                            ParamClient::ChangeStatus* status);

private:
  nub::ref<AttentionGuidanceMap> itsAGM; // the map
};


// ######################################################################
//! The standard attentional guidance map
// ######################################################################
/*! This is our current standard AGM implementation. It just takes the
    pointwise product between the bottom-up and top-down inputs. */
class AttentionGuidanceMapStd : public AttentionGuidanceMap
{
public:
  //! Uninitialized constructor
  AttentionGuidanceMapStd(OptionManager& mgr, const std::string& descrName =
                          "Attention Guidance Map Std",
                          const std::string& tagName =
                          "AttentionGuidanceMapStd");

  //! Destructor
  virtual ~AttentionGuidanceMapStd();

  //! Reset to initial state just after construction
  virtual void reset();

protected:
  //! Set new bottom-up input
  virtual void inputBU(const Image<float>& in);

  //! Set new top-down input
  virtual void inputTD(const Image<float>& in);

  //! Return all our values as an Image<float>
  virtual Image<float> getV() const;

private:
  Image<float> itsBUmap;
  Image<float> itsTDmap;
};

// ######################################################################
//! The Optimized Attention Guidance Map
// ######################################################################
/*! This is our current optimized AGM implementation. It computes
    a*BU + b*TD + BU*TD, where (a, b) are optimized coefficients. */
class AttentionGuidanceMapOpt : public AttentionGuidanceMap
{
public:
  //! Uninitialized constructor
  AttentionGuidanceMapOpt(OptionManager& mgr, const std::string& descrName =
                          "Attention Guidance Map Optimized",
                          const std::string& tagName =
                          "AttentionGuidanceMapOpt");

  //! Destructor
  virtual ~AttentionGuidanceMapOpt();

  //! Reset to initial state just after construction
  virtual void reset();

protected:
  //! Set new bottom-up input
  virtual void inputBU(const Image<float>& in);

  //! Set new top-down input
  virtual void inputTD(const Image<float>& in);

  //! Return all our values as an Image<float>
  virtual Image<float> getV() const;

private:
  Image<float> itsBUmap;
  Image<float> itsTDmap;
};
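
// The getV() combination rules described in the two class comments above
// are defined in the corresponding .C file. As a hedged sketch only
// (assuming Image<float> supports pointwise operator* / operator+ and
// scaling by a float, and with 'a' and 'b' standing for the optimized
// coefficients, whose values are not specified in this header), they
// amount to:
//
//   Image<float> agmStd = itsBUmap * itsTDmap;                          // Std: pointwise product
//   Image<float> agmOpt = itsBUmap*a + itsTDmap*b + itsBUmap*itsTDmap;  // Opt: a*BU + b*TD + BU*TD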

#ifdef INVT_USE_CPPOX // we need C++0x features for this to work
// ######################################################################
//! An Attentional Guidance Map to interface the ModelNeuron classes.
// This is just an adaptor class; see below for full implementations.
// ######################################################################
class AttentionGuidanceMapNeuralSim : public AttentionGuidanceMap
{
public:
  AttentionGuidanceMapNeuralSim(OptionManager& mgr, const std::string& descrName, const std::string& tagName);
  virtual ~AttentionGuidanceMapNeuralSim();

protected:
  //! Run on every time step
  void doClockTick(SimEventQueue& q);

  //! Post our output
  virtual void postMessage(SimEventQueue& q) = 0;

  //! Update our internals
  virtual void update(const SimTime& time) = 0;

  //! Our output rate
  OModelParam<SimTime> itsOutRate;

private:
  SimTime itsTime; // the current time
};
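
// A hedged sketch of one plausible doClockTick() scheme for the adaptor
// above (the actual definition lives in the .C file): advance the neural
// model to the current simulation time via update(), then post the output
// through postMessage() no faster than itsOutRate allows. SimEventQueue::now()
// and OModelParam::getVal() are assumed here as the accessors for the
// current time and the parameter value:
//
//   void AttentionGuidanceMapNeuralSim::doClockTick(SimEventQueue& q)
//   {
//     update(q.now());                               // integrate internals up to now
//     if (q.now() - itsTime >= itsOutRate.getVal())  // has an output period elapsed?
//       { postMessage(q); itsTime = q.now(); }       // post and remember when we did
//   }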

// ######################################################################
//! An Attentional Guidance Map based on the mammalian superior colliculus
// ######################################################################
class AttentionGuidanceMapSC : public AttentionGuidanceMapNeuralSim
{
public:
  //! Uninitialized constructor
  AttentionGuidanceMapSC(OptionManager& mgr,
                         const std::string& descrName = "AttentionGuidance Map SC",
                         const std::string& tagName = "AttentionGuidanceMapSC");
  //! Destructor
  virtual ~AttentionGuidanceMapSC();

  //! Reset to initial state just after construction
  virtual void reset();

protected:
  //! Set new bottom-up input
  virtual void inputBU(const Image<float>& in);

  //! Set new top-down input
  virtual void inputTD(const Image<float>& in);

  //! Return all our values as an Image<float>
  virtual Image<float> getV() const;

  //! Update our internals
  virtual void update(const SimTime& time);

  //! Post our output
  virtual void postMessage(SimEventQueue& q);

  //! Save results
  virtual void save1(const ModelComponentSaveInfo& sinfo);

  nub::ref<SupColliculusModule> itsSC; // our actual component
};

// ######################################################################
//! An Attentional Guidance Map based on neural fields
// ######################################################################
class AttentionGuidanceMapNF : public AttentionGuidanceMapNeuralSim
{
public:
  //! Uninitialized constructor
  AttentionGuidanceMapNF(OptionManager& mgr,
                         const std::string& descrName = "AttentionGuidance Map NF",
                         const std::string& tagName = "AttentionGuidanceMapNF");
  //! Destructor
  virtual ~AttentionGuidanceMapNF();

  //! Reset to initial state just after construction
  virtual void reset();

protected:
  //! Set new bottom-up input
  virtual void inputBU(const Image<float>& in);

  //! Set new top-down input
  virtual void inputTD(const Image<float>& in);

  //! Return all our values as an Image<float>
  virtual Image<float> getV() const;

  //! Update our internals
  virtual void update(const SimTime& time);

  //! Post our output
  virtual void postMessage(SimEventQueue& q);

  //! Save results
  virtual void save1(const ModelComponentSaveInfo& sinfo);

  nub::ref<NeuralFieldModule> itsNF; // our actual component
};

#endif // use C++0x features
#endif

// ######################################################################
/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */