RawVisualCortex.H

/*!@file Channels/RawVisualCortex.H The early visual processing stages */

// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
// University of Southern California (USC) and the iLab at USC.         //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: Laurent Itti <itti@usc.edu>
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Channels/RawVisualCortex.H $
// $Id: RawVisualCortex.H 14535 2011-02-18 22:40:51Z siagian $
//

#ifndef CHANNELS_RAWVISUALCORTEX_H_DEFINED
#define CHANNELS_RAWVISUALCORTEX_H_DEFINED

#include "Channels/ComplexChannel.H"
#include "Channels/InputFrame.H"
#include "Component/ModelComponent.H"
#include "Component/ModelParam.H"
#include "Image/Image.H"
#include "Image/LevelSpec.H"
#include "Image/fancynorm.H" // for MaxNormType

#include "GUI/XWinManaged.H"


// Forward declarations will suffice instead of #include's here, and will
// be more efficient in compile times and in minimizing dependencies.
template <class T> class PixRGB;
class MgzDecoder;
class MgzEncoder;
// ######################################################################
//! The Visual Cortex Class
/*! In brief, RawVisualCortex holds a collection of ChannelBase objects,
    and most of RawVisualCortex's operations are achieved by some kind of
    iteration over that collection. That is, RawVisualCortex now does
    little work by itself, but delegates its operations to the
    channels, accumulating their results if necessary. NOTE:
    RawVisualCortex has a virtual ModelComponent base which is shared
    between its inheritance from ModelComponent via the ComplexChannel
    inheritance path and its inheritance from ModelComponent via the
    SimModule inheritance path. Just beware! */
class RawVisualCortex : public ComplexChannel
{
public:
  // ######################################################################
  /*! @name Constructors/Destructor */
  //@{

  //! Construct with no channel; channels must then be added with addSubChan().
  /*! @param mgr our ModelManager (see ModelManager.H)
      @param descrName descriptive name for human usage
      @param tagName name for ParamMap usage */
  RawVisualCortex(OptionManager& mgr,
                  const std::string& descrName = "Raw Visual Cortex",
                  const std::string& tagName = "RawVisualCortex");
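
  /* A minimal usage sketch, not a definitive recipe: construct a
     RawVisualCortex under a ModelManager and populate it through
     addSubChan(). The ColorChannel and IntensityChannel class names below
     are assumptions borrowed from elsewhere in the toolkit, shown for
     illustration only:

       ModelManager mgr("my model");
       nub::ref<RawVisualCortex> vcx(new RawVisualCortex(mgr));
       mgr.addSubComponent(vcx);

       // hypothetical channels; any ChannelBase-derived component would do:
       nub::ref<ChannelBase> col(new ColorChannel(mgr));
       nub::ref<ChannelBase> lum(new IntensityChannel(mgr));
       vcx->addSubChan(col);
       vcx->addSubChan(lum);
  */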

  //! Virtual destructor for safe inheritance.
  virtual ~RawVisualCortex();

  //@}

  //! Save our results
  virtual void saveResults(const nub::ref<FrameOstream>& ofs);

  //! Get the output of the vc map from a given image
  const Image<float> getVCOutput(const Image<PixRGB<byte> >& rgbin);
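
  /* A minimal sketch of using getVCOutput(), assuming the model has been
     started via its ModelManager and that "frame" is an Image< PixRGB<byte> >
     obtained elsewhere (the variable names are illustrative only):

       Image<float> vcmap = vcx->getVCOutput(frame);
       // vcmap is the combined output of all subchannels, at the smaller
       // map resolution implied by itsLevelSpec.
  */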

protected:
  //! Implementation of ChannelBase input() functions
  virtual void doInput(const InputFrame& inframe);

  //! Combine the outputs of our subchannels
  /*! We do this in a slightly different way than the ComplexChannel
      base version. */
  virtual Image<float> combineOutputs();

  //! get weighted (but not resized) channel output map
  virtual Image<float> getChannelOutputMap(const uint idx) const;

  //! post-process raw weighted sum of channel output maps
  /*! Derived classes may overload this to provide custom
    post-processing. For example, VisualCortexSurprise may pass the
    output through a sigmoidal nonlinearity or spatial
    competition. Default implementation is to treat the special case
    of VCXNORM_LANDMARK. */
  virtual Image<float> postProcessOutputMap(const Image<float>& outmap);
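
  /* A hedged sketch of how a subclass might override this hook; the class
     name MyCortex and the extra processing step are purely illustrative
     assumptions, not part of the toolkit:

       class MyCortex : public RawVisualCortex {
       protected:
         virtual Image<float> postProcessOutputMap(const Image<float>& outmap)
         {
           // start from the base-class behavior...
           Image<float> result = RawVisualCortex::postProcessOutputMap(outmap);
           // ...then apply whatever extra nonlinearity or competition is wanted
           return result;
         }
       };
  */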

  virtual void paramChanged(ModelParamBase* const param,
                            const bool valueChanged,
                            ParamClient::ChangeStatus* status);

  OModelParam<std::string> itsType;
  OModelParam<MaxNormType> itsNormType; //!< maxNormalization to use
  OModelParam<bool> itsUseRandom;       //!< add random noise to output
  OModelParam<float> itsOutputFactor;   //!< output range factor
  OModelParam<float> itsNoise;          //!< output noise range

  /* older version: raw CS submaps -> [normalize, spatial competition,
     sum] -> SO -> [spatial competition, sum] -> CO -> [sum, spatial
     competition] -> VCX o/p :

     Since different feature types may have different ranges of
     responses, their responses are normalized to a fixed range so as
     to treat them equally. Ideally, normalization should occur at the
     visualCortex when it combines the outputs of the different
     feature types. Instead, the older version performs the
     normalization within each feature type, forcing all scales within
     a feature type to be treated as equally important. This is
     undesirable as it artificially magnifies even those scales which
     contain only noise, and we lose information about the relative
     goodness of the different scales.

     new version: raw CS submaps -> [spatial competition, sum] -> SO
     -> [spatial competition, sum] -> CO -> [normalize, sum, spatial
     competition] -> VCX o/p */
  OModelParam<bool> itsUseOlderVersion;
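
  /* Side-by-side restatement of the two pipelines described above
     (pseudocode only; normalize() stands for range normalization and
     maxnorm() for spatial competition, and the function names are
     illustrative, not toolkit calls):

       older:  SO  = sum( maxnorm( normalize( raw CS submaps ) ) )
               CO  = sum( maxnorm( SO ) )
               VCX = maxnorm( sum( CO ) )

       newer:  SO  = sum( maxnorm( raw CS submaps ) )
               CO  = sum( maxnorm( SO ) )
               VCX = maxnorm( sum( normalize( CO ) ) )
  */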

  //! LevelSpec used by our channels, used to compute output dims
  OModelParam<LevelSpec> itsLevelSpec;

  OModelParam<std::string> itsSaveOutTo; //!< name of an MGZ file into which to save our outputs (if any)
  OModelParam<std::string> itsLoadOutFrom; //!< name of an MGZ file from which to load precomputed outputs (if any)
  OModelParam<bool> itsSaveOutput;      //!< save our output?
  OModelParam<bool> itsUseMax;     //!< use max across features instead of sum?
  OModelParam<float> itsWeightThresh;     //!< threshold on channel total weights for last round of maxnorm

  //! start
  virtual void start1();

  //! stop
  virtual void stop2();

private:
  RawVisualCortex(const RawVisualCortex&); // not allowed
  RawVisualCortex& operator=(const RawVisualCortex&); // not allowed

  // NOTE: We use the low-level MgzEncoder and MgzDecoder objects here
  // instead of the MgzInputStream and MgzOutputStream just to avoid
  // having extra ModelComponent objects which may confuse the
  // VisualCortex if we add them as subcomponents:
  rutz::shared_ptr<MgzDecoder> itsOutputMgzIn; // read precomputed outputs
  rutz::shared_ptr<MgzEncoder> itsOutputMgzOut; // write computed outputs

  rutz::shared_ptr<XWinManaged> itsWin;
  uint itsFrame;
};


/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */

#endif