/*!@file Neuro/VisualCortexEyeMvt.C Implementation for the fake
  human-eye-movement visual cortex class */

// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
// University of Southern California (USC) and the iLab at USC.         //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: Laurent Itti <itti@usc.edu>
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Neuro/VisualCortexEyeMvt.C $
// $Id: VisualCortexEyeMvt.C 13065 2010-03-28 00:01:00Z itti $
//

#include "Neuro/VisualCortexEyeMvt.H"

#include "Channels/InputFrame.H"
#include "Channels/ChannelOpts.H"
#include "Component/OptionManager.H"
#include "Image/Kernels.H"      // for gaussianBlob()
#include "Image/MathOps.H"      // for takeMax()
#include "Media/MediaOpts.H"    // for OPT_InputFrameDims
#include "Neuro/NeuroOpts.H"
#include "Neuro/NeuroSimEvents.H"
#include "Psycho/EyeTrace.H"
#include "Simulation/SimEventQueue.H"
#include "Transport/FrameInfo.H"
#include "Transport/FrameOstream.H"
#include "Util/SimTime.H"
#include "Util/sformat.H"
#include "Util/StringUtil.H"

#include <algorithm>
#include <cctype>
#include <cstdio>

// ######################################################################
VisualCortexEyeMvt::VisualCortexEyeMvt(OptionManager& mgr,
                                       const std::string& descrName,
                                       const std::string& tagName) :
  VisualCortex(mgr, descrName, tagName),
  SIMCALLBACK_INIT(SimEventClockTick),
  SIMCALLBACK_INIT(SimEventRetinaImage),
  SIMCALLBACK_INIT(SimEventSaveOutput),
  SIMREQHANDLER_INIT(SimReqVCXfeatures),
  SIMREQHANDLER_INIT(SimReqVCXmaps),
  itsFnames(&OPT_VCEMeyeFnames, this),
  itsSigma(&OPT_VCEMsigma, this),
  itsForgetFac(&OPT_VCEMforgetFac, this),
  itsDelay(&OPT_VCEMdelay, this),
  itsUseMax(&OPT_VCEMuseMax, this),
  itsSaccadeOnly(&OPT_VCEMsaccadeOnly, this),
  itsLevelSpec(&OPT_LevelSpec, this),
  itsSaveOutput(&OPT_RawVisualCortexSaveOutput, this), // see Neuro/NeuroOpts.{H,C}
  itsOutputFactor(&OPT_RawVisualCortexOutputFactor, this),
  itsMaps(), itsEyeTrace(), itsEyeSample(), itsOutputCache()
{ }

// ######################################################################
VisualCortexEyeMvt::~VisualCortexEyeMvt()
{ }

// ######################################################################
void VisualCortexEyeMvt::start1()
{
  VisualCortex::start1();

  // parse our filename config string and instantiate all our eye traces:
  std::vector<std::string> tok;
  split(itsFnames.getVal(), ",", std::back_inserter(tok));
  if (tok.empty()) LFATAL("I cannot run without at least one eyetrace.");

  for (uint i = 0; i < tok.size(); i ++)
    {
      LINFO("Instantiating EyeTrace %03d with file '%s'", i, tok[i].c_str());
      rutz::shared_ptr<EyeTrace> et(new EyeTrace(tok[i], PixRGB<byte>(255)));
      itsEyeTrace.push_back(et);
      itsEyeSample.push_back(itsDelay.getVal());
    }

  // create empty maps:
  itsMaps.reset(itsEyeTrace.size());
}

// ######################################################################
void VisualCortexEyeMvt::
onSimEventClockTick(SimEventQueue& q, rutz::shared_ptr<SimEventClockTick>& e)
{
  const SimTime t = q.now();
  const uint sml = itsLevelSpec.getVal().mapLevel();

  // do some forgetting:
  if (itsForgetFac.getVal() != 1.0f) {
    for (uint i = 0; i < itsMaps.size(); ++i) itsMaps[i] *= itsForgetFac.getVal();
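    // the per-observer maps just changed, so the combined output cached by
    // getOutput() is stale and must be recomputed on the next request: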
    itsOutputCache.freeMem();
  }

  // evolve all our eye traces and see what they have:
  bool keepgoing = true;
  while (keepgoing) { // will go on until no tracker has any new data
    keepgoing = false;

    // loop over our eye traces:
    for (uint i = 0; i < itsEyeTrace.size(); i ++)
      if (itsEyeTrace[i]->hasData(itsEyeSample[i] - itsDelay.getVal(), t)) {
        // ok, this warrants that we continue our while() loop:
        keepgoing = true;

        // get the next data sample:
        rutz::shared_ptr<EyeData> data = itsEyeTrace[i]->data(itsEyeSample[i]);

        CLDEBUG("Eye trace %03u [%07"ZU"] (%d, %d) at %.1fms",
                i, itsEyeSample[i], data->position().i, data->position().j,
                itsEyeSample[i] * itsEyeTrace[i]->period().msecs());

        // if we are only interested in saccades, skip this sample unless it
        // contains a saccade target, in which case use the saccade endpoint
        // for our plots; otherwise, use the position of every sample:
        float ex, ey;
        if (itsSaccadeOnly.getVal()) {
          if (data->hasSaccadeTargetData() == false) { ++ itsEyeSample[i]; continue; }
          data->getSaccadeTarget(ex, ey);
        } else data->getPosition(ex, ey);

        // convert eye coords to the scale of the saliency map, rounding to nearest:
        Point2D<int> p(int(ex / float(1 << sml) + 0.4999f), int(ey / float(1 << sml) + 0.4999f));

        // inject new blob if valid coords:
        if (itsMaps[i].coordsOk(p))
          {
            if (itsSigma.getVal() > 0.0f)
              {
                // draw a blob at the current eye position:
                Image<float> blob = gaussianBlob<float>(itsMaps[i].getDims(), p, itsSigma.getVal(), itsSigma.getVal());

                // take max between current and old blobs:
                itsMaps[i] = takeMax(itsMaps[i], blob);
              }
            else
              // single spike mode:
              itsMaps[i].setVal(p, itsMaps[i].getVal(p) + 1.0f);

            // our maps have changed, hence our output cache is now invalid:
            itsOutputCache.freeMem();
          }

        // ready for next eye movement sample:
        ++ itsEyeSample[i];
      }
  }
}

// ######################################################################
void VisualCortexEyeMvt::
onSimEventRetinaImage(SimEventQueue& q, rutz::shared_ptr<SimEventRetinaImage>& e)
{
  // start new maps fresh if first time:
  if (itsMaps[0].initialized() == false)
    {
      // compute map dims:
      const int w = e->frame().getWidth() >> itsLevelSpec.getVal().mapLevel();
      const int h = e->frame().getHeight() >> itsLevelSpec.getVal().mapLevel();

      for (uint i = 0; i < itsMaps.size(); i ++) itsMaps[i].resize(w, h, true);

      // invalidate our output cache:
      itsOutputCache.freeMem();
    }

  // Our internal maps change at every clock tick, but it may be a bit
  // too intensive to post a new VisualCortexOutput SimEvent at that
  // rate. So instead we will do it only each time there is a new
  // input frame, like other VisualCortex derivatives do:
  q.post(rutz::make_shared(new SimEventVisualCortexOutput(this, getOutput())));
}

// ######################################################################
void VisualCortexEyeMvt::handleSimReqVCXfeatures(SimEventQueue& q, rutz::shared_ptr<SimReqVCXfeatures>& r)
{
  // just return a single feature, our map value at the requested location:
  const uint sml = itsLevelSpec.getVal().mapLevel();
  const Image<float> out = getOutput();
  Point2D<int> p((r->loc().i + sml/2) >> sml, (r->loc().j + sml/2) >> sml);
  if (out.coordsOk(p)) r->features().push_back(out.getVal(p));
  else r->features().push_back(0.0F);
}

// ######################################################################
void VisualCortexEyeMvt::handleSimReqVCXmaps(SimEventQueue& q, rutz::shared_ptr<SimReqVCXmaps>& r)
{
  r->populateChannelMaps(this);
}

// ######################################################################
Image<float> VisualCortexEyeMvt::getOutput()
{
  // do we already have it cached?
  if (itsOutputCache.initialized()) return itsOutputCache;

  // compute output from all human maps:
  Image<float> out = itsMaps[0];
  if (itsUseMax.getVal()) for (uint idx = 1; idx < itsMaps.size(); idx ++) out = takeMax(out, itsMaps[idx]);
  else for (uint idx = 1; idx < itsMaps.size(); idx ++) out += itsMaps[idx];

  // the output is now typically in the (0.0..0.1) range; we want the
  // saliency map input current to be in the nA range:
  out *= 50.0f * itsOutputFactor.getVal();
  float mi, ma; getMinMax(out, mi, ma);
  LINFO("Salmap input range is [%f .. %f] nA", mi * 1.0e9F, ma * 1.0e9F);
  LINFO("Computed VisualCortex output.");

  itsOutputCache = out;

  return out;
}

// ######################################################################
void VisualCortexEyeMvt::onSimEventSaveOutput(SimEventQueue& q, rutz::shared_ptr<SimEventSaveOutput>& e)
{
  // get the OFS to save to, assuming sinfo is of type
  // SimModuleSaveInfo (will throw a fatal exception otherwise):
  nub::ref<FrameOstream> ofs = dynamic_cast<const SimModuleSaveInfo&>(e->sinfo()).ofs;

  // save our own output:
  if (itsSaveOutput.getVal())
    ofs->writeFloat(getOutput(), FLOAT_NORM_PRESERVE, "VCO",
                    FrameInfo("visual cortex eyemvt output (input to saliency map)", SRC_POS));
}

// ######################################################################
/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */