SimulationViewerEyeMvtNeuro.C

Go to the documentation of this file.
/*!@file Neuro/SimulationViewerEyeMvtNeuro.C get the saliency values at a
   probe location relative to the current eye position for comparing
   with a neural response */
00004 
00005 // //////////////////////////////////////////////////////////////////// //
00006 // The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2000-2003   //
00007 // by the University of Southern California (USC) and the iLab at USC.  //
00008 // See http://iLab.usc.edu for information about this project.          //
00009 // //////////////////////////////////////////////////////////////////// //
00010 // Major portions of the iLab Neuromorphic Vision Toolkit are protected //
00011 // under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
00012 // in Visual Environments, and Applications'' by Christof Koch and      //
00013 // Laurent Itti, California Institute of Technology, 2001 (patent       //
00014 // pending; application number 09/912,225 filed July 23, 2001; see      //
00015 // http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
00016 // //////////////////////////////////////////////////////////////////// //
00017 // This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
00018 //                                                                      //
00019 // The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
00020 // redistribute it and/or modify it under the terms of the GNU General  //
00021 // Public License as published by the Free Software Foundation; either  //
00022 // version 2 of the License, or (at your option) any later version.     //
00023 //                                                                      //
00024 // The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
00025 // that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
00026 // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
00027 // PURPOSE.  See the GNU General Public License for more details.       //
00028 //                                                                      //
00029 // You should have received a copy of the GNU General Public License    //
00030 // along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
00031 // to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
00032 // Boston, MA 02111-1307 USA.                                           //
00033 // //////////////////////////////////////////////////////////////////// //
00034 //
00035 // Primary maintainer for this file: David J Berg <dberg@usc.edu>
00036 // $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Neuro/SimulationViewerEyeMvtNeuro.C $
00037 
00038 #include "Neuro/SimulationViewerEyeMvtNeuro.H"
00039 #include "Component/ModelComponent.H"
00040 #include "Component/OptionManager.H"
00041 #include "Channels/ChannelOpts.H" // for LevelSpec option
00042 #include "Image/FilterOps.H" // for lowPass3()
00043 #include "Image/ShapeOps.H" // for rescale()
#include "Image/MathOps.H" // for getLocalAvg()
00045 #include "Image/DrawOps.H"  //for LinePlot()
00046 #include "Image/Transforms.H" //for composite()
00047 #include "Image/ColorOps.H" //for toRGB()
00048 #include "Image/Layout.H"
00049 #include "Image/ImageSetOps.H"
00050 #include "Raster/Raster.H"  //for readGray()
00051 #include "Neuro/NeuroOpts.H"
00052 #include "Neuro/NeuroSimEvents.H"
00053 #include "Psycho/EyeData.H"
00054 #include "Simulation/SimEventQueue.H"
00055 #include "Transport/FrameInfo.H"
00056 #include "Transport/FrameOstream.H"
00057 #include "Util/StringUtil.H" //for split()
00058 #include "Util/StringConversions.H" //for toStr()
00059 #include "rutz/trace.h"
00060 
00061 #include <fstream>
00062 #include <iostream>
00063 
// ######################################################################
//! Construct the viewer: register callbacks, export options, zero state.
SimulationViewerEyeMvtNeuro::
SimulationViewerEyeMvtNeuro(OptionManager& mgr, const std::string& descrName,
                       const std::string& tagName) :
  SimulationViewerAdapter(mgr, descrName, tagName),
  // simulation callbacks handled by this module:
  SIMCALLBACK_INIT(SimEventClockTick),
  SIMCALLBACK_INIT(SimEventSaveOutput),
  // command-line options:
  itsSaveTraj(&OPT_SVsaveTraj, this),
  itsSaveMegaCombo(&OPT_SVEMsaveMegaCombo, this),
  itsDelayCacheSize(&OPT_SVEMdelayCacheSize, this),
  itsEyePatchSize(&OPT_SVpatchSize, this),
  itsLevelSpec(&OPT_LevelSpec, this),
  itsUseDiagColor(&OPT_SVEMuseDiagColors, this),
  itsMaxComboWidth(&OPT_SVEMmaxComboWidth, this),
  itsShiftInput(&OPT_ShiftInputToEye, this),
  itsUseSpaceVariantBoundary("UseSpaceVariantBoundary", this, false),
  itsOutFname(&OPT_SVEMNoutFName, this),
  itsProbe(&OPT_SVEMNprobeLocation, this),
  itsRFSize(&OPT_SVEMNrfSize, this),
  itsRFMaskName(&OPT_SVEMNrfMaskName, this),
  itsNeuronFileName(&OPT_SVEMNneuroFileName, this),
  itsDisplayTime(&OPT_SVEMNdisplayTime, this),
  itsDelaySpike(&OPT_SVEMNdelayNeuron, this),
  itsBufferLength(&OPT_SVEMNplotBufLength, this),
  itsSalScale(&OPT_SVEMNSalScale, this),
  itsNeuroScale(&OPT_SVEMNNeuroScale, this),
  itsVisRFName(&OPT_SVEMNNeuroRFVisFile, this),
  itsMotRFName(&OPT_SVEMNNeuroRFMotFile, this),
  itsMotWindow(&OPT_SVEMNMotWindow, this),
  itsVisWindow(&OPT_SVEMNVisWindow, this),
  itsVisOffset(&OPT_SVEMNVisOffset, this),
  // internal state, all empty/zero until start1() and the first events:
  itsDelayCache(), itsHeadSM(), itsDrawings(), itsRFMask(),
  itsVisRf(), itsVisRfBuf(), itsMotRf(), itsMotSamp(0),
  itsVisSamp(0), itsVisSampOff(0),
  itsOutFile(0), itsNeuronFile(0), itsRate(SimTime::ZERO()),
  itsPpdx(0.0F), itsPpdy(0.0F),
  itsProbeP(), itsRawCenter(), itsRFSizeP(0), itsInputDims(0,0), itsOgInputDims(0,0),
  itsSpikeVals(), itsSpikePos(0), itsSalPb(), itsSpikePb(),
  itsHposPb(), itsVposPb(), itsXlabel(), itsTitle()
{
GVX_TRACE(__PRETTY_FUNCTION__);

  // disable IOR:
  LINFO("NOTE: disabling IOR and SE, selecting FixedSaccadeController");
  getManager().setOptionValString(&OPT_IORtype, "None");
  getManager().setOptionValString(&OPT_ShapeEstimatorMode, "None");

  // select an eyetrack EyeHeadController:
  getManager().setOptionValString(&OPT_EyeHeadControllerType, "EyeTrack");

  // change default to --display-additive=false; user can still
  // override this on the command-line
  getManager().setOptionValString(&OPT_SVdisplayAdditive, "false");
}
00118 
// ######################################################################
//! Destructor; itsOutFile/itsNeuronFile are released in stop1(), not here.
SimulationViewerEyeMvtNeuro::~SimulationViewerEyeMvtNeuro()
{
GVX_TRACE(__PRETTY_FUNCTION__);
}
00124 
00125 // ######################################################################
00126 void SimulationViewerEyeMvtNeuro::start1()
00127 {
00128 GVX_TRACE(__PRETTY_FUNCTION__);
00129 
00130  if (itsUseSpaceVariantBoundary.getVal() && !itsShiftInput.getVal())
00131    LFATAL("You must shift the input (--shift-input) if you are using this simulation viewer "
00132           "with a space variant transform");
00133 
00134  //!add a delay, if any, to our spike vector
00135  itsSpikeVals.resize(itsDelaySpike.getVal(), 0.0F);
00136 
00137  //!set the ploting buffer lengths
00138  itsSalPb.reset(itsBufferLength.getVal(), itsSalScale.getVal(), itsRate);
00139  itsSpikePb.reset(itsBufferLength.getVal(), itsNeuroScale.getVal(), itsRate);
00140  itsHposPb.reset(itsBufferLength.getVal(), 0.0F, itsRate);
00141  itsVposPb.reset(itsBufferLength.getVal(), 0.0F, itsRate);
00142 
00143  //setup delay cache
00144  if (itsDelayCacheSize.getVal())
00145    itsDelayCache.setMaxSize(itsDelayCacheSize.getVal());
00146  
00147  // open output file:
00148  if (itsOutFname.getVal().empty() == false)
00149    {
00150      if (itsOutFile) delete itsOutFile;
00151      itsOutFile = new std::ofstream(itsOutFname.getVal().c_str());
00152      if (itsOutFile->is_open() == false)
00153        LFATAL("Cannot open '%s' for writing", itsOutFname.getVal().c_str());
00154    }
00155  
00156  // open neuron file
00157  if (itsNeuronFileName.getVal().empty() == false)
00158    {
00159      if (itsNeuronFile) delete itsNeuronFile;
00160      itsNeuronFile = new std::ifstream(itsNeuronFileName.getVal().c_str());
00161      if (itsNeuronFile->is_open() == false)
00162        LFATAL("Cannot open '%s' for reading",
00163               itsNeuronFileName.getVal().c_str());
00164      else
00165        {
00166          while (! itsNeuronFile->eof())
00167            {
00168              float val;
00169              (*itsNeuronFile) >> val;
00170              itsSpikeVals.push_back(val);
00171            }
00172          itsNeuronFile->close();
00173        }
00174    }
00175 
00176  //read visual rf
00177  if (itsVisRFName.getVal().empty() == false)
00178    {
00179      readImageSet(itsVisRf, itsVisRFName.getVal());
00180      if (itsVisRf.size() > 0)
00181        LINFO("Loading saved visual RF spike accumulator");
00182      else
00183        LINFO("Starting new visual RF spike accumulator");
00184    }
00185  
00186  //read the motor rf
00187  if (itsMotRFName.getVal().empty() == false)
00188    {
00189      if (Raster::fileExists(itsMotRFName.getVal()))
00190        {
00191          itsMotRf = Raster::ReadFloat(itsMotRFName.getVal());
00192          LINFO("Loading saved motor RF spike accumulator");
00193        }
00194      else
00195        LINFO("Starting new motor RF spike accumulator");
00196    }
00197 
00198  //load a weight mask if one exists
00199  if (itsRFMaskName.getVal().empty() == false)
00200    itsRFMask = Raster::ReadFloat(itsRFMaskName.getVal());
00201 
00202  //set our title and xlabel
00203  itsXlabel = itsBufferLength.getVal().toString() + " window";
00204  itsTitle = "RF center: " + toStr<Point2D<float> >(itsProbe.getVal()) + 
00205    ", RF size: " + toStr<float>(itsRFSize.getVal()); 
00206  
00207  SimulationViewer::start1();
00208 }
00209 
00210 // ######################################################################
00211 void SimulationViewerEyeMvtNeuro::stop1()
00212 {
00213 GVX_TRACE(__PRETTY_FUNCTION__);
00214 
00215  //write out the vis rf
00216  if (itsVisRFName.getVal().empty() == false)
00217    {
00218      LINFO("Saving visual RF set: %s", itsVisRFName.getVal().c_str());
00219      saveImageSet(itsVisRf, FLOAT_NORM_PRESERVE, itsVisRFName.getVal());
00220    }
00221 
00222  //write out the motor rf
00223  if (itsMotRFName.getVal().empty() == false)
00224    {
00225      LINFO("Saving motor RF set: %s", itsMotRFName.getVal().c_str());
00226      Raster::WriteFloat(itsMotRf, FLOAT_NORM_PRESERVE, itsMotRFName.getVal());
00227    }
00228 
00229  itsOutFile->close();
00230  if (itsOutFile) 
00231    { 
00232      delete itsOutFile; 
00233      itsOutFile = 0; 
00234    }
00235  
00236  if (itsNeuronFile) 
00237   {
00238     delete itsNeuronFile; 
00239     itsNeuronFile = 0;
00240   }
00241 }
00242 
// ######################################################################
//! Handle a new retinal frame: clear drawings and track input dimensions.
void SimulationViewerEyeMvtNeuro::
doEventRetinaImage(SimEventQueue& q, rutz::shared_ptr<SimEventRetinaImage>& e)
{
  // any new input frame from the retina? If so, let's initialize or
  // clear all our drawings:
  itsDrawings.resize(e->frame().getDims(), true);
  
  //check dimensions of retinal output
  if (itsInputDims != e->frame().getDims()) 
    {
      LINFO("Input Image Dimensions have changed");
      itsInputDims =  e->frame().getDims(); //dims of retinal image
      itsOgInputDims = e->getRawInputDims(); //dims of input
      
      // center of the retinal image, used as the eye position when the
      // input is shifted to the eye (--shift-input)
      itsRawCenter = Point2D<int>(itsInputDims.w()/2, itsInputDims.h()/2);
      
      // resize the eye-position plot buffers to span the stimulus in degrees
      // NOTE(review): itsPpdx/itsPpdy are 0.0F until setDegtoPixels() runs
      // on the first tracker event, so these divisions can be inf here; the
      // buffers are reset again with valid values in onSimEventClockTick --
      // confirm that ordering is intended
      itsHposPb.reset(itsBufferLength.getVal(), 
                      (float)itsOgInputDims.w() / itsPpdx, itsRate);
      itsVposPb.reset(itsBufferLength.getVal(), 
                      (float)itsOgInputDims.h() / itsPpdy, itsRate);
    }  
}
00266 
00267 // ######################################################################
00268 void SimulationViewerEyeMvtNeuro::
00269 onSimEventClockTick(SimEventQueue& q, rutz::shared_ptr<SimEventClockTick>& ect)
00270 {
00271   // Let's update our sliding caches with the current SM/AGM/etc:
00272   const Image<float> currsm = getMap(q, false);//automatically inverse transformed if requested
00273 
00274   //if our map dimensions are different then our spike triggered accumulators, reset them
00275   if ((itsMotRFName.getVal().empty() == false) && (itsMotRf.getDims() != itsOgInputDims))
00276     itsMotRf.resize(itsOgInputDims, ZEROS);
00277   
00278   if ((itsVisRFName.getVal().empty() == false) && (itsVisRf.back().getDims() != currsm.getDims()))
00279     itsVisRf.clear();
00280 
00281   // if we have a delay cache, we push the SM into it and grab what comes out
00282   if (itsDelayCacheSize.getVal())
00283     {
00284       // let's push the SM into our delay sliding cache:
00285       itsDelayCache.push_back(currsm);
00286       
00287       //grab the latest
00288       if (int(itsDelayCache.size()) >= itsDelayCacheSize.getVal())
00289         itsHeadSM = itsDelayCache.front();
00290       else
00291         itsHeadSM.resize(currsm.getDims(), true);  // blank map
00292     }
00293   else
00294     itsHeadSM = currsm;  // no delay cache
00295     
00296   // any eye-tracker action?
00297   SeC<SimEventEyeTrackerData> e = q.check<SimEventEyeTrackerData>(this);
00298 
00299   while(e.is_valid()) {
00300     // parse that event:
00301     const rutz::shared_ptr<EyeData> trackCurrPos = e->data();
00302     const int tnum = e->trackerNum();
00303     setDegtoPixels(e->trackerPpd().ppdx(), e->trackerPpd().ppdy());
00304     
00305     //if our tracking rate changed make some updates
00306     SimTime rate = e->trackerHz();
00307     if (itsRate != rate)
00308       {
00309         itsRate = rate;
00310         
00311         itsHposPb.reset(itsBufferLength.getVal(), 
00312                         (float)itsOgInputDims.w() / itsPpdx, itsRate);
00313 
00314         itsVposPb.reset(itsBufferLength.getVal(), 
00315                         (float)itsOgInputDims.h() / itsPpdy, itsRate);
00316 
00317         itsSalPb.reset(itsBufferLength.getVal(), itsSalScale.getVal(), 
00318                        itsRate);
00319 
00320         itsSpikePb.reset(itsBufferLength.getVal(),itsNeuroScale.getVal(),
00321                          itsRate);
00322 
00323         //set motor window in samples
00324         itsMotSamp = uint(itsMotWindow.getVal().secs() * itsRate.hertz());
00325         itsVisSamp = uint(itsVisWindow.getVal().secs() * itsRate.hertz());
00326         itsVisSampOff = uint(itsVisOffset.getVal().secs() * itsRate.hertz());
00327       }
00328     
00329     //lets make sure we only have one tracker
00330     if (tnum > 0)          
00331       LFATAL("SimulationEyeMvtNeuro only supports one eye tracker");
00332     
00333     // select a drawing color & size:
00334     PixRGB<byte> col(1,1,1);
00335     // note: we use 'almost black' (1,1,1) instead of black so that we can later
00336     // composite our drawings with various images, with pure black
00337     // then being treated as transparent):
00338     if (!itsUseDiagColor.getVal())
00339       {
00340         col = e->trackerColor();
00341         if (trackCurrPos->isInSaccade()) col += byte(64);//will clamp/saturate 
00342         if (trackCurrPos->isInBlink()) col -= byte(128);//will clamp/saturate 
00343       }
00344     else //diagnostic colors
00345       {
00346         byte r = 1, g = 1, b = 1;
00347         if (trackCurrPos->isInSaccade()) 
00348           g = 255;//green for saccade
00349         else if (trackCurrPos->isInSmoothPursuit()) 
00350           {r = 255;b = 255;} //magenta for smooth pursuit
00351         else if (trackCurrPos->isInFixation()) 
00352           b = 255;// blue for fixation
00353         if (trackCurrPos->isInCombinedSaccade()) 
00354           {b = 255;g = 255;}//cyan for combined
00355 
00356         col = PixRGB<byte>(r,g,b);    //default black
00357         if (trackCurrPos->isInBlink()) 
00358           col-=byte(128); // let's just make the cursor darker
00359       }
00360   
00361     // convert to retinal coordinates (accounting for any shifting,
00362     // embedding, etc):
00363     Point2D<int>  rawTrackerPos = trackCurrPos->position();
00364     Point2D<int>  trackerPos = (itsShiftInput.getVal()) ? itsRawCenter : rawTrackerPos;    
00365 
00366     // draw small patch at current eye position:
00367     if (itsEyePatchSize.getVal())
00368       drawPatchBB(itsDrawings, trackerPos, itsEyePatchSize.getVal(), 
00369                   col, PixRGB<byte>(1));
00370     
00371     //display simulation time
00372     if (itsDisplayTime.getVal())
00373       {
00374         const SimpleFont sf = SimpleFont::fixedMaxWidth(itsInputDims.w() / 90);
00375         writeText(itsDrawings, Point2D<int>(0, 0),
00376                   q.now().toString().c_str(), 
00377                   PixRGB<byte>(1, 1, 1),PixRGB<byte>(255,255,255),sf);
00378       }
00379 
00380     double rfsal = -1.0;
00381     //if we have a valid probe use it, otherwise use an rf mask
00382     if (itsProbeP != Point2D<int>(-1,-1))
00383       {
00384         //get our rf position
00385         Point2D<int> rfPos = itsProbeP;
00386         toRetinal(rfPos);
00387         rfPos += trackerPos;
00388    
00389         LINFO("Collecting receptive field sample at (%d,%d) at time: %s", 
00390               rfPos.i, rfPos.j, q.now().toString().c_str());
00391         
00392         //draw a small circle around the receptive field
00393         if (itsRFSizeP > 0)
00394           {
00395             if (itsTransform->validTransforms() && itsInverseRetinal.getVal())
00396               drawCircle(itsDrawings, rfPos, itsInputDims.w()/20, col, 2);
00397             else
00398               drawCircle(itsDrawings, rfPos, itsRFSizeP,col, 2);
00399           }
00400         else
00401           drawPatchBB(itsDrawings, rfPos, itsEyePatchSize.getVal(), 
00402                       col, PixRGB<byte>(1));
00403       
00404         //collect saliency values
00405         // get the map level to scale things down:
00406         int sml = itsLevelSpec.getVal().mapLevel();
00407         Point2D<int> p(rfPos.i >> sml, rfPos.j >> sml);
00408         
00409         const int rad = itsRFSizeP >> sml;
00410         if (itsHeadSM.coordsOk(p)) //is our probe valid
00411           rfsal = getLocalAvg(itsHeadSM, p, rad);
00412       }
00413     else if (itsRFMaskName.getVal().empty() == false) //do we have a rf mask
00414       {
00415         if (itsRFMask.getDims() != itsInputDims)
00416           LFATAL("Receptive field weight mask must be the same size as"
00417                  "the input.");
00418 
00419         if (itsRFMask.getDims() != itsHeadSM.getDims())
00420           LFATAL("Output map must be scaled to RF Mask size.");
00421         
00422         //if using mask, we must be inverting transforms or not use them
00423         if (itsTransform->validTransforms() && itsInverseRetinal.getVal())
00424           LINFO("When using masks, everything must be in stimulus coordinates, so "
00425                 "if you are trying to use a transform, you must inverse it with "
00426                 "--inverse-retinal ");
00427 
00428         LINFO("Collecting receptive field weighted sample at time: %s",
00429               q.now().toString().c_str());
00430 
00431         rfsal = 0.0;
00432         Image<float>::const_iterator witer(itsRFMask.begin()),
00433           wend(itsRFMask.end()), iiter(itsHeadSM.begin());
00434         while (witer != wend)
00435           rfsal += (*witer++) * (*iiter++);
00436       }
00437     else
00438       LINFO("Probe is invalid and no RFMask has been supplied, "
00439             "storing -1.0 as saliency  value");
00440     
00441     //add our current sal val to our plot buffer
00442     itsSalPb.push(q.now(), rfsal); 
00443 
00444     //if we read in neuron data, add it to the plot buffer
00445     bool didspike = false;
00446     if (itsNeuronFile)
00447       {
00448         itsSpikePb.push(q.now(), itsSpikeVals[itsSpikePos]);
00449 
00450         if (itsSpikeVals[itsSpikePos] > 0)
00451           didspike = true; 
00452         ++itsSpikePos;
00453       }
00454     
00455     //for capturing our spike triggered stimulus ensemble
00456     if (itsVisRFName.getVal().empty() == false)
00457       {
00458         itsVisRfBuf.push_back(itsHeadSM);//this will always be the display-type
00459         while (itsVisRfBuf.size() > (itsVisSamp + itsVisSampOff))
00460           itsVisRfBuf.pop_front();
00461       }
00462     
00463     //if we spike, grab the stimulus from our queue and add it to the count
00464     if (didspike && (itsVisRfBuf.size() == (itsVisSamp + itsVisSampOff)))
00465       { 
00466         ImageSet<float> temp = itsVisRfBuf.subSet(itsVisSampOff,
00467                                                   itsVisRfBuf.size());
00468 
00469         if (itsVisRf.isNonEmpty()) itsVisRf += temp; else itsVisRf = temp;
00470       }
00471 
00472     //if we are at the onset of a saccade, see if there are any spikes
00473     //and store that count at the saccadic enpoint
00474     if (didspike && itsMotRf.initialized() && 
00475         trackCurrPos->hasSaccadeTargetData() && !trackCurrPos->isInBlink())
00476       {
00477         //get the endpoint
00478         Point2D<int> dp = trackCurrPos->saccadeTarget();
00479         dp.j = itsOgInputDims.h() - dp.j;//flip around
00480         
00481         //get the number of spikes that occured in a small window
00482         //around saccade onset
00483         uint spos = itsSpikePos - itsMotSamp;
00484         if (spos < 0) spos = 0;
00485         uint epos = itsSpikePos + itsMotSamp;
00486         if (epos >= itsSpikeVals.size()) epos = itsSpikeVals.size() - 1;
00487         
00488         for (uint i = spos; i <= epos; ++i)
00489           itsMotRf[dp] += itsSpikeVals[i];
00490       }
00491 
00492     //grab our horizontal and vertical eye position for plotting
00493     Point2D<float> eyeDeg((float)rawTrackerPos.i / itsPpdx, 
00494                           ((float)itsOgInputDims.h() - (float)rawTrackerPos.j) / itsPpdy);
00495                           
00496     itsHposPb.push(q.now(), eyeDeg.i);
00497     itsVposPb.push(q.now(), eyeDeg.j);
00498     
00499     //should we output samples
00500     if (itsOutFile) 
00501       (*itsOutFile) << toStr<float>(rfsal) << std::endl;
00502 
00503     // any more events like that in the queue?
00504     e = q.check<SimEventEyeTrackerData>(this);
00505   }//end eye tracker data
00506 }
00507 
00508 // ######################################################################
00509 Image< PixRGB<byte> > SimulationViewerEyeMvtNeuro::getTraj()
00510 {
00511 GVX_TRACE(__PRETTY_FUNCTION__);
00512 
00513   // make a composite of the input + the drawings:
00514   Image< PixRGB<byte> > comp = composite(itsDrawings, itsInput);
00515   const int linewidth = comp.getWidth() / 400;
00516   
00517   //if we have a mega combo
00518   Dims ldims(comp.getWidth(), int(comp.getHeight()/3.0F));
00519   Image<PixRGB<byte> > linecomp;
00520   Layout< PixRGB<byte> > layout;
00521   if (itsSaveMegaCombo.getVal())
00522     {
00523       // let's get the current saliency map (normalized):
00524       const Dims dims = itsInput.getDims();
00525 
00526       //rescale if necessary
00527       if (itsHeadSM.initialized()) 
00528         itsHeadSM = rescaleOpt(itsHeadSM, dims, itsDisplayInterp.getVal());
00529       else 
00530         itsHeadSM.resize(dims, true); // blank
00531       Image< PixRGB<byte> > smc = toRGB(Image<byte>(itsHeadSM));
00532       
00533       // make a composite of the instantaneous SM + the drawings:
00534       Image< PixRGB<byte> > smcomp = composite(itsDrawings, smc);
00535       
00536       drawLine(comp, Point2D<int>(dims.w()-1, 0), 
00537                Point2D<int>(dims.w()-1, dims.h()-1),
00538                PixRGB<byte>(255, 255, 0), linewidth);
00539       
00540       drawLine(smcomp, Point2D<int>(0, 0), 
00541                Point2D<int>(0, dims.h()-1),
00542                PixRGB<byte>(255, 255, 0), linewidth);
00543       
00544       //input right of salmap 
00545       layout = hcat(comp, smcomp);
00546 
00547       //get our layout dims
00548       ldims = Dims(layout.getWidth(), int(layout.getHeight()/2.5F));
00549 
00550       //if we are doing a combo plot draw a line of saliency values over time
00551       linecomp = itsSalPb.draw(ldims.w(), 
00552                                ldims.h(),
00553                                itsTitle.c_str(), //probe location
00554                                "Sal", //y label
00555                                itsXlabel.c_str(),  //window length
00556                                PixRGB<byte>(1,1,1),//black line
00557                                0, false);//number of x axis tick marks
00558     }
00559   else
00560     layout = Layout< PixRGB<byte> >(comp);
00561 
00562   //create some eye position plots
00563   Image<PixRGB<byte> > 
00564     eyeposh = itsHposPb.draw(ldims.w(), 
00565                              ldims.h(),
00566                              "Horizontal eye position", //title
00567                              "Deg", //y label
00568                              "",  //x label
00569                              PixRGB<byte>(1,1,1),//green line
00570                              0, false);//number of x axis tick marks
00571   
00572   Image<PixRGB<byte> > 
00573     eyeposv = itsVposPb.draw(ldims.w(), 
00574                              ldims.h(),
00575                              "Vertical eye position", //probe location
00576                              "Deg", //y label
00577                              "",  //x label
00578                              PixRGB<byte>(1,1,1),//green line
00579                              0, false);//number of x axis tick marks
00580 
00581   //if we have neural data, plot it as well
00582   if (itsNeuronFile) 
00583     {
00584       Image<PixRGB<byte> > 
00585         linespk = itsSpikePb.draw(ldims.w(), 
00586                                   ldims.h(),
00587                                   "", //title
00588                                   "Sp/S", //ylabel
00589                                   "", //xlabel
00590                                   PixRGB<byte>(1,1,255), //blue line
00591                                   0, true); //number of x axis tick marks
00592 
00593       linecomp = (linecomp.initialized())?
00594         composite(linecomp,linespk, PixRGB<byte>(255,255,255)):
00595         linecomp = linespk;
00596     }
00597 
00598   //combine all our elements and render
00599   layout = vcat(layout, vcat(vcat(eyeposh, eyeposv),linecomp));
00600   Image<PixRGB<byte> > ret = layout.render();
00601 
00602   //add a border to seperate movie and line plots
00603   drawLine(ret, Point2D<int>(0, comp.getHeight() - 1), 
00604            Point2D<int>(ldims.w()-1, comp.getHeight() - 1),
00605            PixRGB<byte>(255, 255, 0), linewidth);
00606   
00607   drawLine(ret, Point2D<int>(0, comp.getHeight()), 
00608            Point2D<int>(ldims.w() - 1, comp.getHeight()),
00609            PixRGB<byte>(255, 255, 0), linewidth);
00610   
00611   // make sure image is not unreasonably large:
00612   const int x = itsMaxComboWidth.getVal();
00613   const int y = int((float)x / (float)ret.getWidth() * (float)ret.getHeight());
00614   
00615   if (ret.getWidth() > itsMaxComboWidth.getVal())
00616     ret = rescaleBilinear(ret, Dims(x, y));
00617   
00618   return ret;
00619 }
00620 
// ######################################################################
//! Callback for SimEventSaveOutput: delegate to save1() with the save info.
void SimulationViewerEyeMvtNeuro::
onSimEventSaveOutput(SimEventQueue& q, rutz::shared_ptr<SimEventSaveOutput>& e)
{
  this->save1(e->sinfo());
}
00627 
00628 // ######################################################################
00629 void SimulationViewerEyeMvtNeuro::save1(const ModelComponentSaveInfo& sinfo)
00630 {
00631 GVX_TRACE(__PRETTY_FUNCTION__);
00632 
00633 // update the trajectory:
00634  Image< PixRGB<byte> > res = getTraj();
00635  
00636  // save results?
00637  if (itsSaveMegaCombo.getVal() || itsSaveTraj.getVal())
00638    {
00639      // get the OFS to save to, assuming sinfo is of type
00640      // SimModuleSaveInfo (will throw a fatal exception otherwise):
00641      nub::ref<FrameOstream> ofs =
00642        dynamic_cast<const SimModuleSaveInfo&>(sinfo).ofs;
00643      
00644      ofs->writeRGB(res, "T", 
00645                    FrameInfo("SimulationViewerEyeMvtNeuro trajectory",SRC_POS));
00646      
00647     }
00648 }
00649 
00650 // ######################################################################
00651 void SimulationViewerEyeMvtNeuro::setDegtoPixels(const float& ppdx, 
00652                                                  const float& ppdy)
00653 {
00654   if ( (ppdx != itsPpdx) && (ppdy != itsPpdy) )
00655     {
00656       itsPpdx = ppdx;
00657       itsPpdy = ppdy;
00658       
00659       //get in pixels our probe position and its size
00660       Point2D<float> p = itsProbe.getVal();
00661       
00662       if ((p.i == -1.0F) || (p.j == -1.0F))
00663         itsProbeP = Point2D<int>(-1,-1);
00664       else
00665         itsProbeP = Point2D<int>(int(itsPpdx * p.i),int(-1.0F * itsPpdy * p.j));
00666       
00667       itsRFSizeP = int( (itsPpdx + itsPpdy)/2.0F * itsRFSize.getVal());
00668     }
00669 }
00670 
00671 // ######################################################################
00672 void SimulationViewerEyeMvtNeuro::
00673 readImageSet(ImageSet<float>& set, const std::string filename)
00674 {
00675   std::string base, ext;
00676   prepFileName(filename, base, ext);
00677 
00678   for (uint i = 0; 1; ++i)
00679     {
00680       std::string name = base + toStr<uint>(i) + "." + ext;
00681       if (Raster::fileExists(name))
00682         {
00683           LINFO("Loading image %d in set: %s",i, name.c_str());
00684           set.push_back(Raster::ReadFloat(name));
00685         }
00686       else
00687         break;
00688     }
00689 }
00690 
00691 // ######################################################################
00692 void SimulationViewerEyeMvtNeuro::
00693 saveImageSet(const ImageSet<float>& set,
00694              const int flags, std::string fname,const RasterFileFormat ftype)
00695 {
00696   std::string base, ext;
00697   prepFileName(fname, base, ext);
00698   for (uint i = 0; i < set.size(); ++i)
00699     {
00700       std::string name = base + toStr<uint>(i) + "." + ext;
00701       LINFO("Writing image %d in set: %s",i, name.c_str());
00702       Raster::WriteFloat(set[i], FLOAT_NORM_PRESERVE, name);
00703     }
00704 }
00705 
00706 
00707 // ######################################################################
00708 void 
00709 SimulationViewerEyeMvtNeuro::prepFileName(const std::string& name, 
00710                                           std::string& base, std::string& ext)
00711 {
00712   std::vector<std::string> tok;
00713   split(name, ".", std::back_inserter(tok));
00714   ext = (tok.size() < 2) ? ".pfm" : tok.back();
00715   base = (tok.size() < 2) ? tok[0] : join(tok.begin(), tok.end()-1,".");
00716 }
00717 
// ######################################################################
//! Default-construct an empty plot buffer (zero length, autoscale mode).
SVEMNPlot::PlotBuf::PlotBuf() 
  : itsBufLength(SimTime::ZERO()), itsScale(0.0F),
    itsMax(itsScale), itsRate(SimTime::ZERO()),
    itsData(), itsTimes() 
{
  // itsScale == 0.0F selects autoscale mode: itsMax grows with the data
  // in push(); no samples are pre-filled until reset() is called with a
  // valid sample rate
}
00726 // ######################################################################
00727 SVEMNPlot::PlotBuf::PlotBuf(const SimTime& buflength, const float scale,
00728                             const SimTime& sample_rate) 
00729   : itsBufLength(buflength), itsScale(scale),itsMax(itsScale), 
00730     itsRate(sample_rate), 
00731     itsData(int(itsRate.hertz() * itsBufLength.secs()), 0.0F),
00732     itsTimes()
00733 {
00734   if (itsRate != SimTime::ZERO())
00735     for (SimTime t = -1 * itsBufLength; t <= SimTime::ZERO(); t += itsRate)
00736       {
00737         itsTimes.push_back(t);
00738         itsData.push_back(0.0F);
00739       }
00740 }
00741 
00742 // ######################################################################
00743 void SVEMNPlot::PlotBuf::push(const SimTime& time, const float& data)
00744 {
00745   if ((itsScale == 0.0F) && (data > itsMax))
00746     itsMax = data;
00747     
00748   itsTimes.push_back(time);
00749   itsData.push_back(data);
00750   SimTime d = itsTimes.back() - itsTimes.front();
00751   while (d > itsBufLength)
00752     {
00753       itsData.pop_front();
00754       itsTimes.pop_front();
00755       
00756       d = itsTimes.back() - itsTimes.front();
00757       
00758       if (d.nsecs() < 0)
00759         LFATAL("Cannot push past events into the buffer");
00760     }
00761 }
00762 
// ######################################################################
//! Reconfigure buffer length, y-scale and sample rate, then clear and refill.
void SVEMNPlot::PlotBuf::reset(const SimTime& buflength, const float scale,
                               const SimTime& sample_rate)
{
  itsBufLength = buflength;
  itsScale = scale;
  itsMax = itsScale; // scale == 0.0F means autoscale: itsMax grows in push()
  itsRate = sample_rate;
  this->reset(); // clear samples and pre-fill one window of zeros
}
00773 
// ######################################################################
//! Clear all samples and pre-fill one buffer-length window with zeros.
void SVEMNPlot::PlotBuf::reset()
{
  itsData.clear();
  itsTimes.clear();
  // pre-fill with zeros at times [-bufLength, 0] so the plot starts with
  // a full window; skipped if no sample rate has been set yet
  if (itsRate != SimTime::ZERO())
    for (SimTime t = -1 * itsBufLength; t <= SimTime::ZERO(); t += itsRate)
      {
        itsTimes.push_back(t);
        itsData.push_back(0.0F);
      }
}
00786 
// ######################################################################
//! Render the buffered samples as a w x h line-plot image.
Image<PixRGB<byte> > SVEMNPlot::PlotBuf::draw(const uint w, const uint h, 
                                              const char* title, 
                                              const char* ylabel, 
                                              const char* xlabel, 
                                              const PixRGB<byte>& linecol, 
                                              const int numticks, 
                                              const bool reverse)
{
  // y-axis runs from 0 to itsMax (fixed scale, or running maximum in
  // autoscale mode); white background
  return linePlot(itsData, w, h, 0.0F, itsMax, 
                  title, ylabel, xlabel, linecol, 
                  PixRGB<byte>(255,255,255), numticks, reverse);
}
00800 
00801 // ######################################################################
00802 /* So things look consistent in everyone's emacs... */
00803 /* Local Variables: */
00804 /* indent-tabs-mode: nil */
00805 /* End: */
Generated on Sun May 8 08:05:25 2011 for iLab Neuromorphic Vision Toolkit by  doxygen 1.6.3