SalientRegionTracker.C

00001 /*!@file Beobot/SalientRegionTracker.C template matching tracker
00002   on conspicuity maps                                                   */
00003 // //////////////////////////////////////////////////////////////////// //
00004 // The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
00005 // University of Southern California (USC) and the iLab at USC.         //
00006 // See http://iLab.usc.edu for information about this project.          //
00007 // //////////////////////////////////////////////////////////////////// //
00008 // Major portions of the iLab Neuromorphic Vision Toolkit are protected //
00009 // under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
00010 // in Visual Environments, and Applications'' by Christof Koch and      //
00011 // Laurent Itti, California Institute of Technology, 2001 (patent       //
00012 // pending; application number 09/912,225 filed July 23, 2001; see      //
00013 // http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
00014 // //////////////////////////////////////////////////////////////////// //
00015 // This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
00016 //                                                                      //
00017 // The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
00018 // redistribute it and/or modify it under the terms of the GNU General  //
00019 // Public License as published by the Free Software Foundation; either  //
00020 // version 2 of the License, or (at your option) any later version.     //
00021 //                                                                      //
00022 // The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
00023 // that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
00024 // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
00025 // PURPOSE.  See the GNU General Public License for more details.       //
00026 //                                                                      //
00027 // You should have received a copy of the GNU General Public License    //
00028 // along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
00029 // to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
00030 // Boston, MA 02111-1307 USA.                                           //
00031 // //////////////////////////////////////////////////////////////////// //
00032 //
00033 // Primary maintainer for this file: Christian Siagian <siagian@usc.edu>
00034 // $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Beobot/SalientRegionTracker.C $
00035 // $Id: SalientRegionTracker.C 13902 2010-09-09 15:31:09Z lior $
00036 //
00037 
00038 // ######################################################################
00039 
00040 #include "Image/OpenCVUtil.H"
00041 #include "Beobot/SalientRegionTracker.H"
00042 
00043 #include "Component/ModelManager.H"
00044 #include "Raster/Raster.H"
00045 #include "GUI/XWinManaged.H"
00046 #include "Image/Image.H"
00047 #include "Image/Pixels.H"
00048 #include "Util/Timer.H"
00049 
00050 #include <signal.h>
00051 
00052 #include "Image/ShapeOps.H"
00053 #include "Image/CutPaste.H"     // for inplacePaste()
00054 #include "Image/MathOps.H"      // for findMax
00055 #include "Image/DrawOps.H"
00056 
00057 #define WINSIZE           7
00058 #define templThresh       2000.0F
00059 
00060 // ######################################################################
00061 SalientRegionTracker::SalientRegionTracker(OptionManager& mgr,
00062                              const std::string& descrName,
00063                              const std::string& tagName)
00064   :
00065   ModelComponent(mgr, descrName, tagName),
00066   itsTimer(1000000)
00067 {
00068   itsCurrTrackedPoints.clear();
00069 
00070   //uint w = 160; uint h = 120;
00071   //itsWin.reset
00072   //  (new XWinManaged(Dims(7*w, 2*h), 0, 0, "Cmap Window" ));
00073 }
00074 
00075 // ######################################################################
00076 SalientRegionTracker::~SalientRegionTracker()
00077 { }
00078 
00079 // ######################################################################
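      // Feed one frame to the tracker: the input image, its conspicuity
      // maps, and (when resetTracker is true) a fresh set of salient
      // points, ROIs and visual objects to start tracking from. If the
      // tracker is not being reset and currently has nothing to track,
      // the call is a no-op; otherwise track() is run on the stored state.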
00080 void SalientRegionTracker::input
00081 (Image<PixRGB<byte> > image, ImageSet<float> cmap, bool resetTracker,
00082  std::vector<Point2D<int> > points, std::vector<Rectangle> rects,
00083  std::vector<rutz::shared_ptr<VisualObject> > visualObjects)
00084 {
00085   itsTimer.reset();
00086   itsCurrCmap = cmap;
00087 
00088   // check whether the tracker is currently inactive;
00089   // if so, and we are not resetting it, there is nothing to do
00090   itsResetTracker = resetTracker;
00091 
00092   if(itsResetTracker == false && itsCurrTrackedPoints.size() == 0)
00093     return;
00094 
00095   // process the tracking task
00096   if(itsResetTracker)
00097     {
00098       itsOriginalInputImage       = image;
00099       itsCurrInputImage           = image;
00100       itsCurrTrackedPoints        = points;
00101       itsCurrTrackedVisualObjects = visualObjects;
00102 
00103       itsCurrTrackedROI = rects;
00104     }
00105 
00106   track();
00107 
00108   // print timer
00109   LINFO("Time: %6.3f ms", itsTimer.get()/1000.0);
00110 }
00111 
00112 // ######################################################################
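      // Forget all per-region tracker state: the scaled tracked points,
      // the template biases and their offsets, and the current points
      // and ROIs.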
00113 void SalientRegionTracker::clear()
00114 {
00115   itsPrevTrackedPointsScaled.clear();
00116   itsTrackerBias.clear();
00117   itsTrackerBiasOffset.clear();
00118   itsCurrTrackedPoints.clear();
00119   itsCurrTrackedROI.clear();
00120 }
00121 
00122 // ######################################################################
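      // Append the i-th tracked region of another tracker (its scaled
      // point, template bias, bias offset, current point and ROI) to
      // this tracker.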
00123 void SalientRegionTracker::move
00124 (nub::soft_ref<SalientRegionTracker> tracker2, uint i)
00125 {
00126   itsPrevTrackedPointsScaled.push_back
00127     (tracker2->getPrevTrackedPointsScaled(i));
00128   itsTrackerBias.push_back
00129     (tracker2->getTrackerBias(i));
00130   itsTrackerBiasOffset.push_back
00131     (tracker2->getTrackerBiasOffset(i));
00132   itsCurrTrackedPoints.push_back
00133     (tracker2->getCurrTrackedPoints(i));
00134   itsCurrTrackedROI.push_back
00135     (tracker2->getCurrTrackedROI(i));
00136 
00137   // FIXME: we may want to move this as well
00138   //itsCurrTrackedVisualObjects = visualObjects;
00139 }
00140 
00141 // ######################################################################
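      // One tracking step: remember the offset between each ROI's top-left
      // corner and its tracked point, update the points by template
      // matching on the conspicuity maps (trackCmaps), then rebuild each
      // ROI around the new point location, clipped to the image bounds.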
00142 void SalientRegionTracker::track()
00143 {
00144   std::vector<Point2D<int> > diffs;
00145   for(uint i = 0; i < itsCurrTrackedPoints.size(); i++)
00146     {
00147       Point2D<int> pt =
00148         itsCurrTrackedROI[i].topLeft() - itsCurrTrackedPoints[i];
00149       diffs.push_back(pt);
00150 
00151       LDEBUG("diff[%3d]: (%4d,%4d) - (%4d,%4d) = (%4d,%4d)", i,
00152              itsCurrTrackedROI[i].topLeft().i,
00153              itsCurrTrackedROI[i].topLeft().j,
00154              itsCurrTrackedPoints[i].i,
00155              itsCurrTrackedPoints[i].j,
00156              diffs[i].i, diffs[i].j);
00157     }
00158 
00159   trackCmaps();
00160 
00161   Dims imgDims = itsCurrInputImage.getDims();
00162   Rectangle imgRect(Point2D<int>(0,0), imgDims);
00163   for(uint i = 0; i <  itsCurrTrackedPoints.size(); i++)
00164     {
00165       Point2D<int> tl = diffs[i] + itsCurrTrackedPoints[i];
00166       Dims d = itsCurrTrackedROI[i].dims();
00167 
00168       LDEBUG("imgRect[%4d,%4d,%4d,%4d]",
00169              imgRect.topLeft().i, imgRect.topLeft().j,
00170              imgRect.bottomRight().i, imgRect.bottomRight().j);
00171 
00172       Rectangle newRect(tl, d);
00173       LDEBUG("newRect[%4d,%4d,%4d,%4d]",
00174              newRect.topLeft().i, newRect.topLeft().j,
00175              newRect.bottomRight().i, newRect.bottomRight().j);
00176 
00177       itsCurrTrackedROI[i] = imgRect.getOverlap(newRect);
00178 
00179       LINFO("Resulting ROI[%4d,%4d,%4d,%4d]",
00180             itsCurrTrackedROI[i].topLeft().i,
00181             itsCurrTrackedROI[i].topLeft().j,
00182             itsCurrTrackedROI[i].bottomRight().i,
00183             itsCurrTrackedROI[i].bottomRight().j);
00184     }
00185 
00186   //trackVisualObjects();
00187 }
00188 
00189 // ######################################################################
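      // SIFT-based verification of the tracked regions: build a
      // VisualObject for the current frame and match each stored
      // VisualObject against it. Currently not called from track(),
      // and most of its body is commented out.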
00190 void SalientRegionTracker::trackVisualObjects()
00191 {
00192   if(itsResetTracker) return;
00193 
00194   // create a visual object for the scene
00195   std::string sName("scene");
00196   std::string sfName = sName + std::string(".png");
00197   rutz::shared_ptr<VisualObject>
00198     scene(new VisualObject(sName, sfName, itsCurrInputImage));
00199 
00200   // match the input visual objects
00201   for(uint i = 0; i < itsCurrTrackedVisualObjects.size(); i++)
00202     {
00203       // check for match
00204       Timer tim(1000000);
00205       VisualObjectMatchAlgo voma(VOMA_SIMPLE);
00206       rutz::shared_ptr<VisualObjectMatch> matchRes
00207         (new VisualObjectMatch(scene, itsCurrTrackedVisualObjects[i], voma));
00208       uint64 t = tim.get();
00209 
00210       // let's prune the matches:
00211       uint orgSize = matchRes->size();
00212       tim.reset();
00213       uint np = matchRes->prune();
00214       uint64 t2 = tim.get();
00215 
00216       LINFO("Found %u matches (%s & %s) in %.3fms:"
00217             " pruned %u in %.3fms",
00218             orgSize, scene->getName().c_str(),
00219             itsCurrTrackedVisualObjects[i]->getName().c_str(),
00220             float(t) * 0.001F,
00221             np, float(t2) * 0.001F);
00222 
00223       // matching score
00224       float kpAvgDist    = matchRes->getKeypointAvgDist();
00225       float afAvgDist    = matchRes->getAffineAvgDist();
00226       float score        = matchRes->getScore();
00227       bool isSIFTaffine  = matchRes->checkSIFTaffine
00228         (M_PI/4,5.0F,0.25F);
00229       SIFTaffine siftAffine = matchRes->getSIFTaffine();
00230       LINFO("kpAvgDist = %.4f|affAvgDist = %.4f|"
00231             " score: %.4f|aff? %d",
00232             kpAvgDist, afAvgDist, score, isSIFTaffine);
00233 
00234       if (!isSIFTaffine)
00235         LINFO("### Affine is too weird -- BOGUS MATCH");
00236       else
00237         {
00238           // show our final affine transform:
00239           LINFO("[testX]  [ %- .3f %- .3f ] [refX]   [%- .3f]",
00240                 siftAffine.m1, siftAffine.m2, siftAffine.tx);
00241           LINFO("[testY]= [ %- .3f %- .3f ] [refY] + [%- .3f]",
00242                 siftAffine.m3, siftAffine.m4, siftAffine.ty);
00243         }
00244 
00245       bool isSIFTfit = (isSIFTaffine && (score > 2.5) &&
00246                         (matchRes->size() > 3));
00247       LINFO("OD isSIFTfit %d", isSIFTfit);
00248 
00249       // get an image showing the matches and the fused image
00250       // int wo = itsWin->getDims().w()/2;
00251       // int ho = itsWin->getDims().h()/2;
00252       // Image< PixRGB<byte> > mImg = matchRes->getMatchImage(1.0F);
00253       // Image< PixRGB<byte> > fImg = matchRes->getFusedImage(0.25F);
00254       // Image< PixRGB<byte> > tImg(2*wo,2*ho,ZEROS);
00255       // inplacePaste(tImg, mImg,   Point2D<int>(0,  0));
00256       // inplacePaste(tImg, fImg,   Point2D<int>(wo, 0));
00257       // itsWin->drawImage(tImg,0,0);
00258       // Raster::waitForKey();
00259 
00260       if(isSIFTfit)
00261         {
00262 
00263           // // get the location
00264           // Point2D<int> loffset = itsLandmarks[currSegNum][j]
00265           //   ->getLatestOffsetCoords();
00266           // Point2D<int> salpt1 = lobj->getSalPoint() + loffset;
00267           // Point2D<int> salpt2 = inputVO[i]->getSalPoint() +
00268           //   objOffset[i];
00269           // LINFO("loffset: %d %d", loffset.i, loffset.j);
00270 
00271           // get all the tracked locations of the corners of the ROI
00272           // -> get the min/max of the top/bottom/left/right coordinates
00273 
00274           // crop the window
00275 
00276 
00277           // -> make sure the window does not grow out of control:
00278           // always check whether the width/height changed too much from
00279           // the previous frame; it cannot simply keep increasing
00280 
00281 
00282 
00283           // // forward affine transform [A * ref -> tst ]
00284           // float u, v; siftAffine.transform
00285           //               (salpt1.i, salpt1.j, u, v);
00286           // float dist = salpt2.distance
00287           //   (Point2D<int>(int(u+0.5F), int(v+0.5F)));
00288           // LINFO("pos1: (%d,%d) -> (%f,%f) & "
00289           //       "pos2: (%d,%d): dist: %f",
00290           //       salpt1.i, salpt1.j, u, v,
00291           //       salpt2.i, salpt2.j, dist);
00292 
00293           // float sdist  = 1.0 - dist/imgD;
00294 
00295           // if(dist < 10.0F)
00296           //   {
00297           //     inDB[i][j] = indb;
00298           //     inTDB[i][j] = intdb;
00299           //     tIndex[i][j] = tindex;
00300           //     scores[i][j] = (sdist * sscore);
00301           //     siftscores[i][j] = (score);
00302           //     sdiffscores[i][j] = (sscore);
00303           //   }
00304         }
00305     }
00306 //
00307 }
00308 
00309 // ######################################################################
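      // Track every point on the conspicuity maps, which are stored at the
      // saliency-map scale (1/2^sml of the input). On a reset, grab a new
      // WINSIZE x WINSIZE template (bias) around each point; otherwise
      // template-match the stored bias to find the new location. Lost
      // points are marked as (-1,-1).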
00310 void SalientRegionTracker::trackCmaps()
00311 {
00312   int smscale = (int)(pow(2,sml));
00313 
00314   // reset tracker?
00315   if(itsResetTracker)
00316     {
00317       itsPrevTrackedPointsScaled.clear();
00318       itsTrackerBias.clear();
00319       itsTrackerBiasOffset.clear();
00320     }
00321 
00322   for(uint i = 0; i < itsCurrTrackedPoints.size(); i++)
00323     {
00324       // if we are resetting the tracker
00325       if(itsResetTracker)
00326         {
00327           if(!itsCurrTrackedPoints[i].isValid())
00328             LFATAL("invalid input tracked point[%d]", i);
00329 
00330           itsPrevTrackedPointsScaled.push_back
00331             (Point2D<int>(itsCurrTrackedPoints[i].i/smscale,
00332                           itsCurrTrackedPoints[i].j/smscale));
00333 
00334           Point2D<int> tempOffset;
00335           itsTrackerBias.push_back
00336             (setNewBias(itsPrevTrackedPointsScaled[i], tempOffset));
00337           itsTrackerBiasOffset.push_back(tempOffset);
00338         }
00339       // else we are tracking (pt still not lost)
00340       else if(itsPrevTrackedPointsScaled[i].isValid())
00341         {
00342           LINFO("tracking current point[%d]", i);
00343           itsPrevTrackedPointsScaled[i] = trackPoint
00344             (itsTrackerBias[i],
00345              itsTrackerBiasOffset[i],
00346              itsPrevTrackedPointsScaled[i]);
00347         }
00348       // else it's previously lost
00349       else { LINFO("lost current point[%d]", i); }
00350 
00351       if(itsPrevTrackedPointsScaled[i].isValid())
00352         itsCurrTrackedPoints[i] =
00353           Point2D<int>(itsPrevTrackedPointsScaled[i].i*smscale,
00354                        itsPrevTrackedPointsScaled[i].j*smscale);
00355       else  itsCurrTrackedPoints[i] = Point2D<int>(-1,-1);
00356       LINFO("current track[%d] result: [%d,%d] -> [%d,%d]", i,
00357             itsCurrTrackedPoints[i].i,
00358             itsCurrTrackedPoints[i].j,
00359             itsPrevTrackedPointsScaled[i].i,
00360             itsPrevTrackedPointsScaled[i].j);
00361     }
00362 }
00363 
00364 // ######################################################################
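      // Cut a WINSIZE x WINSIZE template out of each conspicuity map
      // around inTrackLoc. biasOffset records where the tracked point
      // sits inside that template: normally its center, but shifted when
      // the point is close to an image border.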
00365 ImageSet<float> SalientRegionTracker::setNewBias
00366 (Point2D<int> inTrackLoc, Point2D<int> &biasOffset)
00367 {
00368   int w = itsCurrCmap[0].getWidth();
00369   int h = itsCurrCmap[0].getHeight();
00370 
00371   ImageSet<float> bias(NUM_CHANNELS);
00372 
00373   // set bias offset
00374   if(inTrackLoc.i < (WINSIZE/2))
00375     biasOffset.i = inTrackLoc.i;
00376   else if(inTrackLoc.i > ((w - 1) - (WINSIZE/2)))
00377     biasOffset.i = WINSIZE - (w - inTrackLoc.i);
00378   else
00379     biasOffset.i = WINSIZE/2;
00380 
00381   if(inTrackLoc.j < (WINSIZE/2))
00382     biasOffset.j = inTrackLoc.j;
00383   else if(inTrackLoc.j > ((h - 1) - (WINSIZE/2)))
00384     biasOffset.j = WINSIZE - (h - inTrackLoc.j);
00385   else
00386     biasOffset.j = WINSIZE/2;
00387 
00388   LINFO("Set new bias[%d,%d]: offset: (%d, %d)",
00389         inTrackLoc.i, inTrackLoc.j, biasOffset.i, biasOffset.j);
00390 
00391   // get the features at the loc point
00392   for(int i = 0; i < NUM_CHANNELS; i++)
00393     {
00394       Point2D<int> upLeftsc(inTrackLoc.i - biasOffset.i,
00395                        inTrackLoc.j - biasOffset.j);
00396       Image<float> target = crop(itsCurrCmap[i], upLeftsc,
00397                                  Dims(WINSIZE,WINSIZE));
00398       bias[i] = target;
00399 
00400 //       LINFO("bias[%d]", i);
00401 //       int scale = (int)(pow(2,sml));
00402 //       Image<float> disp(2*w*scale, h*scale, ZEROS);
00403 //       Image<float> cmapDisp = zoomXY(itsCurrCmap[i], scale);
00404 //       Image<float> biasDisp = zoomXY(bias[i], scale);
00405 
00406 //       Point2D<int> sp(inTrackLoc.i*scale, inTrackLoc.j*scale);
00407 //       Point2D<int> upLeft(upLeftsc.i*scale, upLeftsc.j*scale);
00408 //       Rectangle br(upLeft, Dims(WINSIZE*scale, WINSIZE*scale));
00409 
00410 //       float mn, mx; getMinMax(cmapDisp,mn,mx);
00411 //       drawRect(cmapDisp, br, mx);
00412 //       drawDisk(cmapDisp, sp, 3, mn);
00413 
00414 //       inplacePaste(disp, cmapDisp, Point2D<int>(0,0));
00415 //       inplacePaste(disp, biasDisp, Point2D<int>(w*scale, 0) + upLeft);
00416 //       dispWin->drawImage(disp,0,0); Raster::waitForKey();
00417     }
00418   return bias;
00419 }
00420 
00421 // ######################################################################
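      // Locate a tracked point in the current frame: compute the biased
      // (squared-difference) map for the stored template, weight it by the
      // distance from the previous location so nearby candidates are
      // preferred, take the minimum, update the template, and return the
      // winning location (template top-left plus biasOffset).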
00422 Point2D<int> SalientRegionTracker::trackPoint
00423 (ImageSet<float> &bias, Point2D<int> biasOffset,
00424   Point2D<int> trackLoc)
00425 {
00426   int w = itsCurrCmap[0].getWidth();
00427   int h = itsCurrCmap[0].getHeight();
00428 //   int scale = (int)(pow(2,sml));
00429 //   Point2D<int> bOffset(scale*(WINSIZE/2), scale*(WINSIZE/2));
00430 //   Image<PixRGB<byte> > disp(4*w*scale, h*scale, ZEROS);
00431 
00432   // match templates
00433   Image<float> smap = getBiasedSMap(bias);
00434 //   Image<float> smapDisp = zoomXY(smap, scale);
00435 //   float mn, mx; getMinMax(smapDisp, mn, mx); LINFO("mn:%f mx:%f", mn,mx);
00436 //   inplaceNormalize(smapDisp, 0.0F, 255.0F);
00437 //   inplacePaste(disp, Image<PixRGB<byte> >(toRGB(smapDisp)), bOffset);
00438 //   Raster::WriteRGB(Image<PixRGB<byte> >(toRGB(smapDisp)), "smap.ppm");
00439 
00440   // weight each difference value by its distance from the previous tracked point (nearby locations are favored by the minimum below)
00441   int i = 0; float maxDist = sqrt(w*w + h*h); //LINFO("maxDist: %f", maxDist);
00442   int wsmap = smap.getWidth();
00443 //   int hsmap = smap.getHeight();
00444 //   Image<float> dmap(wsmap, hsmap, ZEROS);
00445 //   Image<float>::iterator itr2 = dmap.beginw();
00446   Point2D<int> prevLoc(trackLoc.i - biasOffset.i, trackLoc.j - biasOffset.j);
00447   for (Image<float>::iterator itr = smap.beginw(), stop = smap.endw();
00448        itr != stop; ++itr, i++)
00449     {
00450       int x = i % wsmap;
00451       int y = i / wsmap;
00452       float dist = (prevLoc.distance(Point2D<int>(x,y))+.1)/maxDist;
00453       *itr = *itr * dist;
00454 
00455 //       *itr2 = dist; ++itr2;
00456     }
00457 
00458 //   Image<float> smapDisp2 = zoomXY(smap, scale);
00459 //   inplaceNormalize(smapDisp2, 0.0F, 255.0F);
00460 //   inplacePaste(disp, Image<PixRGB<byte> >(toRGB(smapDisp2)),
00461 //                Point2D<int>(w*scale,0)+bOffset);
00462 //   Raster::WriteRGB(Image<PixRGB<byte> >(toRGB(smapDisp2)), "smap2.ppm");
00463 
00464   // get the location of the minimum value;
00465   // since the smap is shrunk by WINSIZE-1 in each dimension (valid-
00466   // correlation size), the winner is the top-left of the bias window
00467   float minval; Point2D<int> upLeft; findMin(smap, upLeft, minval);
00468 
00469 //   Image<float> smapDisp3 = zoomXY(smap, scale);
00470 //   Point2D<int> cwsc(upLeft.i*scale + scale/2, upLeft.j*scale + scale/2);
00471 //   inplaceNormalize(smapDisp3, 0.0F, 255.0F);
00472 //   Image<PixRGB<byte> > csmapDisp3(w*scale, h*scale, ZEROS);
00473 //   inplacePaste(csmapDisp3, Image<PixRGB<byte> >(toRGB(smapDisp3)), bOffset);
00474 //   drawDisk(csmapDisp3, cwsc + bOffset, 3, PixRGB<byte>(255, 0, 0));
00475 //   drawRect(csmapDisp3,
00476 //            Rectangle(upLeft*scale,
00477 //                      Dims((WINSIZE+1)*scale-1, (WINSIZE+1)*scale-1)),
00478 //            PixRGB<byte>(255,0,0));
00479 //   inplacePaste(disp, csmapDisp3, Point2D<int>(2*w*scale,0));
00480 //   Raster::WriteRGB(csmapDisp3, "csmap.ppm");
00481 
00482 //   Image<float> dmapDisp = zoomXY(dmap, scale);
00483 //   inplaceNormalize(dmapDisp, 0.0F, 255.0F);
00484 //   inplacePaste(disp, Image<PixRGB<byte> >(toRGB(dmapDisp)),
00485 //                Point2D<int>(3*w*scale,0)+bOffset);
00486 
00487 //   dispWin->drawImage(disp, 0, 0);
00488 //   Raster::waitForKey();
00489 
00490   // update the template
00491   updateTemplate(upLeft, bias);
00492 
00493   // get new tracking point
00494   Point2D<int> newTrackLoc = upLeft + biasOffset;
00495   return newTrackLoc;
00496 }
00497 
00498 // ######################################################################
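      // Blend the current appearance into the stored template (90% old,
      // 10% new). If the summed per-channel distance between the old and
      // blended templates exceeds templThresh, keep the old template
      // unchanged.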
00499 void SalientRegionTracker::updateTemplate
00500 ( Point2D<int> upLeft, ImageSet<float> &bias)
00501 {
00502   double dist = 0;
00503   ImageSet<float> newBias(NUM_CHANNELS);
00504 
00505   for(int i = 0; i < NUM_CHANNELS; i++)
00506     {
00507       Image<float> target =
00508         crop(itsCurrCmap[i], upLeft, Dims(WINSIZE,WINSIZE));
00509 
00510       // take more of the old template but still incorporate the new template
00511       newBias[i] = bias[i]*0.9 + target*(1 - 0.9);
00512       dist += distance(bias[i], newBias[i]);
00513     }
00514 
00515   // if the difference is too big, then do not update the template
00516   LINFO("Distance %f (thresh: %f)", dist, templThresh);
00517   if (dist < templThresh)
00518     {
00519       bias = newBias;
00520     }
00521   else LINFO("not adding bias");
00522 
00523   // did we lose the tracking completely?
00524   //float winDist = lastWinner.distance(trackLoc);
00525 }
00526 
00527 // ######################################################################
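      // Template-match the stored bias against each conspicuity map using
      // OpenCV (CV_TM_SQDIFF, so lower values mean better matches) and sum
      // the per-channel difference maps. Requires OpenCV at compile time.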
00528 Image<float> SalientRegionTracker::getBiasedSMap(ImageSet<float> bias)
00529 {
00530 #ifndef HAVE_OPENCV
00531   LFATAL("OpenCV must be installed to use this function");
00532   return Image<float>();
00533 #else
00534 
00535   int w = itsCurrCmap[0].getWidth();
00536   int h = itsCurrCmap[0].getHeight();
00537   //   int scale = (int)(pow(2,sml));
00538 
00539   Image<float> biasedCMap(w - WINSIZE + 1, h - WINSIZE + 1, ZEROS);
00540   Image<float> res(w - WINSIZE + 1, h - WINSIZE + 1, ZEROS);
00541 
00542   // match each channel's template and accumulate the difference maps
00543   for(uint i = 0; i < NUM_CHANNELS; i++)
00544     {
00545       cvMatchTemplate(img2ipl(itsCurrCmap[i]), img2ipl(bias[i]),
00546                       img2ipl(biasedCMap), CV_TM_SQDIFF); //CV_TM_CCOEFF);
00547 
00548       // biasedCMap = correlation(cmap[i], bias[i]);
00549 
00550 //       LINFO("bias[%d]", i);
00551 //       Image<float> disp(2*w*scale, h*scale, ZEROS);
00552 //       Image<float> cmapDisp = zoomXY(cmap[i], scale);
00553 //       Image<float> biasDisp = zoomXY(biasedCMap, scale);
00554 //       inplacePaste(disp, cmapDisp, Point2D<int>(0,0));
00555 //       inplacePaste(disp, biasDisp, Point2D<int>(w*scale,0));
00556 //       dispWin->drawImage(disp,0,0);
00557 //       Raster::waitForKey();
00558 
00559       // accumulate this channel's difference map into the result
00560       res += biasedCMap;
00561     }
00562 
00563   return res;
00564 
00565 #endif
00566 }
00567 
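      // A minimal usage sketch, not part of the original file; the
      // saliency front-end that produces the image, conspicuity maps,
      // salient points, ROIs and VisualObjects is assumed to exist
      // elsewhere, and the component names below are made up:
      //
      //   ModelManager mgr("SalientRegionTracker test");
      //   nub::ref<SalientRegionTracker> srt
      //     (new SalientRegionTracker(mgr, "SRTracker", "SRTracker"));
      //   mgr.addSubComponent(srt);
      //   mgr.start();
      //
      //   // first frame: reset with freshly detected salient regions
      //   srt->input(img, cmaps, true,  points, rects, visualObjects);
      //   // later frames: keep tracking the same regions
      //   // (the region arguments are ignored when not resetting)
      //   srt->input(img, cmaps, false, points, rects, visualObjects);
      //
      //   mgr.stop();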
00568 // ######################################################################
00569 /* So things look consistent in everyone's emacs... */
00570 /* Local Variables: */
00571 /* indent-tabs-mode: nil */
00572 /* End: */