LogLikelihoodClassifier.C

00001 /*!@file Learn/LogLikelihoodClassifier.C Log Likelihood Classifier for Histograms module */
00002 // //////////////////////////////////////////////////////////////////// //
00003 // The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
00004 // University of Southern California (USC) and the iLab at USC.         //
00005 // See http://iLab.usc.edu for information about this project.          //
00006 // //////////////////////////////////////////////////////////////////// //
00007 // Major portions of the iLab Neuromorphic Vision Toolkit are protected //
00008 // under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
00009 // in Visual Environments, and Applications'' by Christof Koch and      //
00010 // Laurent Itti, California Institute of Technology, 2001 (patent       //
00011 // pending; application number 09/912,225 filed July 23, 2001; see      //
00012 // http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
00013 // //////////////////////////////////////////////////////////////////// //
00014 // This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
00015 //                                                                      //
00016 // The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
00017 // redistribute it and/or modify it under the terms of the GNU General  //
00018 // Public License as published by the Free Software Foundation; either  //
00019 // version 2 of the License, or (at your option) any later version.     //
00020 //                                                                      //
00021 // The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
00022 // that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
00023 // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
00024 // PURPOSE.  See the GNU General Public License for more details.       //
00025 //                                                                      //
00026 // You should have received a copy of the GNU General Public License    //
00027 // along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
00028 // to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
00029 // Boston, MA 02111-1307 USA.                                           //
00030 // //////////////////////////////////////////////////////////////////// //
00031 //
00032 // Primary maintainer for this file: Dan Parks <danielfp@usc.edu>
00033 // $HeadURL$
00034 // $Id$
00035 //
00036 
00037 #include <fstream>
00038 #include <iostream>
00039 #include <iomanip>
00040 #include <string>
00041 #include <cstdlib>
00042 #include <limits>
00043 #include <cmath>
00044 #include <map>
00045 #include <numeric>
00046 #include "Util/Assert.H"
00047 #include "Util/log.H"
00048 #include "LogLikelihoodClassifier.H"
00049 
00050 
00051 LogLikelihoodClassifier::LogLikelihoodClassifier(int k)
00052 {
00053   itsK=k;
00054 }
00055 
00056 float LogLikelihoodClassifier::calcLogLikelihood(const std::vector<float>& sample, const std::vector<float>& model)
00057 {
00058   float ll=0;
00059   ASSERT(sample.size()==model.size());
00060   for(uint i=0;i<sample.size();i++)
00061     {
00062       ASSERT(model[i]!=0);
00063       // Can't take log of zero
00064       ll+=sample[i]*log(model[i]);
00065     }
00066   ASSERT(!isnan(ll));
00067   return ll;
00068 }
00069 
00070 void LogLikelihoodClassifier::addModel(std::vector<float> hist, int id)
00071 {
00072   if(itsHistLength <= 0)
00073     itsHistLength = hist.size();
00074   ASSERT(hist.size() == itsHistLength);
00075   // Initialize if no key is present
00076   if(itsModels.find(id) == itsModels.end())
00077     itsModels[id] = std::vector<std::vector<float> >();
00078   itsModels[id].push_back(hist);
00079 }
00080 
00081 void LogLikelihoodClassifier::setModels(MapModelVector models)
00082 {
00083   itsModels = models;
00084 }
00085 
00086 LogLikelihoodClassifier::MapModelVector LogLikelihoodClassifier::getModels()
00087 {
00088   return itsModels;
00089 }
00090 
00091 //! Predict using classifier
00092 int LogLikelihoodClassifier::predict(const std::vector<float>& hist)
00093 {
00094   std::map<int,double>pdf = predictPDF(hist);
00095   std::map<int,double>::const_iterator piter;
00096   double maxVal=-std::numeric_limits<double>::max();
00097   int maxClass = -1;
00098   for(piter=pdf.begin();piter!=pdf.end();piter++)
00099     {
00100       //LINFO("Comparing curVal[%d] %f to currentMax:%f",piter->first,piter->second,maxVal);
00101       if(piter->second > maxVal)
00102         {
00103           maxClass=piter->first;
00104           maxVal=piter->second;
00105         }
00106     }
00107   return maxClass;
00108 }
00109 
00110 std::map<int,double> LogLikelihoodClassifier::predictPDF(const std::vector<float>& hist)
00111 {
00112   // This is a "hybrid" operation.  The K nearest exemplars are determined, in addition to the best performing exemplar for each class
00113   // This will leave us with a total number of stored likelihoods between NumClasses (if the best K are all among them) and NumClasses+K-1 (if all the best K sit in one class)
00114   // We add all of these likelihoods up per class and then normalize to get a PDF
00115   
00116   // Store the maximum log likelihoods and their corresponding locations with the nearest neighbors
00117   std::list<float> maxLL = std::list<float>(itsK,-std::numeric_limits<float>::max());
00118   std::list<int> maxLLPos = std::list<int>(itsK,0);
00119   MapModelVector::const_iterator mmitr = itsModels.begin(), mmstop = itsModels.end();
00120   std::map<int,std::list<float> > maxLLClass;
00121 
00122   for(;mmitr!=mmstop;mmitr++)
00123     {
00124       maxLLClass[mmitr->first] = std::list<float>(itsK,-std::numeric_limits<double>::max());
00125       for(uint i=0;i<mmitr->second.size();i++)
00126         {
00127           // Calculate log likelihood on histogram
00128           float ll = calcLogLikelihood(mmitr->second[i],hist);
00129           bool found=false;
00130           // Check if this is one of the best K in class
00131           std::list<float>::iterator maxClassIter = maxLLClass[mmitr->first].begin(), maxClassStop=maxLLClass[mmitr->first].end();
00132           
00133           for(;maxClassIter!=maxClassStop;maxClassIter++)
00134             {
00135               if(*maxClassIter < ll)
00136                 {
00137                   maxLLClass[mmitr->first].insert(maxClassIter,ll);
00138                   found=true;
00139                   break;
00140                 }
00141             }
00142           // Only check for overall max if this exemplar is in the best K exemplars in its own class
00143           if(found)
00144             {
00145               ASSERT(maxLL.size()==maxLLPos.size());
00146               std::list<float>::iterator maxLLIter = maxLL.begin(), maxLLStop=maxLL.end();        
00147               std::list<int>::iterator maxLLPosIter = maxLLPos.begin();//, maxLLPosStop=maxLLPos.end();
00148               for(;maxLLIter!=maxLLStop;maxLLIter++,maxLLPosIter++)
00149                 {
00150                   if(*maxLLIter < ll)
00151                     {
00152                       maxLL.insert(maxLLIter,ll);
00153                       maxLLPos.insert(maxLLPosIter,mmitr->first);
00154                       break;
00155                     }
00156                 }
00157             }
00158           // Truncate best fits to be number of nearest neighbors in size
00159           if(found)
00160             {
00161               if(maxLLClass[mmitr->first].size()>itsK)
00162                 {
00163                   maxLLClass[mmitr->first].resize(itsK);
00164                 }
00165               if(maxLL.size()>itsK)
00166                 {
00167                   maxLL.resize(itsK);
00168                   maxLLPos.resize(itsK);
00169                 }
00170             }
00171         }
00172     }
00173   // Count the number of elements per class in the maxLL
00174   std::map<int,int> numEntries;
00175   for(std::list<int>::const_iterator lit=maxLLPos.begin();lit!=maxLLPos.end();lit++)
00176     {
00177       numEntries[*lit]++;
00178     }
00179   // Now resize each class list based on the number of entries
00180   std::map<int,std::list<float> >::iterator maxClassIter = maxLLClass.begin(), maxClassStop=maxLLClass.end();
00181   for(;maxClassIter!=maxClassStop;maxClassIter++)
00182     {
00183       // Each class gets a guaranteed one entry
00184       maxClassIter->second.resize(std::max(numEntries[maxClassIter->first],1));
00185     }
00186   std::map<int,double> pdf;
00187   nearestNeighborVotePDF(maxLLClass,pdf);
00188   return pdf;
00189 }
00190 
00191 void LogLikelihoodClassifier::nearestNeighborVotePDF(const std::map<int,std::list<float> >& logLikelihood, std::map<int,double>& pdf)
00192 {
00193   std::map<int,std::list<float> >::const_iterator llIter = logLikelihood.begin(), llStop = logLikelihood.end();
00194   float nrm=0;
00195   int numClasses=0;
00196   float minVal = std::numeric_limits<float>::max();
00197   float offset=0;
00198   for(;llIter!=llStop;llIter++)
00199     {
00200       float val=std::accumulate(llIter->second.begin(),llIter->second.end(),0);
00201       pdf[llIter->first] = val;
00202       nrm+=val;
00203       if(val<minVal)
00204         minVal=val;
00205       numClasses++;
00206     }
00207   ASSERT(numClasses>0 && nrm != 0);
00208 
00209   // Subtract off minimum and normalize to get a pdf
00210   std::map<int,double>::iterator piter;
00211   // Now take off minval for each class, if that doesn't cause a discontinuity
00212   if(fabs(nrm-minVal*numClasses) > 0.0001)
00213     {
00214       nrm-=minVal*numClasses;
00215       offset=minVal;
00216     }
00217   for(piter=pdf.begin();piter!=pdf.end();piter++)
00218     {
00219       piter->second=(piter->second-offset)/nrm;
00220       ASSERT(!isnan(piter->second));
00221     }
00222 }
00223 
00224 
00225 
00226 //! Get number of models
00227 uint LogLikelihoodClassifier::getNumModels()
00228 {
00229   return itsModels.size();
00230 }
00231 
00232 
00233 
00234 
Generated on Sun May 8 08:40:58 2011 for iLab Neuromorphic Vision Toolkit by  doxygen 1.6.3