SVMClassifier.C

/*!@file Learn/SVMClassifier.C Support Vector Machine Classifier module */

// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
// University of Southern California (USC) and the iLab at USC.         //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: Laurent Itti <itti@usc.edu>
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Learn/SVMClassifier.C $
// $Id: SVMClassifier.C 14581 2011-03-08 07:18:09Z dparks $
//

#include <fstream>
#include <iostream>
#include <iomanip>
#include <string>
#include <cstdlib>
#include <map>

#include "svm.h"
#include "SVMClassifier.H"
#include "Component/ModelComponent.H"
#include "Component/ModelParam.H"
#include "Component/OptionManager.H"


SVMClassifier::SVMClassifier(float gamma, int C)
{
  itsSVMModel = NULL;

  // Default training parameters: C-SVC with an RBF kernel and
  // probability estimates enabled
  itsSVMParams.svm_type = C_SVC;
  itsSVMParams.kernel_type = RBF;
  itsSVMParams.degree = 3;
  itsSVMParams.gamma = gamma;        // libsvm's default would be 1/k, with k the number of features
  itsSVMParams.coef0 = 0;
  itsSVMParams.nu = 0.5;
  itsSVMParams.cache_size = 100;
  itsSVMParams.C = C;
  itsSVMParams.eps = 1e-3;
  itsSVMParams.p = 0.1;
  itsSVMParams.shrinking = 1;
  itsSVMParams.probability = 1;
  itsSVMParams.nr_weight = 0;
  itsSVMParams.weight_label = NULL;
  itsSVMParams.weight = NULL;
  itsSVMRangeEnabled = false;
}
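
// A minimal usage sketch (illustrative only; the gamma/C values, file name
// and variable names are hypothetical, not taken from this file):
//
//   SVMClassifier svm(0.125f /* gamma */, 1 /* C */);
//   std::vector<std::vector<float> > samples;  // one feature vector per sample
//   std::vector<float> labels;                 // numeric class id per sample
//   /* ... fill samples and labels ... */
//   svm.train(samples, labels);
//   svm.writeModel("svm.model");
//
//   double prob = 0.0;
//   std::vector<float> query = samples[0];
//   double predicted = svm.predict(query, &prob);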

SVMClassifier::~SVMClassifier()
{
}


void SVMClassifier::readModel(std::string modelFileName)
{
  itsSVMModel = svm_load_model(modelFileName.c_str());
  if(itsSVMModel == NULL)
    LFATAL("Could not load SVM model file (%s)", modelFileName.c_str());
}

void SVMClassifier::readRange(std::string rangeFileName)
{
  itsSVMRangeEnabled = true;
  FILE *frange;
  int idx, c;
  double fmin, fmax;
  // Grow the range vectors in blocks of this many entries while parsing
  int block_size = 1024;
  int cur_size = block_size;
  // Largest feature index seen so far
  int largest_index = 0;
  itsSVMFeatureRangeMax.clear();
  itsSVMFeatureRangeMin.clear();
  itsSVMFeatureRangeMax.resize(cur_size);
  itsSVMFeatureRangeMin.resize(cur_size);
  frange = fopen(rangeFileName.c_str(),"r");
  if(frange == NULL)
    {
      LFATAL("Unable to open SVM range file (%s)", rangeFileName.c_str());
    }
  if((c = fgetc(frange)) == 'y')
    {
      LFATAL("Y scaling is not implemented");
    }
  else
    ungetc(c, frange);

  if (fgetc(frange) == 'x') {
    if(fscanf(frange, "%lf %lf\n", &itsSVMFeatureRangeLower, &itsSVMFeatureRangeUpper) != 2)
      LFATAL("Failed to load scaling bounds from: %s", rangeFileName.c_str());
    //printf("%f:%f\n",itsSVMFeatureRangeLower,itsSVMFeatureRangeUpper);
    while(fscanf(frange,"%d %lf %lf\n",&idx,&fmin,&fmax)==3)
      {
        //printf("%d L:%f\tH:%f....",idx,fmin,fmax);
        if(largest_index < idx) largest_index = idx;
        if(idx >= cur_size-1)
          {
            while(idx >= cur_size-1)
              cur_size += block_size;
            itsSVMFeatureRangeMax.resize(cur_size);
            itsSVMFeatureRangeMin.resize(cur_size);
          }
        itsSVMFeatureRangeMin[idx] = fmin;
        itsSVMFeatureRangeMax[idx] = fmax;
      }
  }
  itsSVMFeatureRangeMin.resize(largest_index+1);
  itsSVMFeatureRangeMax.resize(largest_index+1);
  fclose(frange);
  // for(int i=1;i<itsSVMFeatureRangeMin.size();i++)
  //   {
  //     printf("%d L:%f\tH:%f\n",i,itsSVMFeatureRangeMin[i],itsSVMFeatureRangeMax[i]);
  //   }
}
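
// The range file parsed above follows libsvm's svm-scale format (e.g. the
// file written by "svm-scale -s <rangefile>"); the concrete values below are
// purely illustrative:
//
//   x
//   -1 1
//   1 0.000000 255.000000
//   2 -3.500000 12.250000
//
// The first line tags x-scaling, the second gives the target [lower, upper]
// interval, and each following line gives "<feature index> <min> <max>" for
// one (1-based) feature index.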


void SVMClassifier::writeModel(std::string modelFileName)
{
  if(itsSVMModel == NULL)
    LFATAL("No SVM model to save; run train() or readModel() first");
  svm_save_model(modelFileName.c_str(),itsSVMModel);
}

float SVMClassifier::rescaleValue(float value, unsigned int index)
{
  if(itsSVMRangeEnabled)
    {
      // Map the raw value into [lower, upper] using the per-feature
      // [min, max] range loaded by readRange(); degenerate features
      // (min == max) collapse to the lower bound
      if(itsSVMFeatureRangeMax[index] == itsSVMFeatureRangeMin[index])
        value = itsSVMFeatureRangeMax[index];
      if(value == itsSVMFeatureRangeMin[index])
        value = itsSVMFeatureRangeLower;
      else if(value == itsSVMFeatureRangeMax[index])
        value = itsSVMFeatureRangeUpper;
      else
        value = itsSVMFeatureRangeLower + (itsSVMFeatureRangeUpper-itsSVMFeatureRangeLower) *
          (value-itsSVMFeatureRangeMin[index])/
          (itsSVMFeatureRangeMax[index]-itsSVMFeatureRangeMin[index]);
    }
  return value;
}
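
// For example, with a range entry of min = 0, max = 255 and scaling bounds
// lower = -1, upper = 1, a raw value of 127.5 maps to
//   -1 + (1 - (-1)) * (127.5 - 0) / (255 - 0) = 0
// (the numbers here are purely illustrative).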

double SVMClassifier::predict(std::vector<float> &feature, double *probability)
{
  unsigned int ind=0;
  svm_node *node = new svm_node[feature.size()+1]; // One extra to signal end of list
  for(ind=0;ind<feature.size();ind++){
      node[ind].index = ind+1;
      node[ind].value = rescaleValue(feature[ind],ind+1);
      //printf("%f,%f ",feature[ind],node[ind].value);
  }
  // Set the last to -1 to indicate the end of the list
  node[ind].index = -1;
  node[ind].value = -1;
  int label = _predict(node,feature.size(),probability);
  delete [] node;
  return label;
}

double SVMClassifier::predict(float * &feature, unsigned int fdim, double *probability)
{
  unsigned int ind=0;
  svm_node *node = new svm_node[fdim+1]; // One extra to signal end of list
  for(ind=0;ind<fdim;ind++){
      node[ind].index = ind+1;
      node[ind].value = rescaleValue(feature[ind],ind+1);
      //printf("%f,%f ",feature[ind],node[ind].value);
  }
  // Set the last to -1 to indicate the end of the list
  node[ind].index = -1;
  node[ind].value = -1;
  int label = _predict(node,fdim,probability);
  delete [] node;
  return label;
}

double SVMClassifier::predict(float **&feature, unsigned int fdim1, unsigned int fdim2, double *probability)
{
  unsigned int ind = 0;
  svm_node *node = new svm_node[fdim1*fdim2+1]; // One extra to signal end of list
  for(unsigned int i=0;i<fdim1;i++){
    for(unsigned int j=0;j<fdim2;j++){
      node[ind].index = ind+1;
      node[ind].value = rescaleValue(feature[i][j],ind+1);
      //printf("%f,%f ",feature[i][j],node[ind].value);
      ind++;
    }
  }
  // Set the last to -1 to indicate the end of the list
  node[ind].index = -1;
  node[ind].value = -1;
  int label= _predict(node,fdim1*fdim2,probability);
  delete [] node;
  return label;
}

double SVMClassifier::_predict(struct svm_node *node, unsigned int fdim, double * probability)
{
  std::map<int, double> pdf = predictPDF(node);
  return _getBestLabel(pdf,probability);
}


std::map<int,double> SVMClassifier::predictPDF(std::vector<float> &feature)
{
  unsigned int ind=0;
  svm_node *node = new svm_node[feature.size()+1]; // One extra to signal end of list
  for(ind=0;ind<feature.size();ind++){
      node[ind].index = ind+1;
      node[ind].value = rescaleValue(feature[ind],ind+1);
      //printf("%f,%f ",feature[ind],node[ind].value);
  }
  // Set the last to -1 to indicate the end of the list
  node[ind].index = -1;
  node[ind].value = -1;
  std::map<int,double> pdf= predictPDF(node);
  delete [] node;
  return pdf;
}



std::map<int,double> SVMClassifier::predictPDF(const svm_node* dataPointNodes)
{
  std::map<int,double> pdf;
  int numberOfLabels = svm_get_nr_class(itsSVMModel);
  int *labels = new int[numberOfLabels];
  svm_get_labels(itsSVMModel,labels);
  double *probEst = new double[numberOfLabels];
  svm_predict_probability(itsSVMModel,dataPointNodes,probEst);
  // svm_predict_probability() fills probEst in the order of the model's
  // labels, so pair each label with its probability estimate
  for(int i=0;i<numberOfLabels;i++)
    {
      pdf[labels[i]] = probEst[i];
    }
  delete [] probEst;
  delete [] labels;
  return pdf;
}
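
// Illustrative sketch of consuming the per-class probability map returned
// above ("classifier" and "features" are hypothetical names, not from this
// file):
//
//   std::map<int,double> pdf = classifier.predictPDF(features);
//   for (std::map<int,double>::const_iterator it = pdf.begin(); it != pdf.end(); ++it)
//     LINFO("class %d: p = %f", it->first, it->second);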


double SVMClassifier::predict(Image<double> dataPoint, double *probability)
{
  if(!itsSVMModel)
  {
    LERROR("Model not created. Run SVMClassifier::train before running predict");
    return -1;
  }

  //Make sure that the data point is a column vector for now...
  //This can be generalized later
  ASSERT(dataPoint.getWidth() == 1);

  //Construct the svm nodes to predict; feature indices are 1-based, as in
  //the other predict() overloads and in the range file
  svm_node dataPointNodes[dataPoint.getHeight()+1];
  for(int dimIdx=0; dimIdx<dataPoint.getHeight(); dimIdx++)
  {
    svm_node tmpNode;
    tmpNode.index = dimIdx+1;
    tmpNode.value = rescaleValue(dataPoint.getVal(0, dimIdx),dimIdx+1);
    dataPointNodes[dimIdx] = tmpNode;
  }
  svm_node endNode;
  endNode.index = -1;
  endNode.value = -1;
  dataPointNodes[dataPoint.getHeight()] = endNode;

  std::map<int, double> pdf = predictPDF(dataPointNodes);
  return _getBestLabel(pdf,probability);
}
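
// Illustrative sketch of building the column-vector data point expected by
// the Image-based predict() above (assumes the toolkit's
// Image<double>(width, height, init) constructor and Image::setVal(x, y, v)
// accessor; "classifier" and "features" are hypothetical names):
//
//   std::vector<double> features; /* one value per feature dimension */
//   Image<double> dataPoint(1, features.size(), NO_INIT);
//   for (uint d = 0; d < features.size(); ++d)
//     dataPoint.setVal(0, d, features[d]);
//   double prob = 0.0;
//   double label = classifier.predict(dataPoint, &prob);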

int SVMClassifier::_getBestLabel(std::map<int,double> pdf, double *probability)
{
  double maxProb = -1;
  int bestLabel = 0;
  for(std::map<int, double>::iterator pdfIt = pdf.begin(); pdfIt != pdf.end(); ++pdfIt)
    {
      int label = pdfIt->first;
      double prob = pdfIt->second;
      if(maxProb < prob)
        {
          bestLabel = label;
          maxProb = prob;
        }
    }
  if(probability)
    *probability = maxProb;
  return bestLabel;
}

void SVMClassifier::train(std::string outputFileName, int id, std::vector<float> &feature)
{
  std::ofstream outfile;
  outfile.open(outputFileName.c_str(),std::ios::out | std::ios::app);
  if (outfile.is_open()) {
    outfile << id << " ";
    for(unsigned int i=0;i<feature.size();i++) {
      outfile << std::setiosflags(std::ios::fixed) << std::setprecision(4) <<
        (i+1) << ":" << feature[i] << " ";
    }
    outfile << std::endl;
    outfile.close();
  }
  else {
    LFATAL("Could not open output file");
  }
}
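
// The file-based train() overloads (this one and the two that follow) append
// one sample per call in libsvm's sparse training format, i.e. lines of the
// form "<label> <index>:<value> <index>:<value> ...". For instance (values
// are purely illustrative):
//
//   3 1:0.4125 2:0.0000 3:12.5000
//   7 1:0.3310 2:1.0000 3:9.2500
//
// Such a file can then be fed to libsvm's command-line svm-scale and
// svm-train tools to produce the range and model files loaded by readRange()
// and readModel().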


void SVMClassifier::train(std::string outputFileName, int id, float *&feature, unsigned int fdim)
{
  std::ofstream outfile;
  outfile.open(outputFileName.c_str(),std::ios::out | std::ios::app);
  if (outfile.is_open()) {
    outfile << id << " ";
    for(unsigned int i=0;i<fdim;i++) {
      outfile << std::setiosflags(std::ios::fixed) << std::setprecision(4) <<
        (i+1) << ":" << feature[i] << " ";
    }
    outfile << std::endl;
    outfile.close();
  }
  else {
    LFATAL("Could not open output file");
  }
}

void SVMClassifier::train(std::string outputFileName, int id, float **&feature, unsigned int fdim1, unsigned int fdim2)
{
  std::ofstream outfile;
  outfile.open(outputFileName.c_str(),std::ios::out | std::ios::app);
  if (outfile.is_open()) {
    outfile << id << " ";
    for(unsigned int i=0;i<fdim1;i++) {
      for(unsigned int j=0;j<fdim2;j++) {
        outfile << std::setiosflags(std::ios::fixed) << std::setprecision(4) <<
          (i*fdim2+j+1) << ":" << feature[i][j] << " ";
      }
    }
    outfile << std::endl;
    outfile.close();
  }
  else {
    LFATAL("Could not open output file");
  }
}

void SVMClassifier::train(Image<double> trainingData, std::vector<double> dataClasses)
{
  ASSERT((uint)trainingData.getWidth() == dataClasses.size());

  //Setup the svm training problem
  svm_problem trainingProblem;

  //Tell the problem how many data points we have
  trainingProblem.l = dataClasses.size();

  //Copy the data classes into the training problem
  trainingProblem.y = new double[dataClasses.size()];
  std::copy(dataClasses.begin(), dataClasses.end(), trainingProblem.y);

  //Fill in the training data by creating a matrix of svm nodes;
  //each image column is one data point, each row one feature dimension
  trainingProblem.x = new svm_node*[trainingData.getWidth()];
  for(int ptIdx=0; ptIdx < trainingData.getWidth(); ptIdx++)
  {
    //Allocate the nodes for this data point
    trainingProblem.x[ptIdx] = new svm_node[trainingData.getHeight()+1];
    for(int dimIdx=0; dimIdx<trainingData.getHeight(); dimIdx++)
    {
      svm_node tmpNode;
      tmpNode.index = dimIdx+1;
      tmpNode.value = trainingData.getVal(ptIdx, dimIdx);

      trainingProblem.x[ptIdx][dimIdx] = tmpNode;
    }
    //Create the end-node so that libsvm knows that this data point entry is over
    svm_node endNode;
    endNode.index = -1;
    endNode.value = -1;
    trainingProblem.x[ptIdx][trainingData.getHeight()] = endNode;
  }
  _train(trainingProblem);
}


void SVMClassifier::train(std::vector<std::vector<float> > trainingData, std::vector<float> dataClasses)
{
  ASSERT(trainingData.size() == dataClasses.size());

  //Setup the svm training problem
  svm_problem trainingProblem;

  //Tell the problem how many data points we have
  trainingProblem.l = dataClasses.size();

  //Copy the data classes into the training problem
  trainingProblem.y = new double[dataClasses.size()];
  std::copy(dataClasses.begin(), dataClasses.end(), trainingProblem.y);

  //Fill in the training data by creating a matrix of svm nodes
  trainingProblem.x = new svm_node*[trainingData.size()];
  for(uint i=0; i < trainingData.size(); i++)
  {
    //Allocate the nodes for this data point
    trainingProblem.x[i] = new svm_node[trainingData[i].size()+1];
    for(uint dimIdx=0; dimIdx<trainingData[i].size(); dimIdx++)
    {
      svm_node tmpNode;
      tmpNode.index = dimIdx+1;
      tmpNode.value = trainingData[i][dimIdx];
      trainingProblem.x[i][dimIdx] = tmpNode;
    }
    //Create the end-node so that libsvm knows that this data point entry is over
    svm_node endNode;
    endNode.index = -1;
    endNode.value = -1;
    trainingProblem.x[i][trainingData[i].size()] = endNode;
  }
  _train(trainingProblem);
}


void SVMClassifier::_train(svm_problem& trainingProblem)
{
  // Make sure that our parameters and training problem are sane;
  // svm_check_parameter() returns NULL if everything checks out
  const char* check = svm_check_parameter(&trainingProblem, &itsSVMParams);
  if (check != NULL)
    LFATAL("SVM parameter check failed: %s", check);

  itsSVMModel = svm_train(&trainingProblem, &itsSVMParams);
}
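
// Note: libsvm's svm_train() does not deep-copy the support vectors; the
// returned model keeps pointers into the training problem's node arrays, so
// the arrays allocated by the in-memory train() overloads above should not
// be freed while itsSVMModel is still in use (or before the model has been
// saved with writeModel() and reloaded with readModel()).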