00001 /*!@file Gist/train-FFN.C train a multilayer feed-forward network
00002          with backpropagation                                           */
00003 // //////////////////////////////////////////////////////////////////// //
00004 // The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
00005 // University of Southern California (USC) and the iLab at USC.         //
00006 // See http://iLab.usc.edu for information about this project.          //
00007 // //////////////////////////////////////////////////////////////////// //
00008 // Major portions of the iLab Neuromorphic Vision Toolkit are protected //
00009 // under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
00010 // in Visual Environments, and Applications'' by Christof Koch and      //
00011 // Laurent Itti, California Institute of Technology, 2001 (patent       //
00012 // pending; application number 09/912,225 filed July 23, 2001; see      //
00013 // http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
00014 // //////////////////////////////////////////////////////////////////// //
00015 // This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
00016 //                                                                      //
00017 // The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
00018 // redistribute it and/or modify it under the terms of the GNU General  //
00019 // Public License as published by the Free Software Foundation; either  //
00020 // version 2 of the License, or (at your option) any later version.     //
00021 //                                                                      //
00022 // The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
00023 // that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
00024 // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
00025 // PURPOSE.  See the GNU General Public License for more details.       //
00026 //                                                                      //
00027 // You should have received a copy of the GNU General Public License    //
00028 // along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
00029 // to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
00030 // Boston, MA 02111-1307 USA.                                           //
00031 // //////////////////////////////////////////////////////////////////// //
00032 //
00033 // Primary maintainer for this file: Christian Siagian <siagian@usc.edu>
00034 // $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Gist/train-FFN.C $
00035 // $Id: train-FFN.C 14376 2011-01-11 02:44:34Z pez $
00036 //
00037 
00038 // ######################################################################
00039 /*! training a neural network                                           */
00040 // //////////////////////////////////////////////////////////
00041 // train-FFN.C <*_train.txt>
00042 
00043 // This is a general purpose neural network trainer
00044 // in which we can specify the network architecture as well as
00045 // a set of training parameters in the training file <*_train.txt>.
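// For instance, assuming the example file below were saved as Campus_train.txt
// in the working directory, a typical (hypothetical) invocation would be:
//
//   ./train-FFN Campus_train.txt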
00046 
00047 // Below is an example of a training file to convey the format used:
00048 
00049 // // ######################################################################
00050 
00051 // File name: Campus_train.txt
00052 // Purpose: train a neural network to recognize different locations
00053 // on the USC campus.
00054 // Content:
00055 
00056 // /home2/tmp/u/christian/movies/SAL_JEP/  # training folder
00057 // /home2/tmp/u/christian/movies/SAL_JEP2/ # testing folder
00058 // 11                            # number of categories
00059 // PCA                           # whether to apply a PCA step (PCA/NO_PCA)
00060 // Campus.evec                   # PCA eigenvector matrix
00061 // 714                           # number of original dimensions
00062 // 80                            # number of reduced dimensions (same if NO_PCA)
00063 // 400                           # number of hidden units in the first layer
00064 // 200                           # number of hidden units in the second layer
00065 // .002                          # learning rate
00066 // Campus_gistList.txt           # training sample list
00067 // Campus2_gistList.txt          # testing sample list
00068 // CampusB_train_hidden1.nnwt    # first hidden layer weight file name
00069 // CampusB_train_hidden2.nnwt    # second hidden layer weight file name
00070 // CampusB_train_out.nnwt        # output layer weight file name
00071 
00072 // Note:
00073 // Most of the content is self-explanatory: the first two lines are the
00074 // absolute paths of the training and testing sample folders.
00075 
00076 // The next parameters pertain to the architecture: a three-layer network
00077 // with 80, 400, 200, and 11 nodes in the input, first hidden, second hidden,
00078 // and output layers, respectively.
00079 
00080 // The next two files contain lists of samples along with their corresponding
00081 // ground truths; their format is discussed below.
00082 
00083 // The last three entries name the weight files for the two hidden layers and the output layer.
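//
// As a rough illustration of how these values are consumed (mirroring the
// init3L() call in main() below), the example file above would configure the
// network along these lines; 'info' holds the parsed FFNtrainInfo fields:
//
//   ffn.reset(new FeedForwardNetwork());
//   ffn->init3L(info->h1Name, info->h2Name, info->oName, // weight file names
//               info->redFeatSize,                       // 80 input nodes
//               info->h1size, info->h2size,              // 400 and 200 hidden units
//               info->nOutput,                           // 11 output nodes
//               info->learnRate, 0.0);                   // learning rate .002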
00084 
00085 // // ######################################################################
00086 
00087 // The following is an example of a *_gistList.txt file for the trainer to use.
00088 
00089 // // ######################################################################
00090 
00091 // File name: Campus_gistList.txt
00092 // Purpose: provide ground truth for 5254 campus image samples taken at
00093 // prespecified locations.
00094 // Content:
00095 //
00096 // 5254                     # number of samples (up to 1000 per file tag)
00097 // 11                       # number of categories
00098 // ABSOLUTE                 # ground truth type (MIXTURE/ABSOLUTE)
00099 // tag             start     num     groundTruth extension
00100 // SAL_JEP_1_       0         120     0          .gist
00101 // SAL_JEP_1_       120       258     1          .gist
00102 // SAL_JEP_2_       0         173     1          .gist
00103 // SAL_JEP_3_       0         53      1          .gist
00104 // SAL_JEP_3_       53        21      2          .gist
00105 // SAL_JEP_4_       0         408     2          .gist
00106 // SAL_JEP_5_       0         608     3          .gist
00107 // WaterFntn_       0         292     4          .gist
00108 // LeaveyA_         0         219     5          .gist
00109 // LeaveyB_         0         101     6          .gist
00110 // LeaveyC_         0         178     7          .gist
00111 // w34st_           0         1201    8          .gist
00112 // McClintock_      0         1221    9          .gist
00113 // w37th_           0         401     10         .gist
00114 
00115 // Note:
00116 // The tags are prefixes for the file names. Each entry covers 'num' files
00117 // (consecutive six-digit indexes) starting from the 'start' index.
00118 // So, the first file is SAL_JEP_1_000000.gist,
00119 // which corresponds to a ground truth output of 1.0 for the first node and 0.0
00120 // for the remaining nodes.
00121 
00122 // The MIXTURE ground truth type is for desired outputs that are more
00123 // descriptive than simply one winning node. This is NOT YET IMPLEMENTED.
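//
// Each .gist file itself is just a flat binary block of 'double' values, read
// back one by one in setupCases() below (info->oriFeatSize of them, 714 in
// this example). A minimal sketch of writing a compatible feature file,
// assuming a hypothetical 'features' vector that already holds the values:
//
//   std::vector<double> features(714);   // filled in by the gist extractor
//   FILE *ofp = fopen("SAL_JEP_1_000000.gist", "wb");
//   if(ofp != NULL)
//     {
//       fwrite(&features[0], sizeof(double), features.size(), ofp);
//       fclose(ofp);
//     }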
00124 
00125 // // ######################################################################
00126 
00127 // Related files of interest: FFN.C (and .H), the feed-forward network class.
00128 
00129 #include "Component/ModelManager.H"
00130 #include "Gist/FFN.H"
00131 #include "Raster/Raster.H"
00132 #include "Util/MathFunctions.H"
00133 #include "Gist/trainUtils.H"
00134 #include "Image/MatrixOps.H"
00135 
00136 #include <vector>
00137 
00138 #define ABSOLUTE        0
00139 #define MIXTURE         1
00140 #define ERR_THRESHOLD   .01
00141 #define MAX_EPOCH       1000
00142 // CloseButtonListener wList;
00143 
00144 void setupCases
00145 (std::string folder, std::string fname, bool equalize);
00146 void train();
00147 void test();
00148 void run(int isTest);
00149 void diff
00150 (Image<double> ideal, Image<double> out, double &tErr, int &tFc, int &tIc);
00151 
00152 
00153 
00154 rutz::shared_ptr<FeedForwardNetwork> ffn;
00155 int nSamples = 0;
00156 std::vector<Image<double> > in;
00157 std::vector<Image<double> > out;
00158 Image<double> pcaIcaMatrix;
00159 
00160 // information from training file
00161 rutz::shared_ptr<FFNtrainInfo> info;
00162 
00163 // ######################################################################
00164 // training procedure
00165 int main(const int argc, const char **argv)
00166 {
00167   // Instantiate a ModelManager:
00168   ModelManager manager("Feed-Forward Network trainer");
00169 
00170   // Parse command-line:
00171   if (manager.parseCommandLine(argc, argv, "<input_train.txt>",
00172                                1, 1) == false)
00173     return(1);
00174 
00175   // set up the trainer: a multilayer neural network with backpropagation
00176   info.reset(new FFNtrainInfo(manager.getExtraArg(0)));
00177 
00178   // instantiate a three-layer feed-forward network
00179   // initialize with the given parameters
00180   ffn.reset(new FeedForwardNetwork());
00181   ffn->init3L(info->h1Name, info->h2Name, info->oName,
00182               info->redFeatSize, info->h1size, info->h2size, info->nOutput,
00183               info->learnRate, 0.0);
00184 
00185   // setup PCA/ICA reduction matrix - if necessary
00186   if(info->isPCA)
00187     {
00188       pcaIcaMatrix = setupPcaIcaMatrix
00189         (info->trainFolder+info->evecFname,
00190          info->oriFeatSize, info->redFeatSize);
00191     }
00192 
00193   // train the network
00194   printf("would you like to skip training and just test the network? "
00195          "(y/n - default y)");
00196   char spC = getchar(); getchar();
00197   if(spC == 'n')
00198   {
00199     printf("would you like to equalize the number of samples? "
00200            "(y/n default y)");
00201     char spC = getchar(); getchar();
00202     bool equalize = true;
00203     if(spC == 'n') equalize = false;
00204 
00205     setupCases(info->trainFolder, info->trainSampleFile, equalize);
00206     train();
00207     Raster::waitForKey();
00208   }
00209 
00210   // test the network
00211   setupCases(info->testFolder, info->testSampleFile, false);
00212   test();
00213   Raster::waitForKey();
00214 
00215   // save the results
00216   ffn->write3L(info->h1Name, info->h2Name, info->oName);
00217 }
00218 
00219 // ######################################################################
00220 // open a sample list file (training or testing) and load the feature vectors and their ground truth
00221 void setupCases(std::string folder, std::string fname, bool equalize)
00222 {
00223   char comment[200];  FILE *fp;  char inLine[1000];
00224 
00225   // open a file that lists the samples with their ground truth
00226   std::string name = folder + fname;
00227   if((fp = fopen(name.c_str(),"rb")) == NULL)
00228     {
00229       LINFO("samples file: %s not found", name.c_str());
00230 
00231       // input and output vector
00232       out.resize(0);
00233       in.resize(0);
00234       nSamples = 0;
00235 
00236       return;
00237     }
00238   LINFO("tName: %s",name.c_str());
00239 
00240   // get number of samples
00241   if (fgets(inLine, 1000, fp) == NULL) LFATAL("fgets failed"); sscanf(inLine, "%d %s", &nSamples, comment);
00242 
00243   // the number of categories -> has to agree with the training file
00244   uint tNout;
00245   if (fgets(inLine, 1000, fp) == NULL) LFATAL("fgets failed"); sscanf(inLine, "%u %s", &tNout, comment);
00246   if(tNout != info->nOutput)
00247     LFATAL("Num categories differ: %d != %d", tNout, info->nOutput);
00248 
00249   // get the type of ground truth
00250   char gtOpt[100]; int gtType = -1;
00251   if (fgets(inLine, 1000, fp) == NULL) LFATAL("fgets failed"); sscanf(inLine, "%s %s", gtOpt, comment);
00252   if(strcmp(gtOpt,"ABSOLUTE") == 0)
00253     gtType = ABSOLUTE;
00254   else if(strcmp(gtOpt,"MIXTURE" ) == 0)
00255     gtType = MIXTURE;
00256   else
00257     LFATAL("unknown ground truth type %s",gtOpt);
00258 
00259   // set up the sizes of the input and output vectors
00260   out.resize(nSamples);
00261   in.resize(nSamples);
00262 
00263   // skip column headers
00264   if (fgets(inLine, 1000, fp) == NULL) LFATAL("fgets failed");
00265 
00266   char cName[100]; char sName[100]; char iName[100]; char ext[100];
00267   int cStart, cNum; int gTruth;
00268   FILE *ifp;
00269   int count = 0;  int tSamples = 0;
00270   std::vector<uint> catCount; // per-category sample counts
00271   while(fgets(inLine, 1000, fp) != NULL)
00272   {
00273     if(gtType == ABSOLUTE)
00274       {
00275         // get the files in this category and ground truth
00276         sscanf(inLine, "%s %d %d %d %s", cName, &cStart, &cNum,  &gTruth, ext);
00277         sprintf(sName,"%s%s", folder.c_str(), cName);
00278         printf("    sName: %s %d %d %d %s\n",sName, cStart, cNum, gTruth, ext);
00279       }
00280     else if(gtType == MIXTURE)
00281       {
00282         // get the files in this category and ground truth
00283         //char tStr[300];
00284         //sscanf(inLine, "%s %d %d %s %s", cName, &cStart, &cNum,  tStr, ext);
00285         //sprintf(sName,"%s%s", folder, cName);
00286         //printf(" sName: %s %d %d %d %s\n",sName, cStart, cNum, gTruth, ext);
00287 
00288         // change to mixture values
00289         LFATAL("MIXTURE ground truth type not yet implemented");
00290       }
00291     else LFATAL("unknown ground truth type %s",gtOpt);
00292 
00293     catCount.push_back(cNum);
00294 
00295     // go through every sample
00296     for(int j = cStart; j < cStart+cNum; j++)
00297       {
00298         tSamples++;
00299         // get the corresponding vector file (if it exists)
00300         sprintf(iName,"%s%06d%s", sName,j,ext);
00301 
00302         // open the file
00303         if((ifp = fopen(iName,"rb")) != NULL)
00304           {
00305             Image<double> tData(1,info->oriFeatSize, NO_INIT);
00306             Image<double>::iterator aptr = tData.beginw();
00307 
00308             for(int i = 0; i < tData.getSize(); i++)
00309               {
00310                 double val; if (fread(&val, sizeof(double), 1, ifp) != 1) LFATAL("fread failed");
00311                 *aptr++ = val;
00312               }
00313 
00314             LINFO("feature file found: %s (%d)",//%7.4f %7.4f %7.4f %7.4f\n",
00315                    iName,gTruth);//,tData[0], tData[21], tData[42], tData[63]);
00316             fclose(ifp);
00317 
00318             // calculate the reduced features
00319             if(info->isPCA) in[count] = matrixMult(pcaIcaMatrix, tData);
00320             else in[count] = tData;
00321 
00322             // load the ground truth
00323             if(gtType == ABSOLUTE)
00324               {
00325                 Image<double> res(1,info->nOutput, ZEROS);
00326                 res.setVal(0, gTruth, 1.0);
00327                 out[count] = res;
00328               }
00329             else if(gtType == MIXTURE)
00330               {
00331                 LFATAL("MIXTURE ground truth type not yet implemented");
00332               }
00333             else LFATAL("unknown ground truth type %s",gtOpt);
00334 
00335 //             // just to test stuff
00336 //             for(int k = 0; k < info->oriFeatSize; k++)
00337 //                 printf("ori[%7d]: %f \n", k, tData.getVal(k));
00338 //             printf("\n");
00339 
00340 //             for(int k = 0; k < info->redFeatSize; k++)
00341 //                 printf("red[%7d]: %f \n", k, in[count].getVal(k));
00342 //             printf("\n");
00343 //             //for(uint k = 0; k < info->nOutput; k++)
00344 //             //    printf("%f \n",out[count].getVal(k));
00345 //             Raster::waitForKey();
00346 
00347             count++;
00348           }
00349         else LFATAL("file: %s not found\n",iName);
00350       }
00351   }
00352 
00353   // equalize the number of samples if requested
00354   if(equalize)
00355     {
00356       // find the largest per-category sample count
00357       uint max = *max_element(catCount.begin(), catCount.end());
00358       LINFO("max element: %u", max);
00362 
00363       uint offset = 0;
00364       for(uint i = 0; i < catCount.size(); i++)
00365         {
00366           LINFO("extra samples for class[%3d]: %d - %d -> %d",
00367                 i, max,  catCount[i], max - catCount[i]);
00368           for(uint j = 0; j < max - catCount[i]; j++)
00369             {
00370               // pick a random sample of this class to duplicate
00371               uint index = rand()/(RAND_MAX + 1.0) * catCount[i];
00372               LINFO("[%d] Duplicating class[%3d] sample[%3d]"
00373                     " -> actual ind: %3d",
00374                     j, i, index, index + offset);
00375               index = index + offset;
00376 
00377               in.push_back(in[index]);
00378               out.push_back(out[index]);
00379             }
00380           offset += catCount[i];
00381         }
00382       LINFO("Total samples before equalization: %d \n",tSamples);
00383       tSamples = in.size(); nSamples = tSamples; // make the duplicates visible to run()
00384     }
00385 
00386   LINFO("Actual total samples: %d \n",tSamples);
00387   fclose(fp);
00388 }
00389 
00390 // ######################################################################
00391 // train the network
00392 void train() { run(0); }
00393 
00394 // ######################################################################
00395 // test the network
00396 void test()  { run(1); }
00397 
00398 // ######################################################################
00399 // train or test the network
00400 void run(int isTest)
00401 {
00402   LINFO("Run the samples");
00403   double errSum = double(nSamples);
00404   double err; Image<double> ffnOut;
00405   int nfc = nSamples; int fc;
00406   int nfcClass[info->nOutput][info->nOutput];//confusion matrix[target][output]
00407   int nTrials = 0;
00408   int target = 0;
00409 
00410   if(nSamples == 0) return;
00411   int order[nSamples];
00412   for(int i = 0; i < nSamples; i++) order[i] = i;
00413 
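  // training loop: keep going until the misclassified fraction drops to
  // ERR_THRESHOLD or MAX_EPOCH epochs have run (skipped entirely when testing)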
00414   while(nTrials < MAX_EPOCH && !isTest && nfc > int(nSamples*ERR_THRESHOLD))
00415   {
00416     // reinitialize statistic variables
00417     for(uint i = 0; i < info->nOutput; i++)
00418       for(uint j = 0; j < info->nOutput; j++)
00419         nfcClass[i][j] = 0;
00420     errSum = 0.0; nfc = 0;
00421 
00422     // run the input in random order
00423     randShuffle(order, nSamples);
00424 
00425     for(int i = 0; i < nSamples; i++)
00426     {
00427       // run the input
00428       ffn->run3L(in[order[i]]);
00429       ffnOut = ffn->getOutput();
00430 
00431       // get the error
00432       diff(out[order[i]], ffnOut, err, fc, target);
00433 
00434       // add misclassification count
00435       if(fc != -1)
00436         {
00437           nfc++;
00438           nfcClass[target][fc]++;
00439         }
00440       else
00441         nfcClass[target][target]++;
00442 
00443       // and the numerical deviation
00444       errSum += err;
00445 
00446       if(fc != -1)
00447         {
00448           //ffn->setLearnRate(learnRate*10);
00449           ffn->backprop3L(out[order[i]]);
00450           //ffn->setLearnRate(learnRate);
00451         }
00452     }
00453     nTrials++;
00454 
00455     // report progress (every epoch; increase the modulus to report less often)
00456     if(nTrials % 1 == 0)
00457       {
00458         printf("Trial_%04d_Err: %f nfc: %5d/%5d -> %f%%\n",
00459                nTrials, errSum/nSamples,
00460                nfc,nSamples,(double)(nfc)/(0.0 + nSamples)*100.0);
00461 
00462         printf("class |");
00463         for(uint k = 0;  k < info->nOutput; k++)
00464           printf(" %4d", k);
00465         printf("\n");
00466         for(uint k = 0;  k < info->nOutput; k++)
00467           printf("------");
00468         printf("\n");
00469         for(uint k = 0; k < info->nOutput; k++)
00470         {
00471           printf("%6d|",k);
00472           for(uint j = 0; j < info->nOutput; j++)
00473             printf(" %4d",nfcClass[k][j]);
00474           printf("\n");
00475         }
00476       }
00477     printf("\n");
00478   }
00479 
00480   // print the results if testing
00481   if(isTest)
00482     {
00483       nfc = 0; errSum = 0.0; err = 0;
00484       for(uint i = 0; i < info->nOutput; i++)
00485         for(uint j = 0; j < info->nOutput; j++)
00486           nfcClass[i][j] = 0;
00487 
00488       for(int i = 0; i < nSamples; i++)
00489         {
00490           // run the input
00491           ffn->run3L(in[i]);
00492 
00493           // get the output
00494           ffnOut = ffn->getOutput();
00495 
00496           // get the error
00497           diff(out[i], ffnOut, err, fc, target);
00498 
00499           // add misclassification count
00500           if(fc != -1)
00501             {
00502               nfc++;
00503               nfcClass[target][fc]++;
00504             }
00505           else
00506             nfcClass[target][target]++;
00507 
00508           // and the numerical deviation
00509           errSum += err;
00510 
00511           if((fc != -1) | 1) // always true: print every sample (drop the "| 1" to show only errors)
00512             {
00513               printf("sample %5d: ",i);
00514               for(uint j = 0; j < info->nOutput; j++)
00515                 printf("%.3f ",out[i][j]);
00516               printf(" -:- ");
00517               for(uint j = 0; j < info->nOutput; j++)
00518                 printf("%.3f ",ffnOut[j]);
00519             }
00520           if(fc != -1) printf(" WRONG! NO:%d  [%d][%d] = %d \n",
00521                               nfc, target, fc, nfcClass[target][fc]);
00522           else printf("\n");
00523         }
00524     }
00525 
00526   // final error count
00527   printf("Final Trial_%04d_Err: %f nfc: %5d/%5d -> %.3f%%\n",
00528          nTrials,errSum/nSamples,
00529          nfc,nSamples,(double)(nfc)/(0.0 + nSamples)*100.0);
00530 
00531   printf("class |");
00532   for(uint k = 0;  k < info->nOutput; k++)
00533     printf(" %5d",k);
00534   printf("     Total          pct. err \n-------");
00535   for(uint k = 0;  k < info->nOutput; k++)
00536     printf("------");
00537   printf("\n");
00538   for(uint k = 0; k < info->nOutput; k++)
00539     {
00540       int t = 0, e = 0;
00541       printf("%6d|",k);
00542       for(uint j = 0; j < info->nOutput; j++)
00543         {
00544           printf(" %5d",nfcClass[k][j]);
00545           if(k == j)
00546             t = nfcClass[k][j];
00547           else
00548             e += nfcClass[k][j];
00549         }
00550       if(e+t == 0)
00551         printf(" %6d/%6d     N/A%%\n",0,0);
00552       else
00553         printf(" %6d/%6d  %6.2f%%\n",e,e+t, float(e)/float(e+t)*100.0);
00554     }
00555 
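  // column-wise summaries of the confusion matrix (k = predicted class):
  // false positives, total predictions, and percentage of wrong predictions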
00556   for(uint k = 0;  k < info->nOutput; k++)
00557     printf("------");
00558   printf("-------\nFalse+|");
00559   for(uint k = 0; k < info->nOutput; k++)
00560     {
00561       int e = 0;
00562       for(uint j = 0; j < info->nOutput; j++)
00563         {
00564           if(k == j)
00565             ; //t = nfcClass[j][k];
00566           else
00567             e += nfcClass[j][k];
00568         }
00569       printf(" %5d",e);
00570     }
00571   printf("\ntotal |");
00572   for(uint k = 0; k < info->nOutput; k++)
00573     {
00574       int t = 0, e = 0;
00575       for(uint j = 0; j < info->nOutput; j++)
00576         {
00577           if(k == j)
00578             t = nfcClass[j][k];
00579           else
00580             e += nfcClass[j][k];
00581         }
00582       printf(" %5d",e+t);
00583     }
00584   printf("\nerr:  |");
00585   for(uint k = 0; k < info->nOutput; k++)
00586     {
00587       int t = 0, e = 0;
00588       for(uint j = 0; j < info->nOutput; j++)
00589         {
00590           if(k == j)
00591             t = nfcClass[j][k];
00592           else
00593             e += nfcClass[j][k];
00594         }
00595       if(e+t == 0)
00596         printf("  N/A");
00597       else
00598         printf(" %5.2f",float(e)/float(e+t)*100.0);
00599     }
00600   printf("\n");
00601 }
00602 
00603 // ######################################################################
00604 //  compute the difference between the two vectors: tErr = summed absolute error, tFc = wrong prediction (or -1), tIc = target class
00605 void diff
00606 (Image<double> ideal, Image<double> out,
00607  double &tErr, int &tFc, int &tIc)
00608 {
00609   tErr = 0.0;
00610   Image<double>::iterator iptr = ideal.beginw();
00611   Image<double>::iterator optr = out.beginw();
00612   for(uint i = 0; i < info->nOutput; i++)
00613     tErr += fabs(*iptr++ - *optr++);
00614 
00615   int iMaxI = 0, oMaxI = 0;
00616   iptr = ideal.beginw(); optr = out.beginw();
00617   double iMax = *iptr++, oMax = *optr++;
00618   for(uint i = 1; i < info->nOutput; i++)
00619     {
00620       double ival = *iptr++;
00621       double oval = *optr++;
00622       if(ival > iMax) { iMax = ival; iMaxI = i; }
00623       if(oval > oMax) { oMax = oval; oMaxI = i; }
00624     }
00625 
00626   // indication of incorrect output
00627   tFc = -1; if(iMaxI != oMaxI) tFc = oMaxI;
00628   tIc = iMaxI;
00629 }
00630 
00631 // code for best/second best stuff
00632 //       // get output of the first and second most likely level
00633 //       if(vout[0] > vout[1])
00634 //       {
00635 //         max  = 0; mval  =  vout[0];
00636 //         max2 = 1; mval2 =  vout[1];
00637 //       }
00638 //       else
00639 //       {
00640 //         max  = 1; mval  =  vout[1];
00641 //         max2 = 0; mval2 =  vout[0];
00642 //       }
00643 
00644 //       for(int j = 2; j < NUM_H_LEV; j++)
00645 //         if(mval < vout[j])
00646 //         {
00647 //           max2  = max;
00648 //           mval2 = mval;
00649 //           max   = j;
00650 //           mval  = vout[j];
00651 //         }
00652 //         else
00653 //         if(mval2 < vout[j])
00654 //         {
00655 //           max2   = j;
00656 //           mval2  = vout[j];
00657 //         }
00658 
00659 //       // add the level misclassification
00660 //       if(max != rmax) nfc++;
00661 //       if(max != rmax && max2 != rmax ) nfc2++;
00662 
00663 //     printf("avgErr: %f real:  %d predict( %d,%d) ",
00664 //            errSum/NUM_H_LEV, rmax, max, max2);
00665 //     if(max != rmax)
00666 //       printf(" WRONG %d ",nfc);
00667 //     if(max != rmax && max2 != rmax )
00668 //       printf(" WRONG2 %d\n",nfc2);
00669 //     else
00670 //       printf(" \n");
00671 //     printf("\n");
00672 //   }
00673 //   printf("Test_%04d_Err: %f nfc: %d nfc2: %d\n",
00674 //          nTrials,avgErr/nSamples/NUM_H_LEV, nfc, nfc2);
00675 
00676 
00677 // ######################################################################
00678 /* So things look consistent in everyone's emacs... */
00679 /* Local Variables: */
00680 /* indent-tabs-mode: nil */
00681 /* End: */