00001 /*!@file Gist/faceDetection.C saliency based face detection */ 00002 00003 // //////////////////////////////////////////////////////////////////// // 00004 // The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the // 00005 // University of Southern California (USC) and the iLab at USC. // 00006 // See http://iLab.usc.edu for information about this project. // 00007 // //////////////////////////////////////////////////////////////////// // 00008 // Major portions of the iLab Neuromorphic Vision Toolkit are protected // 00009 // under the U.S. patent ``Computation of Intrinsic Perceptual Saliency // 00010 // in Visual Environments, and Applications'' by Christof Koch and // 00011 // Laurent Itti, California Institute of Technology, 2001 (patent // 00012 // pending; application number 09/912,225 filed July 23, 2001; see // 00013 // http://pair.uspto.gov/cgi-bin/final/home.pl for current status). // 00014 // //////////////////////////////////////////////////////////////////// // 00015 // This file is part of the iLab Neuromorphic Vision C++ Toolkit. // 00016 // // 00017 // The iLab Neuromorphic Vision C++ Toolkit is free software; you can // 00018 // redistribute it and/or modify it under the terms of the GNU General // 00019 // Public License as published by the Free Software Foundation; either // 00020 // version 2 of the License, or (at your option) any later version. // 00021 // // 00022 // The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope // 00023 // that it will be useful, but WITHOUT ANY WARRANTY; without even the // 00024 // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR // 00025 // PURPOSE. See the GNU General Public License for more details. // 00026 // // 00027 // You should have received a copy of the GNU General Public License // 00028 // along with the iLab Neuromorphic Vision C++ Toolkit; if not, write // 00029 // to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, // 00030 // Boston, MA 02111-1307 USA. // 00031 // //////////////////////////////////////////////////////////////////// // 00032 // 00033 // Primary maintainer for this file: Christian Siagian <siagian@usc.edu> 00034 // $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Gist/faceDetection.C $ 00035 // $Id: faceDetection.C 10982 2009-03-05 05:11:22Z itti $ 00036 // 00037 00038 // ###################################################################### 00039 /*! face detection program */ 00040 00041 #include "Channels/ChannelOpts.H" 00042 #include "Component/GlobalOpts.H" 00043 #include "Component/ModelManager.H" 00044 #include "GUI/XWinManaged.H" 00045 #include "Gist/FFN.H" 00046 #include "Image/FFTWWrapper.H" 00047 #include "Gist/ModelFace.H" 00048 #include "Image/CutPaste.H" // for inplacePaste() 00049 #include "Image/DrawOps.H" 00050 #include "Image/Image.H" 00051 #include "Image/ImageSet.H" 00052 #include "Image/Kernels.H" // for gaborFilter() 00053 #include "Image/MathOps.H" 00054 #include "Image/Pixels.H" 00055 #include "Image/PyramidOps.H" 00056 #include "Image/ShapeOps.H" // for zoomXY() etc. 
00057 #include "Image/Transforms.H" 00058 #include "Media/FrameSeries.H" 00059 #include "Media/MediaSimEvents.H" 00060 #include "Neuro/NeuroOpts.H" 00061 #include "Neuro/NeuroSimEvents.H" 00062 #include "Neuro/ShapeEstimator.H" 00063 #include "Neuro/VisualCortex.H" 00064 #include "Neuro/StdBrain.H" 00065 #include "Raster/Raster.H" 00066 #include "Simulation/SimEventQueueConfigurator.H" 00067 00068 00069 #include "Channels/BlueYellowChannel.H" 00070 #include "Channels/ColorChannel.H" 00071 #include "Channels/GaborChannel.H" 00072 #include "Channels/IntensityChannel.H" 00073 #include "Channels/OrientationChannel.H" 00074 #include "Channels/RedGreenChannel.H" 00075 00076 00077 CloseButtonListener wList; 00078 XWinManaged *imgWin; 00079 XWinManaged *fftWin; 00080 XWinManaged *salWin; 00081 XWinManaged *workWin; 00082 00083 ImageSet<float> pyrImg; 00084 00085 // for Gist 00086 FFTWWrapper *fftw; 00087 rutz::shared_ptr<FeedForwardNetwork> ffn_pl; 00088 int primeLev = 0; 00089 double *input = NULL; 00090 Image<double> gft; 00091 double *iptr = NULL; 00092 double **outFftw = NULL; 00093 double ****gaborMask = NULL; 00094 Image<float> gaborMaskImg[NUM_G_LEV][NUM_G_DIR]; 00095 void setupPrimeLevEstimator(); 00096 void setupFFTW(Image<float> img); 00097 int* getPrimeLev(Image<float> img); 00098 void setupGaborMask(Image<float> img); 00099 Image<float> normalize(Image<float> img); 00100 Image<float> getFftImage(double **outFft, int w, int h); 00101 void fftCompute(Image<float> img, double **outFft); 00102 void getFeatureVector 00103 (double **outFft, Image<double> &res, int w, int h); 00104 void getFeatureVector2 00105 (nub::soft_ref<StdBrain> brain, Image<double> &vec, int w, int h); 00106 void freeFftwData(Image<float> img); 00107 00108 // for part detections 00109 rutz::shared_ptr<FeedForwardNetwork> ffn_e; 00110 rutz::shared_ptr<FeedForwardNetwork> ffn_n; 00111 rutz::shared_ptr<FeedForwardNetwork> ffn_m; 00112 00113 // CURRENT IMAGE BEING PROCESS - MINIMIZING PROCESSING 00114 // IN REAL APPLICATION A BRAIN WOULD DO THIS 00115 // Change the option from ORISteerable 00116 Image<float> currImg; 00117 ImageSet<float> currGaPyr[NUM_DIR]; 00118 00119 void setupPartDetectors(); 00120 Rectangle getWindow (Image<float> img, Point2D<int> p, int s); 00121 void getFacePartProb (Point2D<int> p, float ang, double *pPart); 00122 void correctGabors (Image<float> **Img, float ang); 00123 void getFacePartFeatures(Rectangle r, FacePart part, Image<double> &features); 00124 //void getFeature (Image<float> **img, FacePart part, 00125 // Image<double> &features); 00126 void getEyeFeature (Rectangle r, Image<double> &features); 00127 void getEyeFeature (Image<float> **img, Image<double> &features); 00128 void getNoseFeature (Rectangle r, Image<double> &features); 00129 void getNoseFeature (Image<float> **img, Image<double> &features); 00130 void getMouthFeature (Rectangle r, Image<double> &features); 00131 void getMouthFeature (Image<float> **img, Image<double> &features); 00132 00133 // face detection main path 00134 void detectFace (Image<float> img, nub::soft_ref<StdBrain> brain, 00135 Point2D<int> winner); 00136 Point2D<int> getIntSalPt (Image<float> img, Point2D<int> p); 00137 Point2D<int> getIntSalPt (Image<float> img, Image<float>& vis, Point2D<int> p); 00138 float getSalG (Point2D<int> p, int lev); 00139 void getLocalSalPt (Point2D<int> pWinner, int max, 00140 Point2D<int> **pt, double **ang, int *np); 00141 void crop (Image<float>& img, Point2D<int> p); 00142 void getContextPt (Point2D<int> *attPt, float *dir, 
double **pProb, 00143 int n, int max, 00144 Point2D<int> **pt, double **ang, int *np); 00145 double getAng (Point2D<int> p); 00146 float getPotVal (Point2D<int> *attPt, float *dir, 00147 double *pProb[NUM_FACE_PART], int n, 00148 Point2D<int> pt, float a, double 00149 tProb[NUM_FACE_PART]); 00150 float getGeomRelProb (FacePart o,FacePart c, Point2D<int> oPt, float oD, 00151 Point2D<int> cPt, float cA); 00152 bool locateFace (Point2D<int> *attPt, float *dir, 00153 double *pProb[NUM_FACE_PART], int n); 00154 void drawFace (Image<float>& img); 00155 void printRegion (Image<float> img,int sX,int eX,int dX, 00156 int sY,int eY, int dY); 00157 void displayPartImage (Image<float> img, Rectangle pr); 00158 00159 // location of the face 00160 Point2D<int> eyeP, eye2P, noseP, mouthP; 00161 float eyeD, eye2D, noseD, mouthD; 00162 00163 // ###################################################################### 00164 // START detecting faces :-) 00165 int main(const int argc, const char **argv) 00166 { 00167 MYLOGVERB = LOG_INFO; // suppress debug messages 00168 00169 // Instantiate a ModelManager: 00170 ModelManager manager("Face Detection Model"); 00171 00172 nub::soft_ref<SimEventQueueConfigurator> 00173 seqc(new SimEventQueueConfigurator(manager)); 00174 manager.addSubComponent(seqc); 00175 00176 nub::soft_ref<InputFrameSeries> ifs(new InputFrameSeries(manager)); 00177 manager.addSubComponent(ifs); 00178 00179 nub::soft_ref<OutputFrameSeries> ofs(new OutputFrameSeries(manager)); 00180 manager.addSubComponent(ofs); 00181 00182 // Instantiate our various ModelComponents: 00183 nub::soft_ref<StdBrain> brain(new StdBrain(manager)); 00184 manager.addSubComponent(brain); 00185 00186 manager.setOptionValString(&OPT_MaxNormType, "FancyOne"); 00187 manager.setOptionValString(&OPT_UseRandom, "false"); 00188 // manager.setOptionValString(&OPT_ShapeEstimatorMode,"SaliencyMap"); 00189 // manager.setOptionValString(&OPT_ShapeEstimatorMode,"ConspicuityMap"); 00190 manager.setOptionValString(&OPT_ShapeEstimatorMode, "FeatureMap"); 00191 manager.setOptionValString(&OPT_ShapeEstimatorSmoothMethod, "Chamfer"); 00192 //manager.setOptionValString(&OPT_ShapeEstimatorSmoothMethod, "Gaussian"); 00193 manager.setOptionValString(&OPT_RawVisualCortexChans,"OIC"); 00194 //manager.setOptionValString(&OPT_IORtype, "ShapeEstFM"); 00195 manager.setOptionValString(&OPT_IORtype, "Disc"); 00196 00197 // Parse command-line: 00198 if (manager.parseCommandLine(argc, argv, "<image>", 1, 1) == false) 00199 return(1); 00200 00201 LFATAL("FIXME"); 00202 /* 00203 nub::soft_ref<SimEventQueue> seq = seqc->getQ(); 00204 00205 Image<float> img//; img.resize(240,240); 00206 = Raster::ReadGray(manager.getExtraArg(0)); 00207 const Dims dims = img.getDims(); 00208 int w = img.getWidth(); int h = img.getHeight(); 00209 //img = crop(img,Rectangle::tlbrI(0,0,h-1,w/2-1)); 00210 //w = img.getWidth(); h = img.getHeight(); 00211 00212 printf("\nImage size: %d by %d\n",w,h); 00213 00214 // for(int i = 0; i < 240; i++) 00215 // for(int j = 0; j < 240; j++) 00216 // { 00217 // //float k = sqrt((i - 120.0)*(i - 120.0) + (j-120.0)*(j-120.0)); 00218 // img.setVal(i,j, 255.0*sin(j*2*M_PI/30.0)*sin(i*2*M_PI/30.0)); 00219 // } 00220 // Point2D<int> a(100,120); 00221 // int rx = 0, ry = 0; int mL = 0; 00222 // if(w/40 < h/50) mL = w/40; else mL = h/50; 00223 // printf("ML: %d\n", mL); 00224 // for(int i = 1; i < mL; i*=2) 00225 // drawRect(img, Rectangle::tlbrI(ry, rx, ry+i*50-1, rx+i*40-1), 255, 1); 00226 00227 // Point2D<int> b(140,120); 00228 // Point2D<int> 
c(120, 80); 00229 // Point2D<int> d(120,160); 00230 // Point2D<int> ac( 80, 80); 00231 // Point2D<int> ad( 80,160); 00232 // Point2D<int> bc(160, 80); 00233 // Point2D<int> bd(160,160); 00234 // //drawLine(img, a,b,255,10); 00235 // //drawDisk(img, a,48,255); 00236 // drawDisk(img, a,4,255); 00237 // drawPatch(img, a, 16, 200); //pt,rad,intens 00238 // drawPatch(img, b, 2, 200); //pt,rad,intens 00239 // drawPatch(img, c, 2, 200); //pt,rad,intens 00240 // drawPatch(img, d, 2, 200); //pt,rad,intens 00241 // drawPatch(img, ac, 2, 200); //pt,rad,intens 00242 // drawPatch(img, ad, 2, 200); //pt,rad,intens 00243 // drawPatch(img, bc, 2, 200); //pt,rad,intens 00244 // drawPatch(img, bd, 2, 200); //pt,rad,intens 00245 00246 imgWin = new XWinManaged(dims, 0, 0, manager.getExtraArg(0).c_str()); 00247 wList.add(imgWin); 00248 fftWin = new XWinManaged(dims, w+15, 0, "FFT" ); 00249 wList.add(fftWin); 00250 salWin = new XWinManaged(dims, 0, h+25, "Sal" ); 00251 wList.add(salWin); 00252 workWin = new XWinManaged(dims, w+15, h+25, "Scan Path" ); 00253 wList.add(workWin); 00254 00255 // let's get all our ModelComponent instances started: 00256 LINFO("\nSTART\n"); 00257 manager.start(); 00258 00259 // take out the motion and flicker channel 00260 double wIntens = 1.0, wOrient = 1.0, wColor = 1.0; 00261 00262 nub::soft_ref<VisualCortex> vc;////////// = brain->getVC(); 00263 nub::soft_ref<ShapeEstimator> se;////////// = brain->getSE(); 00264 vc->setSubchanTotalWeight("color", wColor); 00265 vc->setSubchanTotalWeight("intensity", wIntens); 00266 vc->setSubchanTotalWeight("orientation", wOrient); 00267 //vc->setSubchanTotalWeight("flicker", 0.0); 00268 //vc->setSubchanTotalWeight("motion", 0.0); 00269 00270 // setup the networks to detect each face parts 00271 setupPartDetectors(); 00272 setupPrimeLevEstimator(); 00273 00274 // setup the Fourier Transform feature vector 00275 gft.resize(1, NUM_G_LEV*NUM_G_DIR, NO_INIT); 00276 00277 // main loop: 00278 LINFO("\nMAIN_LOOP\n"); 00279 00280 // are we training or testing? 
00281 bool isTesting = true; 00282 00283 // training protocol to obtain samples 00284 // actual training done on train-FFN 00285 if (!isTesting) 00286 { 00287 // get scale samples 00288 // NEED FOR LOOP 00289 std::string sName("sample fileName"); 00290 const Image<float> sImg = Raster::ReadGray(sName); 00291 const int w = sImg.getWidth(); 00292 const int h = sImg.getHeight(); 00293 const Image<float> nsImg = normalize(sImg); 00294 //iWin->drawImage(sImg,0,0); //Raster::waitForKey(); 00295 00296 // extract the gist features 00297 bool useFFT = true; 00298 Image<double> inputVal(1,1,NO_INIT); 00299 if(useFFT) 00300 { 00301 // resize the fftw properly 00302 setupFFTW(nsImg); 00303 00304 //compute Fourier Transform of the image 00305 fftCompute(nsImg, outFftw); 00306 00307 // compute the feature vector 00308 getFeatureVector(outFftw, inputVal, w, h); 00309 00310 //sXW[i%4] = new XWinManaged(Dims(w,h),0,0,"ft"); 00311 //iXW[i%4] = new XWinManaged(Dims(w,h),0,0,inLine); 00312 Image<float> tImg = getFftImage(outFftw,w,h); 00313 //float min, max; getMinMax(tImg, min,max); 00314 //LINFO("min: %f, max: %f", min, max); 00315 //sXW[i%4]->drawImage(tImg,0,0); 00316 //iXW[i%4]->drawImage(nsImg,0,0); 00317 //Raster::waitForKey(); 00318 00319 // free the data for FFTW computation 00320 freeFftwData(nsImg); 00321 } 00322 else 00323 { 00324 rutz::shared_ptr<SimEventInputFrame> 00325 e(new SimEventInputFrame(brain.get(), GenericFrame(Image<byte>(nsImg)), 0)); 00326 seq->post(e); // post the image to the brain 00327 (void) seq->evolve(); 00328 00329 // compute the feature vector 00330 getFeatureVector2(brain, inputVal, w, h); 00331 } 00332 00333 // save it to a file 00334 // Image<double>::iterator aptr = inputVal.beginw(); 00335 // Image<double>::iterator stop = inputVal.endw(); 00336 // if((ifp = fopen(iName,"wb")) != NULL) 00337 // { 00338 // while(aptr != stop) 00339 // { 00340 // double val = *aptr++; fwrite(&val,sizeof(double), 1, ifp); 00341 // } 00342 // fclose(ifp); 00343 // } 00344 00345 // get face parts samples 00346 //NOTE: all face part training done by Gist/train-faceParts.C 00347 } 00348 00349 int* probLev = NULL; 00350 while(isTesting) 00351 { 00352 // read new image in? 
00353 const FrameState is = ifs->update(seq->now()); 00354 if (is == FRAME_COMPLETE) break; // done 00355 if (is == FRAME_NEXT || is == FRAME_FINAL) // new frame 00356 { 00357 Image< PixRGB<byte> > input = ifs->readRGB(); 00358 00359 // empty image signifies end-of-stream 00360 if (!input.initialized()) 00361 break; 00362 00363 rutz::shared_ptr<SimEventInputFrame> 00364 e(new SimEventInputFrame(brain.get(), GenericFrame(Image<byte>(img)), 0)); 00365 seq->post(e); // post the image to the brain 00366 00367 // prepare gabor filter 00368 currImg = normalize(img); 00369 pyrImg = buildPyrGaussian(currImg, 0, 9, 3); 00370 00371 for(int i = 0; i< NUM_DIR; i++) 00372 currGaPyr[i] = buildPyrGabor(currImg,0,NUM_H_LEV+3,i*45,7,1,9,0); 00373 00374 // for every new image (new dimensions) 00375 // the fourier transform arrays has to be resized as well 00376 setupFFTW(img); 00377 00378 // estimate the level to work with 00379 // with respect to the size of our model head 00380 probLev = getPrimeLev(currImg); 00381 for(int i = 0; i < NUM_H_LEV; i++) 00382 printf("PL* %d: %d \n",i,probLev[i]); 00383 } 00384 00385 // evolve brain: 00386 const SimStatus status = seq->evolve(); 00387 00388 // call face detection method if salient location is selected 00389 if (SeC<SimEventWTAwinner> e = seq->check<SimEventWTAwinner>(0)) { 00390 const Point2D<int> winner = e->winner().p; 00391 00392 // display the input image 00393 Image<float> dispImg = img; 00394 inplaceNormalize(dispImg, 0.0f, 255.0f); 00395 imgWin->drawImage(dispImg,0,0); 00396 //Raster::waitForKey(); 00397 00398 // setup the necessary objects from the brain 00399 LFATAL("fixme"); 00400 nub::soft_ref<ShapeEstimator> se;///////// = brain->getSE(); 00401 //nub::soft_ref<OrientationChannel> oriChan; 00402 //dynCastWeakToFrom(oriChan, vc->subChan("orientation")); 00403 00404 // use Shape estimator to focus on the attended region 00405 Image<float> fmask; std::string label; 00406 if (SeC<SimEventShapeEstimatorOutput> 00407 e = seq->check<SimEventShapeEstimatorOutput>(0)) 00408 { fmask = e->smoothMask(); label = e->winningLabel(); } 00409 00410 Image<float> roiImg; 00411 if (fmask.initialized()) 00412 roiImg = img*fmask; 00413 else 00414 { roiImg = img; fmask.resize(img.getDims(),ZEROS); } 00415 drawCircle(roiImg, winner, 10, 0.0f, 1); 00416 writeText(roiImg, Point2D<int>(0,0), label.c_str()); 00417 salWin->drawImage(roiImg,0,0); 00418 00419 printf("want to try this salient point\n"); 00420 char spC = getchar();getchar(); 00421 if(spC == 'y') 00422 { 00423 printf("trying the salient point\n"); 00424 for(int i = 0; i < NUM_H_LEV; i++) 00425 { 00426 primeLev = probLev[i]; 00427 printf("want to try primeLev: %d \n",primeLev); 00428 char plC = getchar();getchar(); 00429 if(plC == 'y') 00430 { 00431 printf("trying: %d\n",primeLev); 00432 detectFace(img, brain, winner); 00433 } 00434 else 00435 printf("skipping: %d\n",primeLev); 00436 } 00437 } 00438 else 00439 printf("skipping the salient point\n"); 00440 00441 } 00442 00443 if (SIM_BREAK == status) // Brain decided it's time to quit 00444 break; 00445 00446 // write outputs or quit? 
00447 bool gotcovert = false; 00448 if (seq->check<SimEventWTAwinner>(0)) gotcovert = true; 00449 const FrameState os = ofs->update(seq->now(), gotcovert); 00450 00451 if (os == FRAME_NEXT || os == FRAME_FINAL) 00452 brain->save(SimModuleSaveInfo(ofs, *seq)); 00453 00454 if (os == FRAME_FINAL) break; // done 00455 00456 // if we displayed a bunch of images, let's pause: 00457 if (ifs->shouldWait() || ofs->shouldWait()) 00458 Raster::waitForKey(); 00459 } 00460 */ 00461 00462 while(!(wList.pressedAnyCloseButton()))sleep(1); 00463 // stop all our ModelComponents 00464 manager.stop(); 00465 00466 // all done! 00467 printf("All done\n"); 00468 return 0; 00469 } 00470 00471 // ###################################################################### 00472 // setup neural networks for part detectors 00473 void setupPartDetectors() 00474 { 00475 // instantiate a feed forward network for each face part 00476 ffn_e.reset(new FeedForwardNetwork()); 00477 ffn_n.reset(new FeedForwardNetwork()); 00478 ffn_m.reset(new FeedForwardNetwork()); 00479 00480 // get the weight values for each part 00481 std::string h1EName(sformat("%s%s%s",WEIGHT_DATA_FOLDER,FILE_TAGNAME,"hidden1EYE.dat")); 00482 std::string h2EName(sformat("%s%s%s",WEIGHT_DATA_FOLDER,FILE_TAGNAME,"hidden2EYE.dat")); 00483 std::string oEName(sformat("%s%s%s",WEIGHT_DATA_FOLDER,FILE_TAGNAME,"outEYE.dat")); 00484 00485 std::string h1NName(sformat("%s%s%s",WEIGHT_DATA_FOLDER,FILE_TAGNAME,"hidden1NOSE.dat")); 00486 std::string h2NName(sformat("%s%s%s",WEIGHT_DATA_FOLDER,FILE_TAGNAME,"hidden2NOSE.dat")); 00487 std::string oNName(sformat("%s%s%s",WEIGHT_DATA_FOLDER,FILE_TAGNAME,"outNOSE.dat")); 00488 00489 std::string h1MName(sformat("%s%s%s",WEIGHT_DATA_FOLDER,FILE_TAGNAME,"hidden1MOUTH.dat")); 00490 std::string h2MName(sformat("%s%s%s",WEIGHT_DATA_FOLDER,FILE_TAGNAME,"hidden2MOUTH.dat")); 00491 std::string oMName(sformat("%s%s%s",WEIGHT_DATA_FOLDER,FILE_TAGNAME,"outMOUTH.dat")); 00492 00493 // initialize the size of network 00494 // use 3-weight layer network 00495 ffn_e->init3L(h1EName, h2EName, oEName, 00496 RE_INPUT, RE_HIDDEN_1, RE_HIDDEN_2, 1, 0.0, 0.0); 00497 ffn_n->init3L(h1NName, h2NName, oNName, 00498 N_INPUT, N_HIDDEN_1, N_HIDDEN_2, 1, 0.0, 0.0); 00499 ffn_m->init3L(h1MName, h2MName, oMName, 00500 M_INPUT, M_HIDDEN_1, M_HIDDEN_2, 1, 0.0, 0.0); 00501 } 00502 00503 // ###################################################################### 00504 // setup neural networks to train 00505 void setupPrimeLevEstimator() 00506 { 00507 // instantiate a feed forward network for prime level training 00508 ffn_pl.reset(new FeedForwardNetwork()); 00509 00510 // get the weight values for each part 00511 std::string h1PLName(sformat("%s%s",WEIGHT_DATA_FOLDER,"hidden1PL.dat")); 00512 std::string h2PLName(sformat("%s%s",WEIGHT_DATA_FOLDER,"hidden2PL.dat")); 00513 std::string oPLName(sformat("%s%s",WEIGHT_DATA_FOLDER,"outPL.dat")); 00514 00515 // initialize the size of network 00516 // use 3-weight layer network 00517 ffn_pl->init3L(h1PLName, h2PLName, oPLName, 00518 NUM_G_LEV*NUM_G_DIR, PL_HIDDEN_1, PL_HIDDEN_2, NUM_H_LEV, 00519 0.0, 0.0); 00520 } 00521 00522 // ###################################################################### 00523 // detect face 00524 void detectFace(Image<float> img, nub::soft_ref<StdBrain> brain, Point2D<int> winner) 00525 { 00526 LFATAL("FIXME"); 00527 /* 00528 printf("detectFace \n"); 00529 00530 Image<float> wImg; 00531 int scale = int(pow(2,primeLev)); 00532 00533 // move about the local neighborhood to search 00534 // for 
possible face part sites 00535 00536 // get dominant salient feature of the location currently attended 00537 LFATAL("fixme"); 00538 nub::soft_ref<ShapeEstimator> se;//////// = brain->getSE(); 00539 int lev=3, delta=3; char chanName[30] = ""; 00540 00541 //FIXME//sscanf(se->getWinningLabel().c_str(), 00542 //FIXME// "%s %*s %d %*s %d", chanName, &lev, &delta); 00543 printf("name: %s lev: %d, delta %d\n",chanName,lev,delta); 00544 00545 Point2D<int> pWinner; float angR = 0.0; 00546 00547 // if the winner is an intensity channel 00548 if(chanName[0] == 'I' && chanName[1] == 'n') 00549 { 00550 // get an edge around the attended location 00551 pWinner.i = int(winner.i/pow(2,primeLev)); 00552 pWinner.j = int(winner.j/pow(2,primeLev)); 00553 //pWinner = getIntSalPt(pyrImg[primeLev], pWinner); 00554 angR = getAng(pWinner); 00555 } 00556 // if the winner is a Gabor Channel 00557 else if(chanName[0] == 'G' && chanName[1] == 'a') 00558 { 00559 // starting point of the face finding 00560 pWinner.i = int(winner.i/pow(2,primeLev)); 00561 pWinner.j = int(winner.j/pow(2,primeLev)); 00562 00563 // get the dominant angle at the attended location 00564 int angD; sscanf(chanName,"Gabor(%d)",&angD); 00565 angR = (float)(angD)/180.0 * M_PI; 00566 } 00567 else 00568 LINFO("Neither Gabor Channel nor Intensity Channel"); 00569 00570 // shift the attention location to look for a face 00571 // limit our shifts to 10 (for now) 00572 Point2D<int> *attPt = new Point2D<int>[10]; 00573 float dir[10]; dir[0] = angR; 00574 attPt[0] = pWinner; int nShift = 1; 00575 typedef double* doubleptr; 00576 double **pProb = new doubleptr[10]; 00577 for(int i = 0; i < 10; i++) pProb[i] = new double[NUM_FACE_PART]; 00578 00579 // check up if the first salient location has a face part 00580 // get a vector: probability for each tested part 00581 getFacePartProb(pWinner, dir[0], pProb[0]); 00582 00583 // while there is still an edge to follow or HAS SHIFT LESS THAN 10 TIMES 00584 // limit edges to follow to 20 00585 int nMax = 1; int maxPt = 20; 00586 Point2D<int> *pt = new Point2D<int>[maxPt]; 00587 double *ang = new double[maxPt]; 00588 int np; 00589 int faceFound = 0; 00590 00591 while((nMax > 0) && (nShift < 10) && !faceFound) 00592 { 00593 printf(" winner[%d]: (%d,%d,%f)\n", nShift-1, 00594 pWinner.i,pWinner.j, dir[nShift-1]); 00595 nMax = 0; 00596 00597 wImg = pyrImg[primeLev]; 00598 inplaceNormalize(wImg, 0.0f, 128.0f); 00599 // draw scanpath in the work image 00600 for(int i = 0; i< nShift; i++) 00601 { 00602 drawCircle(wImg, attPt[i], 2, 255.0f, 1); 00603 Point2D<int> ePt(int(6*cos(dir[i])),int(6*sin(dir[i]))); 00604 drawLine(wImg, attPt[i] - ePt, attPt[i] + ePt, 1.0f, 1); 00605 } 00606 00607 // get surrounding salient points - limit to maxPt/2 00608 getLocalSalPt(pWinner, maxPt/2, &pt, &ang, &np); 00609 int nsal = np; 00610 00611 // add context points given that we have found some other parts 00612 getContextPt(attPt, dir, pProb, nShift, maxPt, &pt, &ang, &np); 00613 00614 // check up all the salient points 00615 // pick which one we want as the next focus of attention 00616 double tProb[NUM_FACE_PART]; 00617 double tProbMax[NUM_FACE_PART]; 00618 float maxPVal = 0.0, pVal = 0.0; Point2D<int> maxLoc(0,0); float maxA = 0.0; 00619 for(int i = 0; i < np; i++) 00620 { 00621 // use it to guide next search area 00622 getFacePartProb(pt[i], ang[i], tProb); 00623 pVal = getPotVal(attPt,dir,pProb,nShift,pt[i],ang[i],tProb); 00624 00625 if(i < nsal) printf("SAL "); else printf("CTX "); 00626 printf("(%d,%d,%5.3f) ",pt[i].i, 
pt[i].j,ang[i]); 00627 printf("-> pVal : %f ", pVal); 00628 printf("( "); 00629 for(int nPa = 0; nPa < NUM_FACE_PART; nPa++) 00630 printf("%f ",tProb[nPa]); 00631 printf(")\n"); 00632 00633 // if this edge is more promising than the previous one 00634 if(pVal > maxPVal) 00635 { 00636 maxPVal = pVal; maxLoc = pt[i]; maxA = ang[i];nMax = 1; 00637 for(int nPa = 0; nPa < NUM_FACE_PART; nPa++) 00638 tProbMax[nPa] = tProb[nPa]; 00639 } 00640 drawCircle(wImg, pt[i], 1, 255.0f, 1); 00641 00642 //--- 00643 workWin->drawImage(zoomXY(wImg,scale,-1),0,0); 00644 //printf("PT: ");Raster::waitForKey(); 00645 //--- 00646 } 00647 00648 // if we found a part in the neighborhood 00649 if(nMax > 0) 00650 { 00651 printf("MAX pt:(%d,%d), ang: %f MVAL: %f\n", 00652 maxLoc.i,maxLoc.j,maxA,maxPVal); 00653 00654 attPt[nShift] = maxLoc; dir[nShift] = maxA; 00655 printf("( "); 00656 for(int nPa = 0; nPa < NUM_FACE_PART; nPa++) 00657 { 00658 pProb[nShift][nPa] = tProbMax[nPa]; 00659 printf("%f ",tProbMax[nPa]); 00660 } 00661 printf(")\n"); 00662 nShift++; pWinner = maxLoc; 00663 } 00664 00665 // display current progress 00666 workWin->drawImage(zoomXY(wImg,scale,-1),0,0); 00667 Raster::waitForKey(); 00668 00669 // hypothesize on the recovered parts if we have found a face 00670 if(locateFace(attPt,dir,pProb,nShift)) 00671 { 00672 faceFound = 1; 00673 00674 // draw the last visited point 00675 drawCircle(wImg, pWinner, 2, 255.0f, 1); 00676 workWin->drawImage(zoomXY(wImg,scale,-1),0,0); 00677 printf("Found a face "); Raster::waitForKey(); 00678 00679 // clean the working image 00680 // draw the eyes, nose, and mouth and outer face 00681 wImg = pyrImg[primeLev]; 00682 drawFace(wImg); 00683 } 00684 } 00685 00686 // draw all results 00687 workWin->drawImage(zoomXY(wImg,scale,-1),0,0); 00688 printf("end of detectFace \n"); Raster::waitForKey(); 00689 */ 00690 } 00691 00692 // ###################################################################### 00693 Point2D<int> getIntSalPt(Image<float> img, Point2D<int> p) 00694 { 00695 // use this image to note that the coordinate has been visited 00696 Image<float> vis(img.getWidth(), img.getHeight(), ZEROS); 00697 00698 Point2D<int> a = getIntSalPt(img,vis,p); 00699 // XWinManaged xw(vis.getDims(), 700,0,"getIntSalPt"); 00700 // drawImage(xw, vis,0,0); 00701 // Raster::waitForKey(); 00702 00703 return a; 00704 } 00705 00706 // ###################################################################### 00707 Point2D<int> getIntSalPt(Image<float> img, Image<float>& vis, Point2D<int> p) 00708 { 00709 float a = img.getVal(p); float b; 00710 if(vis.getVal(p) == 255.0) return p; 00711 else vis.setVal(p,255.0); 00712 Point2D<int> n; Point2D<int> mn; float mc = 0.0; 00713 int lev = primeLev; 00714 00715 // recurse outward to find the boundary of the region 00716 // stop only if the change in value is not too steep 00717 int w = img.getWidth(); int h = img.getHeight(); 00718 if(p.i > 0) 00719 { 00720 n = p + Point2D<int>(-1,0); b = img.getVal(n); 00721 if(fabs(a - b)/a < .5 && b > 0.001) n = getIntSalPt(img, vis, n); 00722 mn = n; mc = getSalG(n,lev); 00723 } 00724 if(p.i < w-1) 00725 { 00726 n = p + Point2D<int>(1,0); b = img.getVal(n); 00727 if(fabs(a - b)/a < .5 && b > 0.001) n = getIntSalPt(img, vis, n); 00728 if(mc < getSalG(n,lev)) 00729 { mn = n; mc = getSalG(n,lev); } 00730 } 00731 00732 if(p.j > 0) 00733 { 00734 n = p + Point2D<int>(0,-1); b = img.getVal(n); 00735 if(fabs(a - b)/a < .5 && b > 0.001) n = getIntSalPt(img, vis, n); 00736 if(mc < getSalG(n,lev)) 00737 { mn = n; mc = 
getSalG(n,lev); } 00738 } 00739 if(p.j < h-1) 00740 { 00741 n = p + Point2D<int>(0,1); b = img.getVal(n); 00742 if(fabs(a - b)/a < .5 && b > 0.001) n = getIntSalPt(img, vis, n); 00743 if(mc < getSalG(n,lev)) 00744 { mn = n; mc = getSalG(n,lev); } 00745 } 00746 00747 return mn; 00748 } 00749 00750 // ###################################################################### 00751 float getSalG(Point2D<int> p, int lev) 00752 { 00753 float max = 0.0; 00754 for(int i = 0; i< NUM_DIR; i++) 00755 { 00756 if(currGaPyr[i][lev].getVal(p) > max) 00757 max = currGaPyr[i][lev].getVal(p); 00758 } 00759 return max; 00760 } 00761 00762 // ###################################################################### 00763 XWinManaged *tXW; int txwC = 0; 00764 void getLocalSalPt(Point2D<int> pWinner, int max, 00765 Point2D<int> **pt, double **ang, int *np) 00766 { 00767 // get the region of interest at the primeLev 00768 *np = 0; 00769 Image<float> temp = pyrImg[primeLev]; 00770 00771 //--- 00772 // int w = temp.getWidth(), h = temp.getHeight(); 00773 // if(txwC == 0) 00774 // {tXW= new XWinManaged(Dims(2*3*w, 3*h), 0,0,"GetFaceProb"); wList.add(*tXW); txwC = 1;} 00775 //--- 00776 // FIX THE RECTANGLE ERROR 00777 // get rectangle bounding the local neighborhood 00778 Rectangle r = getWindow(pyrImg[primeLev], pWinner, 40); 00779 r = Rectangle::tlbrO(r.top(),r.left(), 00780 (r.bottomO()/4)*4,(r.rightO()/4)*4); 00781 printf("r(%d,%d,%d,%d) = PI[%d,%d],pW(%d,%d) \n", 00782 r.top(),r.left(),r.bottomI(),r.rightI(), 00783 pyrImg[primeLev].getWidth(),pyrImg[primeLev].getHeight(),pWinner.i,pWinner.j); 00784 00785 Image<float> sumS; 00786 Image<float> sMap; sMap.resize(r.width(),r.height(),true); 00787 00788 Image<float> gI[NUM_DIR]; 00789 for(int i = 0; i< NUM_DIR; i++) gI[i].resize(r.width(),r.height(),true); 00790 00791 // center-surround to localize sharpest edges 00792 for(int c = 0; c < 3; c++) 00793 for(int s = c+1; s< 3; s++) 00794 { 00795 for(int i = 0; i < NUM_DIR; i++) 00796 { 00797 // NOTE: using crop() from Image/CutPaste.H instead of copy(), 00798 // since copy() was a dup of crop(): 00799 gI[i] = crop(zoomXY(currGaPyr[i][primeLev+c],int(pow(2.0,c)),-1),r) - 00800 crop(zoomXY(currGaPyr[i][primeLev+s],int(pow(2.0,s)),-1),r) ; 00801 } 00802 00803 sumS = gI[0]+gI[1]+gI[2]+gI[3]; 00804 inplaceNormalize(sumS, 0.0f, 255.0f); 00805 sumS = squared(squared(sumS)); 00806 sMap+= sumS; 00807 00808 //--- 00809 // tXW->drawImage(zoomXY(sumS,2*2,-1),2*w,r.height()*4); 00810 // tXW->drawImage(zoomXY(sMap,2*2,-1),2*w+r.width()*4,r.height()*4); 00811 // Raster::waitForKey(); 00812 //---- 00813 } 00814 00815 //---- 00816 // Image<float> temp2 = crop(temp,r); 00817 // inplaceNormalize(temp2, 0,255.0); 00818 // drawRect(temp, r, 255,1); 00819 // tXW->drawImage(zoomXY(temp ,2*1,-1),0,0); 00820 // tXW->drawImage(zoomXY(temp2,2*2,-1),2*w,0); 00821 // tXW->drawImage(zoomXY(sumS,2*2,-1),2*w,r.height()*4); 00822 // Raster::waitForKey(); 00823 //--- 00824 00825 // get each salient points 00826 // crop out the whole region belonging to that salient point 00827 sMap = squared(sMap); 00828 inplaceNormalize(sMap, 0.0f, 255.0f); 00829 float t; 00830 for(int i = 0; i < max; i++) 00831 { 00832 findMax(sMap, (*pt)[i],t); 00833 00834 // break when saliency value is below the chosen constant 00835 if(t < 20.0) break; 00836 crop(sMap,(*pt)[i]); 00837 00838 // do not accept points that are to close to other salient points 00839 for(int j = 0; j < i; j++) 00840 if((*pt)[j].distance((*pt)[i]) < 6.0) 00841 { 00842 j = i+1; // break from the for loop 00843 
i--;(*np)--; 00844 } 00845 00846 (*ang)[i] = getAng((*pt)[i]+Point2D<int>(r.left(),r.top())); 00847 00848 //--- 00849 // printf("I: %d i: %d j: %d val: %f\n", 00850 // i,(*pt)[i].i,(*pt)[i].j, t); 00851 // temp2.setVal((*pt)[i],2*255.0); 00852 // tXW->drawImage(zoomXY(sMap,2*2,-1),2*w+r.width()*4,r.height()*4); 00853 // tXW->drawImage(zoomXY(temp2,2*2,-1),2*w,0); 00854 // Raster::waitForKey(); 00855 //--- 00856 00857 (*np)++; 00858 } 00859 // add the offset to make points be locations in whole image 00860 for(int i = 0; i< *np; i++) (*pt)[i] = (*pt)[i] + Point2D<int>(r.left(),r.top()); 00861 00862 // printf("getLocalSalPt ");Raster::waitForKey(); 00863 } 00864 00865 // ###################################################################### 00866 // recursive cropping of current salient neighboorhood 00867 void crop(Image<float>& img, Point2D<int> p) 00868 { 00869 // crop current location 00870 float a = img.getVal(p); 00871 float b; 00872 img.setVal(p,0.0); 00873 int w = img.getWidth(); int h = img.getHeight(); 00874 00875 // recurse after checking boundary condition 00876 // crop only if the change in value is not too steep 00877 if(p.i > 0) 00878 { 00879 b = img.getVal(p + Point2D<int>(-1,0)); 00880 if(fabs(a - b)/a < .5 && b > 0.001) crop(img, p+Point2D<int>(-1,0)); 00881 } 00882 if(p.i < w-1) 00883 { 00884 b = img.getVal(p + Point2D<int>(1,0)); 00885 if(fabs(a - b)/a < .5 && b > 0.001) crop(img, p+Point2D<int>(1,0)); 00886 } 00887 if(p.j > 0) 00888 { 00889 b = img.getVal(p + Point2D<int>(0,-1)); 00890 if(fabs(a - b)/a < .5 && b > 0.001) crop(img, p+Point2D<int>(0,-1)); 00891 } 00892 if(p.j < h-1) 00893 { 00894 b = img.getVal(p + Point2D<int>(0,1)); 00895 if(fabs(a - b)/a < .5 && b > 0.001) crop(img, p+Point2D<int>(0,1)); 00896 } 00897 } 00898 00899 // ###################################################################### 00900 double getAng(Point2D<int> p) 00901 { 00902 // get the dominant orientation of the current location 00903 int iMax = 0; float max = 0.0; 00904 for(int i = 0; i< NUM_DIR; i++) 00905 { 00906 if(currGaPyr[i][primeLev].getVal(p) > max) 00907 {max = currGaPyr[i][primeLev].getVal(p); iMax = i;} 00908 } 00909 return iMax*M_PI/NUM_DIR; 00910 } 00911 00912 // ###################################################################### 00913 void getContextPt(Point2D<int> *attPt, float *dir, double **pProb, int n, int max, 00914 Point2D<int> **pt, double **ang, int *np) 00915 { 00916 // use the last attended point 00917 int i = n-1; 00918 00919 //printf("getContextPt n:%d max:%d np:%d \n",n,max,*np); 00920 //printf("PPROB[%d]: (%5.3f,%5.3f,%5.3f)\n", 00921 // i, pProb[i][EYE], pProb[i][NOSE], pProb[i][MOUTH]); 00922 00923 float dx, dy; int dxR, dyR; Point2D<int> temp; 00924 Rectangle r = pyrImg[primeLev].getBounds(); 00925 00926 // if the likelihood of a part is above 20% 00927 // make sure this value is not too high 00928 // so that we won't discard reasonable locations 00929 if(pProb[i][EYE] > .08) 00930 { 00931 float c = cos(dir[i]); float s = sin(dir[i]); 00932 00933 // add points for the other eye 00934 dx = 24.0; dy = 0.0; 00935 dxR = int(round( dx*c + -dy*s)); 00936 dyR = int(round( dx*s + dy*c)); 00937 temp = attPt[i] + Point2D<int>(dxR,dyR); 00938 if(r.contains(temp) & (*np < max)) 00939 { 00940 (*pt)[*np] = temp; 00941 (*ang)[*np] = dir[i]; 00942 (*np)++; 00943 } 00944 dx = -24.0; dy = 0.0; 00945 dxR = int(round( dx*c + -dy*s)); 00946 dyR = int(round( dx*s + dy*c)); 00947 temp = attPt[i] + Point2D<int>(dxR,dyR); 00948 if(r.contains(temp) & (*np < max)) 00949 { 00950 
(*pt)[*np] = temp; 00951 (*ang)[*np] = dir[i]; 00952 (*np)++; 00953 } 00954 00955 // add points for the nose 00956 dx = 12.0; dy = 8.0; 00957 dxR = int(round( dx*c + -dy*s)); 00958 dyR = int(round( dx*s + dy*c)); 00959 temp = attPt[i] + Point2D<int>(dxR,dyR); 00960 if(r.contains(temp) & (*np < max)) 00961 { 00962 (*pt)[*np] = temp; 00963 (*ang)[*np] = fmod(dir[i]+M_PI/2.0, M_PI); 00964 (*np)++; 00965 } 00966 dx = -12.0; dy = 8.0; 00967 dxR = int(round( dx*c + -dy*s)); 00968 dyR = int(round( dx*s + dy*c)); 00969 temp = attPt[i] + Point2D<int>(dxR,dyR); 00970 if(r.contains(temp) & (*np < max)) 00971 { 00972 (*pt)[*np] = temp; 00973 (*ang)[*np] = fmod(dir[i]+M_PI/2.0, M_PI); 00974 (*np)++; 00975 } 00976 00977 // add points for the mouth 00978 dx = 12.0; dy = 28.0; 00979 dxR = int(round( dx*c + -dy*s)); 00980 dyR = int(round( dx*s + dy*c)); 00981 temp = attPt[i] + Point2D<int>(dxR,dyR); 00982 if(r.contains(temp) & (*np < max)) 00983 { 00984 (*pt)[*np] = temp; 00985 (*ang)[*np] = dir[i]; 00986 (*np)++; 00987 } 00988 dx = -12.0; dy = 28.0; 00989 dxR = int(round( dx*c + -dy*s)); 00990 dyR = int(round( dx*s + dy*c)); 00991 temp = attPt[i] + Point2D<int>(dxR,dyR); 00992 if(r.contains(temp) & (*np < max)) 00993 { 00994 (*pt)[*np] = temp; 00995 (*ang)[*np] = dir[i]; 00996 (*np)++; 00997 } 00998 } 00999 01000 if(pProb[i][NOSE] > .08) 01001 { 01002 float c = cos(fmod(dir[i]+M_PI/2.0, M_PI)); 01003 float s = sin(fmod(dir[i]+M_PI/2.0, M_PI)); 01004 01005 // add points for the other eye 01006 dx = 12.0; dy = -8.0; 01007 dxR = int(round( dx*c + -dy*s)); 01008 dyR = int(round( dx*s + dy*c)); 01009 temp = attPt[i] + Point2D<int>(dxR,dyR); 01010 if(r.contains(temp) & (*np < max)) 01011 { 01012 (*pt)[*np] = temp; 01013 (*ang)[*np] = fmod(dir[i]+M_PI/2.0, M_PI); 01014 (*np)++; 01015 } 01016 dx = -12.0; dy = -8.0; 01017 dxR = int(round( dx*c + -dy*s)); 01018 dyR = int(round( dx*s + dy*c)); 01019 temp = attPt[i] + Point2D<int>(dxR,dyR); 01020 if(r.contains(temp) & (*np < max)) 01021 { 01022 (*pt)[*np] = temp; 01023 (*ang)[*np] = fmod(dir[i]+M_PI/2.0, M_PI); 01024 (*np)++; 01025 } 01026 01027 // add points for the mouth 01028 dx = 0.0; dy = 20.0; 01029 dxR = int(round( dx*c + -dy*s)); 01030 dyR = int(round( dx*s + dy*c)); 01031 temp = attPt[i] + Point2D<int>(dxR,dyR); 01032 if(r.contains(temp) & (*np < max)) 01033 { 01034 (*pt)[*np] = temp; 01035 (*ang)[*np] = fmod(dir[i]+M_PI/2.0, M_PI); 01036 (*np)++; 01037 } 01038 } 01039 01040 if(pProb[i][MOUTH] > .08) 01041 { 01042 float c = cos(dir[i]); float s = sin(dir[i]); 01043 01044 // add points for the other eye 01045 dx = 12.0; dy = -28.0; 01046 dxR = int(round( dx*c + -dy*s)); 01047 dyR = int(round( dx*s + dy*c)); 01048 temp = attPt[i] + Point2D<int>(dxR,dyR); 01049 if(r.contains(temp) & (*np < max)) 01050 { 01051 (*pt)[*np] = temp; 01052 (*ang)[*np] = dir[i]; 01053 (*np)++; 01054 } 01055 dx = -12.0; dy = -28.0; 01056 dxR = int(round( dx*c + -dy*s)); 01057 dyR = int(round( dx*s + dy*c)); 01058 temp = attPt[i] + Point2D<int>(dxR,dyR); 01059 if(r.contains(temp) & (*np < max)) 01060 { 01061 (*pt)[*np] = temp; 01062 (*ang)[*np] = dir[i]; 01063 (*np)++; 01064 } 01065 01066 // add points for the nose 01067 dx = 0.0; dy = -20.0; 01068 dxR = int(round( dx*c + -dy*s)); 01069 dyR = int(round( dx*s + dy*c)); 01070 temp = attPt[i] + Point2D<int>(dxR,dyR); 01071 if(r.contains(temp) & (*np < max)) 01072 { 01073 (*pt)[*np] = temp; 01074 (*ang)[*np] = fmod(dir[i]+M_PI/2.0, M_PI); 01075 (*np)++; 01076 } 01077 } 01078 } 01079 01080 // 
###################################################################### 01081 float getPotVal(Point2D<int> *attPt, float *dir, double *pProb[NUM_FACE_PART], 01082 int n, Point2D<int> pt, float a, double tProb[NUM_FACE_PART]) 01083 { 01084 //printf("pt: (%d,%d) n: %d \n",pt.i,pt.j,n); 01085 //for(int i = 0; i < n; i++) 01086 // printf("attPt[%d]: (%d,%d,%f) \n",i,attPt[i].i,attPt[i].j,dir[i]); 01087 01088 // check if this location has been visited (CYCLE CHECK) 01089 // no point of revisiting that point 01090 for(int i = 0; i < n; i++) 01091 if(pt.distance(attPt[i]) <= 3.0) return 0.0; 01092 01093 // relate the point with each previously visited points 01094 float mVal = 0.0; 01095 for(int i = 0; i < n; i++) 01096 { 01097 // find the best fit relationship between the two points 01098 for(int o = 0; o < NUM_FACE_PART; o++) 01099 for(int c = 0; c < NUM_FACE_PART; c++) 01100 { 01101 float val = getGeomRelProb((FacePart)o,(FacePart)c,attPt[i],dir[i],pt,a); 01102 //printf("(%d,%d) -> %f * %f * %f = %f\n",o,c,val, 01103 // pProb[i][o], tProb[c], val * pProb[i][o] * tProb[c]); 01104 val *= pProb[i][o] * tProb[c]; 01105 01106 if (val > mVal) mVal = val; 01107 } 01108 //printf("i: %d\n",i); 01109 } 01110 01111 return mVal; 01112 } 01113 01114 // ###################################################################### 01115 float getGeomRelProb(FacePart o,FacePart c, Point2D<int> oPt, float oD, 01116 Point2D<int> cPt, float cA) 01117 { 01118 // get the geometric relationship between the two points 01119 int dx = cPt.i - oPt.i; int dy = cPt.j - oPt.j; 01120 float dist = cPt.distance(oPt); 01121 float dir = atan2(dy,dx) + oD; 01122 if(dir > M_PI) dir -= 2.0*M_PI; 01123 01124 // probability of the agreement of 01125 // the orientation of the two parts 01126 float pOri[NUM_DIR] = {0.35, 0.25, 0.005, 0.25}; 01127 01128 // probability relationship between the 2 parts 01129 // in direction, distance and agreement in part orientation 01130 // (aProb and dProb , respectively) 01131 switch(o) 01132 { 01133 case EYE: case L_EYE: case R_EYE: 01134 switch(c) 01135 { 01136 case EYE: case L_EYE: case R_EYE: 01137 { 01138 // probability of agreement of orientation of the two parts 01139 int dOri = int(round(4.0 * fabs(oD - cA)/M_PI)); 01140 01141 // probability of distance 01142 float mean = 24.0; float stdev = 6.0; 01143 float dProb = 1.0/(stdev*sqrt(2*M_PI))* 01144 pow(M_E,-1*pow(dist-mean, 2.0)/(2.0*stdev*stdev)); 01145 01146 // probability of direction 01147 float aProb; 01148 if(fabs(dir) <= M_PI/2.0) 01149 aProb = (-fabs(dir)/(M_PI/2) + 1.0)/2.0; 01150 else 01151 aProb = ((fabs(dir) - M_PI)/(M_PI/2) + 1.0)/2.0; 01152 01153 return pOri[dOri] * dProb * aProb; 01154 } 01155 case NOSE: 01156 { 01157 // probability of agreement of orientation of the two parts 01158 int dOri = int(round(4.0 * fabs(fmod(oD + M_PI/2.0, M_PI) - cA)/M_PI)); 01159 01160 // probability of distance 01161 float mean = 15.0; float stdev = 7.0; 01162 float dProb = 1.0/(stdev*sqrt(2*M_PI))* 01163 pow(M_E,-1*pow(dist-mean, 2.0)/(2.0*stdev*stdev)); 01164 01165 // probability of direction 01166 float nDir = fmod(fabs(dir),M_PI/2); 01167 float aProb = (-fabs(nDir-M_PI/4)/(M_PI/4) + 1.0)/4.0; 01168 01169 return pOri[dOri] * dProb * aProb; 01170 } 01171 case MOUTH: 01172 { 01173 // probability of agreement of orientation of the two parts 01174 int dOri = int(round(4.0 * fabs(oD - cA)/M_PI)); 01175 01176 // probability of distance 01177 float mean = 30.0; float stdev = 8.0; 01178 float dProb = 1.0/(stdev*sqrt(2*M_PI))* 01179 
pow(M_E,-1*pow(dist-mean,2.0)/(2.0*stdev*stdev)); 01180 01181 // probability of direction 01182 float nDir = fmod(fabs(dir),M_PI/2); 01183 float aProb = (-fabs(nDir-M_PI/4)/(M_PI/4) + 1.0)/4.0; 01184 01185 return pOri[dOri] * dProb * aProb; 01186 } 01187 } 01188 01189 case NOSE: 01190 switch(c) 01191 { 01192 case EYE: case L_EYE: case R_EYE: 01193 { 01194 // probability of agreement of orientation of the two parts 01195 int dOri = int(round(4.0 * fabs(fmod(oD + M_PI/2.0, M_PI) - cA)/M_PI)); 01196 01197 // probability of distance 01198 float mean = 15.0; float stdev = 6.0; 01199 float dProb = 1.0/(stdev*sqrt(2*M_PI))* 01200 pow(M_E,-1*pow(dist-mean,2.0)/(2.0*stdev*stdev)); 01201 01202 // probability of direction 01203 float nDir = fmod(fabs(dir),M_PI/2); 01204 float aProb = (-fabs(nDir-M_PI/4)/(M_PI/4) + 1.0)/4.0; 01205 01206 return pOri[dOri] * dProb * aProb; 01207 } 01208 case NOSE: 01209 { 01210 return 0.0; 01211 } 01212 case MOUTH: 01213 { 01214 // probability of agreement of orientation of the two parts 01215 int dOri = int(round(4.0 * fabs(fmod(oD + M_PI/2.0, M_PI) - cA)/M_PI)); 01216 01217 // probability of distance 01218 float mean = 20.0; float stdev = 5.0; 01219 float dProb = 1.0/(stdev*sqrt(2*M_PI))* 01220 pow(M_E,-1*pow(dist-mean, 2.0)/(2.0*stdev*stdev)); 01221 01222 // probability of direction 01223 float nDir = fabs(dir); 01224 float aProb = (-fabs(nDir-M_PI/2)/(M_PI/2) + 1.0)/2.0; 01225 01226 return pOri[dOri] * dProb * aProb; 01227 } 01228 } 01229 01230 case MOUTH: 01231 switch(c) 01232 { 01233 case EYE: case L_EYE: case R_EYE: 01234 { 01235 // probability of agreement of orientation of the two parts 01236 int dOri = int(round(4.0 * fabs(oD - cA)/M_PI)); 01237 01238 // probability of distance 01239 float mean = 30.0; float stdev = 6.0; 01240 float dProb = 1.0/(stdev*sqrt(2*M_PI))* 01241 pow(M_E,-1*pow(dist-mean, 2.0)/(2.0*stdev*stdev)); 01242 01243 // probability of direction 01244 float nDir = fmod(fabs(dir),M_PI/2); 01245 float aProb = (-fabs(nDir-M_PI/4)/(M_PI/4) + 1.0)/4.0; 01246 01247 return pOri[dOri] * dProb * aProb; 01248 } 01249 case NOSE: 01250 { 01251 // probability of agreement of orientation of the two parts 01252 int dOri = int(round(4.0 * fabs(fmod(oD + M_PI/2.0, M_PI) - cA)/M_PI)); 01253 01254 // probability of distance 01255 float mean = 20.0; float stdev = 5.0; 01256 float dProb = 1.0/(stdev*sqrt(2*M_PI))* 01257 pow(M_E,-1*pow(dist-mean, 2.0)/(2.0*stdev*stdev)); 01258 01259 // probability of direction 01260 float nDir = fabs(dir); 01261 float aProb = (-fabs(nDir-M_PI/2)/(M_PI/2) + 1.0)/2.0; 01262 01263 return pOri[dOri] * dProb * aProb; 01264 } 01265 case MOUTH: 01266 { 01267 return 0.0; 01268 } 01269 } 01270 } 01271 return 0.0; 01272 } 01273 01274 // ###################################################################### 01275 bool locateFace(Point2D<int> *attPt, float *dir, double *pProb[NUM_FACE_PART], 01276 int n) 01277 { 01278 if(n < 4) return false; 01279 //for(int i = 0; i < n; i++) 01280 // printf("PT[%d]: (%d,%d,%f) :(%5.3f,%5.3f,%5.3f) \n", 01281 // i,attPt[i].i, attPt[i].j,dir[i], 01282 // pProb[i][EYE],pProb[i][NOSE],pProb[i][MOUTH]); 01283 01284 // store the parts individually 01285 int nE = 0, nM = 0, nN = 0; 01286 Point2D<int> pE[n], pM[n], pN[n]; 01287 float dE[n], dM[n], dN[n]; 01288 01289 for(int i = 2; i < n; i++) 01290 { 01291 if(pProb[i][EYE] > .10) 01292 { pE[nE] = attPt[i]; dE[nE] = dir[i]; nE++; } 01293 01294 if(pProb[i][MOUTH] > .10) 01295 { pM[nM] = attPt[i]; dM[nM] = dir[i]; nM++; } 01296 01297 if(pProb[i][NOSE] > .0005 && 
fabs(dir[i] - M_PI/2)<.01 ) 01298 { pN[nN] = attPt[i]; dN[nN] = dir[i]; nN++; } 01299 } 01300 01301 printf("-+-+-+-(%d,%d,%d)\n",nE,nM,nN); 01302 // set face when the configuration of parts is satisfied 01303 for(int i = 0; i < nE-1; i++) 01304 for(int i2 = i+1; i2 < nE; i2++) 01305 for(int j = 0; j < nM; j++) 01306 for(int k = 0; k < nN; k++) 01307 { 01308 // the 2 eyes relationship 01309 float ee2 = getGeomRelProb(EYE, EYE , pE[i], dE[i], pE[i2], dE[i2]); 01310 01311 // the eye-mouth relationship 01312 float em = getGeomRelProb(EYE, MOUTH, pE[i], dE[i], pM[j], dM[j]); 01313 01314 // the eye-nose relationship 01315 float en = getGeomRelProb(EYE, NOSE , pE[i], dE[i], pN[k], dN[k]); 01316 01317 // the nose-mouth relationship 01318 float nm = getGeomRelProb(NOSE, MOUTH, pN[k], dN[k], pM[j], dM[j]); 01319 01320 printf("[%d,%d,%d,%d] ee2: %f, en: %f em: %f nm: %f\n", 01321 i,i2,k,j,ee2,en,em,nm); 01322 // condition for a face 01323 // condition threshold: 01324 // [Dir][Dist][ori] 01325 // [.05][ .1][ .2] = .001 01326 if(ee2 > .001 && em > .001 && en >.002) 01327 { 01328 eyeP = pE[i]; eyeD = 0.0;//dE[i]; 01329 eye2P = pE[i2]; eye2D = 0.0;//dE[i2]; 01330 mouthP = pM[j]; mouthD = 0.0;//dM[j]; 01331 noseP = pN[k]; noseD = dN[k]; 01332 return true; 01333 } 01334 } 01335 return false; 01336 } 01337 01338 // ###################################################################### 01339 void drawFace (Image<float>& img) 01340 { 01341 inplaceNormalize(img, 0.0f, 128.0f); 01342 01343 // draw the eyes 01344 drawCircle(img, eyeP , 2, 255.0f, 1); 01345 Point2D<int> ePt(int(5*cos(eyeD)),int(5*sin(eyeD))); 01346 drawLine(img, eyeP - ePt, eyeP + ePt, 255.0f, 1); 01347 drawCircle(img, eye2P, 2, 255.0f, 1); 01348 Point2D<int> e2Pt(int(5*cos(eye2D)),int(5*sin(eye2D))); 01349 drawLine(img, eye2P - e2Pt, eye2P + e2Pt, 255.0f, 1); 01350 01351 // draw the nose 01352 Point2D<int> nPt(int(10*cos(noseD)),int(10*sin(noseD))); 01353 drawLine(img, noseP - nPt, noseP + nPt, 255.0f, 1); 01354 01355 // draw the mouth 01356 drawCircle(img, mouthP , 5, 255.0f, 1); 01357 Point2D<int> mPt(int(8*cos(mouthD)),int(8*sin(mouthD))); 01358 drawLine(img, mouthP - mPt, mouthP + mPt, 255.0f, 1); 01359 01360 // draw the face border 01361 //drawCircle(img, noseP , 25, 255.0f, 1); 01362 } 01363 01364 // ###################################################################### 01365 // normalize image by subtracting it with mean and dividing by stdev 01366 Image<float> normalize(Image<float> img) 01367 { 01368 Image<float> tImg = img; 01369 float tMean = float(mean(img)); 01370 float tStdev = float(stdev(img)); 01371 tImg -= tMean; 01372 tImg /= tStdev; 01373 01374 return tImg; 01375 } 01376 01377 // ###################################################################### 01378 // setup input and output array for FFT 01379 void setupFFTW(Image<float> img) 01380 { 01381 int w = img.getWidth(), h = img.getHeight(); 01382 01383 fftw = new FFTWWrapper(w,h); 01384 input = new double[w*h]; 01385 fftw->init(input); 01386 01387 // setup output array 01388 outFftw = new double*[h]; 01389 for(int i = 0; i < h; i++) outFftw[i] = new double[w/2+1]; 01390 01391 // setup the log-gabor masks 01392 setupGaborMask(img); 01393 } 01394 01395 // ###################################################################### 01396 void setupGaborMask(Image<float> img) 01397 { 01398 // calculate the appropriate gabor filter constant 01399 int w = img.getWidth(), h = img.getHeight(); 01400 int dim = (w<h)?w:h; 01401 float stdev = float(floor((dim - 3.0)/2.0/sqrt(10.0))); 01402 
float period = float(stdev * 2.0); 01403 01404 // setup storage for gabor masks 01405 gaborMask = new double***[NUM_G_LEV]; 01406 for(int i = 0; i < NUM_G_LEV; i++) 01407 gaborMask[i] = new double**[NUM_G_DIR]; 01408 01409 //---- 01410 //XWinManaged *xwSGM = new XWinManaged(Dims(2*w,h),0, 0,"xwSGM"); 01411 //--- 01412 01413 // traverse through the different frequency and orientation 01414 float scale = 1.0; 01415 for(int i = 0; i < NUM_G_LEV; i++) 01416 { 01417 for(int j = 0; j < NUM_G_DIR; j++) 01418 { 01419 // setup the gabor kernel 01420 Image<float> gaborF = gaborFilter<float>(stdev/scale,period/scale,0, 01421 (j*180.0)/NUM_G_DIR); 01422 Image<float> temp (img.getDims(), ZEROS); 01423 inplacePaste(temp, gaborF, Point2D<int>(0,0)); 01424 01425 // allocate space for the solution 01426 gaborMask[i][j] = new double*[h]; 01427 for(int k = 0; k < h; k++) 01428 gaborMask[i][j][k] = new double[w/2+1]; 01429 01430 // perform the fourier transform and save the results 01431 fftCompute(temp, gaborMask[i][j]); 01432 gaborMaskImg[i][j] = getFftImage(gaborMask[i][j], w, h); 01433 01434 //--- 01435 //xwSGM->drawImage(temp,0,0); 01436 //xwSGM->drawImage(gaborMaskImg[i][j],w,0); 01437 //printf("xwSGM ");Raster::waitForKey(); 01438 //--- 01439 } 01440 scale *= 2; 01441 } 01442 } 01443 01444 // ###################################################################### 01445 // convert image to array to input to FFT 01446 void fftCompute(Image<float> img, double **outFft) 01447 { 01448 int w = img.getWidth(), h = img.getHeight(); 01449 01450 iptr = input; 01451 for(int j = 0; j < h; j++) 01452 for(int i = 0; i < w; i++) 01453 *iptr++ = (double)img.getVal(i,j); 01454 01455 fftw->compute(outFft); 01456 } 01457 01458 // ###################################################################### 01459 void getFeatureVector(double **outFft, Image<double> &res, int w, int h) 01460 { 01461 double sum = 0.0; int c = 0; 01462 01463 // go through each gabor masks 01464 for(int i = 0; i < NUM_G_LEV; i++) 01465 for(int j = 0; j < NUM_G_DIR; j++) 01466 { 01467 // weighted sum of fft for a feature 01468 sum = 0.0; 01469 for(int k = 0; k < h; k++) 01470 for(int l = 0; l < w/2+1; l++) 01471 sum += log(1.0 + gaborMask[i][j][k][l]) * log(1.0 + outFft[k][l]); 01472 res[c] = sum/(h*w/2+1); 01473 c++; 01474 } 01475 } 01476 01477 // ###################################################################### 01478 void getFeatureVector2 01479 (nub::soft_ref<StdBrain> brain, Image<double> &vec, int w, int h) 01480 { 01481 LFATAL("fixme"); 01482 /* 01483 int w_fm = w/4; 01484 int h_fm = h/4; 01485 01486 printf("getFeatureVector\n"); 01487 nub::soft_ref<VisualCortex> vc;/////////// = brain->getVC(); 01488 uint nc = vc->numChans(); 01489 printf("Num Channels: %d\n",nc); 01490 01491 nub::soft_ref<ColorChannel> cc; 01492 dynCastWeakToFrom(cc, vc->subChan("color")); 01493 printf("CC numChans: %d \n",cc->numChans()); 01494 RedGreenChannel& cc_rg = cc->rg(); 01495 BlueYellowChannel& cc_by = cc->by(); 01496 01497 nub::soft_ref<IntensityChannel> ic; 01498 dynCastWeakToFrom(ic, vc->subChan("intensity")); 01499 01500 nub::soft_ref<OrientationChannel> oc; 01501 dynCastWeakToFrom(oc, vc->subChan("orientation")); 01502 printf("OC numChans: %d \n",oc->numChans()); 01503 GaborChannel& oc_gc000 = oc->gabor(0); 01504 GaborChannel& oc_gc045 = oc->gabor(1); 01505 GaborChannel& oc_gc090 = oc->gabor(2); 01506 GaborChannel& oc_gc135 = oc->gabor(3); 01507 01508 // use numsubmap 01509 Image<float> img0 = oc_gc000.getImage(2); 01510 Image<float> img1 = 
oc_gc045.getImage(2); 01511 Image<float> img2 = oc_gc090.getImage(2); 01512 Image<float> img3 = oc_gc135.getImage(2); 01513 Image<float> img4 = cc_rg.getImage(2); 01514 Image<float> img5 = cc_by.getImage(2); 01515 Image<float> img6 = ic->getImage(2); 01516 XWinManaged *fWin; 01517 01518 fWin = new XWinManaged(Dims(w_fm,7*(h_fm)),100,100,"Image"); 01519 wList.add(*fWin); 01520 fWin->drawImage(img0,0,0); 01521 fWin->drawImage(img1,0,h_fm); 01522 fWin->drawImage(img2,0,2*h_fm); 01523 fWin->drawImage(img3,0,3*h_fm); 01524 fWin->drawImage(img4,0,4*h_fm); 01525 fWin->drawImage(img5,0,5*h_fm); 01526 fWin->drawImage(img6,0,6*h_fm); 01527 Raster::waitForKey(); 01528 */ 01529 } 01530 01531 // ###################################################################### 01532 int* getPrimeLev(Image<float> img) 01533 { 01534 // at this point just use the finest resolution 01535 int w = img.getWidth(), h = img.getHeight(); 01536 01537 //compute Fourier Transform of the image 01538 fftCompute(img, outFftw); 01539 01540 // compute the feature vector 01541 getFeatureVector(outFftw, gft, w, h); 01542 01543 // print feature vector 01544 // for(int i = 0; i < NUM_G_LEV; i++) 01545 // { 01546 // for(int j = 0; j < NUM_G_DIR; j++) 01547 // printf("%12.3f ",gft[i*NUM_G_DIR+j]); 01548 // printf("\n"); 01549 // } 01550 // printf("\n\n"); 01551 01552 // run the feature vector on the neural network 01553 Image<double> vout = ffn_pl->run3L(gft); 01554 float mVal; int m; double tLev[NUM_H_LEV]; 01555 int* probLev = new int[NUM_H_LEV]; 01556 01557 for(int i = 0; i < NUM_H_LEV; i++) 01558 tLev[i] = vout[i]; 01559 01560 for(int i = 0; i < NUM_H_LEV; i++) 01561 { 01562 mVal = tLev[0]; m = 0; 01563 for(int j = 1; j < NUM_H_LEV; j++) 01564 { 01565 if (mVal <= tLev[j]) 01566 { 01567 mVal = tLev[j]; m = j; 01568 } 01569 } 01570 probLev[i] = m; 01571 tLev[m] = 0.0; 01572 } 01573 01574 // display the image of the transform 01575 Image<float> ftImg = getFftImage(outFftw, w, h); 01576 float min,max; getMinMax(ftImg, min,max); 01577 printf("ftIMg: Min: %f, Max: %f\n",min,max); 01578 inplaceNormalize(ftImg, 0.0f, 255.0f); 01579 fftWin->drawImage(zoomXY(ftImg,1,-1),0,0); 01580 return probLev; 01581 } 01582 01583 // ###################################################################### 01584 Image<float> getFftImage(double **outFft, int w, int h) 01585 { 01586 // scaling the large dynamic range of the image Fourier Transform 01587 // using log: ln(1.0 + |F(i,j)|) 01588 // origin is centered to the middle of the image 01589 float ln2 = log(2.0); 01590 01591 // gamma correction 01592 float gc = 0.75; 01593 01594 // redraw the images 01595 Image<float> ftImg; ftImg.resize(w,h); 01596 for(int i = 0; i < w/2; i++) 01597 for(int j = 0; j < h/2; j++) 01598 ftImg.setVal(i+w/2, h/2-j, 01599 pow(log(1.0 + outFft[j][i+1])/ln2, gc)); 01600 01601 for(int i = 0; i < w/2; i++) 01602 for(int j = h/2; j < h; j++) 01603 ftImg.setVal(i+w/2, 3*h/2-1-j, 01604 pow(log(1.0 + outFft[j][i+1])/ln2, gc)); 01605 01606 for(int i = 0; i < w/2; i++) 01607 for(int j = 0; j < h; j++) 01608 ftImg.setVal(i, j, ftImg.getVal(w-1-i, h-1-j)); 01609 return ftImg; 01610 } 01611 01612 // ###################################################################### 01613 // free the old block of data whenever a new sample is in 01614 void freeFftwData(Image<float> img) 01615 { 01616 int h = img.getHeight(); 01617 01618 free(input); 01619 01620 for(int i = 0; i < h; i++) free(outFftw[i]); 01621 free(outFftw); 01622 01623 for(int i = 0; i < NUM_G_LEV; i++) 01624 for(int j = 0; j < 
NUM_G_DIR; j++) 01625 for(int k = 0; k < h; k++) 01626 free(gaborMask[i][j][k]); 01627 for(int i = 0; i < NUM_G_LEV; i++) 01628 for(int j = 0; j < NUM_G_DIR; j++) 01629 free(gaborMask[i][j]); 01630 for(int i = 0; i < NUM_G_LEV; i++) free(gaborMask[i]); 01631 free(gaborMask); 01632 } 01633 01634 // ###################################################################### 01635 XWinManaged *xwi[2][4]; int xwiC = 0; 01636 void getFacePartProb(Point2D<int> p, float ang, double *pPart) 01637 { 01638 ang = 0.0; 01639 01640 // set of gabor images for the inputted window 01641 Image<float> **gImg = new Image<float>*[2]; 01642 for(int i = 0; i < 2; i++) gImg[i] = new Image<float>[NUM_DIR]; 01643 01644 // use NOSE size (biggest part) + slack 01645 // to ensure window is big enough to capture a part 01646 01647 int w = 2+MODEL_NOSE_DIAGONAL, h = 2+MODEL_NOSE_DIAGONAL; 01648 // printf("****** p: (%d,%d), w: %d PI[%d,%d]\n", 01649 // p.i,p.j,w,pyrImg[primeLev].getWidth(),pyrImg[primeLev].getHeight()); 01650 Rectangle r = getWindow(pyrImg[primeLev], p, w); 01651 Rectangle r2 = 01652 Rectangle::tlbrI(r.top()/2 , r.left()/2 , 01653 r.top()/2+r.height()/2-1, r.left()/2+r.width()/2-1); 01654 01655 // if the image is cut off by the border, add zeros to the edges 01656 if(r.width() < w || r.height() < h || r2.width() < w/2 || r2.height() < h/2) 01657 { 01658 int ph = r.left() - p.i + w/2; 01659 int pv = r.top() - p.j + h/2; 01660 int ph2 = r2.left() - p.i/2 + w/4; 01661 int pv2 = r2.top() - p.j/2 + h/4; 01662 //--- 01663 // printf("PI.w: %d, PI.h: %d\n", 01664 // pyrImg[primeLev].getWidth(),pyrImg[primeLev].getHeight()); 01665 // printf("r.w: %d, r.h: %d,r2.w: %d, r2.h: %d\n", 01666 // r.width(),r.height(),r2.width(),r2.height()); 01667 // printf("p: (%d,%d), r.l: %d, r.t: %d, r.b: %d, r.r: %d, r2.l: %d, r2.t: %d r2.b: %d, r2.r: %d,w: %d, h: %d\n", 01668 // p.i,p.j,r.left(),r.top(), r.bottomI(),r.rightI(), 01669 // r2.left(),r2.top(),r2.bottomI(),r2.rightI(), w,h); 01670 // printf("ph: %d, pv: %d, ph2: %d, pv2: %d\n",ph,pv,ph2,pv2); 01671 01672 for(int i = 0; i < NUM_DIR; i++) 01673 { 01674 gImg[0][i].resize(w, h, true); 01675 for(int j = ph; j < ph + r.width(); j++) 01676 for(int k = pv; k < pv + r.height(); k++) 01677 gImg[0][i].setVal(j,k, 01678 currGaPyr[i][primeLev].getVal(r.left()+j-ph, r.top()+k-pv)); 01679 01680 gImg[1][i].resize(w/2, h/2, true); 01681 for(int j = ph2; j < ph2 + r2.width(); j++) 01682 for(int k = pv2; k < pv2 + r2.height(); k++) 01683 gImg[1][i].setVal(j,k, 01684 currGaPyr[i][primeLev+1].getVal(r2.left()+j-ph2, r2.top()+k-pv2)); 01685 } 01686 } 01687 // else just use the copy operation to fill gabor array to be inputted 01688 else 01689 for(int i = 0; i < NUM_DIR; i++) 01690 { 01691 gImg[0][i] = crop(currGaPyr[i][primeLev ], r ); 01692 gImg[1][i] = crop(currGaPyr[i][primeLev+1], r2); 01693 } 01694 01695 //--- 01696 // int scl = int(pow(2.0,2)); 01697 // for (int i = 0; i< 2; i++) 01698 // for (int j = 0; j< 4; j++) 01699 // { 01700 // if(xwiC == 0) 01701 // xwi[i][j] = new XWinManaged(Dims(scl*w,scl*h),scl*j*(h+6),scl*(i+3)*(w+4),"hi"); 01702 // xwi[i][j]->drawImage(zoomXY(gImg[i][j],scl*int(pow(2.0,i)),-1),0,0); 01703 // } 01704 // xwiC = 1; 01705 // Raster::waitForKey(); 01706 //-- 01707 01708 //--- 01709 // float min,max; 01710 // XWinManaged tXW(Dims(2*2*pyrImg[primeLev].getWidth(), 01711 // 2*pyrImg[primeLev].getHeight()), 0,0,"GetFaceProb"); 01712 // wList.add(tXW); 01713 01714 // Rectangle tr = Rectangle::tlbrI (h/2 - MODEL_NOSE_HEIGHT/2, w/2 - MODEL_NOSE_WIDTH/2, 01715 // h/2 + 
  // correct the gabor images (for the EYE and MOUTH angle):
  // corrective planar rotation to make the face upright
  int a = int(4*ang/M_PI);
  float cAng = ((4-a)%4)*M_PI/4.0;
  //printf("## Ang: %f -> %f a: %d\n\n",ang,cAng,a);
  correctGabors(gImg,cAng);

  //---
//   scl = int(pow(2.0,2));
//   for (int i = 0; i< 2; i++)
//     for (int j = 0; j< 4; j++)
//       xwi[i][j]->drawImage(zoomXY(gImg[i][j],scl*int(pow(2.0,i)),-1),0,0);
//   printf("EYE AND MOUTH "); Raster::waitForKey();
  //--

  // check if it is an eye
  Image<double> eFeat(1, RE_INPUT + 1, NO_INIT);
  Rectangle er =
    Rectangle::tlbrI (h/2 - MODEL_EYE_HEIGHT/2,     w/2 - MODEL_EYE_WIDTH/2,
                      h/2 + MODEL_EYE_HEIGHT/2 - 1, w/2 + MODEL_EYE_WIDTH/2 - 1);
  Rectangle er2 =
    Rectangle::tlbrI(er.top()/2                , er.left()/2               ,
                     er.top()/2+er.height()/2-1, er.left()/2+er.width()/2-1);

  // eye image array used as input
  Image<float> **eImg = new Image<float>*[2];
  for(int i = 0; i < 2; i++) eImg[i] = new Image<float>[NUM_DIR];
  for(int j = 0; j < NUM_DIR; j++)
    {
      eImg[0][j] = crop(gImg[0][j], er );
      eImg[1][j] = crop(gImg[1][j], er2);
    }
  getEyeFeature(eImg, eFeat);
  Image<double> evout = ffn_e->run3L(eFeat);
  pPart[EYE] = evout[0];

  // check if it is a mouth
  Image<double> mFeat(1, M_INPUT + 1, NO_INIT);
  Rectangle mr =
    Rectangle::tlbrI(h/2 - MODEL_MOUTH_HEIGHT/2,     w/2 - MODEL_MOUTH_WIDTH/2,
                     h/2 + MODEL_MOUTH_HEIGHT/2 - 1, w/2 + MODEL_MOUTH_WIDTH/2 - 1);
  Rectangle mr2 =
    Rectangle::tlbrI(mr.top()/2                , mr.left()/2               ,
                     mr.top()/2+mr.height()/2-1, mr.left()/2+mr.width()/2-1);

  // mouth image array used as input
  Image<float> **mImg = new Image<float>*[2];
  for(int i = 0; i < 2; i++) mImg[i] = new Image<float>[NUM_DIR];
  for(int j = 0; j < NUM_DIR; j++)
    {
      mImg[0][j] = crop(gImg[0][j], mr );
      mImg[1][j] = crop(gImg[1][j], mr2);
    }
  getMouthFeature(mImg, mFeat);
  Image<double> mvout = ffn_m->run3L(mFeat);
  pPart[MOUTH] = mvout[0];
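  // NOTE (editor's addition): the nose detector below is run on the gabor
  // stack after an extra pi/2 rotation, and then once more after a further
  // pi rotation (two additional pi/2 calls) so that an upside-down nose is
  // also tested; the larger of the two network outputs is kept in pPart[NOSE].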
  // correct the gabor images (for the NOSE angle)
  //a = int(4*tAng/M_PI);
  //cAng = ((4-a)%4)*M_PI/4.0;
  correctGabors(gImg,M_PI/2.0);
  //correctGabors(gImg,cAng);

  //---
//   scl = int(pow(2.0,2));
//   for (int i = 0; i< 2; i++)
//     for (int j = 0; j< 4; j++)
//       xwi[i][j]->drawImage(zoomXY(gImg[i][j],scl*int(pow(2.0,i)),-1),0,0);
//   printf("NOSE "); Raster::waitForKey();
  //--

  // check if it is a nose
  Image<double> nFeat(1, N_INPUT + 1, NO_INIT);
  Rectangle nr =
    Rectangle::tlbrI(h/2 - MODEL_NOSE_HEIGHT/2,     w/2 - MODEL_NOSE_WIDTH/2,
                     h/2 + MODEL_NOSE_HEIGHT/2 - 1, w/2 + MODEL_NOSE_WIDTH/2 - 1);
  Rectangle nr2 =
    Rectangle::tlbrI(nr.top()/2                , nr.left()/2               ,
                     nr.top()/2+nr.height()/2-1, nr.left()/2+nr.width()/2-1);

//   printf("t: %d, b: %d, l: %d, r: %d\n",
//          nr.top(),nr.bottomI(),nr.left(),nr.rightI());
//   printf("t: %d, b: %d, l: %d, r: %d\n",
//          nr2.top(),nr2.bottomI(),nr2.left(),nr2.rightI());
//   printf("Angle to rotate: %f\n",ang);

  // nose image array used as input
  Image<float> **nImg = new Image<float>*[2];
  for(int i = 0; i < 2; i++) nImg[i] = new Image<float>[NUM_DIR];
  for(int j = 0; j < NUM_DIR; j++)
    {
      nImg[0][j] = crop(gImg[0][j], nr );
      nImg[1][j] = crop(gImg[1][j], nr2);
    }

//   printRegion(nImg[0][0],0,11,1,0,13,1);
//   printf(" pr ");Raster::waitForKey();

  getNoseFeature(nImg, nFeat);
  Image<double> nvout = ffn_n->run3L(nFeat);
  pPart[NOSE] = nvout[0];
//   printf("NOSE: %f \n",pPart[NOSE]);

  // also test an inverted nose (in case the nose is upside down)
  correctGabors(gImg,M_PI/2.0);
  correctGabors(gImg,M_PI/2.0);

  //---
//   scl = int(pow(2.0,2));
//   for (int i = 0; i< 2; i++)
//     for (int j = 0; j< 4; j++)
//       xwi[i][j]->drawImage(zoomXY(gImg[i][j],scl*int(pow(2.0,i)),-1),0,0);
//   printf("NOSE "); Raster::waitForKey();
  //--

  for(int j = 0; j < NUM_DIR; j++)
    {
      nImg[0][j] = crop(gImg[0][j], nr );
      nImg[1][j] = crop(gImg[1][j], nr2);
    }
  getNoseFeature(nImg, nFeat);
  Image<double> n2vout = ffn_n->run3L(nFeat);
  if(pPart[NOSE] < n2vout[0])
    pPart[NOSE] = n2vout[0];

//   printf("p EYE:%5.3f, NOSE: %5.3f MOUTH:%5.3f \n",
//          pPart[EYE],pPart[NOSE],pPart[MOUTH]);
}
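// ######################################################################
// NOTE (editor's sketch, not in the original code): a minimal example of how
// a caller might consume the pPart[] array filled by getFacePartProb(),
// picking whichever part detector responded most strongly.  The helper name
// mostLikelyPart() is hypothetical and it is not called anywhere in this
// file; the real decision logic lives elsewhere in the face detection path.
static inline FacePart mostLikelyPart(const double *pPart)
{
  // argmax over the three part-detector network outputs
  FacePart best = EYE; double bestP = pPart[EYE];
  if(pPart[NOSE]  > bestP) { best = NOSE;  bestP = pPart[NOSE];  }
  if(pPart[MOUTH] > bestP) { best = MOUTH; bestP = pPart[MOUTH]; }
  return best;
}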
// ######################################################################
// frame a salient point with a rectangle to be used for part testing
Rectangle getWindow(Image<float> img, Point2D<int> p, int s)
{
  int w = img.getWidth(), h = img.getHeight();

  int t = (p.j - s/2     < 0    )? 0     : p.j - s/2;
  int l = (p.i - s/2     < 0    )? 0     : p.i - s/2;
  int b = (p.j + s/2 - 1 > h - 1)? h - 1 : p.j + s/2 - 1;
  int r = (p.i + s/2 - 1 > w - 1)? w - 1 : p.i + s/2 - 1;

  return Rectangle::tlbrI(t,l,b,r);
}

// ######################################################################
XWinManaged *xwCG[2][4];
void correctGabors(Image<float> **gImg, float ang)
{
//   int w = gImg[0][0].getWidth(), h = gImg[0][1].getHeight();
//   int scl = int(pow(2.0,2));
//   for (int i = 0; i< 2; i++)
//     for (int j = 0; j< 4; j++)
//       {
//         xwCG[i][j] = new XWinManaged(Dims(scl*w,scl*h),scl*i*(w+4),scl*j*(h+6),"hello");
//         xwCG[i][j]->drawImage(zoomXY(gImg[i][j],scl*int(pow(2.0,i)),-1),0,0);
//       }
//   Raster::waitForKey();

  // rotate each of the gabor outputs
  for(int j = 0; j < NUM_DIR; j++)
    {
      gImg[0][j] = rotate(gImg[0][j], MODEL_NOSE_DIAGONAL/2+1,
                          MODEL_NOSE_DIAGONAL/2+1    , ang);
      gImg[1][j] = rotate(gImg[1][j], (MODEL_NOSE_DIAGONAL/2+1)/2,
                          (MODEL_NOSE_DIAGONAL/2+1)/2, ang);
    }

//   for (int i = 0; i< 2; i++)
//     for (int j = 0; j< 4; j++)
//       xwCG[i][j]->drawImage(zoomXY(gImg[i][j],scl*int(pow(2.0,i)),-1),0,0);
//   Raster::waitForKey();

  // re-label the orientation channels to match the rotated images
  Image<float> tG[2][4];
  for (int i = 0; i< 2; i++)
    for (int j = 0; j< 4; j++)
      tG[i][j] = gImg[i][j];

  int i = int(round(ang*4/M_PI));
  for(int j = 0; j < NUM_DIR; j++)
    {
      gImg[0][j] = tG[0][(i+j)%4];
      gImg[1][j] = tG[1][(i+j)%4];
    }

//   for (int i = 0; i< 2; i++)
//     for (int j = 0; j< 4; j++)
//       xwCG[i][j]->drawImage(zoomXY(gImg[i][j],scl*int(pow(2.0,i)),-1),0,0);
//   Raster::waitForKey();
}

// ######################################################################
// get the face part features
void getFacePartFeatures(Rectangle r, FacePart part, Image<double> &features)
{
  switch(part)
    {
    case EYE:   getEyeFeature  (r, features); break;
    case NOSE:  getNoseFeature (r, features); break;
    case MOUTH: getMouthFeature(r, features); break;
    default:
      LFATAL("Unknown face part");
    }
}

// ######################################################################
void getEyeFeature(Rectangle r, Image<double> &features)
{
  //displayPartImage(currImg,r);
  Image<float> **img = new Image<float>*[2];
  for(int i = 0; i < 2; i++) img[i] = new Image<float>[NUM_DIR];

  Rectangle r2 =
    Rectangle::tlbrI(r.top()/2               , r.left()/2               ,
                     r.top()/2+r.height()/2-1, r.left()/2+r.width()/2-1 );

  // image array used as input
  for(int j = 0; j < NUM_DIR; j++)
    img[0][j] = crop(currGaPyr[j][primeLev  ], r );
  for(int j = 0; j < NUM_DIR; j++)
    img[1][j] = crop(currGaPyr[j][primeLev+1], r2);
  getEyeFeature(img, features);
}
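// ######################################################################
// NOTE (editor's sketch, not in the original code): the two-level sampling
// masks in the feature extractors below fill the feature vector with
// NUM_DIR * (totalF1 + totalF2) values, i.e. NUM_DIR*(24+8) for the eye,
// NUM_DIR*(44+17) for the nose and NUM_DIR*(32+16) for the mouth; presumably
// RE_INPUT, N_INPUT and M_INPUT match these products, and the extra "+ 1"
// entry in the vectors allocated in getFacePartProb() holds a bias term.
// The helper name partFeatureCount() is hypothetical and it is not called
// anywhere in this file.
static inline int partFeatureCount(int totalF1, int totalF2)
{
  // one sample per mask point per gabor orientation, over two pyramid levels
  return NUM_DIR * (totalF1 + totalF2);
}
// usage (illustrative only): partFeatureCount(24, 8) for the eye model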
// ######################################################################
void getEyeFeature(Image<float> **img, Image<double> &features)
{
  // the eye model is 12 by 6
  // we will extract information from specific points only
  // so as to reduce the number of inputs

  // for level 1
  //  ____________
  // |    XXXX    |
  // |X  X    X  X|
  // |X  X    X  X|
  // |X  X    X  X|
  // |X  X    X  X|
  // |____XXXX____|

  int totalF1 = 24;
  int xF1[24] = { 4, 5, 6, 7, 4, 5, 6, 7, 0, 3, 8,11,
                  0, 3, 8,11, 0, 3, 8,11, 0, 3, 8,11 };
  int yF1[24] = { 0, 0, 0, 0, 5, 5, 5, 5, 1, 1, 1, 1,
                  2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4 };

  // for level 2
  //  ______
  // |  XX  |
  // |X XX X|
  // |__XX__|

  int totalF2 = 8;
  int xF2[8] = { 2, 3, 0, 2, 3, 5, 2, 3 };
  int yF2[8] = { 0, 0, 1, 1, 1, 1, 2, 2 };

  int k = 0;
  // get features from all orientations of the gabor responses
  for(int n = 0; n < NUM_DIR; n++)
    {
      for(int i = 0; i< totalF1; i++)
        {
          features[k] = double(img[0][n].getVal(xF1[i], yF1[i]));
          k++;
        }

      for(int i = 0; i< totalF2; i++)
        {
          features[k] = double(img[1][n].getVal(xF2[i], yF2[i]));
          k++;
        }
    }
}

// ######################################################################
void getNoseFeature(Rectangle r, Image<double> &features)
{
  //displayPartImage(currImg,r);
  Image<float> **img = new Image<float>*[2];
  for(int i = 0; i < 2; i++) img[i] = new Image<float>[NUM_DIR];

  Rectangle r2 =
    Rectangle::tlbrI(r.top()/2               , r.left()/2               ,
                     r.top()/2+r.height()/2-1, r.left()/2+r.width()/2-1 );

  // image array used as input
  for(int j = 0; j < NUM_DIR; j++)
    img[0][j] = crop(currGaPyr[j][primeLev  ], r );
  for(int j = 0; j < NUM_DIR; j++)
    img[1][j] = crop(currGaPyr[j][primeLev+1], r2);
  getNoseFeature(img, features);
}

// ######################################################################
void getNoseFeature(Image<float> **img, Image<double> &features)
{
  // the nose model is 20 by 10
  // we will extract information from specific points only
  // so as to reduce the number of inputs

  // for level 1
  //  ______________
  // |              |
  // |              |
  // |      XX      |
  // |      XX      |
  // |      XX      |
  // |      XX      |
  // |      XX      |
  // |      XX      |
  // |      XX      |
  // |      XX      |
  // |      XX      |
  // |      XX      |
  // |   XXXXXXXX   |
  // |   X      X   |
  // |   X      X   |
  // |   X      X   |
  // |   X      X   |
  // |   XXXXXXXX   |
  // |              |
  // |              |
  // |______________|

  int totalF1 = 44;
  int xF1[44] = { 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7,
                  3, 4, 5, 6, 7, 8, 9,10, 3,10, 3,10, 3,10, 3,10, 3, 4, 5, 6,
                  7, 8, 9,10 };
  int yF1[44] = { 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9,10,10,11,11,
                 12,12,12,12,12,12,12,12,13,13,14,14,15,15,16,16,17,17,17,17,
                 17,17,17,17 };

  // for level 2
  //  _______
  // |       |
  // |   X   |
  // |   X   |
  // |   X   |
  // |   X   |
  // |   X   |
  // | XXXXX |
  // | X   X |
  // | XXXXX |
  // |_______|

  int totalF2 = 17;
  int xF2[17] = { 3, 3, 3, 3, 3, 1, 2, 3, 4, 5, 1, 5, 1, 2, 3, 4, 5 };
  int yF2[17] = { 1, 2, 3, 4, 5, 6, 6, 6, 6, 6, 7, 7, 8, 8, 8, 8, 8 };

  int k = 0;

  // get features from all orientations of the gabor responses
  for(int n = 0; n < NUM_DIR; n++)
    {
      for(int i = 0; i< totalF1; i++)
        {
          features[k] = double(img[0][n].getVal(xF1[i], yF1[i]));
          k++;
        }

      for(int i = 0; i< totalF2; i++)
        {
          features[k] = double(img[1][n].getVal(xF2[i], yF2[i]));
          k++;
        }
    }
}
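// ######################################################################
// NOTE (editor's sketch, not in the original code): every part wrapper above
// and below derives the level-(primeLev+1) rectangle from the level-primeLev
// one with the same tlbrI() arithmetic.  The helper below only restates that
// repeated pattern; its name halfResRect() is hypothetical and it is not
// called anywhere in this file.
static inline Rectangle halfResRect(Rectangle r)
{
  // halve the top-left corner and the extent, keeping inclusive coordinates
  return Rectangle::tlbrI(r.top()/2                , r.left()/2               ,
                          r.top()/2+r.height()/2-1 , r.left()/2+r.width()/2-1 );
}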
// ######################################################################
void getMouthFeature(Rectangle r, Image<double> &features)
{
  //displayPartImage(currImg,r);
  Image<float> **img = new Image<float>*[2];
  for(int i = 0; i < 2; i++) img[i] = new Image<float>[NUM_DIR];

  Rectangle r2 =
    Rectangle::tlbrI(r.top()/2               , r.left()/2               ,
                     r.top()/2+r.height()/2-1, r.left()/2+r.width()/2-1 );

  // image array used as input
  for(int j = 0; j < NUM_DIR; j++)
    img[0][j] = crop(currGaPyr[j][primeLev  ], r );
  for(int j = 0; j < NUM_DIR; j++)
    img[1][j] = crop(currGaPyr[j][primeLev+1], r2);
  getMouthFeature(img, features);
}

// ######################################################################
void getMouthFeature(Image<float> **img, Image<double> &features)
{
  // the mouth model is 20 by 10
  // we will extract information from specific points only
  // so as to reduce the number of inputs

  // for level 1
  //  ____________________
  // |                    |
  // |                    |
  // |  X    XXXXXX    X  |
  // |  X      XX      X  |
  // |  X      XX      X  |
  // |  X      XX      X  |
  // |  X      XX      X  |
  // |  X    XXXXXX    X  |
  // |                    |
  // |____________________|

  int totalF1 = 32;
  int xF1[32] = { 2, 7, 8, 9,10,11,12,17, 2, 9,10,17, 2, 9,10,17,
                  2, 9,10,17, 2, 9,10,17, 2, 7, 8, 9,10,11,12,17 };
  int yF1[32] = { 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4,
                  5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7 };

  // for level 2
  //  __________
  // |          |
  // | X XXXX X |
  // | X  XX  X |
  // | X XXXX X |
  // |__________|

  int totalF2 = 16;
  int xF2[16] = { 1, 3, 4, 5, 6, 8, 1, 4, 5, 8, 1, 3, 4, 5, 6, 8 };
  int yF2[16] = { 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3 };

  int k = 0;

  // get features from all orientations of the gabor responses
  for(int n = 0; n < NUM_DIR; n++)
    {
      for(int i = 0; i< totalF1; i++)
        {
          features[k] = double(img[0][n].getVal(xF1[i], yF1[i]));
          k++;
        }

      for(int i = 0; i< totalF2; i++)
        {
          features[k] = double(img[1][n].getVal(xF2[i], yF2[i]));
          k++;
        }
    }
}

// ######################################################################
XWinManaged *partWin;
int start = 0;
void displayPartImage(Image<float> img, Rectangle pr)
{
  // for debugging
  if(start == 0)
    {
      partWin = new XWinManaged(img.getDims(),1000,700,"Image"); wList.add(*partWin);
      start = 1;
    }

  drawRect(img, pr, 255.0f, 1);
  partWin->drawImage(img,0,0);
  Raster::waitForKey();
}

// ######################################################################
// simple image printing procedure
void printRegion(Image<float> img,int sX,int eX,int dX, int sY,int eY, int dY)
{
  for(int j = sY; j<=eY; j+=dY)
    {
      for(int i = sX; i<=eX; i+=dX)
        printf("%8.3f ", img.getVal(i,j));
      printf(" \n");
    }
  printf("\n");
}

// ######################################################################
/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */