segmentImageMerge.C

Go to the documentation of this file.
00001 /*!@file VFAT/segmentImageMerge.C Basic image segmenter blob finder using color */
00002 
00003 // //////////////////////////////////////////////////////////////////// //
00004 // The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
00005 // University of Southern California (USC) and the iLab at USC.         //
00006 // See http://iLab.usc.edu for information about this project.          //
00007 // //////////////////////////////////////////////////////////////////// //
00008 // Major portions of the iLab Neuromorphic Vision Toolkit are protected //
00009 // under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
00010 // in Visual Environments, and Applications'' by Christof Koch and      //
00011 // Laurent Itti, California Institute of Technology, 2001 (patent       //
00012 // pending; application number 09/912,225 filed July 23, 2001; see      //
00013 // http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
00014 // //////////////////////////////////////////////////////////////////// //
00015 // This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
00016 //                                                                      //
00017 // The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
00018 // redistribute it and/or modify it under the terms of the GNU General  //
00019 // Public License as published by the Free Software Foundation; either  //
00020 // version 2 of the License, or (at your option) any later version.     //
00021 //                                                                      //
00022 // The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
00023 // that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
00024 // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
00025 // PURPOSE.  See the GNU General Public License for more details.       //
00026 //                                                                      //
00027 // You should have received a copy of the GNU General Public License    //
00028 // along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
00029 // to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
00030 // Boston, MA 02111-1307 USA.                                           //
00031 // //////////////////////////////////////////////////////////////////// //
00032 //
00033 // Primary maintainer for this file: T. Nathan Mundhenk <mundhenk@usc.edu>
00034 // $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/VFAT/segmentImageMerge.C $
00035 // $Id: segmentImageMerge.C 9412 2008-03-10 23:10:15Z farhan $
00036 //
00037 
00038 #include "VFAT/segmentImageMerge.H"
00039 
00040 #include "Util/Assert.H"
00041 
00042 #include <cstdio>
00043 #include <cstdlib>
00044 #include <iostream>
00045 
00046 //! initial vergance values
00047 #define CAMERAMU1         12.0F
00048 #define CAMERAMU2         23.0F
00049 #define CAMERAMU3         34.0F
00050 #define CAMERASIGMA1      33.0F
00051 #define CAMERASIGMA2      56.0F
00052 #define CAMERASIGMA3      67.0F
00053 
00054 //! width distance to center between cameras in inches
00055 /* e.g. take the distance between cameras and divide by half */
00056 #define DISTANCE          5
00057 
00058 //! maximum lose of tracks before color is reset
00059 #define LOTMAX            5
00060 
00061 //! How many iteration to calculate over for movement statistics
00062 #define ERRINTERVAL       5
00063 
//! Adapt the HSV tracking color from candidate blobs and draw debug overlays.
/*! Computes the mass-weighted mean HSV values (and their std deviations)
    over all blobs that survived tracking, draws bounding boxes and target
    circles into *imageHold (coordinates scaled by 4 because tracking runs
    on a twice-decimated image), updates the HSV bar graph in *auxHold
    unless 'fast' is set, and finally either resets the segmenter's color
    after more than LOTMAX lost-track frames or adapts it toward the
    measured mean, clamped to the hard bounds HL/HU, SL/SU, VL/VU.
    @param instance which tracker/segmenter instance to process */
void segmentImageMerge::colorProcessBlobs(int instance)
{
  float H1,S1,V1,Hs1,Ss1,Vs1;
  float mass;
  H1 = 0; S1 = 0; V1 = 0;
  Hs1 = 0; Ss1 = 0; Vs1 = 0;
  mass = 0;

  // iterate over all blobs and assess new HSV thresholds from
  // candidate blobs
  for(int i = 0; i < segment[instance].numberBlobs(); i++)
  {
    if(track[instance].isCandidate(i) == true)
    {
      float H2,S2,V2,Hs2,Ss2,Vs2;
      // get mean value for this candidate blob, these values will be used
      // to create new adaptive color
      segment[instance].getHSVvalueMean(i,&H2,&S2,&V2,&Hs2,&Ss2,&Vs2);

      // Add in HSV values for each blob times the blob size
      // to make larger blobs give more push on mean values
      H1 += H2 * segment[instance].getMass(i);
      S1 += S2 * segment[instance].getMass(i);
      V1 += V2 * segment[instance].getMass(i);
      Hs1 += Hs2 * segment[instance].getMass(i);
      Ss1 += Ss2 * segment[instance].getMass(i);
      Vs1 += Vs2 * segment[instance].getMass(i);
      // increment total candidate mass (the normalizer for the means)
      mass += segment[instance].getMass(i);

      // find boundaries of this blob
      int tt = segment[instance].getYmin(i);
      int bb = segment[instance].getYmax(i);
      int ll = segment[instance].getXmin(i);
      int rr = segment[instance].getXmax(i);


      // draw bounding box for this blob (x4: display image is full size,
      // blob coordinates come from the twice-decimated image)
      // Note: box must be of height > 1 and width > 1
      if((bb != tt) && (ll != rr))
        drawRect(*imageHold, Rectangle::tlbrI(tt*4,ll*4,bb*4,rr*4),
                 PixRGB<byte>(boxRed[instance],
                              boxGreen[instance],
                              boxBlue[instance]),1);

      // draw target circle for this blob, radius scaled by blob mass
      drawCircle(*imageHold, Point2D<int>((int)segment[instance].getCenterX(i)*4
                                     ,(int)segment[instance].getCenterY(i)*4)
                 ,(int)sqrt((double)segment[instance].getMass(i)),
                 PixRGB<byte>(circleRed[instance],
                              circleGreen[instance],
                              circleBlue[instance]),2);

      // small fixed red dot at the blob center
      drawCircle(*imageHold, Point2D<int>((int)segment[instance].getCenterX(i)*4
                                     ,(int)segment[instance].getCenterY(i)*4)
                 ,2,PixRGB<byte>(255,0,0),2);

    }
    else
    {
      // non-candidate blobs that were not killed by the tracker are drawn
      // with the circle color so they can be told apart on screen
      if(track[instance].wasKilledByTrack(i) == false)
      {
              // find boundaries of this blob
        int tt = segment[instance].getYmin(i);
        int bb = segment[instance].getYmax(i);
        int ll = segment[instance].getXmin(i);
        int rr = segment[instance].getXmax(i);


        // draw bounding box for this blob
        // Note: box must be of height > 1 and width > 1
        if((bb != tt) && (ll != rr))
          drawRect(*imageHold, Rectangle::tlbrI(tt*4,ll*4,bb*4,rr*4),
                   PixRGB<byte>(circleRed[instance],
                                circleGreen[instance],
                                circleBlue[instance]),1);
      }
    }
  }

  // draw background grid in HSV bar graph
  if(fast != true)
    drawGrid(*auxHold, 25,25,1,1,PixRGB<byte>(50,50,50));

  // draw HSV bar graph if blobs have any mass
  if(mass != 0)
  {
    // figure out HSV bar values (mass-weighted means)
    H1 = H1/mass;  S1 = S1/mass; V1 = V1/mass;
    Hs1 = Hs1/mass;  Ss1 = Ss1/mass; Vs1 = Vs1/mass;
    //std::cout << "H " << H1 << " S " << S1 << " V " << V1 << "\n";
    //std::cout << "Hs " << Hs1 << " Ss " << Ss1 << " Vs " << Vs1 << "\n";
    // bar-graph endpoints: +6 offsets everything below the 5-pixel margin,
    // saturation is stretched x100 since it lives in [0,1]
    float htemp = H1-Hs1+6;
    float stemp = ((S1-Ss1)*100)+6;
    float vtemp = V1-Vs1+6;
    // clamp so tlbrI never gets a top edge above the image
    if(htemp <= 1) htemp = 1;
    if(stemp <= 1) stemp = 1;
    if(vtemp <= 1) vtemp = 1;
    if(fast != true)
    {

      // draw HSV mean value bars as a bunch of rectangles
      drawRect(*auxHold, Rectangle::tlbrI(5,5,((int)H1+6),20),
               PixRGB<byte>(255,0,0),1);
      drawRect(*auxHold, Rectangle::tlbrI(5,25,(int)(S1*100)+6,40),
               PixRGB<byte>(0,255,0),1);
      drawRect(*auxHold, Rectangle::tlbrI(5,45,((int)V1+6),60),
               PixRGB<byte>(0,0,255),1);

      // draw standard deviation bars (mean-std .. mean+std)
      drawRect(*auxHold, Rectangle::tlbrI((int)(htemp),10,
                                   (int)(H1+Hs1+6),15),
               PixRGB<byte>(255,0,0),1);
      drawRect(*auxHold, Rectangle::tlbrI((int)(stemp),30,
                                   (int)(((S1+Ss1)*100)+6),35),
               PixRGB<byte>(0,255,0),1);
      drawRect(*auxHold, Rectangle::tlbrI((int)(vtemp),50,
                                   (int)(V1+Vs1+6),55),
               PixRGB<byte>(0,0,255),1);

    // draw total mass of all candidate blobs as a fraction of image area
    int massCalc = (int)((mass/(fimaHold->getWidth()*fimaHold->getHeight()))*450);
    drawRect(*auxHold, Rectangle::tlbrI(5,65,massCalc+6,80),
             PixRGB<byte>(255,0,255),1);
    }
  }

  if(fast != true)
  {
    // Draw hard constraint bars into HSV bar graph
    drawRect(*auxHold, Rectangle::tlbrI(((int)HL[instance]+5),12,
                                 ((int)HU[instance]+6),13),
             PixRGB<byte>(255,255,0),1);
    drawRect(*auxHold, Rectangle::tlbrI((int)(SL[instance]*100)+5,32,
                                 (int)(SU[instance]*100)+6,33),
             PixRGB<byte>(255,255,0),1);
    drawRect(*auxHold, Rectangle::tlbrI(((int)VL[instance]+5),52,
                                 ((int)VU[instance]+6),53),
             PixRGB<byte>(255,255,0),1);
  }

  // If loss of track is registered more than LOTMAX times, reset color
  // values to the start-up default values given via setTrackColor()
  if(track[instance].returnLOT() == true)
  {
    //LINFO("LOT on %d, Number %d",instance, LOTcount[instance]);
    if(LOTcount[instance] > LOTMAX)
    {
      segment[instance].setHue(H[instance],Hstd[instance],0);
      segment[instance].setSat(S[instance],Sstd[instance],0);
      segment[instance].setVal(V[instance],Vstd[instance],0);
      LOTcount[instance] = 0;
      track[instance].reset();
    }
    else
    {
      LOTcount[instance]++;
    }
  }
  else
  {

    //LINFO("Get Value %d - %f,%f,%f,%f,%f,%f",instance,H1,S1,V1,Hs1,Ss1,Vs1);
    float hadj, sadj, vadj;

    // if adaptive thresholding is turned on, the new threshold is the
    // measured std deviation scaled by the gain; otherwise the gain is
    // used directly as an absolute threshold
    if(HASTD[instance] == true)
      hadj = Hs1*HA[instance];
    else
      hadj = HA[instance];

    if(SASTD[instance] == true)
      sadj = Ss1*SA[instance];
    else
      sadj = SA[instance];

    if(VASTD[instance] == true)
      vadj = Vs1*VA[instance];
    else
      vadj = VA[instance];

    // if adaptation is true, adapt new color to mean values with new
    // standard deviation, but only within the hard boundary constraints
    if(adpt[instance] == true)
    {
      if(H1 > HU[instance]) H1 = HU[instance];
      if(H1 < HL[instance]) H1 = HL[instance];
      if(S1 > SU[instance]) S1 = SU[instance];
      if(S1 < SL[instance]) S1 = SL[instance];
      if(V1 > VU[instance]) V1 = VU[instance];
      if(V1 < VL[instance]) V1 = VL[instance];
      //LINFO("Set Value %d - %f,%f,%f,%f,%f,%f",instance,
      //  H1,hadj,S1,sadj,V1,vadj);
      segment[instance].setHue(H1,hadj,0);
      segment[instance].setSat(S1,sadj,0);
      segment[instance].setVal(V1,vadj,0);
    }
  }
}
00262 
00263 /************************/
00264 /* START PUBLIC METHODS */
00265 /***********************/
00266 
00267 // When called at the start, this will resize all the vectors we use
00268 segmentImageMerge::segmentImageMerge(int instances)
00269 {
00270   instanceNumber = instances;
00271   H.resize(instances,0);
00272   S.resize(instances,0);
00273   V.resize(instances,0);
00274   Hstd.resize(instances,0);
00275   Sstd.resize(instances,0);
00276   Vstd.resize(instances,0);
00277   HA.resize(instances,3);
00278   SA.resize(instances,3);
00279   VA.resize(instances,3);
00280   HU.resize(instances,360);
00281   SU.resize(instances,1);
00282   VU.resize(instances,255);
00283   HL.resize(instances,0);
00284   SL.resize(instances,0);
00285   VL.resize(instances,0);
00286   delay.resize(instances,0);
00287   cameraMovePan.resize(instances,90);
00288   cameraMoveTilt.resize(instances,90);
00289   cameraGotoPan.resize(instances,90);
00290   cameraGotoTilt.resize(instances,90);
00291   cameraMu.resize((instances-1),0);
00292   cameraSigma.resize((instances-1),0);
00293   meanMove.resize(instances,0);
00294 
00295   stdMove.resize(instances,0);
00296 
00297   std::vector<float> temp;
00298   temp.resize(ERRINTERVAL,0);
00299   moveRecord.resize(instances,temp);
00300   moveRecordGrad.resize(instances,temp);
00301   LOTcount.resize(instances,0);
00302   height.resize(instances,0);
00303   width.resize(instances,0);
00304   gotoX.resize(instances,0);
00305   gotoY.resize(instances,0);
00306   circleRed.resize(instances,0);
00307   circleGreen.resize(instances,0);
00308   circleBlue.resize(instances,0);
00309   boxRed.resize(instances,0);
00310   boxGreen.resize(instances,0);
00311   boxBlue.resize(instances,0);
00312   didCircleColor.resize(instances,0);
00313   didBoxColor.resize(instances,0);
00314   didTrackColor.resize(instances,0);
00315   recordCounter.resize(instances,0);
00316   adpt.resize(instances,true);
00317   HASTD.resize(instances,false);
00318   SASTD.resize(instances,false);
00319   VASTD.resize(instances,false);
00320   moveCamera.resize(instances,false);
00321   temp.resize(instances);
00322   Timer Ttemp;
00323   tim.resize(instances,Ttemp);
00324   //segmentImage stmp;
00325   //segment.resize(instances,stmp);
00326   segment = new segmentImage[instances];
00327   segmentImageTrack sttmp;
00328   track.resize(instances,sttmp);
00329   for(int i = 0; i < instances; i++)
00330   {
00331     track[i].setUpVars(1000);
00332     track[i].setImage(&segment[i]);
00333   }
00334   cameraMu[0] = CAMERAMU1;
00335   cameraMu[1] = CAMERAMU2;
00336   cameraMu[2] = CAMERAMU3;
00337   cameraSigma[0] = CAMERASIGMA1;
00338   cameraSigma[1] = CAMERASIGMA2;
00339   cameraSigma[2] = CAMERASIGMA3;
00340 }
00341 
00342 segmentImageMerge::~segmentImageMerge()
00343 {}
00344 
00345 void segmentImageMerge::setCircleColor(int r, int g, int b, int instance)
00346 {
00347   circleRed[instance] = r;
00348   circleBlue[instance] = g;
00349   circleGreen[instance] = b;
00350   didCircleColor[instance] = 1;
00351 }
00352 
00353 void segmentImageMerge::setBoxColor(int r, int g, int b, int instance)
00354 {
00355   boxRed[instance] = r;
00356   boxBlue[instance] = g;
00357   boxGreen[instance] = b;
00358   didBoxColor[instance] = 1;
00359 }
00360 
00361 void segmentImageMerge::setTrackColor(float h, float hstd,
00362                                       float s, float sstd,
00363                                       float v, float vstd,
00364                                       int instance, bool adapt, int avg)
00365 {
00366   H[instance] = h;
00367   S[instance] = s;
00368   V[instance] = v;
00369   Hstd[instance] = hstd;
00370   Sstd[instance] = sstd;
00371   Vstd[instance] = vstd;
00372   adpt[instance] = adapt;
00373   segment[instance].setHue(H[instance],Hstd[instance],0);
00374   segment[instance].setSat(S[instance],Sstd[instance],0);
00375   segment[instance].setVal(V[instance],Vstd[instance],0);
00376   didTrackColor[instance] = 1;
00377   segment[instance].setHSVavg(avg);
00378 }
00379 
00380 void segmentImageMerge::setAdapt(float ha, bool haSTD, float sa, bool saSTD,
00381                                  float va, bool vaSTD, int instance)
00382 {
00383   HA[instance] = ha;
00384   SA[instance] = sa;
00385   VA[instance] = va;
00386   HASTD[instance] = haSTD;
00387   SASTD[instance] = saSTD;
00388   VASTD[instance] = vaSTD;
00389 }
00390 
00391 void segmentImageMerge::setAdaptBound(float Hupper, float Hlower,
00392                    float Supper, float Slower,
00393                    float Vupper, float Vlower,
00394                    int instance)
00395 {
00396   HU[instance] = Hupper;
00397   SU[instance] = Supper;
00398   VU[instance] = Vupper;
00399   HL[instance] = Hlower;
00400   SL[instance] = Slower;
00401   VL[instance] = Vlower;
00402 }
00403 
00404 void segmentImageMerge::setCameraPosition(float pan, float tilt, int instance
00405                                           , bool stats)
00406 {
00407   // record camera movement
00408   cameraMovePan[instance] = pan;
00409   cameraMoveTilt[instance] = tilt;
00410   if(stats == true)
00411   {
00412     int doThisItem;
00413     float move = sqrt(pow(pan,2)+pow(tilt,2));
00414     if(recordCounter[instance] != 0)
00415     {
00416       doThisItem = instance - 1;
00417     }
00418     else
00419     {
00420       doThisItem = ERRINTERVAL - 1;
00421     }
00422 
00423     // Calculate finate state gradiant from last iteration to this one and record
00424     moveRecordGrad[instance][recordCounter[instance]] =
00425       move - moveRecord[instance][recordCounter[doThisItem]] ;
00426     moveRecord[instance][recordCounter[instance]] = move;
00427 
00428     float sumP = 0;
00429     float SSP = 0;
00430 
00431     // calcuate mean movements of camera servos
00432     for(int i = 0; i < ERRINTERVAL; i++)
00433     {
00434       sumP += moveRecordGrad[instance][i];
00435     }
00436     meanMove[instance] = sumP/ERRINTERVAL;
00437 
00438     // calculate standard deviation of camera servos
00439     for(int i = 0; i < ERRINTERVAL; i++)
00440     {
00441       SSP += pow((meanMove[instance] - moveRecordGrad[instance][i]),2);
00442     }
00443     stdMove[instance] = sqrt(SSP/ERRINTERVAL);
00444 
00445     //LINFO("CAM %d Move STD %f",instance,stdMove[instance]);
00446 
00447     // increment counter
00448     if(recordCounter[instance] < ERRINTERVAL)
00449       recordCounter[instance]++;
00450     else
00451       recordCounter[instance] = 0;
00452   }
00453 }
00454 
//! Set the working frame and true image size on one instance's segmenter.
/*! Pure delegation to segmentImage::setFrame.
    @param x1,y1 first corner of the region to segment
    @param x2,y2 opposite corner of the region to segment
    @param realX,realY true (undecimated) image dimensions
    NOTE(review): coordinate meanings are assumed from the names — confirm
    against segmentImage::setFrame. */
void segmentImageMerge::setFrame(int x1, int y1, int x2, int y2,
                                 int realX, int realY, int instance)
{
  segment[instance].setFrame(x1,y1,x2,y2,realX,realY);
}
00460 
/* This is a basic tracker access method that tracks on one image at a time */
//! Track the color target in a single input image.
/*! Decimates the input twice, segments it, computes blob centers, runs
    the tracker, then applies adaptive color thresholding and drawing via
    colorProcessBlobs().
    @param input full-size input frame
    @param image display image; boxes/circles are drawn into it (x4 scale)
    @param instance which tracker/segmenter instance to run
    @param auxImage display image for the HSV adaptation bar graph
    @param _fast if true, skip drawing the HSV bar graph */
void segmentImageMerge::trackImage(Image<PixRGB<byte> > input,
                                   Image<PixRGB<byte> > *image, int instance,
                                   Image<PixRGB<byte> > *auxImage, bool _fast)
{
  // Assert that parameters have been set up before starting
  fast = _fast;
  ASSERT(didCircleColor[instance] == 1);
  ASSERT(didBoxColor[instance] == 1);
  ASSERT(didTrackColor[instance] == 1);
  imageHold = image;
  auxHold = auxImage;

  Image< PixRGB<float> > fima;

  // decimate input image twice to speed things up
  fima = decXY(input);
  fima = decXY(fima);

  // NOTE(review): fimaHold points at the local 'fima'; it is only valid
  // during the colorProcessBlobs() call below and dangles after return.
  fimaHold = &fima;

  // segment the decimated image
  segment[instance].segment(fima);

  // get center of mass for blobs
  segment[instance].calcMassCenter();

  // edit blobs, weed out all the non-hackers who are not fit to carry a rifle
  track[instance].track();
  // apply adaptive color thresholding
  colorProcessBlobs(instance);

}
00494 
00495 // END
00496 
// the statistical multi image tracker we will build
//! Track the color target across several camera images at once.
/*! Runs segmentation and blob tracking on every instance, refreshes the
    camera-movement normalization statistics, recomputes the vergance
    gaussians, then applies vergance pruning and adaptive color per
    instance. Always runs in 'fast' mode, so no HSV bar graph is drawn.
    @param image one display/input image per camera instance
    @param instances number of cameras to process */
void segmentImageMerge::trackImageMulti(
                  std::vector<Image<PixRGB<byte> > > *image, int instances)
{
  fast = true;
  Image< PixRGB<float> > fima;

  for(int i = 0; i < instances; i++)
  {
    // every instance must have been fully configured first
    ASSERT(didCircleColor[i] == 1);
    ASSERT(didBoxColor[i] == 1);
    ASSERT(didTrackColor[i] == 1);

    imageHold = &(image->at(i));
    // decimate twice to speed things up (hence the x4 draw scaling)
    fima = decXY(image->at(i));
    fima = decXY(fima);

    // Color segment this instance
    segment[i].segment(fima);

    // get center of mass for blobs
    segment[i].calcMassCenter();

    // edit blobs, weed out all the non-hackers who are not fit to carry a rifle
    track[i].track(0);
  }

  moveMeanNormal = 0;
  moveStdNormal = 0;

  // Normalize over movement statistics to apply them in the next iterations
  for(int i = 0; i < instances; i++)
  {
    moveMeanNormal += meanMove[i];
    moveStdNormal += stdMove[i];
  }
  //avoid divide by zero error
  moveMeanNormal += .000001;
  moveStdNormal += .000001;
  // refresh vergance gaussians (target distance 48, gaussian base 36)
  updateVergance(48,36);

  for(int i = 0; i < instances; i++)
  {
    imageHold = &(image->at(i));
    //compute vergance springs for each camera
    verganceSpring(instances,i,true);

    // apply adaptive color thresholding
    colorProcessBlobs(i);
  }
}
00548 
00549 void segmentImageMerge::mergeImages(Image<PixRGB<byte> > *image)
00550 {
00551   mergeGotoX = 0; mergeGotoY = 0;
00552   int mergeCount = 0;
00553   for(int i = 0; i < instanceNumber; i++)
00554   {
00555     gotoX[i] = track[i].getObjectX();
00556     gotoY[i] = track[i].getObjectY();
00557     if(track[i].returnLOT() == false)
00558     {
00559       mergeGotoX += gotoX[i];
00560       mergeGotoY += gotoY[i];
00561       mergeCount++;
00562     }
00563   }
00564   if(mergeCount != 0)
00565   {
00566     mergeGotoX = mergeGotoX/mergeCount;
00567     mergeGotoY = mergeGotoY/mergeCount;
00568   }
00569   drawCircle(*image, Point2D<int>((int)mergeGotoX*4
00570                              ,(int)mergeGotoY*4)
00571              ,10,PixRGB<byte>(255,0,0),2);
00572 }
00573 
00574 void segmentImageMerge::updateVergance(float distance, float gaussBase)
00575 {
00576   for(int i = 0; i < (instanceNumber-1); i++)
00577   {
00578     //this is the angle to the target from two apposing cameras.
00579     //Mu is then the differenc between these angles and 90 * 2
00580     cameraMu[i] = 2*(90-(((2*atan(distance/(DISTANCE*(i+1))))/3.14159)*90));
00581     // the base angle for something at three feet from target
00582     // i.e. make the gaussian three feet in diameters
00583     float baseAngle = 2*(90-(((2*atan((distance-gaussBase)
00584                                       /(DISTANCE*(i+1))))/3.14159)*90));
00585     cameraSigma[i] = fabs(baseAngle-cameraMu[i]);
00586     //LINFO("UPDATE VERGANCE camera %d, Mu %f STD %f",i,cameraMu[i],cameraSigma[i]);
00587   }
00588 }
00589 
//! Pull one camera's track toward agreement (vergance) with the others.
/*! Two modes:
    - If camera 'current' holds a track (and doTracked is set): each
      candidate blob is scored by how well the pan/tilt needed to center
      it agrees with every other camera's pose, using gaussians centered
      on cameraMu per camera separation and weighted by each camera's
      movement-smoothness (1 - stdMove/moveStdNormal). Only the
      highest-scoring blob remains a candidate (argmax); all others are
      turned off.
    - If the camera has been in loss-of-track for more than LOTMAX frames:
      it is flagged to move, and cameraGotoPan/Tilt are set to the average
      pose implied by the cameras that are still tracking, offset by the
      expected vergance angle for their separation.
    @param instances total number of cameras
    @param current the camera to process
    @param doTracked whether to run the candidate-pruning mode */
void segmentImageMerge::verganceSpring(int instances, int current, bool doTracked)
{

  float theta, phi;
  int seperation;


  int maxBlob = -1;
  float maxBlobVal = 0;

  // if we didn't lose track and we want to vergance on cameras
  // that are tracking...
  if((track[current].returnLOT() == false) && (doTracked == true))
  {
    moveCamera[current] = false;

    // for each blob this camera is tracking do
    for(int x = 0; x < segment[current].numberBlobs(); x++)
    {
      // check to make sure we haven't already disqualified this blob
      if(track[current].isCandidate(x) == true)
      {
        track[current].pVergance[x] = 0;
        // calculate the angle to the target blob being analyzed at the
        // moment (x8: blob coords come from a twice-decimated image,
        // NOTE(review): the 480/Ypixel flip assumes a fixed frame height
        // — confirm against the camera configuration)
        float gotoCY = fabs((480 - segment[current].getCenterY(x)*8)
                           - camera.Ypixel);

        // degrees-per-pixel conversion factors
        float panConv = ((float)camera.Xfield/(float)camera.Xpixel);
        float tiltConv = ((float)camera.Yfield/(float)camera.Ypixel);

        // pixel offset of the blob from the image center
        float panOff = ((float)camera.Xpixel*.5)-
          segment[current].getCenterX(x)*8;
        float tiltOff = ((float)camera.Ypixel*.5)-gotoCY;

        // pose this camera would move to in order to center the blob
        float travelPan = cameraMovePan[current] +
          ((panOff*panConv)*camera.fieldAdjustmentX);
        float travelTilt = cameraMoveTilt[current] +
          ((tiltOff*tiltConv)*camera.fieldAdjustmentY);

        // cycle over other camera positions
        //and calculate the p of vergance for this camera
        for(int j = 0; j < instances; j++)
        {
          if(j != current)
          {
            // pan disagreement is signed by which side the other camera
            // sits on; tilt disagreement is symmetric
            if(j < current)
              theta = travelPan - cameraMovePan[j];
            else
              theta = cameraMovePan[j] - travelPan;

            phi = fabs(travelTilt - cameraMoveTilt[j]);
            seperation = abs(current - j);

            // p += vergance(tilt,cam(x))*vergance(pan,cam(x)),
            // weighted by how smoothly camera j has been moving
            track[current].pVergance[x] +=
              (Stats.gauss(theta,cameraMu[seperation-1],
                           cameraSigma[seperation-1])
               *Stats.gauss(phi,0.0F,21.0F))*(1-(stdMove[j]/moveStdNormal));

          }
        }
        // if I have the highest P of all the blobs in this
        // instance (camera) so far, I win. Take argmax
        if(track[current].pVergance[x] >= maxBlobVal)
        {
          if(maxBlob != -1)
          {
            // turn off the previous best blob, it's no good
            track[current].setCandidate(maxBlob,false);
          }
          maxBlob = x;
          // set this blob as the best one
          maxBlobVal = track[current].pVergance[x];
        }
        else
        {
          // turn off this blob, it's not better than anyone
          track[current].setCandidate(x,false);
        }
      }
    }
  }
  else
  {
    // this camera is in a LOT, send it to a vergance coordinate;
    if(LOTcount[current] > LOTMAX)
    {
      moveCamera[current] = true;
      float doPan = 0;
      float doTilt = 0;
      int normal = 0;

      // for all cameras not in LOT, go to the average vergance
      // over those cameras
      // e.g. your vergance should reflect the ones tracking
      for(int k = 0; k < instances; k++)
      {

        if((k != current) && (track[k].returnLOT() == false))
        {
          seperation = abs(current - k);
          // you should converge to another camera based upon the
          // P derived from the gradient of its
          // movement. This is, cameras that are more fluid have more influence.
          if(k < current)
          {
            doPan += cameraMovePan[k]*(1 - stdMove[k]/moveStdNormal)
              + cameraMu[seperation-1];

          }
          else
          {
            doPan += cameraMovePan[k]*(1 - stdMove[k]/moveStdNormal)
              - cameraMu[seperation-1];
          }
          doTilt += cameraMoveTilt[k]*(1 - stdMove[k]/moveStdNormal);
          normal++;
        }
      }
      if(normal != 0)
      {
        // if we can be biased by at least one camera do this
        cameraGotoPan[current] = doPan/normal;
        cameraGotoTilt[current] = doTilt/normal;
      }
    }
  }
}
00718 
//! Get the cached target coordinates for one camera instance.
/*! @param x,y filled with the target position (decimated-image coords).
    NOTE(review): gotoX/gotoY are refreshed inside mergeImages(), so this
    returns stale values until that has been called. */
void segmentImageMerge::getImageTrackXY(int *x, int *y, int instance)
{
  *x = gotoX[instance];
  *y = gotoY[instance];
}
00724 
//! Get the target coordinates straight from one instance's tracker.
/*! Unlike getImageTrackXY(), this queries the tracker directly and does
    not depend on mergeImages() having run.
    @param x,y filled with the tracker's current object position */
void segmentImageMerge::getImageTrackXY2(int *x, int *y, int instance)
{
   *x = track[instance].getObjectX();
   *y = track[instance].getObjectY();
}
00730 
//! Get the merged (averaged over tracking cameras) target coordinates.
/*! Valid after mergeImages() has been called.
    @param x,y filled with the merged target position */
void segmentImageMerge::getImageTrackXYMerge(int *x, int *y)
{
  *x = mergeGotoX;
  *y = mergeGotoY;
}
00736 
//! Report whether an instance is currently in loss of track (LOT).
/*! @param instance which tracker to query
    @return true if the tracker has lost its target */
bool segmentImageMerge::returnLOT(int instance)
{
  return track[instance].returnLOT();
}
00741 
//! Return a weight expressing how smoothly this camera has been moving.
/*! Computed as 1 - stdMove/moveStdNormal: cameras whose movement gradient
    varies less than the group total score closer to 1. Only meaningful
    after trackImageMulti() has refreshed moveStdNormal. */
float segmentImageMerge::returnCameraProb(int instance)
{
  return (1-stdMove[instance]/moveStdNormal);
}
00746 
//! Get the pan/tilt a camera in extended loss-of-track should move to.
/*! @param instance camera to query
    @param doPan filled with the target pan angle
    @param doTilt filled with the target tilt angle
    @return true if verganceSpring() decided this camera should be moved
    to the vergance pose computed from the still-tracking cameras */
bool segmentImageMerge::doMoveCamera(int instance, float *doPan, float *doTilt)
{
  *doPan = cameraGotoPan[instance];
  *doTilt = cameraGotoTilt[instance];

  return moveCamera[instance];
}
00755 
//! Return the normalized candidate-blob image for one instance.
/*! Pure delegation to segmentImage::returnNormalizedCandidates. */
Image<byte> segmentImageMerge::returnCandidateImage(int instance)
{
  return segment[instance].returnNormalizedCandidates();
}
00760 
00761 bool segmentImageMerge::StereoMatch(PixelPoint points[2],
00762                                     CameraParams params[2],
00763                                     Point3D* retPoint)
00764 {
00765     float PI = 3.14159;
00766     float deg2rad = PI/180.0;
00767 
00768     //define the std deviations of the error function(gaussian) for
00769     //various parameters
00770     Point3D* P = (Point3D*)calloc(2, sizeof(Point3D));
00771     P[0] = Point3D(0.0, 0.0, 0.0);
00772     P[1] = Point3D(0.0, 0.0, 0.0);
00773 
00774     Point3D* f = (Point3D*)calloc(2, sizeof(Point3D));
00775     f[0] = Point3D(0.0, 0.0, 0.0);
00776     f[1] = Point3D(0.0, 0.0, 0.0);
00777 
00778     //get ideal case values and max error values
00779     for(int i=0; i<2; i++)
00780     {
00781       P[i].x = params[i].x + params[i].r*sin(params[i].theta*deg2rad)
00782         * cos(params[i].phi*deg2rad)
00783         + points[i].x * cos(params[i].theta*deg2rad)
00784         * cos(params[i].phi*deg2rad)
00785         - points[i].y * sin(params[i].phi*deg2rad);
00786 
00787       P[i].y = params[i].y + params[i].r*sin(params[i].theta*deg2rad)
00788         * sin(params[i].phi*deg2rad)
00789         + points[i].x * cos(params[i].theta*deg2rad)
00790         * sin(params[i].phi*deg2rad)
00791         + points[i].y * cos(params[i].phi*deg2rad);
00792 
00793       P[i].z = params[i].z + params[i].r*cos(params[i].theta*deg2rad)
00794         - points[i].x * sin(params[i].theta*deg2rad);
00795 
00796       f[i].x = params[i].x +params[i].r*sin(params[i].theta*deg2rad)
00797         * cos(params[i].phi*deg2rad)
00798         + params[i].f * sin(params[i].theta*deg2rad)
00799         * cos(params[i].phi*deg2rad);
00800 
00801       f[i].y = params[i].y + params[i].r*sin(params[i].theta*deg2rad)
00802         * sin(params[i].phi*deg2rad)
00803         + params[i].f * sin(params[i].theta*deg2rad)
00804         * sin(params[i].phi*deg2rad);
00805 
00806       f[i].z = params[i].z + params[i].r*cos(params[i].theta*deg2rad) +
00807                params[i].f * cos(params[i].theta*deg2rad);
00808     }
00809 
00810     float r1 = ((f[1].z-P[1].z)*(P[0].x-P[1].x)
00811                 - (f[1].x-P[1].x)*(P[0].z-P[1].z))/
00812       ((f[1].x-P[1].x)*(f[0].z-P[0].z)
00813        - (f[1].z-P[1].z)*(f[0].x-P[0].x)+0.0001);
00814 
00815     float r2 = ((f[0].z-P[0].z)*(P[0].x-P[1].x)
00816                 - (f[0].x-P[0].x)*(P[0].z-P[1].z))/
00817       ((f[1].x-P[1].x)*(f[0].z-P[0].z)
00818        - (f[1].z-P[1].z)*(f[0].x-P[0].x)+0.0001);
00819 
00820     float lhs = P[0].y + (f[0].y-P[0].y)*r1;
00821     float rhs = P[1].y + (f[1].y-P[1].y)*r2;
00822 
00823     //printf("Here!!!!\n");
00824     if(lhs-rhs>20 || lhs-rhs<-20)
00825       return false;
00826 
00827     retPoint->x = P[0].x + (f[0].x-P[0].x)*r1;
00828     retPoint->y = (lhs+rhs)/2.0;
00829     retPoint->z = P[0].z + (f[0].z-P[0].z)*r1;
00830     return true;
00831 }
00832 
Generated on Sun May 8 08:42:35 2011 for iLab Neuromorphic Vision Toolkit by  doxygen 1.6.3