#include "Image/OpenCVUtil.H"
#include "Beobot/SalientRegionTracker.H"

#include "Component/ModelManager.H"
#include "Raster/Raster.H"
#include "GUI/XWinManaged.H"
#include "Image/Image.H"
#include "Image/Pixels.H"
#include "Util/Timer.H"

#include <signal.h>

#include "Image/ShapeOps.H"
#include "Image/CutPaste.H"
#include "Image/MathOps.H"
#include "Image/DrawOps.H"

// size of the correlation template window (in conspicuity-map pixels)
#define WINSIZE     7

// maximum allowed change between the old and the updated template
// before the template update is rejected
#define templThresh 2000.0F

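// ######################################################################
// Constructor: register with the ModelManager and start with an empty
// list of tracked points.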
SalientRegionTracker::SalientRegionTracker(OptionManager& mgr,
                                           const std::string& descrName,
                                           const std::string& tagName)
  :
  ModelComponent(mgr, descrName, tagName),
  itsTimer(1000000)
{
  itsCurrTrackedPoints.clear();
}

// ######################################################################
// Destructor
SalientRegionTracker::~SalientRegionTracker()
{ }

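// ######################################################################
// Feed a new frame to the tracker: store the conspicuity maps, optionally
// (re)initialize the set of tracked points/ROIs/visual objects, and run
// one tracking step.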
void SalientRegionTracker::input
(Image<PixRGB<byte> > image, ImageSet<float> cmap, bool resetTracker,
 std::vector<Point2D<int> > points, std::vector<Rectangle> rects,
 std::vector<rutz::shared_ptr<VisualObject> > visualObjects)
{
  itsTimer.reset();
  itsCurrCmap = cmap;

  itsResetTracker = resetTracker;

  // nothing to do if we are not resetting and have nothing to track
  if(itsResetTracker == false && itsCurrTrackedPoints.size() == 0)
    return;

  // on reset, take the new points/regions as the ones to track
  if(itsResetTracker)
    {
      itsOriginalInputImage = image;
      itsCurrInputImage     = image;
      itsCurrTrackedPoints  = points;
      itsCurrTrackedVisualObjects = visualObjects;
      itsCurrTrackedROI     = rects;
    }

  track();

  LINFO("Time: %6.3f ms", itsTimer.get()/1000.0);
}

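// ######################################################################
// Drop all tracker state: templates, offsets, and tracked points/ROIs.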
void SalientRegionTracker::clear()
{
  itsPrevTrackedPointsScaled.clear();
  itsTrackerBias.clear();
  itsTrackerBiasOffset.clear();
  itsCurrTrackedPoints.clear();
  itsCurrTrackedROI.clear();
}

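// ######################################################################
// Copy the i'th tracked region (template, offset, point, and ROI) from
// another tracker into this one.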
void SalientRegionTracker::move
(nub::soft_ref<SalientRegionTracker> tracker2, uint i)
{
  itsPrevTrackedPointsScaled.push_back
    (tracker2->getPrevTrackedPointsScaled(i));
  itsTrackerBias.push_back
    (tracker2->getTrackerBias(i));
  itsTrackerBiasOffset.push_back
    (tracker2->getTrackerBiasOffset(i));
  itsCurrTrackedPoints.push_back
    (tracker2->getCurrTrackedPoints(i));
  itsCurrTrackedROI.push_back
    (tracker2->getCurrTrackedROI(i));
}

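// ######################################################################
// One tracking step: remember each ROI's offset from its salient point,
// re-localize the points on the conspicuity maps, then shift the ROIs
// accordingly (clamped to the image).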
void SalientRegionTracker::track()
{
  // offset of each ROI's top-left corner from its tracked salient point
  std::vector<Point2D<int> > diffs;
  for(uint i = 0; i < itsCurrTrackedPoints.size(); i++)
    {
      Point2D<int> pt =
        itsCurrTrackedROI[i].topLeft() - itsCurrTrackedPoints[i];
      diffs.push_back(pt);

      LDEBUG("diff[%3d]: (%4d,%4d) - (%4d,%4d) = (%4d,%4d)", i,
             itsCurrTrackedROI[i].topLeft().i,
             itsCurrTrackedROI[i].topLeft().j,
             itsCurrTrackedPoints[i].i,
             itsCurrTrackedPoints[i].j,
             diffs[i].i, diffs[i].j);
    }

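  // re-localize each salient point by template matching on the
  // current conspicuity maps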
  trackCmaps();

  // move each ROI so that it keeps its original offset from the
  // (re-localized) salient point, clipped to the image boundary
  Dims imgDims = itsCurrInputImage.getDims();
  Rectangle imgRect(Point2D<int>(0,0), imgDims);
  for(uint i = 0; i < itsCurrTrackedPoints.size(); i++)
    {
      Point2D<int> tl = diffs[i] + itsCurrTrackedPoints[i];
      Dims d = itsCurrTrackedROI[i].dims();

      LDEBUG("imgRect[%4d,%4d,%4d,%4d]",
             imgRect.topLeft().i, imgRect.topLeft().j,
             imgRect.bottomRight().i, imgRect.bottomRight().j);

      Rectangle newRect(tl, d);
      LDEBUG("newRect[%4d,%4d,%4d,%4d]",
             newRect.topLeft().i, newRect.topLeft().j,
             newRect.bottomRight().i, newRect.bottomRight().j);

      itsCurrTrackedROI[i] = imgRect.getOverlap(newRect);

      LINFO("Resulting ROI[%4d,%4d,%4d,%4d]",
            itsCurrTrackedROI[i].topLeft().i,
            itsCurrTrackedROI[i].topLeft().j,
            itsCurrTrackedROI[i].bottomRight().i,
            itsCurrTrackedROI[i].bottomRight().j);
    }
}

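// ######################################################################
// Match the stored VisualObjects (SIFT keypoints) against the current
// frame and report whether each one still has a geometrically
// consistent (affine) match.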
void SalientRegionTracker::trackVisualObjects()
{
  // nothing to match against right after a reset
  if(itsResetTracker) return;

  // wrap the current frame as a VisualObject so it can be matched
  std::string sName("scene");
  std::string sfName = sName + std::string(".png");
  rutz::shared_ptr<VisualObject>
    scene(new VisualObject(sName, sfName, itsCurrInputImage));

  for(uint i = 0; i < itsCurrTrackedVisualObjects.size(); i++)
    {
      // match the stored object against the scene
      Timer tim(1000000);
      VisualObjectMatchAlgo voma(VOMA_SIMPLE);
      rutz::shared_ptr<VisualObjectMatch> matchRes
        (new VisualObjectMatch(scene, itsCurrTrackedVisualObjects[i], voma));
      uint64 t = tim.get();

      // prune inconsistent keypoint correspondences
      uint orgSize = matchRes->size();
      tim.reset();
      uint np = matchRes->prune();
      uint64 t2 = tim.get();

      LINFO("Found %u matches (%s & %s) in %.3fms:"
            " pruned %u in %.3fms",
            orgSize, scene->getName().c_str(),
            itsCurrTrackedVisualObjects[i]->getName().c_str(),
            float(t) * 0.001F,
            np, float(t2) * 0.001F);

      // evaluate the quality of the match
      float kpAvgDist = matchRes->getKeypointAvgDist();
      float afAvgDist = matchRes->getAffineAvgDist();
      float score     = matchRes->getScore();
      bool isSIFTaffine = matchRes->checkSIFTaffine
        (M_PI/4, 5.0F, 0.25F);
      SIFTaffine siftAffine = matchRes->getSIFTaffine();
      LINFO("kpAvgDist = %.4f|affAvgDist = %.4f|"
            " score: %.4f|aff? %d",
            kpAvgDist, afAvgDist, score, isSIFTaffine);

      if (!isSIFTaffine)
        LINFO("### Affine is too weird -- BOGUS MATCH");
      else
        {
          // print the recovered affine transform
          LINFO("[testX]   [ %- .3f %- .3f ] [refX]   [%- .3f]",
                siftAffine.m1, siftAffine.m2, siftAffine.tx);
          LINFO("[testY] = [ %- .3f %- .3f ] [refY] + [%- .3f]",
                siftAffine.m3, siftAffine.m4, siftAffine.ty);
        }

      // accept the match only if the affine is sane, the score is high
      // enough, and enough keypoint pairs survived pruning
      bool isSIFTfit = (isSIFTaffine && (score > 2.5) &&
                        (matchRes->size() > 3));
      LINFO("OD isSIFTfit %d", isSIFTfit);

      if(isSIFTfit)
        {
          // a good match was found; no further action is taken here
        }
    }
}

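// ######################################################################
// Track each salient point at conspicuity-map resolution: on reset,
// grab a WINSIZE x WINSIZE template around the point; otherwise locate
// the template in the current maps and update the point.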
void SalientRegionTracker::trackCmaps()
{
  // conspicuity maps are downscaled by 2^sml with respect to the input
  int smscale = (int)(pow(2,sml));

  if(itsResetTracker)
    {
      itsPrevTrackedPointsScaled.clear();
      itsTrackerBias.clear();
      itsTrackerBiasOffset.clear();
    }

  for(uint i = 0; i < itsCurrTrackedPoints.size(); i++)
    {
      if(itsResetTracker)
        {
          if(!itsCurrTrackedPoints[i].isValid())
            LFATAL("invalid input tracked point[%d]", i);

          // store the point at map scale and grab its template
          itsPrevTrackedPointsScaled.push_back
            (Point2D<int>(itsCurrTrackedPoints[i].i/smscale,
                          itsCurrTrackedPoints[i].j/smscale));

          Point2D<int> tempOffset;
          itsTrackerBias.push_back
            (setNewBias(itsPrevTrackedPointsScaled[i], tempOffset));
          itsTrackerBiasOffset.push_back(tempOffset);
        }
      else if(itsPrevTrackedPointsScaled[i].isValid())
        {
          LINFO("tracking current point[%d]", i);
          itsPrevTrackedPointsScaled[i] = trackPoint
            (itsTrackerBias[i],
             itsTrackerBiasOffset[i],
             itsPrevTrackedPointsScaled[i]);
        }
      else { LINFO("lost current point[%d]", i); }

      // report the result at input-image scale; (-1,-1) marks a lost point
      if(itsPrevTrackedPointsScaled[i].isValid())
        itsCurrTrackedPoints[i] =
          Point2D<int>(itsPrevTrackedPointsScaled[i].i*smscale,
                       itsPrevTrackedPointsScaled[i].j*smscale);
      else itsCurrTrackedPoints[i] = Point2D<int>(-1,-1);
      LINFO("current track[%d] result: [%d,%d] -> [%d,%d]", i,
            itsCurrTrackedPoints[i].i,
            itsCurrTrackedPoints[i].j,
            itsPrevTrackedPointsScaled[i].i,
            itsPrevTrackedPointsScaled[i].j);
    }
}

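// ######################################################################
// Cut a WINSIZE x WINSIZE template around the tracked location in each
// conspicuity map. biasOffset returns where the tracked point sits
// inside that window (normally the center, smaller near image borders).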
ImageSet<float> SalientRegionTracker::setNewBias
(Point2D<int> inTrackLoc, Point2D<int> &biasOffset)
{
  int w = itsCurrCmap[0].getWidth();
  int h = itsCurrCmap[0].getHeight();

  ImageSet<float> bias(NUM_CHANNELS);

  // keep the template window inside the map: near a border the tracked
  // point can no longer sit at the window center
  if(inTrackLoc.i < (WINSIZE/2))
    biasOffset.i = inTrackLoc.i;
  else if(inTrackLoc.i > ((w - 1) - (WINSIZE/2)))
    biasOffset.i = WINSIZE - (w - inTrackLoc.i);
  else
    biasOffset.i = WINSIZE/2;

  if(inTrackLoc.j < (WINSIZE/2))
    biasOffset.j = inTrackLoc.j;
  else if(inTrackLoc.j > ((h - 1) - (WINSIZE/2)))
    biasOffset.j = WINSIZE - (h - inTrackLoc.j);
  else
    biasOffset.j = WINSIZE/2;

  LINFO("Set new bias[%d,%d]: offset: (%d, %d)",
        inTrackLoc.i, inTrackLoc.j, biasOffset.i, biasOffset.j);

  // crop the template from each conspicuity map
  for(int i = 0; i < NUM_CHANNELS; i++)
    {
      Point2D<int> upLeftsc(inTrackLoc.i - biasOffset.i,
                            inTrackLoc.j - biasOffset.j);
      Image<float> target = crop(itsCurrCmap[i], upLeftsc,
                                 Dims(WINSIZE,WINSIZE));
      bias[i] = target;
    }
  return bias;
}

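// ######################################################################
// Locate the template in the current conspicuity maps: compute a
// squared-difference map, penalize candidates far from the previous
// location, pick the minimum, and slowly update the template.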
Point2D<int> SalientRegionTracker::trackPoint
(ImageSet<float> &bias, Point2D<int> biasOffset,
 Point2D<int> trackLoc)
{
  int w = itsCurrCmap[0].getWidth();
  int h = itsCurrCmap[0].getHeight();

  // sum of squared-difference maps over all channels
  // (smaller value = better match)
  Image<float> smap = getBiasedSMap(bias);

  int i = 0; float maxDist = sqrt(w*w + h*h);
  int wsmap = smap.getWidth();

  // weight each candidate by its (normalized) distance to the previous
  // window location, so nearby matches are preferred
  Point2D<int> prevLoc(trackLoc.i - biasOffset.i, trackLoc.j - biasOffset.j);
  for (Image<float>::iterator itr = smap.beginw(), stop = smap.endw();
       itr != stop; ++itr, i++)
    {
      int x = i % wsmap;
      int y = i / wsmap;
      float dist = (prevLoc.distance(Point2D<int>(x,y))+.1)/maxDist;
      *itr = *itr * dist;
    }

  // best match = smallest weighted squared difference
  float minval; Point2D<int> upLeft; findMin(smap, upLeft, minval);

  // slowly adapt the template toward the newly matched window
  updateTemplate(upLeft, bias);

  // return the tracked point, not the window's top-left corner
  Point2D<int> newTrackLoc = upLeft + biasOffset;
  return newTrackLoc;
}

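// ######################################################################
// Blend the current match into the template (90% old, 10% new), but
// only keep the result if the template has not drifted too far.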
void SalientRegionTracker::updateTemplate
( Point2D<int> upLeft, ImageSet<float> &bias)
{
  double dist = 0;
  ImageSet<float> newBias(NUM_CHANNELS);

  for(int i = 0; i < NUM_CHANNELS; i++)
    {
      Image<float> target =
        crop(itsCurrCmap[i], upLeft, Dims(WINSIZE,WINSIZE));

      // running average: 90% old template, 10% new observation
      newBias[i] = bias[i]*0.9 + target*(1 - 0.9);
      dist += distance(bias[i], newBias[i]);
    }

  // accept the update only if the template did not change too much
  LINFO("Distance %f (thresh: %f)", dist, templThresh);
  if (dist < templThresh)
    {
      bias = newBias;
    }
  else LINFO("not adding bias");
}

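// ######################################################################
// Correlate the template with each conspicuity map (OpenCV squared
// difference) and sum the results into a single match map.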
Image<float> SalientRegionTracker::getBiasedSMap(ImageSet<float> bias)
{
#ifndef HAVE_OPENCV
  LFATAL("OpenCV must be installed to use this function");
  return Image<float>();
#else

  int w = itsCurrCmap[0].getWidth();
  int h = itsCurrCmap[0].getHeight();

  // cvMatchTemplate output size is (W - w + 1) x (H - h + 1)
  Image<float> biasedCMap(w - WINSIZE + 1, h - WINSIZE + 1, ZEROS);
  Image<float> res(w - WINSIZE + 1, h - WINSIZE + 1, ZEROS);

  // CV_TM_SQDIFF: smaller values mean a better match
  for(uint i = 0; i < NUM_CHANNELS; i++)
    {
      cvMatchTemplate(img2ipl(itsCurrCmap[i]), img2ipl(bias[i]),
                      img2ipl(biasedCMap), CV_TM_SQDIFF);

      res += biasedCMap;
    }

  return res;

#endif
}