#include "Image/VisualTracker.H"

#include "Component/OptionManager.H"
#include "Component/ModelOptionDef.H"
#include "Simulation/SimEventQueue.H"
#include "Image/ColorOps.H"   // luminance()
#include "Image/MathOps.H"
#include "Image/OpenCVUtil.H" // assumed location of img2ipl() and the OpenCV headers

const ModelOptionCateg MOC_VISUALTRACKER = {
  MOC_SORTPRI_2, "Visual Tracker Related Options" };

const ModelOptionDef OPT_TrackWindowSize =
  { MODOPT_ARG(int), "TrackWindowSize", &MOC_VISUALTRACKER, OPTEXP_CORE,
    "Size of the tracking window",
    "trackwindowsize", '\0', "<int>", "30" };

const ModelOptionDef OPT_InitTrackWindowSize =
  { MODOPT_ARG(int), "InitTrackWindowSize", &MOC_VISUALTRACKER, OPTEXP_CORE,
    "Size of the window from which to initialize the tracker",
    "inittrackwindowsize", '\0', "<int>", "30" };

VisualTracker::VisualTracker(OptionManager& mgr, const std::string& descrName,
                             const std::string& tagName) :
  SimModule(mgr, descrName, tagName),
  SIMCALLBACK_INIT(SimEventInputFrame),
  SIMCALLBACK_INIT(SimEventSetVisualTracker),
  itsTrackWindowSize(&OPT_TrackWindowSize, this, ALLOW_ONLINE_CHANGES),
  itsInitTrackWindowSize(&OPT_InitTrackWindowSize, this, ALLOW_ONLINE_CHANGES)
{
#ifdef HAVE_OPENCV
  itsCurrentPoints   = NULL;
  itsPreviousPoints  = NULL;
  itsTrackStatus     = NULL;
  itsTrackError      = NULL;
  itsCurrentPyramid  = NULL;
  itsPreviousPyramid = NULL;
  itsKalman          = NULL;
  itsObjectHist      = NULL;
  itsBackproject     = NULL;
  itsTrackWindow     = cvRect(0, 0, 320, 240);
  itsUseKalman       = false;
#endif
  itsInitTracker = true;
  itsTracking    = false;
}

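// Release the OpenCV buffers allocated by initTracker().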
VisualTracker::~VisualTracker()
{
#ifdef HAVE_OPENCV
  cvFree(&itsCurrentPoints);
  cvFree(&itsPreviousPoints);
  cvFree(&itsTrackStatus);
  cvFree(&itsTrackError);
  cvReleaseImage(&itsCurrentPyramid);
  cvReleaseImage(&itsPreviousPyramid);
  cvReleaseKalman(&itsKalman);
#endif
}

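// A new input frame has arrived: convert it to luminance, run one tracking
// step, and post the result as a SimEventVisualTracker.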
void VisualTracker::onSimEventInputFrame(SimEventQueue& q,
                                         rutz::shared_ptr<SimEventInputFrame>& e)
{
#ifdef HAVE_OPENCV
  const Image<PixRGB<byte> > inimg = e->frame().asRgb();

  itsCurrentGreyImg = luminance(inimg);

  itsCurrentTargetLoc = trackObjects(itsCurrentGreyImg);

  if (itsCurrentTargetLoc.isValid())
  {
    q.post(rutz::make_shared(new SimEventVisualTracker(this, itsCurrentTargetLoc, true)));
    LINFO("Tracking %ix%i", itsCurrentTargetLoc.i, itsCurrentTargetLoc.j);
  }
  else if (!itsTracking)
  {
    q.post(rutz::make_shared(new SimEventVisualTracker(this, itsCurrentTargetLoc, false)));
  }
#endif
}

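// A SimEventSetVisualTracker requests (re-)initialization of the tracker at a
// new target location in the current frame.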
void VisualTracker::onSimEventSetVisualTracker(SimEventQueue& q,
                                               rutz::shared_ptr<SimEventSetVisualTracker>& e)
{
#ifdef HAVE_OPENCV
  Point2D<int> targetLoc = e->getTargetLoc();
  LINFO("Set visual tracker to %ix%i", targetLoc.i, targetLoc.j);

  setTargets(itsCurrentGreyImg, targetLoc);
#endif
}

void VisualTracker::start2()
{
}

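// Allocate the buffers used by the pyramidal Lucas-Kanade point tracker and,
// if requested, set up a constant-velocity Kalman filter over (x, y, dx, dy).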
void VisualTracker::initTracker(Dims imageDims)
{
#ifdef HAVE_OPENCV
  itsMaxNumPoints = 1;
  itsCurrentNumPoints = 0;
  itsCurrentPoints  = (CvPoint2D32f*)cvAlloc(itsMaxNumPoints*sizeof(*itsCurrentPoints));
  itsPreviousPoints = (CvPoint2D32f*)cvAlloc(itsMaxNumPoints*sizeof(*itsPreviousPoints));

  itsPreviousGreyImg = Image<byte>(imageDims, ZEROS);
  itsCurrentPyramid  = cvCreateImage(cvSize(imageDims.w(), imageDims.h()), 8, 1);
  itsPreviousPyramid = cvCreateImage(cvSize(imageDims.w(), imageDims.h()), 8, 1);

  itsTrackStatus = (char*)cvAlloc(itsMaxNumPoints*sizeof(char));
  itsTrackError  = (float*)cvAlloc(itsMaxNumPoints*sizeof(float));

  if (itsUseKalman)
  {
    // 4 state variables (x, y, dx, dy), 2 measurements (x, y)
    itsKalman = cvCreateKalman(4, 2);

    // constant-velocity transition matrix
    const float A[] = { 1, 0, 1, 0,
                        0, 1, 0, 1,
                        0, 0, 1, 0,
                        0, 0, 0, 1 };

    // we measure position only
    const float H[] = { 1, 0, 0, 0,
                        0, 1, 0, 0 };

    memcpy(itsKalman->transition_matrix->data.fl,  A, sizeof(A));
    memcpy(itsKalman->measurement_matrix->data.fl, H, sizeof(H));

    cvSetIdentity(itsKalman->process_noise_cov,     cvRealScalar(1e-5));
    cvSetIdentity(itsKalman->measurement_noise_cov, cvRealScalar(1e-1));
    cvSetIdentity(itsKalman->error_cov_post,        cvRealScalar(1));

    cvZero(itsKalman->state_post);
    cvZero(itsKalman->state_pre);
  }

  itsTrackFlags = 0;
#endif
  itsInitTracker = false;
}

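// Seed the point tracker with a single target location; the frame-to-frame
// tracking itself happens in trackObjects().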
void VisualTracker::setTargets(const Image<byte>& grey, const Point2D<int> loc)
{
  if (!loc.isValid())
  {
    itsTracking = false;
    return;
  }

  if (itsInitTracker)
    initTracker(grey.getDims());

#ifdef HAVE_OPENCV
  itsCurrentNumPoints = itsMaxNumPoints;

  IplImage* currentImg = img2ipl(grey);
  itsCurrentPoints[0].x = loc.i;
  itsCurrentPoints[0].y = loc.j;

  cvReleaseImageHeader(&currentImg);

  itsPreviousGreyImg = grey;

  itsTrackFlags = 0;
  IplImage* swap_temp;
  CV_SWAP(itsPreviousPyramid, itsCurrentPyramid, swap_temp);

  CvPoint2D32f* swap_points;
  CV_SWAP(itsPreviousPoints, itsCurrentPoints, swap_points);

  if (itsUseKalman)
  {
    // start the Kalman filter at the selected location, with zero velocity
    itsKalman->state_post->data.fl[0] = loc.i;
    itsKalman->state_post->data.fl[1] = loc.j;
  }
#endif

  itsTracking = true;
}

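// Alternative initialization: compute an intensity histogram of the current
// frame and remember the target template for matching in trackTemplObject().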
void VisualTracker::setTargets(const Image<byte>& grey, const Image<byte>& target)
{
#ifdef HAVE_OPENCV
  if (itsInitTracker)
    initTracker(grey.getDims());

  IplImage* currentImg = img2ipl(grey);

  // NOTE: itsObjectHist is assumed to have been allocated (e.g. via cvCreateHist())
  // before this is called
  cvCalcHist(&currentImg, itsObjectHist);

  // normalize the histogram to the 0..255 range
  float max_val = 0.f;
  cvGetMinMaxHistValue(itsObjectHist, 0, &max_val, 0, 0);
  cvConvertScale(itsObjectHist->bins, itsObjectHist->bins, max_val ? 255. / max_val : 0., 0);

  cvReleaseImageHeader(&currentImg);

  itsTargetTempl = target;
#endif
}

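// Template matching: slide the stored target template over the image and
// return the location of the best (lowest normalized squared-difference) match.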
Point2D<int> VisualTracker::trackTemplObject(const Image<byte>& grey, double& err)
{
  Point2D<int> targetLoc(-1,-1);

  if (!itsTargetTempl.initialized())
    return targetLoc;

#ifdef HAVE_OPENCV
  // the match map has one score per possible template position
  IplImage* result = cvCreateImage(
      cvSize(grey.getWidth()  - itsTargetTempl.getWidth()  + 1,
             grey.getHeight() - itsTargetTempl.getHeight() + 1),
      IPL_DEPTH_32F, 1);

  IplImage* greyImg  = img2ipl(grey);
  IplImage* templImg = img2ipl(itsTargetTempl);
  cvMatchTemplate(greyImg, templImg, result, CV_TM_SQDIFF_NORMED);

  double minVal, maxVal;
  CvPoint minLoc, maxLoc;
  cvMinMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc);

  // with CV_TM_SQDIFF_NORMED the best match is the minimum; report the center
  // of the matched region (a threshold on minVal could be used to reject poor matches)
  targetLoc.i = minLoc.x + itsTargetTempl.getWidth()/2;
  targetLoc.j = minLoc.y + itsTargetTempl.getHeight()/2;
  err = minVal;

  cvReleaseImageHeader(&greyImg);
  cvReleaseImageHeader(&templImg);
  cvReleaseImage(&result);
#endif

  return targetLoc;
}

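// Track the current point set from the previous frame into the new frame using
// pyramidal Lucas-Kanade optical flow; optionally smooth the estimate with the
// Kalman filter.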
Point2D<int> VisualTracker::trackObjects(const Image<byte>& grey)
{
  Point2D<int> targetLoc(-1,-1);

  if (!itsTracking)
    return targetLoc;

#ifdef HAVE_OPENCV
  if (itsCurrentNumPoints > 0)
  {
    IplImage* pGrey = img2ipl(itsPreviousGreyImg);
    IplImage* cGrey = img2ipl(grey);

    // track the points from the previous frame into the current one
    cvCalcOpticalFlowPyrLK(pGrey, cGrey, itsPreviousPyramid, itsCurrentPyramid,
                           itsPreviousPoints, itsCurrentPoints,
                           itsCurrentNumPoints,
                           cvSize(itsTrackWindowSize.getVal(), itsTrackWindowSize.getVal()),
                           3,
                           itsTrackStatus,
                           itsTrackError,
                           cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03),
                           itsTrackFlags);

    // both pyramids are now valid and can be reused on the next call
    itsTrackFlags = CV_LKFLOW_PYR_A_READY | CV_LKFLOW_PYR_B_READY;

    cvReleaseImageHeader(&pGrey);
    cvReleaseImageHeader(&cGrey);

    // keep only the points that were successfully tracked, and report the last
    // one with a reasonable error as the target location
    int k, i;
    for (i = k = 0; i < itsCurrentNumPoints; i++)
    {
      if (!itsTrackStatus[i])
        continue;

      itsCurrentPoints[k++] = itsCurrentPoints[i];

      if (itsTrackError[i] < 2000)
      {
        targetLoc.i = std::min(grey.getWidth()-1,  std::max(0, (int)itsCurrentPoints[i].x));
        targetLoc.j = std::min(grey.getHeight()-1, std::max(0, (int)itsCurrentPoints[i].y));
        ASSERT(grey.coordsOk(targetLoc));
      }
    }
    itsCurrentNumPoints = k;
  }

  IplImage* swap_temp;
  CV_SWAP(itsPreviousPyramid, itsCurrentPyramid, swap_temp);
  CvPoint2D32f* swap_points;
  CV_SWAP(itsPreviousPoints, itsCurrentPoints, swap_points);

  itsPreviousGreyImg = grey;

  if (itsUseKalman && grey.coordsOk(targetLoc))
  {
    float Z[2];
    CvMat Zmat = cvMat(2, 1, CV_32F, Z);

    Z[0] = targetLoc.i;
    Z[1] = targetLoc.j;

    // correct with the new measurement, then predict the next state
    cvKalmanCorrect(itsKalman, &Zmat);
    const CvMat* prediction = cvKalmanPredict(itsKalman, 0);

    // predicted measurement: Z = H * state_pre
    cvMatMulAdd(itsKalman->measurement_matrix, itsKalman->state_pre, NULL, &Zmat);

    targetLoc.i = (int)prediction->data.fl[0];
    targetLoc.j = (int)prediction->data.fl[1];
  }
#endif

  return targetLoc;
}