/*!@file SceneUnderstanding/PCamera.C */

// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2000-2005   //
// by the University of Southern California (USC) and the iLab at USC.  //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: Lior Elazary <elazary@usc.edu>
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/plugins/SceneUnderstanding/PCamera.C $
// $Id: PCamera.C 13551 2010-06-10 21:56:32Z itti $
//

#ifndef PCamera_C_DEFINED
#define PCamera_C_DEFINED

#include "plugins/SceneUnderstanding/PCamera.H"

#include "Image/DrawOps.H"
#include "Image/MathOps.H"
#include "Image/Kernels.H"
#include "Image/FilterOps.H"
#include "Image/Convolutions.H"
#include "Image/fancynorm.H"
#include "Image/Point3D.H"
#include "Image/MatrixOps.H"
#include "Simulation/SimEventQueue.H"
#include "Neuro/EnvVisualCortex.H"
#include "GUI/DebugWin.H"
#include <math.h>
#include <fcntl.h>
#include <limits>
#include <string>

const ModelOptionCateg MOC_PCamera = {
  MOC_SORTPRI_3, "PCamera-Related Options" };

// Used by: SimulationViewerEyeMvt
const ModelOptionDef OPT_PCameraShowDebug =
  { MODOPT_ARG(bool), "PCameraShowDebug", &MOC_PCamera, OPTEXP_CORE,
    "Show debug img",
    "pcamera-debug", '\0', "<true|false>", "false" };


#define KEY_UP    98
#define KEY_DOWN  104
#define KEY_LEFT  100
#define KEY_RIGHT 102

namespace
{
  // helper function:
  // finds the cosine of the angle between the vectors
  // from pt0->pt1 and from pt0->pt2
  double angle( CvPoint* pt1, CvPoint* pt2, CvPoint* pt0 )
  {
    double dx1 = pt1->x - pt0->x;
    double dy1 = pt1->y - pt0->y;
    double dx2 = pt2->x - pt0->x;
    double dy2 = pt2->y - pt0->y;
    return (dx1*dx2 + dy1*dy2)/sqrt((dx1*dx1 + dy1*dy1)*(dx2*dx2 + dy2*dy2) + 1e-10);
  }
}


// ######################################################################
PCamera::PCamera(OptionManager& mgr, const std::string& descrName,
                 const std::string& tagName) :
  SimModule(mgr, descrName, tagName),
  SIMCALLBACK_INIT(SimEventInputFrame),
  SIMCALLBACK_INIT(SimEventSaveOutput),
  SIMCALLBACK_INIT(SimEventUserInput),
  itsShowDebug(&OPT_PCameraShowDebug, this),
  itsCameraCtrl(new Visca(mgr)),
  itsStorage(cvCreateMemStorage(0))
{
  ASSERT(itsStorage != 0);

  addSubComponent(itsCameraCtrl);

  itsZoomOut = false;

  itsIntrinsicMatrix   = cvCreateMat( 3, 3, CV_32FC1);
  itsDistortionCoeffs  = cvCreateMat( 4, 1, CV_32FC1);
  itsCameraRotation    = cvCreateMat( 1, 3, CV_64FC1);
  itsCameraTranslation = cvCreateMat( 1, 3, CV_64FC1);

  //  cvmSet(itsDistortionCoeffs, 0, 0, 0.1237883 );
  //  cvmSet(itsDistortionCoeffs, 1, 0, -26.7828178 );
  //  cvmSet(itsDistortionCoeffs, 2, 0, -0.0112169 );
  //  cvmSet(itsDistortionCoeffs, 3, 0, 0.0065743 );
  //  //  cvmSet(itsDistortionCoeffs, 0, 0, 0 );
  //  //  cvmSet(itsDistortionCoeffs, 1, 0, 0);
  //  //  cvmSet(itsDistortionCoeffs, 2, 0, 0);
  //  //  cvmSet(itsDistortionCoeffs, 3, 0, 0);
  //
  //  cvmSet(itsIntrinsicMatrix, 0, 0, 672.84222); cvmSet(itsIntrinsicMatrix, 0, 1, 0); cvmSet(itsIntrinsicMatrix, 0, 2, 159.50000);
  //  cvmSet(itsIntrinsicMatrix, 1, 0, 0); cvmSet(itsIntrinsicMatrix, 1, 1, 3186.79102 ); cvmSet(itsIntrinsicMatrix, 1, 2, 119.5);
  //  cvmSet(itsIntrinsicMatrix, 2, 0, 0); cvmSet(itsIntrinsicMatrix, 2, 1, 0); cvmSet(itsIntrinsicMatrix, 2, 2, 1);
  //
  //  //Set the extrinsic params
  cvmSet(itsCameraRotation, 0, 0, 2.373648 );
  cvmSet(itsCameraRotation, 0, 1, -0.017453 );
  cvmSet(itsCameraRotation, 0, 2, 0.157079);
  //
  cvmSet(itsCameraTranslation, 0, 0, 24);
  cvmSet(itsCameraTranslation, 0, 1, -21);
  cvmSet(itsCameraTranslation, 0, 2, 990.2);

  cvmSet(itsDistortionCoeffs, 0, 0, 0.0 );
  cvmSet(itsDistortionCoeffs, 1, 0, 0.0 );
  cvmSet(itsDistortionCoeffs, 2, 0, 0.0 );
  cvmSet(itsDistortionCoeffs, 3, 0, 0.0 );

  //double cameraParam[3][4] = {
  //  {350.475735, 0,           158.250000, 0},
  //  {0.000000,   -363.047091, 118.250000, 0.000000},
  //  {0.000000,   0.000000,    1.000000,   0.00000}};

  cvmSet(itsIntrinsicMatrix, 0, 0, 350.475735); cvmSet(itsIntrinsicMatrix, 0, 1, 0); cvmSet(itsIntrinsicMatrix, 0, 2, 158.25000);
  cvmSet(itsIntrinsicMatrix, 1, 0, 0); cvmSet(itsIntrinsicMatrix, 1, 1, -363.047091 ); cvmSet(itsIntrinsicMatrix, 1, 2, 118.25);
  cvmSet(itsIntrinsicMatrix, 2, 0, 0); cvmSet(itsIntrinsicMatrix, 2, 1, 0); cvmSet(itsIntrinsicMatrix, 2, 2, 1);

  itsIntrinsicInit = false;
  itsSaveCorners = false;
  itsDrawGrid = false;
  itsChangeRot = false;

  itsVP = new ViewPort3D(320,240);

  //  itsCameraCtrl->zoom(600);
  //  itsCameraCtrl->movePanTilt(0,0);
}

// ######################################################################
PCamera::~PCamera()
{
}

void PCamera::onSimEventInputFrame(SimEventQueue& q,
                                   rutz::shared_ptr<SimEventInputFrame>& e)
{
  // here is the input image:
  GenericFrame frame = e->frame();

  const Image<PixRGB<byte> > inimg = frame.asRgb();
  itsCurrentImg = inimg;

  //Calibrate
  findSquares(inimg,
              itsStorage,
              2000,
              10000,
              0.6);

  //const Image<PixRGB<byte> > out = drawSquares(inimg, cards);

  //SHOWIMG(out);
  //getTransMat();

  //itsVP->setCamera(Point3D<float>(0,-460,300), Point3D<float>(-58,0,0));
  double trans[3][4] = {
    {-0.996624,  0.070027,  0.042869, -16.907477},
    {-0.004359,  0.476245, -0.879302,   9.913470},
    {-0.081990, -0.876520, -0.474332, 276.648010}};
  itsVP->setCamera(trans);

  itsVP->initFrame();
  itsVP->drawBox(
      Point3D<float>(0,0,20),
      Point3D<float>(0,0,0),
      Point3D<float>(40,40,40),
      PixRGB<byte>(0,255,0));

  itsRenderImg = flipVertic(itsVP->getFrame());
}

void PCamera::findSquares(const Image<PixRGB<byte> >& in, CvMemStorage* storage,
                          const int minarea, const int maxarea, const double mincos)
{
  const int N = 11;

  itsSquares.clear();
  IplImage* img = img2ipl(in);

  CvSize sz = cvSize( img->width & -2, img->height & -2 );
  IplImage* timg = cvCloneImage( img ); // make a copy of input image
  IplImage* gray = cvCreateImage( sz, 8, 1 );
  IplImage* pyr = cvCreateImage( cvSize(sz.width/2, sz.height/2), 8, 3 );
  // create empty sequence that will contain points -
  // 4 points per square (the square's vertices)

  // select the maximum ROI in the image
  // with the width and height divisible by 2
  cvSetImageROI( timg, cvRect( 0, 0, sz.width, sz.height ));

  // down-scale and upscale the image to filter out the noise
  cvPyrDown( timg, pyr, 7 );
  cvPyrUp( pyr, timg, 7 );
  IplImage* tgray = cvCreateImage( sz, 8, 1 );

  // find squares in every color plane of the image
  for (int c = 0; c < 3; ++c)
  {
    // extract the c-th color plane
    cvSetImageCOI( timg, c+1 );
    cvCopy( timg, tgray, 0 );

    // try several threshold levels
    for (int l = 0; l < N; ++l)
    {
      // hack: use Canny instead of zero threshold level.
      // Canny helps to catch squares with gradient shading
      if( l == 0 )
      {
        // apply Canny. Take the upper threshold from slider
        // and set the lower to 0 (which forces edges merging)
        const int thresh = 50;
        cvCanny( tgray, gray, 0, thresh, 5 );
        // dilate canny output to remove potential
        // holes between edge segments
        cvDilate( gray, gray, 0, 1 );
      }
      else
      {
        // apply threshold if l!=0:
        //   tgray(x,y) = gray(x,y) < (l+1)*255/N ? 255 : 0
        cvThreshold( tgray, gray, (l+1)*255/N, 255, CV_THRESH_BINARY );
      }

      // find contours and store them all as a list
      CvSeq* contours = 0;
      cvFindContours( gray, storage, &contours, sizeof(CvContour),
                      CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0) );

      // test each contour
      while( contours )
      {
        // approximate contour with accuracy proportional
        // to the contour perimeter
        CvSeq* result =
          cvApproxPoly( contours, sizeof(CvContour), storage,
                        CV_POLY_APPROX_DP, cvContourPerimeter(contours)*0.02, 0 );
        // square contours should have 4 vertices after approximation,
        // relatively large area (to filter out noisy contours),
        // and be convex.
        // Note: absolute value of an area is used because
        // area may be positive or negative - in accordance with the
        // contour orientation
        const double area = fabs(cvContourArea(result,CV_WHOLE_SEQ));
        if (result->total == 4 &&
            area >= minarea && area <= maxarea &&
            cvCheckContourConvexity(result))
        {
          double s = 0;

          for (int i = 0; i < 4; ++i)
          {
            // find minimum angle between joint
            // edges (maximum of cosine)
            const double t =
              fabs(angle((CvPoint*)cvGetSeqElem( result, i % 4 ),
                         (CvPoint*)cvGetSeqElem( result, (i-2) % 4 ),
                         (CvPoint*)cvGetSeqElem( result, (i-1) % 4 )));
            s = s > t ? s : t;
          }
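          // note (added): cvGetSeqElem accepts negative indices (counting
          // back from the end of the sequence), so the (i-2)%4 and (i-1)%4
          // terms above wrap around to the previous vertices; s therefore
          // ends up holding the largest |cos(angle)| over the four corners.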

          // if cosines of all angles are small
          // (all angles are ~90 degrees) then write quadrangle
          // vertices to resultant sequence
          if (s < mincos)
          {
            PCamera::Square sq;
            for (int i = 0; i < 4; ++i)
            {
              CvPoint *pt = (CvPoint*)cvGetSeqElem( result, i );
              sq.p[i] = Point2D<int>(pt->x, pt->y);
            }
            itsSquares.push_back(sq);
            // LINFO("area=%f, mincos=%f", area, s);
          }
        }

        // take the next contour
        contours = contours->h_next;
      }
    }
  }

  // release all the temporary images
  cvReleaseImage( &gray );
  cvReleaseImage( &pyr );
  cvReleaseImage( &tgray );
  cvReleaseImage( &timg );
  cvReleaseImageHeader( &img );
}


void PCamera::getTransMat()
{
  if (itsSquares.size() > 0)
  {
    CvMat *image_points_ex = cvCreateMat( 4, 2, CV_64FC1);

    for (uint j = 0; j < 4; j++){
      cvSetReal2D( image_points_ex, j, 0, itsSquares[0].p[j].i);
      cvSetReal2D( image_points_ex, j, 1, itsSquares[0].p[j].j);
    }

    CvMat *object_points_ex = cvCreateMat( 4, 3, CV_64FC1);
    //for (uint j = 0; j < corners.size(); j++){
    cvSetReal2D( object_points_ex, 0, 0, -40 ); cvSetReal2D( object_points_ex, 0, 1, -40 ); cvSetReal2D( object_points_ex, 0, 2, 0.0 );
    cvSetReal2D( object_points_ex, 1, 0,  40 ); cvSetReal2D( object_points_ex, 1, 1, -40 ); cvSetReal2D( object_points_ex, 1, 2, 0.0 );
    cvSetReal2D( object_points_ex, 2, 0, -40 ); cvSetReal2D( object_points_ex, 2, 1,  40 ); cvSetReal2D( object_points_ex, 2, 2, 0.0 );
    cvSetReal2D( object_points_ex, 3, 0,  40 ); cvSetReal2D( object_points_ex, 3, 1,  40 ); cvSetReal2D( object_points_ex, 3, 2, 0.0 );
    //}

    cvFindExtrinsicCameraParams2( object_points_ex,
                                  image_points_ex,
                                  itsIntrinsicMatrix,
                                  itsDistortionCoeffs,
                                  itsCameraRotation,
                                  itsCameraTranslation);

    printf( "Rotation2: %f %f %f\n",
            cvGetReal2D(itsCameraRotation, 0, 0),
            cvGetReal2D(itsCameraRotation, 0, 1),
            cvGetReal2D(itsCameraRotation, 0, 2));
    printf( "Translation2: %f %f %f\n",
            cvGetReal2D(itsCameraTranslation, 0, 0),
            cvGetReal2D(itsCameraTranslation, 0, 1),
            cvGetReal2D(itsCameraTranslation, 0, 2));
  }
}


//// ######################################################################
//void PCamera::onSimEventLGNOutput(SimEventQueue& q,
//                                  rutz::shared_ptr<SimEventLGNOutput>& e)
//{
//  itsPCameraCellsInput = e->getCells();
//
//
//  inplaceNormalize(itsPCameraCellsInput[0], 0.0F, 255.0F);
//  Image<byte> in = itsPCameraCellsInput[0];
//
//  int rows = 7;
//  int cols = 7;
//  std::vector<CvPoint2D32f> corners(rows*cols);
//
//  int count = 0;
//  int result = cvFindChessboardCorners(img2ipl(in), cvSize(rows,cols),
//                                       &corners[0], &count,
//                                       CV_CALIB_CB_ADAPTIVE_THRESH |
//                                       CV_CALIB_CB_NORMALIZE_IMAGE |
//                                       CV_CALIB_CB_FILTER_QUADS);
//
//  // result = 0 if not all corners were found
//  // Find corners to an accuracy of 0.1 pixel
//  if(result != 0)
//    cvFindCornerSubPix(img2ipl(in),
//                       &corners[0],
//                       count,
//                       cvSize(10,10),   //win
//                       cvSize(-1,-1),   //zero_zone
//                       cvTermCriteria(CV_TERMCRIT_ITER,1000,0.01) );
//
//  //LINFO("Found %i %lu complete %i", count, corners.size(), result);
%i", count, corners.size(), result); 00411 // //cvDrawChessboardCorners(img2ipl(in), cvSize(rows,cols), &corners[0], count, result); 00412 // 00413 // if(result != 0) 00414 // { 00415 // if (itsSaveCorners) 00416 // { 00417 // for(uint i=0; i<corners.size(); i++) 00418 // itsCorners.push_back(corners[i]); 00419 // itsSaveCorners=false; 00420 // } 00421 // } 00422 // 00423 // //Set the intrinsnic params based on the zoom 00424 //// float currentZoom = itsCameraCtrl->getCurrentZoom(); 00425 //// cvmSet(itsIntrinsicMatrix, 0, 0, 3.6e-06*pow(currentZoom, 3) - 0.0012*pow(currentZoom,2) + 0.9265*currentZoom + 415.5); 00426 //// cvmSet(itsIntrinsicMatrix, 1, 1, 3.6e-06*pow(currentZoom, 3) - 0.0013*pow(currentZoom,2) + 0.9339*currentZoom + 436); 00427 // 00428 // 00429 // itsDebugImg = in; 00430 // 00431 // std::vector<Point3D<float> > points; 00432 // 00433 // //points.push_back(Point3D<float>(0,0,0)); 00434 // //points.push_back(Point3D<float>(63.5,0,0)); 00435 // //points.push_back(Point3D<float>(63.5,44.45,0)); 00436 // //points.push_back(Point3D<float>(0,44.45,0)); 00437 // 00438 // //points.push_back(Point3D<float>(0,0,-50.8)); 00439 // //points.push_back(Point3D<float>(63.5,0,-50.8)); 00440 // //points.push_back(Point3D<float>(63.5,44.45,-50.8)); 00441 // //points.push_back(Point3D<float>(0,44.45,-50.8)); 00442 // 00443 // Point3D<float> pos(-15,-15,0); 00444 // points.push_back(Point3D<float>(pos.x,pos.y,0)); 00445 // points.push_back(Point3D<float>(pos.x,pos.y,30)); 00446 // points.push_back(Point3D<float>(pos.x+30,pos.y,30)); 00447 // points.push_back(Point3D<float>(pos.x+30,pos.y,0)); 00448 // points.push_back(Point3D<float>(pos.x,pos.y,0)); 00449 // 00450 // points.push_back(Point3D<float>(pos.x,pos.y+30,0)); 00451 // points.push_back(Point3D<float>(pos.x,pos.y+30,30)); 00452 // points.push_back(Point3D<float>(pos.x,pos.y,30)); 00453 // 00454 // 00455 // points.push_back(Point3D<float>(pos.x+30,pos.y,30)); 00456 // points.push_back(Point3D<float>(pos.x+30,pos.y+30,30)); 00457 // points.push_back(Point3D<float>(pos.x,pos.y+30,30)); 00458 // points.push_back(Point3D<float>(pos.x,pos.y,30)); 00459 // 00460 // 00461 // 00462 // //points.push_back(Point3D<float>(30,30,0)); 00463 // //points.push_back(Point3D<float>(0,30,0)); 00464 // 00465 // //points.push_back(Point3D<float>(0,0,-4.5)); 00466 // //points.push_back(Point3D<float>(30,0,-4.5)); 00467 // //points.push_back(Point3D<float>(30,30,-4.5)); 00468 // //points.push_back(Point3D<float>(0,30,-4.5)); 00469 // 00470 // if (itsDrawGrid) 00471 // projectGrid(); 00472 // // displayExtrinsic(corners); 00473 // //projectPoints(points); 00474 // 00475 // //double d0 = cvGetReal2D( itsDistortionCoeffs, 0, 0); 00476 // //double d1 = cvGetReal2D( itsDistortionCoeffs, 1, 0); 00477 // //double d2 = cvGetReal2D( itsDistortionCoeffs, 2, 0); 00478 // //double d3 = cvGetReal2D( itsDistortionCoeffs, 3, 0); 00479 // ////printf( "distortion_coeffs ( %7.7lf, %7.7lf, %7.7lf, %7.7lf)\n", d0, d1, d2, d3); 00480 // 00481 // //double ir00 = cvGetReal2D( itsIntrinsicMatrix, 0, 0); 00482 // //double ir01 = cvGetReal2D( itsIntrinsicMatrix, 0, 1); 00483 // //double ir02 = cvGetReal2D( itsIntrinsicMatrix, 0, 2); 00484 // //double ir10 = cvGetReal2D( itsIntrinsicMatrix, 1, 0); 00485 // //double ir11 = cvGetReal2D( itsIntrinsicMatrix, 1, 1); 00486 // //double ir12 = cvGetReal2D( itsIntrinsicMatrix, 1, 2); 00487 // //double ir20 = cvGetReal2D( itsIntrinsicMatrix, 2, 0); 00488 // //double ir21 = cvGetReal2D( itsIntrinsicMatrix, 2, 1); 00489 // //double ir22 = 
//  //printf( "intrinsics ( %7.5lf, %7.5lf, %7.5lf)\n", ir00, ir01, ir02);
//  //printf( "           ( %7.5lf, %7.5lf, %7.5lf)\n", ir10, ir11, ir12);
//  //printf( "           ( %7.5lf, %7.5lf, %7.5lf)\n", ir20, ir21, ir22);
//
//  //printf( "Rotation: %f %f %f\n",
//  //    cvGetReal2D(itsCameraRotation, 0, 0),
//  //    cvGetReal2D(itsCameraRotation, 0, 1),
//  //    cvGetReal2D(itsCameraRotation, 0, 2));
//  //printf( "Translation: %f %f %f\n",
//  //    cvGetReal2D(itsCameraTranslation, 0, 0),
//  //    cvGetReal2D(itsCameraTranslation, 0, 1),
//  //    cvGetReal2D(itsCameraTranslation, 0, 2));
//
//}

// ######################################################################
void PCamera::onSimEventPTZToLoc(SimEventQueue& q,
                                 rutz::shared_ptr<SimEventUserInput>& e)
{
  Point2D<int> targetLoc = e->getMouseClick();

  //LINFO("PTZ to %ix%i",
  //    targetLoc.i,
  //    targetLoc.j);

  //if (targetLoc.isValid())
  //{
  //  itsZoomOut = false;
  //  q.post(rutz::make_shared(new SimEventSetVisualTracker(this, targetLoc)));
  //}
}

//void PCamera::onSimEventVisualTracker(SimEventQueue& q, rutz::shared_ptr<SimEventVisualTracker>& e)
//{
//
//  if (e->isTracking())
//  {
//    itsCurrentTargetLoc = e->getTargetLoc();
//
//    //move camera to target loc
//    Point2D<int> locErr = itsCurrentTargetLoc - Point2D<int>(320/2, 240/2);
//
//    int currentZoom = itsCameraCtrl->getCurrentZoom();
//
//    Point2D<float> pGain(0.15, 0.15);
//
//    if (currentZoom > 800)
//    {
//      pGain.i = 0.05; pGain.j = 0.06;
//    } else if (currentZoom > 1000)
//    {
//      pGain.i = 0.04; pGain.j = 0.04;
//    }
//
//
//    //P controller for now
//    Point2D<float> u = pGain*locErr;
//
//
//    LINFO("***Target is at: %ix%i zoom=%i (err %ix%i) move=%ix%i",
//          itsCurrentTargetLoc.i, itsCurrentTargetLoc.j, currentZoom,
//          locErr.i, locErr.j,
//          (int)u.i, (int)u.j);
//
//    if (fabs(locErr.distance(Point2D<int>(0,0))) > 16)
//      itsCameraCtrl->movePanTilt((int)u.i, -1*(int)u.j, true); //move relative
//    else
//    {
//      LINFO("****** zoom out");
//      if (itsZoomOut)
//        itsCameraCtrl->zoom(400); //Zoom out
//      else
//        itsCameraCtrl->zoom(10, true); //move relative
//    }
//  } else {
//    //LINFO("Not tracking");
//  }
//
//
//}


void PCamera::onSimEventUserInput(SimEventQueue& q, rutz::shared_ptr<SimEventUserInput>& e)
{
  LINFO("Got event %s %ix%i key=%i",
        e->getWinName(),
        e->getMouseClick().i,
        e->getMouseClick().j,
        e->getKey());

  onSimEventPTZToLoc(q, e);

  bool moveCamera = true;

  switch(e->getKey())
  {
  case KEY_UP:
    if (moveCamera)
    {
      if (itsChangeRot)
        cvmSet(itsCameraRotation, 0, 0, cvGetReal2D(itsCameraRotation, 0, 0) + M_PI/180);
      else
        cvmSet(itsCameraTranslation, 0, 0, cvGetReal2D(itsCameraTranslation, 0, 0) + 1);
    } else {
      itsCameraCtrl->movePanTilt(1, 0, true); //move relative
    }
    break;
  case KEY_DOWN:
    if (moveCamera)
    {
      if (itsChangeRot)
        cvmSet(itsCameraRotation, 0, 0, cvGetReal2D(itsCameraRotation, 0, 0) - M_PI/180);
      else
        cvmSet(itsCameraTranslation, 0, 0, cvGetReal2D(itsCameraTranslation, 0, 0) - 1);
    } else {
      itsCameraCtrl->movePanTilt(-1, 0, true); //move relative
    }
    break;
  case KEY_LEFT:
    if (moveCamera)
    {
      if (itsChangeRot)
        cvmSet(itsCameraRotation, 0, 1, cvGetReal2D(itsCameraRotation, 0, 1) + M_PI/180);
      else
        cvmSet(itsCameraTranslation, 0, 1, cvGetReal2D(itsCameraTranslation, 0, 1) + 1);
    } else {
      itsCameraCtrl->movePanTilt(0, 1, true); //move relative
    }
    break;
  case KEY_RIGHT:
    if (moveCamera)
    {
      if (itsChangeRot)
        cvmSet(itsCameraRotation, 0, 1, cvGetReal2D(itsCameraRotation, 0, 1) - M_PI/180);
      else
        cvmSet(itsCameraTranslation, 0, 1, cvGetReal2D(itsCameraTranslation, 0, 1) - 1);
    } else {
      itsCameraCtrl->movePanTilt(0, -1, true); //move relative
    }
    break;
  case 38: //a
    if (moveCamera)
    {
      if (itsChangeRot)
        cvmSet(itsCameraRotation, 0, 2, cvGetReal2D(itsCameraRotation, 0, 2) + M_PI/180);
      else
        cvmSet(itsCameraTranslation, 0, 2, cvGetReal2D(itsCameraTranslation, 0, 2) + 1);
    } else {
      itsCameraCtrl->zoom(1, true); //move relative
    }
    break;
  case 52: //z
    if (moveCamera)
    {
      if (itsChangeRot)
        cvmSet(itsCameraRotation, 0, 2, cvGetReal2D(itsCameraRotation, 0, 2) - M_PI/180);
      else
        cvmSet(itsCameraTranslation, 0, 2, cvGetReal2D(itsCameraTranslation, 0, 2) - 1);
    } else {
      itsCameraCtrl->zoom(-1, true); //move relative
    }
    break;
  case 39: //s
    {
      //itsSaveCorners = true;
      //itsZoomOut=true;
      itsCameraCtrl->zoom(+10, true); //move relative

      //Point2D<int> target(-1,-1);
      //q.post(rutz::make_shared(new SimEventSetVisualTracker(this, target)));
    }
    break;
  case 54: //c
    calibrate(itsCorners);
    itsCurrentObjName = "";
    break;
  case 42: //g
    itsDrawGrid = !itsDrawGrid;
    {
      double d0 = cvGetReal2D( itsDistortionCoeffs, 0, 0);
      double d1 = cvGetReal2D( itsDistortionCoeffs, 1, 0);
      double d2 = cvGetReal2D( itsDistortionCoeffs, 2, 0);
      double d3 = cvGetReal2D( itsDistortionCoeffs, 3, 0);
      printf( "distortion_coeffs ( %7.7lf, %7.7lf, %7.7lf, %7.7lf)\n", d0, d1, d2, d3);

      double ir00 = cvGetReal2D( itsIntrinsicMatrix, 0, 0);
      double ir01 = cvGetReal2D( itsIntrinsicMatrix, 0, 1);
      double ir02 = cvGetReal2D( itsIntrinsicMatrix, 0, 2);
      double ir10 = cvGetReal2D( itsIntrinsicMatrix, 1, 0);
      double ir11 = cvGetReal2D( itsIntrinsicMatrix, 1, 1);
      double ir12 = cvGetReal2D( itsIntrinsicMatrix, 1, 2);
      double ir20 = cvGetReal2D( itsIntrinsicMatrix, 2, 0);
      double ir21 = cvGetReal2D( itsIntrinsicMatrix, 2, 1);
      double ir22 = cvGetReal2D( itsIntrinsicMatrix, 2, 2);
      printf( "intrinsics ( %7.5lf, %7.5lf, %7.5lf)\n", ir00, ir01, ir02);
      printf( "           ( %7.5lf, %7.5lf, %7.5lf)\n", ir10, ir11, ir12);
      printf( "           ( %7.5lf, %7.5lf, %7.5lf)\n", ir20, ir21, ir22);

      printf( "Rotation: %f %f %f\n",
              cvGetReal2D(itsCameraRotation, 0, 0),
              cvGetReal2D(itsCameraRotation, 0, 1),
              cvGetReal2D(itsCameraRotation, 0, 2));
      printf( "Translation: %f %f %f\n",
              cvGetReal2D(itsCameraTranslation, 0, 0),
              cvGetReal2D(itsCameraTranslation, 0, 1),
              cvGetReal2D(itsCameraTranslation, 0, 2));
    }
    break;
  case 27: //r
    itsChangeRot = !itsChangeRot;
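    // 'r' toggles whether the arrow keys and 'a'/'z' cases above adjust the
    // extrinsic rotation vector (itsChangeRot == true, in 1-degree steps) or
    // the translation vector (in 1-unit steps).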
    //recognize
    //LINFO("What is this object");
    //std::getline(std::cin, itsCurrentObjName);
    //itsZoomOut=true;

    break;

  }

  //cvmSet(itsIntrinsicMatrix, 0, 0, 577.94189); cvmSet(itsIntrinsicMatrix, 0, 1, 0); cvmSet(itsIntrinsicMatrix, 0, 2, 159.500);
  //cvmSet(itsIntrinsicMatrix, 1, 0, 0); cvmSet(itsIntrinsicMatrix, 1, 1, 2235823.50000 ); cvmSet(itsIntrinsicMatrix, 1, 2, 119.5);
}


void PCamera::calibrate(std::vector<CvPoint2D32f>& corners)
{
  int rows = 7;
  int cols = 7;

  int nViews = corners.size()/(rows*cols);
  LINFO("Calibrate: %i views", nViews);

  if (nViews <= 0)
  {
    LINFO("No corners avl");
    return;
  }
  float gridSize = 40;

  // Set up the object points matrix
  // Squares are size set in defines found in header file.
  CvMat* object_points = cvCreateMat(corners.size(), 3, CV_32FC1);
  for(uint k=0; k < corners.size()/(rows*cols); k++ )
  {
    for(int i=0; i < cols; i++ )
    {
      for(int j=0; j < rows; j++ )
      {
        cvmSet( object_points, k*(rows*cols) + i*rows + j, 0, gridSize*j ); // x coordinate (square size given by gridSize, in mm)
        cvmSet( object_points, k*(rows*cols) + i*rows + j, 1, gridSize*i ); // y coordinate
        cvmSet( object_points, k*(rows*cols) + i*rows + j, 2, 0 );          // z coordinate
      }
    }
  }

  //for (uint j = 0; j < corners.size(); j++){
  //  cvSetReal2D( object_points, j, 0, ( j % rows) * gridSize ); //0.4m
  //  cvSetReal2D( object_points, j, 1, ( j / rows) * gridSize );
  //  cvSetReal2D( object_points, j, 2, 0.0 );
  //}

  // Set up the matrix of points per image
  CvMat* point_counts = cvCreateMat(1, nViews, CV_32SC1);
  for(int i=0; i < nViews; i++ )
    cvSetReal1D( point_counts, i, rows*cols );

  // Copy corners found to matrix
  CvMat image_points = cvMat(corners.size(), 2, CV_32FC1, &corners[0]);

  int flags = 0;
  CvMat* cameraRotation = cvCreateMat( nViews, 3, CV_64FC1);
  CvMat* cameraTranslation = cvCreateMat( nViews, 3, CV_64FC1);

  //flags = CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_USE_INTRINSIC_GUESS;
  flags = CV_CALIB_USE_INTRINSIC_GUESS;
  cvCalibrateCamera2( object_points, &image_points, point_counts, cvSize(320,240),
                      itsIntrinsicMatrix, itsDistortionCoeffs,
                      cameraRotation,
                      cameraTranslation,
                      flags);

  //display results
  //  Image<byte> in = itsPCameraCellsInput[0];
  //  Image<byte> out(in.getDims(), ZEROS);

  //  cvUndistort2( img2ipl(in), img2ipl(out), itsIntrinsicMatrix, itsDistortionCoeffs);

  //  //itsDebugImg = out;

  cvReleaseMat( &object_points);
  cvReleaseMat( &point_counts);
}

void PCamera::displayExtrinsic(std::vector<CvPoint2D32f>& corners)
{
  int rows = 7;
  //int cols = 6;

  Image<byte> tmp = itsPCameraCellsInput[0];

  float gridSize = 40;
  //Get
  CvMat *image_points_ex = cvCreateMat( corners.size(), 2, CV_64FC1);

  for (uint j = 0; j < corners.size(); j++){
    cvSetReal2D( image_points_ex, j, 0, corners[j].x);
    cvSetReal2D( image_points_ex, j, 1, corners[j].y);
  }

  //int views = 1;

  CvMat *object_points_ex = cvCreateMat( corners.size(), 3, CV_64FC1);
  for (uint j = 0; j < corners.size(); j++){
    cvSetReal2D( object_points_ex, j, 0, ( j % rows) * gridSize ); //0.4m
    cvSetReal2D( object_points_ex, j, 1, ( j / rows) * gridSize );
    cvSetReal2D( object_points_ex, j, 2, 0.0 );
  }

  //cvSetReal2D( itsCameraTranslation, 0, 2, 782.319961 );
  cvFindExtrinsicCameraParams2( object_points_ex,
                                image_points_ex,
                                itsIntrinsicMatrix,
                                itsDistortionCoeffs,
                                itsCameraRotation,
                                itsCameraTranslation);

  //cvmSet(itsCameraRotation, 0, 0, -1.570442 );
  //cvmSet(itsCameraRotation, 0, 1, 0.028913 );
  //cvmSet(itsCameraRotation, 0, 2, 0.028911 );

  //cvmSet(itsCameraTranslation, 0, 0, -57.067339 );
  //cvmSet(itsCameraTranslation, 0, 1, 0.005114 );
  //cvmSet(itsCameraTranslation, 0, 2, 777.677687 );


  //printf( "Rotation2: %f %f %f\n",
  //    cvGetReal2D(itsCameraRotation, 0, 0),
  //    cvGetReal2D(itsCameraRotation, 0, 1),
  //    cvGetReal2D(itsCameraRotation, 0, 2));
  //printf( "Translation2: %f %f %f\n",
  //    cvGetReal2D(itsCameraTranslation, 0, 0),
  //    cvGetReal2D(itsCameraTranslation, 0, 1),
  //    cvGetReal2D(itsCameraTranslation, 0, 2));

  CvMat *rot_mat = cvCreateMat( 3, 3, CV_64FC1);
  cvRodrigues2( itsCameraRotation, rot_mat, 0);

  int NUM_GRID = 12; //21
  CvMat *my_3d_point = cvCreateMat( 3, NUM_GRID * NUM_GRID + 1, CV_64FC1);
  CvMat *my_image_point = cvCreateMat( 2, NUM_GRID * NUM_GRID + 1, CV_64FC1);

  for ( int i = 0; i < NUM_GRID; i++){
    for ( int j = 0; j < NUM_GRID; j++){
      cvSetReal2D( my_3d_point, 0, i * NUM_GRID + j, -(gridSize*4) + (i * gridSize));
      cvSetReal2D( my_3d_point, 1, i * NUM_GRID + j, -(gridSize*4) + (j * gridSize));
      cvSetReal2D( my_3d_point, 2, i * NUM_GRID + j, 0.0);
    }
  }

  cvSetReal2D( my_3d_point, 0, NUM_GRID*NUM_GRID, 0.0);
  cvSetReal2D( my_3d_point, 1, NUM_GRID*NUM_GRID, 0.0);
  cvSetReal2D( my_3d_point, 2, NUM_GRID*NUM_GRID, 15);


  cvProjectPoints2( my_3d_point,
                    itsCameraRotation,
                    itsCameraTranslation,
                    itsIntrinsicMatrix,
                    itsDistortionCoeffs,
                    my_image_point);

  for ( int i = 0; i < NUM_GRID; i++){
    for ( int j = 0; j < NUM_GRID-1; j++){
      int im_x1 = (int)cvGetReal2D( my_image_point, 0, i * NUM_GRID + j);
      int im_y1 = (int)cvGetReal2D( my_image_point, 1, i * NUM_GRID + j);
      int im_x2 = (int)cvGetReal2D( my_image_point, 0, i * NUM_GRID + j+1);
      int im_y2 = (int)cvGetReal2D( my_image_point, 1, i * NUM_GRID + j+1);

      cvLine( img2ipl(tmp), cvPoint( im_x1, im_y1), cvPoint( im_x2, im_y2), CV_RGB( 0, 255, 0), 1);
    }
  }
  for ( int j = 0; j < NUM_GRID; j++){
    for ( int i = 0; i < NUM_GRID-1; i++){
      int im_x1 = (int)cvGetReal2D( my_image_point, 0, i * NUM_GRID + j);
      int im_y1 = (int)cvGetReal2D( my_image_point, 1, i * NUM_GRID + j);
      int im_x2 = (int)cvGetReal2D( my_image_point, 0, (i+1) * NUM_GRID + j);
      int im_y2 = (int)cvGetReal2D( my_image_point, 1, (i+1) * NUM_GRID + j);

      cvLine( img2ipl(tmp), cvPoint( im_x1, im_y1), cvPoint( im_x2, im_y2), CV_RGB( 0, 255, 0), 1);
    }
  }

  int im_x0 = (int)cvGetReal2D( my_image_point, 0, 0);
  int im_y0 = (int)cvGetReal2D( my_image_point, 1, 0);
  int im_x = (int)cvGetReal2D( my_image_point, 0, NUM_GRID*NUM_GRID);
  int im_y = (int)cvGetReal2D( my_image_point, 1, NUM_GRID*NUM_GRID);
  cvLine( img2ipl(tmp), cvPoint( im_x0, im_y0), cvPoint( im_x, im_y), CV_RGB( 255, 0, 0), 2); //Z axis
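  // The red segment above joins the projection of the first grid corner to
  // the projection of the 3D point (0,0,15), giving a rough visual check of
  // the recovered extrinsics (labeled here as the Z axis).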

  itsDebugImg = tmp;

  cvReleaseMat( &my_3d_point);
  cvReleaseMat( &my_image_point);
  //cvReleaseMat( &image_points_ex);
  //cvReleaseMat( &object_points_ex);
  cvReleaseMat( &rot_mat);
}


void PCamera::projectGrid()
{
  Image<byte> tmp = itsDebugImg; //itsPCameraCellsInput[0];

  float gridSize = 40; //38; // in mm
  CvMat *rot_mat = cvCreateMat( 3, 3, CV_64FC1);
  cvRodrigues2( itsCameraRotation, rot_mat, 0);

  int NUM_GRID = 9; //21
  CvMat *my_3d_point = cvCreateMat( 3, NUM_GRID * NUM_GRID + 2, CV_64FC1);
  CvMat *my_image_point = cvCreateMat( 2, NUM_GRID * NUM_GRID + 2, CV_64FC1);

  for ( int i = 0; i < NUM_GRID; i++){
    for ( int j = 0; j < NUM_GRID; j++){
      cvSetReal2D( my_3d_point, 0, i * NUM_GRID + j, -(gridSize*NUM_GRID/2) + (i * gridSize));
      cvSetReal2D( my_3d_point, 1, i * NUM_GRID + j, -(gridSize*NUM_GRID/2) + (j * gridSize));
      cvSetReal2D( my_3d_point, 2, i * NUM_GRID + j, 0.0);
    }
  }

  cvSetReal2D( my_3d_point, 0, NUM_GRID*NUM_GRID, 0);
  cvSetReal2D( my_3d_point, 1, NUM_GRID*NUM_GRID, 0);
  cvSetReal2D( my_3d_point, 2, NUM_GRID*NUM_GRID, 0);

  cvSetReal2D( my_3d_point, 0, NUM_GRID*NUM_GRID+1, 0);
  cvSetReal2D( my_3d_point, 1, NUM_GRID*NUM_GRID+1, 0);
  cvSetReal2D( my_3d_point, 2, NUM_GRID*NUM_GRID+1, 30);


  cvProjectPoints2( my_3d_point,
                    itsCameraRotation,
                    itsCameraTranslation,
                    itsIntrinsicMatrix,
                    itsDistortionCoeffs,
                    my_image_point);

  for ( int i = 0; i < NUM_GRID; i++){
    for ( int j = 0; j < NUM_GRID-1; j++){
      int im_x1 = (int)cvGetReal2D( my_image_point, 0, i * NUM_GRID + j);
      int im_y1 = (int)cvGetReal2D( my_image_point, 1, i * NUM_GRID + j);
      int im_x2 = (int)cvGetReal2D( my_image_point, 0, i * NUM_GRID + j+1);
      int im_y2 = (int)cvGetReal2D( my_image_point, 1, i * NUM_GRID + j+1);

      cvLine( img2ipl(tmp), cvPoint( im_x1, im_y1), cvPoint( im_x2, im_y2), CV_RGB( 0, 255, 0), 1);
    }
  }
  for ( int j = 0; j < NUM_GRID; j++){
    for ( int i = 0; i < NUM_GRID-1; i++){
      int im_x1 = (int)cvGetReal2D( my_image_point, 0, i * NUM_GRID + j);
      int im_y1 = (int)cvGetReal2D( my_image_point, 1, i * NUM_GRID + j);
      int im_x2 = (int)cvGetReal2D( my_image_point, 0, (i+1) * NUM_GRID + j);
      int im_y2 = (int)cvGetReal2D( my_image_point, 1, (i+1) * NUM_GRID + j);

      cvLine( img2ipl(tmp), cvPoint( im_x1, im_y1), cvPoint( im_x2, im_y2), CV_RGB( 0, 255, 0), 1);
    }
  }

  int im_x0 = (int)cvGetReal2D( my_image_point, 0, NUM_GRID*NUM_GRID);
  int im_y0 = (int)cvGetReal2D( my_image_point, 1, NUM_GRID*NUM_GRID);
  int im_x = (int)cvGetReal2D( my_image_point, 0, NUM_GRID*NUM_GRID+1);
  int im_y = (int)cvGetReal2D( my_image_point, 1, NUM_GRID*NUM_GRID+1);
  cvLine( img2ipl(tmp), cvPoint( im_x0, im_y0), cvPoint( im_x, im_y), CV_RGB( 255, 0, 0), 2); //Z axis


  itsDebugImg = tmp;

  cvReleaseMat( &my_3d_point);
  cvReleaseMat( &my_image_point);
  cvReleaseMat( &rot_mat);
}

void PCamera::projectPoints(std::vector<Point3D<float> >& points)
{
  Image<byte> tmp = itsDebugImg;
  //Image<byte> tmp = itsPCameraCellsInput[0];

  CvMat *rot_mat = cvCreateMat( 3, 3, CV_64FC1);
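  // cvRodrigues2 below expands the 1x3 axis-angle rotation vector into a
  // full 3x3 rotation matrix; note that rot_mat itself is not used further
  // in this function, since cvProjectPoints2 takes the rotation vector
  // directly.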
  cvRodrigues2( itsCameraRotation, rot_mat, 0);

  //Project cube
  CvMat *my_3d_point = cvCreateMat( 3, points.size(), CV_64FC1);
  CvMat *my_image_point = cvCreateMat( 2, points.size(), CV_64FC1);

  for(uint i=0; i<points.size(); i++)
  {
    cvSetReal2D( my_3d_point, 0, i, points[i].x-20);
    cvSetReal2D( my_3d_point, 1, i, points[i].y-20);
    cvSetReal2D( my_3d_point, 2, i, points[i].z);
  }

  cvProjectPoints2( my_3d_point,
                    itsCameraRotation,
                    itsCameraTranslation,
                    itsIntrinsicMatrix,
                    itsDistortionCoeffs,
                    my_image_point);

  for(uint i=0; i<points.size(); i++)
  {
    int im_x1 = (int)cvGetReal2D( my_image_point, 0, i );
    int im_y1 = (int)cvGetReal2D( my_image_point, 1, i );

    int im_x2 = (int)cvGetReal2D( my_image_point, 0, (i+1)%points.size() );
    int im_y2 = (int)cvGetReal2D( my_image_point, 1, (i+1)%points.size() );

    drawLine(tmp, Point2D<int>(im_x1, im_y1), Point2D<int>(im_x2, im_y2),
             (byte)0, 3);
  }

  //  for ( int i = 0; i < NUM_GRID; i++){
  //    for ( int j = 0; j < NUM_GRID-1; j++){
  //      int im_x1 = (int)cvGetReal2D( my_image_point, 0, i * NUM_GRID + j);
  //      int im_y1 = (int)cvGetReal2D( my_image_point, 1, i * NUM_GRID + j);
  //      int im_x2 = (int)cvGetReal2D( my_image_point, 0, i * NUM_GRID + j+1);
  //      int im_y2 = (int)cvGetReal2D( my_image_point, 1, i * NUM_GRID + j+1);
  //
  //      cvLine( img2ipl(tmp), cvPoint( im_x1, im_y1), cvPoint( im_x2, im_y2), CV_RGB( 0, 255, 0), 1);
  //    }
  //  }
  //  for ( int j = 0; j < NUM_GRID; j++){
  //    for ( int i = 0; i < NUM_GRID-1; i++){
  //      int im_x1 = (int)cvGetReal2D( my_image_point, 0, i * NUM_GRID + j);
  //      int im_y1 = (int)cvGetReal2D( my_image_point, 1, i * NUM_GRID + j);
  //      int im_x2 = (int)cvGetReal2D( my_image_point, 0, (i+1) * NUM_GRID + j);
  //      int im_y2 = (int)cvGetReal2D( my_image_point, 1, (i+1) * NUM_GRID + j);
  //
  //      cvLine( img2ipl(tmp), cvPoint( im_x1, im_y1), cvPoint( im_x2, im_y2), CV_RGB( 0, 255, 0), 1);
  //    }
  //  }
  //
  //  int im_x0 = (int)cvGetReal2D( my_image_point, 0, 0);
  //  int im_y0 = (int)cvGetReal2D( my_image_point, 1, 0);
  //  int im_x = (int)cvGetReal2D( my_image_point, 0, NUM_GRID*NUM_GRID);
  //  int im_y = (int)cvGetReal2D( my_image_point, 1, NUM_GRID*NUM_GRID);
  //  cvLine( img2ipl(tmp), cvPoint( im_x0, im_y0), cvPoint( im_x, im_y), CV_RGB( 255, 0, 0), 2); //Z axis


  itsDebugImg = tmp;

  cvReleaseMat( &my_3d_point);
  cvReleaseMat( &my_image_point);
  cvReleaseMat( &rot_mat);
}


// ######################################################################
void PCamera::onSimEventSaveOutput(SimEventQueue& q, rutz::shared_ptr<SimEventSaveOutput>& e)
{
  if (itsShowDebug.getVal())
  {
    // get the OFS to save to, assuming sinfo is of type
    // SimModuleSaveInfo (will throw a fatal exception otherwise):
    nub::ref<FrameOstream> ofs =
      dynamic_cast<const SimModuleSaveInfo&>(e->sinfo()).ofs;
    Layout<PixRGB<byte> > disp = getDebugImage();
    ofs->writeRgbLayout(disp, "PCamera", FrameInfo("PCamera", SRC_POS));
  }
}


Layout<PixRGB<byte> > PCamera::getDebugImage()
{
  Layout<PixRGB<byte> > outDisp;

  Image<PixRGB<byte> > tmp = itsCurrentImg;
  for(uint i=0; i<tmp.size(); i++)
    if (itsRenderImg[i] != PixRGB<byte>(0,0,0))
      tmp[i] = itsRenderImg[i];

  for(uint i=0; i<itsSquares.size(); i++)
  {
    for(uint j=0; j<4; j++)
      drawLine(itsCurrentImg,
               itsSquares[i].p[j],
               itsSquares[i].p[(j+1)%4],
               PixRGB<byte>(255,0,0));
  }
  outDisp = hcat(itsCurrentImg, tmp);

  return outDisp;
}

// ######################################################################
/* So things look consistent in everyone's emacs... */
/* Local Variables:     */
/* indent-tabs-mode: nil */
/* End:                 */

#endif