/*!@file ArmControl/app-ArmControl.C tests the multi-threaded saliency code
 * with the arm control to move the end effector toward salient objects
 * */

// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
// University of Southern California (USC) and the iLab at USC.         //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: Lior Elazary <elazary@usc.edu>
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/ArmControl/app-ArmControl.C $
// $Id: app-ArmControl.C 10982 2009-03-05 05:11:22Z itti $
//

#ifndef TESTARM_H_DEFINED
#define TESTARM_H_DEFINED

#include "Component/ModelManager.H"
#include "Image/Image.H"
#include "Image/ImageSet.H"
#include "Image/ShapeOps.H"
#include "Image/DrawOps.H"
#include "Image/FilterOps.H"
#include "Image/ColorOps.H"
#include "Image/Transforms.H"
#include "Image/MathOps.H"
#include "Image/CutPaste.H"     // for inplacePaste()
#include "Image/Pixels.H"
#include "Neuro/StdBrain.H"
#include "Neuro/VisualCortex.H"
#include "Neuro/VisualCortexConfigurator.H"
#include "Neuro/NeuroOpts.H"
#include "Channels/DescriptorVec.H"
#include "Channels/ComplexChannel.H"
#include "Channels/SubmapAlgorithmBiased.H"
#include "Simulation/SimEventQueue.H"
#include "Simulation/SimulationOpts.H"
#include "Simulation/SimEventQueueConfigurator.H"
#include "Learn/Bayes.H"
#include "GUI/DebugWin.H"
#include "ObjRec/BayesianBiaser.H"
#include "Devices/DeviceOpts.H"
#include "Devices/FrameGrabberConfigurator.H"
#include "Media/FrameSeries.H"
#include "Transport/FrameIstream.H"
#include "GUI/XWinManaged.H"
#include "Neuro/NeuroOpts.H"
#include "Neuro/SaccadeControllers.H"
#include "Neuro/SaccadeControllerConfigurator.H"
#include "Raster/Raster.H"
#include "Transport/FrameIstream.H"
#include "Util/Timer.H"
#include "Devices/rt100.H"
#include "Controllers/PID.H"

#include <arpa/inet.h>
#include <fcntl.h>
#include <netdb.h>
#include <signal.h>
#include <stdlib.h>
#include <unistd.h>

#define UP_KEY 98
#define DOWN_KEY 104
#define LEFT_KEY 100
#define RIGHT_KEY 102

//! Number of frames over which average framerate is computed
#define NAVG 20

//! Factor to display the sm values as greyscale:

// UDP communications:
#define UDPHOST "192.168.0.8"
#define UDPPORT 5003

static bool goforever = true;  //!< Will turn false on interrupt signal

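// The application runs as a simple state machine (see enum STATE and the
// switch in main() below): INIT moves the arm to a start pose, REACH
// visually servos the gripper toward the most salient point in the camera
// image, GRASP closes the gripper on the object, PLACE_IN_BASKET drops it
// off, and DONE parks the arm and ends the run.
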
//! Signal handler (e.g., for control-C)
void terminate(int s)
{ LERROR("*** INTERRUPT ***"); goforever = false; exit(1); }

enum STATE {INIT, REACH, GRASP, LIFT, PLACE_IN_BASKET, DROP, DONE};

struct ArmPosition
{
  int elbow;
  int sholder;
  int zed;
};

bool init(nub::soft_ref<RT100> rt100);
bool reach(nub::soft_ref<RT100> rt100, Point2D<int> fixation,
           int x, int y, int zedPos);
bool learnMovement(float xErr, float yErr, int *elbowPos, int *sholderPos);
bool grasp(nub::soft_ref<RT100> rt100);
bool lift(nub::soft_ref<RT100> rt100);
bool placeInBasket(nub::soft_ref<RT100> rt100);
bool drop(nub::soft_ref<RT100> rt100);
void updateDisplay(Image<PixRGB<byte> > &ima,
                   Image<float> &sm,
                   Point2D<int> &fixation);
void waitForMoveComplete(nub::soft_ref<RT100> rt100);

Point2D<int> evolveBrain(Image<PixRGB<byte> > &img, DescriptorVec& descVec,
                         Image<float> &smap);

void biasVC(ComplexChannel &vc, Bayes &bayesNet, int objId, bool doBias);


ModelManager *mgr;
nub::soft_ref<FrameIstream> gb;
XWinManaged *xwinPtr;
Image<PixRGB<byte> > *dispPtr;
Timer masterclock;            // master clock for simulations
int w=-1,h=-1;
int smlevel = -1;

ArmPosition armPosition;

// ######################################################################
int main(int argc, char **argv)
{
  MYLOGVERB = LOG_INFO;
  int sim = false;


  // instantiate a model manager (for camera input):
  ModelManager manager("Test Arm Control");

  mgr = &manager;
  // Instantiate our various ModelComponents:
  nub::ref<FrameGrabberConfigurator>
    gbc(new FrameGrabberConfigurator(manager));
  manager.addSubComponent(gbc);

  nub::soft_ref<SimEventQueueConfigurator>
    seqc(new SimEventQueueConfigurator(manager));
  manager.addSubComponent(seqc);

  // our brain
  nub::ref<StdBrain> brain(new StdBrain(manager));
  manager.addSubComponent(brain);

  // our arm
  nub::soft_ref<RT100> rt100(new RT100(manager));
  manager.addSubComponent(rt100);

  nub::ref<InputFrameSeries> ifs(new InputFrameSeries(manager));
  if (sim)
  {
    manager.addSubComponent(ifs);
  }

  // Set the appropriate defaults for the machine we are connected to:
  manager.exportOptions(MC_RECURSE);

  manager.setOptionValString(&OPT_RawVisualCortexChans, "IOC");
  //manager.setOptionValString(&OPT_RawVisualCortexChans, "C");
  manager.setOptionValString(&OPT_SaliencyMapType, "Fast");
  manager.setOptionValString(&OPT_SMfastInputCoeff, "1");
  manager.setOptionValString(&OPT_WinnerTakeAllType, "Fast");
  manager.setOptionValString(&OPT_SimulationTimeStep, "0.2");

  manager.setModelParamVal("FOAradius", 20, MC_RECURSE);
  manager.setModelParamVal("FoveaRadius", 20, MC_RECURSE);

  //manager.setOptionValString(&OPT_IORtype, "Disc");
  manager.setOptionValString(&OPT_IORtype, "None");


  manager.setOptionValString(&OPT_FrameGrabberStreaming, "false");
  manager.setOptionValString(&OPT_FrameGrabberType, "V4L");
  manager.setOptionValString(&OPT_FrameGrabberChannel, "1");
  manager.setOptionValString(&OPT_FrameGrabberHue, "0");
  manager.setOptionValString(&OPT_FrameGrabberContrast, "16384");
  manager.setOptionValString(&OPT_FrameGrabberDims, "320x240");
  //manager.setOptionValString(&OPT_SaccadeControllerType, "Threshfric");
  manager.setOptionValString(&OPT_SCeyeMaxIdleSecs, "1000.0");
  manager.setOptionValString(&OPT_SCeyeThreshMinOvert, "4.0");
  manager.setOptionValString(&OPT_SCeyeThreshMaxCovert, "3.0");
  manager.setOptionValString(&OPT_SCeyeThreshMinNum, "2");
  // manager.setOptionValString(&OPT_SCeyeSpringK, "1000000.0");

  //manager.setOptionValString(&OPT_EyeHeadControllerType, "Trivial");

  // Parse command-line:
  if (manager.parseCommandLine(argc, argv, "", 0, 0) == false) return(1);


  if (sim)
  {
    w = ifs->getWidth();
    h = ifs->getHeight();
  } else {
    // do post-command-line configs:
    gb = gbc->getFrameGrabber();
    if (gb.isInvalid())
      LFATAL("You need to select a frame grabber type via the "
             "--fg-type=XX command-line option for this program "
             "to be useful");
    w = gb->getWidth();
    h = gb->getHeight();
  }

  const int foa_size = std::min(w, h) / 12;
  manager.setModelParamVal("InputFrameDims", Dims(w, h),
                           MC_RECURSE | MC_IGNORE_MISSING);
  manager.setModelParamVal("SCeyeStartAtIP", true,
                           MC_RECURSE | MC_IGNORE_MISSING);
  manager.setModelParamVal("SCeyeInitialPosition", Point2D<int>(w/2,h/2),
                           MC_RECURSE | MC_IGNORE_MISSING);
  manager.setModelParamVal("FOAradius", foa_size,
                           MC_RECURSE | MC_IGNORE_MISSING);
  manager.setModelParamVal("FoveaRadius", foa_size,
                           MC_RECURSE | MC_IGNORE_MISSING);

  // catch signals and redirect them to terminate for clean exit:
  signal(SIGHUP, terminate); signal(SIGINT, terminate);
  signal(SIGQUIT, terminate); signal(SIGTERM, terminate);
  signal(SIGALRM, terminate);

  // let's do it!
  manager.start();

  ComplexChannel *cc = NULL;
  cc = &*dynCastWeak<ComplexChannel>(brain->getVC());
  const LevelSpec lspec = cc->getModelParamVal<LevelSpec>("LevelSpec");
  smlevel = lspec.mapLevel();

  Image<float> sm(w >> smlevel, h >> smlevel, ZEROS); // saliency map

  uint64 avgtime = 0; int avgn = 0; // for average framerate
  float fps = 0.0F;                 // to display framerate
  Timer tim;                        // for computation of framerate

  Point2D<int> fixation(-1, -1); // coordinates of eye fixation


  STATE currentState = INIT;


  // image buffer for display:
  Image<PixRGB<byte> > disp(w * 2, h + 20, ZEROS);
  dispPtr = &disp;
  //disp += PixRGB<byte>(128);
  XWinManaged xwin(disp.getDims(), -1, -1, "RT100 arm control");
  xwinPtr = &xwin;

  char info[1000]; // general text buffer for various info messages

  Point2D<int> lastpointsent(w/2, h/2);

  static bool moveArm = false;
  // ######################################################################
  try {

    //Get a new descriptor vector
    DescriptorVec descVec(manager, "Descriptor Vector", "DecscriptorVec", cc);
    //Get new classifier
    LINFO("size %i\n", descVec.getFVSize());

    Bayes bayesNet(descVec.getFVSize(), 10);
    Dims objSize(20, 20);
    descVec.setFoveaSize(objSize);

    //get command line options
    // const char *bayesNetFile = mgr->getExtraArg(0).c_str();
    // const char *imageSetFile = mgr->getExtraArg(1).c_str();
    // int objToBias = mgr->getExtraArgAs<int>(2)-1;

    // bayesNet.load("test.net");


    initRandomNumbers();

    if (!sim)
    {
      // get the frame grabber to start streaming:
      gb->startStream();
    }

    // initialize the timers:
    tim.reset(); masterclock.reset();
    Timer timer(1000000); timer.reset();

    int frame = 0;
    unsigned int objID = 0;
    while(goforever)
    {
      // grab image:
      Image< PixRGB<byte> > ima;
      if (sim)
      {
        ifs->updateNext();
        ima = ifs->readRGB();
      } else {
        ima = gb->readRGB();
      }
      frame++;

      Point2D<int> loc = xwin.getLastMouseClick();

      //Arm control
      //int key = xwin.getLastKeyPress();
      if (loc.isValid()) moveArm = !moveArm; //toggle moveArm

      if (moveArm)
      {
        switch (currentState)
        {
          case INIT:
            if (init(rt100))
            {
              LINFO("Initialize complete");
              currentState = REACH;
              rt100->setJointParam(RT100::ZED, RT100::SPEED, 20);

              rt100->setJointParam(RT100::SHOLDER, RT100::SPEED, 60);
              rt100->setJointParam(RT100::ELBOW, RT100::SPEED, 60);
              //
              //rt100->setJointPosition(RT100::ZED, -2800);
              //
              // rt100->setJointPosition(RT100::SHOLDER, 1280);
              // rt100->setJointPosition(RT100::ELBOW, -492);
              printf("Init complete\n");
              printf("Fixation: %i %i\n", fixation.i, fixation.j);
              // rt100->moveArm();
              //moveArm=false;
              biasVC(*cc, bayesNet, -1, false); //start with an unbiased smap
              frame = 0;
            }
            break;
          case REACH:
            {
              fixation = evolveBrain(ima, descVec, sm);
              printf("Fixation: %i %i\n", fixation.i, fixation.j);

              descVec.setFovea(fixation);
              descVec.buildRawDV();

              std::vector<double> FV = descVec.getFV();
              // double statSig = bayesNet.getStatSig(FV, (uint)0);

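              // Update the Bayes model for the current object ID with the
              // descriptor vector sampled at the fixation, and every 10
              // frames re-bias the visual cortex toward that object so the
              // saliency map keeps favoring it while the arm reaches.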
              bayesNet.learn(FV, objID); //update the model

              if (!(frame%10))
                biasVC(*cc, bayesNet, objID, true);

              if (frame > 20) { //start reaching for the object
                if(reach(rt100, fixation,
                         ima.getWidth()/2,
                         (ima.getHeight()/2) + 70,
                         -2800)) //final zed position
                {
                  LINFO("Reached object");
                  currentState = GRASP;
                }
              }
            }
            break;
          case GRASP:
            if(grasp(rt100))
            {
              LINFO("Object grasped");
              currentState = PLACE_IN_BASKET;
            } else {
              LINFO("Did not grasp object. Trying again");
              rt100->setJointPosition(RT100::YAW, 600);
              rt100->setJointPosition(RT100::ZED, -2600);
              rt100->setJointPosition(RT100::GRIPPER, 1000);
              //rt100->moveArm(true);

              currentState = REACH;
            }
            break;

          case LIFT:
            if(lift(rt100))
            {
              LINFO("Object lifted");
              currentState = INIT;
            }
            break;
          case PLACE_IN_BASKET:
            if(placeInBasket(rt100))
            {
              LINFO("Object placed in basket");
              currentState = INIT;
              objID++;
              if (objID == 5)
                currentState = DONE;
            }
            break;
          case DONE:
            init(rt100);
            goforever = false;
            break;


          default:
            break;
        }
      }

      updateDisplay(ima, sm, fixation);

      avgtime += tim.getReset(); avgn ++;
      if (avgn == NAVG)
      {
        fps = 1000.0F / float(avgtime) * float(avgn);
        avgtime = 0; avgn = 0;
      }

      // create an info string:
      sprintf(info, "%.1ffps Target - (%03d,%03d) %f ",
              fps, fixation.i-(ima.getWidth()/2),
              fixation.j-(ima.getHeight()/2)-75,
              masterclock.getSecs());

      writeText(disp, Point2D<int>(0, h), info,
                PixRGB<byte>(255), PixRGB<byte>(127));

      // ready for next frame:
    }

    // get ready to terminate:
    manager.stop();

  } catch ( ... ) { };

  return 0;
}

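//! Bias (or un-bias) the visual cortex toward a learned object.
/*! A BayesianBiaser visitor pushes the learned mean and sigma for objId's
    features into the channels, and the biased submap algorithm is then
    selected so that subsequent saliency computations favor that object;
    calling this with doBias=false yields an unbiased saliency map. */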
void biasVC(ComplexChannel &vc, Bayes &bayesNet, int objId, bool doBias)
{
  //Set mean and sigma to bias submap
  BayesianBiaser bb(bayesNet, objId, -1, doBias);
  vc.accept(bb);

  setSubmapAlgorithmBiased(vc);
}

void updateDisplay(Image<PixRGB<byte> > &ima,
                   Image<float> &sm,
                   Point2D<int> &fixation){

  static int frame = 0;
  char filename[255];
  const int foa_size = std::min(w, h) / 12;

  // display image
  inplacePaste(*dispPtr, ima, Point2D<int>(0, 0));

  if (sm.initialized())
  {
    Image<float> dispsm = quickInterpolate(sm, 1 << smlevel);
    inplaceNormalize(dispsm, 0.0f, 255.0f);

    inplacePaste(*dispPtr,
                 (Image<PixRGB<byte> >)toRGB(dispsm),
                 Point2D<int>(w, 0));

    Point2D<int> fix2(fixation); fix2.i += w;
    if (fixation.i >= 0)
    {
      drawDisk(*dispPtr, fixation, foa_size/6+2, PixRGB<byte>(20, 50, 255));
      drawDisk(*dispPtr, fixation, foa_size/6, PixRGB<byte>(255, 255, 20));
      drawDisk(*dispPtr, fix2, foa_size/6+2, PixRGB<byte>(20, 50, 255));
      drawDisk(*dispPtr, fix2, foa_size/6, PixRGB<byte>(255, 255, 20));
    }
  }

  printf("FrameInfo: %i %f\n", frame, masterclock.getSecs());
  sprintf(filename, "armControlFrames/pickup/frame%06d.ppm", frame++);
  // Raster::WriteRGB(*dispPtr, filename);

  xwinPtr->drawImage(*dispPtr);
}

bool init(nub::soft_ref<RT100> rt100)
{

  rt100->setJointParam(RT100::ZED, RT100::SPEED, 40);
  rt100->setJointParam(RT100::SHOLDER, RT100::SPEED, 40);
  rt100->setJointParam(RT100::ELBOW, RT100::SPEED, 40);

  rt100->setJointPosition(RT100::ZED, -1500);
  //rt100->setJointPosition(RT100::SHOLDER, 2600);
  rt100->setJointPosition(RT100::SHOLDER, 2300);
  //rt100->setJointPosition(RT100::ELBOW, -1500);
  rt100->setJointPosition(RT100::ELBOW, -2000);
  rt100->setJointPosition(RT100::YAW, 600);
  rt100->setJointPosition(RT100::TILT_WRIST, -700);
  rt100->setJointPosition(RT100::ROLL_WRIST, 2400);
  rt100->setJointPosition(RT100::GRIPPER, 1000);
  rt100->moveArm();
  waitForMoveComplete(rt100);

  short int val;
  LINFO("Joint Pos: ");
  rt100->getJointPosition(RT100::ZED, &val);
  armPosition.zed = val;
  printf("%i ", val);
  rt100->getJointPosition(RT100::SHOLDER, &val);
  armPosition.sholder = val;
  printf("%i ", val);
  rt100->getJointPosition(RT100::ELBOW, &val);
  armPosition.elbow = val;
  printf("%i ", val);
  rt100->getJointPosition(RT100::YAW, &val);
  printf("%i ", val);
  printf("\n");

  return true;
}

bool learnMovement(float xErr, float yErr, int *elbowPos, int *sholderPos)
{
  bool reachXPos = false, reachYPos = false;

  *elbowPos = 0;
  *sholderPos = 0;
  if (!(xErr < 15 && xErr > -15))
  {

    if (xErr > 0)
      *elbowPos = -30;
    else
      *elbowPos = 30;
    reachXPos = false;
  } else {
    reachXPos = true;
  }


  if (!(yErr < 15 && yErr > -15))
  {
    if (yErr > 0)
      *sholderPos = -30;
    else
      *sholderPos = 30;
    reachYPos = false;
  } else {
    reachYPos = true;
  }


  if (reachXPos && reachYPos)
    return true;
  else
    return false;

  return false;

}

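//! Take one visual-servoing step of the arm toward the fixation point.
/*! (x,y) is the target image location and zedPos the final zed height.
    learnMovement() converts the pixel error between the fixation and (x,y)
    into +/-30 count nudges of the elbow and shoulder (with a 15-pixel
    deadband), the zed is lowered by 25 counts per call until it reaches
    zedPos, and the accumulated armPosition is commanded to the arm.
    Returns true once both the pixel error and the zed are at target. */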
bool reach(nub::soft_ref<RT100> rt100, Point2D<int> fixation, int x, int y, int zedPos)
{
  bool reachZedPos = false;
  std::vector<short int> movePos(rt100->getNumJoints(), 0);
  float xErr = fixation.i-x;
  float yErr = -1*(fixation.j-y);

  int elbowPos = 0, sholderPos = 0;
  bool reachedTarget = learnMovement(xErr, yErr, &elbowPos, &sholderPos);


  // movePos[RT100::ELBOW] = elbowPos;
  // movePos[RT100::SHOLDER] = sholderPos;

  armPosition.sholder += sholderPos;
  armPosition.elbow += elbowPos;
  armPosition.zed -= 25;
  if (armPosition.zed < zedPos) armPosition.zed = zedPos;

  if (armPosition.zed == zedPos)
    reachZedPos = true;

  LINFO("Err: %f:%f e:%i s:%i z:%i",
        xErr, yErr,
        armPosition.elbow,
        armPosition.sholder,
        armPosition.zed);

  if (reachedTarget && reachZedPos)
  {
    //set current positions
    short int sholderPos, elbowPos;
    rt100->getJointPosition(RT100::SHOLDER, &sholderPos);
    rt100->getJointPosition(RT100::ELBOW, &elbowPos);

    rt100->setJointPosition(RT100::SHOLDER, sholderPos);
    rt100->setJointPosition(RT100::ELBOW, elbowPos);

    short int val;
    printf("Reach complete\n");
    printf("Joint Pos: ");
    rt100->getJointPosition(RT100::ZED, &val);
    printf("%i ", val);
    rt100->getJointPosition(RT100::SHOLDER, &val);
    printf("%i ", val);
    rt100->getJointPosition(RT100::ELBOW, &val);
    printf("%i ", val);
    rt100->getJointPosition(RT100::YAW, &val);
    printf("%i\n", val);

    return true;
  } else {
    // rt100->interpolationMove(movePos);
    rt100->setJointPosition(RT100::SHOLDER, armPosition.sholder);
    rt100->setJointPosition(RT100::ELBOW, armPosition.elbow);
    rt100->setJointPosition(RT100::ZED, armPosition.zed);
    rt100->moveArm();

    return false;
  }

  return false;
}

void waitForMoveComplete(nub::soft_ref<RT100> rt100)
{
  bool moveDone = false;
  for(int i=0; i<1000 && !moveDone; i++) //timeout
  {
    moveDone = rt100->moveComplete();
    Image< PixRGB<byte> > ima = gb->readRGB();
    Image<float> sm;
    Point2D<int> fix;
    updateDisplay(ima, sm, fix);
  }
}


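//! Close the gripper on the object under the end effector.
/*! The yaw is first set from the current elbow position through a linear
    mapping (presumably an empirical calibration), the zed is lowered onto
    the object, and the gripper is closed. The grasp is reported successful
    if the gripper did not close completely (final position > 0). */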
bool grasp(nub::soft_ref<RT100> rt100)
{

  short int elbowPos;
  rt100->getJointPosition(RT100::ELBOW, &elbowPos);

  //center the gripper in the image

  float yawPos = 0.3334*elbowPos+40.909; //the mapping between elbow and yaw pos
  // float yawPos = 0.3334*elbowPos+220;
  LINFO("Elbow pos %i: %i\n", elbowPos, (short int)yawPos);
  rt100->setJointPosition(RT100::YAW, (short int)yawPos);
  rt100->moveArm();
  waitForMoveComplete(rt100);
  //rt100->moveArm(true);

  LINFO("Zed change");
  rt100->setJointPosition(RT100::ZED, -2900);
  rt100->moveArm();
  waitForMoveComplete(rt100);

  LINFO("close gripper");
  rt100->setJointPosition(RT100::GRIPPER, -500);
  rt100->moveArm();
  waitForMoveComplete(rt100);

  short int gripperPos;
  rt100->getJointPosition(RT100::GRIPPER, &gripperPos);
  printf("Gripper pos %i", gripperPos);

  if (gripperPos <= 0)
    return false;
  else
    return true;
}

bool lift(nub::soft_ref<RT100> rt100)
{
  LINFO("Zed change");
  rt100->setJointPosition(RT100::ZED, -2500);
  // rt100->setJointPosition(RT100::SHOLDER, 2600);
  // rt100->setJointPosition(RT100::ELBOW, -1500);
  rt100->moveArm();
  waitForMoveComplete(rt100);

  LINFO("open gripper");
  rt100->setJointPosition(RT100::GRIPPER, 1000);
  rt100->moveArm();
  waitForMoveComplete(rt100);

  return true;
}

bool placeInBasket(nub::soft_ref<RT100> rt100)
{
  LINFO("Move to basket");
  rt100->setJointPosition(RT100::ZED, -2100);
  rt100->moveArm();
  waitForMoveComplete(rt100);

  rt100->setJointPosition(RT100::SHOLDER, 1890);
  rt100->setJointPosition(RT100::ELBOW, -120);
  rt100->setJointPosition(RT100::YAW, -226);
  rt100->moveArm();
  waitForMoveComplete(rt100);

  LINFO("prepare to drop");

  LINFO("open gripper");
  rt100->setJointPosition(RT100::GRIPPER, 1000);
  rt100->moveArm();
  waitForMoveComplete(rt100);

  return true;
}

bool drop(nub::soft_ref<RT100> rt100)
{


  return true;
}

Point2D<int> evolveBrain(Image<PixRGB<byte> > &img, DescriptorVec& descVec,
                         Image<float> &smap)
{

  nub::ref<StdBrain> brain = dynCastWeak<StdBrain>(mgr->subComponent("Brain"));
  nub::ref<SimEventQueueConfigurator> seqc =
    dynCastWeak<SimEventQueueConfigurator>(mgr->subComponent("SimEventQueueConfigurator"));
  nub::soft_ref<SimEventQueue> seq = seqc->getQ();

  LINFO("Evolve Brain");

  if (mgr->started()){ //give the image to the brain

    if (img.initialized())
    {
      //place the image in the inputFrame queue
      rutz::shared_ptr<SimEventInputFrame>
        e(new SimEventInputFrame(brain.get(), GenericFrame(img), 0));
      seq->post(e);
      // brain->input(img, seq);
      descVec.setInputImg(img);
    }

    bool keep_going = true;
    while (keep_going){
      brain->evolve(*seq);
      seq->evolve();

      const SimStatus status = seq->evolve();
      if (status == SIM_BREAK) {
        LINFO("V %d\n", (int)(seq->now().msecs()) );
        keep_going = false;
      }
      if (brain->gotCovertShift()) // new attended location
      {

        const Point2D<int> winner = brain->getLastCovertPos();
        const float winV = brain->getLastCovertAgmV();

        LINFO("##### Winner (%d,%d) at %fms : %.4f #####\n",
              winner.i, winner.j, seq->now().msecs(), winV * 1000.0f);
        //Image<float> img = brain->getSM()->getV(false);
        smap = brain->getVC()->getOutput();
        // SHOWIMG(img);
        /* char filename[255];
           sprintf(filename, "SceneSMap%i.ppm", ii++);
           Raster::WriteRGB(img, filename);*/

        return winner;

        keep_going = false;

      }
      if (seq->now().secs() > 3.0) {
        LINFO("##### Time limit reached #####");
        keep_going = false;
      }
      LINFO("Evolve brain");
    }

  }

  return Point2D<int>();

}



#endif
// ######################################################################
/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */