/**
   \file  Robots/LoBot/LobotMain.C
   \brief lobot/Robolocust controller.

   This file defines the main function for the Robolocust controller. The
   goal of the Robolocust project is to design and implement a robot that
   avoids obstacles based on a model of the Lobula Giant Movement Detector
   (LGMD) found in locust brains.

   The LGMD is a visual interneuron that responds with increasing firing
   frequency to stimuli on a direct collision course with the locust. For
   the Robolocust project, we want to use a computational model of this
   neuron to develop a collision sensor that can then be applied to the
   problem of robotic obstacle avoidance. Additionally, we want to hook
   actual locusts up to the robot, tap into their LGMDs and use their
   spikes directly for obstacle avoidance.
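
   As a purely illustrative aside (this is not the model implemented by
   lobot), published rate models of the LGMD compute a response from the
   angular size of the approaching object and its rate of change, so that
   the output grows sharply while an object looms. The function and
   parameter names below are hypothetical:

   \code
   #include <cmath>

   // Hypothetical looming-response function: the response is strongest
   // while the angular size theta (radians) of an object is still small
   // but growing quickly (theta_dot, radians/sec); alpha controls how
   // strongly large angular sizes suppress the response. Not taken from
   // the lobot sources.
   inline float looming_response(float theta, float theta_dot, float alpha)
   {
      return theta_dot * std::exp(-alpha * theta) ;
   }
   \endcode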

   To perform the above tasks, we need algorithms that integrate the LGMD
   spikes from multiple (real or virtual) locusts into a coherent steering
   decision for the robot. The lobot controller is a software framework
   designed for this purpose.
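
   To give a flavour of such integration (a minimal sketch only; none of
   these names appear in the lobot sources), suppose each locust looks
   along a fixed direction and reports an LGMD spike rate. A trivial
   policy would be to steer toward the direction whose locust reports the
   lowest rate, i.e., the direction that currently looks least
   threatening:

   \code
   #include <cstddef>
   #include <vector>

   // Hypothetical per-locust reading: the direction that locust faces
   // (degrees, 0 = straight ahead, positive = left) and its current LGMD
   // spike rate.
   struct LocustReading { float direction ; float spike_rate ; } ;

   // Return the direction of the locust with the lowest spike rate, i.e.,
   // the heading that appears least likely to contain a looming obstacle.
   inline float steer_toward_clearest(const std::vector<LocustReading>& L)
   {
      if (L.empty())
         return 0 ; // no readings: keep going straight
      std::size_t clearest = 0 ;
      for (std::size_t i = 1 ; i < L.size() ; ++i)
         if (L[i].spike_rate < L[clearest].spike_rate)
            clearest = i ;
      return L[clearest].direction ;
   }
   \endcode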

   In terms of hardware, this framework can interface with FireWire
   cameras, a laser range finder and different robot platforms (e.g., the
   iRobot Create). For high-level control, it provides a behaviour-based
   substrate built atop an implementation of the Distributed Architecture
   for Mobile Navigation (DAMN). Moreover, the framework provides a
   configuration file that can be used to change almost any aspect of the
   program, which allows us to develop, test and compare many different
   LGMD models and integration algorithms.
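
   To illustrate the DAMN idea (again a hypothetical sketch, not lobot's
   actual arbiter), each behaviour casts a vote for every candidate turn
   direction; the arbiter sums the votes, weighted by each behaviour's
   priority, and the direction with the highest total wins:

   \code
   #include <cstddef>
   #include <vector>

   // Hypothetical DAMN-style arbitration: votes[b][d] is behaviour b's
   // vote for turn direction d (negative values vote against) and
   // weights[b] is that behaviour's priority. Returns the index of the
   // winning direction. Assumes votes is non-empty and rectangular; this
   // is not the interface used by lobot's arbiters.
   inline std::size_t arbitrate(const std::vector<std::vector<float> >& votes,
                                const std::vector<float>& weights)
   {
      std::size_t best = 0 ;
      float best_total = -1e30f ;
      for (std::size_t d = 0 ; d < votes.front().size() ; ++d) {
         float total = 0 ;
         for (std::size_t b = 0 ; b < votes.size() ; ++b)
            total += weights[b] * votes[b][d] ;
         if (total > best_total) { best_total = total ; best = d ; }
      }
      return best ;
   }
   \endcode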
*/

// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2000-2005   //
// by the University of Southern California (USC) and the iLab at USC.  //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: mviswana usc edu
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Robots/LoBot/LobotMain.C $
// $Id: LobotMain.C 13780 2010-08-11 22:07:47Z mviswana $
//

//------------------------------ HEADERS --------------------------------

// lobot headers
#include "Robots/LoBot/LoApp.H"

#include "Robots/LoBot/thread/LoShutdown.H"
#include "Robots/LoBot/thread/LoUpdateLock.H"
#include "Robots/LoBot/thread/LoThread.H"

#include "Robots/LoBot/misc/LoExcept.H"

// INVT utilities
#include "Util/log.H"

// Standard C++ headers
#include <stdexcept>

//------------------------------- MAIN ----------------------------------

int main(int argc, const char* argv[])
{
   MYLOGVERB = LOG_ERR ; // minimize INVT's incessant chatter
   int ret = 0 ;
   try
   {
      // create the application object, parse the command line and run the
      // controller's main loop
      lobot::App& app = lobot::App::create(argc, argv) ;
      app.parse_command_line() ;
      app.run() ;
   }
   catch (lobot::uhoh& e) // lobot errors carry their own exit codes
   {
      LERROR("%s", e.what()) ;
      ret = e.code() ;
   }
   catch (std::exception& e) // anything else gets a catch-all exit code
   {
      LERROR("%s", e.what()) ;
      ret = 255 ;
   }

   // on error, signal any other threads to shut down and release the
   // update lock before waiting for everyone to wind up
   if (ret != 0) {
      lobot::Shutdown::signal() ;
      lobot::UpdateLock::instance().unlock() ;
   }
   lobot::Thread::wait_all() ;
   return ret ;
}

//-----------------------------------------------------------------------

/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */