LoParticle.H

/**
   \file  Robots/LoBot/slam/LoParticle.H
   \brief A particle for the FastSLAM algorithm's particle filter.

   This file defines a class that encapsulates a particle as required by
   the Robolocust implementation of the FastSLAM algorithm, a
   Rao-Blackwellized particle filter for building an occupancy grid map
   and recording lobot's trajectory within this map.
*/

// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2000-2005   //
// by the University of Southern California (USC) and the iLab at USC.  //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: mviswana usc edu
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Robots/LoBot/slam/LoParticle.H $
// $Id: LoParticle.H 13575 2010-06-17 01:42:18Z mviswana $
//

#ifndef LOBOT_PARTICLE_DOT_H
#define LOBOT_PARTICLE_DOT_H

//------------------------------ HEADERS --------------------------------

// lobot headers
#include "Robots/LoBot/slam/LoOccGrid.H"
#include "Robots/LoBot/slam/LoScanMatch.H"
#include "Robots/LoBot/slam/LoPose.H"
#include "Robots/LoBot/slam/LoOdometry.H"

// Boost headers
#include <boost/shared_ptr.hpp>

// Standard C++ headers
#include <vector>

//----------------------------- NAMESPACE -------------------------------

namespace lobot {

//------------------------- CLASS DEFINITION ----------------------------

/**
   \class lobot::Particle
   \brief Encapsulation of a particle as used by the FastSLAM algorithm.

   Robolocust uses FastSLAM to build an occupancy grid map of its
   surroundings and to record its trajectory (which is used to compare
   the performance of different LGMD-based avoidance algorithms). This
   class encapsulates the notion of a particle as required by the
   FastSLAM algorithm.
*/
class Particle {
   /// In a particle filter, each particle hypothesizes the current state
   /// of the system. In a robot mapping application, the state is
   /// basically the robot's pose.
   Pose m_pose ;

   /// In addition to the pose, in FastSLAM, a Rao-Blackwellized particle
   /// filter, each particle must also carry with it its own copy of the
   /// occupancy grid being built.
   ///
   /// DEVNOTE: We use a shared pointer here rather than an OccGrid
   /// object because this FastSLAM implementation can be configured to
   /// perform localization only. In this mode of operation, the user
   /// supplies a known obstacle map and we use only the Monte Carlo
   /// Localization component of the FastSLAM algorithm.
   ///
   /// Since the map will be common to all particles in localization
   /// mode, we use a shared pointer. In normal SLAM mode, each shared
   /// pointer will, in fact, point to its own unique OccGrid instance.
   /// But in localization mode, all the shared pointers will point to a
   /// single OccGrid instance.
   typedef boost::shared_ptr<OccGrid> MapPtr ;
   MapPtr m_map ;
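
   // The construction patterns below illustrate the intent behind the
   // shared pointer. This is a sketch only, not code from the actual
   // filter: the particle constructors are the ones declared later in
   // this class, but N, initial_scan and the loading of the known map
   // are hypothetical stand-ins.
   //
   //    // Normal SLAM mode: each particle ends up owning its own,
   //    // distinct OccGrid instance.
   //    std::vector<Particle> particles ;
   //    for (int i = 0; i < N; ++i)
   //       particles.push_back(Particle(1.0f/N, initial_scan)) ;
   //
   //    // Localization-only mode: the known map is loaded once and every
   //    // particle's MapPtr aliases that single OccGrid instance.
   //    boost::shared_ptr<OccGrid> known_map = load_known_map() ; // hypothetical loader
   //    for (int i = 0; i < N; ++i)
   //       particles.push_back(Particle(1.0f/N, initial_scan, known_map)) ;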

   /// Since grid-based FastSLAM requires each particle to carry its own
   /// copy of the occupancy map, the algorithm's CPU and memory
   /// requirements can become quite daunting. Therefore, it is critical
   /// that we somehow reduce the total number of particles required by
   /// the filter.
   ///
   /// One way to do that is to eschew raw odometry in favour of
   /// something more precise (e.g., laser range finder based scan
   /// matching). For that to work, we need to keep track of the most
   /// recent LRF scan. The latest scan will then be registered to this
   /// reference scan and the resulting scan matching transformation will
   /// be used as the odometric input to the particle filter's motion
   /// model.
   Scan m_ref_scan ;
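
   // The data flow just described, as a sketch only (the Transformation
   // type, match_scans() and compose() below are hypothetical stand-ins;
   // the actual registration code lives behind LoScanMatch.H):
   //
   //    // Register the latest LRF scan against the stored reference scan;
   //    // the recovered transform serves as corrected odometry and feeds
   //    // the motion model instead of the raw odometry alone.
   //    Transformation t = match_scans(latest_scan, m_ref_scan) ;
   //    m_pose = compose(m_pose, t) ;
   //    // The latest scan then becomes the new reference for next time.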

   /// Each particle is weighted according to the sensor model. This data
   /// member holds the particle's current weight.
   float m_weight ;

   /// Debugging support: since there are (typically) hundreds of
   /// particles, distinguishing one particle's debug output from
   /// another's can be challenging. To make this a little easier, we
   /// prefix each of a particle's debug messages with that particle's
   /// address. However, having to use reinterpret_cast in each debug
   /// statement is a hassle. So, we use the following data member to
   /// store the particle's address as an unsigned long when the particle
   /// is created. Thereafter, we can simply use this value without
   /// worrying about casting the same pointer over and over...
   unsigned long m_this ;

public:
   /// When a particle is created, the client should supply an initial
   /// weight as well as an initial LRF scan (for the scan matching). The
   /// particle will start off assuming a pose of (0,0,0) and an
   /// occupancy map which has equal probabilities for all locations.
   Particle(float initial_weight, const std::vector<int>& initial_scan) ;

   /// When FastSLAM is configured to perform localization only, i.e., no
   /// mapping, the user will specify a map to use and FastSLAM will use
   /// only its Monte Carlo Localization component. This constructor
   /// takes the same parameters as the normal SLAM mode constructor. In
   /// addition, it takes an OccGrid to serve as the common map to be
   /// shared by all the particles. The map has to be loaded by this
   /// class's client.
   Particle(float initial_weight, const std::vector<int>& initial_scan,
            const boost::shared_ptr<OccGrid>& known_map) ;

   /// Copy constructor.
   Particle(const Particle&) ;

   /// Assignment operator.
   Particle& operator=(const Particle&) ;

   /// This method "perturbs" the particle so as to produce a new state
   /// hypothesis based on the current control input. The current sensor
   /// data is also used because the Robolocust implementation of
   /// FastSLAM uses laser range scan matching to correct the raw
   /// odometry obtained from the robot's lower layers.
   ///
   /// In Bayesian/particle filter parlance, this method implements the
   /// prediction step, producing a new state by sampling from the motion
   /// model P(xt|xt-1,ut).
   void apply_motion_model(const Odometry& control_input,
                           const std::vector<int>& sensor_data) ;
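
   // In sketch form, the prediction amounts to perturbing the (scan
   // corrected) odometry with zero-mean noise and composing the result
   // with the previous pose, i.e., drawing a sample from P(xt|xt-1,ut).
   // Everything named below is a hypothetical stand-in, not the actual
   // implementation behind this declaration:
   //
   //    float dx = odo.dx     + sample_noise(sigma_xy) ;
   //    float dy = odo.dy     + sample_noise(sigma_xy) ;
   //    float dt = odo.dtheta + sample_noise(sigma_theta) ;
   //    m_pose = compose(m_pose, dx, dy, dt) ;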

   /// This method reweights the particle once it has been "moved" to a
   /// new state. The weighting is based on the current sensor
   /// observation.
   ///
   /// In Bayesian/particle filter parlance, this method implements the
   /// correction step, taking the latest observation into account to
   /// correct the state predicted by the motion model; the particle's
   /// new weight is given by the observation likelihood P(zt|xt).
   void apply_sensor_model(const std::vector<int>& range_readings) ;
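
   // Conceptually, the weight is the likelihood of the observed ranges
   // given this particle's hypothesized pose and map. A beam-by-beam
   // sketch of that idea follows; expected_range(), gaussian() and sigma
   // are hypothetical stand-ins, not actual Robolocust functions:
   //
   //    float w = 1 ;
   //    for (unsigned int i = 0; i < range_readings.size(); ++i) {
   //       // Ray cast into the map from the current pose along beam i...
   //       float expected = expected_range(*m_map, m_pose, i) ;
   //       // ...and score the measured range against the expected one.
   //       w *= gaussian(range_readings[i] - expected, sigma) ;
   //    }
   //    m_weight = w ;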

   /// This method updates the occupancy map based on the latest state
   /// hypothesis and laser range finder data. That is, it "evolves" the
   /// latest occupancy probabilities according to the update model
   /// P(mt|xt,zt,mt-1).
   void update_map(const std::vector<int>& range_readings) ;
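
   // A common way to realize such an update (sketch only; the actual
   // OccGrid interface may differ) is a per-beam occupancy update: cells
   // the beam passes through become more likely to be free and the cell
   // at the measured range more likely to be occupied, e.g., via the
   // standard log-odds recursion for each affected cell c:
   //
   //    l_t(c) = l_{t-1}(c) + inverse_sensor_model(c, xt, zt) - l_0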

   /// This method normalizes the particle weight.
   void normalize(float normalizer) {m_weight *= normalizer ;}
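
   // Typically, the filter computes the normalizer as the reciprocal of
   // the sum of all the particle weights so that the weights add up to
   // one afterwards. A minimal sketch (the particles container is
   // hypothetical):
   //
   //    float sum = 0 ;
   //    for (unsigned int i = 0; i < particles.size(); ++i)
   //       sum += particles[i].weight() ;
   //    for (unsigned int i = 0; i < particles.size(); ++i)
   //       particles[i].normalize(1/sum) ;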

   /// Accessors.
   //@{
   float  weight() const {return m_weight    ;}
   Pose   pose()   const {return m_pose      ;}
   const OccGrid& map() const {return *m_map ;}
   //@}

   /// This function can be used to randomize this particle's pose
   /// estimate. It is useful when we want to introduce random particles
   /// to help deal with mislocalizations.
   void randomize() ;

   /// Visualization support. This helper packages a particle's pose and
   /// weight together in one structure.
   struct Viz {
      Pose  pose ;
      float weight ;
      Viz(const Pose&, float) ;
   } ;

   /// Returns the particle's current pose and weight for visualization
   /// purposes.
   Viz viz() const {return Viz(m_pose, m_weight) ;}
} ;
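
/*
   The following sketch shows how the public interface above is meant to
   be driven over one FastSLAM update. It is illustrative only: the
   particles container, the control_input and scan variables, and the
   resampling step are hypothetical stand-ins, not code from the actual
   Robolocust filter.

      std::vector<lobot::Particle> particles ; // built via a ctor above

      // Prediction: perturb each particle using odometry + scan matching.
      for (unsigned int i = 0; i < particles.size(); ++i)
         particles[i].apply_motion_model(control_input, scan) ;

      // Correction: reweight each particle using the latest LRF scan.
      float sum = 0 ;
      for (unsigned int i = 0; i < particles.size(); ++i) {
         particles[i].apply_sensor_model(scan) ;
         sum += particles[i].weight() ;
      }

      // Normalize weights, update each particle's map, then resample the
      // particle set in proportion to the normalized weights.
      for (unsigned int i = 0; i < particles.size(); ++i) {
         particles[i].normalize(1/sum) ;
         particles[i].update_map(scan) ;
      }
*/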

//-----------------------------------------------------------------------

} // end of namespace encapsulating this file's definitions

#endif

/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */