/*!@file Devices/IEEE1394grabber.H Definition and access functions for
  FireWire grabber */

// //////////////////////////////////////////////////////////////////// //
// The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2001 by the //
// University of Southern California (USC) and the iLab at USC.         //
// See http://iLab.usc.edu for information about this project.          //
// //////////////////////////////////////////////////////////////////// //
// Major portions of the iLab Neuromorphic Vision Toolkit are protected //
// under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
// in Visual Environments, and Applications'' by Christof Koch and      //
// Laurent Itti, California Institute of Technology, 2001 (patent       //
// pending; application number 09/912,225 filed July 23, 2001; see      //
// http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
// //////////////////////////////////////////////////////////////////// //
// This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
// redistribute it and/or modify it under the terms of the GNU General  //
// Public License as published by the Free Software Foundation; either  //
// version 2 of the License, or (at your option) any later version.     //
//                                                                      //
// The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
// PURPOSE.  See the GNU General Public License for more details.       //
//                                                                      //
// You should have received a copy of the GNU General Public License    //
// along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
// to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
// Boston, MA 02111-1307 USA.                                           //
// //////////////////////////////////////////////////////////////////// //
//
// Primary maintainer for this file: Laurent Itti <itti@usc.edu>
// $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Devices/IEEE1394grabber.H $
// $Id: IEEE1394grabber.H 10345 2008-10-15 17:27:10Z icore $
//

#ifndef IEEE1394GRABBER_H_DEFINED
#define IEEE1394GRABBER_H_DEFINED

#include "Component/ModelParam.H"
#include "Image/Dims.H"
#include "Transport/FrameIstream.H"
#include "Util/Types.H"
#include "Video/VideoFormat.H"

#ifdef HAVE_IEEE1394
#include <libdc1394/dc1394_control.h>
#endif

class VideoFrame;
template <class T> class PixRGB;
template <class T> class Image;

//! Firewire bus speed to use
#define IEEE1394GRABSPEED SPEED_400

//! Definition and access functions for FireWire digital camera frame capture
/*! This class provides a trivial interface to FireWire digital
    cameras (i.e., web cams that send raw frame data over the firewire
    bus, not DV camcorders that send compressed video streams over the
    firewire bus). All the low-level setup is done during
    construction. The user only needs to call readFrame() or readRGB()
    to capture an image.

    After each grab, the next grab is initiated, and will be ready 33ms
    later (or one frame later if not NTSC). If you call readFrame()
    again before 33ms have elapsed, it will block until the next frame
    is available. If you call it too late, you will have missed the
    latest frame, and readFrame() will block until the next frame is
    acquired.

    So a good strategy is to use readFrame() to not only grab but also
    to synchronize your code with the video rate (30 frames/s if NTSC).
    Typically, then, you would have a main loop that first grabs and
    then does various processing that is guaranteed to take less than
    33ms. You do not need to insert any pause after that processing to
    obtain a stable framerate; just finish your main loop, and the next
    call to readFrame() (at the next iteration) will block until exactly
    one frame has passed since it was last called. See how this is done,
    for example, in pvisionTCP-master.C or test-grab.C
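
    A minimal sketch of such a loop (this only illustrates the idea; it
    assumes the ModelManager / nub::soft_ref setup used by test-grab.C,
    and the variable names are just placeholders):

    \code
    ModelManager manager("IEEE1394 grab example");
    nub::soft_ref<IEEE1394grabber> gb(new IEEE1394grabber(manager));
    manager.addSubComponent(gb);
    manager.start();

    while (true)
      {
        // readRGB() blocks until the next frame is available, so the
        // loop is automatically paced at the camera framerate:
        Image< PixRGB<byte> > img = gb->readRGB();

        // ... process img here, keeping well under one frame period ...
      }
    \endcode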

    Our Unibrain Fire-i rev 1.2 camera supports the following (use
    byteswap = false):

    160 x 120: YUV444 (30, 15, 7.5 fps)
    320 x 240: YUV422 (30, 15, 7.5, 3.75 fps)
    640 x 480: YUV411 (30, 15, 7.5, 3.75 fps)
    640 x 480: YUV422 (15, 7.5, 3.75 fps)
    640 x 480: RGB24 (15, 7.5, 3.75 fps)
    640 x 480: GREY (30, 15, 7.5, 3.75 fps)

    To use this, you must have libdc1394 0.9.0 or later and libraw1394
    0.9.0 or later installed (and you need the -devel RPMs installed as
    well to compile this code). Make sure you also have:
    mknod -m 777 /dev/raw1394 c 171 0
    mkdir /dev/video1394
    mknod -m 777 /dev/video1394/0 c 171 16
*/
class IEEE1394grabber : public FrameIstream
{
public:
  //! Constructor
  IEEE1394grabber(OptionManager& mgr,
                  const std::string& descrName="IEEE1394 Frame Grabber Driver",
                  const std::string& tagName = "IEEE1394FrameGrabber",
                  const ParamFlag flags = USE_MY_VAL);

  //! Destructor
  virtual ~IEEE1394grabber();

  //! Return the specifications of the next frame to be returned
  virtual GenericFrameSpec peekFrameSpec();

  //! Get the inter-frame time that matches our video mode
  virtual SimTime getNaturalFrameTime() const;

  //! Grab video into a pre-allocated buffer
  /*! This is faster than readFrame(), as here we will not allocate a
      new image for each grab, but rather reuse an existing image. If
      'lock' is not NULL, it will be locked while the contents of
      'image' are being modified, and if 'count' is not NULL it will be
      incremented by one. For IEEE1394, only YUV444 is supported. */
  void grabPrealloc(Image< PixRGB<byte> >& image,
                    pthread_mutex_t *lock = NULL, int *count = NULL);

  //! Get the next frame from the frame-grabber
  /*! Returns the grabbed frame. This call will block until a frame is
      ready and has been grabbed.

      Beware that the integrity of the GenericFrame object may not
      last "very long"; basically, try to be finished using the
      GenericFrame object before you attempt to grab the next frame in
      the stream. If you need it for longer than that, then you should
      use GenericFrame::deepCopyOf() to make a copy of the frame that
      can be safely held indefinitely.
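
      For example (a sketch only; 'grabber' here stands for whatever
      pointer or reference you hold on this component):

      \code
      GenericFrame frame = grabber->readFrame();             // blocks until a frame is ready
      // ... use 'frame' right away ...
      GenericFrame saved = GenericFrame::deepCopyOf(frame);  // copy that can outlive the next grab
      \endcode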
  */
  virtual GenericFrame readFrame();

protected:
  //! Grab raw data
  /*! Don't call this directly; use readFrame() instead. */
  VideoFrame grabRaw();

  //! get started
  virtual void start1();

  //! get stopped
  virtual void stop2();

private:
  virtual void paramChanged(ModelParamBase* const param,
                            const bool valueChanged,
                            ParamClient::ChangeStatus* status);

  //! device name of the /dev/ entry for the grabber device
  OModelParam<std::string> itsDevName;

  //! input channel to use
  OModelParam<int> itsChannel;

  //! input subchannel to use
  OModelParam<int> itsSubChan;

  //! dimensions of grabbed frames
  OModelParam<Dims> itsDims;

  //! grab mode that the hardware should use
  /*! Grabbed frames will internally be converted to Image<
      PixRGB<byte> > whatever that mode is, but playing with it may
      influence image quality, maximum achievable framerate, and the
      amount of CPU spent doing those conversions to RGB. */
  OModelParam<VideoFormat> itsGrabMode;

  //! determines whether byte swapping is done during conversion to RGB
  OModelParam<bool> itsByteSwap;

  //! frames per second
  OModelParam<float> itsFPS;

  //! number of frame buffers kept internally
  OModelParam<int> itsNbuf;

  //! brightness - highly dependent on your driver
  OModelParam<int> itsBrightness;

  //! hue - highly dependent on your driver
  OModelParam<int> itsHue;

  //! saturation - highly dependent on your driver
  OModelParam<int> itsSaturation;

  //! exposure - highly dependent on your driver
  OModelParam<int> itsExposure;

  //! sharpness - highly dependent on your driver
  OModelParam<int> itsSharpness;

  //! white balance (B/U value) - highly dependent on your driver
  OModelParam<int> itsWhiteBalBU;

  //! white balance (R/V value) - highly dependent on your driver
  OModelParam<int> itsWhiteBalRV;

  //! gamma - highly dependent on your driver
  OModelParam<int> itsGamma;

  //! shutter - highly dependent on your driver
  OModelParam<int> itsShutter;

  //! gain - highly dependent on your driver
  OModelParam<int> itsGain;

  // check whether the configure script found libraw1394; if not, then
  // just stub out the core of the IEEE1394grabber class and issue
  // LFATAL()s if somebody actually tries to use it
#ifdef HAVE_IEEE1394
  bool itsCameraOk;            // the camera is initialized and transmitting
  raw1394handle_t itsHandle;   // raw 1394 OHCI handle
  dc1394_cameracapture itsCam; // our camera
#endif // HAVE_IEEE1394

};

#endif

// ######################################################################
/* So things look consistent in everyone's emacs... */
/* Local Variables: */
/* indent-tabs-mode: nil */
/* End: */