env_mt_channel.c

Go to the documentation of this file.
00001 /*!@file Envision/env_mt_channel.c */
00002 
00003 // //////////////////////////////////////////////////////////////////// //
00004 // The iLab Neuromorphic Vision C++ Toolkit - Copyright (C) 2000-2005   //
00005 // by the University of Southern California (USC) and the iLab at USC.  //
00006 // See http://iLab.usc.edu for information about this project.          //
00007 // //////////////////////////////////////////////////////////////////// //
00008 // Major portions of the iLab Neuromorphic Vision Toolkit are protected //
00009 // under the U.S. patent ``Computation of Intrinsic Perceptual Saliency //
00010 // in Visual Environments, and Applications'' by Christof Koch and      //
00011 // Laurent Itti, California Institute of Technology, 2001 (patent       //
00012 // pending; application number 09/912,225 filed July 23, 2001; see      //
00013 // http://pair.uspto.gov/cgi-bin/final/home.pl for current status).     //
00014 // //////////////////////////////////////////////////////////////////// //
00015 // This file is part of the iLab Neuromorphic Vision C++ Toolkit.       //
00016 //                                                                      //
00017 // The iLab Neuromorphic Vision C++ Toolkit is free software; you can   //
00018 // redistribute it and/or modify it under the terms of the GNU General  //
00019 // Public License as published by the Free Software Foundation; either  //
00020 // version 2 of the License, or (at your option) any later version.     //
00021 //                                                                      //
00022 // The iLab Neuromorphic Vision C++ Toolkit is distributed in the hope  //
00023 // that it will be useful, but WITHOUT ANY WARRANTY; without even the   //
00024 // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      //
00025 // PURPOSE.  See the GNU General Public License for more details.       //
00026 //                                                                      //
00027 // You should have received a copy of the GNU General Public License    //
00028 // along with the iLab Neuromorphic Vision C++ Toolkit; if not, write   //
00029 // to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,   //
00030 // Boston, MA 02111-1307 USA.                                           //
00031 // //////////////////////////////////////////////////////////////////// //
00032 //
00033 // Primary maintainer for this file: Rob Peters <rjpeters at usc dot edu>
00034 // $HeadURL: svn://isvn.usc.edu/software/invt/trunk/saliency/src/Envision/env_mt_channel.c $
00035 // $Id: env_mt_channel.c 9830 2008-06-18 18:50:22Z lior $
00036 //
00037 
00038 #ifndef ENVISION_ENV_MT_CHANNEL_C_DEFINED
00039 #define ENVISION_ENV_MT_CHANNEL_C_DEFINED
00040 
00041 #include "Envision/env_mt_channel.h"
00042 
00043 #include "Envision/env_alloc.h"
00044 #include "Envision/env_c_math_ops.h"
00045 #include "Envision/env_image_ops.h"
00046 #include "Envision/env_job_server.h"
00047 #include "Envision/env_log.h"
00048 #include "Envision/env_params.h"
00049 
00050 // ######################################################################
/*! Per-job context for computing one steerable-orientation sub-channel.

    One instance is filled in per orientation by
    env_mt_chan_orientation() and handed to env_ori_subjob_run()
    through the job server, so all fields must remain valid until the
    job has completed. */
struct env_ori_subjob_data
{
        const struct env_params* envp;     //!< global algorithm parameters (read-only)
        const struct env_math* imath;      //!< fixed-point math/trig tables (read-only)
        struct env_dims dims;              //!< dimensions of the original input image
        const struct env_pyr* hipass9;     //!< hipass-9 pyramid shared by all orientation jobs
        env_size_t thetaidx;               //!< orientation angle as an index into the trig tables
        env_chan_status_func* status_func; //!< optional progress callback (may be null)
        void* status_userdata;             //!< opaque pointer handed back to status_func
        struct env_image chanOut;          //!< output: the computed sub-channel image
        char channame[17];                 //!< private copy of the name, e.g. "steerable(01/04)"
};
00063 
00064 static void env_ori_subjob_run(void* p)
00065 {
00066         struct env_ori_subjob_data* j = (struct env_ori_subjob_data*)(p);
00067 
00068         env_chan_steerable
00069                 (j->channame, j->envp, j->imath, j->dims,
00070                  j->hipass9, j->thetaidx,
00071                  j->status_func, j->status_userdata, &j->chanOut);
00072 
00073         ENV_ASSERT(env_img_initialized(&j->chanOut));
00074 }
00075 
00076 // ######################################################################
/*! Compute the orientation (steerable-filter) channel, running each
    orientation as a separate job so the work can proceed in parallel
    on the job server.

    @param tagName name reported to @a status_func for the combined result
    @param envp global parameters; envp->num_orientations selects how
           many steerable sub-channels to compute (0 means: leave
           @a result empty and return immediately)
    @param imath fixed-point math/trig tables
    @param img the input image
    @param status_func optional progress callback (may be null)
    @param status_userdata opaque pointer handed back to status_func
    @param result output: the max-normalized average of all
           orientation sub-channels */
void env_mt_chan_orientation(const char* tagName,
                             const struct env_params* envp,
                             const struct env_math* imath,
                             const struct env_image* img,
                             env_chan_status_func* status_func,
                             void* status_userdata,
                             struct env_image* result)
{
        env_img_make_empty(result);

        if (envp->num_orientations == 0)
                return;

        // build the hipass-9 pyramid once; it is shared, read-only,
        // by all orientation jobs
        struct env_pyr hipass9;
        env_pyr_init(&hipass9, env_max_pyr_depth(envp));
        env_pyr_build_hipass_9(img,
                               envp->cs_lev_min,
                               imath,
                               &hipass9);

        // channel-name template "steerable(__/__)"; the underscore
        // pairs are patched below with the 1-based orientation index
        // and the total count
        char buf[17] =
                {
                        's', 't', 'e', 'e', 'r', 'a', 'b', 'l', 'e', // 0--8
                        '(', '_', '_', // 9--11
                        '/', '_', '_', ')', '\0' // 12--16
                };

        // only two digits are reserved for each number in the name
        ENV_ASSERT(envp->num_orientations <= 99);

        buf[13] = '0' + (envp->num_orientations / 10);
        buf[14] = '0' + (envp->num_orientations % 10);

        struct env_job* jobs =
                env_allocate(envp->num_orientations * sizeof(struct env_job));

        ENV_ASSERT2(jobs != 0, "env_allocate failed");

        struct env_ori_subjob_data* jobdata =
                env_allocate(envp->num_orientations
                             * sizeof(struct env_ori_subjob_data));

        ENV_ASSERT2(jobdata != 0, "env_allocate failed");

        // set up one job per orientation
        for (env_size_t i = 0; i < envp->num_orientations; ++i)
        {
                // theta = (180.0 * i) / envp->num_orientations +
                // 90.0, where ENV_TRIG_TABSIZ is equivalent to 360.0
                // or 2*pi
                const env_size_t thetaidx =
                        (ENV_TRIG_TABSIZ * i)
                        / (2 * envp->num_orientations)
                        + (ENV_TRIG_TABSIZ / 4);

                ENV_ASSERT(thetaidx < ENV_TRIG_TABSIZ);

                buf[10] = '0' + ((i+1) / 10);
                buf[11] = '0' + ((i+1) % 10);

                jobdata[i].envp = envp;
                jobdata[i].imath = imath;
                jobdata[i].dims = img->dims;
                jobdata[i].hipass9 = &hipass9;
                jobdata[i].thetaidx = thetaidx;
                jobdata[i].status_func = status_func;
                jobdata[i].status_userdata = status_userdata;
                env_img_init_empty(&jobdata[i].chanOut);
                // each job needs its own copy of the name, since buf
                // is rewritten on the next loop iteration
                for (env_size_t c = 0; c < sizeof(buf); ++c)
                        jobdata[i].channame[c] = buf[c];

                jobs[i].callback = &env_ori_subjob_run;
                jobs[i].userdata = &jobdata[i];
        }

        // run all jobs (possibly concurrently) and wait for completion
        env_run_jobs(&jobs[0], envp->num_orientations);

        // accumulate sum(chanOut / num_orientations) into result
        for (env_size_t i = 0; i < envp->num_orientations; ++i)
        {
                struct env_image* chanOut = &jobdata[i].chanOut;

                if (!env_img_initialized(result))
                {
                        // first sub-channel: size result and store
                        // chanOut / num_orientations into it
                        env_img_resize_dims(result, chanOut->dims);
                        env_c_image_div_scalar
                                (env_img_pixels(chanOut),
                                 env_img_size(chanOut),
                                 (intg32) envp->num_orientations,
                                 env_img_pixelsw(result));
                }
                else
                {
                        ENV_ASSERT(env_dims_equal(chanOut->dims,
                                                  result->dims));
                        // subsequent sub-channels: accumulate
                        env_c_image_div_scalar_accum
                                (env_img_pixels(chanOut),
                                 env_img_size(chanOut),
                                 (intg32) envp->num_orientations,
                                 env_img_pixelsw(result));
                }

                env_img_make_empty(chanOut);
        }

        env_pyr_make_empty(&hipass9);

        ENV_ASSERT(env_img_initialized(result));

        env_max_normalize_inplace(result, INTMAXNORMMIN, INTMAXNORMMAX,
                                  envp->maxnorm_type,
                                  envp->range_thresh);

        if (status_func)
                (*status_func)(status_userdata, tagName, result);

        env_deallocate(jobdata);
        env_deallocate(jobs);
}
00193 
00194 // ######################################################################
/*! Per-job context for computing one Reichardt motion-direction
    sub-channel.

    One instance is filled in per direction by
    env_mt_motion_channel_input_and_consume_pyr() and handed to
    env_direction_job_run() through the job server; all fields must
    remain valid until the job has completed. */
struct env_direction_job_data
{
        const struct env_motion_channel* chan; //!< owning motion channel (holds prev-frame pyramids)
        env_size_t dir;                        //!< which direction (0-based) this job computes
        const struct env_params* envp;         //!< global algorithm parameters (read-only)
        const struct env_math* imath;          //!< fixed-point math/trig tables (read-only)
        struct env_dims inputdims;             //!< dimensions of the original input image
        const struct env_pyr* unshiftedCur;    //!< current frame's pyramid, shared by all direction jobs
        env_chan_status_func* status_func;     //!< optional progress callback (may be null)
        void* status_userdata;                 //!< opaque pointer handed back to status_func
        struct env_image chanOut;              //!< output: the computed sub-channel image
        char channame[17];                     //!< private copy of the name, e.g. "reichardt(01/04)"
};
00208 
00209 static void env_direction_job_run(void* p)
00210 {
00211         struct env_direction_job_data* j = (struct env_direction_job_data*)(p);
00212 
00213         const env_size_t firstlevel = j->envp->cs_lev_min;
00214         const env_size_t depth = env_max_pyr_depth(j->envp);
00215 
00216         // theta = (360.0 * i) / chan->num_directions;
00217         const env_size_t thetaidx =
00218                 (j->dir * ENV_TRIG_TABSIZ) / j->chan->num_directions;
00219 
00220         ENV_ASSERT(thetaidx < ENV_TRIG_TABSIZ);
00221 
00222         // create an empty pyramid
00223         struct env_pyr shiftedCur;
00224         env_pyr_init(&shiftedCur, depth);
00225 
00226         // fill the empty pyramid with the shifted version
00227         for (env_size_t i = firstlevel; i < depth; ++i)
00228         {
00229                 env_img_resize_dims(env_pyr_imgw(&shiftedCur, i),
00230                                     env_pyr_img(j->unshiftedCur, i)->dims);
00231                 env_shift_image(env_pyr_img(j->unshiftedCur, i),
00232                                 j->imath->costab[thetaidx],
00233                                 -j->imath->sintab[thetaidx],
00234                                 ENV_TRIG_NBITS,
00235                                 env_pyr_imgw(&shiftedCur, i));
00236         }
00237 
00238         env_chan_direction(j->channame, j->envp, j->imath,
00239                            j->inputdims,
00240                            &j->chan->unshifted_prev, j->unshiftedCur,
00241                            &j->chan->shifted_prev[j->dir], &shiftedCur,
00242                            j->status_func, j->status_userdata, &j->chanOut);
00243 
00244         env_pyr_swap(&j->chan->shifted_prev[j->dir], &shiftedCur);
00245         env_pyr_make_empty(&shiftedCur);
00246 }
00247 
00248 // ######################################################################
/*! Feed one frame into the motion channel and compute its output,
    running each Reichardt direction as a separate job on the job
    server.

    "Consume" is literal: on return @a unshiftedCur has been swapped
    into chan->unshifted_prev (to serve as the previous frame next
    time) and then emptied, so the caller's pyramid is gone.

    @param chan the motion channel; re-initialized here if its
           num_directions disagrees with envp->num_motion_directions
    @param tagName name reported to @a status_func for the combined result
    @param envp global algorithm parameters
    @param imath fixed-point math/trig tables
    @param inputdims dimensions of the original input image
    @param result output: max-normalized average over all direction
           sub-channels; left empty if there are no directions or no
           sub-channel produced output (e.g. on the first frame) */
void env_mt_motion_channel_input_and_consume_pyr(
        struct env_motion_channel* chan,
        const char* tagName,
        const struct env_params* envp,
        const struct env_math* imath,
        const struct env_dims inputdims,
        struct env_pyr* unshiftedCur,
        env_chan_status_func* status_func,
        void* status_userdata,
        struct env_image* result)
{
        env_img_make_empty(result);

        // rebuild the channel if the requested number of directions
        // has changed since last time
        if (chan->num_directions != envp->num_motion_directions)
        {
                env_motion_channel_destroy(chan);
                env_motion_channel_init(chan, envp);
        }

        if (chan->num_directions == 0)
                return;

        // channel-name template "reichardt(__/__)"; the underscore
        // pairs are patched below with the 1-based direction index
        // and the total count
        char buf[17] =
                {
                        'r', 'e', 'i', 'c', 'h', 'a', 'r', 'd', 't', // 0--8
                        '(', '_', '_', // 9--11
                        '/', '_', '_', ')', '\0' // 12--16
                };

        // only two digits are reserved for each number in the name
        ENV_ASSERT(chan->num_directions <= 99);

        buf[13] = '0' + (chan->num_directions / 10);
        buf[14] = '0' + (chan->num_directions % 10);

        struct env_job* jobs =
                env_allocate(chan->num_directions * sizeof(struct env_job));

        ENV_ASSERT2(jobs != 0, "env_allocate failed");

        struct env_direction_job_data* jobdata =
                env_allocate(chan->num_directions
                             * sizeof(struct env_direction_job_data));

        ENV_ASSERT2(jobdata != 0, "env_allocate failed");

        // compute Reichardt motion detection into several directions
        for (env_size_t dir = 0; dir < chan->num_directions; ++dir)
        {
                buf[10] = '0' + ((dir+1) / 10);
                buf[11] = '0' + ((dir+1) % 10);

                jobdata[dir].chan = chan;
                jobdata[dir].dir = dir;
                jobdata[dir].envp = envp;
                jobdata[dir].imath = imath;
                jobdata[dir].inputdims = inputdims;
                jobdata[dir].unshiftedCur = unshiftedCur;
                jobdata[dir].status_func = status_func;
                jobdata[dir].status_userdata = status_userdata;
                env_img_init_empty(&jobdata[dir].chanOut);
                // each job needs its own copy of the name, since buf
                // is rewritten on the next loop iteration
                for (env_size_t c = 0; c < sizeof(buf); ++c)
                        jobdata[dir].channame[c] = buf[c];

                jobs[dir].callback = &env_direction_job_run;
                jobs[dir].userdata = &jobdata[dir];
        }

        // run all jobs (possibly concurrently) and wait for completion
        env_run_jobs(&jobs[0], chan->num_directions);

        // accumulate sum(chanOut / num_directions) into result; a
        // sub-channel may legitimately have produced no output (no
        // previous frame yet), in which case it is skipped
        for (env_size_t dir = 0; dir < chan->num_directions; ++dir)
        {
                struct env_image* chanOut = &jobdata[dir].chanOut;

                if (env_img_initialized(chanOut))
                {
                        if (!env_img_initialized(result))
                        {
                                // first sub-channel with output: size
                                // result and store chanOut / num_directions
                                env_img_resize_dims(result, chanOut->dims);
                                env_c_image_div_scalar
                                        (env_img_pixels(chanOut),
                                         env_img_size(chanOut),
                                         (intg32) chan->num_directions,
                                         env_img_pixelsw(result));
                        }
                        else
                        {
                                ENV_ASSERT
                                        (env_dims_equal(chanOut->dims,
                                                        result->dims));
                                // subsequent sub-channels: accumulate
                                env_c_image_div_scalar_accum
                                        (env_img_pixels(chanOut),
                                         env_img_size(chanOut),
                                         (intg32) chan->num_directions,
                                         env_img_pixelsw(result));
                        }
                }

                env_img_make_empty(chanOut);
        }

        // normalize and report only if at least one sub-channel
        // produced output
        if (env_img_initialized(result))
        {
                env_max_normalize_inplace(result,
                                          INTMAXNORMMIN, INTMAXNORMMAX,
                                          envp->maxnorm_type,
                                          envp->range_thresh);
                if (status_func)
                        (*status_func)(status_userdata, tagName, result);
        }

        // the current frame becomes the previous frame; the caller's
        // pyramid is consumed (left empty)
        env_pyr_swap(unshiftedCur, &chan->unshifted_prev);
        env_pyr_make_empty(unshiftedCur);

        env_deallocate(jobdata);
        env_deallocate(jobs);
}
00365 
00366 // ######################################################################
00367 /* So things look consistent in everyone's emacs... */
00368 /* Local Variables: */
00369 /* indent-tabs-mode: nil */
00370 /* c-file-style: "linux" */
00371 /* End: */
00372 
00373 #endif // ENVISION_ENV_MT_CHANNEL_C_DEFINED
Generated on Sun May 8 08:04:45 2011 for iLab Neuromorphic Vision Toolkit by  doxygen 1.6.3