// libeblearn
// /home/rex/ebltrunk/core/libeblearn/include/ebl_layers.h
00001 /***************************************************************************
00002  *   Copyright (C) 2008 by Yann LeCun and Pierre Sermanet *
00003  *   yann@cs.nyu.edu, pierre.sermanet@gmail.com *
00004  *
00005  * Redistribution and use in source and binary forms, with or without
00006  * modification, are permitted provided that the following conditions are met:
00007  *     * Redistributions of source code must retain the above copyright
00008  *       notice, this list of conditions and the following disclaimer.
00009  *     * Redistributions in binary form must reproduce the above copyright
00010  *       notice, this list of conditions and the following disclaimer in the
00011  *       documentation and/or other materials provided with the distribution.
00012  *     * Redistribution under a license not approved by the Open Source
00013  *       Initiative (http://www.opensource.org) must display the
00014  *       following acknowledgement in all advertising material:
00015  *        This product includes software developed at the Courant
00016  *        Institute of Mathematical Sciences (http://cims.nyu.edu).
00017  *     * The names of the authors may not be used to endorse or promote products
00018  *       derived from this software without specific prior written permission.
00019  *
00020  * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
00021  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
00022  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
00023  * DISCLAIMED. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
00024  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
00025  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
00026  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
00027  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
00028  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
00029  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
00030  ***************************************************************************/
00031 
00032 #ifndef EBL_LAYERS_H_
00033 #define EBL_LAYERS_H_
00034 
00035 #include "ebl_defines.h"
00036 #include "libidx.h"
00037 #include "ebl_states.h"
00038 #include "ebl_basic.h"
00039 #include "ebl_pooling.h"
00040 #include "ebl_arch.h"
00041 #include "ebl_nonlinearity.h"
00042 #include "ebl_normalization.h"
00043 
00044 namespace ebl {
00045 
00048   template <typename T, class Tstate = bbstate_idx<T> >
00049     class full_layer : public module_1_1<T,Tstate> {
00050   public: 
00057     full_layer(parameter<T,Tstate> *p, intg indim0, intg noutputs,
00058                bool tanh = true, const char *name = "full_layer");
00060     virtual ~full_layer();
00062     void fprop(Tstate &in, Tstate &out);
00064     void bprop(Tstate &in, Tstate &out);
00066     void bbprop(Tstate &in, Tstate &out);
00068     void forget(forget_param_linear &fp);
00071     virtual fidxdim fprop_size(fidxdim &i_size);
00074     virtual idxdim bprop_size(const idxdim &o_size);
00076     virtual full_layer<T,Tstate>* copy();
00078     virtual std::string describe();
00079 
00080     // members ////////////////////////////////////////////////////////
00081   private:
00082     bool                                 btanh; 
00083   public:
00084      /* linear_module_replicable<T,Tstate>       linear;//!< linear module for weight */
00085     linear_module<T,Tstate>              linear;
00086     addc_module<T,Tstate>                adder; 
00087     module_1_1<T,Tstate>                *sigmoid;
00088     Tstate                              *sum;   
00089   };
00090 
00093   template <typename T, class Tstate = bbstate_idx<T> >
00094     class convolution_layer : public module_1_1<T,Tstate> {
00095   public:
00104     convolution_layer(parameter<T,Tstate> *p, idxdim &ker, idxdim &stride,
00105                       idx<intg> &tbl, bool tanh = true,
00106                       const char *name = "convolution_layer");
00107     virtual ~convolution_layer();
00109     void fprop(Tstate &in, Tstate &out);
00111     void bprop(Tstate &in, Tstate &out);
00113     void bbprop(Tstate &in, Tstate &out);
00115     void forget(forget_param_linear &fp);
00118     virtual fidxdim fprop_size(fidxdim &i_size);
00121     virtual fidxdim bprop_size(const fidxdim &o_size);
00123     virtual convolution_layer<T,Tstate>* copy();
00124 
00125     // members ////////////////////////////////////////////////////////
00126   private:
00127     bool                                         btanh; 
00128   public:
00129   //    convolution_module_replicable<T,Tstate>  convol;//!< convolution module
00130     convolution_module<T,Tstate>         convol;
00131     addc_module<T,Tstate>                        adder; 
00132     module_1_1<T,Tstate>                        *sigmoid;
00133     Tstate                                      *sum;   
00134   };
00135 
00139   template <typename T, class Tstate = bbstate_idx<T> >
00140     class convabsnorm_layer : public module_1_1<T,Tstate> {
00141   public:
00154 
00155     convabsnorm_layer(parameter<T,Tstate> *p, intg kerneli, intg kernelj, 
00156                       intg stridei, intg stridej, idx<intg> &tbl,
00157                       bool mirror = false, bool tanh = true,
00158                       const char *name = "convabsnorm_layer");
00160     virtual ~convabsnorm_layer();
00162     void fprop(Tstate &in, Tstate &out);
00164     void bprop(Tstate &in, Tstate &out);
00166     void bbprop(Tstate &in, Tstate &out);
00168     void forget(forget_param_linear &fp);
00171     virtual fidxdim fprop_size(fidxdim &i_size);
00174     virtual fidxdim bprop_size(const fidxdim &o_size);
00176     virtual convabsnorm_layer<T,Tstate>* copy();
00177 
00178     // members ////////////////////////////////////////////////////////
00179   private:
00180     bool                                 btanh; 
00181   public:
00182     convolution_layer<T,Tstate>          lconv; 
00183     abs_module<T,Tstate>                 abs;   
00184     contrast_norm_module<T,Tstate>       norm;  
00185     Tstate                              *tmp;   
00186     Tstate                              *tmp2;  
00187   };
00188 
00191   template <typename T, class Tstate = bbstate_idx<T> >
00192     class subsampling_layer : public module_1_1<T,Tstate> {
00193   public:
00201     subsampling_layer(parameter<T,Tstate> *p, uint thickness,
00202                       idxdim &kernel, idxdim &stride,
00203                       bool tanh = true, const char *name = "subsampling_layer");
00205     virtual ~subsampling_layer();
00207     void fprop(Tstate &in, Tstate &out);
00209     void bprop(Tstate &in, Tstate &out);
00211     void bbprop(Tstate &in, Tstate &out);
00213     void forget(forget_param_linear &fp);
00216     virtual fidxdim fprop_size(fidxdim &i_size);
00219     virtual fidxdim bprop_size(const fidxdim &o_size);
00221     virtual subsampling_layer<T,Tstate>* copy();
00223     virtual std::string describe();
00224 
00225     // members ////////////////////////////////////////////////////////
00226   private:
00227     bool                                         btanh; 
00228   public:
00229     subsampling_module_replicable<T,Tstate>      subsampler;
00230     addc_module<T,Tstate>                        adder; 
00231     module_1_1<T,Tstate>                        *sigmoid;
00232     Tstate                                      *sum;   
00233    };
00234 
00235 } // namespace ebl
00236 
00237 #include "ebl_layers.hpp"
00238 
00239 #endif /* EBL_LAYERS_H_ */