libeblearn
/***************************************************************************
 *   Copyright (C) 2008 by Yann LeCun and Pierre Sermanet                  *
 *   yann@cs.nyu.edu, pierre.sermanet@gmail.com                            *
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Redistribution under a license not approved by the Open Source
 *       Initiative (http://www.opensource.org) must display the
 *       following acknowledgement in all advertising material:
 *        This product includes software developed at the Courant
 *        Institute of Mathematical Sciences (http://cims.nyu.edu).
 *     * The names of the authors may not be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 ***************************************************************************/

#ifndef SDNN_MODULES_H_
#define SDNN_MODULES_H_

#include "libidx.h"
#include "net_cscscfe.h"

namespace ebl {

  /* //! a special kind of state used to store the output of a classifier. */
  /* //! sdnnclass-states are generated by modules such as class-max, and used */
  /* //! by meters such as classifier-meter. No backprop is possible through */
  /* //! an sdnnclass-state. */
  /* class sdnnclass_state { */
  /* public: */
  /*   int          output_class; */
  /*   double       confidence; */
  /*   idx<int>    *sorted_classes; */
  /*   idx<double> *sorted_scores; */

  /*   sdnnclass_state(int n); */

  /*   virtual ~sdnnclass_state(); */
  /* }; */

  /* //////////////////////////////////////////////////////////////////////// */

  /* //! a classifier that computes class scores based on */
  /* //! an mmi type criterion (a kind of softmax in log). */
  /* //! It gives scores (costs) for all classes, including junk. */
  /* //! It should be used in conjunction with sdnn-cost. */
  /* //! This assumes that the outputs of the previous module */
  /* //! are costs, or negative log likelihoods. */
  /* //! This module accepts spatially replicated inputs. */
  /* template <class T> class sdnn_classer { */
  /* public: */
  /*   //! a vector that maps output unit index to a label */
  /*   idx<int>     *classindex2label; */
  /*   state_idx<T> *junk_param; */
  /*   state_idx<T> *logadded_distjunk; */
  /*   idx<double>  *priors; */

  /*   //! makes a new sdnn-classer. The arguments are identical */
  /*   //! to those of sdnn-cost. In fact, if an sdnn-classer is to be */
  /*   //! used in conjunction with an mmi-cost, they should share */
  /*   //! the prior vector and the parameter. */
  /*   //! Sharing the parameter can be done by first building the */
  /*   //! classer, then reducing the size of the parameter by one, */
  /*   //! then creating the cost. */
  /*   sdnn_classer(idx<int> *classes, idx<double> *pr, int ini, int inj, */
  /*                parameter *prm); */

  /*   virtual ~sdnn_classer(); */

  /*   //! set the constant cost of the junk class to <c>. */
  /*   //! the underlying parameter is given the value */
  /*   //! sqrt(2 * <c>), so <c> must be positive. */
  /*   //! BE CAREFUL: the junk parameter of an sdnn-classer */
  /*   //! is usually shared by an mmi-cost, so changing one */
  /*   //! will change the other. */
  /*   void set_junk_cost(float c); */

  /*   void fprop(state_idx<T> *in, sdnnclass_state *out); */
  /* }; */

  /* //////////////////////////////////////////////////////////////////////// */

  /* //! a module that takes an idx3 as input, runs it through */
  /* //! a machine, and runs the output of the machine through */
  /* //! a cost function whose second output is the desired label */
  /* //! stored in an idx0 of int. */
  /* template <class T> class sdnn_module { */
  /* public: */
  /*   net_cscscfe  &machine; */
  /*   state_idx<T> *mout; */
  /*   sdnn_classer &classifier; */

  /*   sdnn_module(net_cscscfe &m, sdnn_classer &cl); */

  /*   virtual ~sdnn_module(); */

  /*   void fprop(state_idx<T> &input, sdnnclass_state &output); */
  /* }; */

} // end namespace ebl

#endif /* SDNN_MODULES_H_ */
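// A minimal usage sketch (hypothetical, assuming the commented-out
// declarations above were re-enabled). The machine `net`, label map `labels`,
// prior vector `priors`, parameter `prm`, and the helper name `classify_one`
// are illustrative placeholders built elsewhere; only constructors and
// methods declared in this header are called, and idx::dim() is assumed
// from libidx.
//
// void classify_one(net_cscscfe &net, parameter *prm,
//                   idx<int> *labels, idx<double> *priors,
//                   state_idx<double> &input) {
//   // build the classer over a 1x1 spatial output plane, sharing the prior
//   // vector and the parameter as described in the comments above
//   sdnn_classer<double> classer(labels, priors, 1, 1, prm);
//   classer.set_junk_cost(1.0); // constant cost for the junk class
//
//   // wrap the machine and classer into one module and run a forward pass
//   sdnn_module<double> mod(net, classer);
//   sdnnclass_state output(labels->dim(0)); // one slot per class
//   mod.fprop(input, output);
//   // output.output_class and output.confidence now hold the prediction
// }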