Main Page | Namespace List | Class Hierarchy | Alphabetical List | Class List | File List | Namespace Members | Class Members | File Members

NeighborhoodSmoothnessNNet.h

Go to the documentation of this file.
// -*- C++ -*-

// NeighborhoodSmoothnessNNet.h
// Copyright (c) 1998-2002 Pascal Vincent
// Copyright (C) 1999-2002 Yoshua Bengio and University of Montreal
// Copyright (c) 2002 Jean-Sebastien Senecal, Xavier Saint-Mleux, Rejean Ducharme
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

/* *******************************************************
 * $Id: NeighborhoodSmoothnessNNet.h,v 1.10 2004/07/21 16:30:56 chrish42 Exp $
 ******************************************************* */

#ifndef NeighborhoodSmoothnessNNet_INC
#define NeighborhoodSmoothnessNNet_INC

#include "PLearner.h"
#include <plearn/opt/Optimizer.h>
//#include "Var_all.h"

namespace PLearn {
using namespace std;

//! A feedforward neural network learner whose training cost includes a
//! smoothness penalty on the last hidden layer: inputs are processed in
//! "bags" (see the SumOverBagsVariable-filled members below) and the hidden
//! representations of the inputs within a bag are penalized for being
//! dissimilar, weighted by the probabilities p_ij computed on the inputs.
//! NOTE(review): the exact form of the penalty (role of sigma_hidden and
//! sne_weight) is inferred from member names -- confirm against the .cc file.
class NeighborhoodSmoothnessNNet: public PLearner
{

protected:

  Var input;        // Var(inputsize())
  Var target;       // Var(targetsize()-weightsize())
  Var sampleweight; // Var(1) if train_set->hasWeights()
  Var w1;           // bias and weights of first hidden layer
  Var w2;           // bias and weights of second hidden layer
  Var wout;         // bias and weights of output layer
  Var wdirect;      // bias and weights for direct in-to-out connection
  Var last_hidden;  // last hidden layer (the one to smooth)
  Var output;       // output (P(y_i|x_i)) for a single bag element
  Var bag_size;     // filled up by SumOverBagsVariable
  Var bag_inputs;   // filled up by SumOverBagsVariable
  Var bag_output;   // P(y=1|bag_inputs)
  Var bag_hidden;   // The hidden layers of all inputs in a bag.
  // mutable so the const compute* methods can record the bag size.
  mutable int test_bag_size; // BECAUSE OF UGLY HACK IN computeOutputAndCosts (look at it, it's worth it!)

  Func invars_to_training_cost; // (bag inputs and targets) -> training cost

  VarArray costs;    // (negative log-likelihood, classification error) for the bag
  VarArray penalties;
  Var training_cost; // weighted cost + penalties
  Var test_costs;    // hconcat(costs)
  VarArray invars;
  VarArray params;   // all parameter input vars
  Vec paramsvalues;  // values of all parameters

  Var p_ij; // The probabilities p_ij on the inputs.

public:

  // Compiled functions, cached for reuse; mutable so they can be (re)built
  // lazily from the const compute* methods.
  mutable Func f;                         // input -> output
  Func f_input_to_hidden;                 // input -> hidden
  mutable Func test_costf;                // input & target -> output & test_costs
  mutable Func output_and_target_to_cost; // output & target -> cost

public:

  typedef PLearner inherited;

  // Build options inherited from learner:
  // inputsize, outputsize, targetsize, experiment_name, save_at_every_epoch

  // Build options:
  int max_n_instances; // maximum number of instances (input vectors x_i) allowed

  int nhidden;  // number of hidden units in first hidden layer (default:0)
  int nhidden2; // number of hidden units in second hidden layer (default:0)
  int noutputs; // number of output units (outputsize)

  real sigma_hidden; // presumably the bandwidth of the kernel applied to hidden
                     // representations in the smoothness penalty -- confirm in .cc
  real sne_weight;   // presumably the weight of the (SNE-like) smoothness
                     // penalty in the training cost -- confirm in .cc

  real weight_decay;                  // default: 0
  real bias_decay;                    // default: 0
  real layer1_weight_decay;           // default: MISSING_VALUE
  real layer1_bias_decay;             // default: MISSING_VALUE
  real layer2_weight_decay;           // default: MISSING_VALUE
  real layer2_bias_decay;             // default: MISSING_VALUE
  real output_layer_weight_decay;     // default: MISSING_VALUE
  real output_layer_bias_decay;       // default: MISSING_VALUE
  real direct_in_to_out_weight_decay; // default: MISSING_VALUE
  real classification_regularizer;    // default: 0

  bool L1_penalty;       // default: false
  bool direct_in_to_out; // should we include direct input to output connections? default: false
  string output_transfer_func; // tanh, sigmoid, softplus, softmax (default: "" means no transfer function)
  real interval_minval, interval_maxval; // if output_transfer_func = interval(minval,maxval), these are the interval bounds

  // The cost functions to compute; each can be one of mse, mse_onehot, NLL,
  // class_error or multiclass_error (no default)
  Array<string> cost_funcs;

  // Build options related to the optimization:
  PP<Optimizer> optimizer; // the optimizer to use (no default)

  int batch_size; // how many samples to use to estimate gradient before an update
                  // 0 means the whole training set (default: 1)

private:

  //! Object-construction helper called by build() (PLearn convention).
  void build_();

public:

  NeighborhoodSmoothnessNNet();
  virtual ~NeighborhoodSmoothnessNNet();
  PLEARN_DECLARE_OBJECT(NeighborhoodSmoothnessNNet);

  virtual void build();
  virtual void forget(); // simply calls initializeParams()

  virtual int outputsize() const;
  virtual TVec<string> getTrainCostNames() const;
  virtual TVec<string> getTestCostNames() const;

  virtual void train();

  virtual void setTrainingSet(VMat training_set, bool call_forget=true);

  virtual void computeOutput(const Vec& input, Vec& output) const;

  virtual void computeOutputAndCosts(const Vec& input, const Vec& target,
                                     Vec& output, Vec& costs) const;

  virtual void computeCostsFromOutputs(const Vec& input, const Vec& output,
                                       const Vec& target, Vec& costs) const;

  virtual void makeDeepCopyFromShallowCopy(CopiesMap &copies);

protected:
  static void declareOptions(OptionList& ol);
  //! Initializes the learnable parameters (called by forget()).
  void initializeParams();

};

DECLARE_OBJECT_PTR(NeighborhoodSmoothnessNNet);

} // end of namespace PLearn

#endif

Generated on Tue Aug 17 15:59:43 2004 for PLearn by doxygen 1.3.7