
ConstantRegressor.cc

// -*- C++ -*-

// ConstantRegressor.cc
//
// Copyright (C) 2003 *AUTHOR(S)*
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//  1. Redistributions of source code must retain the above copyright
//     notice, this list of conditions and the following disclaimer.
//
//  2. Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//
//  3. The name of the authors may not be used to endorse or promote
//     products derived from this software without specific prior written
//     permission.
//
// THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
// NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// This file is part of the PLearn library. For more information on the PLearn
// library, go to the PLearn Web site at www.plearn.org

/* *******************************************************
 * $Id: ConstantRegressor.cc,v 1.8 2004/07/21 20:26:52 tihocan Exp $
 ******************************************************* */

#include "ConstantRegressor.h"

namespace PLearn {
using namespace std;

ConstantRegressor::ConstantRegressor()
    : weight_decay(0.0)
{
}

PLEARN_IMPLEMENT_OBJECT(
    ConstantRegressor,
    "PLearner that outputs a constant (input-independent) vector.\n",
    "ConstantRegressor is a PLearner that outputs a constant (input-independent,\n"
    "but training-data-dependent) vector. It is a regressor: during training,\n"
    "the constant vector is chosen to minimize the (possibly weighted) mean\n"
    "squared error on the training set targets. Let\n"
    "  N      = number of training examples,\n"
    "  M      = target size (= output size),\n"
    "  y_{ij} = the j-th target value of the i-th training example,\n"
    "  w_i    = weight associated with the i-th training example,\n"
    "then the j-th component of the learned vector is\n"
    "  (sum_{i=1}^N w_i * y_{ij}) / (sum_{i=1}^N w_i),\n"
    "i.e. the weighted average of the targets. The output can also be set\n"
    "manually with the 'constant_output' vector option.\n");
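// Worked example of the formula above (an added illustration, not part of
// the original source): with N = 2 examples, M = 1 target dimension,
// weights w = (1, 3) and targets y = (2.0, 6.0), the learned constant is
//   (1*2.0 + 3*6.0) / (1 + 3) = 20.0 / 4 = 5.0,
// i.e. the weighted mean of the targets.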
void ConstantRegressor::declareOptions(OptionList& ol)
{
    // ### Declare all of this object's options here.
    // ### For the "flags" of each option, you should typically specify
    // ### one of OptionBase::buildoption, OptionBase::learntoption or
    // ### OptionBase::tuningoption. Another possible flag that can be
    // ### combined with these is OptionBase::nosave.

    declareOption(ol, "weight_decay", &ConstantRegressor::weight_decay,
                  OptionBase::buildoption,
                  "Weight decay parameter. Default=0. NOT CURRENTLY TAKEN INTO ACCOUNT!");

    // ### ex:
    declareOption(ol, "constant_output", &ConstantRegressor::constant_output,
                  OptionBase::learntoption,
                  "This is the learnt parameter, the constant output. During training\n"
                  "it is set to the (possibly weighted) average of the targets.\n");

    // Now call the parent class' declareOptions
    inherited::declareOptions(ol);
}

void ConstantRegressor::build_()
{
}

// ### Nothing to add here, simply calls build_
void ConstantRegressor::build()
{
    inherited::build();
    build_();
}

void ConstantRegressor::makeDeepCopyFromShallowCopy(map<const void*, void*>& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
}

int ConstantRegressor::outputsize() const
{
    return targetsize();
}

void ConstantRegressor::forget()
{
    // Since this is a one-shot learner, there is nothing to forget.
}

void ConstantRegressor::train()
{
    // The role of the train method is to bring the learner up to
    // stage==nstages, updating train_stats with training costs measured
    // on-line in the process.

    Vec input;  // Not static because God knows who may be using a ConstantRegressor.
    Vec target;
    Vec train_costs;
    Vec sum_of_weighted_targets;
    real weight;
    train_costs.resize(1);
    input.resize(inputsize());    // the train_set's inputsize()
    target.resize(targetsize());  // the train_set's targetsize()
    sum_of_weighted_targets.resize(targetsize());  // the running sum of weighted targets
    constant_output.resize(targetsize());

    if (!train_stats)  // make a default stats collector, in case there's none
        train_stats = new VecStatsCollector();

    real sum_of_weights = 0;
    sum_of_weighted_targets.clear();

    int n_examples = train_set->length();
    for (int i = 0; i < n_examples; i++)
    {
        train_set->getExample(i, input, target, weight);
        // Accumulate sum_i w_i * y_i and sum_i w_i, and keep constant_output
        // equal to their ratio: the weighted mean of the targets seen so far.
        multiplyAdd(sum_of_weighted_targets, target, weight, sum_of_weighted_targets);
        sum_of_weights += weight;
        multiply(sum_of_weighted_targets, real(1.0/sum_of_weights), constant_output);
        train_costs[0] = weight * powdistance(constant_output, target);
        train_stats->update(train_costs);
    }
    train_stats->finalize();  // finalize statistics for this one and only epoch
}
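// Note on train() above (an added remark, not part of the original source):
// after processing the i-th example, constant_output holds the running
// weighted mean
//   (sum_{k<=i} w_k * y_k) / (sum_{k<=i} w_k),
// so the cost recorded for that example is the weighted squared error of the
// estimate built from the examples seen so far, including the current one.
// Continuing the worked example with weights (1, 3) and targets (2.0, 6.0):
//   after example 1: constant_output = 2.0, recorded cost = 1 * (2.0 - 2.0)^2 = 0.0
//   after example 2: constant_output = 5.0, recorded cost = 3 * (5.0 - 6.0)^2 = 3.0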
void ConstantRegressor::computeOutput(const Vec& input, Vec& output) const
{
    // Compute the output from the input: it is the same constant vector
    // regardless of the input.
    output.resize(outputsize());
    output << constant_output;
}

void ConstantRegressor::computeCostsFromOutputs(const Vec& input, const Vec& output,
                                                const Vec& target, Vec& costs) const
{
    // Compute the costs from the *already* computed output.
    costs[0] = powdistance(output, target);
}

TVec<string> ConstantRegressor::getTestCostNames() const
{
    // Return the names of the costs computed by computeCostsFromOutputs
    return getTrainCostNames();
}

TVec<string> ConstantRegressor::getTrainCostNames() const
{
    // Return the names of the objective costs that the train method computes
    // and for which it updates the VecStatsCollector train_stats
    return TVec<string>(1, "mse");
}

} // end of namespace PLearn
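A minimal usage sketch (not part of the original file): it assumes the standard PLearner interface (setTrainingSet, train, computeOutput) and an already-built VMat whose inputsize, targetsize and weightsize are set; the names 'trainset' and 'example_usage' are placeholders introduced for illustration.

// Hypothetical usage sketch, assuming the standard PLearner interface.
#include "ConstantRegressor.h"

using namespace PLearn;

void example_usage(VMat trainset)  // 'trainset': placeholder training VMat
{
    PP<ConstantRegressor> learner = new ConstantRegressor();
    learner->setTrainingSet(trainset);  // supplies inputsize/targetsize/weights
    learner->train();                   // sets constant_output to the weighted target mean

    Vec input(trainset->inputsize());   // any input yields the same output
    Vec output(learner->outputsize());
    learner->computeOutput(input, output);
}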
