#include "Optimizer.h"

#ifdef DEBUGCG
#include <plearn/display/GhostScript.h>
#endif

namespace PLearn {
using namespace std;

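//! Constructors. The variants taking (the_params, the_cost) also record the
//! variables to optimize; update_for_measure, when given, is added to the
//! outputs of the propagation path built in build_() below.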
Optimizer::Optimizer(int n_updates, const string& file_name,
                     int every_iterations)
    : nupdates(n_updates), nstages(0), filename(file_name),
      every(every_iterations)
{}

Optimizer::Optimizer(VarArray the_params, Var the_cost, int n_updates,
                     const string& file_name, int every_iterations)
    : params(the_params), cost(the_cost), nupdates(n_updates),
      filename(file_name), every(every_iterations)
{}

Optimizer::Optimizer(VarArray the_params, Var the_cost,
                     VarArray the_update_for_measure, int n_updates,
                     const string& file_name, int every_iterations)
    : params(the_params), cost(the_cost), nupdates(n_updates),
      update_for_measure(the_update_for_measure),
      filename(file_name), every(every_iterations)
{}

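//! Standard PLearn build: forwards to inherited::build(), then build_().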
void Optimizer::build()
{
    inherited::build();
    build_();
}

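//! Builds the propagation path from the parameters to the cost (appending
//! update_for_measure to the path's outputs when it is non-empty), fprops
//! the parents of that path once so their values are up to date, and, when
//! there are parameters, resizes temp_grad to one entry per parameter
//! element.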
void Optimizer::build_()
{
    if (update_for_measure.length() == 0) {
        early_stop = false;
        early_stop_i = 0;
        proppath = propagationPath(params, cost);
    }
    else
        proppath = propagationPath(params, update_for_measure & (VarArray)cost);
    VarArray path_from_all_sources_to_direct_parents =
        propagationPathToParentsOfPath(params, cost);
    path_from_all_sources_to_direct_parents.fprop();
    int n = params.nelems();
    if (n > 0) {
        temp_grad.resize(params.nelems());
    }
}

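//! Resets the optimizer's stage counter to zero.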
void Optimizer::reset()
{
    stage = 0;
}

void Optimizer::declareOptions(OptionList& ol)
{
    declareOption(ol, "n_updates", &Optimizer::nupdates, OptionBase::buildoption,
                  "Deprecated - maximum number of parameter updates to be performed by the optimizer\n");

    declareOption(ol, "every_iterations", &Optimizer::every, OptionBase::buildoption,
                  "Deprecated - call the measure() method every that many updates\n");

    declareOption(ol, "filename", &Optimizer::filename, OptionBase::buildoption,
                  "Call measure() every <every_iterations> of the <n_updates> updates, saving the results in <filename>.\n");

    declareOption(ol, "nstages", &Optimizer::nstages, OptionBase::buildoption,
                  "Number of iterations to perform on the next 'optimizeN' call\n");

    inherited::declareOptions(ol);
}

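//! Deprecated plain-stream serialization (format version 0): only the
//! n_updates and every_iterations fields are written and read back.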
void Optimizer::oldwrite(ostream& out) const
{
    writeHeader(out, "Optimizer", 0);
    writeField(out, "n_updates", nupdates);
    writeField(out, "every_iterations", every);
    writeFooter(out, "Optimizer");
}

void Optimizer::oldread(istream& in)
{
    int ver = readHeader(in, "Optimizer");
    if (ver != 0)
        PLERROR("In Optimizer::oldread: version number %d not supported", ver);
    readField(in, "n_updates", nupdates);
    readField(in, "every_iterations", every);
    readFooter(in, "Optimizer");
}

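//! Sets the parameters to optimize and the cost to minimize, then rebuilds
//! the propagation path between them; the parents of the path are fprop'ed
//! once so that all inputs feeding the path hold up-to-date values.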
void Optimizer::setToOptimize(VarArray the_params, Var the_cost)
{
    params = the_params;
    cost = the_cost;
    proppath = propagationPath(params, cost);
    VarArray path_from_all_sources_to_direct_parents =
        propagationPathToParentsOfPath(params, cost);
    path_from_all_sources_to_direct_parents.fprop();
}

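//! Typed option setters: "params" and "cost" go through setToOptimize() so
//! that the propagation path is rebuilt; any unrecognized option name is a
//! PLERROR.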
void Optimizer::setVarArrayOption(const string& optionname, VarArray value)
{
    if (optionname == "params")
        setToOptimize(value, cost);
    else if (optionname == "update_for_measure")
        update_for_measure = value;
    else
        PLERROR("In Optimizer::setVarArrayOption(const string& optionname, VarArray value): option not recognized (%s).",
                optionname.c_str());
}

void Optimizer::setVarOption(const string& optionname, Var value)
{
    if (optionname == "cost")
        setToOptimize(params, value);
    else
        PLERROR("In Optimizer::setVarOption(const string& optionname, Var value): option not recognized (%s).",
                optionname.c_str());
}

void Optimizer::setVMatOption(const string& optionname, VMat value)
{
    PLERROR("In Optimizer::setVMatOption(const string& optionname, VMat value): option not recognized (%s).",
            optionname.c_str());
}

PLEARN_IMPLEMENT_ABSTRACT_OBJECT(
    Optimizer,
    "Base class for optimizers that adjust a set of parameter Vars to minimize a cost Var.",
    "NO HELP");

extern void varDeepCopyField(Var& field, CopiesMap& copies);

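//! Transforms a shallow copy into a deep copy: the cost, parameters,
//! update_for_measure and temp_grad fields are deep-copied, then build()
//! is called to rebuild the propagation path on the copied variables.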
void Optimizer::makeDeepCopyFromShallowCopy(map<const void*, void*>& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    varDeepCopyField(cost, copies);
    deepCopyField(params, copies);
    deepCopyField(update_for_measure, copies);
    deepCopyField(temp_grad, copies);
    if (measurers.size() > 0) {
        PLWARNING(
            "In Optimizer::makeDeepCopyFromShallowCopy - The 'measurers' field "
            "won't be deep copied, since the deepCopy method is not currently "
            "implemented in the class Measurer.");
    }
    build();
}

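//! Registers a measurer; duplicates are ignored (appendIfNotThereAlready).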
void Optimizer::addMeasurer(Measurer& measurer)
{
    measurers.appendIfNotThereAlready(&measurer);
}

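//! Calls each registered measurer in turn and returns true if one of them
//! requests early stopping. Note that the short-circuit '||' skips the
//! remaining measurers once one has returned true.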
bool Optimizer::measure(int t, const Vec& costs)
{
    bool stop = false;
    for (int i = 0; i < measurers.size(); i++)
        stop = stop || measurers[i]->measure(t, costs);
    return stop;
}

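//! Compares the analytic gradient of cost with respect to params against a
//! finite-difference estimate (delegated to Func::verifyGradient). The
//! first form evaluates within [minval, maxval]; the second evaluates at
//! the current parameter values.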
void Optimizer::verifyGradient(real minval, real maxval, real step)
{
    Func f(params, cost);
    f->verifyGradient(minval, maxval, step);
}

void Optimizer::verifyGradient(real step)
{
    Func f(params, cost);
    Vec p(params.nelems());
    params >> p;
    f->verifyGradient(p, step);
}

Optimizer::~Optimizer()
{}

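//! Fills res with the normalized histogram of v over [mini, maxi] using n
//! equal-width bins: each value is mapped to its bin, values falling
//! outside the range are clipped into the first or last bin and counted in
//! noutliers, and the counts are finally divided by v.length().
//!
//! A minimal usage sketch (names and values are hypothetical, 'opt' being
//! any concrete Optimizer instance):
//!     Vec v = ...;     // data to summarize
//!     Vec res(10);     // one slot per bin
//!     int noutliers;
//!     opt->computeRepartition(v, 10, 0.0, 1.0, res, noutliers);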
void Optimizer::computeRepartition(
    Vec v, int n, real mini, real maxi,
    Vec res, int& noutliers)
{
    res.clear();
    noutliers = 0;
    for (int i = 0; i < v.length(); i++) {
        real k = (v[i] - mini) / (maxi - mini);
        int j = int(k * n);
        if (j >= n) {
            noutliers++;
            j = n - 1;
        }
        if (j < 0) {
            noutliers++;
            j = 0;
        }
        res[j]++;
    }
    for (int i = 0; i < n; i++) {
        res[i] /= v.length();
    }
}

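//! Computes the gradient of opt->cost with respect to opt->params by
//! clearing the stored gradients, seeding the cost gradient with 1, and
//! running a combined fprop/bprop over the propagation path; the result is
//! copied into 'gradient'.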
void Optimizer::computeGradient(Optimizer* opt, const Vec& gradient)
{
    opt->proppath.clearGradient();
    opt->params.clearGradient();
    opt->cost->gradient[0] = 1;
    opt->proppath.fbprop();
    opt->params.copyGradientTo(gradient);
}

#ifdef DEBUGCG
extern GhostScript* gs;
#endif

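//! Same as computeGradient() but seeds the cost gradient with -1, so
//! 'gradient' receives the descent direction (the negated gradient); in
//! DEBUGCG builds the current point is also drawn via GhostScript.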
void Optimizer::computeOppositeGradient(Optimizer* opt, const Vec& gradient)
{
    opt->proppath.clearGradient();
    opt->params.clearGradient();
    opt->cost->gradient[0] = -1;
    opt->proppath.fbprop();
    opt->params.copyGradientTo(gradient);
#ifdef DEBUGCG
    gs->setcolor("blue");
    gs->drawCircle(opt->params[0]->value[0], opt->params[0]->value[1], 0.02);
#endif
}

} // end of namespace PLearn