add some sanity checks

git-svn-id: https://mosesdecoder.svn.sourceforge.net/svnroot/mosesdecoder/trunk@1668 1f5c12ca-751b-0410-a591-d2e778427230
jfouet 2008-05-14 16:19:00 +00:00
parent 13f6a427c5
commit cff8d4702b
2 changed files with 23 additions and 11 deletions

View File

@@ -29,7 +29,7 @@ float intersect (float m1, float b1,float m2,float b2){
}
statscore Optimizer::LineOptimize(const Point& origin,Point direction,Point& bestpoint){
direction.normalize();//we pass by value so changing is ok
// we are looking for the best Point on the line y=Origin+x*direction
float min_int=0.00001;
@@ -38,11 +38,11 @@ statscore Optimizer::LineOptimize(const Point& origin,Point direction,Point& bestpoint){
thresholdlist.push_back(pair<float,vector<unsigned> >(MINFLOAT,vector<unsigned>()));
-for(int S=0;S<N;S++){
+for(int S=0;S<size();S++){
//first we determine the translation with the best feature score for each sentence and each value of x
multimap<float,unsigned> gradient;
vector<float> f0;
-for(unsigned j=0;j<FData[S].size();j++){
+for(unsigned j=0;j<FData->get(S).size();j++){
gradient.insert(pair<float,unsigned>(direction*(FData->get(S,j)),j));//gradient of the feature function for this particular target sentence
f0[j]=origin*FData->get(S,j);//compute the feature function at the origin point
}
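The hunk above replaces the cached sentence count N and the raw FData[S] indexing with calls through the FeatureData accessors, so the loop bounds always come from the container that is actually loaded. A minimal sketch of that accessor pattern follows, using a simplified stand-in for FeatureData (its member layout and return types are assumptions, not the real Moses class):

    // Sketch: loop over the feature data through its size()/get() accessors
    // instead of a separately cached sentence count N, so the bounds always
    // match whatever data is currently loaded. FeatureData here is a
    // simplified stand-in, not the real Moses class.
    #include <vector>

    typedef std::vector<float> FeatureVector;

    class FeatureData {
        std::vector<std::vector<FeatureVector> > data; // one inner vector per sentence
    public:
        unsigned size() const { return data.size(); }
        const std::vector<FeatureVector>& get(unsigned S) const { return data[S]; }
        const FeatureVector& get(unsigned S, unsigned j) const { return data[S][j]; }
    };

    void scan(const FeatureData* FData) {
        for (unsigned S = 0; S < FData->size(); S++)            // was: S < N
            for (unsigned j = 0; j < FData->get(S).size(); j++) // was: FData[S].size()
                (void)FData->get(S, j);                         // j-th candidate of sentence S
    }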
@@ -152,7 +152,7 @@ statscore Optimizer::LineOptimize(const Point& origin,Point direction,Point& bestpoint){
list<threshold>::iterator lit2;
statscore bestscore=MINFLOAT;
for(lit2=thresholdlist.begin();lit2!=thresholdlist.end();lit2++){
-assert(lit2->second.size()==N);
+assert(lit2->second.size()==FData->size());
statscore cur=GetStatScore(lit2->second);
if(cur>bestscore){
bestscore=cur;
@@ -168,7 +168,18 @@ statscore Optimizer::LineOptimize(const Point& origin,Point direction,Point& bestpoint){
};
-Point SimpleOptimizer::run(const Point& init){
+Point Optimizer::Run(const Point& init){
+if(!FData){
+cerr<<"error trying to optimize without Feature loaded"<<endl;
+exit(2);
+}
+if(!scorer){
+cerr<<"error trying to optimize without a Scorer loaded"<<endl;
+exit(2);
+}
+return TrueRun(init);
+}
+Point SimpleOptimizer::TrueRun(const Point& init){
assert(dimension==init.size());
Point cur=init;
statscore prevscore=FLT_MAX;
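The heart of the commit is this Run/TrueRun split: the non-virtual Run wrapper performs the sanity checks and only then hands off to the virtual TrueRun hook that subclasses implement. Below is a self-contained sketch of that pattern, with Point, Scorer and FeatureData reduced to stubs; only the checks themselves mirror the patch.

    // Sketch of the Run/TrueRun split introduced above: the non-virtual
    // wrapper does the sanity checks, the virtual hook does the work.
    // Point, Scorer and FeatureData are stubs, not the real Moses classes.
    #include <cstdlib>
    #include <iostream>
    using namespace std;

    struct Point {};
    struct Scorer {};
    struct FeatureData {};

    class Optimizer {
    protected:
        Scorer* scorer;
        FeatureData* FData;
    public:
        Optimizer() : scorer(NULL), FData(NULL) {}
        virtual ~Optimizer() {}
        void SetScorer(Scorer* S) { scorer = S; }
        void SetFData(FeatureData* F) { FData = F; }

        // Non-virtual wrapper: fail loudly instead of crashing inside TrueRun.
        Point Run(const Point& init) {
            if (!FData) {
                cerr << "error trying to optimize without Feature loaded" << endl;
                exit(2);
            }
            if (!scorer) {
                cerr << "error trying to optimize without a Scorer loaded" << endl;
                exit(2);
            }
            return TrueRun(init);
        }

        // Subclasses put the actual optimization algorithm here.
        virtual Point TrueRun(const Point& init) = 0;
    };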

View File

@@ -21,7 +21,7 @@ class Optimizer{
Scorer * scorer;
FeatureData * FData;
/**number of lambda parameters*/
-unsigned dimension;
+const unsigned dimension;
Optimizer(unsigned d):dimension(d),scorer(NULL),FData(NULL){};
void SetScorer(Scorer *S);
void SetFData(FeatureData *F);
@@ -29,10 +29,11 @@ class Optimizer{
delete scorer;
delete FData;
}
-/**Number of sentences in the tuning set*/
-unsigned N;
-/**main function that perform an optimization*/
-virtual Point run(const Point& init);
+unsigned size(){return (FData?FData->size():0);}
+/**Generic wrapper around TrueRun to check a few things. Non virtual*/
+Point Run(const Point& init);
+/**main function that perform an optimization*/
+virtual Point TrueRun(const Point& init);
/**given a set of lambdas, get the nbest for each sentence*/
vector<unsigned> Get1bests(const Point& param);
/**given a set of nbests, get the Statistical score*/
@@ -48,7 +49,7 @@ class SimpleOptimizer: public Optimizer{
private: float eps;
public:
SimpleOptimizer(unsigned dim,float _eps):Optimizer(dim),eps(_eps){};
-Point run(const Point& init);
+virtual Point TrueRun(const Point& init);
};
#endif
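Continuing the stub classes from the sketch after the first file above, a hypothetical subclass and call site show how the split is meant to be used: client code only ever calls Run, so the checks cannot be bypassed. The class name, constructor argument and trivial TrueRun body are placeholders, not taken from the commit.

    // Hypothetical usage, reusing the stub classes from the previous sketch;
    // the subclass name, constructor argument and TrueRun body are placeholders.
    class MySimpleOptimizer : public Optimizer {
        float eps; // stand-in for SimpleOptimizer's convergence threshold
    public:
        MySimpleOptimizer(float _eps) : eps(_eps) {}
        virtual Point TrueRun(const Point& init) { return init; } // placeholder body
    };

    int main() {
        MySimpleOptimizer opt(0.0001f);
        Scorer scorer;
        FeatureData fdata;
        opt.SetScorer(&scorer); // skip these two calls and Run() exits with an
        opt.SetFData(&fdata);   // error message instead of dereferencing NULL
        Point start;
        Point best = opt.Run(start);
        (void)best;
        return 0;
    }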