Mirror of https://github.com/moses-smt/mosesdecoder.git, synced 2024-12-26 13:23:25 +03:00.
Commit 90fe3514bc ("remove unused code"), parent commit 902741681a.
@ -264,61 +264,6 @@ vector< vector<const Word*> > MosesDecoder::runChartDecoder(const std::string& s
|
||||
return translations;
|
||||
}
|
||||
|
||||
void MosesDecoder::outputNBestList(const std::string& source, size_t sentenceid,
|
||||
size_t nBestSize, float bleuObjectiveWeight, float bleuScoreWeight,
|
||||
bool distinctNbest, bool avgRefLength, string filename, ofstream& streamOut)
|
||||
{
|
||||
StaticData &staticData = StaticData::InstanceNonConst();
|
||||
bool chartDecoding = staticData.IsChart();
|
||||
initialize(staticData, source, sentenceid, bleuObjectiveWeight, bleuScoreWeight, avgRefLength, chartDecoding);
|
||||
|
||||
if (chartDecoding) {
|
||||
m_chartManager = new ChartManager(*m_sentence);
|
||||
m_chartManager->ProcessSentence();
|
||||
ChartTrellisPathList nBestList;
|
||||
m_chartManager->CalcNBest(nBestSize, nBestList, distinctNbest);
|
||||
|
||||
cerr << "generate nbest list " << filename << endl;
|
||||
cerr << "not implemented.." << endl;
|
||||
exit(1);
|
||||
if (filename != "") {
|
||||
ofstream out(filename.c_str());
|
||||
if (!out) {
|
||||
ostringstream msg;
|
||||
msg << "Unable to open " << filename;
|
||||
throw runtime_error(msg.str());
|
||||
}
|
||||
// TODO: handle sentence id (for now always 0)
|
||||
// OutputNBestList(const ChartTrellisPathList &nBestList, const ChartHypothesis *bestHypo, const TranslationSystem* system, long translationId, false)
|
||||
// OutputNBest(out, nBestList, StaticData::Instance().GetOutputFactorOrder(),m_manager->GetTranslationSystem(), 0, false);
|
||||
out.close();
|
||||
} else {
|
||||
// OutputNBest(streamOut, nBestList, StaticData::Instance().GetOutputFactorOrder(),m_manager->GetTranslationSystem(), sentenceid, false);
|
||||
}
|
||||
} else {
|
||||
// run the decoder
|
||||
m_manager = new Moses::Manager(0,*m_sentence, staticData.GetSearchAlgorithm());
|
||||
m_manager->ProcessSentence();
|
||||
TrellisPathList nBestList;
|
||||
m_manager->CalcNBest(nBestSize, nBestList, distinctNbest);
|
||||
|
||||
if (filename != "") {
|
||||
ofstream out(filename.c_str());
|
||||
if (!out) {
|
||||
ostringstream msg;
|
||||
msg << "Unable to open " << filename;
|
||||
throw runtime_error(msg.str());
|
||||
}
|
||||
// TODO: handle sentence id (for now always 0)
|
||||
//OutputNBest(out, nBestList, StaticData::Instance().GetOutputFactorOrder(),m_manager->GetTranslationSystem(), 0, false);
|
||||
out.close();
|
||||
} else {
|
||||
//OutputNBest(streamOut, nBestList, StaticData::Instance().GetOutputFactorOrder(),m_manager->GetTranslationSystem(), sentenceid, false);
|
||||
streamOut.flush();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void MosesDecoder::initialize(StaticData& staticData, const std::string& source, size_t sentenceid,
|
||||
float bleuObjectiveWeight, float bleuScoreWeight, bool avgRefLength, bool chartDecoding)
|
||||
{
|
||||
|
@ -95,15 +95,6 @@ public:
|
||||
bool distinct,
|
||||
size_t rank,
|
||||
size_t epoch);
|
||||
void outputNBestList(const std::string& source,
|
||||
size_t sentenceid,
|
||||
size_t nBestSize,
|
||||
float bleuObjectiveWeight,
|
||||
float bleuScoreWeight,
|
||||
bool distinctNbest,
|
||||
bool avgRefLength,
|
||||
std::string filename,
|
||||
std::ofstream& streamOut);
|
||||
void initialize(Moses::StaticData& staticData, const std::string& source, size_t sentenceid,
|
||||
float bleuObjectiveWeight, float bleuScoreWeight, bool avgRefLength, bool chartDecoding);
|
||||
void updateHistory(const std::vector<const Moses::Word*>& words);
|
||||
|
@ -129,7 +129,6 @@ int main(int argc, char** argv)
|
||||
bool most_violated, most_violated_reg, all_violated, max_bleu_diff;
|
||||
bool feature_confidence, signed_counts;
|
||||
float decay_core, decay_sparse, core_r0, sparse_r0;
|
||||
bool selective, summed;
|
||||
float bleu_weight_fear_factor;
|
||||
bool hildreth;
|
||||
float add2lm;
|
||||
@ -159,8 +158,6 @@ int main(int argc, char** argv)
|
||||
("rescale-slack", po::value<bool>(&rescaleSlack)->default_value(false), "Rescale slack in 1-slack formulation")
|
||||
("add2lm", po::value<float>(&add2lm)->default_value(0.0), "Add the specified amount to all LM weights")
|
||||
("hildreth", po::value<bool>(&hildreth)->default_value(false), "Prefer Hildreth over analytical update")
|
||||
("selective", po::value<bool>(&selective)->default_value(false), "Build constraints for every feature")
|
||||
("summed", po::value<bool>(&summed)->default_value(false), "Sum up all constraints")
|
||||
("model-plus-bleu", po::value<bool>(&modelPlusBleu)->default_value(false), "Use the sum of model score and +/- bleu to select hope and fear translations")
|
||||
("simple-history-bleu", po::value<bool>(&simpleHistoryBleu)->default_value(false), "Simple history Bleu")
|
||||
|
||||
@ -446,7 +443,6 @@ int main(int argc, char** argv)
|
||||
if (rank == 0) {
|
||||
cerr << "Optimising using Mira" << endl;
|
||||
cerr << "slack: " << slack << ", learning rate: " << mira_learning_rate << endl;
|
||||
cerr << "selective: " << selective << endl;
|
||||
if (normaliseMargin)
|
||||
cerr << "sigmoid parameter: " << sigmoidParam << endl;
|
||||
}
|
||||
@ -1238,29 +1234,19 @@ int main(int argc, char** argv)
|
||||
} else
|
||||
update_status = 1;
|
||||
} else if (kbest) {
|
||||
if (selective)
|
||||
update_status = ((MiraOptimiser*)optimiser)->updateWeightsHopeFearSelective(
|
||||
weightUpdate, featureValuesHope, featureValuesFear, bleuScoresHope, bleuScoresFear,
|
||||
modelScoresHope, modelScoresFear, learning_rate, rank, epoch);
|
||||
else if (summed)
|
||||
update_status = ((MiraOptimiser*)optimiser)->updateWeightsHopeFearSummed(
|
||||
weightUpdate, featureValuesHope, featureValuesFear, bleuScoresHope, bleuScoresFear,
|
||||
modelScoresHope, modelScoresFear, learning_rate, rank, epoch, rescaleSlack, makePairs);
|
||||
else {
|
||||
if (batchSize == 1 && featureValuesHope[0].size() == 1 && !hildreth) {
|
||||
cerr << "Rank " << rank << ", epoch " << epoch << ", model score hope: " << modelScoresHope[0][0] << endl;
|
||||
cerr << "Rank " << rank << ", epoch " << epoch << ", model score fear: " << modelScoresFear[0][0] << endl;
|
||||
update_status = ((MiraOptimiser*) optimiser)->updateWeightsAnalytically(
|
||||
if (batchSize == 1 && featureValuesHope[0].size() == 1 && !hildreth) {
|
||||
cerr << "Rank " << rank << ", epoch " << epoch << ", model score hope: " << modelScoresHope[0][0] << endl;
|
||||
cerr << "Rank " << rank << ", epoch " << epoch << ", model score fear: " << modelScoresFear[0][0] << endl;
|
||||
update_status = ((MiraOptimiser*) optimiser)->updateWeightsAnalytically(
|
||||
weightUpdate, featureValuesHope[0][0], featureValuesFear[0][0],
|
||||
bleuScoresHope[0][0], bleuScoresFear[0][0], modelScoresHope[0][0],
|
||||
modelScoresFear[0][0], learning_rate, rank, epoch);
|
||||
} else {
|
||||
cerr << "Rank " << rank << ", epoch " << epoch << ", model score hope: " << modelScoresHope[0][0] << endl;
|
||||
cerr << "Rank " << rank << ", epoch " << epoch << ", model score fear: " << modelScoresFear[0][0] << endl;
|
||||
update_status = optimiser->updateWeightsHopeFear(weightUpdate, featureValuesHope,
|
||||
} else {
|
||||
cerr << "Rank " << rank << ", epoch " << epoch << ", model score hope: " << modelScoresHope[0][0] << endl;
|
||||
cerr << "Rank " << rank << ", epoch " << epoch << ", model score fear: " << modelScoresFear[0][0] << endl;
|
||||
update_status = optimiser->updateWeightsHopeFear(weightUpdate, featureValuesHope,
|
||||
featureValuesFear, bleuScoresHope, bleuScoresFear, modelScoresHope,
|
||||
modelScoresFear, learning_rate, rank, epoch);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// model_hope_fear
|
||||
|
@ -441,293 +441,5 @@ size_t MiraOptimiser::updateWeightsAnalytically(
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
 * MIRA weight update that builds one constraint per non-zero FEATURE of each
 * hope/fear pair (rather than one constraint per pair), then solves for the
 * alphas with Hildreth's optimiser and accumulates the resulting update into
 * weightUpdate.
 *
 * @param weightUpdate      accumulator the computed update is added to
 * @param featureValuesHope per-sentence lists of hope feature vectors
 * @param featureValuesFear per-sentence lists of fear feature vectors (parallel to hope)
 * @param bleuScoresHope    BLEU scores of hope translations (same shape)
 * @param bleuScoresFear    BLEU scores of fear translations (same shape)
 * @param modelScoresHope   model scores of hope translations (same shape)
 * @param modelScoresFear   model scores of fear translations (same shape)
 * @param learning_rate     scales the summed update when != 1
 * @param rank              MPI rank (logging only)
 * @param epoch             training epoch (logging only)
 * @param updatePosition    when != -1, restrict the update to that sentence of the batch
 * @return 0 if an update was applied, 1 if no constraint was violated
 *
 * Fixes vs. original: inner loops no longer shadow the sentence index `i`;
 * removed the unused local `n_sparse`; the updatePosition comparison uses an
 * explicit size_t cast instead of a signed/unsigned mix. Behavior unchanged.
 */
size_t MiraOptimiser::updateWeightsHopeFearSelective(
  Moses::ScoreComponentCollection& weightUpdate,
  const std::vector< std::vector<Moses::ScoreComponentCollection> >& featureValuesHope,
  const std::vector< std::vector<Moses::ScoreComponentCollection> >& featureValuesFear,
  const std::vector<std::vector<float> >& bleuScoresHope,
  const std::vector<std::vector<float> >& bleuScoresFear,
  const std::vector<std::vector<float> >& modelScoresHope,
  const std::vector<std::vector<float> >& modelScoresFear,
  float learning_rate,
  size_t rank,
  size_t epoch,
  int updatePosition)
{
  // vector of feature value differences for all created constraints
  vector<ScoreComponentCollection> nonZeroFeatures;
  vector<float> lossMinusModelScoreDiffs;

  // Make constraints for new hypothesis translations
  float epsilon = 0.0001;
  int violatedConstraintsBefore = 0;

  // iterate over input sentences (1 (online) or more (batch))
  for (size_t i = 0; i < featureValuesHope.size(); ++i) {
    if (updatePosition != -1) {
      // restrict the update to a single sentence of the batch
      if (i < (size_t)updatePosition)
        continue;
      else if (i > (size_t)updatePosition)
        break;
    }

    // Pick all pairs[j,j] of hope and fear translations for one input sentence
    for (size_t j = 0; j < featureValuesHope[i].size(); ++j) {
      ScoreComponentCollection featureValueDiff = featureValuesHope[i][j];
      featureValueDiff.MinusEquals(featureValuesFear[i][j]);
      if (featureValueDiff.GetL1Norm() == 0) {
        cerr << "Rank " << rank << ", epoch " << epoch << ", features equal --> skip" << endl;
        continue;
      }

      // check if constraint is violated
      float loss = bleuScoresHope[i][j] - bleuScoresFear[i][j];
      float modelScoreDiff = modelScoresHope[i][j] - modelScoresFear[i][j];
      float diff = 0;
      if (loss > modelScoreDiff)
        diff = loss - modelScoreDiff;
      if (diff > epsilon)
        ++violatedConstraintsBefore;
      cerr << "Rank " << rank << ", epoch " << epoch << ", constraint: " << modelScoreDiff << " >= " << loss << " (current violation: " << diff << ")" << endl;

      // iterate over difference vector and add a constraint for every non-zero feature
      FVector features = featureValueDiff.GetScoresVector();
      size_t n_core = 0, n_sparse_hope = 0, n_sparse_fear = 0;
      // core (dense) features: one constraint each
      for (size_t k = 0; k < features.coreSize(); ++k) {
        if (features[k] != 0.0) {
          ++n_core;
          ScoreComponentCollection f;
          f.Assign(k, features[k]);
          nonZeroFeatures.push_back(f);
        }
      }

      // sparse features: split into hope-side (positive) and fear-side (negative)
      vector<ScoreComponentCollection> nonZeroFeaturesHope;
      vector<ScoreComponentCollection> nonZeroFeaturesFear;
      for (FVector::iterator it = features.begin(); it != features.end(); ++it) {
        if (it->second != 0.0) {
          ScoreComponentCollection f;
          f.Assign((it->first).name(), it->second);
          cerr << "Rank " << rank << ", epoch " << epoch << ", f: " << f << endl;

          if (it->second > 0.0) {
            ++n_sparse_hope;
            nonZeroFeaturesHope.push_back(f);
          } else {
            ++n_sparse_fear;
            nonZeroFeaturesFear.push_back(f);
          }
        }
      }

      // distribute the violation over all constraints; hope-side sparse
      // features get a 10% boost
      float n = n_core + n_sparse_hope + n_sparse_fear;
      for (size_t k = 0; k < n_core; ++k)
        lossMinusModelScoreDiffs.push_back(diff/n);
      for (size_t k = 0; k < n_sparse_hope; ++k) {
        nonZeroFeatures.push_back(nonZeroFeaturesHope[k]);
        lossMinusModelScoreDiffs.push_back((diff/n)*1.1);
      }
      for (size_t k = 0; k < n_sparse_fear; ++k) {
        nonZeroFeatures.push_back(nonZeroFeaturesFear[k]);
        lossMinusModelScoreDiffs.push_back(diff/n);
      }
      cerr << "Rank " << rank << ", epoch " << epoch << ", core diff: " << diff/n << endl;
      cerr << "Rank " << rank << ", epoch " << epoch << ", hope diff: " << ((diff/n)*1.1) << endl;
      cerr << "Rank " << rank << ", epoch " << epoch << ", fear diff: " << diff/n << endl;
    }
  }

  assert(nonZeroFeatures.size() == lossMinusModelScoreDiffs.size());

  // run optimisation: compute alphas for all given constraints
  vector<float> alphas;
  ScoreComponentCollection summedUpdate;
  if (violatedConstraintsBefore > 0) {
    cerr << "Rank " << rank << ", epoch " << epoch << ", number of constraints passed to optimizer: " << nonZeroFeatures.size() << endl;
    alphas = Hildreth::optimise(nonZeroFeatures, lossMinusModelScoreDiffs, m_slack);

    // Update the weight vector according to the alphas and the feature value differences
    // * w' = w' + SUM alpha_i * (h_i(oracle) - h_i(hypothesis))
    for (size_t k = 0; k < nonZeroFeatures.size(); ++k) {
      float alpha = alphas[k];
      cerr << "Rank " << rank << ", epoch " << epoch << ", alpha: " << alpha << endl;
      if (alpha != 0) {
        ScoreComponentCollection update(nonZeroFeatures[k]);
        update.MultiplyEquals(alpha);

        // sum updates
        summedUpdate.PlusEquals(update);
      }
    }
  } else {
    cerr << "Rank " << rank << ", epoch " << epoch << ", no constraint violated for this batch" << endl;
    // return 0;
    return 1;
  }

  // apply learning rate
  if (learning_rate != 1) {
    cerr << "Rank " << rank << ", epoch " << epoch << ", apply learning rate " << learning_rate << " to update." << endl;
    summedUpdate.MultiplyEquals(learning_rate);
  }

  // scale update by BLEU of oracle (for batch size 1 only)
  if (featureValuesHope.size() == 1) {
    if (m_scale_update) {
      cerr << "Rank " << rank << ", epoch " << epoch << ", scaling summed update with oracle bleu score " << bleuScoresHope[0][0] << endl;
      summedUpdate.MultiplyEquals(bleuScoresHope[0][0]);
    }
  }

  //cerr << "Rank " << rank << ", epoch " << epoch << ", update: " << summedUpdate << endl;
  weightUpdate.PlusEquals(summedUpdate);
  return 0;
}
|
||||
|
||||
/**
 * MIRA weight update using a single summed (1-slack style) constraint built
 * from all hope/fear pairs of the batch, solved analytically:
 * alpha = min(C, violation / ||featureDiff||^2) as in Crammer & Singer 2006,
 * adjusted for one slack per Joachims 2009 (OP4 margin rescaling / OP5 slack
 * rescaling).
 *
 * @param weightUpdate      accumulator the computed update is added to
 * @param featureValuesHope per-sentence lists of hope feature vectors
 * @param featureValuesFear per-sentence lists of fear feature vectors (parallel to hope)
 * @param bleuScoresHope    BLEU scores of hope translations (same shape)
 * @param bleuScoresFear    BLEU scores of fear translations (same shape)
 * @param modelScoresHope   model scores of hope translations (same shape)
 * @param modelScoresFear   model scores of fear translations (same shape)
 * @param learning_rate     (unused here; kept for interface parity)
 * @param rank              MPI rank (logging only)
 * @param epoch             training epoch (logging only)
 * @param rescaleSlack      rescale each pair's constraint by its loss difference
 * @param makePairs         pair hope[j] with fear[j] per sentence; otherwise
 *                          average all hope vs. all fear translations
 * @return 0 if an update was applied, 1 if no constraint was violated
 *
 * BUGFIX vs. original: under rescaleSlack the original multiplied the running
 * sum (averagedFeatureDiffs) by the current pair's lossDiff — rescaling all
 * previously accumulated constraints on every iteration, contradicting its own
 * log message. The current pair's featureValueDiff is scaled instead.
 */
size_t MiraOptimiser::updateWeightsHopeFearSummed(
  Moses::ScoreComponentCollection& weightUpdate,
  const std::vector< std::vector<Moses::ScoreComponentCollection> >& featureValuesHope,
  const std::vector< std::vector<Moses::ScoreComponentCollection> >& featureValuesFear,
  const std::vector<std::vector<float> >& bleuScoresHope,
  const std::vector<std::vector<float> >& bleuScoresFear,
  const std::vector<std::vector<float> >& modelScoresHope,
  const std::vector<std::vector<float> >& modelScoresFear,
  float learning_rate,
  size_t rank,
  size_t epoch,
  bool rescaleSlack,
  bool makePairs)
{
  // summed feature value difference and violation for the single constraint
  ScoreComponentCollection averagedFeatureDiffs;
  float averagedViolations = 0;

  // Make constraints for new hypothesis translations
  float epsilon = 0.0001;
  int violatedConstraintsBefore = 0;

  if (!makePairs) {
    // Build one constraint from the average of all hope translations minus
    // the average of all fear translations.
    ScoreComponentCollection featureValueDiff;
    float lossHope = 0, lossFear = 0, modelScoreHope = 0, modelScoreFear = 0, hopeCount = 0, fearCount = 0;
    // add all hope vectors
    for (size_t i = 0; i < featureValuesHope.size(); ++i) {
      for (size_t j = 0; j < featureValuesHope[i].size(); ++j) {
        featureValueDiff.PlusEquals(featureValuesHope[i][j]);
        lossHope += bleuScoresHope[i][j];
        modelScoreHope += modelScoresHope[i][j];
        ++hopeCount;
      }
    }
    // NOTE(review): assumes at least one hope and one fear translation;
    // an empty batch would divide by zero here — confirm callers guarantee this.
    lossHope /= hopeCount;
    modelScoreHope /= hopeCount;

    // subtract all fear vectors
    for (size_t i = 0; i < featureValuesFear.size(); ++i) {
      for (size_t j = 0; j < featureValuesFear[i].size(); ++j) {
        featureValueDiff.MinusEquals(featureValuesFear[i][j]);
        lossFear += bleuScoresFear[i][j];
        modelScoreFear += modelScoresFear[i][j];
        ++fearCount;
      }
    }
    lossFear /= fearCount;
    modelScoreFear /= fearCount;

    if (featureValueDiff.GetL1Norm() == 0) {
      cerr << "Rank " << rank << ", epoch " << epoch << ", features equal --> skip" << endl;
      cerr << "Rank " << rank << ", epoch " << epoch << ", no constraint violated for this batch" << endl;
      return 1;
    }

    // check if constraint is violated
    float lossDiff = lossHope - lossFear;
    float modelScoreDiff = modelScoreHope - modelScoreFear;
    float diff = 0;
    if (lossDiff > modelScoreDiff)
      diff = lossDiff - modelScoreDiff;
    if (diff > epsilon)
      ++violatedConstraintsBefore;
    cerr << "Rank " << rank << ", epoch " << epoch << ", constraint: " << modelScoreDiff << " >= " << lossDiff << " (current violation: " << diff << ")" << endl;

    // add constraint
    averagedFeatureDiffs = featureValueDiff;
    averagedViolations = diff;
  } else {
    // iterate over input sentences (1 (online) or more (batch))
    for (size_t i = 0; i < featureValuesHope.size(); ++i) {
      // Pick all pairs[j,j] of hope and fear translations for one input sentence and add them up
      for (size_t j = 0; j < featureValuesHope[i].size(); ++j) {
        ScoreComponentCollection featureValueDiff = featureValuesHope[i][j];
        featureValueDiff.MinusEquals(featureValuesFear[i][j]);
        if (featureValueDiff.GetL1Norm() == 0) {
          cerr << "Rank " << rank << ", epoch " << epoch << ", features equal --> skip" << endl;
          continue;
        }

        // check if constraint is violated
        float lossDiff = bleuScoresHope[i][j] - bleuScoresFear[i][j];
        float modelScoreDiff = modelScoresHope[i][j] - modelScoresFear[i][j];
        if (rescaleSlack) {
          cerr << "Rank " << rank << ", epoch " << epoch << ", modelScoreDiff scaled by lossDiff: " << modelScoreDiff << " --> " << modelScoreDiff*lossDiff << endl;
          modelScoreDiff *= lossDiff;
        }
        float diff = 0;
        if (lossDiff > modelScoreDiff)
          diff = lossDiff - modelScoreDiff;
        if (diff > epsilon)
          ++violatedConstraintsBefore;
        cerr << "Rank " << rank << ", epoch " << epoch << ", constraint: " << modelScoreDiff << " >= " << lossDiff << " (current violation: " << diff << ")" << endl;

        // add constraint
        if (rescaleSlack) {
          // BUGFIX: scale this pair's difference, not the running sum.
          featureValueDiff.MultiplyEquals(lossDiff);
          cerr << "Rank " << rank << ", epoch " << epoch << ", featureValueDiff scaled by lossDiff." << endl;
        }
        averagedFeatureDiffs.PlusEquals(featureValueDiff);
        averagedViolations += diff;
      }
    }
  }

  // divide by number of constraints (1/n)
  if (!makePairs) {
    averagedFeatureDiffs.DivideEquals(featureValuesHope[0].size());
  } else {
    averagedFeatureDiffs.DivideEquals(featureValuesHope[0].size());
    averagedViolations /= featureValuesHope[0].size();
  }
  //cerr << "Rank " << rank << ", epoch " << epoch << ", averaged feature diffs: " << averagedFeatureDiffs << endl;
  cerr << "Rank " << rank << ", epoch " << epoch << ", averaged violations: " << averagedViolations << endl;

  if (violatedConstraintsBefore > 0) {
    // compute alpha for given constraint: (loss diff - model score diff) / || feature value diff ||^2
    // featureValueDiff.GetL2Norm() * featureValueDiff.GetL2Norm() == featureValueDiff.InnerProduct(featureValueDiff)
    // from Crammer&Singer 2006: alpha = min {C , l_t/ ||x||^2}
    // adjusted for 1 slack according to Joachims 2009, OP4 (margin rescaling), OP5 (slack rescaling)
    float squaredNorm = averagedFeatureDiffs.GetL2Norm() * averagedFeatureDiffs.GetL2Norm();
    float alpha = averagedViolations / squaredNorm;
    cerr << "Rank " << rank << ", epoch " << epoch << ", unclipped alpha: " << alpha << endl;
    if (m_slack > 0 ) {
      // clip alpha to [-C, C]
      if (alpha > m_slack) {
        alpha = m_slack;
      } else if (alpha < m_slack*(-1)) {
        alpha = m_slack*(-1);
      }
    }
    cerr << "Rank " << rank << ", epoch " << epoch << ", clipped alpha: " << alpha << endl;

    // compute update
    averagedFeatureDiffs.MultiplyEquals(alpha);
    weightUpdate.PlusEquals(averagedFeatureDiffs);
    return 0;
  } else {
    cerr << "Rank " << rank << ", epoch " << epoch << ", no constraint violated for this batch" << endl;
    return 1;
  }
}
|
||||
|
||||
}
|
||||
|
||||
|
@ -112,31 +112,6 @@ public:
|
||||
size_t rank,
|
||||
size_t epoch,
|
||||
int updatePosition = -1);
|
||||
size_t updateWeightsHopeFearSelective(
|
||||
Moses::ScoreComponentCollection& weightUpdate,
|
||||
const std::vector<std::vector<Moses::ScoreComponentCollection> >& featureValuesHope,
|
||||
const std::vector<std::vector<Moses::ScoreComponentCollection> >& featureValuesFear,
|
||||
const std::vector<std::vector<float> >& bleuScoresHope,
|
||||
const std::vector<std::vector<float> >& bleuScoresFear,
|
||||
const std::vector<std::vector<float> >& modelScoresHope,
|
||||
const std::vector<std::vector<float> >& modelScoresFear,
|
||||
float learning_rate,
|
||||
size_t rank,
|
||||
size_t epoch,
|
||||
int updatePosition = -1);
|
||||
size_t updateWeightsHopeFearSummed(
|
||||
Moses::ScoreComponentCollection& weightUpdate,
|
||||
const std::vector<std::vector<Moses::ScoreComponentCollection> >& featureValuesHope,
|
||||
const std::vector<std::vector<Moses::ScoreComponentCollection> >& featureValuesFear,
|
||||
const std::vector<std::vector<float> >& bleuScoresHope,
|
||||
const std::vector<std::vector<float> >& bleuScoresFear,
|
||||
const std::vector<std::vector<float> >& modelScoresHope,
|
||||
const std::vector<std::vector<float> >& modelScoresFear,
|
||||
float learning_rate,
|
||||
size_t rank,
|
||||
size_t epoch,
|
||||
bool rescaleSlack,
|
||||
bool makePairs);
|
||||
size_t updateWeightsAnalytically(
|
||||
Moses::ScoreComponentCollection& weightUpdate,
|
||||
Moses::ScoreComponentCollection& featureValuesHope,
|
||||
|
Loading…
Reference in New Issue
Block a user