Nicola Bertoldi 2014-01-15 16:42:02 +01:00
parent a098550f33
commit bd83999264
91 changed files with 820 additions and 842 deletions

View File

@ -59,8 +59,9 @@ public:
static float CalcEstimateOfBestScore(const TargetPhraseCollection &,
const StackVec &);
size_t GetSize() const
{ return m_collection.size(); }
size_t GetSize() const {
return m_collection.size();
}
//! @todo dunno
const StackVec &GetStackVec() const {

View File

@ -33,7 +33,8 @@ DecodeGraph::~DecodeGraph()
}
//! Add another decode step to the graph
void DecodeGraph::Add(DecodeStep *decodeStep) {
void DecodeGraph::Add(DecodeStep *decodeStep)
{
m_steps.push_back(decodeStep);
decodeStep->SetContainer(this);
}
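The Add method above wires a two-way link: the graph stores the new decode step, and the step records which graph owns it via SetContainer. A minimal sketch of that owner/back-pointer pattern, using placeholder types rather than the real Moses classes:

    #include <vector>

    struct Graph;                        // forward declaration for the back-pointer

    struct Step {
      const Graph* container = nullptr;  // which graph owns this step
      void SetContainer(const Graph* g) { container = g; }
    };

    struct Graph {
      std::vector<Step*> steps;
      void Add(Step* s) {                // mirrors DecodeGraph::Add above
        steps.push_back(s);
        s->SetContainer(this);
      }
    };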

View File

@ -86,7 +86,7 @@ public:
return m_backoff;
}
void SetBackoff(size_t backoff){
void SetBackoff(size_t backoff) {
m_backoff = backoff;
}

View File

@ -105,10 +105,12 @@ public:
void RemoveFeature(const FeatureFunction *ff);
void SetContainer(const DecodeGraph *container)
{ m_container = container; }
const DecodeGraph *GetContainer() const
{ return m_container; }
void SetContainer(const DecodeGraph *container) {
m_container = container;
}
const DecodeGraph *GetContainer() const {
return m_container;
}
};

View File

@ -85,7 +85,7 @@ void DecodeStepTranslation::Process(const TranslationOption &inputPartialTranslO
outPhrase.Merge(targetPhrase, m_newOutputFactors);
outPhrase.Evaluate(inputPath.GetPhrase(), m_featuresToApply); // need to do this as all non-transcores would be screwed up
cerr << "DecodeStepTranslation::Process is calling outPhrase.Evaluate(inputPath.GetPhrase(), m_featuresToApply)" << endl;
cerr << "DecodeStepTranslation::Process is calling outPhrase.Evaluate(inputPath.GetPhrase(), m_featuresToApply)" << endl;
TranslationOption *newTransOpt = new TranslationOption(sourceWordsRange, outPhrase);
assert(newTransOpt != NULL);

View File

@ -115,15 +115,13 @@ FFState* ConstrainedDecoding::Evaluate(
bool match = (searchPos == 0) && (ref->GetSize() == outputPhrase.GetSize());
if (!m_negate) {
score = match ? 0 : - std::numeric_limits<float>::infinity();
}
else {
} else {
score = !match ? 0 : - std::numeric_limits<float>::infinity();
}
} else if (m_negate) {
// keep all derivations
score = 0;
}
else {
} else {
score = (searchPos != NOT_FOUND) ? 0 : - std::numeric_limits<float>::infinity();
}
@ -155,8 +153,7 @@ FFState* ConstrainedDecoding::EvaluateChart(
if (!m_negate) {
score = match ? 0 : - std::numeric_limits<float>::infinity();
}
else {
} else {
score = !match ? 0 : - std::numeric_limits<float>::infinity();
}
} else if (m_negate) {

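The ConstrainedDecoding scoring logic whose braces are being reshaped above boils down to a small rule: a hypothesis scores 0 while it is still compatible with the reference and minus infinity once it is not, with m_negate inverting the test. A standalone sketch of that rule, with illustrative names rather than the actual Moses signatures:

    #include <limits>

    // For a finished hypothesis, "ok" means the output equals the reference;
    // for a partial one, it means the output still occurs inside the reference.
    inline float ConstraintScore(bool finished, bool ok, bool negate)
    {
      const float kFail = -std::numeric_limits<float>::infinity();
      if (finished) {
        return (ok != negate) ? 0.0f : kFail;  // must (or, negated, must not) match
      }
      if (negate) {
        return 0.0f;                           // negated constraint keeps all partial derivations
      }
      return ok ? 0.0f : kFail;                // prune partial hypotheses that already diverged
    }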
View File

@ -66,7 +66,8 @@ void CoveredReferenceFeature::Evaluate(const InputType &input
estimatedFutureScore->Assign(this, scores);
}
void CoveredReferenceFeature::Load() {
void CoveredReferenceFeature::Load()
{
InputFileStream refFile(m_path);
std::string line;
const StaticData &staticData = StaticData::Instance();

View File

@ -37,8 +37,7 @@ class CoveredReferenceFeature : public StatefulFeatureFunction
public:
CoveredReferenceFeature(const std::string &line)
:StatefulFeatureFunction(1, line)
{
:StatefulFeatureFunction(1, line) {
m_tuneable = true;
ReadParameters();
}

View File

@ -79,8 +79,9 @@ public:
, ScoreComponentCollection &estimatedFutureScore) const
{}
void SetContainer(const DecodeStep *container)
{ m_container = container; }
void SetContainer(const DecodeStep *container) {
m_container = container;
}
protected:
std::vector<FactorType> m_input;

View File

@ -49,12 +49,9 @@ void DynamicCacheBasedLanguageModel::SetPreComputedScores()
float DynamicCacheBasedLanguageModel::GetPreComputedScores(const unsigned int age)
{
if (age < precomputedScores.size())
{
if (age < precomputedScores.size()) {
return precomputedScores.at(age);
}
else
{
} else {
return precomputedScores.at(m_maxAge);
}
}
@ -117,8 +114,7 @@ float DynamicCacheBasedLanguageModel::Evaluate_Whole_String(const TargetPhrase&
if (it != m_cache.end()) { //found!
score = ((*it).second).second;
VERBOSE(3,"cblm::Evaluate_Whole_String: found w:|" << w << "| actual score:|" << ((*it).second).second << "| score:|" << score << "|" << std::endl);
}
else{
} else {
score = m_lower_score;
}
@ -144,8 +140,7 @@ float DynamicCacheBasedLanguageModel::Evaluate_All_Substrings(const TargetPhrase
if (it != m_cache.end()) { //found!
score += ((*it).second).second;
VERBOSE(3,"cblm::Evaluate_All_Substrings: found w:|" << w << "| actual score:|" << ((*it).second).second << "| score:|" << score << "|" << std::endl);
}
else{
} else {
score += m_lower_score;
}
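Both helpers touched in this file follow the same lookup-with-fallback pattern: GetPreComputedScores clamps the age to m_maxAge, and the Evaluate_* methods use the cached score when the phrase is found and m_lower_score otherwise. A compact sketch of that pattern, with hypothetical member names standing in for the Moses ones and assuming the score table holds m_maxAge + 1 entries:

    #include <algorithm>
    #include <string>
    #include <unordered_map>
    #include <vector>

    struct CacheLmSketch {
      std::vector<float> precomputedScores;          // indexed by age, size maxAge + 1
      unsigned int maxAge = 0;
      std::unordered_map<std::string, float> cache;  // phrase -> cached score
      float lowerScore = -10.0f;                     // fallback for phrases not in the cache

      // ages beyond the table are clamped to the last (oldest) entry
      float PreComputedScore(unsigned int age) const {
        return precomputedScores.at(std::min(age, maxAge));
      }

      // cached phrases use their stored score, everything else the floor score
      float Score(const std::string& phrase) const {
        auto it = cache.find(phrase);
        return it != cache.end() ? it->second : lowerScore;
      }
    };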

View File

@ -20,11 +20,11 @@ void InternalStructStatelessFF::Evaluate(const InputType &input
, const TargetPhrase &targetPhrase
, ScoreComponentCollection &scoreBreakdown
, ScoreComponentCollection *estimatedFutureScore) const
{
{
cerr << "HHHHH" << scoreBreakdown << endl;
scoreBreakdown.PlusEquals(this, 66);
/* FactorList f_mask;
cerr << "HHHHH" << scoreBreakdown << endl;
scoreBreakdown.PlusEquals(this, 66);
/* FactorList f_mask;
f_mask.push_back(0);
//if(inputPath.GetPhrase().GetStringRep(f_mask).)
int score =50;
@ -35,7 +35,7 @@ scoreBreakdown.PlusEquals(this, 66);
}
}
scoreBreakdown.PlusEquals(this, score);
*/
*/
}
}

View File

@ -13,8 +13,9 @@ public:
:StatelessFeatureFunction(line)
{}
bool IsUseable(const FactorMask &mask) const
{ return true; }
bool IsUseable(const FactorMask &mask) const {
return true;
}
void Evaluate(const Phrase &source
, const TargetPhrase &targetPhrase

View File

@ -45,10 +45,10 @@ public:
{}
/*
/*
virtual void Evaluate(const InputType &source
, ScoreComponentCollection &scoreBreakdown) const;
*/
*/
};
}

View File

@ -87,8 +87,9 @@ public:
, const TargetPhraseCollection *targetPhrases
, const void *ptNode);
const TargetPhraseCollection *GetTargetPhrases(const PhraseDictionary &phraseDictionary) const;
const TargetPhrases &GetTargetPhrases() const
{ return m_targetPhrases; }
const TargetPhrases &GetTargetPhrases() const {
return m_targetPhrases;
}
// pointer to internal node in phrase-table. Since this is implementation dependent, this is a void*
const void *GetPtNode(const PhraseDictionary &phraseDictionary) const;

View File

@ -15,20 +15,21 @@
using namespace std;
/////////////////////////
void read_ini(const char *inifile, string &model, string &words, string &wordstxt){
void read_ini(const char *inifile, string &model, string &words, string &wordstxt)
{
ifstream ifs(inifile);
string line;
getline(ifs, line);
while(ifs){
while(ifs) {
unsigned int pos = line.find("=");
string key = line.substr(0, pos);
string value = line.substr(pos+1, line.size()-pos);
if(key=="MODEL"){
if(key=="MODEL") {
model = value;
}else if(key=="WORDS"){
} else if(key=="WORDS") {
words = value;
}else if(key=="WORDSTXT"){
} else if(key=="WORDSTXT") {
wordstxt = value;
}
getline(ifs, line);
@ -46,30 +47,30 @@ private:
DALM::State *state;
public:
DALMState(unsigned short order){
DALMState(unsigned short order) {
state = new DALM::State(order);
}
DALMState(const DALMState &from){
DALMState(const DALMState &from) {
state = new DALM::State(*from.state);
}
virtual ~DALMState(){
virtual ~DALMState() {
delete state;
}
virtual int Compare(const FFState& other) const{
virtual int Compare(const FFState& other) const {
const DALMState &o = static_cast<const DALMState &>(other);
if(state->get_count() < o.state->get_count()) return -1;
else if(state->get_count() > o.state->get_count()) return 1;
else return state->compare(o.state);
}
DALM::State *get_state() const{
DALM::State *get_state() const {
return state;
}
void refresh(){
void refresh() {
state->refresh();
}
};
@ -135,13 +136,15 @@ void LanguageModelDALM::Load()
m_beginSentenceFactor = collection.AddFactor(BOS_);
}
const FFState *LanguageModelDALM::EmptyHypothesisState(const InputType &/*input*/) const{
const FFState *LanguageModelDALM::EmptyHypothesisState(const InputType &/*input*/) const
{
DALMState *s = new DALMState(m_nGramOrder);
m_lm->init_state(*s->get_state());
return s;
}
void LanguageModelDALM::CalcScore(const Phrase &phrase, float &fullScore, float &ngramScore, size_t &oovCount) const{
void LanguageModelDALM::CalcScore(const Phrase &phrase, float &fullScore, float &ngramScore, size_t &oovCount) const
{
fullScore = 0;
ngramScore = 0;
@ -183,7 +186,8 @@ void LanguageModelDALM::CalcScore(const Phrase &phrase, float &fullScore, float
delete dalm_state;
}
LMResult LanguageModelDALM::GetValue(DALM::VocabId wid, DALM::State* finalState) const{
LMResult LanguageModelDALM::GetValue(DALM::VocabId wid, DALM::State* finalState) const
{
LMResult ret;
// last word is unk?
@ -204,7 +208,8 @@ LMResult LanguageModelDALM::GetValue(const Word &word, DALM::State* finalState)
return GetValue(wid, finalState);
}
FFState *LanguageModelDALM::Evaluate(const Hypothesis &hypo, const FFState *ps, ScoreComponentCollection *out) const{
FFState *LanguageModelDALM::Evaluate(const Hypothesis &hypo, const FFState *ps, ScoreComponentCollection *out) const
{
// In this function, we only compute the LM scores of n-grams that overlap a
// phrase boundary. Phrase-internal scores are taken directly from the
// translation option.
@ -212,7 +217,7 @@ FFState *LanguageModelDALM::Evaluate(const Hypothesis &hypo, const FFState *ps,
const DALMState *dalm_ps = static_cast<const DALMState *>(ps);
// Empty phrase added? nothing to be done
if (hypo.GetCurrTargetLength() == 0){
if (hypo.GetCurrTargetLength() == 0) {
return dalm_ps ? new DALMState(*dalm_ps) : NULL;
}
@ -225,7 +230,7 @@ FFState *LanguageModelDALM::Evaluate(const Hypothesis &hypo, const FFState *ps,
std::size_t position = begin;
float score = 0.0;
for(; position < adjust_end; position++){
for(; position < adjust_end; position++) {
score += GetValue(hypo.GetWord(position), dalm_state->get_state()).score;
}
@ -256,7 +261,8 @@ FFState *LanguageModelDALM::Evaluate(const Hypothesis &hypo, const FFState *ps,
return dalm_state;
}
FFState *LanguageModelDALM::EvaluateChart(const ChartHypothesis& hypo, int featureID, ScoreComponentCollection *out) const{
FFState *LanguageModelDALM::EvaluateChart(const ChartHypothesis& hypo, int featureID, ScoreComponentCollection *out) const
{
LanguageModelChartState *ret = new LanguageModelChartState(hypo, featureID, m_nGramOrder);
// initialize language model context state
DALMState *dalm_state = new DALMState(m_nGramOrder);
@ -375,8 +381,7 @@ DALM::VocabId LanguageModelDALM::GetVocabId(const Factor *factor) const
iter = m_vocabMap.left.find(factor);
if (iter != m_vocabMap.left.end()) {
return iter->second;
}
else {
} else {
// not in mapping. Must be UNK
return m_vocab->unk();
}
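The read_ini routine reformatted at the top of this file is a plain KEY=VALUE reader: each line is split at the first '=', and the MODEL, WORDS and WORDSTXT values are routed into the three output strings. A self-contained sketch of that loop, with an explicit guard for lines that contain no '=' (the original relies on the file format to avoid them):

    #include <fstream>
    #include <map>
    #include <string>

    // Read simple KEY=VALUE pairs, as read_ini does for MODEL / WORDS / WORDSTXT.
    std::map<std::string, std::string> ReadKeyValueIni(const char* path)
    {
      std::map<std::string, std::string> entries;
      std::ifstream ifs(path);
      std::string line;
      while (std::getline(ifs, line)) {
        const std::string::size_type pos = line.find('=');
        if (pos == std::string::npos) continue;        // skip lines without '='
        entries[line.substr(0, pos)] = line.substr(pos + 1);
      }
      return entries;
    }
    // Usage sketch: model = entries["MODEL"]; words = entries["WORDS"]; wordstxt = entries["WORDSTXT"];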

View File

@ -10,7 +10,7 @@ using namespace std;
namespace Moses
{
NeuralLMWrapper::NeuralLMWrapper(const std::string &line)
:LanguageModelSingleFactor(line)
:LanguageModelSingleFactor(line)
{
// This space intentionally left blank
}

View File

@ -2,8 +2,9 @@
#include "SingleFactor.h"
namespace nplm {
class neuralLM;
namespace nplm
{
class neuralLM;
}
namespace Moses

View File

@ -545,7 +545,8 @@ void OutputWordGraph(std::ostream &outputWordGraphStream, const Hypothesis *hypo
outputWordGraphStream << endl;
}
void Manager::GetOutputLanguageModelOrder( std::ostream &out, const Hypothesis *hypo ) {
void Manager::GetOutputLanguageModelOrder( std::ostream &out, const Hypothesis *hypo )
{
Phrase translation;
hypo->GetOutputPhrase(translation);
const std::vector<const StatefulFeatureFunction*> &statefulFFs = StatefulFeatureFunction::GetStatefulFeatureFunctions();

View File

@ -107,8 +107,9 @@ public:
return m_transOpt;
}
size_t GetItemSetSize() const
{ return m_covered.size(); }
size_t GetItemSetSize() const {
return m_covered.size();
}
private:
typedef boost::unordered_set<RuleCubeItem*,

View File

@ -135,7 +135,7 @@ int Sentence::Read(std::istream& in,const std::vector<FactorType>& factorOrder)
std::vector< std::map<std::string, std::string> >::iterator dlt_meta_it = dlt_meta.begin();
for (dlt_meta_it = dlt_meta.begin(); dlt_meta_it != dlt_meta.end(); ++dlt_meta_it) {
DynamicCacheBasedLanguageModel& cblm = DynamicCacheBasedLanguageModel::InstanceNonConst();
std::cerr << "&cblm:|" << &cblm << "|" << std::endl;
std::cerr << "&cblm:|" << &cblm << "|" << std::endl;
PhraseDictionaryDynamicCacheBased& cbtm = PhraseDictionaryDynamicCacheBased::InstanceNonConst();
if ((*dlt_meta_it).find("cbtm") != (*dlt_meta_it).end()) {
if (&cbtm) cbtm.Insert((*dlt_meta_it)["cbtm"]);

View File

@ -522,8 +522,7 @@ bool StaticData::LoadData(Parameter *parameter)
if (iter == featureNameOverride.end()) {
// feature name not override
m_registry.Construct(feature, line);
}
else {
} else {
// replace feature name with new name
string newName = iter->second;
feature = newName;

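The branch above is a straightforward name-override lookup: if the feature's name has an entry in featureNameOverride, the replacement name is used when constructing the feature, otherwise the original name is kept. A minimal sketch of that lookup with a hypothetical helper, not the real StaticData code:

    #include <map>
    #include <string>

    // Return the overridden feature name if one is configured, else the original.
    std::string ResolveFeatureName(const std::map<std::string, std::string>& overrides,
                                   const std::string& feature)
    {
      auto it = overrides.find(feature);
      return it == overrides.end() ? feature : it->second;
    }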
View File

@ -728,8 +728,9 @@ public:
return m_placeHolderFactor;
}
const FeatureRegistry &GetFeatureRegistry() const
{ return m_registry; }
const FeatureRegistry &GetFeatureRegistry() const {
return m_registry;
}
/** check whether we should be using the old code to support binary phrase-table.
** eventually, we'll stop support the binary phrase-table and delete this legacy code

View File

@ -99,7 +99,7 @@ public:
return m_scoreBreakdown;
}
/*
/*
//TODO: Probably shouldn't copy this, but otherwise ownership is unclear
void SetSourcePhrase(const Phrase& p) {
m_sourcePhrase=p;
@ -107,7 +107,7 @@ public:
const Phrase& GetSourcePhrase() const {
return m_sourcePhrase;
}
*/
*/
void SetTargetLHS(const Word *lhs) {
m_lhsTarget = lhs;
}

View File

@ -72,8 +72,7 @@ public:
//! delete an entry from the collection
void Remove(const size_t pos) {
if (pos < m_collection.size())
{
if (pos < m_collection.size()) {
m_collection.erase(begin() + pos);
}
}

View File

@ -39,8 +39,7 @@ void Timer::start(const char* msg)
if (stopped) {
start_time = util::WallTime() - (stop_time - start_time);
stopped = false;
}
else {
} else {
start_time = util::WallTime();
running = true;
}
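The restyled branch above encodes the resume-after-stop rule of Timer::start: on resume, start_time is pushed forward by exactly the length of the pause, so that elapsed time (now minus start_time) keeps counting only the intervals in which the timer was actually running. A tiny illustration of the same arithmetic:

    // started at t=10, stopped at t=15  -> 5s already accumulated
    // resumed at t=40: start_time = 40 - (15 - 10) = 35
    // queried at t=42: elapsed = 42 - 35 = 7s (5s before the pause + 2s after)
    double ResumedStartTime(double start_time, double stop_time, double now)
    {
      return now - (stop_time - start_time);
    }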

View File

@ -35,7 +35,7 @@ PhraseDictionaryDynamicCacheBased *PhraseDictionaryDynamicCacheBased::s_instance
//! contructor
PhraseDictionaryDynamicCacheBased::PhraseDictionaryDynamicCacheBased(const std::string &line)
: PhraseDictionary(line)
: PhraseDictionary(line)
{
std::cerr << "Initializing PhraseDictionaryDynamicCacheBased feature..." << std::endl;
@ -114,15 +114,13 @@ const TargetPhraseCollection *PhraseDictionaryDynamicCacheBased::GetTargetPhrase
TargetPhraseCollection* tpc = NULL;
VERBOSE(3,"source:|" << source << "|" << std::endl);
cacheMap::const_iterator it = m_cacheTM.find(source);
if(it != m_cacheTM.end())
{
if(it != m_cacheTM.end()) {
VERBOSE(3,"source:|" << source << "| FOUND" << std::endl);
tpc = (it->second).first;
std::vector<const TargetPhrase*>::const_iterator it2 = tpc->begin();
while (it2 != tpc->end())
{
while (it2 != tpc->end()) {
((TargetPhrase*) *it2)->Evaluate(source, GetFeaturesToApply());
it2++;
}
@ -145,7 +143,8 @@ ChartRuleLookupManager* PhraseDictionaryDynamicCacheBased::CreateRuleLookupManag
UTIL_THROW(util::Exception, "Phrase table used in chart decoder");
}
void PhraseDictionaryDynamicCacheBased::SetScoreType(size_t type) {
void PhraseDictionaryDynamicCacheBased::SetScoreType(size_t type)
{
#ifdef WITH_THREADS
boost::shared_lock<boost::shared_mutex> read_lock(m_cacheLock);
#endif
@ -157,8 +156,7 @@ void PhraseDictionaryDynamicCacheBased::SetScoreType(size_t type) {
&& m_score_type != CBTM_SCORE_TYPE_COSINE
&& m_score_type != CBTM_SCORE_TYPE_HYPERBOLA_REWARD
&& m_score_type != CBTM_SCORE_TYPE_POWER_REWARD
&& m_score_type != CBTM_SCORE_TYPE_EXPONENTIAL_REWARD )
{
&& m_score_type != CBTM_SCORE_TYPE_EXPONENTIAL_REWARD ) {
VERBOSE(2, "This score type " << m_score_type << " is unknown. Instead used " << CBTM_SCORE_TYPE_HYPERBOLA << "." << std::endl);
m_score_type = CBTM_SCORE_TYPE_HYPERBOLA;
}
@ -167,7 +165,8 @@ void PhraseDictionaryDynamicCacheBased::SetScoreType(size_t type) {
}
void PhraseDictionaryDynamicCacheBased::SetMaxAge(unsigned int age) {
void PhraseDictionaryDynamicCacheBased::SetMaxAge(unsigned int age)
{
#ifdef WITH_THREADS
boost::shared_lock<boost::shared_mutex> read_lock(m_cacheLock);
#endif
@ -185,7 +184,7 @@ ostream& operator<<(ostream& out, const PhraseDictionaryDynamicCacheBased& phras
float PhraseDictionaryDynamicCacheBased::decaying_score(const int age)
{
float sc;
switch(m_score_type){
switch(m_score_type) {
case CBTM_SCORE_TYPE_HYPERBOLA:
sc = (float) 1.0/age - 1.0;
break;
@ -220,26 +219,21 @@ void PhraseDictionaryDynamicCacheBased::SetPreComputedScores(const unsigned int
boost::shared_lock<boost::shared_mutex> lock(m_cacheLock);
#endif
float sc;
for (size_t i=0; i<=m_maxAge; i++)
{
if (i==m_maxAge){
for (size_t i=0; i<=m_maxAge; i++) {
if (i==m_maxAge) {
if ( m_score_type == CBTM_SCORE_TYPE_HYPERBOLA
|| m_score_type == CBTM_SCORE_TYPE_POWER
|| m_score_type == CBTM_SCORE_TYPE_EXPONENTIAL
|| m_score_type == CBTM_SCORE_TYPE_COSINE )
{
|| m_score_type == CBTM_SCORE_TYPE_COSINE ) {
sc = decaying_score(m_maxAge)/numScoreComponent;
}
else{ // m_score_type = CBTM_SCORE_TYPE_XXXXXXXXX_REWARD
} else { // m_score_type = CBTM_SCORE_TYPE_XXXXXXXXX_REWARD
sc = 0.0;
}
}
else{
} else {
sc = decaying_score(i)/numScoreComponent;
}
Scores sc_vec;
for (size_t j=0; j<numScoreComponent; j++)
{
for (size_t j=0; j<numScoreComponent; j++) {
sc_vec.push_back(sc); //CHECK THIS SCORE
}
precomputedScores.push_back(sc_vec);
@ -250,12 +244,9 @@ void PhraseDictionaryDynamicCacheBased::SetPreComputedScores(const unsigned int
Scores PhraseDictionaryDynamicCacheBased::GetPreComputedScores(const unsigned int age)
{
VERBOSE(3,"age:|" << age << "|" << std::endl);
if (age < precomputedScores.size())
{
if (age < precomputedScores.size()) {
return precomputedScores.at(age);
}
else
{
} else {
return precomputedScores.at(m_maxAge);
}
}
@ -285,8 +276,7 @@ void PhraseDictionaryDynamicCacheBased::Update(std::vector<std::string> entries,
std::vector<std::string> pp;
std::vector<std::string>::iterator it;
for(it = entries.begin(); it!=entries.end(); it++)
{
for(it = entries.begin(); it!=entries.end(); it++) {
pp.clear();
pp = TokenizeMultiCharSeparator((*it), "|||");
VERBOSE(3,"pp[0]:|" << pp[0] << "|" << std::endl);
@ -331,8 +321,7 @@ void PhraseDictionaryDynamicCacheBased::Update(Phrase sp, Phrase tp, int age)
cacheMap::const_iterator it = m_cacheTM.find(sp);
VERBOSE(3,"sp:|" << sp << "|" << std::endl);
if(it!=m_cacheTM.end())
{
if(it!=m_cacheTM.end()) {
VERBOSE(3,"sp:|" << sp << "| FOUND" << std::endl);
// p is found
// here we have to remove the target phrase from targetphrasecollection and from the TargetAgeMap
@ -344,18 +333,15 @@ void PhraseDictionaryDynamicCacheBased::Update(Phrase sp, Phrase tp, int age)
const Phrase* tp_ptr = NULL;
bool found = false;
size_t tp_pos=0;
while (!found && tp_pos < tpc->GetSize())
{
while (!found && tp_pos < tpc->GetSize()) {
tp_ptr = (const Phrase*) tpc->GetTargetPhrase(tp_pos);
if (tp == *tp_ptr)
{
if (tp == *tp_ptr) {
found = true;
continue;
}
tp_pos++;
}
if (!found)
{
if (!found) {
VERBOSE(3,"tp:|" << tp << "| NOT FOUND" << std::endl);
std::auto_ptr<TargetPhrase> targetPhrase(new TargetPhrase(tp));
@ -368,9 +354,7 @@ void PhraseDictionaryDynamicCacheBased::Update(Phrase sp, Phrase tp, int age)
VERBOSE(3,"ac size:|" << ac->size() << "|" << std::endl);
VERBOSE(3,"tp:|" << tp << "| INSERTED" << std::endl);
}
}
else
{
} else {
VERBOSE(3,"sp:|" << sp << "| NOT FOUND" << std::endl);
// p is not found
// create target collection
@ -399,8 +383,7 @@ void PhraseDictionaryDynamicCacheBased::Decay()
boost::shared_lock<boost::shared_mutex> lock(m_cacheLock);
#endif
cacheMap::iterator it;
for(it = m_cacheTM.begin(); it!=m_cacheTM.end(); it++)
{
for(it = m_cacheTM.begin(); it!=m_cacheTM.end(); it++) {
Decay((*it).first);
}
}
@ -410,8 +393,7 @@ void PhraseDictionaryDynamicCacheBased::Decay(Phrase p)
VERBOSE(3,"p:|" << p << "|" << std::endl);
cacheMap::const_iterator it = m_cacheTM.find(p);
VERBOSE(3,"searching:|" << p << "|" << std::endl);
if (it != m_cacheTM.end())
{
if (it != m_cacheTM.end()) {
VERBOSE(3,"found:|" << p << "|" << std::endl);
//p is found
@ -420,8 +402,7 @@ void PhraseDictionaryDynamicCacheBased::Decay(Phrase p)
AgeCollection* ac = TgtCollAgePair.second;
//loop in inverted order to allow a correct deletion of std::vectors tpc and ac
for (int tp_pos = tpc->GetSize() - 1 ; tp_pos >= 0; tp_pos--)
{
for (int tp_pos = tpc->GetSize() - 1 ; tp_pos >= 0; tp_pos--) {
VERBOSE(3,"p:|" << p << "|" << std::endl);
unsigned int tp_age = ac->at(tp_pos); //increase the age by 1
tp_age++; //increase the age by 1
@ -431,29 +412,26 @@ void PhraseDictionaryDynamicCacheBased::Decay(Phrase p)
VERBOSE(3,"p:|" << p << "| " << "tp_age:|" << tp_age << "| " << "*tp_ptr:|" << *tp_ptr << "|" << std::endl);
VERBOSE(3,"precomputedScores.size():|" << precomputedScores.size() << "|" << std::endl);
if (tp_age > m_maxAge){
if (tp_age > m_maxAge) {
VERBOSE(3,"tp_age:|" << tp_age << "| TOO BIG" << std::endl);
tpc->Remove(tp_pos); //delete entry in the Target Phrase Collection
ac->erase(ac->begin() + tp_pos); //delete entry in the Age Collection
m_entries--;
}
else{
} else {
VERBOSE(3,"tp_age:|" << tp_age << "| STILL GOOD" << std::endl);
tp_ptr->GetScoreBreakdown().Assign(this, GetPreComputedScores(tp_age));
ac->at(tp_pos) = tp_age;
VERBOSE(3,"precomputedScores.size():|" << precomputedScores.size() << "|" << std::endl);
}
}
if (tpc->GetSize() == 0)
{// delete the entry from m_cacheTM in case it points to an empty TargetPhraseCollection and AgeCollection
if (tpc->GetSize() == 0) {
// delete the entry from m_cacheTM in case it points to an empty TargetPhraseCollection and AgeCollection
(((*it).second).second)->clear();
delete ((*it).second).second;
delete ((*it).second).first;
m_cacheTM.erase(p);
}
}
else
{
} else {
//do nothing
VERBOSE(3,"p:|" << p << "| NOT FOUND" << std::endl);
}
@ -493,8 +471,7 @@ void PhraseDictionaryDynamicCacheBased::Clear()
boost::shared_lock<boost::shared_mutex> lock(m_cacheLock);
#endif
cacheMap::const_iterator it;
for(it = m_cacheTM.begin(); it!=m_cacheTM.end(); it++)
{
for(it = m_cacheTM.begin(); it!=m_cacheTM.end(); it++) {
(((*it).second).second)->clear();
delete ((*it).second).second;
delete ((*it).second).first;
@ -510,13 +487,11 @@ void PhraseDictionaryDynamicCacheBased::Print() const
boost::shared_lock<boost::shared_mutex> read_lock(m_cacheLock);
#endif
cacheMap::const_iterator it;
for(it = m_cacheTM.begin(); it!=m_cacheTM.end(); it++)
{
for(it = m_cacheTM.begin(); it!=m_cacheTM.end(); it++) {
std::string source = (it->first).ToString();
TargetPhraseCollection* tpc = (it->second).first;
TargetPhraseCollection::iterator itr;
for(itr = tpc->begin(); itr != tpc->end(); itr++)
{
for(itr = tpc->begin(); itr != tpc->end(); itr++) {
std::string target = (*itr)->ToString();
std::cout << source << " ||| " << target << std::endl;
}
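The Decay path reshaped throughout this file ages every cached target phrase by one, drops those whose age exceeds m_maxAge, rescores the survivors from the precomputed table, and finally erases source entries whose collections have emptied. A hedged sketch of that aging step over a simplified cache, using plain STL containers instead of the Moses TargetPhraseCollection/AgeCollection pair:

    #include <string>
    #include <unordered_map>
    #include <vector>

    // Simplified stand-in for one cache entry: parallel phrase/age vectors.
    struct CachedEntry {
      std::vector<std::string> targets;
      std::vector<unsigned int> ages;
    };

    // Age every target by one; drop those older than maxAge; drop emptied entries.
    void Decay(std::unordered_map<std::string, CachedEntry>& cache, unsigned int maxAge)
    {
      for (auto it = cache.begin(); it != cache.end(); ) {
        CachedEntry& e = it->second;
        // iterate backwards so erasing does not shift the indices still to visit
        for (int pos = static_cast<int>(e.targets.size()) - 1; pos >= 0; --pos) {
          const unsigned int age = ++e.ages[pos];
          if (age > maxAge) {
            e.targets.erase(e.targets.begin() + pos);
            e.ages.erase(e.ages.begin() + pos);
          }
          // in Moses the surviving entry is also rescored via GetPreComputedScores(age)
        }
        if (e.targets.empty()) it = cache.erase(it);
        else ++it;
      }
    }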

View File

@ -42,7 +42,8 @@
#define PI 3.14159265
namespace Moses {
namespace Moses
{
/** Implementation of a Cache-based phrase table.
*/

View File

@ -66,8 +66,7 @@ void PhraseDictionaryTransliteration::GetTargetPhraseCollection(InputPath &input
// already in cache
const TargetPhraseCollection *tpColl = iter->second.first;
inputPath.SetTargetPhrases(*this, tpColl, NULL);
}
else {
} else {
// TRANSLITERATE
char *ptr = tmpnam(NULL);
string inFile(ptr);

View File

@ -143,8 +143,7 @@ void TranslationOptionCollectionLattice::CreateTranslationOptions()
Add(transOpt);
}
}
else if (path.GetPhrase().GetSize() == 1) {
} else if (path.GetPhrase().GetSize() == 1) {
// unknown word processing
ProcessOneUnknownWord(path, path.GetWordsRange().GetEndPos(), 1, path.GetInputScore());
}

View File

@ -216,8 +216,7 @@ WordLattice::CreateTranslationOptionCollection() const
if (StaticData::Instance().GetUseLegacyPT()) {
rv = new TranslationOptionCollectionConfusionNet(*this, maxNoTransOptPerCoverage, translationOptionThreshold);
}
else {
} else {
rv = new TranslationOptionCollectionLattice(*this, maxNoTransOptPerCoverage, translationOptionThreshold);
}