Mirror of https://github.com/moses-smt/mosesdecoder.git, synced 2024-11-20 11:38:50 +03:00
Code refactoring: All FF now get access to AllOptions at load time, so they don't have to rely on StaticData.
parent 9528b56242
commit fdb5d9b282
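The same change is applied mechanically throughout the diff below: the virtual Load() hook on FeatureFunction gains an AllOptions const& parameter, every feature function, language model wrapper, and phrase table overrides the new signature, and the callers in StaticData pass options() down instead of letting each Load() pull configuration out of the StaticData singleton. A minimal sketch of the pattern follows; the class and member names in it are illustrative stand-ins, not the actual Moses declarations.

#include <string>

// Hypothetical stand-in for moses/parameters/AllOptions.h; the real class
// carries the full decoder configuration. The field below is illustrative.
struct AllOptions {
  std::string factor_delimiter;
};

// Stand-in for the FeatureFunction base class.
class FeatureFunction {
public:
  virtual ~FeatureFunction() {}
  // override to load model files; configuration now arrives as an argument
  // rather than being read from a global StaticData instance
  virtual void Load(AllOptions const& opts) {}
};

// A feature function reads what it needs from the options it is handed.
class ExampleFeature : public FeatureFunction {
public:
  void Load(AllOptions const& opts) {
    m_delimiter = opts.factor_delimiter;
  }
private:
  std::string m_delimiter;
};

// Loading code (compare StaticData::LoadFeatureFunctions below) then calls
//   ff->Load(options());
// handing its own AllOptions object to each feature function it loads.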
@@ -15,7 +15,7 @@ ExternalFeatureState::ExternalFeatureState(int stateSize, void *data)
   memcpy(m_data, data, stateSize);
 }
 
-void ExternalFeature::Load()
+void ExternalFeature::Load(AllOptions const& opts)
 {
   string nparam = "testing";
 
@@ -43,7 +43,7 @@ public:
   }
   ~ExternalFeature();
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   bool IsUseable(const FactorMask &mask) const {
     return true;
@@ -58,7 +58,7 @@ public:
     delete m_lmImpl;
   }
 
-  bool Load(const std::string &filePath
+  bool Load(AllOptions const& opts, const std::string &filePath
             , const std::vector<FactorType> &factorTypes
             , size_t nGramOrder) {
     m_factorTypes = FactorMask(factorTypes);
@@ -77,7 +77,7 @@ public:
       m_sentenceEndWord[factorType] = factorCollection.AddFactor(Output, factorType, EOS_);
     }
 
-    m_lmImpl->Load();
+    m_lmImpl->Load(opts);
   }
 
   LMResult GetValueForgotState(const std::vector<const Word*> &contextFactor, FFState &outState) const {
@@ -9,6 +9,7 @@
 #include "moses/TranslationModel/CompactPT/PhraseDictionaryCompact.h"
 #include "moses/Util.h"
 #include "moses/Phrase.h"
+#include "moses/parameters/AllOptions.h"
 
 void usage();
 
@@ -50,7 +51,8 @@ int main(int argc, char **argv)
   std::stringstream ss;
   ss << nscores;
   PhraseDictionaryCompact pdc("PhraseDictionaryCompact input-factor=0 output-factor=0 num-features=" + ss.str() + " path=" + ttable);
-  pdc.Load();
+  AllOptions opts;
+  pdc.Load(opts);
 
   std::string line;
   while(getline(std::cin, line)) {
@@ -52,7 +52,7 @@ void CoveredReferenceFeature::EvaluateWithSourceContext(const InputType &input
   estimatedScores->Assign(this, scores);
 }
 
-void CoveredReferenceFeature::Load()
+void CoveredReferenceFeature::Load(AllOptions const& opts)
 {
   InputFileStream refFile(m_path);
   std::string line;
@@ -44,7 +44,7 @@ public:
     ReadParameters();
   }
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   bool IsUseable(const FactorMask &mask) const {
     return true;
@@ -16,7 +16,7 @@ DeleteRules::DeleteRules(const std::string &line)
   ReadParameters();
 }
 
-void DeleteRules::Load()
+void DeleteRules::Load(AllOptions const& opts)
 {
   std::vector<FactorType> factorOrder;
   factorOrder.push_back(0); // unfactored for now
@@ -15,7 +15,7 @@ protected:
 public:
   DeleteRules(const std::string &line);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   bool IsUseable(const FactorMask &mask) const {
     return true;
@@ -323,7 +323,7 @@ void DynamicCacheBasedLanguageModel::Clear()
   m_cache.clear();
 }
 
-void DynamicCacheBasedLanguageModel::Load()
+void DynamicCacheBasedLanguageModel::Load(AllOptions const& opts)
 {
   // SetPreComputedScores();
   VERBOSE(2,"DynamicCacheBasedLanguageModel::Load()" << std::endl);
@@ -119,7 +119,7 @@ public:
     return true;
   }
 
-  void Load();
+  void Load(AllOptions const& opts);
   void Load(const std::string filestr);
   void Execute(std::string command);
   void SetParameter(const std::string& key, const std::string& value);
@@ -7,12 +7,13 @@
 #include <string>
 #include "moses/FeatureVector.h"
 #include "moses/TypeDef.h"
-
+#include "moses/parameters/AllOptions.h"
 #include <boost/shared_ptr.hpp>
 
 namespace Moses
 {
 
+class AllOptions;
 class Phrase;
 class TargetPhrase;
 class TranslationOptionList;
@@ -69,7 +70,7 @@ public:
   virtual ~FeatureFunction();
 
   //! override to load model files
-  virtual void Load() {
+  virtual void Load(AllOptions const& opts) {
   }
 
   static void ResetDescriptionCounts() {
@@ -51,7 +51,7 @@ GlobalLexicalModel::~GlobalLexicalModel()
   }
 }
 
-void GlobalLexicalModel::Load()
+void GlobalLexicalModel::Load(AllOptions const& opts)
 {
   FactorCollection &factorCollection = FactorCollection::Instance();
   const std::string& factorDelimiter = StaticData::Instance().GetFactorDelimiter();
@@ -57,7 +57,7 @@ private:
   std::vector<FactorType> m_inputFactorsVec, m_outputFactorsVec;
   std::string m_filePath;
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   float ScorePhrase( const TargetPhrase& targetPhrase ) const;
   float GetFromCacheOrScorePhrase( const TargetPhrase& targetPhrase ) const;
@@ -23,7 +23,7 @@ InputFeature::InputFeature(const std::string &line)
   s_instance = this;
 }
 
-void InputFeature::Load()
+void InputFeature::Load(AllOptions const& opts)
 {
 
   const PhraseDictionary *pt = PhraseDictionary::GetColl()[0];
@@ -23,7 +23,7 @@ public:
 
   InputFeature(const std::string &line);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   void SetParameter(const std::string& key, const std::string& value);
 
@@ -84,7 +84,7 @@ LexicalReordering::
 
 void
 LexicalReordering::
-Load()
+Load(AllOptions const& opts)
 {
   typedef LexicalReorderingTable LRTable;
   if (m_filePath.size())
@@ -33,7 +33,7 @@ class LexicalReordering : public StatefulFeatureFunction
 public:
   LexicalReordering(const std::string &line);
   virtual ~LexicalReordering();
-  void Load();
+  void Load(AllOptions const& opts);
 
   virtual
   bool
@@ -159,7 +159,7 @@ void Model1Feature::SetParameter(const std::string& key, const std::string& valu
   }
 }
 
-void Model1Feature::Load()
+void Model1Feature::Load(AllOptions const& opts)
 {
   FEATUREVERBOSE(2, GetScoreProducerDescription() << ": Loading source vocabulary from file " << m_fileNameVcbS << " ...");
   Model1Vocabulary vcbS;
@@ -99,7 +99,7 @@ private:
   Model1LexicalTable m_model1;
   const Factor* m_emptyWord;
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   // cache
   mutable boost::unordered_map<const InputType*, boost::unordered_map<const Factor*, float> > m_cache;
@@ -35,7 +35,7 @@ void OpSequenceModel :: readLanguageModel(const char *lmFile)
 }
 
 
-void OpSequenceModel::Load()
+void OpSequenceModel::Load(AllOptions const& opts)
 {
   readLanguageModel(m_lmPath.c_str());
 }
@@ -25,7 +25,7 @@ public:
   ~OpSequenceModel();
 
   void readLanguageModel(const char *);
-  void Load();
+  void Load(AllOptions const& opts);
 
   FFState* EvaluateWhenApplied(
     const Hypothesis& cur_hypo,
@@ -75,7 +75,7 @@ void PhraseOrientationFeature::SetParameter(const std::string& key, const std::s
 }
 
 
-void PhraseOrientationFeature::Load()
+void PhraseOrientationFeature::Load(AllOptions const& opts)
 {
   if ( !m_filenameTargetWordList.empty() ) {
     LoadWordList(m_filenameTargetWordList,m_targetWordList);
@@ -289,7 +289,7 @@ public:
 
   void SetParameter(const std::string& key, const std::string& value);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   void EvaluateInIsolation(const Phrase &source
                            , const TargetPhrase &targetPhrase
@@ -65,7 +65,7 @@ void PhrasePairFeature::SetParameter(const std::string& key, const std::string&
   }
 }
 
-void PhrasePairFeature::Load()
+void PhrasePairFeature::Load(AllOptions const& opts)
 {
   if (m_domainTrigger) {
     // domain trigger terms for each input document
@@ -44,7 +44,7 @@ class PhrasePairFeature: public StatelessFeatureFunction
 public:
   PhrasePairFeature(const std::string &line);
 
-  void Load();
+  void Load(AllOptions const& opts);
   void SetParameter(const std::string& key, const std::string& value);
 
   bool IsUseable(const FactorMask &mask) const;
@@ -88,7 +88,7 @@ void SoftSourceSyntacticConstraintsFeature::SetParameter(const std::string& key,
   }
 }
 
-void SoftSourceSyntacticConstraintsFeature::Load()
+void SoftSourceSyntacticConstraintsFeature::Load(AllOptions const& opts)
 {
   // don't change the loading order!
   LoadSourceLabelSet();
@@ -31,7 +31,7 @@ public:
 
   void SetParameter(const std::string& key, const std::string& value);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   void EvaluateInIsolation(const Phrase &source
                            , const TargetPhrase &targetPhrase
@@ -36,7 +36,7 @@ void SourceWordDeletionFeature::SetParameter(const std::string& key, const std::
   }
 }
 
-void SourceWordDeletionFeature::Load()
+void SourceWordDeletionFeature::Load(AllOptions const& opts)
 {
   if (m_filename.empty())
     return;
@@ -23,7 +23,7 @@ private:
 public:
   SourceWordDeletionFeature(const std::string &line);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   bool IsUseable(const FactorMask &mask) const;
 
@@ -48,7 +48,7 @@ void TargetBigramFeature::SetParameter(const std::string& key, const std::string
   }
 }
 
-void TargetBigramFeature::Load()
+void TargetBigramFeature::Load(AllOptions const& opts)
 {
   if (m_filePath == "*")
     return ; //allow all
@@ -34,7 +34,7 @@ class TargetBigramFeature : public StatefulFeatureFunction
 public:
   TargetBigramFeature(const std::string &line);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   bool IsUseable(const FactorMask &mask) const;
 
@@ -74,7 +74,7 @@ void TargetNgramFeature::SetParameter(const std::string& key, const std::string&
   }
 }
 
-void TargetNgramFeature::Load()
+void TargetNgramFeature::Load(AllOptions const& opts)
 {
   if (m_file == "") return; //allow all, for now
 
@@ -203,7 +203,7 @@ class TargetNgramFeature : public StatefulFeatureFunction
 public:
   TargetNgramFeature(const std::string &line);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   bool IsUseable(const FactorMask &mask) const;
 
@@ -34,7 +34,7 @@ void TargetWordInsertionFeature::SetParameter(const std::string& key, const std:
   }
 }
 
-void TargetWordInsertionFeature::Load()
+void TargetWordInsertionFeature::Load(AllOptions const& opts)
 {
   if (m_filename.empty())
     return;
@@ -25,7 +25,7 @@ public:
 
   bool IsUseable(const FactorMask &mask) const;
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   virtual void EvaluateInIsolation(const Phrase &source
                                    , const TargetPhrase &targetPhrase
@@ -8,7 +8,7 @@
 namespace Moses
 {
 
-void TreeStructureFeature::Load()
+void TreeStructureFeature::Load(AllOptions const& opts)
 {
 
   // syntactic constraints can be hooked in here.
@@ -74,7 +74,7 @@ public:
                        int /* featureID - used to index the state in the previous hypotheses */,
                        ScoreComponentCollection* accumulator) const;
 
-  void Load();
+  void Load(AllOptions const& opts);
 };
 
 
@@ -87,7 +87,7 @@ void WordTranslationFeature::SetParameter(const std::string& key, const std::str
   }
 }
 
-void WordTranslationFeature::Load()
+void WordTranslationFeature::Load(AllOptions const& opts)
 {
   // load word list for restricted feature set
   if (m_filePathSource.empty()) {
@@ -40,7 +40,7 @@ public:
   void SetParameter(const std::string& key, const std::string& value);
   bool IsUseable(const FactorMask &mask) const;
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   void EvaluateWithSourceContext(const InputType &input
                                  , const InputPath &inputPath
@@ -44,7 +44,7 @@ GenerationDictionary::GenerationDictionary(const std::string &line)
   ReadParameters();
 }
 
-void GenerationDictionary::Load()
+void GenerationDictionary::Load(AllOptions const& opts)
 {
   FactorCollection &factorCollection = FactorCollection::Instance();
 
@@ -62,7 +62,7 @@ public:
   virtual ~GenerationDictionary();
 
   //! load data file
-  void Load();
+  void Load(AllOptions const& opts);
 
   /** number of unique input entries in the generation table.
   * NOT the number of lines in the generation table
@@ -20,7 +20,7 @@ BilingualLM::BilingualLM(const std::string &line)
 
 }
 
-void BilingualLM::Load()
+void BilingualLM::Load(AllOptions const& opts)
 {
   ReadParameters();
   loadModel();
@@ -117,7 +117,7 @@ public:
     return new BilingualLMState(0);
   }
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   FFState* EvaluateWhenApplied(
     const Hypothesis& cur_hypo,
@@ -204,7 +204,7 @@ LanguageModelDALM::~LanguageModelDALM()
   delete m_lm;
 }
 
-void LanguageModelDALM::Load()
+void LanguageModelDALM::Load(AllOptions const& opts)
 {
   /////////////////////
   // READING INIFILE //
@@ -28,7 +28,7 @@ public:
   LanguageModelDALM(const std::string &line);
   virtual ~LanguageModelDALM();
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   virtual const FFState *EmptyHypothesisState(const InputType &/*input*/) const;
 
@@ -96,7 +96,7 @@ bool LanguageModelIRST::IsUseable(const FactorMask &mask) const
   return ret;
 }
 
-void LanguageModelIRST::Load()
+void LanguageModelIRST::Load(AllOptions const& opts)
 {
   FactorCollection &factorCollection = FactorCollection::Instance();
 
@@ -88,7 +88,7 @@ public:
 
   bool IsUseable(const FactorMask &mask) const;
 
-  void Load();
+  void Load(AllOptions const& opts);
   const FFState *EmptyHypothesisState(const InputType &/*input*/) const;
 
   virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = NULL) const;
@@ -66,7 +66,7 @@ LanguageModelMaxEntSRI::~LanguageModelMaxEntSRI()
   delete m_srilmVocab;
 }
 
-void LanguageModelMaxEntSRI::Load()
+void LanguageModelMaxEntSRI::Load(AllOptions const& opts)
 {
   m_srilmVocab = new ::Vocab();
   m_srilmModel = new MEModel(*m_srilmVocab, m_nGramOrder);
@@ -54,7 +54,7 @@ protected:
 public:
   LanguageModelMaxEntSRI(const std::string &line);
   ~LanguageModelMaxEntSRI();
-  void Load();
+  void Load(AllOptions const& opts);
 
   virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = 0) const;
 };
@@ -22,7 +22,7 @@ NeuralLMWrapper::~NeuralLMWrapper()
 }
 
 
-void NeuralLMWrapper::Load()
+void NeuralLMWrapper::Load(AllOptions const& opts)
 {
 
   // Set parameters required by ancestor classes
@@ -27,7 +27,7 @@ public:
 
   virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = 0) const;
 
-  virtual void Load();
+  virtual void Load(AllOptions const& opts);
 
 };
 
@@ -39,7 +39,7 @@ RDLM::~RDLM()
   delete lm_label_base_instance_;
 }
 
-void RDLM::Load()
+void RDLM::Load(AllOptions const& opts)
 {
 
   lm_head_base_instance_ = new nplm::neuralTM();
@@ -208,7 +208,7 @@ public:
                        int /* featureID - used to index the state in the previous hypotheses */,
                        ScoreComponentCollection* accumulator) const;
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   // Iterator-class that yields all children of a node; if child is virtual node of binarized tree, its children are yielded instead.
   class UnbinarizedChildren
@@ -52,7 +52,7 @@ LanguageModelRandLM::~LanguageModelRandLM()
   delete m_lm;
 }
 
-void LanguageModelRandLM::Load()
+void LanguageModelRandLM::Load(AllOptions const& opts)
 {
   cerr << "Loading LanguageModelRandLM..." << endl;
   FactorCollection &factorCollection = FactorCollection::Instance();
@@ -39,7 +39,7 @@ public:
   LanguageModelRandLM(const std::string &line);
   ~LanguageModelRandLM();
 
-  void Load();
+  void Load(AllOptions const& opts);
   virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = NULL) const;
   void InitializeForInput(ttasksptr const& ttask);
   void CleanUpAfterSentenceProcessing(const InputType& source);
@@ -66,7 +66,7 @@ LanguageModelSRI::~LanguageModelSRI()
   delete m_srilmVocab;
 }
 
-void LanguageModelSRI::Load()
+void LanguageModelSRI::Load(AllOptions const& opts)
 {
   m_srilmVocab = new ::Vocab();
   m_srilmModel = new Ngram(*m_srilmVocab, m_nGramOrder);
@@ -54,7 +54,7 @@ protected:
 public:
   LanguageModelSRI(const std::string &line);
   ~LanguageModelSRI();
-  void Load();
+  void Load(AllOptions const& opts);
 
   virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = 0) const;
 };
@@ -70,7 +70,7 @@ void OxLM<Model>::SetParameter(const string& key, const string& value)
 }
 
 template<class Model>
-void OxLM<Model>::Load()
+void OxLM<Model>::Load(AllOptions const& opts)
 {
   model.load(m_filePath);
 
@@ -24,7 +24,7 @@ public:
 
   void SetParameter(const std::string& key, const std::string& value);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   virtual LMResult GetValue(
     const std::vector<const Word*> &contextFactor,
@@ -27,7 +27,6 @@
 #include "StaticData.h"
 #include "Util.h"
 #include "Range.h"
-
 #include <boost/functional/hash.hpp>
 
 using namespace std;
@@ -649,7 +649,7 @@ void StaticData::LoadFeatureFunctions()
 
     if (doLoad) {
       VERBOSE(1, "Loading " << ff->GetScoreProducerDescription() << endl);
-      ff->Load();
+      ff->Load(options());
     }
   }
 
@@ -657,7 +657,7 @@ void StaticData::LoadFeatureFunctions()
   for (size_t i = 0; i < pts.size(); ++i) {
     PhraseDictionary *pt = pts[i];
     VERBOSE(1, "Loading " << pt->GetScoreProducerDescription() << endl);
-    pt->Load();
+    pt->Load(options());
   }
 
   CheckLEGACYPT();
@@ -57,7 +57,7 @@ PhraseDictionaryCompact::PhraseDictionaryCompact(const std::string &line)
   ReadParameters();
 }
 
-void PhraseDictionaryCompact::Load()
+void PhraseDictionaryCompact::Load(AllOptions const& opts)
 {
   const StaticData &staticData = StaticData::Instance();
 
@@ -67,7 +67,7 @@ public:
 
   ~PhraseDictionaryCompact();
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   TargetPhraseCollection::shared_ptr GetTargetPhraseCollectionNonCacheLEGACY(const Phrase &source) const;
   TargetPhraseVectorPtr GetTargetPhraseCollectionRaw(const Phrase &source) const;
@@ -60,7 +60,7 @@ PhraseDictionaryDynamicCacheBased::~PhraseDictionaryDynamicCacheBased()
   Clear();
 }
 
-void PhraseDictionaryDynamicCacheBased::Load()
+void PhraseDictionaryDynamicCacheBased::Load(AllOptions const& opts)
 {
   VERBOSE(2,"PhraseDictionaryDynamicCacheBased::Load()" << std::endl);
   SetFeaturesToApply();
@@ -108,7 +108,7 @@ public:
     return *s_instance;
   }
 
-  void Load();
+  void Load(AllOptions const& opts);
   void Load(const std::string files);
 
   TargetPhraseCollection::shared_ptr
@@ -73,7 +73,7 @@ void PhraseDictionaryGroup::SetParameter(const string& key, const string& value)
   }
 }
 
-void PhraseDictionaryGroup::Load()
+void PhraseDictionaryGroup::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
   m_pdFeature.push_back(const_cast<PhraseDictionaryGroup*>(this));
@@ -63,7 +63,7 @@ class PhraseDictionaryGroup: public PhraseDictionary
 
 public:
   PhraseDictionaryGroup(const std::string& line);
-  void Load();
+  void Load(AllOptions const& opts);
   TargetPhraseCollection::shared_ptr
   CreateTargetPhraseCollection(const ttasksptr& ttask,
                                const Phrase& src) const;
@@ -79,7 +79,7 @@ PhraseDictionaryMultiModel::
 ~PhraseDictionaryMultiModel()
 { }
 
-void PhraseDictionaryMultiModel::Load()
+void PhraseDictionaryMultiModel::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
 
@@ -70,7 +70,7 @@ public:
   PhraseDictionaryMultiModel(const std::string &line);
   PhraseDictionaryMultiModel(int type, const std::string &line);
   ~PhraseDictionaryMultiModel();
-  void Load();
+  void Load(AllOptions const& opts);
 
   virtual void
   CollectSufficientStatistics
@@ -83,7 +83,7 @@ PhraseDictionaryMultiModelCounts::~PhraseDictionaryMultiModelCounts()
 }
 
 
-void PhraseDictionaryMultiModelCounts::Load()
+void PhraseDictionaryMultiModelCounts::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
   for(size_t i = 0; i < m_numModels; ++i) {
@@ -79,7 +79,7 @@ class PhraseDictionaryMultiModelCounts: public PhraseDictionaryMultiModel
 public:
   PhraseDictionaryMultiModelCounts(const std::string &line);
   ~PhraseDictionaryMultiModelCounts();
-  void Load();
+  void Load(AllOptions const& opts);
   TargetPhraseCollection::shared_ptr CreateTargetPhraseCollectionCounts(const Phrase &src, std::vector<float> &fs, std::map<std::string,multiModelCountsStats*>* allStats, std::vector<std::vector<float> > &multimodelweights) const;
   void CollectSufficientStats(const Phrase &src, std::vector<float> &fs, std::map<std::string,multiModelCountsStats*>* allStats) const;
   float GetTargetCount(const Phrase& target, size_t modelIndex) const;
@@ -22,7 +22,7 @@ PhraseDictionaryTransliteration::PhraseDictionaryTransliteration(const std::stri
                  m_outputLang.empty(), "Must specify all arguments");
 }
 
-void PhraseDictionaryTransliteration::Load()
+void PhraseDictionaryTransliteration::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
 }
@@ -18,7 +18,7 @@ class PhraseDictionaryTransliteration : public PhraseDictionary
 public:
   PhraseDictionaryTransliteration(const std::string &line);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   virtual void CleanUpAfterSentenceProcessing(const InputType& source);
 
@@ -37,7 +37,7 @@ PhraseDictionaryTreeAdaptor::~PhraseDictionaryTreeAdaptor()
 {
 }
 
-void PhraseDictionaryTreeAdaptor::Load()
+void PhraseDictionaryTreeAdaptor::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
 }
@@ -46,7 +46,7 @@ class PhraseDictionaryTreeAdaptor : public PhraseDictionary
 public:
   PhraseDictionaryTreeAdaptor(const std::string &line);
   virtual ~PhraseDictionaryTreeAdaptor();
-  void Load();
+  void Load(AllOptions const& opts);
 
   // enable/disable caching
   // you enable caching if you request the target candidates for a source phrase multiple times
@@ -25,7 +25,7 @@ ProbingPT::~ProbingPT()
   delete m_engine;
 }
 
-void ProbingPT::Load()
+void ProbingPT::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
 
@@ -21,7 +21,7 @@ public:
   ProbingPT(const std::string &line);
   ~ProbingPT();
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   void InitializeForInput(ttasksptr const& ttask);
 
@@ -21,6 +21,7 @@
 
 #include "Trie.h"
 #include "moses/TypeDef.h"
+#include "moses/parameters/AllOptions.h"
 
 #include <istream>
 #include <vector>
@@ -35,7 +36,8 @@ class RuleTableLoader
 public:
   virtual ~RuleTableLoader() {}
 
-  virtual bool Load(const std::vector<FactorType> &input,
+  virtual bool Load(AllOptions const& opts,
+                    const std::vector<FactorType> &input,
                     const std::vector<FactorType> &output,
                     const std::string &inFile,
                     size_t tableLimit,
@@ -32,7 +32,8 @@
 namespace Moses
 {
 
-bool RuleTableLoaderCompact::Load(const std::vector<FactorType> &input,
+bool RuleTableLoaderCompact::Load(AllOptions const& opts,
+                                  const std::vector<FactorType> &input,
                                   const std::vector<FactorType> &output,
                                   const std::string &inFile,
                                   size_t /* tableLimit */,
@@ -36,7 +36,8 @@ class RuleTableTrie;
 class RuleTableLoaderCompact : public RuleTableLoader
 {
 public:
-  bool Load(const std::vector<FactorType> &input,
+  bool Load(AllOptions const& opts,
+            const std::vector<FactorType> &input,
             const std::vector<FactorType> &output,
             const std::string &inFile,
             size_t tableLimit,
@@ -35,8 +35,9 @@ namespace Moses
 
 // Determines the rule table type by peeking inside the file then creates
 // a suitable RuleTableLoader object.
-std::auto_ptr<RuleTableLoader> RuleTableLoaderFactory::Create(
-  const std::string &path)
+std::auto_ptr<RuleTableLoader>
+RuleTableLoaderFactory::
+Create(const std::string &path)
 {
   InputFileStream input(path);
   std::string line;
@@ -51,9 +52,7 @@ std::auto_ptr<RuleTableLoader> RuleTableLoaderFactory::Create(
     std::cerr << "Unsupported compact rule table format: " << tokens[0];
     return std::auto_ptr<RuleTableLoader>();
   } else if (tokens[0] == "[X]" && tokens[1] == "|||") {
-    return std::auto_ptr<RuleTableLoader>(new
-                                          RuleTableLoaderHiero());
-
+    return std::auto_ptr<RuleTableLoader>(new RuleTableLoaderHiero());
   }
 
   return std::auto_ptr<RuleTableLoader>(new RuleTableLoaderStandard());
@@ -14,13 +14,14 @@ using namespace std;
 namespace Moses
 {
 
-bool RuleTableLoaderHiero::Load(const std::vector<FactorType> &input,
+bool RuleTableLoaderHiero::Load(AllOptions const& opts,
+                                const std::vector<FactorType> &input,
                                 const std::vector<FactorType> &output,
                                 const std::string &inFile,
                                 size_t tableLimit,
                                 RuleTableTrie &ruleTable)
 {
-  bool ret = RuleTableLoaderStandard::Load(HieroFormat
+  bool ret = RuleTableLoaderStandard::Load(opts, HieroFormat
                                            ,input, output
                                            ,inFile
                                            ,tableLimit
@@ -18,7 +18,8 @@ namespace Moses
 class RuleTableLoaderHiero : public RuleTableLoaderStandard
 {
 public:
-  bool Load(const std::vector<FactorType> &input,
+  bool Load(AllOptions const& opts,
+            const std::vector<FactorType> &input,
             const std::vector<FactorType> &output,
             const std::string &inFile,
             size_t tableLimit,
@@ -47,19 +47,17 @@ using namespace boost::algorithm;
 
 namespace Moses
 {
-bool RuleTableLoaderStandard::Load(const std::vector<FactorType> &input
-                                   , const std::vector<FactorType> &output
-                                   , const std::string &inFile
-                                   , size_t tableLimit
-                                   , RuleTableTrie &ruleTable)
-{
-  bool ret = Load(MosesFormat
-                  ,input, output
-                  ,inFile
-                  ,tableLimit
-                  ,ruleTable);
-  return ret;
 
+bool
+RuleTableLoaderStandard::
+Load(AllOptions const& opts
+     , const std::vector<FactorType> &input
+     , const std::vector<FactorType> &output
+     , const std::string &inFile
+     , size_t tableLimit
+     , RuleTableTrie &ruleTable)
+{
+  return Load(opts, MosesFormat,input, output ,inFile ,tableLimit ,ruleTable);
 }
 
 void ReformatHieroRule(int sourceTarget, string &phrase, map<size_t, pair<size_t, size_t> > &ntAlign)
@@ -142,7 +140,7 @@ void ReformatHieroRule(const string &lineOrig, string &out)
   out = ret.str();
 }
 
-bool RuleTableLoaderStandard::Load(FormatType format
+bool RuleTableLoaderStandard::Load(AllOptions const& opts, FormatType format
                                    , const std::vector<FactorType> &input
                                    , const std::vector<FactorType> &output
                                    , const std::string &inFile
@@ -29,14 +29,16 @@ class RuleTableLoaderStandard : public RuleTableLoader
 {
 protected:
 
-  bool Load(FormatType format,
+  bool Load(AllOptions const& opts,
+            FormatType format,
             const std::vector<FactorType> &input,
             const std::vector<FactorType> &output,
             const std::string &inFile,
             size_t tableLimit,
            RuleTableTrie &);
 public:
-  bool Load(const std::vector<FactorType> &input,
+  bool Load(AllOptions const& opts,
+            const std::vector<FactorType> &input,
             const std::vector<FactorType> &output,
             const std::string &inFile,
             size_t tableLimit,
@@ -32,7 +32,7 @@ PhraseDictionaryALSuffixArray::PhraseDictionaryALSuffixArray(const std::string &
   ReadParameters();
 }
 
-void PhraseDictionaryALSuffixArray::Load()
+void PhraseDictionaryALSuffixArray::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
 }
@@ -47,11 +47,11 @@ void PhraseDictionaryALSuffixArray::InitializeForInput(ttasksptr const& ttask)
 
   std::auto_ptr<RuleTableLoader> loader =
     RuleTableLoaderFactory::Create(grammarFile);
-  bool ret = loader->Load(m_input, m_output, grammarFile, m_tableLimit,
-                          *this);
+  AllOptions const& opts = ttask->options();
+  bool ret = loader->Load(opts, m_input, m_output, grammarFile, m_tableLimit, *this);
 
-  UTIL_THROW_IF2(!ret,
-                 "Rules not successfully loaded for sentence id " << translationId);
+  UTIL_THROW_IF2(!ret, "Rules not successfully loaded for sentence id "
+                 << translationId);
 }
 
 void PhraseDictionaryALSuffixArray::CleanUpAfterSentenceProcessing(const InputType &source)
@@ -23,7 +23,7 @@ class PhraseDictionaryALSuffixArray : public PhraseDictionaryMemory
 {
 public:
   PhraseDictionaryALSuffixArray(const std::string &line);
-  void Load();
+  void Load(AllOptions const& opts);
   void InitializeForInput(ttasksptr const& ttask);
   void CleanUpAfterSentenceProcessing(const InputType& source);
 
@@ -93,7 +93,7 @@ PhraseDictionaryFuzzyMatch::~PhraseDictionaryFuzzyMatch()
   delete m_FuzzyMatchWrapper;
 }
 
-void PhraseDictionaryFuzzyMatch::Load()
+void PhraseDictionaryFuzzyMatch::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
 
@@ -44,7 +44,7 @@ class PhraseDictionaryFuzzyMatch : public PhraseDictionary
 public:
   PhraseDictionaryFuzzyMatch(const std::string &line);
   ~PhraseDictionaryFuzzyMatch();
-  void Load();
+  void Load(AllOptions const& opts);
 
   const PhraseDictionaryNodeMemory &GetRootNode(long translationId) const;
 
@@ -47,7 +47,7 @@ PhraseDictionaryOnDisk::~PhraseDictionaryOnDisk()
 {
 }
 
-void PhraseDictionaryOnDisk::Load()
+void PhraseDictionaryOnDisk::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
 }
@@ -67,7 +67,7 @@ protected:
 public:
   PhraseDictionaryOnDisk(const std::string &line);
   ~PhraseDictionaryOnDisk();
-  void Load();
+  void Load(AllOptions const& opts);
 
   // PhraseDictionary impl
   virtual ChartRuleLookupManager *CreateRuleLookupManager(
@@ -34,7 +34,7 @@ RuleTableTrie::~RuleTableTrie()
 {
 }
 
-void RuleTableTrie::Load()
+void RuleTableTrie::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
 
@@ -44,8 +44,7 @@ void RuleTableTrie::Load()
     throw runtime_error("Error: Loading " + m_filePath);
   }
 
-  bool ret = loader->Load(m_input, m_output, m_filePath, m_tableLimit,
-                          *this);
+  bool ret = loader->Load(opts, m_input, m_output, m_filePath, m_tableLimit, *this);
   if (!ret) {
     throw runtime_error("Error: Loading " + m_filePath);
   }
@@ -46,7 +46,7 @@ public:
 
   virtual ~RuleTableTrie();
 
-  void Load();
+  void Load(AllOptions const& opts);
 
 private:
   friend class RuleTableLoader;
@@ -12,7 +12,7 @@ SkeletonPT::SkeletonPT(const std::string &line)
   ReadParameters();
 }
 
-void SkeletonPT::Load()
+void SkeletonPT::Load(AllOptions const& opts)
 {
   SetFeaturesToApply();
 }
@@ -16,7 +16,7 @@ class SkeletonPT : public PhraseDictionary
 public:
   SkeletonPT(const std::string &line);
 
-  void Load();
+  void Load(AllOptions const& opts);
 
   void InitializeForInput(ttasksptr const& ttask);
 
@@ -417,9 +417,9 @@ namespace Moses
 
   void
   Mmsapt::
-  Load()
+  Load(AllOptions const& opts)
   {
-    Load(true);
+    Load(opts, true);
   }
 
   void
@@ -474,7 +474,7 @@ namespace Moses
 
   void
   Mmsapt::
-  Load(bool with_checks)
+  Load(AllOptions const& opts, bool with_checks)
   {
     boost::unique_lock<boost::shared_mutex> lock(m_lock);
     // load feature functions (i.e., load underlying data bases, if any)
@@ -211,8 +211,8 @@ namespace Moses
     // Mmsapt(std::string const& description, std::string const& line);
     Mmsapt(std::string const& line);
 
-    void Load();
-    void Load(bool with_checks);
+    void Load(AllOptions const& opts);
+    void Load(AllOptions const& opts, bool with_checks);
     size_t SetTableLimit(size_t limit); // returns the prior table limit
     std::string const& GetName() const;
 
@@ -6,7 +6,7 @@ set -e -o pipefail
 
 opt=$(pwd)/opt
 
-args=$(getopt -oj:aq -lwith-irstlm:,with-boost:,with-cmph:,with-regtest:,no-server,with-xmlrpc-c:,full -- "$@")
+args=$(getopt -oj:aq -lwith-irstlm:,with-boost:,with-cmph:,with-regtest:,no-xmlrpc-c,with-xmlrpc-c:,full -- "$@")
 eval set -- "$args"
 
 # default settings
@@ -16,7 +16,7 @@ j=$(getconf _NPROCESSORS_ONLN)
 irstlm=$opt
 boost=$opt
 cmph=$opt
-xmlrpc=$opt
+xmlrpc=--with-xmlrpc-c\=$opt
 regtest=$(pwd)/regtest
 unset q
 unset a
@@ -30,12 +30,13 @@ while true ; do
         -j ) j=$2; shift 2 ;;
         -a ) a=-a; shift ;;
         -q ) q=-q; shift ;;
+        --no-xmlrpc-c ) xmlrpc=$1; shift ;;
+        --with-xmlrpc-c )
+            xmlrpc=--with-xmlrpc-c\=$2; shift 2 ;;
         --with-irstlm ) irstlm=$2; shift 2 ;;
        --with-boost ) boost=$2; shift 2 ;;
        --with-cmph ) cmph=$2; shift 2 ;;
        --with-regtest ) regtest=$2; shift 2 ;;
-        --no-server ) noserver=true; shift 2 ;;
-        --with-xmlrpc-c ) xmlrpc=$2; shift 2 ;;
        --full ) full=true; shift 2 ;;
        -- ) shift; break ;;
        * ) break ;;
@@ -54,14 +55,14 @@ set -x
 if [ "$full" == true ] ; then
     ./bjam -j$j --with-irstlm=$irstlm --with-boost=$boost --with-cmph=$cmph --no-xmlrpc-c --with-regtest=$regtest -a $skipcompact $@ $q || exit $?
     if ./regression-testing/run-single-test.perl --server --startuptest ; then
-        ./bjam -j$j --with-irstlm=$irstlm --with-boost=$boost --with-cmph=$cmph --with-xmlrpc-c=$xmlrpc --with-regtest=$regtest -a $skipcompact $@ $q
+        ./bjam -j$j --with-irstlm=$irstlm --with-boost=$boost --with-cmph=$cmph $xmlrpc --with-regtest=$regtest -a $skipcompact $@ $q
     fi
 else
     # when investigating failures, always run single-threaded
     if [ "$q" == "-q" ] ; then j=1; fi
 
     if ./regression-testing/run-single-test.perl --server --startuptest ; then
-        ./bjam -j$j $q $a --with-irstlm=$irstlm --with-boost=$boost --with-cmph=$cmph --with-xmlrpc-c=$xmlrpc --with-regtest=$regtest $skipcompact $@
+        ./bjam -j$j $q $a --with-irstlm=$irstlm --with-boost=$boost --with-cmph=$cmph $xmlrpc --with-regtest=$regtest $skipcompact $@
     else
        ./bjam -j$j $q $a --with-irstlm=$irstlm --with-boost=$boost --with-cmph=$cmph --no-xmlrpc-c --with-regtest=$regtest $skipcompact $@
     fi