Code cleanup and refactoring.

Ulrich Germann 2015-12-10 03:17:36 +00:00
parent 240b88c683
commit 29694af6e4
144 changed files with 502 additions and 530 deletions
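For readers skimming the diff: the recurring change in this commit is that feature functions and input types stop fetching configuration from the global StaticData as an AllOptions const& and instead hold a shared AllOptions::ptr (evidently a boost::shared_ptr<AllOptions>, cf. the hunks below that construct one with new AllOptions(...)). Accordingly, Load(AllOptions const&) becomes Load(AllOptions::ptr const&), the FeatureFunction base class stores the handle in m_options, and call sites switch from options().x to options()->x. The following is a minimal, self-contained sketch of that ownership pattern under simplified assumptions; the types (SearchOptions, MyFeature) and the use of std::shared_ptr are stand-ins for illustration, not the real Moses API.

#include <iostream>
#include <memory>

// Mock stand-ins, simplified for illustration only (Moses uses boost::shared_ptr).
struct SearchOptions {
  int stack_size;
  SearchOptions() : stack_size(100) {}
};

struct AllOptions {
  typedef std::shared_ptr<AllOptions> ptr;  // the commit passes this handle around
  SearchOptions search;
};

class FeatureFunction {
protected:
  AllOptions::ptr m_options;                // new: base class keeps the shared handle
public:
  virtual ~FeatureFunction() {}
  // new signature: take the shared pointer; the default implementation just stores it
  virtual void Load(AllOptions::ptr const& opts) { m_options = opts; }
  AllOptions::ptr const& options() const { return m_options; }
};

class MyFeature : public FeatureFunction {
public:
  void Load(AllOptions::ptr const& opts) {
    m_options = opts;                       // derived Load() stores the handle first...
    std::cout << "stack size: " << m_options->search.stack_size << "\n";
  }                                         // ...then reads settings via operator->
};

int main() {
  AllOptions::ptr opts(new AllOptions);     // was: AllOptions opts;
  MyFeature ff;
  ff.Load(opts);                            // options are shared, not copied or re-fetched
  return 0;
}

Judging from the many removed StaticData::Instance() lookups throughout the diff, the apparent aim is to let per-task or per-input option sets travel with the objects that use them instead of being read back from the global singleton at every call site.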

View File

@ -51,7 +51,7 @@ int main(int argc, char **argv)
std::stringstream ss;
ss << nscores;
PhraseDictionaryCompact pdc("PhraseDictionaryCompact input-factor=0 output-factor=0 num-features=" + ss.str() + " path=" + ttable);
AllOptions opts;
AllOptions::ptr opts(new AllOptions);
pdc.Load(opts);
std::string line;

View File

@ -163,11 +163,12 @@ int main(int argc, char const* argv[])
}
StaticData& SD = const_cast<StaticData&>(StaticData::Instance());
LMBR_Options& lmbr = SD.options().lmbr;
MBR_Options& mbr = SD.options().mbr;
boost::shared_ptr<AllOptions> opts(new AllOptions(*SD.options()));
LMBR_Options& lmbr = opts->lmbr;
MBR_Options& mbr = opts->mbr;
lmbr.enabled = true;
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper(SD.options()));
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper(*opts));
if (!ioWrapper) {
throw runtime_error("Failed to initialise IOWrapper");
}

View File

@ -125,7 +125,7 @@ int main(int argc, char const** argv)
IFVERBOSE(1) {
PrintUserTime("Created input-output object");
}
AllOptions::ptr opts(new AllOptions(StaticData::Instance().options()));
AllOptions::ptr opts(new AllOptions(*StaticData::Instance().options()));
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper(*opts));
if (ioWrapper == NULL) {
cerr << "Error; Failed to create IO object" << endl;

View File

@ -139,7 +139,8 @@ BackwardsEdge::BackwardsEdge(const BitmapContainer &prevBitmapContainer
}
// Fetch the things we need for distortion cost computation.
int maxDistortion = StaticData::Instance().GetMaxDistortion();
// int maxDistortion = StaticData::Instance().GetMaxDistortion();
int maxDistortion = itype.options()->reordering.max_distortion;
if (maxDistortion == -1) {
for (HypothesisSet::const_iterator iter = m_prevBitmapContainer.GetHypotheses().begin(); iter != m_prevBitmapContainer.GetHypotheses().end(); ++iter) {

View File

@ -102,7 +102,7 @@ ChartHypothesis::~ChartHypothesis()
*/
void ChartHypothesis::GetOutputPhrase(Phrase &outPhrase) const
{
FactorType placeholderFactor = StaticData::Instance().options().input.placeholder_factor;
FactorType placeholderFactor = StaticData::Instance().options()->input.placeholder_factor;
for (size_t pos = 0; pos < GetCurrTargetPhrase().GetSize(); ++pos) {
const Word &word = GetCurrTargetPhrase().GetWord(pos);
@ -256,7 +256,7 @@ void ChartHypothesis::CleanupArcList()
* However, may not be enough if only unique candidates are needed,
* so we'll keep all of arc list if nedd distinct n-best list
*/
AllOptions const& opts = StaticData::Instance().options();
AllOptions const& opts = *StaticData::Instance().options();
size_t nBestSize = opts.nbest.nbest_size;
bool distinctNBest = (opts.nbest.only_distinct
|| opts.mbr.enabled
@ -336,7 +336,7 @@ std::ostream& operator<<(std::ostream& out, const ChartHypothesis& hypo)
out << "->" << hypo.GetWinningHypothesis()->GetId();
}
if (StaticData::Instance().GetIncludeLHSInSearchGraph()) {
if (hypo.GetManager().options()->output.include_lhs_in_search_graph) {
out << " " << hypo.GetTargetLHS() << "=>";
}
out << " " << hypo.GetCurrTargetPhrase()

View File

@ -82,7 +82,7 @@ void ChartKBestExtractor::Extract(
// Generate the target-side yield of the derivation d.
Phrase ChartKBestExtractor::GetOutputPhrase(const Derivation &d)
{
FactorType placeholderFactor = StaticData::Instance().options().input.placeholder_factor;
FactorType placeholderFactor = StaticData::Instance().options()->input.placeholder_factor;
Phrase ret(ARRAY_SIZE_INCR);

View File

@ -44,17 +44,26 @@ ChartParserUnknown
ChartParserUnknown::~ChartParserUnknown()
{
RemoveAllInColl(m_unksrcs);
// RemoveAllInColl(m_cacheTargetPhraseCollection);
}
void ChartParserUnknown::Process(const Word &sourceWord, const Range &range, ChartParserCallback &to)
AllOptions::ptr const&
ChartParserUnknown::
options() const
{
return m_ttask.lock()->options();
}
void
ChartParserUnknown::
Process(const Word &sourceWord, const Range &range, ChartParserCallback &to)
{
// unknown word, add as trans opt
const StaticData &staticData = StaticData::Instance();
const UnknownWordPenaltyProducer &unknownWordPenaltyProducer = UnknownWordPenaltyProducer::Instance();
const UnknownWordPenaltyProducer &unknownWordPenaltyProducer
= UnknownWordPenaltyProducer::Instance();
size_t isDigit = 0;
if (staticData.options().unk.drop) {
if (options()->unk.drop) {
const Factor *f = sourceWord[0]; // TODO hack. shouldn't know which factor is surface
const StringPiece s = f->GetString();
isDigit = s.find_first_of("0123456789");
@ -79,9 +88,9 @@ void ChartParserUnknown::Process(const Word &sourceWord, const Range &range, Cha
}
//TranslationOption *transOpt;
if (! staticData.options().unk.drop || isDigit) {
if (! options()->unk.drop || isDigit) {
// loop
const UnknownLHSList &lhsList = staticData.GetUnknownLHS();
const UnknownLHSList &lhsList = options()->syntax.unknown_lhs; // staticData.GetUnknownLHS();
UnknownLHSList::const_iterator iterLHS;
for (iterLHS = lhsList.begin(); iterLHS != lhsList.end(); ++iterLHS) {
const string &targetLHSStr = iterLHS->first;
@ -91,7 +100,7 @@ void ChartParserUnknown::Process(const Word &sourceWord, const Range &range, Cha
//const Word &sourceLHS = staticData.GetInputDefaultNonTerminal();
Word *targetLHS = new Word(true);
targetLHS->CreateFromString(Output, staticData.options().output.factor_order,
targetLHS->CreateFromString(Output, options()->output.factor_order,
targetLHSStr, true);
UTIL_THROW_IF2(targetLHS->GetFactor(0) == NULL, "Null factor for target LHS");
@ -108,9 +117,8 @@ void ChartParserUnknown::Process(const Word &sourceWord, const Range &range, Cha
targetPhrase->SetAlignmentInfo("0-0");
targetPhrase->EvaluateInIsolation(*unksrc);
AllOptions const& opts = staticData.options();
if (!opts.output.detailed_tree_transrep_filepath.empty() ||
opts.nbest.print_trees || staticData.GetTreeStructure() != NULL) {
if (!options()->output.detailed_tree_transrep_filepath.empty() ||
options()->nbest.print_trees || staticData.GetTreeStructure() != NULL) {
std::string prop = "[ ";
prop += (*targetLHS)[0]->GetString().as_string() + " ";
prop += sourceWord[0]->GetString().as_string() + " ]";
@ -126,14 +134,14 @@ void ChartParserUnknown::Process(const Word &sourceWord, const Range &range, Cha
TargetPhrase *targetPhrase = new TargetPhrase(firstPt);
// loop
const UnknownLHSList &lhsList = staticData.GetUnknownLHS();
const UnknownLHSList &lhsList = options()->syntax.unknown_lhs;//staticData.GetUnknownLHS();
UnknownLHSList::const_iterator iterLHS;
for (iterLHS = lhsList.begin(); iterLHS != lhsList.end(); ++iterLHS) {
const string &targetLHSStr = iterLHS->first;
//float prob = iterLHS->second;
Word *targetLHS = new Word(true);
targetLHS->CreateFromString(Output, staticData.options().output.factor_order,
targetLHS->CreateFromString(Output, staticData.options()->output.factor_order,
targetLHSStr, true);
UTIL_THROW_IF2(targetLHS->GetFactor(0) == NULL, "Null factor for target LHS");
@ -214,9 +222,7 @@ void ChartParser::Create(const Range &range, ChartParserCallback &to)
if (range.GetNumWordsCovered() == 1
&& range.GetStartPos() != 0
&& range.GetStartPos() != m_source.GetSize()-1) {
bool always = m_ttask.lock()->options()->unk.always_create_direct_transopt;
// bool alwaysCreateDirectTranslationOption
// = StaticData::Instance().IsAlwaysCreateDirectTranslationOption();
bool always = options()->unk.always_create_direct_transopt;
if (to.Empty() || always) {
// create unknown words for 1 word coverage where we don't have any trans options
const Word &sourceWord = m_source.GetWord(range.GetStartPos());
@ -291,4 +297,14 @@ long ChartParser::GetTranslationId() const
{
return m_source.GetTranslationId();
}
AllOptions::ptr const&
ChartParser::
options() const
{
return m_ttask.lock()->options();
}
} // namespace Moses

View File

@ -57,6 +57,7 @@ public:
private:
std::vector<Phrase*> m_unksrcs;
std::list<TargetPhraseCollection::shared_ptr> m_cacheTargetPhraseCollection;
AllOptions::ptr const& options() const;
};
class ChartParser
@ -78,6 +79,8 @@ public:
return m_unknown.GetUnknownSources();
}
AllOptions::ptr const& options() const;
private:
ChartParserUnknown m_unknown;
std::vector <DecodeGraph*> m_decodeGraphList;

View File

@ -66,9 +66,8 @@ ConfusionNet(AllOptions::ptr const& opts) : InputType(opts)
{
stats.createOne();
const StaticData& SD = StaticData::Instance();
if (SD.IsSyntax()) {
m_defaultLabelSet.insert(SD.GetInputDefaultNonTerminal());
if (is_syntax(opts->search.algo)) {
m_defaultLabelSet.insert(opts->syntax.input_default_non_terminal);
}
UTIL_THROW_IF2(InputFeature::InstancePtr() == NULL, "Input feature must be specified");
}
@ -92,14 +91,14 @@ ConfusionNet(Sentence const& s) : InputType(s.options())
bool
ConfusionNet::
ReadF(std::istream& in, const std::vector<FactorType>& factorOrder, int format)
ReadF(std::istream& in, int format)
{
VERBOSE(2, "read confusion net with format "<<format<<"\n");
switch(format) {
case 0:
return ReadFormat0(in,factorOrder);
return ReadFormat0(in);
case 1:
return ReadFormat1(in,factorOrder);
return ReadFormat1(in);
default:
std::cerr << "ERROR: unknown format '"<<format
<<"' in ConfusionNet::Read";
@ -109,22 +108,20 @@ ReadF(std::istream& in, const std::vector<FactorType>& factorOrder, int format)
int
ConfusionNet::
Read(std::istream& in,
const std::vector<FactorType>& factorOrder,
AllOptions const& opts)
Read(std::istream& in)
{
int rv=ReadF(in,factorOrder,0);
int rv=ReadF(in,0);
if(rv) stats.collect(*this);
return rv;
}
bool
ConfusionNet::
ReadFormat0(std::istream& in, const std::vector<FactorType>& factorOrder)
ReadFormat0(std::istream& in)
{
Clear();
const std::vector<FactorType>& factorOrder = m_options->input.factor_order;
// const StaticData &staticData = StaticData::Instance();
const InputFeature *inputFeature = InputFeature::InstancePtr();
size_t numInputScores = inputFeature->GetNumInputScores();
size_t numRealWordCount = inputFeature->GetNumRealWordsInInput();
@ -140,7 +137,6 @@ ReadFormat0(std::istream& in, const std::vector<FactorType>& factorOrder)
Column col;
while(is>>word) {
Word w;
// String2Word(word,w,factorOrder);
w.CreateFromString(Input,factorOrder,StringPiece(word),false,false);
std::vector<float> probs(totalCount, 0.0);
for(size_t i=0; i < numInputScores; i++) {
@ -179,9 +175,10 @@ ReadFormat0(std::istream& in, const std::vector<FactorType>& factorOrder)
bool
ConfusionNet::
ReadFormat1(std::istream& in, const std::vector<FactorType>& factorOrder)
ReadFormat1(std::istream& in)
{
Clear();
const std::vector<FactorType>& factorOrder = m_options->input.factor_order;
std::string line;
if(!getline(in,line)) return 0;
size_t s;

View File

@ -30,8 +30,8 @@ protected:
std::vector<Column> data;
NonTerminalSet m_defaultLabelSet;
bool ReadFormat0(std::istream&,const std::vector<FactorType>& factorOrder);
bool ReadFormat1(std::istream&,const std::vector<FactorType>& factorOrder);
bool ReadFormat0(std::istream&);
bool ReadFormat1(std::istream&);
void String2Word(const std::string& s,Word& w,const std::vector<FactorType>& factorOrder);
public:
@ -46,7 +46,8 @@ public:
const Column& GetColumn(size_t i) const {
UTIL_THROW_IF2(i >= data.size(),
"Out of bounds. Trying to access " << i << " when vector only contains " << data.size());
"Out of bounds. Trying to access " << i
<< " when vector only contains " << data.size());
return data[i];
}
const Column& operator[](size_t i) const {
@ -64,11 +65,10 @@ public:
data.clear();
}
bool ReadF(std::istream&,const std::vector<FactorType>& factorOrder,int format=0);
bool ReadF(std::istream&, int format=0);
virtual void Print(std::ostream&) const;
int Read(std::istream& in,const std::vector<FactorType>& factorOrder,
AllOptions const& opts);
int Read(std::istream& in);
Phrase GetSubString(const Range&) const; //TODO not defined
std::string GetStringRep(const std::vector<FactorType> factorsToPrint) const; //TODO not defined

View File

@ -101,7 +101,7 @@ SimpleTranslationInterface::~SimpleTranslationInterface()
//the simplified version of string input/output translation
string SimpleTranslationInterface::translate(const string &inputString)
{
boost::shared_ptr<Moses::IOWrapper> ioWrapper(new IOWrapper(StaticData::Instance().options()));
boost::shared_ptr<Moses::IOWrapper> ioWrapper(new IOWrapper(*StaticData::Instance().options()));
// main loop over set of input sentences
size_t sentEnd = inputString.rfind('\n'); //find the last \n, the input stream has to be appended with \n to be translated
const string &newString = sentEnd != string::npos ? inputString : inputString + '\n';
@ -180,7 +180,7 @@ batch_run()
IFVERBOSE(1) PrintUserTime("Created input-output object");
// set up read/writing class:
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper(staticData.options()));
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper(*staticData.options()));
UTIL_THROW_IF2(ioWrapper == NULL, "Error; Failed to create IO object"
<< " [" << HERE << "]");

View File

@ -46,11 +46,12 @@ ConstrainedDecoding::ConstrainedDecoding(const std::string &line)
ReadParameters();
}
void ConstrainedDecoding::Load(AllOptions const& opts)
void ConstrainedDecoding::Load(AllOptions::ptr const& opts)
{
m_options = opts;
const StaticData &staticData = StaticData::Instance();
bool addBeginEndWord
= ((opts.search.algo == CYKPlus) || (opts.search.algo == ChartIncremental));
= ((opts->search.algo == CYKPlus) || (opts->search.algo == ChartIncremental));
for(size_t i = 0; i < m_paths.size(); ++i) {
InputFileStream constraintFile(m_paths[i]);
@ -62,10 +63,10 @@ void ConstrainedDecoding::Load(AllOptions const& opts)
Phrase phrase(0);
if (vecStr.size() == 1) {
sentenceID++;
phrase.CreateFromString(Output, opts.output.factor_order, vecStr[0], NULL);
phrase.CreateFromString(Output, opts->output.factor_order, vecStr[0], NULL);
} else if (vecStr.size() == 2) {
sentenceID = Scan<long>(vecStr[0]);
phrase.CreateFromString(Output, opts.output.factor_order, vecStr[1], NULL);
phrase.CreateFromString(Output, opts->output.factor_order, vecStr[1], NULL);
} else {
UTIL_THROW(util::Exception, "Reference file not loaded");
}

View File

@ -36,7 +36,7 @@ class ConstrainedDecoding : public StatefulFeatureFunction
public:
ConstrainedDecoding(const std::string &line);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
bool IsUseable(const FactorMask &mask) const {
return true;

View File

@ -1,7 +1,6 @@
#include "CountNonTerms.h"
#include "moses/Util.h"
#include "moses/TargetPhrase.h"
#include "moses/StaticData.h"
using namespace std;
@ -21,8 +20,6 @@ void CountNonTerms::EvaluateInIsolation(const Phrase &sourcePhrase
, ScoreComponentCollection &scoreBreakdown
, ScoreComponentCollection &estimatedScores) const
{
const StaticData &staticData = StaticData::Instance();
vector<float> scores(m_numScoreComponents, 0);
size_t indScore = 0;
@ -39,7 +36,7 @@ void CountNonTerms::EvaluateInIsolation(const Phrase &sourcePhrase
if (m_targetSyntax) {
for (size_t i = 0; i < targetPhrase.GetSize(); ++i) {
const Word &word = targetPhrase.GetWord(i);
if (word.IsNonTerminal() && word != staticData.GetOutputDefaultNonTerminal()) {
if (word.IsNonTerminal() && word != m_options->syntax.output_default_non_terminal) {
++scores[indScore];
}
}
@ -49,7 +46,7 @@ void CountNonTerms::EvaluateInIsolation(const Phrase &sourcePhrase
if (m_sourceSyntax) {
for (size_t i = 0; i < sourcePhrase.GetSize(); ++i) {
const Word &word = sourcePhrase.GetWord(i);
if (word.IsNonTerminal() && word != staticData.GetInputDefaultNonTerminal()) {
if (word.IsNonTerminal() && word != m_options->syntax.input_default_non_terminal) {
++scores[indScore];
}
}
@ -72,5 +69,12 @@ void CountNonTerms::SetParameter(const std::string& key, const std::string& valu
}
}
void
CountNonTerms::
Load(AllOptions::ptr const& opts)
{
m_options = opts;
}
}

View File

@ -7,6 +7,8 @@ namespace Moses
class CountNonTerms : public StatelessFeatureFunction
{
Word m_input_default_nonterminal;
Word m_output_default_nonterminal;
public:
CountNonTerms(const std::string &line);
bool IsUseable(const FactorMask &mask) const {
@ -41,6 +43,7 @@ public:
void SetParameter(const std::string& key, const std::string& value);
void Load(AllOptions::ptr const& opts);
protected:
bool m_all, m_sourceSyntax, m_targetSyntax;
};

View File

@ -52,8 +52,9 @@ void CoveredReferenceFeature::EvaluateWithSourceContext(const InputType &input
estimatedScores->Assign(this, scores);
}
void CoveredReferenceFeature::Load(AllOptions const& opts)
void CoveredReferenceFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
InputFileStream refFile(m_path);
std::string line;
const StaticData &staticData = StaticData::Instance();

View File

@ -44,7 +44,7 @@ public:
ReadParameters();
}
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
bool IsUseable(const FactorMask &mask) const {
return true;

View File

@ -16,8 +16,9 @@ DeleteRules::DeleteRules(const std::string &line)
ReadParameters();
}
void DeleteRules::Load(AllOptions const& opts)
void DeleteRules::Load(AllOptions::ptr const& opts)
{
m_options = opts;
std::vector<FactorType> factorOrder;
factorOrder.push_back(0); // unfactored for now

View File

@ -15,7 +15,7 @@ protected:
public:
DeleteRules(const std::string &line);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
bool IsUseable(const FactorMask &mask) const {
return true;

View File

@ -323,8 +323,9 @@ void DynamicCacheBasedLanguageModel::Clear()
m_cache.clear();
}
void DynamicCacheBasedLanguageModel::Load(AllOptions const& opts)
void DynamicCacheBasedLanguageModel::Load(AllOptions::ptr const& opts)
{
m_options = opts;
// SetPreComputedScores();
VERBOSE(2,"DynamicCacheBasedLanguageModel::Load()" << std::endl);
Load(m_initfiles);

View File

@ -119,7 +119,7 @@ public:
return true;
}
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
void Load(const std::string filestr);
void Execute(std::string command);
void SetParameter(const std::string& key, const std::string& value);

View File

@ -47,6 +47,7 @@ protected:
size_t m_index; // index into vector covering ALL feature function values
std::vector<bool> m_tuneableComponents;
size_t m_numTuneableComponents;
AllOptions::ptr m_options;
//In case there's multiple producers with the same description
static std::multiset<std::string> description_counts;
@ -70,7 +71,13 @@ public:
virtual ~FeatureFunction();
//! override to load model files
virtual void Load(AllOptions const& opts) {
virtual void Load(AllOptions::ptr const& opts) {
m_options = opts;
}
AllOptions::ptr const&
options() const {
return m_options;
}
static void ResetDescriptionCounts() {

View File

@ -51,8 +51,9 @@ GlobalLexicalModel::~GlobalLexicalModel()
}
}
void GlobalLexicalModel::Load(AllOptions const& opts)
void GlobalLexicalModel::Load(AllOptions::ptr const& opts)
{
m_options = opts;
FactorCollection &factorCollection = FactorCollection::Instance();
const std::string& factorDelimiter = StaticData::Instance().GetFactorDelimiter();

View File

@ -57,7 +57,7 @@ private:
std::vector<FactorType> m_inputFactorsVec, m_outputFactorsVec;
std::string m_filePath;
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
float ScorePhrase( const TargetPhrase& targetPhrase ) const;
float GetFromCacheOrScorePhrase( const TargetPhrase& targetPhrase ) const;

View File

@ -19,8 +19,8 @@ HyperParameterAsWeight::HyperParameterAsWeight(const std::string &line)
vector<float> weights = staticData.GetWeights(this);
staticData.m_options.search.stack_size = weights[0] * 1000;
staticData.m_options.search.beam_width = weights[1] * 10;
staticData.m_options->search.stack_size = weights[0] * 1000;
staticData.m_options->search.beam_width = weights[1] * 10;
}

View File

@ -23,9 +23,9 @@ InputFeature::InputFeature(const std::string &line)
s_instance = this;
}
void InputFeature::Load(AllOptions const& opts)
void InputFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
const PhraseDictionary *pt = PhraseDictionary::GetColl()[0];
const PhraseDictionaryTreeAdaptor *ptBin = dynamic_cast<const PhraseDictionaryTreeAdaptor*>(pt);

View File

@ -23,7 +23,7 @@ public:
InputFeature(const std::string &line);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
void SetParameter(const std::string& key, const std::string& value);

View File

@ -8,7 +8,7 @@ InternalTree::InternalTree(const std::string & line, size_t start, size_t len, c
{
std::vector<FactorType> const& oFactors
= StaticData::Instance().options().output.factor_order;
= StaticData::Instance().options()->output.factor_order;
if (len > 0) {
m_value.CreateFromString(Output, oFactors, StringPiece(line).substr(start, len),
nonterminal);
@ -22,7 +22,7 @@ InternalTree::InternalTree(const std::string & line, const bool nonterminal)
if (found == line.npos) {
m_value.CreateFromString(Output,
StaticData::Instance().options().output.factor_order,
StaticData::Instance().options()->output.factor_order,
line, nonterminal);
} else {
AddSubTree(line, 0);
@ -50,7 +50,7 @@ size_t InternalTree::AddSubTree(const std::string & line, size_t pos)
} else {
if (len > 0) {
m_value.CreateFromString(Output,
StaticData::Instance().options().output.factor_order,
StaticData::Instance().options()->output.factor_order,
StringPiece(line).substr(oldpos, len), false);
has_value = true;
}
@ -59,7 +59,7 @@ size_t InternalTree::AddSubTree(const std::string & line, size_t pos)
} else if (token == ' ' || token == ']') {
if (len > 0 && !has_value) {
m_value.CreateFromString(Output,
StaticData::Instance().options().output.factor_order,
StaticData::Instance().options()->output.factor_order,
StringPiece(line).substr(oldpos, len), true);
has_value = true;
} else if (len > 0) {
@ -90,7 +90,7 @@ std::string InternalTree::GetString(bool start) const
ret += "[";
}
ret += m_value.GetString(StaticData::Instance().options().output.factor_order, false);
ret += m_value.GetString(StaticData::Instance().options()->output.factor_order, false);
for (std::vector<TreePointer>::const_iterator it = m_children.begin(); it != m_children.end(); ++it) {
ret += (*it)->GetString(false);
}

View File

@ -84,8 +84,9 @@ LexicalReordering::
void
LexicalReordering::
Load(AllOptions const& opts)
Load(AllOptions::ptr const& opts)
{
m_options = opts;
typedef LexicalReorderingTable LRTable;
if (m_filePath.size())
m_table.reset(LRTable::LoadAvailable(m_filePath, m_factorsF,

View File

@ -33,7 +33,7 @@ class LexicalReordering : public StatefulFeatureFunction
public:
LexicalReordering(const std::string &line);
virtual ~LexicalReordering();
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
virtual
bool

View File

@ -159,8 +159,9 @@ void Model1Feature::SetParameter(const std::string& key, const std::string& valu
}
}
void Model1Feature::Load(AllOptions const& opts)
void Model1Feature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
FEATUREVERBOSE(2, GetScoreProducerDescription() << ": Loading source vocabulary from file " << m_fileNameVcbS << " ...");
Model1Vocabulary vcbS;
vcbS.Load(m_fileNameVcbS);

View File

@ -99,7 +99,7 @@ private:
Model1LexicalTable m_model1;
const Factor* m_emptyWord;
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
// cache
mutable boost::unordered_map<const InputType*, boost::unordered_map<const Factor*, float> > m_cache;

View File

@ -35,8 +35,9 @@ void OpSequenceModel :: readLanguageModel(const char *lmFile)
}
void OpSequenceModel::Load(AllOptions const& opts)
void OpSequenceModel::Load(AllOptions::ptr const& opts)
{
m_options = opts;
readLanguageModel(m_lmPath.c_str());
}

View File

@ -25,7 +25,7 @@ public:
~OpSequenceModel();
void readLanguageModel(const char *);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
FFState* EvaluateWhenApplied(
const Hypothesis& cur_hypo,

View File

@ -75,8 +75,9 @@ void PhraseOrientationFeature::SetParameter(const std::string& key, const std::s
}
void PhraseOrientationFeature::Load(AllOptions const& opts)
void PhraseOrientationFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
if ( !m_filenameTargetWordList.empty() ) {
LoadWordList(m_filenameTargetWordList,m_targetWordList);
m_useTargetWordList = true;

View File

@ -289,7 +289,7 @@ public:
void SetParameter(const std::string& key, const std::string& value);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
void EvaluateInIsolation(const Phrase &source
, const TargetPhrase &targetPhrase

View File

@ -65,8 +65,9 @@ void PhrasePairFeature::SetParameter(const std::string& key, const std::string&
}
}
void PhrasePairFeature::Load(AllOptions const& opts)
void PhrasePairFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
if (m_domainTrigger) {
// domain trigger terms for each input document
ifstream inFileSource(m_filePathSource.c_str());

View File

@ -44,7 +44,7 @@ class PhrasePairFeature: public StatelessFeatureFunction
public:
PhrasePairFeature(const std::string &line);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
void SetParameter(const std::string& key, const std::string& value);
bool IsUseable(const FactorMask &mask) const;

View File

@ -14,11 +14,11 @@ RuleScope::RuleScope(const std::string &line)
{
}
bool IsAmbiguous(const Word &word, bool sourceSyntax)
{
const Word &inputDefaultNonTerminal = StaticData::Instance().GetInputDefaultNonTerminal();
return word.IsNonTerminal() && (!sourceSyntax || word == inputDefaultNonTerminal);
}
// bool IsAmbiguous(const Word &word, bool sourceSyntax)
// {
// const Word &inputDefaultNonTerminal = StaticData::Instance().GetInputDefaultNonTerminal();
// return word.IsNonTerminal() && (!sourceSyntax || word == inputDefaultNonTerminal);
// }
void RuleScope::EvaluateInIsolation(const Phrase &source
, const TargetPhrase &targetPhrase

View File

@ -52,8 +52,8 @@ bool SoftMatchingFeature::Load(const std::string& filePath)
}
Word LHS, RHS;
LHS.CreateFromString(Output, SD.options().output.factor_order, tokens[0], true);
RHS.CreateFromString(Output, SD.options().output.factor_order, tokens[1], true);
LHS.CreateFromString(Output, SD.options()->output.factor_order, tokens[0], true);
RHS.CreateFromString(Output, SD.options()->output.factor_order, tokens[1], true);
m_softMatches[RHS[0]->GetId()].push_back(LHS);
GetOrSetFeatureName(RHS, LHS);
@ -125,7 +125,7 @@ const std::string& SoftMatchingFeature::GetOrSetFeatureName(const Word& RHS, con
#endif
std::string &name = m_nameCache[RHS[0]->GetId()][LHS[0]->GetId()];
const std::vector<FactorType> & oFactors
= StaticData::Instance().options().output.factor_order;
= StaticData::Instance().options()->output.factor_order;
std::string LHS_string = LHS.GetString(oFactors, false);
std::string RHS_string = RHS.GetString(oFactors, false);
name = LHS_string + "->" + RHS_string;

View File

@ -88,8 +88,9 @@ void SoftSourceSyntacticConstraintsFeature::SetParameter(const std::string& key,
}
}
void SoftSourceSyntacticConstraintsFeature::Load(AllOptions const& opts)
void SoftSourceSyntacticConstraintsFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
// don't change the loading order!
LoadSourceLabelSet();
if (!m_coreSourceLabelSetFile.empty()) {
@ -98,6 +99,7 @@ void SoftSourceSyntacticConstraintsFeature::Load(AllOptions const& opts)
if (!m_targetSourceLHSJointCountFile.empty()) {
LoadTargetSourceLeftHandSideJointCountFile();
}
// m_output_default_nonterminal = opts->syntax.output_default_non_terminal;
}
void SoftSourceSyntacticConstraintsFeature::LoadSourceLabelSet()
@ -311,8 +313,8 @@ void SoftSourceSyntacticConstraintsFeature::EvaluateWithSourceContext(const Inpu
std::vector<float> newScores(m_numScoreComponents,0);
const TreeInput& treeInput = static_cast<const TreeInput&>(input);
const StaticData& staticData = StaticData::Instance();
const Word& outputDefaultNonTerminal = staticData.GetOutputDefaultNonTerminal();
// const StaticData& staticData = StaticData::Instance();
// const Word& outputDefaultNonTerminal = staticData.GetOutputDefaultNonTerminal();
size_t nNTs = 1;
bool treeInputMismatchLHSBinary = true;
@ -365,7 +367,7 @@ void SoftSourceSyntacticConstraintsFeature::EvaluateWithSourceContext(const Inpu
for (NonTerminalSet::const_iterator treeInputLabelsIt = treeInputLabels.begin();
treeInputLabelsIt != treeInputLabels.end(); ++treeInputLabelsIt) {
if (*treeInputLabelsIt != outputDefaultNonTerminal) {
if (*treeInputLabelsIt != m_options->syntax.output_default_non_terminal) {
boost::unordered_map<const Factor*,size_t>::const_iterator foundTreeInputLabel
= m_sourceLabelIndexesByFactor.find((*treeInputLabelsIt)[0]);
if (foundTreeInputLabel != m_sourceLabelIndexesByFactor.end()) {
@ -387,7 +389,7 @@ void SoftSourceSyntacticConstraintsFeature::EvaluateWithSourceContext(const Inpu
for (NonTerminalSet::const_iterator treeInputLabelsIt = treeInputLabels.begin();
treeInputLabelsIt != treeInputLabels.end(); ++treeInputLabelsIt) {
if (*treeInputLabelsIt != outputDefaultNonTerminal) {
if (*treeInputLabelsIt != m_options->syntax.output_default_non_terminal) {
boost::unordered_map<const Factor*,size_t>::const_iterator foundTreeInputLabel
= m_sourceLabelIndexesByFactor.find((*treeInputLabelsIt)[0]);
if (foundTreeInputLabel != m_sourceLabelIndexesByFactor.end()) {
@ -568,7 +570,9 @@ void SoftSourceSyntacticConstraintsFeature::EvaluateWithSourceContext(const Inpu
}
if ( treeInputLabelsLHS.size() == 0 ) {
scoreBreakdown.PlusEquals(this,
"LHSPAIR_" + targetLHS->GetString().as_string() + "_" + outputDefaultNonTerminal[0]->GetString().as_string(),
"LHSPAIR_" + targetLHS->GetString().as_string() + "_"
+ m_options->syntax.output_default_non_terminal[0]
->GetString().as_string(),
1);
if (!m_targetSourceLHSJointCountFile.empty()) {
t2sLabelsScore = TransformScore(m_floor);

View File

@ -31,7 +31,7 @@ public:
void SetParameter(const std::string& key, const std::string& value);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
void EvaluateInIsolation(const Phrase &source
, const TargetPhrase &targetPhrase
@ -101,6 +101,7 @@ protected:
std::pair<float,float> GetLabelPairProbabilities(const Factor* target,
const size_t source) const;
// Word m_output_default_nonterminal;
};

View File

@ -47,11 +47,12 @@ void SourceGHKMTreeInputMatchFeature::EvaluateWithSourceContext(const InputType
const Word& lhsLabel = targetPhrase.GetTargetLHS();
const StaticData& staticData = StaticData::Instance();
const Word& outputDefaultNonTerminal = staticData.GetOutputDefaultNonTerminal();
std::vector<float> newScores(m_numScoreComponents,0.0); // m_numScoreComponents == 2 // first fires for matches, second for mismatches
std::vector<float> newScores(m_numScoreComponents,0.0);
// m_numScoreComponents == 2 // first fires for matches, second for mismatches
if ( (treeInputLabels.find(lhsLabel) != treeInputLabels.end()) && (lhsLabel != outputDefaultNonTerminal) ) {
if ( (treeInputLabels.find(lhsLabel) != treeInputLabels.end())
&& (lhsLabel != m_options->syntax.output_default_non_terminal) ) {
// match
newScores[0] = 1.0;
} else {
@ -62,6 +63,13 @@ void SourceGHKMTreeInputMatchFeature::EvaluateWithSourceContext(const InputType
scoreBreakdown.PlusEquals(this, newScores);
}
void
SourceGHKMTreeInputMatchFeature::
Load(AllOptions::ptr const& opts)
{
m_options = opts;
// m_output_default_nonterminal = opts->syntax.output_default_non_terminal;
}
}

View File

@ -1,6 +1,7 @@
#pragma once
#include "StatelessFeatureFunction.h"
#include "moses/parameters/AllOptions.h"
namespace Moses
{
@ -8,6 +9,7 @@ namespace Moses
// assumes that source-side syntax labels are stored in the target non-terminal field of the rules
class SourceGHKMTreeInputMatchFeature : public StatelessFeatureFunction
{
// Word m_output_default_nonterminal;
public:
SourceGHKMTreeInputMatchFeature(const std::string &line);
@ -40,6 +42,7 @@ public:
void EvaluateWhenApplied(const ChartHypothesis &hypo,
ScoreComponentCollection* accumulator) const {};
void Load(AllOptions::ptr const& opts);
};

View File

@ -36,8 +36,9 @@ void SourceWordDeletionFeature::SetParameter(const std::string& key, const std::
}
}
void SourceWordDeletionFeature::Load(AllOptions const& opts)
void SourceWordDeletionFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
if (m_filename.empty())
return;

View File

@ -23,7 +23,7 @@ private:
public:
SourceWordDeletionFeature(const std::string &line);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
bool IsUseable(const FactorMask &mask) const;

View File

@ -48,8 +48,9 @@ void TargetBigramFeature::SetParameter(const std::string& key, const std::string
}
}
void TargetBigramFeature::Load(AllOptions const& opts)
void TargetBigramFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
if (m_filePath == "*")
return ; //allow all
ifstream inFile(m_filePath.c_str());

View File

@ -34,7 +34,7 @@ class TargetBigramFeature : public StatefulFeatureFunction
public:
TargetBigramFeature(const std::string &line);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
bool IsUseable(const FactorMask &mask) const;

View File

@ -74,8 +74,9 @@ void TargetNgramFeature::SetParameter(const std::string& key, const std::string&
}
}
void TargetNgramFeature::Load(AllOptions const& opts)
void TargetNgramFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
if (m_file == "") return; //allow all, for now
if (m_file == "*") return; //allow all

View File

@ -203,7 +203,7 @@ class TargetNgramFeature : public StatefulFeatureFunction
public:
TargetNgramFeature(const std::string &line);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
bool IsUseable(const FactorMask &mask) const;

View File

@ -34,8 +34,9 @@ void TargetWordInsertionFeature::SetParameter(const std::string& key, const std:
}
}
void TargetWordInsertionFeature::Load(AllOptions const& opts)
void TargetWordInsertionFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
if (m_filename.empty())
return;

View File

@ -25,7 +25,7 @@ public:
bool IsUseable(const FactorMask &mask) const;
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
virtual void EvaluateInIsolation(const Phrase &source
, const TargetPhrase &targetPhrase

View File

@ -8,8 +8,9 @@
namespace Moses
{
void TreeStructureFeature::Load(AllOptions const& opts)
void TreeStructureFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
// syntactic constraints can be hooked in here.
m_constraints = NULL;

View File

@ -74,7 +74,7 @@ public:
int /* featureID - used to index the state in the previous hypotheses */,
ScoreComponentCollection* accumulator) const;
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
};

View File

@ -87,8 +87,9 @@ void WordTranslationFeature::SetParameter(const std::string& key, const std::str
}
}
void WordTranslationFeature::Load(AllOptions const& opts)
void WordTranslationFeature::Load(AllOptions::ptr const& opts)
{
m_options = opts;
// load word list for restricted feature set
if (m_filePathSource.empty()) {
return;

View File

@ -40,7 +40,7 @@ public:
void SetParameter(const std::string& key, const std::string& value);
bool IsUseable(const FactorMask &mask) const;
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
void EvaluateWithSourceContext(const InputType &input
, const InputPath &inputPath

View File

@ -18,9 +18,7 @@ namespace Moses
//! populate this InputType with data from in stream
int ForestInput::
Read(std::istream &in,
std::vector<FactorType> const& factorOrder,
AllOptions const& opts)
Read(std::istream &in)
{
using Syntax::F2S::Forest;
@ -48,7 +46,7 @@ Read(std::istream &in,
std::getline(in, line);
} else {
do {
ParseHyperedgeLine(line, factorOrder);
ParseHyperedgeLine(line);
std::getline(in, line);
} while (line != "");
}
@ -58,7 +56,7 @@ Read(std::istream &in,
// not sure ForestInput needs to.
std::stringstream strme;
strme << "<s> " << sentence << " </s>" << std::endl;
Sentence::Read(strme, factorOrder, opts);
Sentence::Read(strme);
// Find the maximum end position of any vertex (0 if forest is empty).
std::size_t maxEnd = FindMaxEnd(*m_forest);
@ -70,6 +68,9 @@ Read(std::istream &in,
assert(topVertices.size() >= 1);
}
const std::vector<FactorType>& factorOrder = m_options->input.factor_order;
// Add <s> vertex.
Forest::Vertex *startSymbol = NULL;
{
@ -122,7 +123,9 @@ Read(std::istream &in,
return 1;
}
Syntax::F2S::Forest::Vertex *ForestInput::AddOrDeleteVertex(Forest::Vertex *v)
Syntax::F2S::Forest::Vertex*
ForestInput::
AddOrDeleteVertex(Forest::Vertex *v)
{
std::pair<VertexSet::iterator, bool> ret = m_vertexSet.insert(v);
if (ret.second) {
@ -172,14 +175,16 @@ void ForestInput::FindTopVertices(Forest &forest,
std::back_inserter(topVertices));
}
void ForestInput::ParseHyperedgeLine(
const std::string &line, const std::vector<FactorType>& factorOrder)
void
ForestInput::
ParseHyperedgeLine(const std::string &line)
{
const std::vector<FactorType>& factorOrder = m_options->input.factor_order;
using Syntax::F2S::Forest;
const util::AnyCharacter delimiter(" \t");
util::TokenIter<util::AnyCharacter, true> p(line, delimiter);
Forest::Vertex *v = AddOrDeleteVertex(ParseVertex(*p, factorOrder));
Forest::Vertex *v = AddOrDeleteVertex(ParseVertex(*p));
Forest::Hyperedge *e = new Forest::Hyperedge();
e->head = v;
++p;
@ -188,7 +193,7 @@ void ForestInput::ParseHyperedgeLine(
//throw Exception("");
}
for (++p; *p != "|||"; ++p) {
v = ParseVertex(*p, factorOrder);
v = ParseVertex(*p);
if (!v->pvertex.symbol.IsNonTerminal()) {
// Egret does not give start/end for terminals.
v->pvertex.span = Range(e->head->pvertex.span.GetStartPos(),
@ -203,11 +208,11 @@ void ForestInput::ParseHyperedgeLine(
e->head->incoming.push_back(e);
}
Syntax::F2S::Forest::Vertex *ForestInput::ParseVertex(
const StringPiece &s, const std::vector<FactorType>& factorOrder)
Syntax::F2S::Forest::Vertex*
ForestInput::ParseVertex(const StringPiece &s)
{
using Syntax::F2S::Forest;
const std::vector<FactorType>& factorOrder = m_options->input.factor_order;
Word symbol;
std::size_t pos = s.rfind('[');
if (pos == std::string::npos) {

View File

@ -29,9 +29,7 @@ public:
//! populate this InputType with data from in stream
virtual int
Read(std::istream& in,
std::vector<FactorType> const& factorOrder,
AllOptions const& opts);
Read(std::istream& in);
//! Output debugging info to stream out
virtual void Print(std::ostream&) const;
@ -76,11 +74,9 @@ private:
void FindTopVertices(Forest &, std::vector<Forest::Vertex *> &);
void ParseHyperedgeLine(const std::string &,
const std::vector<FactorType> &);
void ParseHyperedgeLine(const std::string &);
Forest::Vertex *ParseVertex(const StringPiece &,
const std::vector<FactorType> &);
Forest::Vertex *ParseVertex(const StringPiece &);
boost::shared_ptr<Forest> m_forest;
Forest::Vertex *m_rootVertex;

View File

@ -44,8 +44,9 @@ GenerationDictionary::GenerationDictionary(const std::string &line)
ReadParameters();
}
void GenerationDictionary::Load(AllOptions const& opts)
void GenerationDictionary::Load(AllOptions::ptr const& opts)
{
m_options = opts;
FactorCollection &factorCollection = FactorCollection::Instance();
const size_t numFeatureValuesInConfig = this->GetNumScoreComponents();

View File

@ -62,7 +62,7 @@ public:
virtual ~GenerationDictionary();
//! load data file
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
/** number of unique input entries in the generation table.
* NOT the number of lines in the generation table

View File

@ -56,7 +56,7 @@ WriteHypos(const ChartHypothesisCollection& hypos,
ChartHypothesisCollection::const_iterator iter;
for (iter = hypos.begin() ; iter != hypos.end() ; ++iter) {
ChartHypothesis &mainHypo = **iter;
if (StaticData::Instance().options().output.DontPruneSearchGraph ||
if (StaticData::Instance().options()->output.DontPruneSearchGraph ||
reachable.find(mainHypo.GetId()) != reachable.end()) {
(*m_out) << m_lineNumber << " " << mainHypo << endl;
}
@ -90,7 +90,7 @@ WriteHypos(const ChartHypothesisCollection& hypos,
ChartHypothesisCollection::const_iterator iter;
for (iter = hypos.begin() ; iter != hypos.end() ; ++iter) {
const ChartHypothesis* mainHypo = *iter;
if (!StaticData::Instance().options().output.DontPruneSearchGraph &&
if (!StaticData::Instance().options()->output.DontPruneSearchGraph &&
reachable.find(mainHypo->GetId()) == reachable.end()) {
//Ignore non reachable nodes
continue;

View File

@ -78,20 +78,19 @@ IOWrapper::IOWrapper(AllOptions const& opts)
Parameter const& P = staticData.GetParameter();
// context buffering for context-sensitive decoding
m_look_ahead = staticData.options().context.look_ahead;
m_look_back = staticData.options().context.look_back;
m_inputType = staticData.options().input.input_type;
m_look_ahead = staticData.options()->context.look_ahead;
m_look_back = staticData.options()->context.look_back;
m_inputType = staticData.options()->input.input_type;
UTIL_THROW_IF2((m_look_ahead || m_look_back) && m_inputType != SentenceInput,
"Context-sensitive decoding currently works only with sentence input.");
m_currentLine = staticData.GetStartTranslationId();
m_inputFactorOrder = &staticData.options().input.factor_order;
m_inputFactorOrder = &staticData.options()->input.factor_order;
size_t nBestSize = staticData.options().nbest.nbest_size;
string nBestFilePath = staticData.options().nbest.output_file_path;
size_t nBestSize = staticData.options()->nbest.nbest_size;
string nBestFilePath = staticData.options()->nbest.output_file_path;
staticData.GetParameter().SetParameter<string>(m_inputFilePath, "input-file", "");
if (m_inputFilePath.empty()) {
@ -130,8 +129,8 @@ IOWrapper::IOWrapper(AllOptions const& opts)
P.SetParameter<string>(path, "output-word-graph", "");
if (path.size()) m_wordGraphCollector.reset(new OutputCollector(path));
size_t latticeSamplesSize = staticData.options().output.lattice_sample_size;
string latticeSamplesFile = staticData.options().output.lattice_sample_filepath;
size_t latticeSamplesSize = staticData.options()->output.lattice_sample_size;
string latticeSamplesFile = staticData.options()->output.lattice_sample_filepath;
if (latticeSamplesSize) {
m_latticeSamplesCollector.reset(new OutputCollector(latticeSamplesFile));
if (m_latticeSamplesCollector->OutputIsCout()) {

View File

@ -219,7 +219,6 @@ boost::shared_ptr<InputType>
IOWrapper::
BufferInput()
{
AllOptions const& opts = StaticData::Instance().options();
boost::shared_ptr<itype> source;
boost::shared_ptr<InputType> ret;
if (m_future_input.size()) {
@ -228,13 +227,13 @@ BufferInput()
m_buffered_ahead -= ret->GetSize();
} else {
source.reset(new itype(m_options));
if (!source->Read(*m_inputStream, *m_inputFactorOrder, opts))
if (!source->Read(*m_inputStream))
return ret;
ret = source;
}
while (m_buffered_ahead < m_look_ahead) {
source.reset(new itype(m_options));
if (!source->Read(*m_inputStream, *m_inputFactorOrder, opts))
if (!source->Read(*m_inputStream))
break;
m_future_input.push_back(source);
m_buffered_ahead += source->GetSize();

View File

@ -208,7 +208,7 @@ Manager::Manager(ttasksptr const& ttask)
: BaseManager(ttask)
, cells_(m_source, ChartCellBaseFactory(), parser_)
, parser_(ttask, cells_)
, n_best_(search::NBestConfig(StaticData::Instance().options().nbest.nbest_size))
, n_best_(search::NBestConfig(StaticData::Instance().options()->nbest.nbest_size))
{ }
Manager::~Manager()
@ -232,8 +232,8 @@ PopulateBest(const Model &model, const std::vector<lm::WordIndex> &words, Best &
const StaticData &data = StaticData::Instance();
const float lm_weight = data.GetWeights(&abstract)[0];
const float oov_weight = abstract.OOVFeatureEnabled() ? data.GetWeights(&abstract)[1] : 0.0;
size_t cpl = data.options().cube.pop_limit;
size_t nbs = data.options().nbest.nbest_size;
size_t cpl = data.options()->cube.pop_limit;
size_t nbs = data.options()->nbest.nbest_size;
search::Config config(lm_weight * log_10, cpl, search::NBestConfig(nbs));
search::Context<Model> context(config, model);
@ -261,7 +261,7 @@ PopulateBest(const Model &model, const std::vector<lm::WordIndex> &words, Best &
template <class Model> void Manager::LMCallback(const Model &model, const std::vector<lm::WordIndex> &words)
{
std::size_t nbest = StaticData::Instance().options().nbest.nbest_size;
std::size_t nbest = StaticData::Instance().options()->nbest.nbest_size;
if (nbest <= 1) {
search::History ret = PopulateBest(model, words, single_best_);
if (ret) {

View File

@ -32,6 +32,7 @@ namespace Moses
InputType::InputType(AllOptions::ptr const& opts, long translationId)
: m_options(opts)
, m_translationId(translationId)
, m_reorderingConstraint(opts->reordering.max_distortion)
{
m_frontSpanCoveredLength = 0;
m_sourceCompleted.resize(0);

View File

@ -190,9 +190,10 @@ public:
//! populate this InputType with data from in stream
virtual int
Read(std::istream& in,
std::vector<FactorType> const& factorOrder,
AllOptions const& opts) =0;
Read(std::istream& in) = 0;
// ,
// std::vector<FactorType> const& factorOrder,
// AllOptions const& opts) =0;
//! Output debugging info to stream out
virtual void Print(std::ostream&) const =0;

View File

@ -20,8 +20,9 @@ BilingualLM::BilingualLM(const std::string &line)
}
void BilingualLM::Load(AllOptions const& opts)
void BilingualLM::Load(AllOptions::ptr const& opts)
{
m_options = opts;
ReadParameters();
loadModel();
}

View File

@ -117,7 +117,7 @@ public:
return new BilingualLMState(0);
}
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
FFState* EvaluateWhenApplied(
const Hypothesis& cur_hypo,

View File

@ -28,7 +28,7 @@ public:
LanguageModelDALM(const std::string &line);
virtual ~LanguageModelDALM();
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
virtual const FFState *EmptyHypothesisState(const InputType &/*input*/) const;

View File

@ -96,7 +96,7 @@ bool LanguageModelIRST::IsUseable(const FactorMask &mask) const
return ret;
}
void LanguageModelIRST::Load(AllOptions const& opts)
void LanguageModelIRST::Load(AllOptions::ptr const& opts)
{
FactorCollection &factorCollection = FactorCollection::Instance();

View File

@ -88,7 +88,7 @@ public:
bool IsUseable(const FactorMask &mask) const;
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
const FFState *EmptyHypothesisState(const InputType &/*input*/) const;
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = NULL) const;

View File

@ -54,7 +54,7 @@ protected:
public:
LanguageModelMaxEntSRI(const std::string &line);
~LanguageModelMaxEntSRI();
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = 0) const;
};

View File

@ -27,7 +27,7 @@ public:
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = 0) const;
virtual void Load(AllOptions const& opts);
virtual void Load(AllOptions::ptr const& opts);
};

View File

@ -208,7 +208,7 @@ public:
int /* featureID - used to index the state in the previous hypotheses */,
ScoreComponentCollection* accumulator) const;
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
// Iterator-class that yields all children of a node; if child is virtual node of binarized tree, its children are yielded instead.
class UnbinarizedChildren

View File

@ -39,7 +39,7 @@ public:
LanguageModelRandLM(const std::string &line);
~LanguageModelRandLM();
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = NULL) const;
void InitializeForInput(ttasksptr const& ttask);
void CleanUpAfterSentenceProcessing(const InputType& source);

View File

@ -54,7 +54,7 @@ protected:
public:
LanguageModelSRI(const std::string &line);
~LanguageModelSRI();
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = 0) const;
};

View File

@ -24,7 +24,7 @@ public:
void SetParameter(const std::string& key, const std::string& value);
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
virtual LMResult GetValue(
const std::vector<const Word*> &contextFactor,

View File

@ -38,9 +38,8 @@ MockHypothesisGuard
m_uwp("UnknownWordPenalty"), m_dist("Distortion")
{
BOOST_CHECK_EQUAL(alignments.size(), targetSegments.size());
std::vector<Moses::FactorType> factors(1,0);
AllOptions::ptr opts(new AllOptions(StaticData::Instance().options()));
m_sentence.reset(new Sentence(opts,0, sourceSentence, &factors));
AllOptions::ptr opts(new AllOptions(*StaticData::Instance().options()));
m_sentence.reset(new Sentence(opts, 0, sourceSentence));
m_ttask = TranslationTask::create(m_sentence);
m_manager.reset(new Manager(m_ttask));
@ -59,16 +58,14 @@ MockHypothesisGuard
for (; ti != targetSegments.end() && ai != alignments.end(); ++ti,++ai) {
Hypothesis* prevHypo = m_hypothesis;
Range range(ai->first,ai->second);
const Bitmap &newBitmap = bitmaps.GetBitmap(prevHypo->GetWordsBitmap(),
range);
const Bitmap &newBitmap = bitmaps.GetBitmap(prevHypo->GetWordsBitmap(), range);
m_targetPhrases.push_back(TargetPhrase(NULL));
// m_targetPhrases.back().CreateFromString(Input, factors, *ti, "|", NULL);
vector<FactorType> const& factors = opts->output.factor_order;
m_targetPhrases.back().CreateFromString(Input, factors, *ti, NULL);
m_toptions.push_back(new TranslationOption
(range,m_targetPhrases.back()));
m_hypothesis = new Hypothesis(*prevHypo, *m_toptions.back(), newBitmap, m_manager->GetNextHypoId());
m_hypothesis = new Hypothesis(*prevHypo, *m_toptions.back(), newBitmap,
m_manager->GetNextHypoId());
}

View File

@ -119,7 +119,7 @@ Phrase::
GetStringRep(vector<FactorType> const& factorsToPrint,
AllOptions const* opts) const
{
if (!opts) opts = &StaticData::Instance().options();
if (!opts) opts = StaticData::Instance().options().get();
bool markUnk = opts->unk.mark;
util::StringStream strme;
for (size_t pos = 0 ; pos < GetSize() ; pos++) {

View File

@ -187,7 +187,9 @@ bool ReorderingConstraint::Check( const Bitmap &bitmap, size_t startPos, size_t
// check, if we are setting us up for a dead end due to distortion limits
size_t distortionLimit = (size_t)StaticData::Instance().GetMaxDistortion();
// size_t distortionLimit = (size_t)StaticData::Instance().GetMaxDistortion();
size_t distortionLimit = m_max_distortion;
if (startPos != firstGapPos && endZone-firstGapPos >= distortionLimit) {
VERBOSE(3," dead end due to distortion limit" << std::endl);
return false;

View File

@ -53,11 +53,16 @@ protected:
size_t *m_localWall; /**< flag for each word if it is a local wall */
std::vector< std::vector< size_t > > m_zone; /** zones that limit reordering */
bool m_active; /**< flag indicating, if there are any active constraints */
int m_max_distortion;
public:
//! create ReorderingConstraint of length size and initialise to zero
ReorderingConstraint() :m_wall(NULL),m_localWall(NULL),m_active(false) {}
ReorderingConstraint(int max_distortion)
: m_wall(NULL)
, m_localWall(NULL)
, m_active(false)
, m_max_distortion(max_distortion)
{}
//! destructer
~ReorderingConstraint() {
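Related wiring from earlier in this commit: InputType's constructor now initializes m_reorderingConstraint with opts->reordering.max_distortion, so Check() no longer queries StaticData for the distortion limit. A rough, self-contained sketch of that flow under simplified assumptions (stand-in types, not the real Moses classes):

#include <cstddef>

// Simplified stand-ins for illustration only.
struct ReorderingConstraint {
  int m_max_distortion;
  explicit ReorderingConstraint(int max_distortion)
    : m_max_distortion(max_distortion) {}

  // mirrors the dead-end test shown above, but without the StaticData lookup
  bool Check(std::size_t firstGapPos, std::size_t startPos, std::size_t endZone) const {
    std::size_t distortionLimit = (std::size_t)m_max_distortion;
    if (startPos != firstGapPos && endZone - firstGapPos >= distortionLimit)
      return false;  // dead end due to distortion limit
    return true;
  }
};

struct ReorderingOptions { int max_distortion; };

struct InputType {
  ReorderingConstraint m_reorderingConstraint;
  explicit InputType(ReorderingOptions const& reordering)
    : m_reorderingConstraint(reordering.max_distortion) {}  // limit injected at construction
};

int main() {
  ReorderingOptions ro;
  ro.max_distortion = 6;
  InputType input(ro);
  return input.m_reorderingConstraint.Check(2, 3, 10) ? 0 : 1;
}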

View File

@ -43,7 +43,7 @@ RuleCube::RuleCube(const ChartTranslationOptions &transOpt,
{
RuleCubeItem *item = new RuleCubeItem(transOpt, allChartCells);
m_covered.insert(item);
if (StaticData::Instance().options().cube.lazy_scoring) {
if (StaticData::Instance().options()->cube.lazy_scoring) {
item->EstimateScore();
} else {
item->CreateHypothesis(transOpt, manager);
@ -91,7 +91,7 @@ void RuleCube::CreateNeighbor(const RuleCubeItem &item, int dimensionIndex,
if (!result.second) {
delete newItem; // already seen it
} else {
if (StaticData::Instance().options().cube.lazy_scoring) {
if (StaticData::Instance().options()->cube.lazy_scoring) {
newItem->EstimateScore();
} else {
newItem->CreateHypothesis(m_transOpt, manager);

View File

@ -50,7 +50,7 @@ ChartHypothesis *RuleCubeQueue::Pop()
// pop the most promising item from the cube and get the corresponding
// hypothesis
RuleCubeItem *item = cube->Pop(m_manager);
if (StaticData::Instance().options().cube.lazy_scoring) {
if (StaticData::Instance().options()->cube.lazy_scoring) {
item->CreateHypothesis(cube->GetTranslationOption(), m_manager);
}
ChartHypothesis *hypo = item->ReleaseHypothesis();

View File

@ -43,9 +43,8 @@ namespace Moses
Sentence::
Sentence(AllOptions::ptr const& opts) : Phrase(0) , InputType(opts)
{
const StaticData& SD = StaticData::Instance();
if (SD.IsSyntax())
m_defaultLabelSet.insert(SD.GetInputDefaultNonTerminal());
if (is_syntax(opts->search.algo))
m_defaultLabelSet.insert(opts->syntax.input_default_non_terminal);
}
Sentence::
@ -146,36 +145,30 @@ aux_interpret_dlt(string& line) // whatever DLT means ... --- UG
void
Sentence::
aux_interpret_xml(AllOptions const& opts, std::string& line, std::vector<size_t> & xmlWalls,
aux_interpret_xml(std::string& line, std::vector<size_t> & xmlWalls,
std::vector<std::pair<size_t, std::string> >& placeholders)
{
// parse XML markup in translation line
const StaticData &SD = StaticData::Instance();
using namespace std;
if (opts.input.xml_policy != XmlPassThrough) {
int offset = SD.IsSyntax() ? 1 : 0;
bool OK = ProcessAndStripXMLTags(opts, line, m_xmlOptions,
if (m_options->input.xml_policy != XmlPassThrough) {
bool OK = ProcessAndStripXMLTags(*m_options, line,
m_xmlOptions,
m_reorderingConstraint,
xmlWalls, placeholders, offset,
SD.GetXmlBrackets().first,
SD.GetXmlBrackets().second);
xmlWalls, placeholders);
UTIL_THROW_IF2(!OK, "Unable to parse XML in line: " << line);
}
}
void
Sentence::
init(AllOptions::ptr const& opts, string line, std::vector<FactorType> const& factorOrder)
init(string line)
{
using namespace std;
const StaticData &SD = StaticData::Instance();
m_frontSpanCoveredLength = 0;
m_sourceCompleted.resize(0);
if (SD.ContinuePartialTranslation())
if (m_options->input.continue_partial_translation)
aux_init_partial_translation(line);
line = Trim(line);
@ -183,28 +176,28 @@ init(AllOptions::ptr const& opts, string line, std::vector<FactorType> const& fa
aux_interpret_dlt(line); // some poorly documented cache-based stuff
// if sentences is specified as "<passthrough tag1=""/>"
if (SD.options().output.PrintPassThrough ||
SD.options().nbest.include_passthrough) {
if (m_options->output.PrintPassThrough ||m_options->nbest.include_passthrough) {
string pthru = PassthroughSGML(line,"passthrough");
this->SetPassthroughInformation(pthru);
}
vector<size_t> xmlWalls;
vector<pair<size_t, string> >placeholders;
aux_interpret_xml(*opts, line, xmlWalls, placeholders);
aux_interpret_xml(line, xmlWalls, placeholders);
Phrase::CreateFromString(Input, factorOrder, line, NULL);
Phrase::CreateFromString(Input, m_options->input.factor_order, line, NULL);
ProcessPlaceholders(placeholders);
if (SD.IsSyntax()) InitStartEndWord();
if (is_syntax(m_options->search.algo))
InitStartEndWord();
// now that we have final word positions in phrase (from
// CreateFromString), we can make input phrase objects to go with
// our XmlOptions and create TranslationOptions
// only fill the vector if we are parsing XML
if (opts->input.xml_policy != XmlPassThrough) {
if (m_options->input.xml_policy != XmlPassThrough) {
m_xmlCoverageMap.assign(GetSize(), false);
BOOST_FOREACH(XmlOption const* o, m_xmlOptions) {
Range const& r = o->range;
@ -217,7 +210,7 @@ init(AllOptions::ptr const& opts, string line, std::vector<FactorType> const& fa
m_reorderingConstraint.InitializeWalls(GetSize());
// set reordering walls, if "-monotone-at-punction" is set
if (SD.UseReorderingConstraint() && GetSize()) {
if (m_options->reordering.monotone_at_punct && GetSize()) {
Range r(0, GetSize()-1);
m_reorderingConstraint.SetMonotoneAtPunctuation(GetSubString(r));
}
@ -232,14 +225,12 @@ init(AllOptions::ptr const& opts, string line, std::vector<FactorType> const& fa
int
Sentence::
Read(std::istream& in,
const std::vector<FactorType>& factorOrder,
AllOptions const& opts)
Read(std::istream& in)
{
std::string line;
if (getline(in, line, '\n').eof())
return 0;
init(m_options, line, factorOrder);
init(line);
return 1;
}
@ -247,7 +238,7 @@ void
Sentence::
ProcessPlaceholders(const std::vector< std::pair<size_t, std::string> > &placeholders)
{
FactorType placeholderFactor = StaticData::Instance().options().input.placeholder_factor;
FactorType placeholderFactor = m_options->input.placeholder_factor;
if (placeholderFactor == NOT_FOUND) {
return;
}
@ -325,7 +316,7 @@ void Sentence::GetXmlTranslationOptions(std::vector <TranslationOption*> &list,
std::vector <ChartTranslationOptions*>
Sentence::
GetXmlChartTranslationOptions(AllOptions const& opts) const
GetXmlChartTranslationOptions() const
{
std::vector <ChartTranslationOptions*> ret;
@@ -333,7 +324,7 @@ GetXmlChartTranslationOptions(AllOptions const& opts) const
// this code is a copy of the one in Sentence.
//only fill the vector if we are parsing XML
if (opts.input.xml_policy != XmlPassThrough ) {
if (m_options->input.xml_policy != XmlPassThrough ) {
//TODO: needed to handle exclusive
//for (size_t i=0; i<GetSize(); i++) {
// m_xmlCoverageMap.push_back(false);
@@ -374,12 +365,10 @@ CreateFromString(vector<FactorType> const& FOrder, string const& phraseString)
}
Sentence::
Sentence(AllOptions::ptr const& opts, size_t const transId,
string stext, vector<FactorType> const* IFO)
Sentence(AllOptions::ptr const& opts, size_t const transId, string stext)
: InputType(opts, transId)
{
if (IFO) init(opts,stext, *IFO);
else init(opts, stext, opts->input.factor_order);
init(stext);
}
}
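
A Sentence now carries its own AllOptions::ptr, so init()/Read() no longer take a factor order or an options argument. Below is a minimal sketch of a read loop against the simplified interface; the driver function, its name, and the way the caller obtains the options pointer are assumptions for illustration, not part of this commit.

// Hypothetical driver for the simplified Sentence interface (sketch only).
// Factor order and XML policy are taken from the options held by the
// Sentence itself rather than passed into Read()/init().
#include <iostream>
#include "moses/Sentence.h"
#include "moses/parameters/AllOptions.h"

void read_all_sketch(std::istream& in, Moses::AllOptions::ptr const& opts)
{
  for (;;) {
    Moses::Sentence snt(opts);      // Sentence(AllOptions::ptr const&)
    if (!snt.Read(in)) break;       // Read(std::istream&) returns 0 at EOF
    snt.Print(std::cout);
  }
}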

View File

@@ -64,8 +64,8 @@ protected:
public:
Sentence(AllOptions::ptr const& opts);
Sentence(AllOptions::ptr const& opts, size_t const transId, std::string stext,
std::vector<FactorType> const* IFO = NULL);
Sentence(AllOptions::ptr const& opts, size_t const transId, std::string stext);
// std::vector<FactorType> const* IFO = NULL);
// Sentence(size_t const transId, std::string const& stext);
~Sentence();
@@ -94,11 +94,11 @@ public:
//! populates vector argument with XML force translation options for the specific range passed
void GetXmlTranslationOptions(std::vector<TranslationOption*> &list) const;
void GetXmlTranslationOptions(std::vector<TranslationOption*> &list, size_t startPos, size_t endPos) const;
std::vector<ChartTranslationOptions*> GetXmlChartTranslationOptions(AllOptions const& opts) const;
std::vector<ChartTranslationOptions*> GetXmlChartTranslationOptions() const;
virtual int
Read(std::istream& in, const std::vector<FactorType>& factorOrder,
AllOptions const& opts);
Read(std::istream& in);
// , const std::vector<FactorType>& factorOrder, AllOptions const& opts);
void Print(std::ostream& out) const;
@@ -115,9 +115,7 @@ public:
}
void
init(AllOptions::ptr const& opts, std::string line,
std::vector<FactorType> const& factorOrder);
void init(std::string line);
std::vector<std::map<std::string,std::string> > const&
GetDltMeta() const {
@@ -139,7 +137,7 @@ private:
void
aux_interpret_xml
(AllOptions const& opts, std::string& line, std::vector<size_t> & xmlWalls,
(std::string& line, std::vector<size_t> & xmlWalls,
std::vector<std::pair<size_t, std::string> >& placeholders);
void

View File

@@ -61,16 +61,11 @@ bool g_mosesDebug = false;
StaticData StaticData::s_instance;
StaticData::StaticData()
: m_sourceStartPosMattersForRecombination(false)
: m_options(new AllOptions)
, m_requireSortingAfterSourceContext(false)
// , m_isAlwaysCreateDirectTranslationOption(false)
, m_currentWeightSetting("default")
, m_treeStructure(NULL)
{
m_xmlBrackets.first="<";
m_xmlBrackets.second=">";
// memory pools
Phrase::InitializeMemPool();
}
@@ -123,34 +118,6 @@ StaticData
}
void
StaticData
::ini_input_options()
{
const PARAM_VEC *params;
m_parameter->SetParameter(m_continuePartialTranslation,
"continue-partial-translation", false );
// specify XML tags opening and closing brackets for XML option
params = m_parameter->GetParam("xml-brackets");
if (params && params->size()) {
std::vector<std::string> brackets = Tokenize(params->at(0));
if(brackets.size()!=2) {
cerr << "invalid xml-brackets value, must specify exactly 2 blank-delimited strings for XML tags opening and closing brackets" << endl;
exit(1);
}
m_xmlBrackets.first= brackets[0];
m_xmlBrackets.second=brackets[1];
VERBOSE(1,"XML tags opening and closing brackets for XML input are: "
<< m_xmlBrackets.first << " and " << m_xmlBrackets.second << endl);
}
m_parameter->SetParameter(m_defaultNonTermOnlyForEmptyRange,
"default-non-term-for-empty-range-only", false );
}
bool
StaticData
::ini_output_options()
@@ -161,8 +128,6 @@ StaticData
m_parameter->SetParameter(m_verboseLevel, "verbose", (size_t) 1);
m_parameter->SetParameter(m_includeLHSInSearchGraph,
"include-lhs-in-search-graph", false );
m_parameter->SetParameter<string>(m_outputUnknownsFile,
"output-unknowns", "");
@@ -218,7 +183,9 @@ bool StaticData::LoadData(Parameter *parameter)
const PARAM_VEC *params;
m_options.init(*parameter);
m_options->init(*parameter);
if (is_syntax(m_options->search.algo))
m_options->syntax.LoadNonTerminals(*parameter, FactorCollection::Instance());
if (IsSyntax())
LoadChartDecodingParameters();
@@ -229,7 +196,6 @@ bool StaticData::LoadData(Parameter *parameter)
m_parameter->SetParameter<string>(m_factorDelimiter, "factor-delimiter", "|");
m_parameter->SetParameter<size_t>(m_lmcache_cleanup_threshold, "clean-lm-cache", 1);
ini_input_options();
m_bookkeeping_options.init(*parameter);
if (!ini_output_options()) return false;
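
LoadData now asks is_syntax(m_options->search.algo) instead of the member IsSyntax(). A stand-alone sketch of such a helper follows; the include path and the exact set of algorithms covered by the real Moses helper are assumptions based on the enum values visible elsewhere in this diff.

// Sketch of a free-function check over the search-algorithm enum; the real
// is_syntax() in Moses may live in a different header and differ in detail.
#include "moses/TypeDef.h"   // assumed location of the SearchAlgorithm enum

inline bool is_syntax_sketch(Moses::SearchAlgorithm algo)
{
  using namespace Moses;
  return (algo == CYKPlus        || algo == ChartIncremental ||
          algo == SyntaxS2T      || algo == SyntaxT2S        ||
          algo == SyntaxT2S_SCFG || algo == SyntaxF2S);
}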
@@ -876,7 +842,7 @@ StaticData
// FIXME Does this make sense for F2S? Perhaps it should be changed once
// FIXME the pipeline uses RuleTable consistently.
SearchAlgorithm algo = m_options.search.algo;
SearchAlgorithm algo = m_options->search.algo;
if (algo == SyntaxS2T || algo == SyntaxT2S ||
algo == SyntaxT2S_SCFG || algo == SyntaxF2S) {
// Automatically override PhraseDictionary{Memory,Scope3}. This will

View File

@@ -72,7 +72,7 @@ private:
static StaticData s_instance;
protected:
Parameter *m_parameter;
AllOptions m_options;
boost::shared_ptr<AllOptions> m_options;
mutable ScoreComponentCollection m_allWeights;
@@ -95,31 +95,22 @@ protected:
size_t m_latticeSamplesSize;
std::string m_latticeSamplesFilePath;
// bool m_dropUnknown; //! false = treat unknown words as unknowns, and translate them as themselves; true = drop (ignore) them
// bool m_markUnknown; //! false = treat unknown words as unknowns, and translate them as themselves; true = mark and (ignore) them
// std::string m_unknownWordPrefix;
// std::string m_unknownWordSuffix;
bool m_wordDeletionEnabled;
// bool m_wordDeletionEnabled;
// bool m_disableDiscarding;
bool m_printAllDerivations;
bool m_printTranslationOptions;
bool m_sourceStartPosMattersForRecombination;
// bool m_sourceStartPosMattersForRecombination;
bool m_requireSortingAfterSourceContext;
mutable size_t m_verboseLevel;
std::string m_factorDelimiter; //! by default, |, but it can be changed
// XmlInputType m_xmlInputType; //! method for handling sentence XML input
std::pair<std::string,std::string> m_xmlBrackets; //! strings to use as XML tags' opening and closing brackets. Default are "<" and ">"
size_t m_lmcache_cleanup_threshold; //! number of translations after which LM cleanup is performed (0=never, N=after N translations; default is 1)
// bool m_isAlwaysCreateDirectTranslationOption;
//! constructor. only the one static variable can be created
bool m_includeLHSInSearchGraph; //! include LHS of rules in search graph
std::string m_outputUnknownsFile; //! output unknowns in this file
@@ -144,7 +135,6 @@ protected:
std::map< std::string, std::set< std::string > > m_weightSettingIgnoreFF; // feature function
std::map< std::string, std::set< size_t > > m_weightSettingIgnoreDP; // decoding path
// FactorType m_placeHolderFactor;
bool m_useLegacyPT;
bool m_defaultNonTermOnlyForEmptyRange;
S2TParsingAlgorithm m_s2tParsingAlgorithm;
@@ -174,7 +164,6 @@ protected:
const StatefulFeatureFunction* m_treeStructure;
void ini_input_options();
void ini_oov_options();
bool ini_output_options();
bool ini_performance_options();
@@ -182,9 +171,6 @@ protected:
void initialize_features();
public:
// bool IsAlwaysCreateDirectTranslationOption() const {
// return m_isAlwaysCreateDirectTranslationOption;
// }
//! destructor
~StaticData();
@@ -219,51 +205,35 @@ public:
return *m_parameter;
}
AllOptions const&
AllOptions::ptr const
options() const {
return m_options;
}
AllOptions&
options() {
return m_options;
}
// AllOptions&
// options() {
// return m_options;
// }
inline bool
GetSourceStartPosMattersForRecombination() const {
return m_sourceStartPosMattersForRecombination;
}
bool
IsWordDeletionEnabled() const {
// return m_wordDeletionEnabled;
return m_options.unk.word_deletion_enabled;
}
int
GetMaxDistortion() const {
return m_options.reordering.max_distortion;
}
bool
UseReorderingConstraint() const {
return m_reorderingConstraint;
}
// inline bool
// GetSourceStartPosMattersForRecombination() const {
// return m_sourceStartPosMattersForRecombination;
// }
bool
UseEarlyDiscarding() const {
return m_options.search.early_discarding_threshold
return m_options->search.early_discarding_threshold
!= -std::numeric_limits<float>::infinity();
}
bool
UseEarlyDistortionCost() const {
return m_options.reordering.use_early_distortion_cost;
return m_options->reordering.use_early_distortion_cost;
}
float
GetTranslationOptionThreshold() const {
return m_options.search.trans_opt_threshold;
return m_options->search.trans_opt_threshold;
}
size_t
@@ -288,7 +258,7 @@ public:
bool IsSyntax(SearchAlgorithm algo = DefaultSearchAlgorithm) const {
if (algo == DefaultSearchAlgorithm)
algo = m_options.search.algo;
algo = m_options->search.algo;
return (algo == CYKPlus || algo == ChartIncremental ||
algo == SyntaxS2T || algo == SyntaxT2S ||
@@ -333,37 +303,14 @@ public:
return m_outputUnknownsFile;
}
bool GetIncludeLHSInSearchGraph() const {
return m_includeLHSInSearchGraph;
}
std::pair<std::string,std::string> GetXmlBrackets() const {
return m_xmlBrackets;
}
// bool PrintTranslationOptions() const {
// return m_printTranslationOptions;
// }
// bool PrintAllDerivations() const {
// return m_printAllDerivations;
// bool GetIncludeLHSInSearchGraph() const {
// return m_includeLHSInSearchGraph;
// }
const UnknownLHSList &GetUnknownLHS() const {
return m_unknownLHS;
}
const Word &GetInputDefaultNonTerminal() const {
return m_inputDefaultNonTerminal;
}
const Word &GetOutputDefaultNonTerminal() const {
return m_outputDefaultNonTerminal;
}
SourceLabelOverlap GetSourceLabelOverlap() const {
return m_sourceLabelOverlap;
}
size_t GetRuleLimit() const {
return m_ruleLimit;
}
@@ -473,7 +420,6 @@ public:
}
// sentence (and thread) specific initialisation and cleanup
// void InitializeForInput(const InputType& source, ttaskptr const& ttask) const;
void InitializeForInput(ttasksptr const& ttask) const;
void CleanUpAfterSentenceProcessing(ttasksptr const& ttask) const;
@@ -525,10 +471,6 @@ public:
return m_defaultNonTermOnlyForEmptyRange;
}
// S2TParsingAlgorithm GetS2TParsingAlgorithm() const {
// return m_s2tParsingAlgorithm;
// }
bool RequireSortingAfterSourceContext() const {
return m_requireSortingAfterSourceContext;
}
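
Because options() now hands back a shared pointer instead of a const reference, every remaining read of a setting switches from member access to pointer access. A hypothetical call site, not taken from the Moses sources, showing the before/after:

#include "moses/StaticData.h"

// Hypothetical helper illustrating only the accessor change.
size_t current_pop_limit_sketch()
{
  const Moses::StaticData &SD = Moses::StaticData::Instance();
  // before this commit:  SD.options().cube.pop_limit
  return SD.options()->cube.pop_limit;   // options() now returns AllOptions::ptr
}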

View File

@@ -3,9 +3,8 @@
#include <sstream>
#include "moses/FF/UnknownWordPenaltyProducer.h"
#include "moses/StaticData.h"
#include "util/string_stream.hh"
#include "moses/parameters/AllOptions.h"
namespace Moses
{
namespace Syntax
@@ -14,13 +13,13 @@ namespace F2S
{
GlueRuleSynthesizer::
GlueRuleSynthesizer(HyperTree &trie, const std::vector<FactorType> &iFactors)
: m_hyperTree(trie)
GlueRuleSynthesizer(Moses::AllOptions const& opts, HyperTree &trie)
: m_input_default_nonterminal(opts.syntax.input_default_non_terminal)
, m_output_default_nonterminal(opts.syntax.output_default_non_terminal)
, m_hyperTree(trie)
{
// const std::vector<FactorType> &inputFactorOrder =
// StaticData::Instance().GetInputFactorOrder();
Word *lhs = NULL;
m_dummySourcePhrase.CreateFromString(Input, iFactors, "hello", &lhs);
m_dummySourcePhrase.CreateFromString(Input, opts.input.factor_order, "hello", &lhs);
delete lhs;
}
@@ -47,11 +46,10 @@ void GlueRuleSynthesizer::SynthesizeHyperPath(const Forest::Hyperedge &e,
}
}
TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
const Forest::Hyperedge &e)
TargetPhrase*
GlueRuleSynthesizer::
SynthesizeTargetPhrase(const Forest::Hyperedge &e)
{
const StaticData &staticData = StaticData::Instance();
const UnknownWordPenaltyProducer &unknownWordPenaltyProducer =
UnknownWordPenaltyProducer::Instance();
@@ -61,7 +59,7 @@ TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
for (std::size_t i = 0; i < e.tail.size(); ++i) {
const Word &symbol = e.tail[i]->pvertex.symbol;
if (symbol.IsNonTerminal()) {
targetPhrase->AddWord(staticData.GetOutputDefaultNonTerminal());
targetPhrase->AddWord(m_output_default_nonterminal);
} else {
// TODO Check this
Word &targetWord = targetPhrase->AddWord();
@@ -75,7 +73,7 @@ TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
float score = LOWEST_SCORE;
targetPhrase->GetScoreBreakdown().Assign(&unknownWordPenaltyProducer, score);
targetPhrase->EvaluateInIsolation(m_dummySourcePhrase);
Word *targetLhs = new Word(staticData.GetOutputDefaultNonTerminal());
Word *targetLhs = new Word(m_output_default_nonterminal);
targetPhrase->SetTargetLHS(targetLhs);
targetPhrase->SetAlignmentInfo(alignmentSS.str());

View File

@@ -9,6 +9,7 @@
namespace Moses
{
class AllOptions;
namespace Syntax
{
namespace F2S
@@ -16,8 +17,10 @@ namespace F2S
class GlueRuleSynthesizer : public HyperTreeCreator
{
Word m_input_default_nonterminal;
Word m_output_default_nonterminal;
public:
GlueRuleSynthesizer(HyperTree &, std::vector<FactorType> const& iFactors);
GlueRuleSynthesizer(Moses::AllOptions const& opts, HyperTree &);
// Synthesize the minimal, monotone rule that can be applied to the given
// hyperedge and add it to the rule trie.
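
The synthesizer now receives AllOptions at construction and caches the default non-terminals (and the input factor order for the dummy source phrase) instead of querying StaticData while synthesizing. A simplified, hypothetical illustration of that constructor-injection pattern; the struct below is not a Moses type.

#include "moses/Word.h"
#include "moses/parameters/AllOptions.h"

// Hypothetical snapshot type: option-derived values are copied once at
// construction so later synthesis steps never touch the StaticData singleton.
struct NonTerminalDefaultsSketch
{
  Moses::Word input;
  Moses::Word output;

  explicit NonTerminalDefaultsSketch(Moses::AllOptions const& opts)
    : input(opts.syntax.input_default_non_terminal)
    , output(opts.syntax.output_default_non_terminal)
  {}
};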

View File

@@ -60,9 +60,9 @@ void Manager<RuleMatcher>::Decode()
const StaticData &staticData = StaticData::Instance();
// Get various pruning-related constants.
const std::size_t popLimit = staticData.options().cube.pop_limit;
const std::size_t popLimit = staticData.options()->cube.pop_limit;
const std::size_t ruleLimit = staticData.GetRuleLimit();
const std::size_t stackLimit = staticData.options().search.stack_size;
const std::size_t stackLimit = staticData.options()->search.stack_size;
// Initialize the stacks.
InitializeStacks();
@@ -74,8 +74,7 @@ void Manager<RuleMatcher>::Decode()
RuleMatcherCallback callback(m_stackMap, ruleLimit);
// Create a glue rule synthesizer.
GlueRuleSynthesizer glueRuleSynthesizer(*m_glueRuleTrie,
options()->input.factor_order);
GlueRuleSynthesizer glueRuleSynthesizer(*options(), *m_glueRuleTrie);
// Sort the input forest's vertices into bottom-up topological order.
std::vector<const Forest::Vertex *> sortedVertices;
@@ -256,7 +255,7 @@ void Manager<RuleMatcher>::ExtractKBest(
// with 0 being 'unlimited.' This actually sets a large-ish limit in case
// too many translations are identical.
const StaticData &staticData = StaticData::Instance();
const std::size_t nBestFactor = staticData.options().nbest.factor;
const std::size_t nBestFactor = staticData.options()->nbest.factor;
std::size_t numDerivations = (nBestFactor == 0) ? k*1000 : k*nBestFactor;
// Extract the derivations.

View File

@@ -75,7 +75,7 @@ void KBestExtractor::Extract(
// Generate the target-side yield of the derivation d.
Phrase KBestExtractor::GetOutputPhrase(const Derivation &d)
{
FactorType placeholderFactor = StaticData::Instance().options().input.placeholder_factor;
FactorType placeholderFactor = StaticData::Instance().options()->input.placeholder_factor;
Phrase ret(ARRAY_SIZE_INCR);

View File

@@ -24,34 +24,35 @@ RuleTableFF::RuleTableFF(const std::string &line)
s_instances.push_back(this);
}
void RuleTableFF::Load(Moses::AllOptions const& opts)
void RuleTableFF::Load(Moses::AllOptions::ptr const& opts)
{
m_options = opts;
SetFeaturesToApply();
if (opts.search.algo == SyntaxF2S || opts.search.algo == SyntaxT2S) {
if (opts->search.algo == SyntaxF2S || opts->search.algo == SyntaxT2S) {
F2S::HyperTree *trie = new F2S::HyperTree(this);
F2S::HyperTreeLoader loader;
loader.Load(opts, m_input, m_output, m_filePath, *this, *trie, m_sourceTerminalSet);
loader.Load(*opts, m_input, m_output, m_filePath, *this, *trie, m_sourceTerminalSet);
m_table = trie;
} else if (opts.search.algo == SyntaxS2T) {
S2TParsingAlgorithm algorithm = opts.syntax.s2t_parsing_algo; // staticData.GetS2TParsingAlgorithm();
} else if (opts->search.algo == SyntaxS2T) {
S2TParsingAlgorithm algorithm = opts->syntax.s2t_parsing_algo; // staticData.GetS2TParsingAlgorithm();
if (algorithm == RecursiveCYKPlus) {
S2T::RuleTrieCYKPlus *trie = new S2T::RuleTrieCYKPlus(this);
S2T::RuleTrieLoader loader;
loader.Load(opts,m_input, m_output, m_filePath, *this, *trie);
loader.Load(*opts,m_input, m_output, m_filePath, *this, *trie);
m_table = trie;
} else if (algorithm == Scope3) {
S2T::RuleTrieScope3 *trie = new S2T::RuleTrieScope3(this);
S2T::RuleTrieLoader loader;
loader.Load(opts, m_input, m_output, m_filePath, *this, *trie);
loader.Load(*opts, m_input, m_output, m_filePath, *this, *trie);
m_table = trie;
} else {
UTIL_THROW2("ERROR: unhandled S2T parsing algorithm");
}
} else if (opts.search.algo == SyntaxT2S_SCFG) {
} else if (opts->search.algo == SyntaxT2S_SCFG) {
T2S::RuleTrie *trie = new T2S::RuleTrie(this);
T2S::RuleTrieLoader loader;
loader.Load(opts, m_input, m_output, m_filePath, *this, *trie);
loader.Load(*opts, m_input, m_output, m_filePath, *this, *trie);
m_table = trie;
} else {
UTIL_THROW2(
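
RuleTableFF::Load now takes the shared AllOptions pointer, stores it in m_options, and dereferences it when handing the options to the individual trie loaders. A hypothetical call site follows; the header path and the wrapping function are assumptions, and `line` stands for an ordinary Moses feature configuration line.

#include <string>
#include "moses/StaticData.h"
#include "moses/Syntax/RuleTableFF.h"   // assumed header location

// Hypothetical: load a syntax rule table with the globally shared options.
void load_rule_table_sketch(const std::string &line)
{
  Moses::Syntax::RuleTableFF ff(line);              // feature config line
  ff.Load(Moses::StaticData::Instance().options()); // Load(AllOptions::ptr const&)
}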

View File

@@ -27,7 +27,7 @@ public:
// FIXME Delete m_table?
~RuleTableFF() {}
void Load(AllOptions const& opts);
void Load(AllOptions::ptr const& opts);
const RuleTable *GetTable() const {
return m_table;

View File

@@ -163,9 +163,9 @@ void Manager<Parser>::Decode()
const StaticData &staticData = StaticData::Instance();
// Get various pruning-related constants.
const std::size_t popLimit = staticData.options().cube.pop_limit;
const std::size_t popLimit = staticData.options()->cube.pop_limit;
const std::size_t ruleLimit = staticData.GetRuleLimit();
const std::size_t stackLimit = staticData.options().search.stack_size;
const std::size_t stackLimit = staticData.options()->search.stack_size;
// Initialise the PChart and SChart.
InitializeCharts();
@@ -303,7 +303,7 @@ void Manager<Parser>::ExtractKBest(
// with 0 being 'unlimited.' This actually sets a large-ish limit in case
// too many translations are identical.
const StaticData &staticData = StaticData::Instance();
const std::size_t nBestFactor = staticData.options().nbest.factor;
const std::size_t nBestFactor = staticData.options()->nbest.factor;
std::size_t numDerivations = (nBestFactor == 0) ? k*1000 : k*nBestFactor;
// Extract the derivations.

View File

@@ -57,7 +57,7 @@ OovHandler<RuleTrie>::SynthesizeTargetLhs(const std::string &lhsStr)
{
Word *targetLhs = new Word(true);
targetLhs->CreateFromString(Output,
StaticData::Instance().options().output.factor_order,
StaticData::Instance().options()->output.factor_order,
lhsStr, true);
UTIL_THROW_IF2(targetLhs->GetFactor(0) == NULL, "Null factor for target LHS");
return targetLhs;
@@ -83,7 +83,7 @@ TargetPhrase *OovHandler<RuleTrie>::SynthesizeTargetPhrase(
targetPhrase->EvaluateInIsolation(srcPhrase);
targetPhrase->SetTargetLHS(&targetLhs);
targetPhrase->SetAlignmentInfo("0-0");
if (!SD.options().output.detailed_tree_transrep_filepath.empty() ||
if (!SD.options()->output.detailed_tree_transrep_filepath.empty() ||
SD.GetTreeStructure() != NULL) {
std::string value = "[ " + targetLhs[0]->GetString().as_string() + " " +
oov[0]->GetString().as_string() + " ]";
@@ -96,7 +96,7 @@ TargetPhrase *OovHandler<RuleTrie>::SynthesizeTargetPhrase(
template<typename RuleTrie>
bool OovHandler<RuleTrie>::ShouldDrop(const Word &oov)
{
if (!StaticData::Instance().options().unk.drop) {
if (!StaticData::Instance().options()->unk.drop) {
return false;
}
const Factor *f = oov[0]; // TODO hack. shouldn't know which factor is surface

View File

@@ -11,7 +11,7 @@ namespace Syntax
Phrase GetOneBestTargetYield(const SHyperedge &h)
{
FactorType placeholderFactor = StaticData::Instance().options().input.placeholder_factor;
FactorType placeholderFactor = StaticData::Instance().options()->input.placeholder_factor;
Phrase ret(ARRAY_SIZE_INCR);

View File

@@ -3,7 +3,7 @@
#include <sstream>
#include "moses/FF/UnknownWordPenaltyProducer.h"
#include "moses/StaticData.h"
#include <boost/scoped_ptr.hpp>
namespace Moses
{
@@ -12,7 +12,9 @@ namespace Syntax
namespace T2S
{
void GlueRuleSynthesizer::SynthesizeRule(const InputTree::Node &node)
void
GlueRuleSynthesizer::
SynthesizeRule(const InputTree::Node &node)
{
const Word &sourceLhs = node.pvertex.symbol;
boost::scoped_ptr<Phrase> sourceRhs(SynthesizeSourcePhrase(node));
@@ -22,7 +24,9 @@ void GlueRuleSynthesizer::SynthesizeRule(const InputTree::Node &node)
tpc->Add(tp);
}
Phrase *GlueRuleSynthesizer::SynthesizeSourcePhrase(const InputTree::Node &node)
Phrase*
GlueRuleSynthesizer::
SynthesizeSourcePhrase(const InputTree::Node &node)
{
Phrase *phrase = new Phrase(node.children.size());
for (std::vector<InputTree::Node*>::const_iterator p = node.children.begin();
@@ -37,11 +41,10 @@ Phrase *GlueRuleSynthesizer::SynthesizeSourcePhrase(const InputTree::Node &node)
return phrase;
}
TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
const InputTree::Node &node, const Phrase &sourceRhs)
TargetPhrase*
GlueRuleSynthesizer::
SynthesizeTargetPhrase(const InputTree::Node &node, const Phrase &sourceRhs)
{
const StaticData &staticData = StaticData::Instance();
const UnknownWordPenaltyProducer &unknownWordPenaltyProducer =
UnknownWordPenaltyProducer::Instance();
@@ -51,7 +54,7 @@ TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
for (std::size_t i = 0; i < node.children.size(); ++i) {
const Word &symbol = node.children[i]->pvertex.symbol;
if (symbol.IsNonTerminal()) {
targetPhrase->AddWord(staticData.GetOutputDefaultNonTerminal());
targetPhrase->AddWord(m_output_default_nonterminal);
} else {
// TODO Check this
Word &targetWord = targetPhrase->AddWord();
@@ -65,7 +68,7 @@ TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
float score = LOWEST_SCORE;
targetPhrase->GetScoreBreakdown().Assign(&unknownWordPenaltyProducer, score);
targetPhrase->EvaluateInIsolation(sourceRhs);
Word *targetLhs = new Word(staticData.GetOutputDefaultNonTerminal());
Word *targetLhs = new Word(m_output_default_nonterminal);
targetPhrase->SetTargetLHS(targetLhs);
targetPhrase->SetAlignmentInfo(alignmentSS.str());

View File

@@ -16,8 +16,12 @@ namespace T2S
class GlueRuleSynthesizer : public RuleTrieCreator
{
Word m_output_default_nonterminal;
public:
GlueRuleSynthesizer(RuleTrie &trie) : m_ruleTrie(trie) {}
GlueRuleSynthesizer(RuleTrie &trie, Word dflt_nonterm)
: m_ruleTrie(trie)
, m_output_default_nonterminal(dflt_nonterm)
{}
// Synthesize the minimal, monotone rule that can be applied to the given node
// and add it to the rule trie.

View File

@@ -111,7 +111,8 @@ void Manager<RuleMatcher>::Decode()
F2S::RuleMatcherCallback callback(m_stackMap, ruleLimit);
// Create a glue rule synthesizer.
GlueRuleSynthesizer glueRuleSynthesizer(*m_glueRuleTrie);
Word dflt_nonterm = options()->syntax.output_default_non_terminal;
GlueRuleSynthesizer glueRuleSynthesizer(*m_glueRuleTrie, dflt_nonterm);
// Visit each node of the input tree in post-order.
for (std::vector<InputTree::Node>::const_iterator p =
@@ -215,7 +216,7 @@ void Manager<RuleMatcher>::ExtractKBest(
// with 0 being 'unlimited.' This actually sets a large-ish limit in case
// too many translations are identical.
const StaticData &staticData = StaticData::Instance();
const std::size_t nBestFactor = staticData.options().nbest.factor;
const std::size_t nBestFactor = staticData.options()->nbest.factor;
std::size_t numDerivations = (nBestFactor == 0) ? k*1000 : k*nBestFactor;
// Extract the derivations.

View File

@@ -47,9 +47,7 @@ void TabbedSentence::CreateFromString(const std::vector<FactorType> &factorOrder
int
TabbedSentence::
Read(std::istream& in,
std::vector<FactorType> const& factorOrder,
AllOptions const& opts)
Read(std::istream& in)
{
TabbedColumns allColumns;
@@ -60,17 +58,14 @@ Read(std::istream& in,
boost::split(allColumns, line, boost::is_any_of("\t"));
if(allColumns.size() < 2) {
std::stringstream dummyStream;
dummyStream << line << std::endl;
return Sentence::Read(dummyStream, factorOrder, opts);
Sentence::init(line);
} else {
m_columns.resize(allColumns.size() - 1);
std::copy(allColumns.begin() + 1, allColumns.end(), m_columns.begin());
std::stringstream dummyStream;
dummyStream << allColumns[0] << std::endl;
return Sentence::Read(dummyStream, factorOrder, opts);
Sentence::init(allColumns[0]);
}
return 1;
}
}
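
TabbedSentence::Read splits each input line on tabs: the first field is handed to Sentence::init() as the actual source sentence, any remaining fields are kept as extra columns, and a line without tabs is treated as a plain sentence. A stand-alone sketch of that splitting step, with an invented example line (the tab payload shown is purely illustrative):

#include <iostream>
#include <string>
#include <vector>
#include <boost/algorithm/string.hpp>

int main()
{
  // Invented example: source sentence followed by two auxiliary columns.
  std::string line = "das ist ein haus\tdomain=news\t0.7";

  std::vector<std::string> cols;
  boost::split(cols, line, boost::is_any_of("\t"));

  std::string sentence = cols[0];  // would go to Sentence::init()
  std::vector<std::string> extra;  // would become m_columns
  if (cols.size() >= 2)
    extra.assign(cols.begin() + 1, cols.end());

  std::cout << "sentence: " << sentence << "\n"
            << "columns:  " << extra.size() << "\n";
  return 0;
}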

Some files were not shown because too many files have changed in this diff.