Passing around AllOptions or references thereto everywhere,

storing them locally where appropriate, so that components can
become independent of StaticData once instantiated.
Ulrich Germann 2015-12-09 23:05:00 +00:00
parent ad5e27ae56
commit 240b88c683
47 changed files with 232 additions and 209 deletions
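
For orientation, the pattern that recurs throughout the diff below is: keep a reference-counted, read-only options object (typedef'd as AllOptions::ptr) and hand it to each component when it is constructed, so the component never has to go back to the StaticData singleton afterwards. Here is a minimal, compilable sketch of that pattern using simplified stand-ins (ManagerLike and the trimmed-down AllOptions/NBestOptions are illustrative only, not the real Moses classes):

#include <boost/shared_ptr.hpp>
#include <iostream>

struct NBestOptions { unsigned nbest_size; bool enabled; };

struct AllOptions {
  // shared pointer to a *const* object: many components can share one
  // immutable snapshot of the configuration
  typedef boost::shared_ptr<AllOptions const> ptr;
  NBestOptions nbest;
  AllOptions() { nbest.nbest_size = 100; nbest.enabled = true; }
};

class ManagerLike {
  AllOptions::ptr m_options;                 // held locally, set once at construction
public:
  explicit ManagerLike(AllOptions::ptr const& opts) : m_options(opts) {}
  AllOptions::ptr const& options() const { return m_options; }
  void report() const {
    // call sites change from options().nbest... to options()->nbest...
    std::cout << "n-best size: " << options()->nbest.nbest_size << std::endl;
  }
};

int main() {
  // in Moses this snapshot would be copied from StaticData::Instance().options()
  boost::shared_ptr<AllOptions> opts(new AllOptions);
  ManagerLike m(opts);   // shared_ptr<AllOptions> converts to shared_ptr<AllOptions const>
  m.report();
  return 0;
}

Because the pointee is const, copying the handle is cheap and the snapshot cannot change underneath the components that share it.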

View File

@ -167,7 +167,7 @@ int main(int argc, char const* argv[])
MBR_Options& mbr = SD.options().mbr;
lmbr.enabled = true;
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper);
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper(SD.options()));
if (!ioWrapper) {
throw runtime_error("Failed to initialise IOWrapper");
}

View File

@ -125,8 +125,8 @@ int main(int argc, char const** argv)
IFVERBOSE(1) {
PrintUserTime("Created input-output object");
}
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper());
AllOptions::ptr opts(new AllOptions(StaticData::Instance().options()));
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper(*opts));
if (ioWrapper == NULL) {
cerr << "Error; Failed to create IO object" << endl;
exit(1);
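
The driver now takes an explicit copy of the global options (new AllOptions(StaticData::Instance().options())) and passes it to IOWrapper instead of letting IOWrapper consult StaticData itself. A hedged sketch of why a copy is taken, with hypothetical stand-ins (global_options() plays the role of StaticData::Instance().options()):

#include <boost/shared_ptr.hpp>
#include <iostream>

struct AllOptions {
  typedef boost::shared_ptr<AllOptions const> ptr;
  int nbest_size;
  AllOptions() : nbest_size(0) {}
};

AllOptions const& global_options() { static AllOptions g; return g; } // stand-in for StaticData

int main() {
  boost::shared_ptr<AllOptions> snapshot(new AllOptions(global_options())); // private copy
  snapshot->nbest_size = 10;        // adjust for this run without touching the global
  AllOptions::ptr opts(snapshot);   // freeze as a shared, read-only options object
  std::cout << opts->nbest_size << " vs global " << global_options().nbest_size << std::endl;
  return 0;
}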

View File

@ -98,10 +98,10 @@ void
BaseManager::
OutputSurface(std::ostream &out, Phrase const& phrase) const
{
std::vector<FactorType> const& factor_order = options().output.factor_order;
std::vector<FactorType> const& factor_order = options()->output.factor_order;
bool markUnknown = options().unk.mark;
std::string const& fd = options().output.FactorDelimiter;
bool markUnknown = options()->unk.mark;
std::string const& fd = options()->output.FactorDelimiter;
size_t size = phrase.GetSize();
for (size_t pos = 0 ; pos < size ; pos++) {
@ -110,7 +110,7 @@ OutputSurface(std::ostream &out, Phrase const& phrase) const
const Word &word = phrase.GetWord(pos);
if(markUnknown && word.IsOOV()) {
out << options().unk.prefix;
out << options()->unk.prefix;
}
out << *factor;
@ -122,7 +122,7 @@ OutputSurface(std::ostream &out, Phrase const& phrase) const
}
if(markUnknown && word.IsOOV()) {
out << options().unk.suffix;
out << options()->unk.suffix;
}
out << " ";
@ -147,7 +147,7 @@ void BaseManager::WriteApplicationContext(std::ostream &out,
}
}
AllOptions const&
AllOptions::ptr const&
BaseManager::
options() const
{
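
With options() now returning the shared pointer rather than a reference, call sites switch from options().x to options()->x, and pass *options() where an AllOptions reference is still expected (as in the ChartHypothesisCollection and doMBR calls below). A small illustrative sketch, using stand-in names (BaseManagerLike, legacy_api):

#include <boost/shared_ptr.hpp>
#include <iostream>

struct AllOptions {
  typedef boost::shared_ptr<AllOptions const> ptr;
  int pop_limit;
  AllOptions() : pop_limit(1000) {}
};

void legacy_api(AllOptions const& opts) {   // some interfaces still take a reference
  std::cout << "pop limit: " << opts.pop_limit << std::endl;
}

class BaseManagerLike {
  AllOptions::ptr m_options;
public:
  explicit BaseManagerLike(AllOptions::ptr const& opts) : m_options(opts) {}
  AllOptions::ptr const& options() const { return m_options; } // was: AllOptions const&
  void decode() const {
    int limit = options()->pop_limit;   // was: options().pop_limit
    legacy_api(*options());             // dereference where a reference is still expected
    std::cout << "using limit " << limit << std::endl;
  }
};

int main() {
  AllOptions::ptr opts(new AllOptions);
  BaseManagerLike m(opts);
  m.decode();
  return 0;
}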

View File

@ -49,7 +49,7 @@ public:
//! the input sentence being decoded
const InputType& GetSource() const;
const ttasksptr GetTtask() const;
AllOptions const& options() const;
AllOptions::ptr const& options() const;
virtual void Decode() = 0;
// outputs

View File

@ -51,7 +51,7 @@ ChartCellBase::~ChartCellBase() {}
ChartCell::ChartCell(size_t startPos, size_t endPos, ChartManager &manager) :
ChartCellBase(startPos, endPos), m_manager(manager)
{
m_nBestIsEnabled = manager.options().nbest.enabled;
m_nBestIsEnabled = manager.options()->nbest.enabled;
}
ChartCell::~ChartCell() {}
@ -67,7 +67,7 @@ bool ChartCell::AddHypothesis(ChartHypothesis *hypo)
MapType::iterator m = m_hypoColl.find(targetLHS);
if (m == m_hypoColl.end()) {
std::pair<Word, ChartHypothesisCollection>
e(targetLHS, ChartHypothesisCollection(m_manager.options()));
e(targetLHS, ChartHypothesisCollection(*m_manager.options()));
m = m_hypoColl.insert(e).first;
}
return m->second.AddHypothesis(hypo, m_manager);
@ -102,7 +102,7 @@ void ChartCell::Decode(const ChartTranslationOptionList &transOptList
}
// pluck things out of queue and add to hypo collection
const size_t popLimit = m_manager.options().cube.pop_limit;
const size_t popLimit = m_manager.options()->cube.pop_limit;
for (size_t numPops = 0; numPops < popLimit && !queue.IsEmpty(); ++numPops) {
ChartHypothesis *hypo = queue.Pop();
AddHypothesis(hypo);

View File

@ -205,7 +205,7 @@ void ChartManager::CalcNBest(
// than n. The n-best factor determines how much bigger the limit should be,
// with 0 being 'unlimited.' This actually sets a large-ish limit in case
// too many translations are identical.
const std::size_t nBestFactor = options().nbest.factor;
const std::size_t nBestFactor = options()->nbest.factor;
std::size_t numDerivations = (nBestFactor == 0) ? n*1000 : n*nBestFactor;
// Extract the derivations.
@ -315,14 +315,14 @@ void ChartManager::OutputBest(OutputCollector *collector) const
void ChartManager::OutputNBest(OutputCollector *collector) const
{
size_t nBestSize = options().nbest.nbest_size;
size_t nBestSize = options()->nbest.nbest_size;
if (nBestSize > 0) {
const size_t translationId = m_source.GetTranslationId();
VERBOSE(2,"WRITING " << nBestSize << " TRANSLATION ALTERNATIVES TO "
<< options().nbest.output_file_path << endl);
<< options()->nbest.output_file_path << endl);
std::vector<boost::shared_ptr<ChartKBestExtractor::Derivation> > nBestList;
CalcNBest(nBestSize, nBestList, options().nbest.only_distinct);
CalcNBest(nBestSize, nBestList, options()->nbest.only_distinct);
OutputNBestList(collector, nBestList, translationId);
IFVERBOSE(2) {
PrintUserTime("N-Best Hypotheses Generation Time:");
@ -343,7 +343,7 @@ void ChartManager::OutputNBestList(OutputCollector *collector,
FixPrecision(out);
}
NBestOptions const& nbo = options().nbest;
NBestOptions const& nbo = options()->nbest;
bool includeWordAlignment = nbo.include_alignment_info;
bool PrintNBestTrees = nbo.print_trees;
@ -365,7 +365,7 @@ void ChartManager::OutputNBestList(OutputCollector *collector,
OutputSurface(out, outputPhrase); // , outputFactorOrder, false);
out << " ||| ";
boost::shared_ptr<ScoreComponentCollection> scoreBreakdown = ChartKBestExtractor::GetOutputScoreBreakdown(derivation);
bool with_labels = options().nbest.include_feature_labels;
bool with_labels = options()->nbest.include_feature_labels;
scoreBreakdown->OutputAllFeatureScores(out, with_labels);
out << " ||| " << derivation.score;
@ -611,11 +611,11 @@ void ChartManager::OutputDetailedTranslationReport(
collector->Write(translationId, out.str());
//DIMw
if (options().output.detailed_all_transrep_filepath.size()) {
if (options()->output.detailed_all_transrep_filepath.size()) {
const Sentence &sentence = static_cast<const Sentence &>(m_source);
size_t nBestSize = options().nbest.nbest_size;
size_t nBestSize = options()->nbest.nbest_size;
std::vector<boost::shared_ptr<ChartKBestExtractor::Derivation> > nBestList;
CalcNBest(nBestSize, nBestList, options().nbest.only_distinct);
CalcNBest(nBestSize, nBestList, options()->nbest.only_distinct);
OutputDetailedAllTranslationReport(collector, nBestList, sentence, translationId);
}
@ -820,11 +820,11 @@ void ChartManager::OutputBestHypo(OutputCollector *collector, const ChartHypothe
Backtrack(hypo);
VERBOSE(3,"0" << std::endl);
if (options().output.ReportHypoScore) {
if (options()->output.ReportHypoScore) {
out << hypo->GetFutureScore() << " ";
}
if (options().output.RecoverPath) {
if (options()->output.RecoverPath) {
out << "||| ";
}
Phrase outPhrase(ARRAY_SIZE_INCR);
@ -837,12 +837,12 @@ void ChartManager::OutputBestHypo(OutputCollector *collector, const ChartHypothe
outPhrase.RemoveWord(0);
outPhrase.RemoveWord(outPhrase.GetSize() - 1);
string output = outPhrase.GetStringRep(options().output.factor_order);
string output = outPhrase.GetStringRep(options()->output.factor_order);
out << output << endl;
} else {
VERBOSE(1, "NO BEST TRANSLATION" << endl);
if (options().output.ReportHypoScore) {
if (options()->output.ReportHypoScore) {
out << "0 ";
}

View File

@ -214,7 +214,7 @@ void ChartParser::Create(const Range &range, ChartParserCallback &to)
if (range.GetNumWordsCovered() == 1
&& range.GetStartPos() != 0
&& range.GetStartPos() != m_source.GetSize()-1) {
bool always = m_ttask.lock()->options().unk.always_create_direct_transopt;
bool always = m_ttask.lock()->options()->unk.always_create_direct_transopt;
// bool alwaysCreateDirectTranslationOption
// = StaticData::Instance().IsAlwaysCreateDirectTranslationOption();
if (to.Empty() || always) {

View File

@ -68,7 +68,7 @@ void ChartTranslationOptions::EvaluateWithSourceContext(const InputType &input,
{
SetInputPath(&inputPath);
// if (StaticData::Instance().GetPlaceholderFactor() != NOT_FOUND) {
if (inputPath.ttask.lock()->options().input.placeholder_factor != NOT_FOUND) {
if (inputPath.ttask.lock()->options()->input.placeholder_factor != NOT_FOUND) {
CreateSourceRuleFromInputPath();
}

View File

@ -62,7 +62,7 @@ GetColumnIncrement(size_t i, size_t j) const
}
ConfusionNet::
ConfusionNet() : InputType()
ConfusionNet(AllOptions::ptr const& opts) : InputType(opts)
{
stats.createOne();
@ -80,7 +80,7 @@ ConfusionNet::
}
ConfusionNet::
ConfusionNet(Sentence const& s) : InputType()
ConfusionNet(Sentence const& s) : InputType(s.options())
{
data.resize(s.GetSize());
for(size_t i=0; i<s.GetSize(); ++i) {
@ -282,10 +282,10 @@ ConfusionNet::
CreateTranslationOptionCollection(ttasksptr const& ttask) const
{
size_t maxNoTransOptPerCoverage
= ttask->options().search.max_trans_opt_per_cov;
= ttask->options()->search.max_trans_opt_per_cov;
// StaticData::Instance().GetMaxNoTransOptPerCoverage();
float translationOptionThreshold
= ttask->options().search.trans_opt_threshold;
= ttask->options()->search.trans_opt_threshold;
// StaticData::Instance().GetTranslationOptionThreshold();
TranslationOptionCollection *rv
= new TranslationOptionCollectionConfusionNet

View File

@ -35,7 +35,7 @@ protected:
void String2Word(const std::string& s,Word& w,const std::vector<FactorType>& factorOrder);
public:
ConfusionNet();
ConfusionNet(AllOptions::ptr const& opts);
virtual ~ConfusionNet();
ConfusionNet(Sentence const& s);

View File

@ -101,7 +101,7 @@ SimpleTranslationInterface::~SimpleTranslationInterface()
//the simplified version of string input/output translation
string SimpleTranslationInterface::translate(const string &inputString)
{
boost::shared_ptr<Moses::IOWrapper> ioWrapper(new IOWrapper);
boost::shared_ptr<Moses::IOWrapper> ioWrapper(new IOWrapper(StaticData::Instance().options()));
// main loop over set of input sentences
size_t sentEnd = inputString.rfind('\n'); //find the last \n, the input stream has to be appended with \n to be translated
const string &newString = sentEnd != string::npos ? inputString : inputString + '\n';
@ -180,7 +180,7 @@ batch_run()
IFVERBOSE(1) PrintUserTime("Created input-output object");
// set up read/writing class:
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper);
boost::shared_ptr<IOWrapper> ioWrapper(new IOWrapper(staticData.options()));
UTIL_THROW_IF2(ioWrapper == NULL, "Error; Failed to create IO object"
<< " [" << HERE << "]");

View File

@ -21,7 +21,7 @@ class ForestInput : public Sentence
public:
friend std::ostream &operator<<(std::ostream&, const ForestInput &);
ForestInput() : Sentence(), m_rootVertex(NULL) {}
ForestInput(AllOptions::ptr const& opts) : Sentence(opts), m_rootVertex(NULL) {}
InputTypeEnum GetType() const {
return ForestInputType;

View File

@ -336,7 +336,7 @@ size_t
Hypothesis::
OutputAlignment(std::ostream &out, bool recursive=true) const
{
WordAlignmentSort const& waso = m_manager.options().output.WA_SortOrder;
WordAlignmentSort const& waso = m_manager.options()->output.WA_SortOrder;
TargetPhrase const& tp = GetCurrTargetPhrase();
// call with head recursion to output things in the right order

View File

@ -36,10 +36,10 @@ namespace Moses
HypothesisStackCubePruning::HypothesisStackCubePruning(Manager& manager) :
HypothesisStack(manager)
{
m_nBestIsEnabled = manager.options().nbest.enabled;
m_nBestIsEnabled = manager.options()->nbest.enabled;
m_bestScore = -std::numeric_limits<float>::infinity();
m_worstScore = -std::numeric_limits<float>::infinity();
m_deterministic = manager.options().cube.deterministic_search;
m_deterministic = manager.options()->cube.deterministic_search;
}
/** remove all hypotheses from the collection */
@ -244,7 +244,7 @@ void HypothesisStackCubePruning::CleanupArcList()
iterator iter;
for (iter = m_hypos.begin() ; iter != m_hypos.end() ; ++iter) {
Hypothesis *mainHypo = *iter;
mainHypo->CleanupArcList(this->m_manager.options().nbest.nbest_size, this->m_manager.options().NBestDistinct());
mainHypo->CleanupArcList(this->m_manager.options()->nbest.nbest_size, this->m_manager.options()->NBestDistinct());
}
}

View File

@ -35,7 +35,7 @@ namespace Moses
HypothesisStackNormal::HypothesisStackNormal(Manager& manager) :
HypothesisStack(manager)
{
m_nBestIsEnabled = manager.options().nbest.enabled;
m_nBestIsEnabled = manager.options()->nbest.enabled;
m_bestScore = -std::numeric_limits<float>::infinity();
m_worstScore = -std::numeric_limits<float>::infinity();
}
@ -75,7 +75,12 @@ pair<HypothesisStackNormal::iterator, bool> HypothesisStackNormal::Add(Hypothesi
size_t toleratedSize = 2*m_maxHypoStackSize-1;
// add in room for stack diversity
if (m_minHypoStackDiversity)
toleratedSize += m_minHypoStackDiversity << m_manager.options().reordering.max_distortion;
{
// so what happens if maxdistortion is negative?
toleratedSize += m_minHypoStackDiversity
<< m_manager.options()->reordering.max_distortion;
}
if (m_hypos.size() > toleratedSize) {
PruneToSize(m_maxHypoStackSize);
} else {
@ -96,7 +101,7 @@ bool HypothesisStackNormal::AddPrune(Hypothesis *hypo)
}
// too bad for stack. don't bother adding hypo into collection
if (m_manager.options().search.disable_discarding == false
if (m_manager.options()->search.disable_discarding == false
&& hypo->GetFutureScore() < m_worstScore
&& ! ( m_minHypoStackDiversity > 0
&& hypo->GetFutureScore() >= GetWorstScoreForBitmap( hypo->GetWordsBitmap() ) ) ) {
@ -265,7 +270,7 @@ void HypothesisStackNormal::CleanupArcList()
iterator iter;
for (iter = m_hypos.begin() ; iter != m_hypos.end() ; ++iter) {
Hypothesis *mainHypo = *iter;
mainHypo->CleanupArcList(this->m_manager.options().nbest.nbest_size, this->m_manager.options().NBestDistinct());
mainHypo->CleanupArcList(this->m_manager.options()->nbest.nbest_size, this->m_manager.options()->NBestDistinct());
}
}

View File

@ -63,8 +63,9 @@ using namespace std;
namespace Moses
{
IOWrapper::IOWrapper()
: m_nBestStream(NULL)
IOWrapper::IOWrapper(AllOptions const& opts)
: m_options(new AllOptions(opts))
, m_nBestStream(NULL)
, m_surpressSingleBestOutput(false)
, m_look_ahead(0)
, m_look_back(0)

View File

@ -61,8 +61,10 @@ POSSIBILITY OF SUCH DAMAGE.
#include "moses/LatticeMBR.h"
#include "moses/ChartKBestExtractor.h"
#include "moses/Syntax/KBestExtractor.h"
#include "moses/parameters/AllOptions.h"
#include <boost/format.hpp>
#include <boost/shared_ptr.hpp>
namespace Moses
{
@ -81,6 +83,7 @@ struct SHyperedge;
class IOWrapper
{
protected:
boost::shared_ptr<AllOptions const> m_options;
const std::vector<Moses::FactorType> *m_inputFactorOrder;
std::string m_inputFilePath;
Moses::InputFileStream *m_inputFile;
@ -124,7 +127,7 @@ protected:
std::string m_hypergraph_output_filepattern;
public:
IOWrapper();
IOWrapper(AllOptions const& opts);
~IOWrapper();
// Moses::InputType* GetInput(Moses::InputType *inputType);
@ -224,13 +227,13 @@ BufferInput()
m_future_input.pop_front();
m_buffered_ahead -= ret->GetSize();
} else {
source.reset(new itype);
source.reset(new itype(m_options));
if (!source->Read(*m_inputStream, *m_inputFactorOrder, opts))
return ret;
ret = source;
}
while (m_buffered_ahead < m_look_ahead) {
source.reset(new itype);
source.reset(new itype(m_options));
if (!source->Read(*m_inputStream, *m_inputFactorOrder, opts))
break;
m_future_input.push_back(source);
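
IOWrapper now snapshots the options it receives (m_options(new AllOptions(opts))) and forwards that same shared pointer to every input object it builds in BufferInput (source.reset(new itype(m_options))). A compilable sketch of that flow with hypothetical stand-ins (IOWrapperLike, InputLike):

#include <boost/shared_ptr.hpp>
#include <iostream>
#include <string>

struct AllOptions {
  typedef boost::shared_ptr<AllOptions const> ptr;
  std::string factor_delimiter;
  AllOptions() : factor_delimiter("|") {}
};

struct InputLike {                       // stand-in for Sentence / ConfusionNet / TreeInput
  AllOptions::ptr m_options;
  explicit InputLike(AllOptions::ptr const& opts) : m_options(opts) {}
};

class IOWrapperLike {
  AllOptions::ptr m_options;
public:
  explicit IOWrapperLike(AllOptions const& opts)
    : m_options(new AllOptions(opts)) {}          // keep a private copy, as in IOWrapper.cpp
  boost::shared_ptr<InputLike> ReadInput() const {
    boost::shared_ptr<InputLike> source(new InputLike(m_options)); // forward the shared options
    return source;
  }
};

int main() {
  AllOptions global;
  IOWrapperLike io(global);
  boost::shared_ptr<InputLike> in = io.ReadInput();
  std::cout << in->m_options->factor_delimiter << std::endl;
  return 0;
}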

View File

@ -329,7 +329,7 @@ OutputNBestList(OutputCollector *collector,
{
const StaticData &staticData = StaticData::Instance();
const std::vector<Moses::FactorType> &outputFactorOrder
= options().output.factor_order;
= options()->output.factor_order;
std::ostringstream out;
// wtf? copied from the original OutputNBestList
@ -351,7 +351,7 @@ OutputNBestList(OutputCollector *collector,
out << translationId << " ||| ";
OutputSurface(out, outputPhrase); // , outputFactorOrder, false);
out << " ||| ";
bool with_labels = options().nbest.include_feature_labels;
bool with_labels = options()->nbest.include_feature_labels;
features.OutputAllFeatureScores(out, with_labels);
out << " ||| " << i->GetScore() << '\n';
}
@ -509,7 +509,7 @@ void Manager::OutputBestHypo(OutputCollector *collector, search::Applied applied
if (collector == NULL) return;
std::ostringstream out;
FixPrecision(out);
if (options().output.ReportHypoScore) {
if (options()->output.ReportHypoScore) {
out << applied.GetScore() << ' ';
}
Phrase outPhrase;
@ -519,7 +519,7 @@ void Manager::OutputBestHypo(OutputCollector *collector, search::Applied applied
"Output phrase should have contained at least 2 words (beginning and end-of-sentence)");
outPhrase.RemoveWord(0);
outPhrase.RemoveWord(outPhrase.GetSize() - 1);
out << outPhrase.GetStringRep(options().output.factor_order);
out << outPhrase.GetStringRep(options()->output.factor_order);
out << '\n';
collector->Write(translationId, out.str());
@ -531,7 +531,7 @@ Manager::
OutputBestNone(OutputCollector *collector, long translationId) const
{
if (collector == NULL) return;
if (options().output.ReportHypoScore) {
if (options()->output.ReportHypoScore) {
collector->Write(translationId, "0 \n");
} else {
collector->Write(translationId, "\n");

View File

@ -29,8 +29,9 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
namespace Moses
{
InputType::InputType(long translationId)
: m_translationId(translationId)
InputType::InputType(AllOptions::ptr const& opts, long translationId)
: m_options(opts)
, m_translationId(translationId)
{
m_frontSpanCoveredLength = 0;
m_sourceCompleted.resize(0);

View File

@ -45,6 +45,7 @@ class TranslationTask;
class InputType
{
protected:
AllOptions::ptr m_options;
long m_translationId; //< contiguous Id
long m_documentId;
long m_topicId;
@ -67,11 +68,15 @@ public:
size_t m_frontSpanCoveredLength;
// how many words from the beginning are covered
InputType(long translationId = 0);
InputType(AllOptions::ptr const& opts, long translationId = 0);
virtual ~InputType();
virtual InputTypeEnum GetType() const = 0;
AllOptions::ptr const& options() const {
return m_options;
}
long GetTranslationId() const {
return m_translationId;
}
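
The options pointer is threaded through the whole input-type hierarchy: InputType stores it, Sentence forwards it to InputType, and TreeInput/TabbedSentence/ForestInput forward it to Sentence. A minimal sketch of that constructor chain, with stand-in class names (InputTypeLike, SentenceLike, TreeInputLike):

#include <boost/shared_ptr.hpp>
#include <iostream>

struct AllOptions {
  typedef boost::shared_ptr<AllOptions const> ptr;
  bool is_syntax;
  AllOptions() : is_syntax(false) {}
};

class InputTypeLike {
protected:
  AllOptions::ptr m_options;
public:
  explicit InputTypeLike(AllOptions::ptr const& opts) : m_options(opts) {}
  virtual ~InputTypeLike() {}
  AllOptions::ptr const& options() const { return m_options; }
};

class SentenceLike : public InputTypeLike {
public:
  explicit SentenceLike(AllOptions::ptr const& opts) : InputTypeLike(opts) {}
};

class TreeInputLike : public SentenceLike {    // cf. TreeInput(opts) : Sentence(opts)
public:
  explicit TreeInputLike(AllOptions::ptr const& opts) : SentenceLike(opts) {}
};

int main() {
  AllOptions::ptr opts(new AllOptions);
  TreeInputLike tree(opts);
  std::cout << std::boolalpha << tree.options()->is_syntax << std::endl;
  return 0;
}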

View File

@ -28,6 +28,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include "moses/TypeDef.h"
#include "moses/StaticData.h"
#include "moses/parameters/AllOptions.h"
//#include "BackwardLMState.h"
#include "moses/LM/Backward.h"
@ -61,12 +62,14 @@ namespace Moses
// Apparently some Boost versions use templates and are pretty strict about types matching.
#define SLOPPY_CHECK_CLOSE(ref, value, tol) BOOST_CHECK_CLOSE(static_cast<double>(ref), static_cast<double>(value), static_cast<double>(tol));
AllOptions::ptr DefaultOptions(new AllOptions);
class BackwardLanguageModelTest
{
public:
BackwardLanguageModelTest() :
dummyInput(new Sentence),
dummyInput(new Sentence(DefaultOptions)),
backwardLM(
static_cast< BackwardLanguageModel<lm::ngram::ProbingModel> * >(
ConstructBackwardLM(

View File

@ -515,8 +515,8 @@ void getLatticeMBRNBest(const Manager& manager, const TrellisPathList& nBestList
vector< float> estimatedScores;
manager.GetForwardBackwardSearchGraph(&connected, &connectedList,
&outgoingHyps, &estimatedScores);
LMBR_Options const& lmbr = manager.options().lmbr;
MBR_Options const& mbr = manager.options().mbr;
LMBR_Options const& lmbr = manager.options()->lmbr;
MBR_Options const& mbr = manager.options()->mbr;
pruneLatticeFB(connectedList, outgoingHyps, incomingEdges, estimatedScores,
manager.GetBestHypothesis(), lmbr.pruning_factor, mbr.scale);
calcNgramExpectations(connectedList, incomingEdges, ngramPosteriors,true);
@ -577,8 +577,8 @@ const TrellisPath doConsensusDecoding(const Manager& manager, const TrellisPathL
map<const Hypothesis*, vector<Edge> > incomingEdges;
vector< float> estimatedScores;
manager.GetForwardBackwardSearchGraph(&connected, &connectedList, &outgoingHyps, &estimatedScores);
LMBR_Options const& lmbr = manager.options().lmbr;
MBR_Options const& mbr = manager.options().mbr;
LMBR_Options const& lmbr = manager.options()->lmbr;
MBR_Options const& mbr = manager.options()->mbr;
pruneLatticeFB(connectedList, outgoingHyps, incomingEdges, estimatedScores,
manager.GetBestHypothesis(), lmbr.pruning_factor, mbr.scale);
calcNgramExpectations(connectedList, incomingEdges, ngramExpectations,false);

View File

@ -73,7 +73,7 @@ Manager::Manager(ttasksptr const& ttask)
boost::shared_ptr<InputType> source = ttask->GetSource();
m_transOptColl = source->CreateTranslationOptionCollection(ttask);
switch(options().search.algo) {
switch(options()->search.algo) {
case Normal:
m_search = new SearchNormal(*this, *m_transOptColl);
break;
@ -279,7 +279,7 @@ void Manager::CalcNBest(size_t count, TrellisPathList &ret, bool onlyDistinct) c
// factor defines stopping point for distinct n-best list if too
// many candidates identical
size_t nBestFactor = options().nbest.factor;
size_t nBestFactor = options()->nbest.factor;
if (nBestFactor < 1) nBestFactor = 1000; // 0 = unlimited
// MAIN loop
@ -303,7 +303,7 @@ void Manager::CalcNBest(size_t count, TrellisPathList &ret, bool onlyDistinct) c
if(onlyDistinct) {
const size_t nBestFactor = options().nbest.factor;
const size_t nBestFactor = options()->nbest.factor;
if (nBestFactor > 0)
contenders.Prune(count * nBestFactor);
} else {
@ -1343,7 +1343,7 @@ OutputSearchGraph(long translationId, std::ostream &out) const
vector<SearchGraphNode> searchGraph;
GetSearchGraph(searchGraph);
for (size_t i = 0; i < searchGraph.size(); ++i) {
OutputSearchNode(options(),translationId,out,searchGraph[i]);
OutputSearchNode(*options(),translationId,out,searchGraph[i]);
}
}
@ -1467,7 +1467,7 @@ void Manager::OutputBest(OutputCollector *collector) const
FixPrecision(debug,PRECISION);
// all derivations - send them to debug stream
if (options().output.PrintAllDerivations) {
if (options()->output.PrintAllDerivations) {
additionalReportingTime.start();
PrintAllDerivations(translationId, debug);
additionalReportingTime.stop();
@ -1478,34 +1478,34 @@ void Manager::OutputBest(OutputCollector *collector) const
// MAP decoding: best hypothesis
const Hypothesis* bestHypo = NULL;
if (!options().mbr.enabled) {
if (!options()->mbr.enabled) {
bestHypo = GetBestHypothesis();
if (bestHypo) {
if (options().output.ReportHypoScore) {
if (options()->output.ReportHypoScore) {
out << bestHypo->GetFutureScore() << ' ';
}
if (options().output.RecoverPath) {
if (options()->output.RecoverPath) {
bestHypo->OutputInput(out);
out << "||| ";
}
if (options().output.PrintID) {
if (options()->output.PrintID) {
out << translationId << " ";
}
// VN : I put back the code for OutputPassthroughInformation
if (options().output.PrintPassThrough) {
if (options()->output.PrintPassThrough) {
OutputPassthroughInformation(out, bestHypo);
}
// end of add back
if (options().output.ReportSegmentation == 2) {
if (options()->output.ReportSegmentation == 2) {
GetOutputLanguageModelOrder(out, bestHypo);
}
OutputSurface(out,*bestHypo, true);
if (options().output.PrintAlignmentInfo) {
if (options()->output.PrintAlignmentInfo) {
out << "||| ";
bestHypo->OutputAlignment(out, options().output.WA_SortOrder);
bestHypo->OutputAlignment(out, options()->output.WA_SortOrder);
}
IFVERBOSE(1) {
@ -1521,7 +1521,7 @@ void Manager::OutputBest(OutputCollector *collector) const
// MBR decoding (n-best MBR, lattice MBR, consensus)
else {
// we first need the n-best translations
size_t nBestSize = options().mbr.size;
size_t nBestSize = options()->mbr.size;
if (nBestSize <= 0) {
cerr << "ERROR: negative size for number of MBR candidate translations not allowed (option mbr-size)" << endl;
exit(1);
@ -1534,11 +1534,11 @@ void Manager::OutputBest(OutputCollector *collector) const
}
// lattice MBR
if (options().lmbr.enabled) {
if (options().nbest.enabled) {
if (options()->lmbr.enabled) {
if (options()->nbest.enabled) {
//lattice mbr nbest
vector<LatticeMBRSolution> solutions;
size_t n = min(nBestSize, options().nbest.nbest_size);
size_t n = min(nBestSize, options()->nbest.nbest_size);
getLatticeMBRNBest(*this,nBestList,solutions,n);
OutputLatticeMBRNBest(m_latticeNBestOut, solutions, translationId);
} else {
@ -1552,7 +1552,7 @@ void Manager::OutputBest(OutputCollector *collector) const
}
// consensus decoding
else if (options().search.consensus) {
else if (options()->search.consensus) {
const TrellisPath &conBestHypo = doConsensusDecoding(*this,nBestList);
OutputBestHypo(conBestHypo, out);
OutputAlignment(m_alignmentOut, conBestHypo);
@ -1563,7 +1563,7 @@ void Manager::OutputBest(OutputCollector *collector) const
// n-best MBR decoding
else {
const TrellisPath &mbrBestHypo = doMBR(nBestList, options());
const TrellisPath &mbrBestHypo = doMBR(nBestList, *options());
OutputBestHypo(mbrBestHypo, out);
OutputAlignment(m_alignmentOut, mbrBestHypo);
IFVERBOSE(2) {
@ -1587,14 +1587,14 @@ void Manager::OutputNBest(OutputCollector *collector) const
return;
}
if (options().lmbr.enabled) {
if (options().nbest.enabled) {
if (options()->lmbr.enabled) {
if (options()->nbest.enabled) {
collector->Write(m_source.GetTranslationId(), m_latticeNBestOut.str());
}
} else {
TrellisPathList nBestList;
ostringstream out;
NBestOptions const& nbo = options().nbest;
NBestOptions const& nbo = options()->nbest;
CalcNBest(nbo.nbest_size, nBestList, nbo.only_distinct);
OutputNBest(out, nBestList);
collector->Write(m_source.GetTranslationId(), out.str());
@ -1606,7 +1606,7 @@ void
Manager::
OutputNBest(std::ostream& out, Moses::TrellisPathList const& nBestList) const
{
NBestOptions const& nbo = options().nbest;
NBestOptions const& nbo = options()->nbest;
bool reportAllFactors = nbo.include_all_factors;
bool includeSegmentation = nbo.include_segmentation;
bool includeWordAlignment = nbo.include_alignment_info;
@ -1625,7 +1625,7 @@ OutputNBest(std::ostream& out, Moses::TrellisPathList const& nBestList) const
out << " |||";
// print scores with feature names
bool with_labels = options().nbest.include_feature_labels;
bool with_labels = options()->nbest.include_feature_labels;
path.GetScoreBreakdown()->OutputAllFeatureScores(out, with_labels);
// total
@ -1664,7 +1664,7 @@ OutputNBest(std::ostream& out, Moses::TrellisPathList const& nBestList) const
}
}
if (options().output.RecoverPath) {
if (options()->output.RecoverPath) {
out << " ||| ";
OutputInput(out, edges[0]);
}
@ -1687,19 +1687,19 @@ OutputSurface(std::ostream &out, Hypothesis const& edge, bool const recursive) c
OutputSurface(out,*edge.GetPrevHypo(), true);
}
std::vector<FactorType> outputFactorOrder = options().output.factor_order;
std::vector<FactorType> outputFactorOrder = options()->output.factor_order;
UTIL_THROW_IF2(outputFactorOrder.size() == 0,
"Must specific at least 1 output factor");
FactorType placeholderFactor = options().input.placeholder_factor;
FactorType placeholderFactor = options()->input.placeholder_factor;
std::map<size_t, const Factor*> placeholders;
if (placeholderFactor != NOT_FOUND) {
// creates map of target position -> factor for placeholders
placeholders = GetPlaceholders(edge, placeholderFactor);
}
bool markUnknown = options().unk.mark;
std::string const& fd = options().output.FactorDelimiter;
bool markUnknown = options()->unk.mark;
std::string const& fd = options()->output.FactorDelimiter;
TargetPhrase const& phrase = edge.GetCurrTargetPhrase();
size_t size = phrase.GetSize();
@ -1718,7 +1718,7 @@ OutputSurface(std::ostream &out, Hypothesis const& edge, bool const recursive) c
//preface surface form with UNK if marking unknowns
const Word &word = phrase.GetWord(pos);
if(markUnknown && word.IsOOV()) {
out << options().unk.prefix;
out << options()->unk.prefix;
}
out << *factor;
@ -1729,7 +1729,7 @@ OutputSurface(std::ostream &out, Hypothesis const& edge, bool const recursive) c
}
if(markUnknown && word.IsOOV()) {
out << options().unk.suffix;
out << options()->unk.suffix;
}
out << " ";
@ -1737,7 +1737,7 @@ OutputSurface(std::ostream &out, Hypothesis const& edge, bool const recursive) c
}
// trace ("report segmentation") option "-t" / "-tt"
int reportSegmentation = options().output.ReportSegmentation;
int reportSegmentation = options()->output.ReportSegmentation;
if (reportSegmentation > 0 && phrase.GetSize() > 0) {
const Range &sourceRange = edge.GetCurrSourceWordsRange();
const int sourceStart = sourceRange.GetStartPos();
@ -1752,7 +1752,7 @@ OutputSurface(std::ostream &out, Hypothesis const& edge, bool const recursive) c
out << ",";
ScoreComponentCollection scoreBreakdown(edge.GetScoreBreakdown());
scoreBreakdown.MinusEquals(edge.GetPrevHypo()->GetScoreBreakdown());
bool with_labels = options().nbest.include_feature_labels;
bool with_labels = options()->nbest.include_feature_labels;
scoreBreakdown.OutputAllFeatureScores(out, with_labels);
}
out << "| ";
@ -1765,7 +1765,7 @@ OutputAlignment(ostream &out, const AlignmentInfo &ai,
size_t sourceOffset, size_t targetOffset) const
{
typedef std::vector< const std::pair<size_t,size_t>* > AlignVec;
AlignVec alignments = ai.GetSortedAlignments(options().output.WA_SortOrder);
AlignVec alignments = ai.GetSortedAlignments(options()->output.WA_SortOrder);
AlignVec::const_iterator it;
for (it = alignments.begin(); it != alignments.end(); ++it) {
@ -1821,7 +1821,7 @@ void Manager::OutputLatticeSamples(OutputCollector *collector) const
if (collector) {
TrellisPathList latticeSamples;
ostringstream out;
CalcLatticeSamples(options().output.lattice_sample_size, latticeSamples);
CalcLatticeSamples(options()->output.lattice_sample_size, latticeSamples);
OutputNBest(out,latticeSamples);
collector->Write(m_source.GetTranslationId(), out.str());
}
@ -1932,7 +1932,7 @@ void Manager::OutputSearchGraphSLF() const
long translationId = m_source.GetTranslationId();
// Output search graph in HTK standard lattice format (SLF)
std::string const& slf = options().output.SearchGraphSLF;
std::string const& slf = options()->output.SearchGraphSLF;
if (slf.size()) {
util::StringStream fileName;
fileName << slf << "/" << translationId << ".slf";
@ -1959,7 +1959,7 @@ void Manager::OutputLatticeMBRNBest(std::ostream& out, const vector<LatticeMBRSo
out << " |||";
const vector<Word> mbrHypo = si->GetWords();
for (size_t i = 0 ; i < mbrHypo.size() ; i++) {
const Factor *factor = mbrHypo[i].GetFactor(options().output.factor_order[0]);
const Factor *factor = mbrHypo[i].GetFactor(options()->output.factor_order[0]);
if (i>0) out << " " << *factor;
else out << *factor;
}
@ -1980,7 +1980,7 @@ void
Manager::
OutputBestHypo(const std::vector<Word>& mbrBestHypo, ostream& out) const
{
FactorType f = options().output.factor_order[0];
FactorType f = options()->output.factor_order[0];
for (size_t i = 0 ; i < mbrBestHypo.size() ; i++) {
const Factor *factor = mbrBestHypo[i].GetFactor(f);
UTIL_THROW_IF2(factor == NULL, "No factor " << f << " at position " << i);
@ -2006,7 +2006,7 @@ void
Manager::
OutputAlignment(std::ostringstream &out, const TrellisPath &path) const
{
WordAlignmentSort waso = options().output.WA_SortOrder;
WordAlignmentSort waso = options()->output.WA_SortOrder;
BOOST_REVERSE_FOREACH(Hypothesis const* e, path.GetEdges())
e->OutputAlignment(out, false);
// Hypothesis::OutputAlignment(out, path.GetEdges(), waso);

View File

@ -39,8 +39,8 @@ MockHypothesisGuard
{
BOOST_CHECK_EQUAL(alignments.size(), targetSegments.size());
std::vector<Moses::FactorType> factors(1,0);
AllOptions const& opts = StaticData::Instance().options();
m_sentence.reset(new Sentence(0, sourceSentence, opts, &factors));
AllOptions::ptr opts(new AllOptions(StaticData::Instance().options()));
m_sentence.reset(new Sentence(opts,0, sourceSentence, &factors));
m_ttask = TranslationTask::create(m_sentence);
m_manager.reset(new Manager(m_ttask));

View File

@ -10,7 +10,7 @@ namespace Moses
Search::Search(Manager& manager)
: m_manager(manager)
, m_source(manager.GetSource())
, m_options(manager.options())
, m_options(*manager.options())
, m_inputPath()
, m_initialTransOpt()
, m_bitmaps(manager.GetSource().GetSize(), manager.GetSource().m_sourceCompleted)

View File

@ -87,13 +87,13 @@ void SearchCubePruning::Decode()
firstStack.CleanupArcList();
CreateForwardTodos(firstStack);
const size_t PopLimit = m_manager.options().cube.pop_limit;
const size_t PopLimit = m_manager.options()->cube.pop_limit;
VERBOSE(2,"Cube Pruning pop limit is " << PopLimit << std::endl);
const size_t Diversity = m_manager.options().cube.diversity;
const size_t Diversity = m_manager.options()->cube.diversity;
VERBOSE(2,"Cube Pruning diversity is " << Diversity << std::endl);
VERBOSE(2,"Max Phrase length is "
<< m_manager.options().search.max_phrase_length << std::endl);
<< m_manager.options()->search.max_phrase_length << std::endl);
// go through each stack
size_t stackNo = 1;
@ -226,7 +226,7 @@ void SearchCubePruning::CreateForwardTodos(HypothesisStackCubePruning &stack)
}
size_t maxSize = size - startPos;
size_t maxSizePhrase = m_manager.options().search.max_phrase_length;
size_t maxSizePhrase = m_manager.options()->search.max_phrase_length;
maxSize = std::min(maxSize, maxSizePhrase);
for (endPos = startPos+1; endPos < startPos + maxSize; endPos++) {
if (bitmap.GetValue(endPos))
@ -267,7 +267,7 @@ SearchCubePruning::
CheckDistortion(const Bitmap &hypoBitmap, const Range &range) const
{
// since we check for reordering limits, its good to have that limit handy
int maxDistortion = m_manager.options().reordering.max_distortion;
int maxDistortion = m_manager.options()->reordering.max_distortion;
if (maxDistortion < 0) return true;
// if there are reordering limits, make sure it is not violated

View File

@ -41,7 +41,7 @@ namespace Moses
{
Sentence::
Sentence() : Phrase(0) , InputType()
Sentence(AllOptions::ptr const& opts) : Phrase(0) , InputType(opts)
{
const StaticData& SD = StaticData::Instance();
if (SD.IsSyntax())
@ -167,8 +167,7 @@ aux_interpret_xml(AllOptions const& opts, std::string& line, std::vector<size_t>
void
Sentence::
init(string line, std::vector<FactorType> const& factorOrder,
AllOptions const& opts)
init(AllOptions::ptr const& opts, string line, std::vector<FactorType> const& factorOrder)
{
using namespace std;
const StaticData &SD = StaticData::Instance();
@ -192,7 +191,7 @@ init(string line, std::vector<FactorType> const& factorOrder,
vector<size_t> xmlWalls;
vector<pair<size_t, string> >placeholders;
aux_interpret_xml(opts, line, xmlWalls, placeholders);
aux_interpret_xml(*opts, line, xmlWalls, placeholders);
Phrase::CreateFromString(Input, factorOrder, line, NULL);
@ -205,7 +204,7 @@ init(string line, std::vector<FactorType> const& factorOrder,
// our XmlOptions and create TranslationOptions
// only fill the vector if we are parsing XML
if (opts.input.xml_policy != XmlPassThrough) {
if (opts->input.xml_policy != XmlPassThrough) {
m_xmlCoverageMap.assign(GetSize(), false);
BOOST_FOREACH(XmlOption const* o, m_xmlOptions) {
Range const& r = o->range;
@ -240,7 +239,7 @@ Read(std::istream& in,
std::string line;
if (getline(in, line, '\n').eof())
return 0;
init(line, factorOrder, opts);
init(m_options, line, factorOrder);
return 1;
}
@ -266,9 +265,9 @@ TranslationOptionCollection*
Sentence::
CreateTranslationOptionCollection(ttasksptr const& ttask) const
{
size_t maxNoTransOptPerCoverage = ttask->options().search.max_trans_opt_per_cov;
size_t maxNoTransOptPerCoverage = ttask->options()->search.max_trans_opt_per_cov;
// StaticData::Instance().GetMaxNoTransOptPerCoverage();
float transOptThreshold = ttask->options().search.trans_opt_threshold;
float transOptThreshold = ttask->options()->search.trans_opt_threshold;
// StaticData::Instance().GetTranslationOptionThreshold();
TranslationOptionCollection *rv
= new TranslationOptionCollectionText(ttask, *this,
@ -375,14 +374,12 @@ CreateFromString(vector<FactorType> const& FOrder, string const& phraseString)
}
Sentence::
Sentence(size_t const transId,
string const& stext,
AllOptions const& opts,
vector<FactorType> const* IFO)
: InputType(transId)
Sentence(AllOptions::ptr const& opts, size_t const transId,
string stext, vector<FactorType> const* IFO)
: InputType(opts, transId)
{
if (IFO) init(stext, *IFO, opts);
else init(stext, opts.input.factor_order, opts);
if (IFO) init(opts,stext, *IFO);
else init(opts, stext, opts->input.factor_order);
}
}

View File

@ -63,9 +63,8 @@ protected:
std::vector<std::map<std::string,std::string> > m_dlt_meta;
public:
Sentence();
Sentence(size_t const transId, std::string const& stext,
AllOptions const& opts,
Sentence(AllOptions::ptr const& opts);
Sentence(AllOptions::ptr const& opts, size_t const transId, std::string stext,
std::vector<FactorType> const* IFO = NULL);
// Sentence(size_t const transId, std::string const& stext);
~Sentence();
@ -117,8 +116,8 @@ public:
void
init(std::string line, std::vector<FactorType> const& factorOrder,
AllOptions const& opts);
init(AllOptions::ptr const& opts, std::string line,
std::vector<FactorType> const& factorOrder);
std::vector<std::map<std::string,std::string> > const&
GetDltMeta() const {

View File

@ -42,7 +42,7 @@ Manager<RuleMatcher>::Manager(ttasksptr const& ttask)
m_rootVertex = p->GetRootVertex();
m_sentenceLength = p->GetSize();
} else if (const TreeInput *p = dynamic_cast<const TreeInput*>(&m_source)) {
T2S::InputTreeBuilder builder(options().output.factor_order);
T2S::InputTreeBuilder builder(options()->output.factor_order);
T2S::InputTree tmpTree;
builder.Build(*p, "Q", tmpTree);
boost::shared_ptr<Forest> forest = boost::make_shared<Forest>();
@ -75,7 +75,7 @@ void Manager<RuleMatcher>::Decode()
// Create a glue rule synthesizer.
GlueRuleSynthesizer glueRuleSynthesizer(*m_glueRuleTrie,
options().input.factor_order);
options()->input.factor_order);
// Sort the input forest's vertices into bottom-up topological order.
std::vector<const Forest::Vertex *> sortedVertices;

View File

@ -23,12 +23,12 @@ void Manager::OutputBest(OutputCollector *collector) const
const SHyperedge *best = GetBestSHyperedge();
if (best == NULL) {
VERBOSE(1, "NO BEST TRANSLATION" << std::endl);
if (options().output.ReportHypoScore) {
if (options()->output.ReportHypoScore) {
out << "0 ";
}
out << '\n';
} else {
if (options().output.ReportHypoScore) {
if (options()->output.ReportHypoScore) {
out << best->label.futureScore << " ";
}
Phrase yield = GetOneBestTargetYield(*best);
@ -37,7 +37,7 @@ void Manager::OutputBest(OutputCollector *collector) const
"Output phrase should have contained at least 2 words (beginning and end-of-sentence)");
yield.RemoveWord(0);
yield.RemoveWord(yield.GetSize()-1);
out << yield.GetStringRep(options().output.factor_order);
out << yield.GetStringRep(options()->output.factor_order);
out << '\n';
}
collector->Write(m_source.GetTranslationId(), out.str());
@ -48,8 +48,8 @@ void Manager::OutputNBest(OutputCollector *collector) const
if (collector) {
long translationId = m_source.GetTranslationId();
KBestExtractor::KBestVec nBestList;
ExtractKBest(options().nbest.nbest_size, nBestList,
options().nbest.only_distinct);
ExtractKBest(options()->nbest.nbest_size, nBestList,
options()->nbest.only_distinct);
OutputNBestList(collector, nBestList, translationId);
}
}
@ -73,7 +73,7 @@ void Manager::OutputNBestList(OutputCollector *collector,
const KBestExtractor::KBestVec &nBestList,
long translationId) const
{
const std::vector<FactorType> &outputFactorOrder = options().output.factor_order;
const std::vector<FactorType> &outputFactorOrder = options()->output.factor_order;
std::ostringstream out;
@ -83,8 +83,8 @@ void Manager::OutputNBestList(OutputCollector *collector,
FixPrecision(out);
}
bool includeWordAlignment = options().nbest.include_alignment_info;
bool PrintNBestTrees = options().nbest.print_trees; // PrintNBestTrees();
bool includeWordAlignment = options()->nbest.include_alignment_info;
bool PrintNBestTrees = options()->nbest.print_trees; // PrintNBestTrees();
for (KBestExtractor::KBestVec::const_iterator p = nBestList.begin();
p != nBestList.end(); ++p) {
@ -103,7 +103,7 @@ void Manager::OutputNBestList(OutputCollector *collector,
out << translationId << " ||| ";
OutputSurface(out, outputPhrase); // , outputFactorOrder, false);
out << " ||| ";
bool with_labels = options().nbest.include_feature_labels;
bool with_labels = options()->nbest.include_feature_labels;
derivation.scoreBreakdown.OutputAllFeatureScores(out, with_labels);
out << " ||| " << derivation.score;

View File

@ -33,7 +33,7 @@ Manager<RuleMatcher>::Manager(ttasksptr const& ttask)
{
if (const TreeInput *p = dynamic_cast<const TreeInput*>(&m_source)) {
// Construct the InputTree.
InputTreeBuilder builder(options().output.factor_order);
InputTreeBuilder builder(options()->output.factor_order);
builder.Build(*p, "Q", m_inputTree);
} else {
UTIL_THROW2("ERROR: T2S::Manager requires input to be a tree");
@ -97,9 +97,9 @@ void Manager<RuleMatcher>::Decode()
const StaticData &staticData = StaticData::Instance();
// Get various pruning-related constants.
const std::size_t popLimit = this->options().cube.pop_limit;
const std::size_t popLimit = this->options()->cube.pop_limit;
const std::size_t ruleLimit = staticData.GetRuleLimit();
const std::size_t stackLimit = this->options().search.stack_size;
const std::size_t stackLimit = this->options()->search.stack_size;
// Initialize the stacks.
InitializeStacks();

View File

@ -53,7 +53,7 @@ class TabbedSentence : public Sentence
{
public:
TabbedSentence() : Sentence() {}
TabbedSentence(AllOptions::ptr const& opts) : Sentence(opts) {}
~TabbedSentence() {}
InputTypeEnum GetType() const {

View File

@ -72,7 +72,7 @@ TargetPhrase::TargetPhrase(ttasksptr& ttask, std::string out_string, const Phras
//ACAT
const StaticData &staticData = StaticData::Instance();
// XXX should this really be InputFactorOrder???
CreateFromString(Output, ttask->options().input.factor_order, out_string,
CreateFromString(Output, ttask->options()->input.factor_order, out_string,
NULL);
}

View File

@ -47,8 +47,8 @@ void PhraseDictionaryALSuffixArray::InitializeForInput(ttasksptr const& ttask)
std::auto_ptr<RuleTableLoader> loader =
RuleTableLoaderFactory::Create(grammarFile);
AllOptions const& opts = ttask->options();
bool ret = loader->Load(opts, m_input, m_output, grammarFile, m_tableLimit, *this);
AllOptions::ptr const& opts = ttask->options();
bool ret = loader->Load(*opts, m_input, m_output, grammarFile, m_tableLimit, *this);
UTIL_THROW_IF2(!ret, "Rules not successfully loaded for sentence id "
<< translationId);

View File

@ -60,7 +60,7 @@ TranslationOptionCollection(ttasksptr const& ttask,
, m_estimatedScores(src.GetSize())
, m_maxNoTransOptPerCoverage(maxNoTransOptPerCoverage)
, m_translationOptionThreshold(translationOptionThreshold)
, m_max_phrase_length(ttask->options().search.max_phrase_length)
, m_max_phrase_length(ttask->options()->search.max_phrase_length)
{
// create 2-d vector
size_t size = src.GetSize();
@ -147,7 +147,7 @@ ProcessUnknownWord()
// bool alwaysCreateDirectTranslationOption
// = StaticData::Instance().IsAlwaysCreateDirectTranslationOption();
bool always = m_ttask.lock()->options().unk.always_create_direct_transopt;
bool always = m_ttask.lock()->options()->unk.always_create_direct_transopt;
// create unknown words for 1 word coverage where we don't have any trans options
for (size_t pos = 0 ; pos < size ; ++pos) {
@ -194,7 +194,7 @@ ProcessOneUnknownWord(const InputPath &inputPath, size_t sourcePos,
const Factor *f = sourceWord[0]; // TODO hack. shouldn't know which factor is surface
const StringPiece s = f->GetString();
bool isEpsilon = (s=="" || s==EPSILON);
bool dropUnk = GetTranslationTask()->options().unk.drop;
bool dropUnk = GetTranslationTask()->options()->unk.drop;
if (dropUnk) {
isDigit = s.find_first_of("0123456789");
if (isDigit == string::npos)
@ -388,7 +388,7 @@ CreateTranslationOptionsForRange
{
typedef DecodeStepTranslation Tstep;
typedef DecodeStepGeneration Gstep;
XmlInputType xml_policy = m_ttask.lock()->options().input.xml_policy;
XmlInputType xml_policy = m_ttask.lock()->options()->input.xml_policy;
if ((xml_policy != XmlExclusive)
|| !HasXmlOptionsOverlappingRange(sPos,ePos)) {

View File

@ -41,7 +41,7 @@ TranslationOptionCollectionConfusionNet(ttasksptr const& ttask,
size_t inputSize = input.GetSize();
m_inputPathMatrix.resize(inputSize);
size_t maxSizePhrase = ttask->options().search.max_phrase_length;
size_t maxSizePhrase = ttask->options()->search.max_phrase_length;
maxSizePhrase = std::min(inputSize, maxSizePhrase);
// 1-word phrases
@ -225,7 +225,7 @@ CreateTranslationOptionsForRangeLEGACY(const DecodeGraph &decodeGraph,
bool retval = true;
size_t const max_phrase_length
= StaticData::Instance().options().search.max_phrase_length;
XmlInputType intype = m_ttask.lock()->options().input.xml_policy;
XmlInputType intype = m_ttask.lock()->options()->input.xml_policy;
if ((intype != XmlExclusive) || !HasXmlOptionsOverlappingRange(startPos,endPos)) {
InputPathList &inputPathList = GetInputPathList(startPos, endPos);

View File

@ -32,7 +32,7 @@ TranslationOptionCollectionLattice
const InputFeature *inputFeature = InputFeature::InstancePtr();
UTIL_THROW_IF2(inputFeature == NULL, "Input feature must be specified");
size_t maxPhraseLength = ttask->options().search.max_phrase_length; //StaticData::Instance().GetMaxPhraseLength();
size_t maxPhraseLength = ttask->options()->search.max_phrase_length; //StaticData::Instance().GetMaxPhraseLength();
size_t size = input.GetSize();
// 1-word phrases
@ -69,7 +69,7 @@ TranslationOptionCollectionLattice
m_inputPathQueue.push_back(path);
// recursive
Extend(*path, input, ttask->options().search.max_phrase_length);
Extend(*path, input, ttask->options()->search.max_phrase_length);
}
}

View File

@ -77,7 +77,7 @@ TranslationTask
boost::shared_ptr<IOWrapper> const& ioWrapper)
: m_source(source) , m_ioWrapper(ioWrapper)
{
m_options = StaticData::Instance().options();
m_options = source->options();
}
TranslationTask::~TranslationTask()
@ -89,8 +89,8 @@ TranslationTask
::SetupManager(SearchAlgorithm algo)
{
boost::shared_ptr<BaseManager> manager;
StaticData const& staticData = StaticData::Instance();
if (algo == DefaultSearchAlgorithm) algo = staticData.options().search.algo;
// StaticData const& staticData = StaticData::Instance();
// if (algo == DefaultSearchAlgorithm) algo = staticData.options().search.algo;
if (!is_syntax(algo))
manager.reset(new Manager(this->self())); // phrase-based
@ -104,7 +104,7 @@ TranslationTask
else if (algo == SyntaxS2T) {
// new-style string-to-tree decoding (ask Phil Williams)
S2TParsingAlgorithm algorithm = options().syntax.s2t_parsing_algo; // staticData.GetS2TParsingAlgorithm();
S2TParsingAlgorithm algorithm = m_options->syntax.s2t_parsing_algo;
if (algorithm == RecursiveCYKPlus) {
typedef Syntax::S2T::EagerParserCallback Callback;
typedef Syntax::S2T::RecursiveCYKPlusParser<Callback> Parser;
@ -132,7 +132,7 @@ TranslationTask
return manager;
}
AllOptions const&
AllOptions::ptr const&
TranslationTask::
options() const
{
@ -185,7 +185,7 @@ void TranslationTask::Run()
Timer initTime;
initTime.start();
boost::shared_ptr<BaseManager> manager = SetupManager();
boost::shared_ptr<BaseManager> manager = SetupManager(m_options->search.algo);
VERBOSE(1, "Line " << translationId << ": Initialize search took "
<< initTime << " seconds total" << endl);
@ -218,7 +218,7 @@ void TranslationTask::Run()
// Output search graph in hypergraph format for Kenneth Heafield's
// lazy hypergraph decoder; writes to stderr
if (options().output.SearchGraphHG.size()) {
if (m_options->output.SearchGraphHG.size()) {
size_t transId = manager->GetSource().GetTranslationId();
string fname = io->GetHypergraphOutputFileName(transId);
manager->OutputSearchGraphAsHypergraph(fname, PRECISION);

View File

@ -44,7 +44,7 @@ class TranslationTask : public Moses::Task
return *this;
}
protected:
AllOptions m_options;
AllOptions::ptr m_options;
boost::weak_ptr<TranslationTask> m_self; // weak ptr to myself
boost::shared_ptr<ContextScope> m_scope; // sores local info
// pointer to ContextScope, which stores context-specific information
@ -115,7 +115,7 @@ public:
}
boost::shared_ptr<BaseManager>
SetupManager(SearchAlgorithm algo = DefaultSearchAlgorithm);
SetupManager(SearchAlgorithm algo); // = DefaultSearchAlgorithm);
boost::shared_ptr<ContextScope> const&
@ -134,7 +134,7 @@ public:
// void SetContextWeights(std::string const& context_weights);
// void ReSetContextWeights(std::map<std::string, float> const& new_weights);
AllOptions const& options() const;
AllOptions::ptr const& options() const;
protected:
boost::shared_ptr<Moses::InputType> m_source;
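
TranslationTask now initialises m_options from its source (m_options = source->options()) rather than from StaticData, so decoding uses whatever configuration the input was created with. A sketch of that hand-off with simplified stand-ins (InputLike, TranslationTaskLike):

#include <boost/shared_ptr.hpp>
#include <iostream>

struct AllOptions {
  typedef boost::shared_ptr<AllOptions const> ptr;
  int search_algo;
  AllOptions() : search_algo(0) {}
};

struct InputLike {                               // stand-in for InputType
  AllOptions::ptr m_options;
  explicit InputLike(AllOptions::ptr const& opts) : m_options(opts) {}
  AllOptions::ptr const& options() const { return m_options; }
};

class TranslationTaskLike {
  boost::shared_ptr<InputLike> m_source;
  AllOptions::ptr m_options;
public:
  explicit TranslationTaskLike(boost::shared_ptr<InputLike> const& source)
    : m_source(source), m_options(source->options()) {} // was: StaticData::Instance().options()
  void Run() const {
    std::cout << "decoding with search algorithm " << m_options->search_algo << std::endl;
  }
};

int main() {
  AllOptions::ptr opts(new AllOptions);
  boost::shared_ptr<InputLike> in(new InputLike(opts));
  TranslationTaskLike task(in);
  task.Run();
  return 0;
}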

View File

@ -48,7 +48,7 @@ protected:
std::vector<XmlOption const*> &res);
public:
TreeInput() : Sentence() { }
TreeInput(AllOptions::ptr const& opts) : Sentence(opts) { }
InputTypeEnum GetType() const {
return TreeInputType;

View File

@ -214,7 +214,7 @@ Phrase TrellisPath::GetTargetPhrase() const
Phrase TrellisPath::GetSurfacePhrase() const
{
std::vector<FactorType> const& oFactor = manager().options().output.factor_order;
std::vector<FactorType> const& oFactor = manager().options()->output.factor_order;
Phrase targetPhrase = GetTargetPhrase();
Phrase ret(targetPhrase.GetSize());

View File

@ -11,7 +11,7 @@
namespace Moses
{
WordLattice::WordLattice() : ConfusionNet()
WordLattice::WordLattice(AllOptions::ptr const& opts) : ConfusionNet(opts)
{
UTIL_THROW_IF2(InputFeature::InstancePtr() == NULL,
"Input feature must be specified");
@ -231,9 +231,9 @@ WordLattice
// size_t maxNoTransOptPerCoverage = StaticData::Instance().GetMaxNoTransOptPerCoverage();
// float translationOptionThreshold = StaticData::Instance().GetTranslationOptionThreshold();
size_t maxNoTransOptPerCoverage = ttask->options().search.max_trans_opt_per_cov;
size_t maxNoTransOptPerCoverage = ttask->options()->search.max_trans_opt_per_cov;
// StaticData::Instance().GetMaxNoTransOptPerCoverage();
float translationOptionThreshold = ttask->options().search.trans_opt_threshold;
float translationOptionThreshold = ttask->options()->search.trans_opt_threshold;
// StaticData::Instance().GetTranslationOptionThreshold();

View File

@ -23,7 +23,7 @@ private:
std::vector<std::vector<int> > distances;
public:
WordLattice();
WordLattice(AllOptions::ptr const& opts);
InputTypeEnum GetType() const {
return WordLatticeInput;

View File

@ -14,12 +14,14 @@
#include "ReportingOptions.h"
#include "OOVHandlingOptions.h"
#include "SyntaxOptions.h"
#include <boost/shared_ptr.hpp>
namespace Moses
{
struct
AllOptions : public OptionsBaseClass
{
typedef boost::shared_ptr<AllOptions const> ptr;
SearchOptions search;
CubePruningOptions cube;
NBestOptions nbest;
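
The nested typedef added here is what makes the rest of the diff readable: every header can now write AllOptions::ptr instead of spelling out boost::shared_ptr<AllOptions const>. A tiny sketch of the idiom (the SearchOptions stand-in is trimmed down for illustration):

#include <boost/shared_ptr.hpp>

struct SearchOptions { int stack_size; SearchOptions() : stack_size(100) {} };

struct AllOptions {
  typedef boost::shared_ptr<AllOptions const> ptr; // the alias added in this file
  SearchOptions search;
};

// elsewhere in the code base, signatures stay short and uniform:
int stack_size(AllOptions::ptr const& opts) { return opts->search.stack_size; }

int main() {
  AllOptions::ptr opts(new AllOptions);
  return stack_size(opts) == 100 ? 0 : 1;
}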

View File

@ -11,7 +11,7 @@ namespace Moses {
Range const& src = this->GetCurrSourceWordsRange();
Range const& trg = this->GetCurrTargetWordsRange();
WordAlignmentSort waso = m_manager.options().output.WA_SortOrder;
WordAlignmentSort waso = m_manager.options()->output.WA_SortOrder;
vector<pair<size_t,size_t> const* > a
= this->GetCurrTargetPhrase().GetAlignTerm().GetSortedAlignments(waso);
typedef pair<size_t,size_t> item;

View File

@ -2,6 +2,7 @@
#pragma once
#include "moses/Util.h"
#include "moses/ContextScope.h"
#include "moses/parameters/AllOptions.h"
#include <sys/time.h>
#include <boost/unordered_map.hpp>
@ -21,7 +22,8 @@ namespace MosesServer{
Session(uint64_t const session_id)
: id(session_id), scope(new Moses::ContextScope)
: id(session_id)
, scope(new Moses::ContextScope)
{
last_access = start_time = time(NULL);
}

View File

@ -57,13 +57,6 @@ Run()
parse_request(params);
// cerr << "SESSION ID" << ret->m_session_id << endl;
if (m_session_id)
{
Session const& S = m_translator->get_session(m_session_id);
m_scope = S.scope;
m_session_id = S.id;
}
else m_scope.reset(new Moses::ContextScope);
// settings within the session scope
param_t::const_iterator si = params.find("context-weights");
@ -90,7 +83,7 @@ TranslationRequest::
add_phrase_aln_info(Hypothesis const& h, vector<xmlrpc_c::value>& aInfo) const
{
// if (!m_withAlignInfo) return;
if (!options().output.ReportSegmentation) return;
if (!options()->output.ReportSegmentation) return;
Range const& trg = h.GetCurrTargetWordsRange();
Range const& src = h.GetCurrSourceWordsRange();
@ -152,7 +145,7 @@ insertGraphInfo(Manager& manager, map<string, xmlrpc_c::value>& retData)
x["recombined"] = value_int(n.recombinationHypo->GetId());
x["cover-start"] = value_int(hypo->GetCurrSourceWordsRange().GetStartPos());
x["cover-end"] = value_int(hypo->GetCurrSourceWordsRange().GetEndPos());
x["out"] = value_string(hypo->GetCurrTargetPhrase().GetStringRep(options().output.factor_order));
x["out"] = value_string(hypo->GetCurrTargetPhrase().GetStringRep(options()->output.factor_order));
}
searchGraphXml.push_back(value_struct(x));
}
@ -166,7 +159,7 @@ outputNBest(const Manager& manager, map<string, xmlrpc_c::value>& retData)
TrellisPathList nBestList;
vector<xmlrpc_c::value> nBestXml;
Moses::NBestOptions const& nbo = m_options.nbest;
Moses::NBestOptions const& nbo = m_options->nbest;
manager.CalcNBest(nbo.nbest_size, nBestList, nbo.only_distinct);
manager.OutputNBest(cout, nBestList);
@ -196,7 +189,7 @@ TranslationRequest::
insertTranslationOptions(Moses::Manager& manager,
std::map<std::string, xmlrpc_c::value>& retData)
{
std::vector<Moses::FactorType> const& ofactor_order = options().output.factor_order;
std::vector<Moses::FactorType> const& ofactor_order = options()->output.factor_order;
const TranslationOptionCollection* toptsColl = manager.getSntTranslationOptions();
vector<xmlrpc_c::value> toptsXml;
@ -231,7 +224,7 @@ TranslationRequest(xmlrpc_c::paramList const& paramList,
: m_cond(cond), m_mutex(mut), m_done(false), m_paramList(paramList)
, m_session_id(0)
{
m_options = StaticData::Instance().options();
}
bool
@ -251,23 +244,42 @@ parse_request(std::map<std::string, xmlrpc_c::value> const& params)
{
// parse XMLRPC request
m_paramList.verifyEnd(1); // ??? UG
m_options.update(params);
typedef std::map<std::string, xmlrpc_c::value> params_t;
params_t::const_iterator si;
si = params.find("session-id");
if (si != params.end())
{
m_session_id = xmlrpc_c::value_int(si->second);
Session const& S = m_translator->get_session(m_session_id);
m_scope = S.scope;
m_session_id = S.id;
}
else
{
m_session_id = 0;
m_scope.reset(new Moses::ContextScope);
}
boost::shared_ptr<Moses::AllOptions> opts(new Moses::AllOptions(StaticData::Instance().options()));
opts->update(params);
m_withGraphInfo = check(params, "sg");
if (m_withGraphInfo || opts->nbest.nbest_size > 0) {
opts->output.SearchGraph = "true";
opts->nbest.enabled = true;
}
m_options = opts;
// source text must be given, or we don't know what to translate
typedef std::map<std::string, xmlrpc_c::value> params_t;
params_t::const_iterator si = params.find("text");
si = params.find("text");
if (si == params.end())
throw xmlrpc_c::fault("Missing source text", xmlrpc_c::fault::CODE_PARSE);
m_source_string = xmlrpc_c::value_string(si->second);
XVERBOSE(1,"Input: " << m_source_string << endl);
si = params.find("session-id");
if (si != params.end())
m_session_id = xmlrpc_c::value_int(si->second);
else
m_session_id = 0;
m_withGraphInfo = check(params, "sg");
m_withTopts = check(params, "topt");
m_withScoreBreakdown = check(params, "add-score-breakdown");
si = params.find("lambda");
@ -284,8 +296,6 @@ parse_request(std::map<std::string, xmlrpc_c::value> const& params)
string const model_name = xmlrpc_c::value_string(si->second);
PhraseDictionaryMultiModel* pdmm
= (PhraseDictionaryMultiModel*) FindPhraseDictionary(model_name);
// Moses::PhraseDictionaryMultiModel* pdmm
// = FindPhraseDictionary(model_name);
pdmm->SetTemporaryMultiModelWeightsVector(w);
}
}
@ -307,7 +317,7 @@ parse_request(std::map<std::string, xmlrpc_c::value> const& params)
// for (size_t i = 1; i < tmp.size(); i += 2)
// m_bias[xmlrpc_c::value_int(tmp[i-1])] = xmlrpc_c::value_double(tmp[i]);
// }
m_source.reset(new Sentence(0,m_source_string,m_options));
m_source.reset(new Sentence(m_options,0,m_source_string));
} // end of Translationtask::parse_request()
@ -315,9 +325,9 @@ void
TranslationRequest::
run_chart_decoder()
{
Moses::TreeInput tinput;
Moses::TreeInput tinput(m_options);
istringstream buf(m_source_string + "\n");
tinput.Read(buf, options().input.factor_order, m_options);
tinput.Read(buf, options()->input.factor_order, *m_options);
Moses::ChartManager manager(this->self());
manager.Decode();
@ -352,7 +362,7 @@ pack_hypothesis(const Moses::Manager& manager,
dest[key] = xmlrpc_c::value_string(target.str());
// if (m_withAlignInfo) {
if (options().output.ReportSegmentation) {
if (options()->output.ReportSegmentation) {
// phrase alignment, if requested
vector<xmlrpc_c::value> p_aln;
@ -362,7 +372,7 @@ pack_hypothesis(const Moses::Manager& manager,
}
// if (m_withWordAlignInfo) {
if (options().output.PrintAlignmentInfo) {
if (options()->output.PrintAlignmentInfo) {
// word alignment, if requested
vector<xmlrpc_c::value> w_aln;
BOOST_REVERSE_FOREACH(Hypothesis const* e, edges)
@ -388,11 +398,6 @@ void
TranslationRequest::
run_phrase_decoder()
{
if (m_withGraphInfo || m_options.nbest.nbest_size > 0) {
m_options.output.SearchGraph = "true";
m_options.nbest.enabled = true;
}
Manager manager(this->self());
manager.Decode();
pack_hypothesis(manager, manager.GetBestHypothesis(), "text", m_retData);
@ -401,7 +406,7 @@ run_phrase_decoder()
if (m_withGraphInfo) insertGraphInfo(manager,m_retData);
if (m_withTopts) insertTranslationOptions(manager,m_retData);
if (m_options.nbest.nbest_size) outputNBest(manager, m_retData);
if (m_options->nbest.nbest_size) outputNBest(manager, m_retData);
}
}
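
In the server, the request handler now builds a per-request options object: copy the global snapshot, apply the XML-RPC parameters via update(), switch on n-best/search-graph output if needed, and construct the Sentence with that object. The sketch below mirrors that flow with hypothetical, simplified stand-ins (a string-map update() in place of the real XML-RPC parameter handling, SentenceLike in place of Sentence):

#include <boost/shared_ptr.hpp>
#include <cstdlib>
#include <iostream>
#include <map>
#include <string>

struct AllOptions {
  typedef boost::shared_ptr<AllOptions const> ptr;
  int nbest_size;
  bool nbest_enabled;
  AllOptions() : nbest_size(0), nbest_enabled(false) {}
  // stand-in for the real AllOptions::update(), which reads the request parameters
  void update(std::map<std::string, std::string> const& params) {
    std::map<std::string, std::string>::const_iterator i = params.find("nbest");
    if (i != params.end()) nbest_size = std::atoi(i->second.c_str());
  }
};

AllOptions const& global_options() { static AllOptions g; return g; } // cf. StaticData::Instance().options()

struct SentenceLike {
  AllOptions::ptr m_options;
  std::string text;
  SentenceLike(AllOptions::ptr const& opts, std::string const& t)
    : m_options(opts), text(t) {}
};

int main() {
  std::map<std::string, std::string> params;
  params["nbest"] = "5";

  // per-request copy of the global options, adjusted before it is frozen
  boost::shared_ptr<AllOptions> opts(new AllOptions(global_options()));
  opts->update(params);
  if (opts->nbest_size > 0) opts->nbest_enabled = true;

  SentenceLike source(opts, "hello world");   // cf. new Sentence(m_options, 0, m_source_string)
  std::cout << source.m_options->nbest_size << std::endl;
  return 0;
}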