daily automatic beautifier

This commit is contained in:
MosesAdmin 2015-08-08 00:00:45 +01:00
parent 883c34aee9
commit 21aa5af640
29 changed files with 169 additions and 171 deletions

View File

@ -191,11 +191,11 @@ int main(int argc, char* argv[])
BOOST_FOREACH(float const& p, pgrid) {
lmbr.precision = p;
BOOST_FOREACH(float const& r, rgrid) {
lmbr.ratio = r;
lmbr.ratio = r;
BOOST_FOREACH(size_t const prune_i, prune_grid) {
lmbr.pruning_factor = prune_i;
lmbr.pruning_factor = prune_i;
BOOST_FOREACH(float const& scale_i, scale_grid) {
mbr.scale = scale_i;
mbr.scale = scale_i;
size_t lineCount = source->GetTranslationId();
cout << lineCount << " ||| " << p << " "
<< r << " " << size_t(prune_i) << " " << scale_i

View File

@ -288,10 +288,10 @@ void ChartHypothesis::CleanupArcList()
*/
const StaticData &staticData = StaticData::Instance();
size_t nBestSize = staticData.options().nbest.nbest_size;
bool distinctNBest = (staticData.options().nbest.only_distinct
|| staticData.options().mbr.enabled
|| staticData.GetOutputSearchGraph()
|| staticData.GetOutputSearchGraphHypergraph());
bool distinctNBest = (staticData.options().nbest.only_distinct
|| staticData.options().mbr.enabled
|| staticData.GetOutputSearchGraph()
|| staticData.GetOutputSearchGraphHypergraph());
if (!distinctNBest && m_arcList->size() > nBestSize) {
// prune arc list only if there are too many arcs

View File

@ -322,8 +322,8 @@ void ChartManager::OutputNBest(OutputCollector *collector) const
if (nBestSize > 0) {
const size_t translationId = m_source.GetTranslationId();
VERBOSE(2,"WRITING " << nBestSize << " TRANSLATION ALTERNATIVES TO "
<< staticData.options().nbest.output_file_path << endl);
VERBOSE(2,"WRITING " << nBestSize << " TRANSLATION ALTERNATIVES TO "
<< staticData.options().nbest.output_file_path << endl);
std::vector<boost::shared_ptr<ChartKBestExtractor::Derivation> > nBestList;
CalcNBest(nBestSize, nBestList,staticData.options().nbest.only_distinct);
OutputNBestList(collector, nBestList, translationId);

View File

@ -106,7 +106,7 @@ void ChartParserUnknown::Process(const Word &sourceWord, const WordsRange &range
targetPhrase->SetTargetLHS(targetLHS);
targetPhrase->SetAlignmentInfo("0-0");
targetPhrase->EvaluateInIsolation(*unksrc);
if (staticData.IsDetailedTreeFragmentsTranslationReportingEnabled() || staticData.options().nbest.print_trees || staticData.GetTreeStructure() != NULL) {
targetPhrase->SetProperty("Tree","[ " + (*targetLHS)[0]->GetString().as_string() + " "+sourceWord[0]->GetString().as_string()+" ]");
}

View File

@ -68,8 +68,8 @@ ConfusionNet() : InputType()
const StaticData& SD = StaticData::Instance();
if (SD.IsSyntax()) {
m_defaultLabelSet.insert(SD.GetInputDefaultNonTerminal());
}
m_defaultLabelSet.insert(SD.GetInputDefaultNonTerminal());
}
UTIL_THROW_IF2(&InputFeature::Instance() == NULL, "Input feature must be specified");
}

View File

@ -177,8 +177,8 @@ batch_run()
#endif
// using context for adaptation:
// e.g., context words / strings from config file / cmd line
std::string context_string;
// e.g., context words / strings from config file / cmd line
std::string context_string;
params.SetParameter(context_string,"context-string",string(""));
// ... or weights for documents/domains from config file / cmd. line
@ -189,18 +189,18 @@ batch_run()
size_t size_t_max = std::numeric_limits<size_t>::max();
bool use_context_window = ioWrapper->GetLookAhead() || ioWrapper->GetLookBack();
bool use_context = use_context_window || context_string.size();
bool use_sliding_context_window = (use_context_window
&& ioWrapper->GetLookAhead() != size_t_max);
bool use_sliding_context_window = (use_context_window
&& ioWrapper->GetLookAhead() != size_t_max);
boost::shared_ptr<std::vector<std::string> > context_window;
boost::shared_ptr<std::vector<std::string> >* cw;
cw = use_context_window ? &context_window : NULL;
if (!cw && context_string.size())
if (!cw && context_string.size())
context_window.reset(new std::vector<std::string>(1,context_string));
// global scope of caches, biases, etc., if any
boost::shared_ptr<ContextScope> gscope;
if (!use_sliding_context_window)
if (!use_sliding_context_window)
gscope.reset(new ContextScope);
// main loop over set of input sentences
@ -212,21 +212,20 @@ batch_run()
boost::shared_ptr<ContextScope> lscope;
if (gscope) lscope = gscope;
else lscope.reset(new ContextScope);
boost::shared_ptr<TranslationTask> task;
boost::shared_ptr<TranslationTask> task;
task = TranslationTask::create(source, ioWrapper, lscope);
if (cw)
{
if (context_string.size())
context_window->push_back(context_string);
if(!use_sliding_context_window)
cw = NULL;
}
if (cw) {
if (context_string.size())
context_window->push_back(context_string);
if(!use_sliding_context_window)
cw = NULL;
}
if (context_window)
task->SetContextWindow(context_window);
if (context_weights != "")
if (context_weights != "")
task->SetContextWeights(context_weights);
// Allow for (sentence-)context-specific processing prior to

View File

@ -43,10 +43,10 @@ ConstrainedDecoding::ConstrainedDecoding(const std::string &line)
void ConstrainedDecoding::Load()
{
const StaticData &staticData = StaticData::Instance();
bool addBeginEndWord
= ((staticData.options().search.algo == CYKPlus)
|| (staticData.options().search.algo == ChartIncremental));
bool addBeginEndWord
= ((staticData.options().search.algo == CYKPlus)
|| (staticData.options().search.algo == ChartIncremental));
for(size_t i = 0; i < m_paths.size(); ++i) {
InputFileStream constraintFile(m_paths[i]);
std::string line;

View File

@ -166,7 +166,7 @@ FeatureFactory
weights.assign(feature->GetNumScoreComponents(),1.0);
} else {
VERBOSE(2,"WARNING: No weights specified in config file for FF "
<< featureName << ". Using default values supplied by FF.");
<< featureName << ". Using default values supplied by FF.");
}
}
UTIL_THROW_IF2(weights.size() != feature->GetNumScoreComponents(),

View File

@ -21,7 +21,7 @@ HyperParameterAsWeight::HyperParameterAsWeight(const std::string &line)
staticData.m_options.search.stack_size = weights[0] * 1000;
staticData.m_options.search.beam_width = weights[1] * 10;
}

View File

@ -36,7 +36,7 @@ namespace Moses
HypothesisStackNormal::HypothesisStackNormal(Manager& manager) :
HypothesisStack(manager)
{
m_nBestIsEnabled = StaticData::Instance().options().nbest.enabled;
m_nBestIsEnabled = StaticData::Instance().options().nbest.enabled;
m_bestScore = -std::numeric_limits<float>::infinity();
m_worstScore = -std::numeric_limits<float>::infinity();
}

View File

@ -79,12 +79,12 @@ namespace Moses
IOWrapper::IOWrapper()
: m_nBestStream(NULL)
// , m_outputWordGraphStream(NULL)
// , m_outputSearchGraphStream(NULL)
// , m_detailedTranslationReportingStream(NULL)
// , m_unknownsStream(NULL)
// , m_alignmentInfoStream(NULL)
// , m_latticeSamplesStream(NULL)
// , m_outputWordGraphStream(NULL)
// , m_outputSearchGraphStream(NULL)
// , m_detailedTranslationReportingStream(NULL)
// , m_unknownsStream(NULL)
// , m_alignmentInfoStream(NULL)
// , m_latticeSamplesStream(NULL)
, m_surpressSingleBestOutput(false)
, m_look_ahead(0)
, m_look_back(0)
@ -142,13 +142,13 @@ IOWrapper::IOWrapper()
P.SetParameter<string>(path, "translation-details", "");
if (path.size()) m_detailedTranslationCollector.reset(new OutputCollector(path));
P.SetParameter<string>(path, "tree-translation-details", "");
if (path.size()) m_detailTreeFragmentsOutputCollector.reset(new OutputCollector(path));
P.SetParameter<string>(path, "output-word-graph", "");
if (path.size()) m_wordGraphCollector.reset(new OutputCollector(path));
size_t latticeSamplesSize = staticData.GetLatticeSamplesSize();
string latticeSamplesFile = staticData.GetLatticeSamplesFilePath();
if (latticeSamplesSize) {
@ -157,7 +157,7 @@ IOWrapper::IOWrapper()
m_surpressSingleBestOutput = true;
}
}
if (!m_surpressSingleBestOutput) {
m_singleBestOutputCollector.reset(new Moses::OutputCollector(&std::cout));
}
@ -193,8 +193,8 @@ IOWrapper::~IOWrapper()
if (m_inputFile != NULL)
delete m_inputFile;
// if (m_nBestStream != NULL && !m_surpressSingleBestOutput) {
// outputting n-best to file, rather than stdout. need to close file and delete obj
// delete m_nBestStream;
// outputting n-best to file, rather than stdout. need to close file and delete obj
// delete m_nBestStream;
// }
// delete m_detailedTranslationReportingStream;
@ -251,16 +251,15 @@ ReadInput(boost::shared_ptr<std::vector<std::string> >* cw)
boost::shared_ptr<InputType> source = GetBufferedInput();
if (source) {
source->SetTranslationId(m_currentLine++);
// when using a sliding context window, remove obsolete past input from buffer:
if (m_past_input.size() && m_look_back != std::numeric_limits<size_t>::max())
{
list<boost::shared_ptr<InputType> >::iterator m = m_past_input.end();
for (size_t cnt = 0; cnt < m_look_back && --m != m_past_input.begin();)
cnt += (*m)->GetSize();
while (m_past_input.begin() != m) m_past_input.pop_front();
}
if (m_past_input.size() && m_look_back != std::numeric_limits<size_t>::max()) {
list<boost::shared_ptr<InputType> >::iterator m = m_past_input.end();
for (size_t cnt = 0; cnt < m_look_back && --m != m_past_input.begin();)
cnt += (*m)->GetSize();
while (m_past_input.begin() != m) m_past_input.pop_front();
}
if (m_look_back)
m_past_input.push_back(source);
}
@ -268,15 +267,15 @@ ReadInput(boost::shared_ptr<std::vector<std::string> >* cw)
return source;
}
boost::shared_ptr<std::vector<std::string> >
boost::shared_ptr<std::vector<std::string> >
IOWrapper::
GetCurrentContextWindow() const
{
boost::shared_ptr<std::vector<string> > context(new std::vector<string>);
BOOST_FOREACH(boost::shared_ptr<InputType> const& i, m_past_input)
context->push_back(i->ToString());
context->push_back(i->ToString());
BOOST_FOREACH(boost::shared_ptr<InputType> const& i, m_future_input)
context->push_back(i->ToString());
context->push_back(i->ToString());
return context;
}

View File

@ -129,7 +129,7 @@ public:
// Moses::InputType* GetInput(Moses::InputType *inputType);
boost::shared_ptr<InputType>
boost::shared_ptr<InputType>
ReadInput(boost::shared_ptr<std::vector<std::string> >* cw = NULL);
Moses::OutputCollector *GetSingleBestOutputCollector() {
@ -184,7 +184,7 @@ public:
// post editing
std::ifstream *spe_src, *spe_trg, *spe_aln;
std::list<boost::shared_ptr<InputType> > const& GetPastInput() const {
std::list<boost::shared_ptr<InputType> > const& GetPastInput() const {
return m_past_input;
}
@ -198,7 +198,7 @@ public:
size_t GetLookBack() const {
return m_look_back;
}
private:
template<class itype>
boost::shared_ptr<InputType>
@ -207,7 +207,7 @@ private:
boost::shared_ptr<InputType>
GetBufferedInput();
boost::shared_ptr<std::vector<std::string> >
boost::shared_ptr<std::vector<std::string> >
GetCurrentContextWindow() const;
};
@ -230,7 +230,7 @@ BufferInput()
}
while (m_buffered_ahead < m_look_ahead) {
source.reset(new itype);
if (!source->Read(*m_inputStream, *m_inputFactorOrder))
if (!source->Read(*m_inputStream, *m_inputFactorOrder))
break;
m_future_input.push_back(source);
m_buffered_ahead += source->GetSize();

View File

@ -223,8 +223,8 @@ namespace
const float log_10 = logf(10);
}
template <class Model, class Best>
search::History
template <class Model, class Best>
search::History
Manager::
PopulateBest(const Model &model, const std::vector<lm::WordIndex> &words, Best &out)
{

View File

@ -490,18 +490,18 @@ bool Edge::operator< (const Edge& compare ) const
ostream& operator<< (ostream& out, const Edge& edge)
{
out << "Head: " << edge.m_headNode->GetId()
<< ", Tail: " << edge.m_tailNode->GetId()
<< ", Score: " << edge.m_score
out << "Head: " << edge.m_headNode->GetId()
<< ", Tail: " << edge.m_tailNode->GetId()
<< ", Score: " << edge.m_score
<< ", Phrase: " << edge.m_targetPhrase << endl;
return out;
}
bool ascendingCoverageCmp(const Hypothesis* a, const Hypothesis* b)
{
return (a->GetWordsBitmap().GetNumWordsCovered()
<
b->GetWordsBitmap().GetNumWordsCovered());
return (a->GetWordsBitmap().GetNumWordsCovered()
<
b->GetWordsBitmap().GetNumWordsCovered());
}
void getLatticeMBRNBest(const Manager& manager, const TrellisPathList& nBestList,
@ -514,19 +514,19 @@ void getLatticeMBRNBest(const Manager& manager, const TrellisPathList& nBestList
std::map < const Hypothesis*, set <const Hypothesis*> > outgoingHyps;
map<const Hypothesis*, vector<Edge> > incomingEdges;
vector< float> estimatedScores;
manager.GetForwardBackwardSearchGraph(&connected, &connectedList,
&outgoingHyps, &estimatedScores);
manager.GetForwardBackwardSearchGraph(&connected, &connectedList,
&outgoingHyps, &estimatedScores);
LMBR_Options const& lmbr = manager.options().lmbr;
MBR_Options const& mbr = manager.options().mbr;
pruneLatticeFB(connectedList, outgoingHyps, incomingEdges, estimatedScores,
manager.GetBestHypothesis(), lmbr.pruning_factor, mbr.scale);
pruneLatticeFB(connectedList, outgoingHyps, incomingEdges, estimatedScores,
manager.GetBestHypothesis(), lmbr.pruning_factor, mbr.scale);
calcNgramExpectations(connectedList, incomingEdges, ngramPosteriors,true);
vector<float> mbrThetas = lmbr.theta;
float p = lmbr.precision;
float r = lmbr.ratio;
float mapWeight = lmbr.map_weight;
if (mbrThetas.size() == 0) {
if (mbrThetas.size() == 0) {
// thetas were not specified on the command line, so use p and r instead
mbrThetas.push_back(-1); //Theta 0
mbrThetas.push_back(1/(bleu_order*p));
@ -580,8 +580,8 @@ const TrellisPath doConsensusDecoding(const Manager& manager, const TrellisPathL
manager.GetForwardBackwardSearchGraph(&connected, &connectedList, &outgoingHyps, &estimatedScores);
LMBR_Options const& lmbr = manager.options().lmbr;
MBR_Options const& mbr = manager.options().mbr;
pruneLatticeFB(connectedList, outgoingHyps, incomingEdges, estimatedScores,
manager.GetBestHypothesis(), lmbr.pruning_factor, mbr.scale);
pruneLatticeFB(connectedList, outgoingHyps, incomingEdges, estimatedScores,
manager.GetBestHypothesis(), lmbr.pruning_factor, mbr.scale);
calcNgramExpectations(connectedList, incomingEdges, ngramExpectations,false);
//expected length is sum of expected unigram counts

View File

@ -1616,9 +1616,9 @@ void Manager::OutputNBest(OutputCollector *collector) const
TrellisPathList nBestList;
ostringstream out;
CalcNBest(options().nbest.nbest_size, nBestList,
options().nbest.only_distinct);
OutputNBest(out, nBestList, staticData.GetOutputFactorOrder(),
m_source.GetTranslationId(),
options().nbest.only_distinct);
OutputNBest(out, nBestList, staticData.GetOutputFactorOrder(),
m_source.GetTranslationId(),
staticData.GetReportSegmentation());
collector->Write(m_source.GetTranslationId(), out.str());
}

View File

@ -43,8 +43,8 @@ namespace Moses
class OutputCollector
{
public:
OutputCollector(std::ostream* outStream= &std::cout,
std::ostream* debugStream=&std::cerr)
OutputCollector(std::ostream* outStream= &std::cout,
std::ostream* debugStream=&std::cerr)
: m_nextOutput(0)
, m_outStream(outStream)
, m_debugStream(debugStream)
@ -52,37 +52,36 @@ public:
, m_isHoldingDebugStream(false) {}
OutputCollector(std::string xout, std::string xerr = "")
: m_nextOutput(0)
{
// TO DO open magic streams instead of regular ofstreams! [UG]
: m_nextOutput(0) {
// TO DO open magic streams instead of regular ofstreams! [UG]
if (xout == "/dev/stderr") {
m_outStream = &std::cerr;
m_isHoldingOutputStream = false;
} else if (xout.size() && xout != "/dev/stdout" && xout != "-") {
m_outStream = new std::ofstream(xout.c_str());
UTIL_THROW_IF2(!m_outStream->good(), "Failed to open output file"
<< xout);
m_isHoldingOutputStream = true;
} else {
m_outStream = &std::cout;
m_isHoldingOutputStream = false;
}
if (xerr == "/dev/stdout") {
m_debugStream = &std::cout;
m_isHoldingDebugStream = false;
} else if (xerr.size() && xerr != "/dev/stderr") {
m_debugStream = new std::ofstream(xerr.c_str());
UTIL_THROW_IF2(!m_debugStream->good(), "Failed to open debug stream"
<< xerr);
m_isHoldingDebugStream = true;
} else {
m_debugStream = &std::cerr;
m_isHoldingDebugStream = false;
}
if (xout == "/dev/stderr") {
m_outStream = &std::cerr;
m_isHoldingOutputStream = false;
} else if (xout.size() && xout != "/dev/stdout" && xout != "-") {
m_outStream = new std::ofstream(xout.c_str());
UTIL_THROW_IF2(!m_outStream->good(), "Failed to open output file"
<< xout);
m_isHoldingOutputStream = true;
} else {
m_outStream = &std::cout;
m_isHoldingOutputStream = false;
}
if (xerr == "/dev/stdout") {
m_debugStream = &std::cout;
m_isHoldingDebugStream = false;
} else if (xerr.size() && xerr != "/dev/stderr") {
m_debugStream = new std::ofstream(xerr.c_str());
UTIL_THROW_IF2(!m_debugStream->good(), "Failed to open debug stream"
<< xerr);
m_isHoldingDebugStream = true;
} else {
m_debugStream = &std::cerr;
m_isHoldingDebugStream = false;
}
}
~OutputCollector() {
if (m_isHoldingOutputStream)
delete m_outStream;

View File

@ -214,10 +214,10 @@ Parameter::Parameter()
AddParam(server_opts,"server", "Run moses as a translation server.");
AddParam(server_opts,"server-port", "Port for moses server");
AddParam(server_opts,"server-log", "Log destination for moses server");
AddParam(server_opts,"session-timeout",
"Timeout for sessions, e.g. '2h30m' or 1d (=24h)");
AddParam(server_opts,"session-timeout",
"Timeout for sessions, e.g. '2h30m' or 1d (=24h)");
AddParam(server_opts,"session-cache-size", string("Max. number of sessions cached.")
+"Least recently used session is dumped first.");
+"Least recently used session is dumped first.");
AddParam(server_opts,"serial", "Run server in serial mode, processing only one request at a time.");
po::options_description irstlm_opts("IRSTLM Options");

View File

@ -44,7 +44,7 @@ RuleCube::RuleCube(const ChartTranslationOptions &transOpt,
{
RuleCubeItem *item = new RuleCubeItem(transOpt, allChartCells);
m_covered.insert(item);
if (StaticData::Instance().options().cube.lazy_scoring) {
if (StaticData::Instance().options().cube.lazy_scoring) {
item->EstimateScore();
} else {
item->CreateHypothesis(transOpt, manager);

View File

@ -20,8 +20,8 @@ Search::Search(Manager& manager)
Search *
Search::
CreateSearch(Manager& manager, const InputType &source,
SearchAlgorithm searchAlgorithm,
const TranslationOptionCollection &transOptColl)
SearchAlgorithm searchAlgorithm,
const TranslationOptionCollection &transOptColl)
{
switch(searchAlgorithm) {
case Normal:
@ -42,8 +42,8 @@ out_of_time()
if (!timelimit) return false;
double elapsed_time = GetUserTime();
if (elapsed_time <= timelimit) return false;
VERBOSE(1,"Decoding is out of time (" << elapsed_time << ","
<< timelimit << ")" << std::endl);
VERBOSE(1,"Decoding is out of time (" << elapsed_time << ","
<< timelimit << ")" << std::endl);
interrupted_flag = 1;
return true;
}

View File

@ -46,7 +46,7 @@ protected:
AllOptions const& m_options;
/** flag indicating that decoder ran out of time (see switch -time-out) */
size_t interrupted_flag;
size_t interrupted_flag;
bool out_of_time();
};

View File

@ -92,8 +92,8 @@ void SearchCubePruning::Decode()
// check if decoding ran out of time
double _elapsed_time = GetUserTime();
if (timelimit && _elapsed_time > timelimit) {
VERBOSE(1,"Decoding is out of time (" << _elapsed_time << ","
<< timelimit << ")" << std::endl);
VERBOSE(1,"Decoding is out of time (" << _elapsed_time << ","
<< timelimit << ")" << std::endl);
return;
}
HypothesisStackCubePruning &sourceHypoColl = *static_cast<HypothesisStackCubePruning*>(*iterStack);

View File

@ -16,8 +16,8 @@ namespace Moses
* \param transOptColl collection of translation options to be used for this sentence
*/
SearchNormal::
SearchNormal(Manager& manager, const InputType &source,
const TranslationOptionCollection &transOptColl)
SearchNormal(Manager& manager, const InputType &source,
const TranslationOptionCollection &transOptColl)
: Search(manager)
, m_source(source)
, m_hypoStackColl(source.GetSize() + 1)
@ -38,8 +38,8 @@ SearchNormal(Manager& manager, const InputType &source,
std::vector < HypothesisStackNormal >::iterator iterStack;
for (size_t ind = 0 ; ind < m_hypoStackColl.size() ; ++ind) {
HypothesisStackNormal *sourceHypoColl = new HypothesisStackNormal(m_manager);
sourceHypoColl->SetMaxHypoStackSize(this->m_options.search.stack_size,
this->m_options.search.stack_diversity);
sourceHypoColl->SetMaxHypoStackSize(this->m_options.search.stack_size,
this->m_options.search.stack_diversity);
sourceHypoColl->SetBeamWidth(this->m_options.search.beam_width);
m_hypoStackColl[ind] = sourceHypoColl;
}
@ -51,26 +51,26 @@ SearchNormal::~SearchNormal()
}
bool
bool
SearchNormal::
ProcessOneStack(HypothesisStack* hstack)
{
if (this->out_of_time()) return false;
SentenceStats &stats = m_manager.GetSentenceStats();
HypothesisStackNormal &sourceHypoColl
= *static_cast<HypothesisStackNormal*>(hstack);
HypothesisStackNormal &sourceHypoColl
= *static_cast<HypothesisStackNormal*>(hstack);
// the stack is pruned before processing (lazy pruning):
VERBOSE(3,"processing hypothesis from next stack");
IFVERBOSE(2) stats.StartTimeStack();
IFVERBOSE(2) stats.StartTimeStack();
sourceHypoColl.PruneToSize(m_options.search.stack_size);
VERBOSE(3,std::endl);
sourceHypoColl.CleanupArcList();
IFVERBOSE(2) stats.StopTimeStack();
IFVERBOSE(2) stats.StopTimeStack();
// go through each hypothesis on the stack and try to expand it
BOOST_FOREACH(Hypothesis* h, sourceHypoColl)
ProcessOneHypothesis(*h);
BOOST_FOREACH(Hypothesis* h, sourceHypoColl)
ProcessOneHypothesis(*h);
return true;
}
@ -90,7 +90,7 @@ void SearchNormal::Decode()
// go through each stack
BOOST_FOREACH(HypothesisStack* hstack, m_hypoStackColl) {
if (!ProcessOneStack(hstack)) return;
IFVERBOSE(2) OutputHypoStackSize();
IFVERBOSE(2) OutputHypoStackSize();
actual_hypoStack = static_cast<HypothesisStackNormal*>(hstack);
}
}

View File

@ -22,30 +22,30 @@ class SearchNormal: public Search
{
protected:
const InputType &m_source;
//! stacks to store hypotheses (partial translations)
//! stacks to store hypotheses (partial translations)
// no of elements = no of words in source + 1
std::vector < HypothesisStack* > m_hypoStackColl;
std::vector < HypothesisStack* > m_hypoStackColl;
/** actual (full expanded) stack of hypotheses*/
HypothesisStackNormal* actual_hypoStack;
HypothesisStackNormal* actual_hypoStack;
/** pre-computed list of translation options for the phrases in this sentence */
const TranslationOptionCollection &m_transOptColl;
const TranslationOptionCollection &m_transOptColl;
// functions for creating hypotheses
virtual bool
ProcessOneStack(HypothesisStack* hstack);
virtual void
virtual void
ProcessOneHypothesis(const Hypothesis &hypothesis);
virtual void
virtual void
ExpandAllHypotheses(const Hypothesis &hypothesis, size_t startPos, size_t endPos);
virtual void
ExpandHypothesis(const Hypothesis &hypothesis, const TranslationOption &transOpt,
float expectedScore);
virtual void
ExpandHypothesis(const Hypothesis &hypothesis, const TranslationOption &transOpt,
float expectedScore);
public:
SearchNormal(Manager& manager, const InputType &source, const TranslationOptionCollection &transOptColl);

View File

@ -267,12 +267,12 @@ StaticData
m_parameter->SetParameter(m_printAllDerivations , "print-all-derivations", false );
// additional output
m_parameter->SetParameter<string>(m_detailedTranslationReportingFilePath,
"translation-details", "");
m_parameter->SetParameter<string>(m_detailedTranslationReportingFilePath,
"translation-details", "");
m_parameter->SetParameter<string>(m_detailedTreeFragmentsTranslationReportingFilePath,
"tree-translation-details", "");
m_parameter->SetParameter<string>(m_detailedAllTranslationReportingFilePath,
"translation-all-details", "");
"tree-translation-details", "");
m_parameter->SetParameter<string>(m_detailedAllTranslationReportingFilePath,
"translation-all-details", "");
m_parameter->SetParameter<long>(m_startTranslationId, "start-translation-id", 0);
//lattice samples
@ -435,12 +435,12 @@ bool StaticData::LoadData(Parameter *parameter)
ini_oov_options();
// set m_nbest_options.enabled = true if necessary:
if (m_options.mbr.enabled
|| m_options.mira
if (m_options.mbr.enabled
|| m_options.mira
|| m_options.search.consensus
|| m_outputSearchGraph
|| m_outputSearchGraph
|| m_outputSearchGraphSLF
|| m_outputSearchGraphHypergraph
|| m_outputSearchGraphHypergraph
#ifdef HAVE_PROTOBUF
|| m_outputSearchGraphPB
#endif

View File

@ -290,13 +290,13 @@ public:
}
AllOptions const&
options() const {
return m_options;
options() const {
return m_options;
}
AllOptions&
options() {
return m_options;
AllOptions&
options() {
return m_options;
}
const std::vector<FactorType> &GetInputFactorOrder() const {
@ -367,8 +367,8 @@ public:
}
bool UseEarlyDiscarding() const {
return m_options.search.early_discarding_threshold
!= -std::numeric_limits<float>::infinity();
return m_options.search.early_discarding_threshold
!= -std::numeric_limits<float>::infinity();
}
bool UseEarlyDistortionCost() const {
return m_options.reordering.use_early_distortion_cost;
@ -474,7 +474,7 @@ public:
bool IsSyntax(SearchAlgorithm algo = DefaultSearchAlgorithm) const {
if (algo == DefaultSearchAlgorithm)
algo = m_options.search.algo;
return (algo == CYKPlus || algo == ChartIncremental ||
algo == SyntaxS2T || algo == SyntaxT2S ||
algo == SyntaxF2S || algo == SyntaxT2S_SCFG);

View File

@ -254,7 +254,7 @@ void Manager<RuleMatcher>::ExtractKBest(
// with 0 being 'unlimited.' This actually sets a large-ish limit in case
// too many translations are identical.
const StaticData &staticData = StaticData::Instance();
const std::size_t nBestFactor = staticData.options().nbest.factor;
const std::size_t nBestFactor = staticData.options().nbest.factor;
std::size_t numDerivations = (nBestFactor == 0) ? k*1000 : k*nBestFactor;
// Extract the derivations.

View File

@ -182,7 +182,8 @@ bool TargetPhrase::HasTtaskSPtr() const
return m_ttask_flag;
}
const ttasksptr TargetPhrase::GetTtask() const {
const ttasksptr TargetPhrase::GetTtask() const
{
return m_ttask.lock();
}

View File

@ -23,7 +23,7 @@ using namespace std;
namespace Moses
{
boost::shared_ptr<std::vector<std::string> >
boost::shared_ptr<std::vector<std::string> >
TranslationTask::
GetContextWindow() const
{
@ -88,7 +88,7 @@ boost::shared_ptr<TranslationTask>
TranslationTask
::create(boost::shared_ptr<InputType> const& source,
boost::shared_ptr<IOWrapper> const& ioWrapper,
boost::shared_ptr<ContextScope> const& scope)
boost::shared_ptr<ContextScope> const& scope)
{
boost::shared_ptr<TranslationTask> ret(new TranslationTask(source, ioWrapper));
ret->m_self = ret;
@ -100,7 +100,7 @@ TranslationTask
::TranslationTask(boost::shared_ptr<InputType> const& source,
boost::shared_ptr<IOWrapper> const& ioWrapper)
: m_source(source) , m_ioWrapper(ioWrapper)
{
{
m_options = StaticData::Instance().options();
}

View File

@ -97,7 +97,7 @@ public:
boost::shared_ptr<TranslationTask>
create(boost::shared_ptr<Moses::InputType> const& source,
boost::shared_ptr<Moses::IOWrapper> const& ioWrapper,
boost::shared_ptr<ContextScope> const& scope);
boost::shared_ptr<ContextScope> const& scope);
~TranslationTask();
/** Translate one sentence
@ -124,10 +124,10 @@ public:
return m_scope;
}
boost::shared_ptr<std::vector<std::string> >
boost::shared_ptr<std::vector<std::string> >
GetContextWindow() const;
void
void
SetContextWindow(boost::shared_ptr<std::vector<std::string> > const& cw);
std::map<std::string, float> const& GetContextWeights() const;