// $Id$
// vim:tabstop=2

/***********************************************************************
Moses - factored phrase-based language decoder
Copyright (C) 2006 University of Edinburgh

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/

#include "util/check.hh"

#include <iostream>
#include <limits>
#include <vector>
#include <algorithm>
#include <cstring>  // std::memcpy, used in AddArc
#include <ctime>    // clock_t / clock(), used in CalcScore

#include "FFState.h"
#include "TranslationOption.h"
#include "TranslationOptionCollection.h"
#include "DummyScoreProducers.h"
#include "Hypothesis.h"
#include "Util.h"
#include "SquareMatrix.h"
#include "LexicalReordering.h"
#include "StaticData.h"
#include "InputType.h"
#include "LMList.h"
#include "Manager.h"

using namespace std;

namespace Moses
{

#ifdef USE_HYPO_POOL
ObjectPool<Hypothesis> Hypothesis::s_objectPool("Hypothesis", 300000);
#endif
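
/** Construct the seed hypothesis for decoding: coverage starts from the
 *  input's pre-completed positions (normally none), the target side is the
 *  empty target phrase, and each stateful feature function contributes its
 *  empty-hypothesis state.
 */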
Hypothesis::Hypothesis(Manager& manager, InputType const& source, const TargetPhrase &emptyTarget)
  : m_prevHypo(NULL)
  , m_targetPhrase(emptyTarget)
  , m_sourcePhrase(0)
  , m_sourceCompleted(source.GetSize(), manager.m_source.m_sourceCompleted)
  , m_sourceInput(source)
  , m_currSourceWordsRange(
      m_sourceCompleted.GetFirstGapPos()>0 ? 0 : NOT_FOUND,
      m_sourceCompleted.GetFirstGapPos()>0 ? m_sourceCompleted.GetFirstGapPos()-1 : NOT_FOUND)
  , m_currTargetWordsRange(0, emptyTarget.GetSize()-1)
  , m_wordDeleted(false)
  , m_ffStates(StatefulFeatureFunction::GetStatefulFeatureFunctions().size())
  , m_arcList(NULL)
  , m_transOpt(NULL)
  , m_manager(manager)
  , m_totalScore(0.0f)
  , m_futureScore(0.0f)
  , m_id(m_manager.GetNextHypoId())
{
  // used for initial seeding of trans process
  // initialize scores
  //_hash_computed = false;
  //s_HypothesesCreated = 1;
  const vector<const StatefulFeatureFunction*>& ffs = StatefulFeatureFunction::GetStatefulFeatureFunctions();
  for (unsigned i = 0; i < ffs.size(); ++i)
    m_ffStates[i] = ffs[i]->EmptyHypothesisState(source);
  m_manager.GetSentenceStats().AddCreated();
}

/***
 * continue prevHypo by appending the phrases in transOpt
 */
Hypothesis::Hypothesis(const Hypothesis &prevHypo, const TranslationOption &transOpt)
  : m_prevHypo(&prevHypo)
  , m_targetPhrase(transOpt.GetTargetPhrase())
  , m_sourcePhrase(transOpt.GetSourcePhrase())
  , m_sourceCompleted(prevHypo.m_sourceCompleted)
  , m_sourceInput(prevHypo.m_sourceInput)
  , m_currSourceWordsRange(transOpt.GetSourceWordsRange())
  , m_currTargetWordsRange(prevHypo.m_currTargetWordsRange.GetEndPos() + 1,
                           prevHypo.m_currTargetWordsRange.GetEndPos() + transOpt.GetTargetPhrase().GetSize())
  , m_wordDeleted(false)
  , m_totalScore(0.0f)
  , m_futureScore(0.0f)
  , m_ffStates(prevHypo.m_ffStates.size())
  , m_arcList(NULL)
  , m_transOpt(&transOpt)
  , m_manager(prevHypo.GetManager())
  , m_id(m_manager.GetNextHypoId())
  , m_scoreBreakdown(prevHypo.GetScoreBreakdown())
{
  m_scoreBreakdown.PlusEquals(transOpt.GetScoreBreakdown());

  // assert that we are not extending our hypothesis by retranslating something
  // that this hypothesis has already translated!
  CHECK(!m_sourceCompleted.Overlap(m_currSourceWordsRange));

  //_hash_computed = false;
  m_sourceCompleted.SetValue(m_currSourceWordsRange.GetStartPos(), m_currSourceWordsRange.GetEndPos(), true);
  m_wordDeleted = transOpt.IsDeletionOption();
  m_manager.GetSentenceStats().AddCreated();
}

Hypothesis::~Hypothesis()
{
  for (unsigned i = 0; i < m_ffStates.size(); ++i)
    delete m_ffStates[i];

  if (m_arcList) {
    ArcList::iterator iter;
    for (iter = m_arcList->begin() ; iter != m_arcList->end() ; ++iter) {
      FREEHYPO(*iter);
    }
    m_arcList->clear();

    delete m_arcList;
    m_arcList = NULL;
  }
}
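
/** Record a recombined ("loser") hypothesis on this hypothesis' arc list,
 *  taking over any arcs the loser had collected itself. The arcs are kept
 *  so that alternative derivations can be recovered, e.g. for n-best lists.
 */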
void Hypothesis::AddArc(Hypothesis *loserHypo)
{
  if (!m_arcList) {
    if (loserHypo->m_arcList) { // we don't have an arcList, but loser does
      this->m_arcList = loserHypo->m_arcList; // take ownership, we'll delete
      loserHypo->m_arcList = 0; // prevent a double deletion
    } else {
      this->m_arcList = new ArcList();
    }
  } else {
    if (loserHypo->m_arcList) { // both have an arc list: merge. delete loser
      size_t my_size = m_arcList->size();
      size_t add_size = loserHypo->m_arcList->size();
      this->m_arcList->resize(my_size + add_size, 0);
      std::memcpy(&(*m_arcList)[0] + my_size, &(*loserHypo->m_arcList)[0], add_size * sizeof(Hypothesis *));
      delete loserHypo->m_arcList;
      loserHypo->m_arcList = 0;
    } else { // loserHypo doesn't have any arcs
      // DO NOTHING
    }
  }
  m_arcList->push_back(loserHypo);
}

/***
 * return the subclass of Hypothesis most appropriate to the given translation option
 */
Hypothesis* Hypothesis::CreateNext(const TranslationOption &transOpt, const Phrase* constraint) const
{
  return Create(*this, transOpt, constraint);
}

/***
 * return the subclass of Hypothesis most appropriate to the given translation option
 */
Hypothesis* Hypothesis::Create(const Hypothesis &prevHypo, const TranslationOption &transOpt, const Phrase* constrainingPhrase)
{
  // This method includes code for constraint decoding

  bool createHypothesis = true;

  if (constrainingPhrase != NULL) {

    size_t constraintSize = constrainingPhrase->GetSize();
    size_t start = 1 + prevHypo.GetCurrTargetWordsRange().GetEndPos();

    const Phrase &transOptPhrase = transOpt.GetTargetPhrase();
    size_t transOptSize = transOptPhrase.GetSize();

    size_t endpoint = start + transOptSize - 1;

    if (endpoint < constraintSize) {
      WordsRange range(start, endpoint);
      Phrase relevantConstraint = constrainingPhrase->GetSubString(range);

      if ( ! relevantConstraint.IsCompatible(transOptPhrase) ) {
        createHypothesis = false;
      }
    } else {
      createHypothesis = false;
    }
  }

  if (createHypothesis) {

#ifdef USE_HYPO_POOL
    Hypothesis *ptr = s_objectPool.getPtr();
    return new(ptr) Hypothesis(prevHypo, transOpt);
#else
    return new Hypothesis(prevHypo, transOpt);
#endif

  } else {
    // If the previous hypothesis plus the proposed translation option
    // fail to match the provided constraint,
    // return a null hypothesis.
    return NULL;
  }
}

/***
 * return the subclass of Hypothesis most appropriate to the given target phrase
 */
Hypothesis* Hypothesis::Create(Manager& manager, InputType const& m_source, const TargetPhrase &emptyTarget)
{
#ifdef USE_HYPO_POOL
  Hypothesis *ptr = s_objectPool.getPtr();
  return new(ptr) Hypothesis(manager, m_source, emptyTarget);
#else
  return new Hypothesis(manager, m_source, emptyTarget);
#endif
}

/** Check whether two hypotheses can be recombined.
 *  This is actually a sorting (comparison) function that allows us to
 *  keep an ordered list of hypotheses, which makes recombination
 *  much quicker.
 */
int Hypothesis::RecombineCompare(const Hypothesis &compare) const
{
  // -1 = this < compare
  // +1 = this > compare
  //  0 = this == compare
  int comp = m_sourceCompleted.Compare(compare.m_sourceCompleted);
  if (comp != 0)
    return comp;

  for (unsigned i = 0; i < m_ffStates.size(); ++i) {
    if (m_ffStates[i] == NULL || compare.m_ffStates[i] == NULL) {
      comp = m_ffStates[i] - compare.m_ffStates[i];
    } else {
      comp = m_ffStates[i]->Compare(*compare.m_ffStates[i]);
    }
    if (comp != 0) return comp;
  }

  return 0;
}
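
/** Evaluate this hypothesis with a single stateful feature function,
 *  storing the returned feature state at state_idx and accumulating
 *  its scores into m_scoreBreakdown.
 */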
void Hypothesis::EvaluateWith(const StatefulFeatureFunction &sfff,
                              int state_idx)
{
  m_ffStates[state_idx] = sfff.Evaluate(
                            *this,
                            m_prevHypo ? m_prevHypo->m_ffStates[state_idx] : NULL,
                            &m_scoreBreakdown);
}

void Hypothesis::EvaluateWith(const StatelessFeatureFunction& slff)
{
  slff.Evaluate(PhraseBasedFeatureContext(this), &m_scoreBreakdown);
}

/***
 * calculate the logarithm of our total translation score (sum up components)
 */
void Hypothesis::CalcScore(const SquareMatrix &futureScore)
{
  const StaticData &staticData = StaticData::Instance();
  clock_t t=0; // used to track time

  // some stateless score producers cache their values in the translation
  // option: add these here
  // language model scores for n-grams completely contained within a target
  // phrase are also included here

  // compute values of stateless feature functions that were not
  // cached in the translation option
  const vector<const StatelessFeatureFunction*>& sfs =
    StatelessFeatureFunction::GetStatelessFeatureFunctions();
  for (unsigned i = 0; i < sfs.size(); ++i) {
    const StatelessFeatureFunction &ff = *sfs[i];
    if (ff.GetStatelessFeatureType() == RequiresSegmentation) {
      EvaluateWith(ff);
    }
  }

  const vector<const StatefulFeatureFunction*>& ffs =
    StatefulFeatureFunction::GetStatefulFeatureFunctions();
  for (unsigned i = 0; i < ffs.size(); ++i) {
    const StatefulFeatureFunction &ff = *ffs[i];
    m_ffStates[i] = ff.Evaluate(
                      *this,
                      m_prevHypo ? m_prevHypo->m_ffStates[i] : NULL,
                      &m_scoreBreakdown);
  }

  IFVERBOSE(2) {
    t = clock(); // track time excluding LM
  }

  // FUTURE COST
  m_futureScore = futureScore.CalcFutureScore( m_sourceCompleted );

  // TOTAL
  m_totalScore = m_scoreBreakdown.GetWeightedScore() + m_futureScore;

  IFVERBOSE(2) {
    m_manager.GetSentenceStats().AddTimeOtherScore( clock()-t );
  }
}

const Hypothesis* Hypothesis::GetPrevHypo() const
{
  return m_prevHypo;
}

/**
 * print hypothesis information for pharaoh-style logging
 */
void Hypothesis::PrintHypothesis() const
{
  if (!m_prevHypo) {
    TRACE_ERR(endl << "NULL hypo" << endl);
    return;
  }
  TRACE_ERR(endl << "creating hypothesis "<< m_id <<" from "<< m_prevHypo->m_id<<" ( ");
  int end = (int)(m_prevHypo->m_targetPhrase.GetSize()-1);
  int start = end-1;
  if ( start < 0 ) start = 0;
  if ( m_prevHypo->m_currTargetWordsRange.GetStartPos() == NOT_FOUND ) {
    TRACE_ERR( "<s> ");
  } else {
    TRACE_ERR( "... ");
  }
  if (end>=0) {
    WordsRange range(start, end);
    TRACE_ERR( m_prevHypo->m_targetPhrase.GetSubString(range) << " ");
  }
  TRACE_ERR( ")"<<endl);
  TRACE_ERR( "\tbase score "<< (m_prevHypo->m_totalScore - m_prevHypo->m_futureScore) <<endl);
  TRACE_ERR( "\tcovering "<<m_currSourceWordsRange.GetStartPos()<<"-"<<m_currSourceWordsRange.GetEndPos()<<": "
             << *m_sourcePhrase <<endl);
  TRACE_ERR( "\ttranslated as: "<<(Phrase&) m_targetPhrase<<endl); // <<" => translation cost "<<m_score[ScoreType::PhraseTrans];

  if (m_wordDeleted) TRACE_ERR( "\tword deleted"<<endl);
  //  TRACE_ERR( "\tdistance: "<<GetCurrSourceWordsRange().CalcDistortion(m_prevHypo->GetCurrSourceWordsRange())); // << " => distortion cost "<<(m_score[ScoreType::Distortion]*weightDistortion)<<endl;
  //  TRACE_ERR( "\tlanguage model cost "); // <<m_score[ScoreType::LanguageModelScore]<<endl;
  //  TRACE_ERR( "\tword penalty "); // <<(m_score[ScoreType::WordPenalty]*weightWordPenalty)<<endl;
  TRACE_ERR( "\tscore "<<m_totalScore - m_futureScore<<" + future cost "<<m_futureScore<<" = "<<m_totalScore<<endl);
  TRACE_ERR( "\tunweighted feature scores: " << m_scoreBreakdown << endl);
  //PrintLMScores();
}
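
/** Prune this hypothesis' arc list down to what n-best extraction can use
 *  and point every remaining arc at this (winning) hypothesis.
 */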
void Hypothesis::CleanupArcList()
{
  // point this hypo's main hypo to itself
  SetWinningHypo(this);

  if (!m_arcList) return;

  /* keep only the number of arcs we need to create all n-best paths.
   * However, this may not be enough if only unique candidates are needed,
   * so we keep the full arc list if a distinct n-best list is requested.
   */
  const StaticData &staticData = StaticData::Instance();
  size_t nBestSize = staticData.GetNBestSize();
  bool distinctNBest = staticData.GetDistinctNBest() || staticData.UseMBR() || staticData.GetOutputSearchGraph() || staticData.GetOutputSearchGraphSLF() || staticData.GetOutputSearchGraphHypergraph() || staticData.UseLatticeMBR();

  if (!distinctNBest && m_arcList->size() > nBestSize * 5) {
    // prune the arc list only if there are too many arcs
    nth_element(m_arcList->begin()
                , m_arcList->begin() + nBestSize - 1
                , m_arcList->end()
                , CompareHypothesisTotalScore());

    // delete the bad ones
    ArcList::iterator iter;
    for (iter = m_arcList->begin() + nBestSize ; iter != m_arcList->end() ; ++iter) {
      Hypothesis *arc = *iter;
      FREEHYPO(arc);
    }
    m_arcList->erase(m_arcList->begin() + nBestSize
                     , m_arcList->end());
  }

  // set all arcs' main hypo variable to this hypo
  ArcList::iterator iter = m_arcList->begin();
  for (; iter != m_arcList->end() ; ++iter) {
    Hypothesis *arc = *iter;
    arc->SetWinningHypo(this);
  }
}
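
// TO_STRING_BODY (macro from Util.h) expands to the definition of
// Hypothesis::ToString(), which formats the hypothesis via operator<< below.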
TO_STRING_BODY(Hypothesis)

// friend
ostream& operator<<(ostream& out, const Hypothesis& hypo)
{
  hypo.ToStream(out);
  // words bitmap
  out << "[" << hypo.m_sourceCompleted << "] ";

  // scores
  out << " [total=" << hypo.GetTotalScore() << "]";
  out << " " << hypo.GetScoreBreakdown();

  // alignment
  out << " " << hypo.GetCurrTargetPhrase().GetAlignNonTerm();

  /*
  const Hypothesis *prevHypo = hypo.GetPrevHypo();
  if (prevHypo)
    out << endl << *prevHypo;
  */

  return out;
}

std::string Hypothesis::GetSourcePhraseStringRep(const vector<FactorType> factorsToPrint) const
{
  if (!m_prevHypo) {
    return "";
  }
  return m_sourcePhrase->GetStringRep(factorsToPrint);
#if 0
  if(m_sourcePhrase) {
    return m_sourcePhrase->GetSubString(m_currSourceWordsRange).GetStringRep(factorsToPrint);
  } else {
    return m_sourceInput.GetSubString(m_currSourceWordsRange).GetStringRep(factorsToPrint);
  }
#endif
}

std::string Hypothesis::GetTargetPhraseStringRep(const vector<FactorType> factorsToPrint) const
{
  if (!m_prevHypo) {
    return "";
  }
  return m_targetPhrase.GetStringRep(factorsToPrint);
}

std::string Hypothesis::GetSourcePhraseStringRep() const
{
  vector<FactorType> allFactors;
  for(size_t i=0; i < MAX_NUM_FACTORS; i++) {
    allFactors.push_back(i);
  }
  return GetSourcePhraseStringRep(allFactors);
}

std::string Hypothesis::GetTargetPhraseStringRep() const
{
  vector<FactorType> allFactors;
  for(size_t i=0; i < MAX_NUM_FACTORS; i++) {
    allFactors.push_back(i);
  }
  return GetTargetPhraseStringRep(allFactors);
}

}