// $Id$
// vim:tabstop=2
/***********************************************************************
Moses - factored phrase-based language decoder
Copyright (C) 2006 University of Edinburgh

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
***********************************************************************/

#ifdef WIN32
#include <hash_set>
#else
#include <ext/hash_set>
#endif

#include <algorithm>
#include <cmath>
#include <limits>
#include <map>
#include <set>

#include "Manager.h"
#include "TypeDef.h"
#include "Util.h"
#include "TargetPhrase.h"
#include "TrellisPath.h"
#include "TrellisPathCollection.h"
#include "TranslationOption.h"
#include "TranslationOptionCollection.h"
#include "Timer.h"
#include "moses/FF/DistortionScoreProducer.h"
#include "moses/LM/Base.h"
#include "moses/TranslationModel/PhraseDictionary.h"

#ifdef HAVE_PROTOBUF
#include "hypergraph.pb.h"
#include "rule.pb.h"
#endif

#include "util/exception.hh"

using namespace std;

namespace Moses
{
Manager::Manager(size_t lineNumber, InputType const& source, SearchAlgorithm searchAlgorithm)
  :m_transOptColl(source.CreateTranslationOptionCollection())
  ,m_search(Search::CreateSearch(*this, source, searchAlgorithm, *m_transOptColl))
  ,interrupted_flag(0)
  ,m_hypoId(0)
  ,m_lineNumber(lineNumber)
  ,m_source(source)
{
  StaticData::Instance().InitializeForInput(m_source);
}

Manager::~Manager()
{
  delete m_transOptColl;
  delete m_search;

  StaticData::Instance().CleanUpAfterSentenceProcessing(m_source);
}

/**
 * Main decoder loop that translates a sentence by expanding
 * hypotheses stack by stack, until the end of the sentence.
 */
void Manager::ProcessSentence()
{
  // initialize statistics
  ResetSentenceStats(m_source);
  IFVERBOSE(2) {
    GetSentenceStats().StartTimeTotal();
  }

  // check if an alternate weight setting is used
  // NB: this is not thread safe! it changes StaticData
  if (StaticData::Instance().GetHasAlternateWeightSettings()) {
    if (m_source.GetSpecifiesWeightSetting()) {
      StaticData::Instance().SetWeightSetting(m_source.GetWeightSetting());
    } else {
      StaticData::Instance().SetWeightSetting("default");
    }
  }

  // collect translation options
  IFVERBOSE(1) {
    GetSentenceStats().StartTimeCollectOpts();
  }
  m_transOptColl->CreateTranslationOptions();

  // report how long collecting options took
  IFVERBOSE(1) {
    GetSentenceStats().StopTimeCollectOpts();
    TRACE_ERR("Line " << m_lineNumber << ": Collecting options took " << GetSentenceStats().GetTimeCollectOpts() << " seconds" << endl);
  }

  // search for the best translation with the specified algorithm
  Timer searchTime;
  searchTime.start();
  m_search->ProcessSentence();
  VERBOSE(1, "Line " << m_lineNumber << ": Search took " << searchTime << " seconds" << endl);
  IFVERBOSE(2) {
    GetSentenceStats().StopTimeTotal();
    TRACE_ERR(GetSentenceStats());
  }
}
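
// Typical driver usage (a sketch, not part of the decoder itself): a caller
// constructs one Manager per input sentence, runs the search, then reads off
// the results.
//
//   Manager manager(lineNumber, *source, staticData.GetSearchAlgorithm());
//   manager.ProcessSentence();
//   const Hypothesis *best = manager.GetBestHypothesis();  // best derivation
//   TrellisPathList nBest;
//   manager.CalcNBest(100, nBest, /*onlyDistinct=*/true);  // 100-best list
//
// GetBestHypothesis() is declared in Manager.h; the exact call site in the
// Moses front end may differ.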

/**
 * Print all derivations in the search graph. Note: the number of
 * derivations is exponential in the sentence length.
 */
void Manager::PrintAllDerivations(long translationId, ostream& outputStream) const
{
  const std::vector < HypothesisStack* > &hypoStackColl = m_search->GetHypothesisStacks();

  vector<const Hypothesis*> sortedPureHypo = hypoStackColl.back()->GetSortedList();

  if (sortedPureHypo.size() == 0)
    return;

  float remainingScore = 0;
  vector<const TargetPhrase*> remainingPhrases;

  // add all pure paths
  vector<const Hypothesis*>::const_iterator iterBestHypo;
  for (iterBestHypo = sortedPureHypo.begin()
       ; iterBestHypo != sortedPureHypo.end()
       ; ++iterBestHypo) {
    printThisHypothesis(translationId, *iterBestHypo, remainingPhrases, remainingScore, outputStream);
    printDivergentHypothesis(translationId, *iterBestHypo, remainingPhrases, remainingScore, outputStream);
  }
}

const TranslationOptionCollection* Manager::getSntTranslationOptions()
{
  return m_transOptColl;
}

void Manager::printDivergentHypothesis(long translationId, const Hypothesis* hypo, const vector <const TargetPhrase*> & remainingPhrases, float remainingScore, ostream& outputStream) const
{
  // backtrack from the predecessor
  if (hypo->GetId() > 0) {
    vector <const TargetPhrase*> followingPhrases;
    followingPhrases.push_back(& (hypo->GetCurrTargetPhrase()));
    followingPhrases.insert(followingPhrases.end(), remainingPhrases.begin(), remainingPhrases.end());
    printDivergentHypothesis(translationId, hypo->GetPrevHypo(), followingPhrases, remainingScore + hypo->GetScore() - hypo->GetPrevHypo()->GetScore(), outputStream);
  }

  // process the arcs: every possible arc that could replace this edge
  const ArcList *pAL = hypo->GetArcList();
  if (pAL) {
    const ArcList &arcList = *pAL;
    ArcList::const_iterator iterArc;
    for (iterArc = arcList.begin() ; iterArc != arcList.end() ; ++iterArc) {
      const Hypothesis *loserHypo = *iterArc;
      const Hypothesis* loserPrevHypo = loserHypo->GetPrevHypo();
      float arcScore = loserHypo->GetScore() - loserPrevHypo->GetScore();
      vector <const TargetPhrase* > followingPhrases;
      followingPhrases.push_back(&(loserHypo->GetCurrTargetPhrase()));
      followingPhrases.insert(followingPhrases.end(), remainingPhrases.begin(), remainingPhrases.end());
      printThisHypothesis(translationId, loserPrevHypo, followingPhrases, remainingScore + arcScore, outputStream);
      printDivergentHypothesis(translationId, loserPrevHypo, followingPhrases, remainingScore + arcScore, outputStream);
    }
  }
}

void Manager::printThisHypothesis(long translationId, const Hypothesis* hypo, const vector <const TargetPhrase*> & remainingPhrases, float remainingScore, ostream& outputStream) const
{
  outputStream << translationId << " ||| ";

  // yield of this hypothesis
  hypo->ToStream(outputStream);
  for (size_t p = 0; p < remainingPhrases.size(); ++p) {
    const TargetPhrase * phrase = remainingPhrases[p];
    size_t size = phrase->GetSize();
    for (size_t pos = 0 ; pos < size ; pos++) {
      const Factor *factor = phrase->GetFactor(pos, 0);
      outputStream << *factor << " ";
    }
  }

  outputStream << "||| " << hypo->GetScore() + remainingScore;
  outputStream << endl;
}

/**
 * After decoding, the hypotheses in the stacks and additional arcs
 * form a search graph that can be mined for n-best lists.
 * The heavy lifting is done in TrellisPath and TrellisPathCollection;
 * this function controls the process for one sentence.
 *
 * \param count the number of n-best translations to produce
 * \param ret holds the n-best list that was calculated
 * \param onlyDistinct whether to return only distinct target strings
 */
void Manager::CalcNBest(size_t count, TrellisPathList &ret, bool onlyDistinct) const
{
  if (count == 0)
    return;

  const std::vector < HypothesisStack* > &hypoStackColl = m_search->GetHypothesisStacks();

  vector<const Hypothesis*> sortedPureHypo = hypoStackColl.back()->GetSortedList();

  if (sortedPureHypo.size() == 0)
    return;

  TrellisPathCollection contenders;

  set<Phrase> distinctHyps;

  // add all pure paths
  vector<const Hypothesis*>::const_iterator iterBestHypo;
  for (iterBestHypo = sortedPureHypo.begin()
       ; iterBestHypo != sortedPureHypo.end()
       ; ++iterBestHypo) {
    contenders.Add(new TrellisPath(*iterBestHypo));
  }

  // this factor defines the stopping point for a distinct n-best list,
  // in case too many candidates are identical
  size_t nBestFactor = StaticData::Instance().GetNBestFactor();
  if (nBestFactor < 1) nBestFactor = 1000; // 0 = unlimited

  // MAIN loop
  for (size_t iteration = 0 ; (onlyDistinct ? distinctHyps.size() : ret.GetSize()) < count && contenders.GetSize() > 0 && (iteration < count * nBestFactor) ; iteration++) {
    // get next best from list of contenders
    TrellisPath *path = contenders.pop();
    UTIL_THROW_IF2(path == NULL, "path is NULL");
    // create deviations from current best
    path->CreateDeviantPaths(contenders);
    if (onlyDistinct) {
      Phrase tgtPhrase = path->GetSurfacePhrase();
      if (distinctHyps.insert(tgtPhrase).second) {
        ret.Add(path);
      } else {
        delete path;
        path = NULL;
      }
    } else {
      ret.Add(path);
    }

    if (onlyDistinct) {
      const size_t nBestFactor = StaticData::Instance().GetNBestFactor();
      if (nBestFactor > 0)
        contenders.Prune(count * nBestFactor);
    } else {
      contenders.Prune(count);
    }
  }
}
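
// A note on CalcNBest: this is a lazy best-first enumeration of trellis
// paths. The contenders collection acts as a priority queue of complete
// paths; popping the current best and adding its "deviant" paths (paths
// that diverge from it at exactly one arc) enumerates paths in score order
// without materialising the exponential set of derivations. This sketch of
// the scheme assumes TrellisPathCollection keeps its paths ordered by
// total score.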

struct SGNReverseCompare {
  bool operator() (const SearchGraphNode& s1, const SearchGraphNode& s2) const {
    return s1.hypo->GetId() > s2.hypo->GetId();
  }
};

/**
 * Implements lattice sampling, as in Chatterjee & Cancedda, EMNLP 2010.
 **/
void Manager::CalcLatticeSamples(size_t count, TrellisPathList &ret) const
{
  vector<SearchGraphNode> searchGraph;
  GetSearchGraph(searchGraph);

  // Calculation of the sigmas of each hypothesis and edge. In C&C notation
  // this is the "log of the cumulative unnormalized probability of all the
  // paths in the lattice for the hypothesis to a final node".
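  //
  // Spelled out (a sketch; log_sum is assumed to be the usual log-domain
  // addition helper from Util.h, i.e. log_sum(a,b) = log(exp(a)+exp(b))):
  //   sigma(h) = 0                                      if h has no successors
  //   sigma(h) = log_sum over successors s of h of
  //              ( edgeScore(h,s) + sigma(s) )          otherwise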
  typedef pair<int, int> Edge;
  map<const Hypothesis*, float> sigmas;
  map<Edge, float> edgeScores;
  map<const Hypothesis*, set<const Hypothesis*> > outgoingHyps;
  map<int,const Hypothesis*> idToHyp;
  map<int,float> fscores;

  // Iterating through the hypos in reverse order of id gives a reverse
  // topological order. We rely on the fact that hypo ids are given out
  // sequentially, as the search proceeds.
  // NB: could just sort by stack.
  sort(searchGraph.begin(), searchGraph.end(), SGNReverseCompare());

  // first task is to fill in the outgoing hypos and edge scores
  for (vector<SearchGraphNode>::const_iterator i = searchGraph.begin();
       i != searchGraph.end(); ++i) {
    const Hypothesis* hypo = i->hypo;
    idToHyp[hypo->GetId()] = hypo;
    fscores[hypo->GetId()] = i->fscore;
    if (hypo->GetId()) {
      // back to current
      const Hypothesis* prevHypo = i->hypo->GetPrevHypo();
      outgoingHyps[prevHypo].insert(hypo);
      edgeScores[Edge(prevHypo->GetId(),hypo->GetId())] =
        hypo->GetScore() - prevHypo->GetScore();
    }
    // forward from current
    if (i->forward >= 0) {
      map<int,const Hypothesis*>::const_iterator idToHypIter = idToHyp.find(i->forward);
      UTIL_THROW_IF2(idToHypIter == idToHyp.end(),
                     "Couldn't find hypothesis " << i->forward);
      const Hypothesis* nextHypo = idToHypIter->second;
      outgoingHyps[hypo].insert(nextHypo);
      map<int,float>::const_iterator fscoreIter = fscores.find(nextHypo->GetId());
      UTIL_THROW_IF2(fscoreIter == fscores.end(),
                     "Couldn't find scores for hypothesis " << nextHypo->GetId());
      edgeScores[Edge(hypo->GetId(),nextHypo->GetId())] =
        i->fscore - fscoreIter->second;
    }
  }

  // then run through again to calculate sigmas
  for (vector<SearchGraphNode>::const_iterator i = searchGraph.begin();
       i != searchGraph.end(); ++i) {

    if (i->forward == -1) {
      sigmas[i->hypo] = 0;
    } else {
      map<const Hypothesis*, set<const Hypothesis*> >::const_iterator outIter =
        outgoingHyps.find(i->hypo);

      UTIL_THROW_IF2(outIter == outgoingHyps.end(),
                     "Couldn't find hypothesis " << i->hypo->GetId());
      float sigma = 0;
      for (set<const Hypothesis*>::const_iterator j = outIter->second.begin();
           j != outIter->second.end(); ++j) {
        map<const Hypothesis*, float>::const_iterator succIter = sigmas.find(*j);
        UTIL_THROW_IF2(succIter == sigmas.end(),
                       "Couldn't find hypothesis " << (*j)->GetId());
        map<Edge,float>::const_iterator edgeScoreIter =
          edgeScores.find(Edge(i->hypo->GetId(),(*j)->GetId()));
        UTIL_THROW_IF2(edgeScoreIter == edgeScores.end(),
                       "Couldn't find edge for hypothesis " << (*j)->GetId());
        float term = edgeScoreIter->second + succIter->second; // add sigma(*j)
        if (sigma == 0) {
          sigma = term;
        } else {
          sigma = log_sum(sigma,term);
        }
      }
      sigmas[i->hypo] = sigma;
    }
  }

  // The actual sampling: repeatedly walk forward from the start hypothesis,
  // choosing each successor with probability proportional to its share of
  // the cumulative path mass.
  const Hypothesis* startHypo = searchGraph.back().hypo;
  UTIL_THROW_IF2(startHypo->GetId() != 0, "Expecting the start hypothesis");
  for (size_t i = 0; i < count; ++i) {
    vector<const Hypothesis*> path;
    path.push_back(startHypo);
    while (1) {
      map<const Hypothesis*, set<const Hypothesis*> >::const_iterator outIter =
        outgoingHyps.find(path.back());
      if (outIter == outgoingHyps.end() || !outIter->second.size()) {
        // end of the path
        break;
      }
      // score the possible continuations
      vector<const Hypothesis*> candidates;
      vector<float> candidateScores;
      float scoreTotal = 0;
      for (set<const Hypothesis*>::const_iterator j = outIter->second.begin();
           j != outIter->second.end(); ++j) {
        candidates.push_back(*j);
        UTIL_THROW_IF2(sigmas.find(*j) == sigmas.end(),
                       "Hypothesis " << (*j)->GetId() << " not found");
        Edge edge(path.back()->GetId(),(*j)->GetId());
        UTIL_THROW_IF2(edgeScores.find(edge) == edgeScores.end(),
                       "Edge not found");
        candidateScores.push_back(sigmas[*j] + edgeScores[edge]);
        if (scoreTotal == 0) {
          scoreTotal = candidateScores.back();
        } else {
          scoreTotal = log_sum(candidateScores.back(), scoreTotal);
        }
      }

      // normalise
      transform(candidateScores.begin(), candidateScores.end(), candidateScores.begin(), bind2nd(minus<float>(),scoreTotal));

      // draw the sample by inverting the CDF: accumulate (log-domain)
      // probabilities until they exceed the random draw
      float frandom = log((float)rand()/RAND_MAX);
      size_t position = 1;
      float sum = candidateScores[0];
      for (; position < candidateScores.size() && sum < frandom; ++position) {
        sum = log_sum(sum,candidateScores[position]);
      }
      const Hypothesis* chosen = candidates[position-1];
      path.push_back(chosen);
    }

    // convert the hypos to a TrellisPath
    ret.Add(new TrellisPath(path));
  }
}

void Manager::CalcDecoderStatistics() const
{
  const Hypothesis *hypo = GetBestHypothesis();
  if (hypo != NULL) {
    GetSentenceStats().CalcFinalStats(*hypo);
    IFVERBOSE(2) {
      string buff;
      string buff2;
      TRACE_ERR( "Source and Target Units:"
                 << hypo->GetInput());
      buff2.insert(0,"] ");
      buff2.insert(0,(hypo->GetCurrTargetPhrase()).ToString());
      buff2.insert(0,":");
      buff2.insert(0,(hypo->GetCurrSourceWordsRange()).ToString());
      buff2.insert(0,"[");

      hypo = hypo->GetPrevHypo();
      while (hypo != NULL) {
        // don't print out the empty final hypo
        buff.insert(0,buff2);
        buff2.clear();
        buff2.insert(0,"] ");
        buff2.insert(0,(hypo->GetCurrTargetPhrase()).ToString());
        buff2.insert(0,":");
        buff2.insert(0,(hypo->GetCurrSourceWordsRange()).ToString());
        buff2.insert(0,"[");
        hypo = hypo->GetPrevHypo();
      }
      TRACE_ERR( buff << endl);
    }
  }
}

void OutputWordGraph(std::ostream &outputWordGraphStream, const Hypothesis *hypo, size_t &linkId)
{
  const Hypothesis *prevHypo = hypo->GetPrevHypo();

  outputWordGraphStream << "J=" << linkId++
                        << "\tS=" << prevHypo->GetId()
                        << "\tE=" << hypo->GetId()
                        << "\ta=";

  // phrase table scores
  const std::vector<PhraseDictionary*> &phraseTables = PhraseDictionary::GetColl();
  std::vector<PhraseDictionary*>::const_iterator iterPhraseTable;
  for (iterPhraseTable = phraseTables.begin() ; iterPhraseTable != phraseTables.end() ; ++iterPhraseTable) {
    const PhraseDictionary *phraseTable = *iterPhraseTable;
    vector<float> scores = hypo->GetScoreBreakdown().GetScoresForProducer(phraseTable);

    outputWordGraphStream << scores[0];
    vector<float>::const_iterator iterScore;
    for (iterScore = ++scores.begin() ; iterScore != scores.end() ; ++iterScore) {
      outputWordGraphStream << ", " << *iterScore;
    }
  }

  // language model scores
  outputWordGraphStream << "\tl=";

  const std::vector<const StatefulFeatureFunction*> &statefulFFs = StatefulFeatureFunction::GetStatefulFeatureFunctions();
  for (size_t i = 0; i < statefulFFs.size(); ++i) {
    const StatefulFeatureFunction *ff = statefulFFs[i];
    const LanguageModel *lm = dynamic_cast<const LanguageModel*>(ff);
    if (lm == NULL) {
      // skip stateful features that are not language models
      continue;
    }

    vector<float> scores = hypo->GetScoreBreakdown().GetScoresForProducer(lm);

    outputWordGraphStream << scores[0];
    vector<float>::const_iterator iterScore;
    for (iterScore = ++scores.begin() ; iterScore != scores.end() ; ++iterScore) {
      outputWordGraphStream << ", " << *iterScore;
    }
  }

  // re-ordering
  outputWordGraphStream << "\tr=";

  const std::vector<FeatureFunction*> &ffs = FeatureFunction::GetFeatureFunctions();
  std::vector<FeatureFunction*>::const_iterator iter;
  for (iter = ffs.begin(); iter != ffs.end(); ++iter) {
    const FeatureFunction *ff = *iter;

    const DistortionScoreProducer *model = dynamic_cast<const DistortionScoreProducer*>(ff);
    if (model) {
      outputWordGraphStream << hypo->GetScoreBreakdown().GetScoreForProducer(model);
    }
  }

  // lexicalised re-ordering (currently disabled)
  /*
  const std::vector<LexicalReordering*> &lexOrderings = StaticData::Instance().GetReorderModels();
  std::vector<LexicalReordering*>::const_iterator iterLexOrdering;
  for (iterLexOrdering = lexOrderings.begin() ; iterLexOrdering != lexOrderings.end() ; ++iterLexOrdering) {
    LexicalReordering *lexicalReordering = *iterLexOrdering;
    vector<float> scores = hypo->GetScoreBreakdown().GetScoresForProducer(lexicalReordering);

    outputWordGraphStream << scores[0];
    vector<float>::const_iterator iterScore;
    for (iterScore = ++scores.begin() ; iterScore != scores.end() ; ++iterScore) {
      outputWordGraphStream << ", " << *iterScore;
    }
  }
  */

  // words: output both source and target phrases in the word graph
  outputWordGraphStream << "\tw=" << hypo->GetSourcePhraseStringRep() << "|" << hypo->GetCurrTargetPhrase();

  outputWordGraphStream << endl;
}
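
// For reference, each link emitted above has the shape (a sketch; field
// contents depend on the feature functions configured):
//   J=<linkId>\tS=<prevHypoId>\tE=<hypoId>\ta=<phrase-table scores>
//   \tl=<LM scores>\tr=<distortion score>\tw=<source phrase>|<target phrase>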

void Manager::GetOutputLanguageModelOrder( std::ostream &out, const Hypothesis *hypo ) {
  Phrase translation;
  hypo->GetOutputPhrase(translation);
  const std::vector<const StatefulFeatureFunction*> &statefulFFs = StatefulFeatureFunction::GetStatefulFeatureFunctions();
  for (size_t i = 0; i < statefulFFs.size(); ++i) {
    const StatefulFeatureFunction *ff = statefulFFs[i];
    if (const LanguageModel *lm = dynamic_cast<const LanguageModel*>(ff)) {
      lm->ReportHistoryOrder(out, translation);
    }
  }
}

void Manager::GetWordGraph(long translationId, std::ostream &outputWordGraphStream) const
{
  const StaticData &staticData = StaticData::Instance();
  string fileName = staticData.GetParam("output-word-graph")[0];
  bool outputNBest = Scan<bool>(staticData.GetParam("output-word-graph")[1]);
  const std::vector < HypothesisStack* > &hypoStackColl = m_search->GetHypothesisStacks();

  outputWordGraphStream << "VERSION=1.0" << endl
                        << "UTTERANCE=" << translationId << endl;

  size_t linkId = 0;
  std::vector < HypothesisStack* >::const_iterator iterStack;
  for (iterStack = ++hypoStackColl.begin() ; iterStack != hypoStackColl.end() ; ++iterStack) {
    const HypothesisStack &stack = **iterStack;
    HypothesisStack::const_iterator iterHypo;
    for (iterHypo = stack.begin() ; iterHypo != stack.end() ; ++iterHypo) {
      const Hypothesis *hypo = *iterHypo;
      OutputWordGraph(outputWordGraphStream, hypo, linkId);

      if (outputNBest) {
        const ArcList *arcList = hypo->GetArcList();
        if (arcList != NULL) {
          ArcList::const_iterator iterArcList;
          for (iterArcList = arcList->begin() ; iterArcList != arcList->end() ; ++iterArcList) {
            const Hypothesis *loserHypo = *iterArcList;
            OutputWordGraph(outputWordGraphStream, loserHypo, linkId);
          }
        }
      } // if (outputNBest)
    } // for (iterHypo
  } // for (iterStack
}

void Manager::GetSearchGraph(vector<SearchGraphNode>& searchGraph) const
{
  std::map < int, bool > connected;
  std::map < int, int > forward;
  std::map < int, double > forwardScore;

  // *** find connected hypotheses ***
  std::vector< const Hypothesis *> connectedList;
  GetConnectedGraph(&connected, &connectedList);

  // *** compute best forward path for each hypothesis ***

  // forward cost of hypotheses on final stack is 0
  const std::vector < HypothesisStack* > &hypoStackColl = m_search->GetHypothesisStacks();
  const HypothesisStack &finalStack = *hypoStackColl.back();
  HypothesisStack::const_iterator iterHypo;
  for (iterHypo = finalStack.begin() ; iterHypo != finalStack.end() ; ++iterHypo) {
    const Hypothesis *hypo = *iterHypo;
    forwardScore[ hypo->GetId() ] = 0.0f;
    forward[ hypo->GetId() ] = -1;
  }

  // compete for best forward score of previous hypothesis
  std::vector < HypothesisStack* >::const_iterator iterStack;
  for (iterStack = --hypoStackColl.end() ; iterStack != hypoStackColl.begin() ; --iterStack) {
    const HypothesisStack &stack = **iterStack;
    HypothesisStack::const_iterator iterHypo;
    for (iterHypo = stack.begin() ; iterHypo != stack.end() ; ++iterHypo) {
      const Hypothesis *hypo = *iterHypo;
      if (connected.find( hypo->GetId() ) != connected.end()) {
        // make a play for previous hypothesis
        const Hypothesis *prevHypo = hypo->GetPrevHypo();
        double fscore = forwardScore[ hypo->GetId() ] +
                        hypo->GetScore() - prevHypo->GetScore();
        if (forwardScore.find( prevHypo->GetId() ) == forwardScore.end()
            || forwardScore.find( prevHypo->GetId() )->second < fscore) {
          forwardScore[ prevHypo->GetId() ] = fscore;
          forward[ prevHypo->GetId() ] = hypo->GetId();
        }
        // all arcs also make a play
        const ArcList *arcList = hypo->GetArcList();
        if (arcList != NULL) {
          ArcList::const_iterator iterArcList;
          for (iterArcList = arcList->begin() ; iterArcList != arcList->end() ; ++iterArcList) {
            const Hypothesis *loserHypo = *iterArcList;
            // make a play
            const Hypothesis *loserPrevHypo = loserHypo->GetPrevHypo();
            double fscore = forwardScore[ hypo->GetId() ] +
                            loserHypo->GetScore() - loserPrevHypo->GetScore();
            if (forwardScore.find( loserPrevHypo->GetId() ) == forwardScore.end()
                || forwardScore.find( loserPrevHypo->GetId() )->second < fscore) {
              forwardScore[ loserPrevHypo->GetId() ] = fscore;
              forward[ loserPrevHypo->GetId() ] = loserHypo->GetId();
            }
          } // end for arc list
        } // end if arc list empty
      } // end if hypo connected
    } // end for hypo
  } // end for stack

  // *** output all connected hypotheses ***

  connected[ 0 ] = true;
  for (iterStack = hypoStackColl.begin() ; iterStack != hypoStackColl.end() ; ++iterStack) {
    const HypothesisStack &stack = **iterStack;
    HypothesisStack::const_iterator iterHypo;
    for (iterHypo = stack.begin() ; iterHypo != stack.end() ; ++iterHypo) {
      const Hypothesis *hypo = *iterHypo;
      if (connected.find( hypo->GetId() ) != connected.end()) {
        searchGraph.push_back(SearchGraphNode(hypo,NULL,forward[hypo->GetId()],
                                              forwardScore[hypo->GetId()]));

        const ArcList *arcList = hypo->GetArcList();
        if (arcList != NULL) {
          ArcList::const_iterator iterArcList;
          for (iterArcList = arcList->begin() ; iterArcList != arcList->end() ; ++iterArcList) {
            const Hypothesis *loserHypo = *iterArcList;
            searchGraph.push_back(SearchGraphNode(loserHypo,hypo,
                                                  forward[hypo->GetId()], forwardScore[hypo->GetId()]));
          }
        } // end if arcList empty
      } // end if connected
    } // end for iterHypo
  } // end for iterStack
}
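
// A note on the forward scores computed above: forward[id] records the next
// hypothesis on the best continuation of hypothesis id, and forwardScore[id]
// the score of that continuation (0 for hypotheses on the final stack). The
// fscore stored in each emitted SearchGraphNode is therefore a Viterbi
// completion ("outside") estimate, which is what CalcLatticeSamples uses to
// derive its forward edge scores.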

void Manager::OutputFeatureWeightsForSLF(std::ostream &outputSearchGraphStream) const
{
  outputSearchGraphStream.setf(std::ios::fixed);
  outputSearchGraphStream.precision(6);

  const vector<const StatelessFeatureFunction*>& slf = StatelessFeatureFunction::GetStatelessFeatureFunctions();
  const vector<const StatefulFeatureFunction*>& sff = StatefulFeatureFunction::GetStatefulFeatureFunctions();
  size_t featureIndex = 1;
  for (size_t i = 0; i < sff.size(); ++i) {
    featureIndex = OutputFeatureWeightsForSLF(featureIndex, sff[i], outputSearchGraphStream);
  }
  for (size_t i = 0; i < slf.size(); ++i) {
    // (an earlier revision filtered out the "u", "tm", "I" and "g" features here)
    featureIndex = OutputFeatureWeightsForSLF(featureIndex, slf[i], outputSearchGraphStream);
  }
  const vector<PhraseDictionary*>& pds = PhraseDictionary::GetColl();
  for( size_t i=0; i<pds.size(); i++ ) {
    featureIndex = OutputFeatureWeightsForSLF(featureIndex, pds[i], outputSearchGraphStream);
  }
  const vector<GenerationDictionary*>& gds = GenerationDictionary::GetColl();
  for( size_t i=0; i<gds.size(); i++ ) {
    featureIndex = OutputFeatureWeightsForSLF(featureIndex, gds[i], outputSearchGraphStream);
  }
}

void Manager::OutputFeatureValuesForSLF(const Hypothesis* hypo, bool zeros, std::ostream &outputSearchGraphStream) const
{
  outputSearchGraphStream.setf(std::ios::fixed);
  outputSearchGraphStream.precision(6);

  const vector<const StatelessFeatureFunction*>& slf = StatelessFeatureFunction::GetStatelessFeatureFunctions();
  const vector<const StatefulFeatureFunction*>& sff = StatefulFeatureFunction::GetStatefulFeatureFunctions();
  size_t featureIndex = 1;
  for (size_t i = 0; i < sff.size(); ++i) {
    featureIndex = OutputFeatureValuesForSLF(featureIndex, zeros, hypo, sff[i], outputSearchGraphStream);
  }
  for (size_t i = 0; i < slf.size(); ++i) {
    // (an earlier revision filtered out the "u", "tm", "I" and "g" features here)
    featureIndex = OutputFeatureValuesForSLF(featureIndex, zeros, hypo, slf[i], outputSearchGraphStream);
  }
  const vector<PhraseDictionary*>& pds = PhraseDictionary::GetColl();
  for( size_t i=0; i<pds.size(); i++ ) {
    featureIndex = OutputFeatureValuesForSLF(featureIndex, zeros, hypo, pds[i], outputSearchGraphStream);
  }
  const vector<GenerationDictionary*>& gds = GenerationDictionary::GetColl();
  for( size_t i=0; i<gds.size(); i++ ) {
    featureIndex = OutputFeatureValuesForSLF(featureIndex, zeros, hypo, gds[i], outputSearchGraphStream);
  }
}

void Manager::OutputFeatureValuesForHypergraph(const Hypothesis* hypo, std::ostream &outputSearchGraphStream) const
{
  outputSearchGraphStream.setf(std::ios::fixed);
  outputSearchGraphStream.precision(6);

  const vector<const StatelessFeatureFunction*>& slf = StatelessFeatureFunction::GetStatelessFeatureFunctions();
  const vector<const StatefulFeatureFunction*>& sff = StatefulFeatureFunction::GetStatefulFeatureFunctions();
  size_t featureIndex = 1;
  for (size_t i = 0; i < sff.size(); ++i) {
    featureIndex = OutputFeatureValuesForHypergraph(featureIndex, hypo, sff[i], outputSearchGraphStream);
  }
  for (size_t i = 0; i < slf.size(); ++i) {
    // (an earlier revision filtered out the "u", "tm", "I" and "g" features here)
    featureIndex = OutputFeatureValuesForHypergraph(featureIndex, hypo, slf[i], outputSearchGraphStream);
  }
  const vector<PhraseDictionary*>& pds = PhraseDictionary::GetColl();
  for( size_t i=0; i<pds.size(); i++ ) {
    featureIndex = OutputFeatureValuesForHypergraph(featureIndex, hypo, pds[i], outputSearchGraphStream);
  }
  const vector<GenerationDictionary*>& gds = GenerationDictionary::GetColl();
  for( size_t i=0; i<gds.size(); i++ ) {
    featureIndex = OutputFeatureValuesForHypergraph(featureIndex, hypo, gds[i], outputSearchGraphStream);
  }
}

size_t Manager::OutputFeatureWeightsForSLF(size_t index, const FeatureFunction* ff, std::ostream &outputSearchGraphStream) const
{
  size_t numScoreComps = ff->GetNumScoreComponents();
  if (numScoreComps != 0) {
    vector<float> values = StaticData::Instance().GetAllWeights().GetScoresForProducer(ff);
    for (size_t i = 0; i < numScoreComps; ++i) {
      outputSearchGraphStream << "# " << ff->GetScoreProducerDescription()
                              << " " << ff->GetScoreProducerDescription()
                              << " " << (i+1) << " of " << numScoreComps << endl
                              << "x" << (index+i) << "scale=" << values[i] << endl;
    }
    return index+numScoreComps;
  } else {
    cerr << "Sparse features are not supported when outputting HTK standard lattice format" << endl;
    assert(false);
    return 0;
  }
}

size_t Manager::OutputFeatureValuesForSLF(size_t index, bool zeros, const Hypothesis* hypo, const FeatureFunction* ff, std::ostream &outputSearchGraphStream) const
{
  const ScoreComponentCollection& scoreCollection = hypo->GetScoreBreakdown();

  vector<float> featureValues = scoreCollection.GetScoresForProducer(ff);
  size_t numScoreComps = featureValues.size();
  for (size_t i = 0; i < numScoreComps; ++i) {
    outputSearchGraphStream << "x" << (index+i) << "=" << ((zeros) ? 0.0 : featureValues[i]) << " ";
  }
  return index+numScoreComps;
}

size_t Manager::OutputFeatureValuesForHypergraph(size_t index, const Hypothesis* hypo, const FeatureFunction* ff, std::ostream &outputSearchGraphStream) const
{
  // use the score delta contributed by this hypothesis only,
  // not the accumulated score along the whole derivation
  ScoreComponentCollection scoreCollection = hypo->GetScoreBreakdown();
  const Hypothesis *prevHypo = hypo->GetPrevHypo();
  if (prevHypo) {
    scoreCollection.MinusEquals( prevHypo->GetScoreBreakdown() );
  }
  vector<float> featureValues = scoreCollection.GetScoresForProducer(ff);
  size_t numScoreComps = featureValues.size();

  if (numScoreComps > 1) {
    for (size_t i = 0; i < numScoreComps; ++i) {
      outputSearchGraphStream << ff->GetScoreProducerDescription() << i << "=" << featureValues[i] << " ";
    }
  } else {
    outputSearchGraphStream << ff->GetScoreProducerDescription() << "=" << featureValues[0] << " ";
  }

  return index+numScoreComps;
}

/**! Output search graph in the hypergraph format of Kenneth Heafield's lazy hypergraph decoder */
void Manager::OutputSearchGraphAsHypergraph(long translationId, std::ostream &outputSearchGraphStream) const
{
  VERBOSE(2,"Getting search graph to output as hypergraph for sentence " << translationId << std::endl)

  vector<SearchGraphNode> searchGraph;
  GetSearchGraph(searchGraph);

  map<int,int> mosesIDToHypergraphID;
  set<int> terminalNodes;
  multimap<int,int> hypergraphIDToArcs;

  VERBOSE(2,"Gathering information about search graph to output as hypergraph for sentence " << translationId << std::endl)

  long numNodes = 0;
  long endNode = 0;
  {
    long hypergraphHypothesisID = 0;
    for (size_t arcNumber = 0, size=searchGraph.size(); arcNumber < size; ++arcNumber) {

      // get an id number for the previous hypothesis
      const Hypothesis *prevHypo = searchGraph[arcNumber].hypo->GetPrevHypo();
      if (prevHypo!=NULL) {
        int mosesPrevHypothesisID = prevHypo->GetId();
        if (mosesIDToHypergraphID.count(mosesPrevHypothesisID) == 0) {
          mosesIDToHypergraphID[mosesPrevHypothesisID] = hypergraphHypothesisID;
          hypergraphHypothesisID += 1;
        }
      }

      // get an id number for this hypothesis
      int mosesHypothesisID;
      if (searchGraph[arcNumber].recombinationHypo) {
        mosesHypothesisID = searchGraph[arcNumber].recombinationHypo->GetId();
      } else {
        mosesHypothesisID = searchGraph[arcNumber].hypo->GetId();
      }

      if (mosesIDToHypergraphID.count(mosesHypothesisID) == 0) {

        mosesIDToHypergraphID[mosesHypothesisID] = hypergraphHypothesisID;

        bool terminalNode = (searchGraph[arcNumber].forward == -1);
        if (terminalNode) {
          // final arc to end node, representing the end of the sentence </s>
          terminalNodes.insert(hypergraphHypothesisID);
        }

        hypergraphHypothesisID += 1;
      }

      // record that this arc ends at this node
      hypergraphIDToArcs.insert(pair<int,int>(mosesIDToHypergraphID[mosesHypothesisID],arcNumber));
    }

    // unique end node
    endNode = hypergraphHypothesisID;
    numNodes = endNode + 1;
  }

  long numArcs = searchGraph.size() + terminalNodes.size();

  // print number of nodes and arcs
  outputSearchGraphStream << numNodes << " " << numArcs << endl;

  VERBOSE(2,"Search graph to output as hypergraph for sentence " << translationId
          << " contains " << numArcs << " arcs and " << numNodes << " nodes" << std::endl)

  VERBOSE(2,"Outputting search graph to output as hypergraph for sentence " << translationId << std::endl)

  for (int hypergraphHypothesisID=0; hypergraphHypothesisID < endNode; hypergraphHypothesisID+=1) {
    if (hypergraphHypothesisID % 100000 == 0) {
      VERBOSE(2,"Processed " << hypergraphHypothesisID << " of " << numNodes << " hypergraph nodes for sentence " << translationId << std::endl);
    }
    size_t count = hypergraphIDToArcs.count(hypergraphHypothesisID);
    if (count > 0) {
      outputSearchGraphStream << count << "\n";

      pair<multimap<int,int>::iterator, multimap<int,int>::iterator> range =
        hypergraphIDToArcs.equal_range(hypergraphHypothesisID);
      for (multimap<int,int>::iterator it=range.first; it!=range.second; ++it) {
        int lineNumber = (*it).second;
        const Hypothesis *thisHypo = searchGraph[lineNumber].hypo;
        int mosesHypothesisID;
        if (searchGraph[lineNumber].recombinationHypo) {
          mosesHypothesisID = searchGraph[lineNumber].recombinationHypo->GetId();
        } else {
          mosesHypothesisID = searchGraph[lineNumber].hypo->GetId();
        }
        UTIL_THROW_IF2(
          (hypergraphHypothesisID != mosesIDToHypergraphID[mosesHypothesisID]),
          "Error while writing search lattice as hypergraph for sentence " << translationId << ". " <<
          "Moses node " << mosesHypothesisID << " was expected to have hypergraph id " << hypergraphHypothesisID <<
          ", but actually had hypergraph id " << mosesIDToHypergraphID[mosesHypothesisID] <<
          ". There are " << numNodes << " nodes in the search lattice."
        );

        const Hypothesis *prevHypo = thisHypo->GetPrevHypo();
        if (prevHypo==NULL) {
          outputSearchGraphStream << "<s> ||| \n";
        } else {
          int startNode = mosesIDToHypergraphID[prevHypo->GetId()];
          UTIL_THROW_IF2(
            (startNode >= hypergraphHypothesisID),
            "Error while writing search lattice as hypergraph for sentence " << translationId << ". " <<
            "The nodes must be output in topological order. The code attempted to violate this restriction."
          );

          const TargetPhrase &targetPhrase = thisHypo->GetCurrTargetPhrase();
          int targetWordCount = targetPhrase.GetSize();

          outputSearchGraphStream << "[" << startNode << "]";
          for (int targetWordIndex=0; targetWordIndex<targetWordCount; targetWordIndex+=1) {
            outputSearchGraphStream << " " << targetPhrase.GetWord(targetWordIndex);
          }
          outputSearchGraphStream << " ||| ";
          OutputFeatureValuesForHypergraph(thisHypo, outputSearchGraphStream);
          outputSearchGraphStream << "\n";
        }
      }
    }
  }

  // print node and arc(s) for end of sentence </s>
  outputSearchGraphStream << terminalNodes.size() << "\n";
  for (set<int>::iterator it=terminalNodes.begin(); it!=terminalNodes.end(); ++it) {
    outputSearchGraphStream << "[" << (*it) << "] </s> ||| \n";
  }
}
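
// The hypergraph file written above looks like the following sketch (exact
// feature names depend on the configuration):
//   <numNodes> <numArcs>
//   1
//   <s> |||
//   2
//   [0] the house ||| LM=-4.2 PhraseModel0=-1.3 ...
//   ...
//   1
//   [42] </s> |||
// i.e. each node lists its incoming-arc count followed by one line per arc.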

/**! Output search graph in HTK standard lattice format (SLF) */
void Manager::OutputSearchGraphAsSLF(long translationId, std::ostream &outputSearchGraphStream) const
{
  vector<SearchGraphNode> searchGraph;
  GetSearchGraph(searchGraph);

  long numArcs = 0;
  long numNodes = 0;

  map<int,int> nodes;
  set<int> terminalNodes;

  // unique start node
  nodes[0] = 0;

  for (size_t arcNumber = 0; arcNumber < searchGraph.size(); ++arcNumber) {

    int targetWordCount = searchGraph[arcNumber].hypo->GetCurrTargetPhrase().GetSize();
    numArcs += targetWordCount;

    int hypothesisID = searchGraph[arcNumber].hypo->GetId();
    if (nodes.count(hypothesisID) == 0) {

      numNodes += targetWordCount;
      nodes[hypothesisID] = numNodes;

      bool terminalNode = (searchGraph[arcNumber].forward == -1);
      if (terminalNode) {
        numArcs += 1;
      }
    }
  }
  numNodes += 1;

  // unique end node
  nodes[numNodes] = numNodes;

  outputSearchGraphStream << "UTTERANCE=Sentence_" << translationId << endl;
  outputSearchGraphStream << "VERSION=1.1" << endl;
  outputSearchGraphStream << "base=2.71828182845905" << endl;
  outputSearchGraphStream << "NODES=" << (numNodes+1) << endl;
  outputSearchGraphStream << "LINKS=" << numArcs << endl;

  OutputFeatureWeightsForSLF(outputSearchGraphStream);

  for (size_t arcNumber = 0, lineNumber = 0; lineNumber < searchGraph.size(); ++lineNumber) {
    const Hypothesis *thisHypo = searchGraph[lineNumber].hypo;
    const Hypothesis *prevHypo = thisHypo->GetPrevHypo();
    if (prevHypo) {

      int startNode = nodes[prevHypo->GetId()];
      int endNode = nodes[thisHypo->GetId()];
      bool terminalNode = (searchGraph[lineNumber].forward == -1);
      const TargetPhrase &targetPhrase = thisHypo->GetCurrTargetPhrase();
      int targetWordCount = targetPhrase.GetSize();

      // multi-word phrases are split into a chain of single-word links
      for (int targetWordIndex=0; targetWordIndex<targetWordCount; targetWordIndex+=1) {
        int x = (targetWordCount-targetWordIndex);

        outputSearchGraphStream << "J=" << arcNumber;

        if (targetWordIndex==0) {
          outputSearchGraphStream << " S=" << startNode;
        } else {
          outputSearchGraphStream << " S=" << endNode - x;
        }

        outputSearchGraphStream << " E=" << endNode - (x-1)
                                << " W=" << targetPhrase.GetWord(targetWordIndex);

        // scores are attached to the first link of the phrase only;
        // subsequent links get zeros
        OutputFeatureValuesForSLF(thisHypo, (targetWordIndex>0), outputSearchGraphStream);

        outputSearchGraphStream << endl;

        arcNumber += 1;
      }

      if (terminalNode && terminalNodes.count(endNode) == 0) {
        terminalNodes.insert(endNode);
        outputSearchGraphStream << "J=" << arcNumber
                                << " S=" << endNode
                                << " E=" << numNodes
                                << endl;
        arcNumber += 1;
      }
    }
  }
}
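
/** Output a single search graph node, either in the traditional
 *  -output-search-graph format or, when extended output is enabled, in a
 *  superset of it that also carries the per-edge score breakdown.
 *  Illustrative traditional line (all field values invented):
 *    0 hyp=5 stack=2 back=3 score=-1.42 transition=-0.37 forward=8 fscore=-2.91 covered=0-1 out=the house
 */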
void OutputSearchNode(long translationId, std::ostream &outputSearchGraphStream,
                      const SearchGraphNode& searchNode)
{
  const vector<FactorType> &outputFactorOrder = StaticData::Instance().GetOutputFactorOrder();
  bool extendedFormat = StaticData::Instance().GetOutputSearchGraphExtended();
  outputSearchGraphStream << translationId;

  // special case: initial hypothesis
  if ( searchNode.hypo->GetId() == 0 ) {
    outputSearchGraphStream << " hyp=0 stack=0";
    if (extendedFormat) {
      outputSearchGraphStream << " forward=" << searchNode.forward << " fscore=" << searchNode.fscore;
    }
    outputSearchGraphStream << endl;
    return;
  }

  const Hypothesis *prevHypo = searchNode.hypo->GetPrevHypo();

  // output in traditional format
  if (!extendedFormat) {
    outputSearchGraphStream << " hyp=" << searchNode.hypo->GetId()
                            << " stack=" << searchNode.hypo->GetWordsBitmap().GetNumWordsCovered()
                            << " back=" << prevHypo->GetId()
                            << " score=" << searchNode.hypo->GetScore()
                            << " transition=" << (searchNode.hypo->GetScore() - prevHypo->GetScore());

    if (searchNode.recombinationHypo != NULL)
      outputSearchGraphStream << " recombined=" << searchNode.recombinationHypo->GetId();

    outputSearchGraphStream << " forward=" << searchNode.forward << " fscore=" << searchNode.fscore
                            << " covered=" << searchNode.hypo->GetCurrSourceWordsRange().GetStartPos()
                            << "-" << searchNode.hypo->GetCurrSourceWordsRange().GetEndPos()
                            << " out=" << searchNode.hypo->GetCurrTargetPhrase().GetStringRep(outputFactorOrder)
                            << endl;
    return;
  }

  // output in extended format
  //  if (searchNode.recombinationHypo != NULL)
  //    outputSearchGraphStream << " hyp=" << searchNode.recombinationHypo->GetId();
  //  else
  outputSearchGraphStream << " hyp=" << searchNode.hypo->GetId();

  outputSearchGraphStream << " stack=" << searchNode.hypo->GetWordsBitmap().GetNumWordsCovered()
                          << " back=" << prevHypo->GetId()
                          << " score=" << searchNode.hypo->GetScore()
                          << " transition=" << (searchNode.hypo->GetScore() - prevHypo->GetScore());

  if (searchNode.recombinationHypo != NULL)
    outputSearchGraphStream << " recombined=" << searchNode.recombinationHypo->GetId();

  outputSearchGraphStream << " forward=" << searchNode.forward << " fscore=" << searchNode.fscore
                          << " covered=" << searchNode.hypo->GetCurrSourceWordsRange().GetStartPos()
                          << "-" << searchNode.hypo->GetCurrSourceWordsRange().GetEndPos();

  // Modified so that -osgx is a superset of -osg (GST Oct 2011)
  ScoreComponentCollection scoreBreakdown = searchNode.hypo->GetScoreBreakdown();
  scoreBreakdown.MinusEquals( prevHypo->GetScoreBreakdown() );
  //outputSearchGraphStream << " scores = [ " << StaticData::Instance().GetAllWeights();
  outputSearchGraphStream << " scores=\"" << scoreBreakdown << "\"";

  outputSearchGraphStream << " out=\"" << searchNode.hypo->GetSourcePhraseStringRep() << "|" <<
                          searchNode.hypo->GetCurrTargetPhrase().GetStringRep(outputFactorOrder) << "\"" << endl;
  // outputSearchGraphStream << " out=" << searchNode.hypo->GetCurrTargetPhrase().GetStringRep(outputFactorOrder) << endl;
}
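
/** Collect every hypothesis that lies on some path to the final stack, i.e.
 *  the connected part of the search graph. Membership is recorded in
 *  *pConnected and the hypotheses are appended to *pConnectedList in
 *  discovery order.
 */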
void Manager::GetConnectedGraph(
  std::map< int, bool >* pConnected,
  std::vector< const Hypothesis* >* pConnectedList) const
{
  std::map < int, bool >& connected = *pConnected;
  std::vector< const Hypothesis *>& connectedList = *pConnectedList;

  // start with the ones in the final stack
  const std::vector < HypothesisStack* > &hypoStackColl = m_search->GetHypothesisStacks();
  const HypothesisStack &finalStack = *hypoStackColl.back();
  HypothesisStack::const_iterator iterHypo;
  for (iterHypo = finalStack.begin() ; iterHypo != finalStack.end() ; ++iterHypo) {
    const Hypothesis *hypo = *iterHypo;
    connected[ hypo->GetId() ] = true;
    connectedList.push_back( hypo );
  }

  // move back from known connected hypotheses
  for(size_t i=0; i<connectedList.size(); i++) {
    const Hypothesis *hypo = connectedList[i];

    // add back pointer
    const Hypothesis *prevHypo = hypo->GetPrevHypo();
    if (prevHypo && prevHypo->GetId() > 0 // don't add empty hypothesis
        && connected.find( prevHypo->GetId() ) == connected.end()) { // don't add already added
      connected[ prevHypo->GetId() ] = true;
      connectedList.push_back( prevHypo );
    }

    // add arcs
    const ArcList *arcList = hypo->GetArcList();
    if (arcList != NULL) {
      ArcList::const_iterator iterArcList;
      for (iterArcList = arcList->begin() ; iterArcList != arcList->end() ; ++iterArcList) {
        const Hypothesis *loserHypo = *iterArcList;
        if (connected.find( loserHypo->GetId() ) == connected.end()) { // don't add already added
          connected[ loserHypo->GetId() ] = true;
          connectedList.push_back( loserHypo );
        }
      }
    }
  }
}
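
/** Like GetConnectedGraph(), but walks back only through winner back
 *  pointers and the predecessors of recombined (loser) arcs; used by
 *  GetForwardBackwardSearchGraph() below.
 */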
void Manager::GetWinnerConnectedGraph(
  std::map< int, bool >* pConnected,
  std::vector< const Hypothesis* >* pConnectedList) const
{
  std::map < int, bool >& connected = *pConnected;
  std::vector< const Hypothesis *>& connectedList = *pConnectedList;

  // start with the ones in the final stack
  const std::vector < HypothesisStack* > &hypoStackColl = m_search->GetHypothesisStacks();
  const HypothesisStack &finalStack = *hypoStackColl.back();
  HypothesisStack::const_iterator iterHypo;
  for (iterHypo = finalStack.begin() ; iterHypo != finalStack.end() ; ++iterHypo) {
    const Hypothesis *hypo = *iterHypo;
    connected[ hypo->GetId() ] = true;
    connectedList.push_back( hypo );
  }

  // move back from known connected hypotheses
  for(size_t i=0; i<connectedList.size(); i++) {
    const Hypothesis *hypo = connectedList[i];

    // add back pointer
    const Hypothesis *prevHypo = hypo->GetPrevHypo();
    if (prevHypo->GetId() > 0 // don't add empty hypothesis
        && connected.find( prevHypo->GetId() ) == connected.end()) { // don't add already added
      connected[ prevHypo->GetId() ] = true;
      connectedList.push_back( prevHypo );
    }

    // add arcs
    const ArcList *arcList = hypo->GetArcList();
    if (arcList != NULL) {
      ArcList::const_iterator iterArcList;
      for (iterArcList = arcList->begin() ; iterArcList != arcList->end() ; ++iterArcList) {
        const Hypothesis *loserHypo = *iterArcList;
        if (connected.find( loserHypo->GetPrevHypo()->GetId() ) == connected.end() && loserHypo->GetPrevHypo()->GetId() > 0) { // don't add already added & don't add hyp 0
          connected[ loserHypo->GetPrevHypo()->GetId() ] = true;
          connectedList.push_back( loserHypo->GetPrevHypo() );
        }
      }
    }
  }
}

#ifdef HAVE_PROTOBUF

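/** Fill a protobuf hypergraph edge from a hypothesis: the rule is the
 *  hypothesis' target phrase, and the feature values are the negated
 *  per-edge deltas of the cumulative score breakdown.
 */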
void SerializeEdgeInfo(const Hypothesis* hypo, hgmert::Hypergraph_Edge* edge)
{
  hgmert::Rule* rule = edge->mutable_rule();
  hypo->GetCurrTargetPhrase().WriteToRulePB(rule);
  const Hypothesis* prev = hypo->GetPrevHypo();
  // if the feature values are empty, they default to 0
  if (!prev) return;
  // score breakdown is an aggregate (forward) quantity, but the exported
  // graph object just wants the feature values on the edges
  const ScoreComponentCollection& scores = hypo->GetScoreBreakdown();
  const ScoreComponentCollection& pscores = prev->GetScoreBreakdown();
  for (unsigned int i = 0; i < scores.size(); ++i)
    edge->add_feature_values((scores[i] - pscores[i]) * -1.0);
}
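
/** Return the protobuf hypergraph node for a hypothesis, creating it and
 *  recording its index in *i2hgnode on first encounter; the node index is
 *  passed back through *hgNodeIdx.
 */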
hgmert::Hypergraph_Node* GetHGNode(
  const Hypothesis* hypo,
  std::map< int, int>* i2hgnode,
  hgmert::Hypergraph* hg,
  int* hgNodeIdx)
{
  hgmert::Hypergraph_Node* hgnode;
  std::map < int, int >::iterator idxi = i2hgnode->find(hypo->GetId());
  if (idxi == i2hgnode->end()) {
    *hgNodeIdx = ((*i2hgnode)[hypo->GetId()] = hg->nodes_size());
    hgnode = hg->add_nodes();
  } else {
    *hgNodeIdx = idxi->second;
    hgnode = hg->mutable_nodes(*hgNodeIdx);
  }
  return hgnode;
}
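
/** Export the connected part of the search graph as a protobuf hypergraph
 *  (hgmert format). Node 0 is the goal node and node 1 the source node;
 *  every complete hypothesis is additionally linked to the goal by a glue
 *  edge carrying the rule "[X,1]".
 */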
void Manager::SerializeSearchGraphPB(
  long translationId,
  std::ostream& outputStream) const
{
  using namespace hgmert;
  std::map < int, bool > connected;
  std::map < int, int > i2hgnode;
  std::vector< const Hypothesis *> connectedList;
  GetConnectedGraph(&connected, &connectedList);
  connected[ 0 ] = true;
  Hypergraph hg;
  hg.set_is_sorted(false);
  int num_feats = (*m_search->GetHypothesisStacks().back()->begin())->GetScoreBreakdown().size();
  hg.set_num_features(num_feats);
  StaticData::Instance().GetScoreIndexManager().SerializeFeatureNamesToPB(&hg);
  Hypergraph_Node* goal = hg.add_nodes(); // idx=0 goal node must have idx 0
  Hypergraph_Node* source = hg.add_nodes(); // idx=1
  i2hgnode[-1] = 1; // source node
  const std::vector < HypothesisStack* > &hypoStackColl = m_search->GetHypothesisStacks();
  const HypothesisStack &finalStack = *hypoStackColl.back();
  for (std::vector < HypothesisStack* >::const_iterator iterStack = hypoStackColl.begin();
       iterStack != hypoStackColl.end() ; ++iterStack) {
    const HypothesisStack &stack = **iterStack;
    HypothesisStack::const_iterator iterHypo;

    for (iterHypo = stack.begin() ; iterHypo != stack.end() ; ++iterHypo) {
      const Hypothesis *hypo = *iterHypo;
      bool is_goal = hypo->GetWordsBitmap().IsComplete();
      if (connected.find( hypo->GetId() ) != connected.end()) {
        int headNodeIdx;
        Hypergraph_Node* headNode = GetHGNode(hypo, &i2hgnode, &hg, &headNodeIdx);
        if (is_goal) {
          Hypergraph_Edge* ge = hg.add_edges();
          ge->set_head_node(0); // goal
          ge->add_tail_nodes(headNodeIdx);
          ge->mutable_rule()->add_trg_words("[X,1]");
        }
        Hypergraph_Edge* edge = hg.add_edges();
        SerializeEdgeInfo(hypo, edge);
        edge->set_head_node(headNodeIdx);
        const Hypothesis* prev = hypo->GetPrevHypo();
        int tailNodeIdx = 1; // source
        if (prev)
          tailNodeIdx = i2hgnode.find(prev->GetId())->second;
        edge->add_tail_nodes(tailNodeIdx);

        const ArcList *arcList = hypo->GetArcList();
        if (arcList != NULL) {
          ArcList::const_iterator iterArcList;
          for (iterArcList = arcList->begin() ; iterArcList != arcList->end() ; ++iterArcList) {
            const Hypothesis *loserHypo = *iterArcList;
            UTIL_THROW_IF2(!connected[loserHypo->GetId()],
                           "Hypothesis " << loserHypo->GetId() << " is not connected");
            Hypergraph_Edge* edge = hg.add_edges();
            SerializeEdgeInfo(loserHypo, edge);
            edge->set_head_node(headNodeIdx);
            tailNodeIdx = i2hgnode.find(loserHypo->GetPrevHypo()->GetId())->second;
            edge->add_tail_nodes(tailNodeIdx);
          }
        } // end if arcList != NULL
      } // end if connected
    } // end for iterHypo
  } // end for iterStack
  hg.SerializeToOstream(&outputStream);
}
#endif
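
/** Write the full search graph for this sentence to the given stream, one
 *  node per line, via OutputSearchNode().
 */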
void Manager::OutputSearchGraph(long translationId, std::ostream &outputSearchGraphStream) const
{
  vector<SearchGraphNode> searchGraph;
  GetSearchGraph(searchGraph);
  for (size_t i = 0; i < searchGraph.size(); ++i) {
    OutputSearchNode(translationId,outputSearchGraphStream,searchGraph[i]);
  }
}
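
/** Compute, for every hypothesis connected to a full translation, the score
 *  of the best path through it: hypo->GetScore() accumulates the backward
 *  (prefix) score, a sweep over the stacks from last to first computes the
 *  best forward (suffix) score, and their sum is pushed onto *pFwdBwdScores,
 *  one entry per element of *pConnectedList, in the same order. The outgoing
 *  hypotheses of each node are collected in *pOutgoingHyps.
 */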
void Manager::GetForwardBackwardSearchGraph(std::map< int, bool >* pConnected,
    std::vector< const Hypothesis* >* pConnectedList, std::map < const Hypothesis*, set< const Hypothesis* > >* pOutgoingHyps, vector< float>* pFwdBwdScores) const
{
  std::map < int, bool > &connected = *pConnected;
  std::vector< const Hypothesis *>& connectedList = *pConnectedList;
  std::map < int, int > forward;
  std::map < int, double > forwardScore;

  std::map < const Hypothesis*, set <const Hypothesis*> > & outgoingHyps = *pOutgoingHyps;
  vector< float> & estimatedScores = *pFwdBwdScores;

  // *** find connected hypotheses ***
  GetWinnerConnectedGraph(&connected, &connectedList);

  // *** compute best forward path for each hypothesis ***

  // forward cost of hypotheses on final stack is 0
  const std::vector < HypothesisStack* > &hypoStackColl = m_search->GetHypothesisStacks();
  const HypothesisStack &finalStack = *hypoStackColl.back();
  HypothesisStack::const_iterator iterHypo;
  for (iterHypo = finalStack.begin() ; iterHypo != finalStack.end() ; ++iterHypo) {
    const Hypothesis *hypo = *iterHypo;
    forwardScore[ hypo->GetId() ] = 0.0f;
    forward[ hypo->GetId() ] = -1;
  }

  // compete for best forward score of previous hypothesis
  std::vector < HypothesisStack* >::const_iterator iterStack;
  for (iterStack = --hypoStackColl.end() ; iterStack != hypoStackColl.begin() ; --iterStack) {
    const HypothesisStack &stack = **iterStack;
    HypothesisStack::const_iterator iterHypo;
    for (iterHypo = stack.begin() ; iterHypo != stack.end() ; ++iterHypo) {
      const Hypothesis *hypo = *iterHypo;
      if (connected.find( hypo->GetId() ) != connected.end()) {
        // make a play for previous hypothesis
        const Hypothesis *prevHypo = hypo->GetPrevHypo();
        double fscore = forwardScore[ hypo->GetId() ] +
                        hypo->GetScore() - prevHypo->GetScore();
        if (forwardScore.find( prevHypo->GetId() ) == forwardScore.end()
            || forwardScore.find( prevHypo->GetId() )->second < fscore) {
          forwardScore[ prevHypo->GetId() ] = fscore;
          forward[ prevHypo->GetId() ] = hypo->GetId();
        }
        // store outgoing info
        outgoingHyps[prevHypo].insert(hypo);

        // all arcs also make a play
        const ArcList *arcList = hypo->GetArcList();
        if (arcList != NULL) {
          ArcList::const_iterator iterArcList;
          for (iterArcList = arcList->begin() ; iterArcList != arcList->end() ; ++iterArcList) {
            const Hypothesis *loserHypo = *iterArcList;
            // make a play
            const Hypothesis *loserPrevHypo = loserHypo->GetPrevHypo();
            double fscore = forwardScore[ hypo->GetId() ] +
                            loserHypo->GetScore() - loserPrevHypo->GetScore();
            if (forwardScore.find( loserPrevHypo->GetId() ) == forwardScore.end()
                || forwardScore.find( loserPrevHypo->GetId() )->second < fscore) {
              forwardScore[ loserPrevHypo->GetId() ] = fscore;
              forward[ loserPrevHypo->GetId() ] = loserHypo->GetId();
            }
            // store outgoing info
            outgoingHyps[loserPrevHypo].insert(hypo);
          } // end for arc list
        } // end if arc list != NULL
      } // end if hypo connected
    } // end for hypo
  } // end for stack

  for (std::vector< const Hypothesis *>::iterator it = connectedList.begin(); it != connectedList.end(); ++it) {
    float estimatedScore = (*it)->GetScore() + forwardScore[(*it)->GetId()];
    estimatedScores.push_back(estimatedScore);
  }
}

const Hypothesis *Manager::GetBestHypothesis() const
{
  return m_search->GetBestHypothesis();
}

int Manager::GetNextHypoId()
{
  return m_hypoId++;
}

void Manager::ResetSentenceStats(const InputType& source)
{
  m_sentenceStats = std::auto_ptr<SentenceStats>(new SentenceStats(source));
}

SentenceStats& Manager::GetSentenceStats() const
{
  return *m_sentenceStats;
}

}