Mirror of https://github.com/moses-smt/mosesdecoder.git, synced 2024-12-25 12:52:29 +03:00
parsing doesn't depend on input sentence anymore
commit afcf65098f
parent 0596c3e9e4
@@ -221,16 +221,19 @@ InputPath &ChartParser::GetInputPath(size_t startPos, size_t endPos)
   CHECK(offset < m_targetPhrasesfromPt[startPos].size());
   return *m_targetPhrasesfromPt[startPos][offset];
 }
 
+/*
 const Sentence &ChartParser::GetSentence() const {
   const Sentence &sentence = static_cast<const Sentence&>(m_source);
   return sentence;
 }
 
+*/
+size_t ChartParser::GetSize() const
+{
+  return m_source.GetSize();
+}
 
 
 long ChartParser::GetTranslationId() const
 {
   return m_source.GetTranslationId();
 }
 } // namespace Moses
@@ -62,7 +62,8 @@ public:
   void Create(const WordsRange &range, ChartParserCallback &to);
 
   //! the sentence being decoded
-  const Sentence &GetSentence() const;
+  //const Sentence &GetSentence() const;
   long GetTranslationId() const;
+  size_t GetSize() const;
   const InputPath &GetInputPath(size_t startPos, size_t endPos) const;
 
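The header change above is the core of the refactoring: ChartParser now exposes GetSize(), GetTranslationId() and GetInputPath(), so downstream rule lookup code never has to reach for the underlying Sentence. The following self-contained C++ sketch only illustrates that accessor surface; InputPath here is a stand-in holding a single word, the class stores width-1 spans only, and none of it is the real Moses implementation.

// Hypothetical, simplified stand-ins (not the real Moses classes) showing the
// accessor surface that rule lookup code relies on after this change.
#include <cstddef>
#include <stdexcept>
#include <string>
#include <vector>

struct InputPath {                // stand-in: data attached to one source span
  std::string lastWord;
};

class ChartParser {               // stand-in, keeping only width-1 spans
public:
  ChartParser(long translationId, const std::vector<std::string> &words)
    : m_translationId(translationId) {
    for (std::size_t i = 0; i < words.size(); ++i) {
      InputPath path;
      path.lastWord = words[i];
      m_paths.push_back(path);
    }
  }
  std::size_t GetSize() const { return m_paths.size(); }
  long GetTranslationId() const { return m_translationId; }
  const InputPath &GetInputPath(std::size_t startPos, std::size_t endPos) const {
    // the real parser indexes arbitrary (startPos, endPos) spans
    if (startPos != endPos || startPos >= m_paths.size())
      throw std::out_of_range("sketch only stores width-1 spans");
    return m_paths[startPos];
  }
private:
  long m_translationId;
  std::vector<InputPath> m_paths;
};

Nothing in that surface assumes the input is a plain Sentence, which is what the commit title refers to.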
@@ -3,10 +3,11 @@
 
 namespace Moses
 {
+/*
 const Sentence &ChartRuleLookupManager::GetSentence() const
 {
   return m_parser.GetSentence();
 }
 
+*/
 } // namespace Moses
 
@@ -53,7 +53,7 @@ public:
 
   const ChartParser &GetParser() const
   { return m_parser; }
-  const Sentence &GetSentence() const;
+  //const Sentence &GetSentence() const;
 
   const ChartCellLabel &GetSourceAt(size_t at) const {
     return m_cellCollection.GetSourceWordLabel(at);
@@ -40,11 +40,10 @@ ChartRuleLookupManagerMemoryPerSentence::ChartRuleLookupManagerMemoryPerSentence
 {
   CHECK(m_dottedRuleColls.size() == 0);
 
-  const Sentence &src = parser.GetSentence();
-  size_t sourceSize = src.GetSize();
+  size_t sourceSize = parser.GetSize();
   m_dottedRuleColls.resize(sourceSize);
 
-  const PhraseDictionaryNodeMemory &rootNode = m_ruleTable.GetRootNode(src);
+  const PhraseDictionaryNodeMemory &rootNode = m_ruleTable.GetRootNode(parser.GetTranslationId());
 
   for (size_t ind = 0; ind < m_dottedRuleColls.size(); ++ind) {
 #ifdef USE_BOOST_POOL
@@ -177,8 +176,7 @@ void ChartRuleLookupManagerMemoryPerSentence::ExtendPartialRuleApplication(
   DottedRuleColl & dottedRuleColl)
 {
   // source non-terminal labels for the remainder
-  const NonTerminalSet &sourceNonTerms =
-    GetSentence().GetLabelSet(startPos, endPos);
+  const NonTerminalSet &sourceNonTerms = GetParser().GetInputPath(startPos, endPos).GetNonTerminalSet();
 
   // target non-terminal labels for the remainder
   const ChartCellLabelSet &targetNonTerms = GetTargetLabelSet(startPos, endPos);
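This call site (and the two ChartRuleLookupManagerOnDisk call sites further down) replaces Sentence::GetLabelSet() with GetParser().GetInputPath(startPos, endPos).GetNonTerminalSet(), i.e. per-span non-terminal labels now come from the parser's input paths. A hypothetical, self-contained sketch of that pattern follows; SpanPath, CountSourceLabels and the string-based NonTerminalSet are stand-ins, not the Moses API.

// Hypothetical sketch: span labels are fetched through a parser-like object
// rather than from the Sentence.
#include <cstddef>
#include <set>
#include <string>

typedef std::set<std::string> NonTerminalSet;   // stand-in for a set of Words

struct SpanPath {                               // stand-in for InputPath
  NonTerminalSet m_nonTerms;
  const NonTerminalSet &GetNonTerminalSet() const { return m_nonTerms; }
};

// Any "parser" exposing GetInputPath(start, end) is enough; the Sentence is
// no longer part of the signature.
template <class ParserLike>
std::size_t CountSourceLabels(const ParserLike &parser,
                              std::size_t startPos, std::size_t endPos) {
  const NonTerminalSet &sourceNonTerms =
      parser.GetInputPath(startPos, endPos).GetNonTerminalSet();
  return sourceNonTerms.size();
}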
@@ -50,8 +50,7 @@ ChartRuleLookupManagerOnDisk::ChartRuleLookupManagerOnDisk(
 {
   CHECK(m_expandableDottedRuleListVec.size() == 0);
 
-  const Sentence &sentence = parser.GetSentence();
-  size_t sourceSize = sentence.GetSize();
+  size_t sourceSize = parser.GetSize();
   m_expandableDottedRuleListVec.resize(sourceSize);
 
   for (size_t ind = 0; ind < m_expandableDottedRuleListVec.size(); ++ind) {
@@ -146,7 +145,7 @@ void ChartRuleLookupManagerOnDisk::GetChartRuleCollection(
   // ,&defaultTargetNonTerm = staticData.GetOutputDefaultNonTerminal();
 
   // go through each SOURCE lhs
-  const NonTerminalSet &sourceLHSSet = GetSentence().GetLabelSet(startPos, endPos);
+  const NonTerminalSet &sourceLHSSet = GetParser().GetInputPath(startPos, endPos).GetNonTerminalSet();
 
   NonTerminalSet::const_iterator iterSourceLHS;
   for (iterSourceLHS = sourceLHSSet.begin(); iterSourceLHS != sourceLHSSet.end(); ++iterSourceLHS) {
@@ -217,7 +216,7 @@ void ChartRuleLookupManagerOnDisk::GetChartRuleCollection(
       const OnDiskPt::PhraseNode &prevNode = prevDottedRule.GetLastNode();
 
       //get node for each source LHS
-      const NonTerminalSet &lhsSet = GetSentence().GetLabelSet(range.GetStartPos(), range.GetEndPos());
+      const NonTerminalSet &lhsSet = GetParser().GetInputPath(range.GetStartPos(), range.GetEndPos()).GetNonTerminalSet();
       NonTerminalSet::const_iterator iterLabelSet;
       for (iterLabelSet = lhsSet.begin(); iterLabelSet != lhsSet.end(); ++iterLabelSet) {
         const Word &sourceLHS = *iterLabelSet;
@@ -319,10 +319,9 @@ void PhraseDictionaryFuzzyMatch::CleanUpAfterSentenceProcessing(const InputType
   m_collection.erase(source.GetTranslationId());
 }
 
-const PhraseDictionaryNodeMemory &PhraseDictionaryFuzzyMatch::GetRootNode(const InputType &source) const
+const PhraseDictionaryNodeMemory &PhraseDictionaryFuzzyMatch::GetRootNode(long translationId) const
 {
-  long transId = source.GetTranslationId();
-  std::map<long, PhraseDictionaryNodeMemory>::const_iterator iter = m_collection.find(transId);
+  std::map<long, PhraseDictionaryNodeMemory>::const_iterator iter = m_collection.find(translationId);
   CHECK(iter != m_collection.end());
   return iter->second;
 }
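GetRootNode() now takes the translation id directly, matching the per-sentence map that PhraseDictionaryFuzzyMatch already keeps. A hypothetical, self-contained sketch of that keyed-lookup pattern follows; PerSentenceRoots and the empty PhraseDictionaryNodeMemory are stand-ins, and assert() stands in for Moses' CHECK().

// Hypothetical sketch: per-sentence rule-table roots live in a map keyed by
// translation id, so lookup and cleanup need only the id, not the InputType.
#include <cassert>
#include <map>

struct PhraseDictionaryNodeMemory { };          // stand-in for the real node

class PerSentenceRoots {
public:
  void Add(long translationId, const PhraseDictionaryNodeMemory &node) {
    m_collection[translationId] = node;
  }
  const PhraseDictionaryNodeMemory &GetRootNode(long translationId) const {
    std::map<long, PhraseDictionaryNodeMemory>::const_iterator iter =
        m_collection.find(translationId);
    assert(iter != m_collection.end());         // CHECK() in the real code
    return iter->second;
  }
  void CleanUpAfterSentenceProcessing(long translationId) {
    m_collection.erase(translationId);
  }
private:
  std::map<long, PhraseDictionaryNodeMemory> m_collection;
};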
@@ -45,7 +45,7 @@ public:
   ~PhraseDictionaryFuzzyMatch();
   void Load();
 
-  const PhraseDictionaryNodeMemory &GetRootNode(const InputType &source) const;
+  const PhraseDictionaryNodeMemory &GetRootNode(long translationId) const;
 
   ChartRuleLookupManager *CreateRuleLookupManager(
     const ChartParser &parser,
@@ -91,11 +91,9 @@ void Scope3Parser::Init()
 {
   InitRuleApplicationVector();
 
-  const Sentence &sentence = GetSentence();
-
   // Build a map from Words to index-sets.
   SentenceMap sentMap;
-  FillSentenceMap(sentence, sentMap);
+  FillSentenceMap(sentMap);
 
   // Build a trie containing 'elastic' application contexts
   const UTrieNode &rootNode = m_ruleTable.GetRootNode();
@@ -109,12 +107,12 @@ void Scope3Parser::Init()
   m_varSpanTrie = vstBuilder.Build(*art);
 
   // Fill each cell with a list of pointers to relevant ART nodes.
-  AddRulesToCells(*art, std::make_pair<int, int>(-1, -1), sentence.GetSize()-1, 0);
+  AddRulesToCells(*art, std::make_pair<int, int>(-1, -1), GetParser().GetSize()-1, 0);
 }
 
 void Scope3Parser::InitRuleApplicationVector()
 {
-  const size_t sourceSize = GetSentence().GetSize();
+  const size_t sourceSize = GetParser().GetSize();
   m_ruleApplications.resize(sourceSize);
   for (size_t start = 0; start < sourceSize; ++start) {
     size_t maxSpan = sourceSize-start+1;
@@ -122,11 +120,11 @@ void Scope3Parser::InitRuleApplicationVector()
   }
 }
 
-void Scope3Parser::FillSentenceMap(
-  const Sentence &sent, SentenceMap &sentMap)
+void Scope3Parser::FillSentenceMap(SentenceMap &sentMap)
 {
-  for (size_t i = 0; i < sent.GetSize(); ++i) {
-    sentMap[sent.GetWord(i)].push_back(i);
+  for (size_t i = 0; i < GetParser().GetSize(); ++i) {
+    const Word &word = GetParser().GetInputPath(i, i).GetLastWord();
+    sentMap[word].push_back(i);
   }
 }
 
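FillSentenceMap() now reads each position's word from the width-1 InputPath instead of taking a Sentence argument. A hypothetical sketch of that loop follows, with stand-in types (WordPath for InputPath, plain strings for Word, a templated parser parameter); it is not the Moses implementation.

// Hypothetical sketch: build a word -> positions map from an object that
// offers GetSize() and GetInputPath(i, i), with no Sentence involved.
#include <cstddef>
#include <map>
#include <string>
#include <vector>

typedef std::map<std::string, std::vector<std::size_t> > SentenceMap;

struct WordPath {                      // stand-in for InputPath
  std::string lastWord;
  const std::string &GetLastWord() const { return lastWord; }
};

template <class ParserLike>
void FillSentenceMap(const ParserLike &parser, SentenceMap &sentMap) {
  for (std::size_t i = 0; i < parser.GetSize(); ++i) {
    const std::string &word = parser.GetInputPath(i, i).GetLastWord();
    sentMap[word].push_back(i);        // map each surface word to its positions
  }
}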
@@ -80,7 +80,7 @@ private:
 
   void Init();
   void InitRuleApplicationVector();
-  void FillSentenceMap(const Sentence &, SentenceMap &);
+  void FillSentenceMap(SentenceMap &);
   void AddRulesToCells(const ApplicableRuleTrie &, std::pair<int, int>, int,
                        int);
 