Ulrich Germann 2015-10-19 16:26:05 +01:00
commit 08b8157042
61 changed files with 249 additions and 252 deletions

View File

@ -249,7 +249,7 @@ size_t PhraseNode::ReadChild(Word &wordFound, uint64_t &childFilePos, const char
return memRead;
}
TargetPhraseCollection::shared_ptr
PhraseNode::
GetTargetPhraseCollection(size_t tableLimit, OnDiskWrapper &onDiskWrapper) const
{

View File

@ -93,10 +93,10 @@ public:
const PhraseNode *GetChild(const Word &wordSought, OnDiskWrapper &onDiskWrapper) const;
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection(size_t tableLimit,
OnDiskWrapper &onDiskWrapper) const;
void AddCounts(const std::vector<float> &counts) {
m_counts = counts;
}

View File

@ -127,9 +127,9 @@ Moses::TargetPhraseCollection::shared_ptr TargetPhraseCollection::ConvertToMoses
CollType::const_iterator iter;
for (iter = m_coll.begin(); iter != m_coll.end(); ++iter) {
const TargetPhrase &tp = **iter;
Moses::TargetPhrase *mosesPhrase
= tp.ConvertToMoses(inputFactors, outputFactors, vocab,
phraseDict, weightT, isSyntax);
/*
// debugging output

View File

@ -54,7 +54,7 @@ protected:
public:
typedef boost::shared_ptr<TargetPhraseCollection const> shared_const_ptr;
typedef boost::shared_ptr<TargetPhraseCollection> shared_ptr;
static size_t s_sortScoreInd;
TargetPhraseCollection();
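For orientation: the shared_ptr and shared_const_ptr typedefs shown above are the aliases behind every TargetPhraseCollection::shared_ptr that appears in the remaining hunks. A minimal sketch of how such a nested Boost alias is typically held by callers; the class body and the caller below are illustrative stand-ins, not code from this commit:

#include <boost/shared_ptr.hpp>

struct TargetPhraseCollection {   // stand-in for the Moses class
  typedef boost::shared_ptr<TargetPhraseCollection> shared_ptr;
  typedef boost::shared_ptr<TargetPhraseCollection const> shared_const_ptr;
};

int main() {
  // Ownership is shared through the alias instead of tracked via raw pointers;
  // the collection is released when the last holder goes out of scope.
  TargetPhraseCollection::shared_ptr tpc(new TargetPhraseCollection);
  TargetPhraseCollection::shared_const_ptr view = tpc;  // read-only handle
  return view ? 0 : 1;
}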

View File

@ -116,7 +116,7 @@ typedef
boost::unordered_set<TargetPhrase*,PhrasePtrHasher,PhrasePtrComparator> PhraseSet;
TargetPhraseCollection::shared_ptr
PhraseDictionaryInterpolated::GetTargetPhraseCollection(const Phrase& src) const
{

View File

@ -38,7 +38,7 @@ class Sentence;
class ChartCellCollectionBase;
class Word;
class Phrase;
// class TargetPhraseCollection;
class DecodeGraph;
class ChartParserUnknown

View File

@ -115,11 +115,11 @@ void ChartTranslationOptionList::Add(const TargetPhraseCollection &tpc,
}
}
void
ChartTranslationOptionList::
AddPhraseOOV(TargetPhrase &phrase,
std::list<TargetPhraseCollection::shared_ptr > &waste_memory,
const WordsRange &range)
{
TargetPhraseCollection::shared_ptr tpc(new TargetPhraseCollection);
tpc->Add(&phrase);

View File

@ -147,8 +147,8 @@ void DecodeStepTranslation::ProcessInitialTranslationLEGACY(
const size_t tableLimit = phraseDictionary->GetTableLimit();
const WordsRange wordsRange(startPos, endPos);
TargetPhraseCollectionWithSourcePhrase::shared_ptr phraseColl
= phraseDictionary->GetTargetPhraseCollectionLEGACY(source,wordsRange);
if (phraseColl != NULL) {
IFVERBOSE(3) {
@ -239,7 +239,7 @@ ProcessLEGACY(TranslationOption const& in,
size_t const tableLimit = pdict->GetTableLimit();
TargetPhraseCollectionWithSourcePhrase::shared_ptr phraseColl
= pdict->GetTargetPhraseCollectionLEGACY(toc->GetSource(),srcRange);
if (phraseColl != NULL) {
TargetPhraseCollection::const_iterator iterTargetPhrase, iterEnd;

View File

@ -314,7 +314,7 @@ struct CompareHypothesisTotalScore {
ObjectPool<Hypothesis> &pool = Hypothesis::GetObjectPool(); \
pool.freeObject(hypo); \
} \
#else
#define FREEHYPO(hypo) delete hypo
#endif

View File

@ -11,12 +11,12 @@ using namespace std;
namespace Moses
{
InputPath::
InputPath(ttaskwptr const theTask,
Phrase const& phrase,
NonTerminalSet const& sourceNonTerms,
WordsRange const& range, InputPath const *prevNode,
const ScorePair *inputScore)
: ttask(theTask)
, m_prevPath(prevNode)
, m_phrase(phrase)
, m_range(range)
@ -36,7 +36,7 @@ InputPath(ttaskwptr const theTask,
InputPath::~InputPath()
{
// std::cerr << "Deconstructing InputPath" << std::endl;
@ -80,14 +80,14 @@ GetPtNode(const PhraseDictionary &phraseDictionary) const
return iter->second.second;
}
void
InputPath::
SetTargetPhrases(const PhraseDictionary &phraseDictionary,
TargetPhraseCollection::shared_ptr const& targetPhrases,
const void *ptNode)
{
std::pair<TargetPhraseCollection::shared_ptr, const void*>
value(targetPhrases, ptNode);
m_targetPhrases[&phraseDictionary] = value;
}

View File

@ -33,10 +33,10 @@ class InputPath
public:
typedef std::pair<TargetPhraseCollection::shared_ptr, const void*>
TPCollStoreEntry;
typedef std::map<const PhraseDictionary*, TPCollStoreEntry>
TargetPhrases;
public:
@ -65,12 +65,12 @@ public:
, m_nextNode(NOT_FOUND) {
}
InputPath(ttaskwptr const ttask,
Phrase const& phrase,
NonTerminalSet const& sourceNonTerms,
WordsRange const& range,
InputPath const* prevNode,
ScorePair const* inputScore);
~InputPath();
@ -101,9 +101,9 @@ public:
m_nextNode = nextNode;
}
void
SetTargetPhrases(const PhraseDictionary &phraseDictionary,
TargetPhraseCollection::shared_ptr const& targetPhrases,
const void *ptNode);
TargetPhraseCollection::shared_ptr

View File

@ -149,7 +149,7 @@ PDTAimp::GetTargetPhraseCollection(Phrase const &src) const
if(useCache) piter.first->second = ret;
m_tgtColls.push_back(ret);
}
return ret;
}
@ -387,8 +387,8 @@ void PDTAimp::CacheSource(ConfusionNet const& src)
//std::cerr << i->first.first << "-" << i->first.second << ": " << targetPhrase << std::endl;
}
TargetPhraseCollectionWithSourcePhrase::shared_ptr
rv = PruneTargetCandidates(tCands, costs, sourcePhrases);
if(rv->IsEmpty())
rv.reset();
@ -430,7 +430,7 @@ void PDTAimp::CreateTargetPhrase(TargetPhrase& targetPhrase,
targetPhrase.EvaluateInIsolation(*srcPtr, m_obj->GetFeaturesToApply());
}
TargetPhraseCollectionWithSourcePhrase::shared_ptr
PDTAimp::PruneTargetCandidates
(const std::vector<TargetPhrase> & tCands,
std::vector<std::pair<float,size_t> >& costs,

View File

@ -28,8 +28,8 @@ void GlueRuleSynthesizer::SynthesizeRule(const Forest::Hyperedge &e)
HyperPath source;
SynthesizeHyperPath(e, source);
TargetPhrase *tp = SynthesizeTargetPhrase(e);
TargetPhraseCollection::shared_ptr tpc
= GetOrCreateTargetPhraseCollection(m_hyperTree, source);
tpc->Add(tp);
}

View File

@ -47,12 +47,12 @@ public:
const Node *GetChild(const HyperPath::NodeSeq &) const;
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection() const {
return m_targetPhraseCollection;
}
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection() {
return m_targetPhraseCollection;
}
@ -77,7 +77,7 @@ public:
private:
friend class HyperTreeCreator;
TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection(const HyperPath &);
Node &GetOrCreateNode(const HyperPath &);

View File

@ -130,8 +130,8 @@ bool HyperTreeLoader::Load(const std::vector<FactorType> &input,
ff.GetFeaturesToApply());
// Add rule to trie.
TargetPhraseCollection::shared_ptr phraseColl
= GetOrCreateTargetPhraseCollection(trie, sourceFragment);
phraseColl->Add(targetPhrase);
count++;

View File

@ -51,8 +51,8 @@ void RuleMatcherHyperTree<Callback>::EnumerateHyperedges(
m_hyperedge.label.inputWeight += (*p)->weight;
}
// Set the output hyperedge label's translation set pointer.
m_hyperedge.label.translations
= item.trieNode->GetTargetPhraseCollection();
// Pass the output hyperedge to the callback.
callback(m_hyperedge);
}

View File

@ -33,7 +33,7 @@ boost::shared_ptr<RuleTrie> OovHandler<RuleTrie>::SynthesizeRuleTrie(
Word *tgtLHS = SynthesizeTargetLhs(targetLhsStr);
TargetPhrase *tp = SynthesizeTargetPhrase(oov, *srcPhrase, *tgtLHS, prob);
TargetPhraseCollection::shared_ptr tpc;
tpc= GetOrCreateTargetPhraseCollection(*trie, *srcPhrase, *tp, NULL);
// TODO Check NULL is valid argument
tpc->Add(tp);
}

View File

@ -25,8 +25,8 @@ public:
, m_key(key)
, m_ranges(ranges) {}
void Search(const std::vector<int> &labels,
const TargetPhraseCollection::shared_ptr tpc,
Callback &callback) {
m_labels = &labels;
m_matchCB = &callback;

View File

@ -28,10 +28,10 @@ public:
private:
friend class RuleTrieCreator;
virtual TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection(const Phrase &source,
const TargetPhrase &target,
const Word *sourceLHS) = 0;
virtual void SortAndPrune(std::size_t) = 0;
};

View File

@ -86,11 +86,11 @@ const RuleTrieCYKPlus::Node *RuleTrieCYKPlus::Node::GetNonTerminalChild(
return (p == m_nonTermMap.end()) ? NULL : &p->second;
}
TargetPhraseCollection::shared_ptr
RuleTrieCYKPlus::
GetOrCreateTargetPhraseCollection(const Phrase &source,
const TargetPhrase &target,
const Word *sourceLHS)
{
Node &currNode = GetOrCreateNode(source, target, sourceLHS);
return currNode.GetTargetPhraseCollection();

View File

@ -50,12 +50,12 @@ public:
const Node *GetChild(const Word &sourceTerm) const;
const Node *GetNonTerminalChild(const Word &targetNonTerm) const;
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection() const {
return m_targetPhraseCollection;
}
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection() {
return m_targetPhraseCollection;
}

View File

@ -21,9 +21,9 @@ protected:
// Provide access to RuleTrie's private GetOrCreateTargetPhraseCollection
// function.
TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection
( RuleTrie &trie, const Phrase &source, const TargetPhrase &target,
const Word *sourceLHS) {
return trie.GetOrCreateTargetPhraseCollection(source, target, sourceLHS);
}

View File

@ -125,9 +125,9 @@ bool RuleTrieLoader::Load(const std::vector<FactorType> &input,
targetPhrase->GetScoreBreakdown().Assign(&ff, scoreVector);
targetPhrase->EvaluateInIsolation(sourcePhrase, ff.GetFeaturesToApply());
TargetPhraseCollection::shared_ptr phraseColl
= GetOrCreateTargetPhraseCollection(trie, sourcePhrase,
*targetPhrase, sourceLHS);
phraseColl->Add(targetPhrase);
// not implemented correctly in memory pt. just delete it for now

View File

@ -102,9 +102,9 @@ GetOrCreateTargetPhraseCollection(const TargetPhrase &target)
TargetPhraseCollection::shared_ptr
RuleTrieScope3::
GetOrCreateTargetPhraseCollection(const Phrase &source,
const TargetPhrase &target,
const Word *sourceLHS)
{
Node &currNode = GetOrCreateNode(source, target, sourceLHS);
return currNode.GetOrCreateTargetPhraseCollection(target);

View File

@ -35,7 +35,7 @@ public:
SymbolEqualityPred> TerminalMap;
typedef boost::unordered_map<std::vector<int>,
TargetPhraseCollection::shared_ptr> LabelMap;
~Node() {
delete m_gapNode;
@ -61,7 +61,7 @@ public:
Node *GetOrCreateNonTerminalChild(const Word &targetNonTerm);
TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection(const TargetPhrase &);
bool IsLeaf() const {
@ -106,10 +106,10 @@ public:
bool HasPreterminalRule(const Word &) const;
private:
TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection(const Phrase &source,
const TargetPhrase &target,
const Word *sourceLHS);
Node &GetOrCreateNode(const Phrase &source, const TargetPhrase &target,
const Word *sourceLHS);

View File

@ -17,8 +17,8 @@ void GlueRuleSynthesizer::SynthesizeRule(const InputTree::Node &node)
const Word &sourceLhs = node.pvertex.symbol;
boost::scoped_ptr<Phrase> sourceRhs(SynthesizeSourcePhrase(node));
TargetPhrase *tp = SynthesizeTargetPhrase(node, *sourceRhs);
TargetPhraseCollection::shared_ptr tpc
= GetOrCreateTargetPhraseCollection(m_ruleTrie, sourceLhs, *sourceRhs);
tpc->Add(tp);
}

View File

@ -76,15 +76,15 @@ GetOrCreateNonTerminalChild(const Word &targetNonTerm)
return &m_nonTermMap[targetNonTerm];
}
TargetPhraseCollection::shared_ptr
RuleTrie::
Node::
GetOrCreateTargetPhraseCollection(const Word &sourceLHS)
{
UTIL_THROW_IF2(!sourceLHS.IsNonTerminal(),
"Not a non-terminal: " << sourceLHS);
TargetPhraseCollection::shared_ptr& foo
= m_targetPhraseCollections[sourceLHS];
if (!foo) foo.reset(new TargetPhraseCollection);
return foo;
}
@ -110,7 +110,7 @@ GetNonTerminalChild(const Word &targetNonTerm) const
return (p == m_nonTermMap.end()) ? NULL : &p->second;
}
TargetPhraseCollection::shared_ptr
RuleTrie::
GetOrCreateTargetPhraseCollection
( const Word &sourceLHS, const Phrase &sourceRHS )

View File

@ -53,13 +53,13 @@ public:
const Node *GetChild(const Word &sourceTerm) const;
const Node *GetNonTerminalChild(const Word &targetNonTerm) const;
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection(const Word &sourceLHS) const {
TPCMap::const_iterator p = m_targetPhraseCollections.find(sourceLHS);
if (p != m_targetPhraseCollections.end())
return p->second;
else
return TargetPhraseCollection::shared_ptr();
}
// FIXME IS there any reason to distinguish these two for T2S?
@ -86,7 +86,7 @@ public:
private:
friend class RuleTrieCreator;
TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection
(const Word &sourceLHS, const Phrase &sourceRHS);

View File

@ -56,8 +56,8 @@ bool RuleTrieLoader::Load(const std::vector<FactorType> &input,
StringPiece line;
int noflags = double_conversion::StringToDoubleConverter::NO_FLAGS;
double_conversion::StringToDoubleConverter
converter(noflags, NAN, NAN, "inf", "nan");
while(true) {
try {
@ -134,8 +134,8 @@ bool RuleTrieLoader::Load(const std::vector<FactorType> &input,
targetPhrase->GetScoreBreakdown().Assign(&ff, scoreVector);
targetPhrase->EvaluateInIsolation(sourcePhrase, ff.GetFeaturesToApply());
TargetPhraseCollection::shared_ptr phraseColl
= GetOrCreateTargetPhraseCollection(trie, *sourceLHS, sourcePhrase);
phraseColl->Add(targetPhrase);
// not implemented correctly in memory pt. just delete it for now

View File

@ -167,8 +167,8 @@ void ChartRuleLookupManagerMemoryPerSentence::AddAndExtend(
size_t endPos)
{
TargetPhraseCollection::shared_ptr tpc
= node->GetTargetPhraseCollection();
// add target phrase collection (except if rule is empty or a unary non-terminal rule)
if (!tpc->IsEmpty() && (m_stackVec.empty() || endPos != m_unaryPos)) {
m_completedRules[endPos].Add(*tpc, m_stackVec, m_stackScores, *m_outColl);

View File

@ -238,15 +238,15 @@ void ChartRuleLookupManagerOnDisk::GetChartRuleCollection(
continue;
TargetPhraseCollection::shared_ptr targetPhraseCollection;
const OnDiskPt::PhraseNode *node
= prevNode.GetChild(*sourceLHSBerkeleyDb, m_dbWrapper);
if (node) {
uint64_t tpCollFilePos = node->GetValue();
std::map<uint64_t, TargetPhraseCollection::shared_ptr >::const_iterator iterCache = m_cache.find(tpCollFilePos);
if (iterCache == m_cache.end()) {
OnDiskPt::TargetPhraseCollection::shared_ptr tpcollBerkeleyDb
= node->GetTargetPhraseCollection(m_dictionary.GetTableLimit(), m_dbWrapper);
std::vector<float> weightT = staticData.GetWeights(&m_dictionary);
targetPhraseCollection

View File

@ -107,7 +107,7 @@ void PhraseDictionaryCompact::Load()
// }
// };
TargetPhraseCollection::shared_ptr
PhraseDictionaryCompact::GetTargetPhraseCollectionNonCacheLEGACY(const Phrase &sourcePhrase) const
{

View File

@ -61,7 +61,7 @@ ProvidesPrefixCheck() const
return false;
}
TargetPhraseCollection::shared_ptr
PhraseDictionary::
GetTargetPhraseCollectionLEGACY(const Phrase& src) const
{
@ -78,7 +78,7 @@ GetTargetPhraseCollectionLEGACY(const Phrase& src) const
if (iter == cache.end()) {
// not in cache, need to look up from phrase table
ret = GetTargetPhraseCollectionNonCacheLEGACY(src);
if (ret) { // make a copy
ret.reset(new TargetPhraseCollection(*ret));
}
cache[hash] = entry(ret, clock());
@ -95,7 +95,7 @@ GetTargetPhraseCollectionLEGACY(const Phrase& src) const
return ret;
}
TargetPhraseCollection::shared_ptr
PhraseDictionary::
GetTargetPhraseCollectionNonCacheLEGACY(const Phrase& src) const
{
@ -229,8 +229,8 @@ void PhraseDictionary::ReduceCache() const
cache.erase(iterRemove);
} else iter++;
}
VERBOSE(2,"Reduced persistent translation option cache in "
<< reduceCacheTime << " seconds." << std::endl);
}
CacheColl &PhraseDictionary::GetCache() const
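The hunks in this file show the legacy lookup path caching its results: the source phrase is hashed, a miss consults the table and stores a private copy stamped with clock(), and ReduceCache later prunes stale entries. A rough sketch of that pattern, assuming a plain std::map and a placeholder LookupFromTable helper (both illustrative, not the Moses implementation):

#include <boost/shared_ptr.hpp>
#include <cstddef>
#include <ctime>
#include <map>

struct TargetPhraseCollection {};
typedef boost::shared_ptr<TargetPhraseCollection> TPCPtr;
typedef std::pair<TPCPtr, std::clock_t> CacheEntry;   // phrases plus last-access time
typedef std::map<std::size_t, CacheEntry> CacheColl;  // keyed by source-phrase hash

// Illustrative cached lookup; LookupFromTable stands in for the real table query.
TPCPtr GetCached(CacheColl &cache, std::size_t hash,
                 TPCPtr (*LookupFromTable)(std::size_t)) {
  CacheColl::iterator it = cache.find(hash);
  if (it == cache.end()) {                 // not in cache, need to look up from phrase table
    TPCPtr ret = LookupFromTable(hash);
    if (ret)                               // make a copy, as in the hunk above
      ret.reset(new TargetPhraseCollection(*ret));
    cache[hash] = CacheEntry(ret, std::clock());
    return ret;
  }
  it->second.second = std::clock();        // refresh the timestamp used for pruning
  return it->second.first;
}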

View File

@ -114,13 +114,12 @@ public:
//! find list of translations that can translates src. Only for phrase input
public:
virtual TargetPhraseCollection::shared_ptr
GetTargetPhraseCollectionLEGACY(const Phrase& src) const;
virtual TargetPhraseCollection::shared_ptr
GetTargetPhraseCollectionLEGACY(ttasksptr const& ttask,
Phrase const& src) const {
return GetTargetPhraseCollectionLEGACY(src);
}
@ -129,8 +128,7 @@ public:
virtual void
GetTargetPhraseCollectionBatch
(ttasksptr const& ttask, InputPathList const& inputPathQueue) const {
GetTargetPhraseCollectionBatch(inputPathQueue);
}
@ -159,7 +157,7 @@ public:
// LEGACY
//! find list of translations that can translates a portion of src. Used by confusion network decoding
virtual
TargetPhraseCollectionWithSourcePhrase::shared_ptr
GetTargetPhraseCollectionLEGACY(InputType const& src,WordsRange const& range) const;
@ -188,10 +186,10 @@ protected:
mutable boost::scoped_ptr<CacheColl> m_cache;
#endif
virtual
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollectionNonCacheLEGACY(const Phrase& src) const;
void ReduceCache() const;
protected:

View File

@ -111,7 +111,7 @@ public:
void Load();
void Load(const std::string files);
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection(const Phrase &src) const;
TargetPhraseCollection::shared_ptr

View File

@ -98,18 +98,18 @@ TargetPhraseCollection::shared_ptr PhraseDictionaryGroup::GetTargetPhraseCollec
UTIL_THROW2("Don't call me without the translation task.");
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryGroup::
GetTargetPhraseCollectionLEGACY(const ttasksptr& ttask, const Phrase& src) const
{
TargetPhraseCollection::shared_ptr ret
= CreateTargetPhraseCollection(ttask, src);
ret->NthElement(m_tableLimit); // sort the phrases for pruning later
const_cast<PhraseDictionaryGroup*>(this)->CacheForCleanup(ret);
return ret;
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryGroup::
CreateTargetPhraseCollection(const ttasksptr& ttask, const Phrase& src) const
{
@ -123,8 +123,8 @@ CreateTargetPhraseCollection(const ttasksptr& ttask, const Phrase& src) const
// Collect phrases from this table
const PhraseDictionary& pd = *m_memberPDs[i];
TargetPhraseCollection::shared_ptr
ret_raw = pd.GetTargetPhraseCollectionLEGACY(ttask, src);
if (ret_raw != NULL) {
// Process each phrase from table
@ -178,8 +178,8 @@ CreateTargetPhraseCollection(const ttasksptr& ttask, const Phrase& src) const
ChartRuleLookupManager*
PhraseDictionaryGroup::
CreateRuleLookupManager(const ChartParser &,
const ChartCellCollectionBase&, size_t)
{
UTIL_THROW(util::Exception, "Phrase table used in chart decoder");
}
@ -191,7 +191,7 @@ void PhraseDictionaryGroup::CacheForCleanup(TargetPhraseCollection::shared_ptr
ref.push_back(tpc);
}
void
PhraseDictionaryGroup::
CleanUpAfterSentenceProcessing(const InputType &source)
{

View File

@ -43,9 +43,9 @@ class PhraseDictionaryGroup: public PhraseDictionary
public:
PhraseDictionaryGroup(const std::string& line);
void Load();
TargetPhraseCollection::shared_ptr
CreateTargetPhraseCollection(const ttasksptr& ttask,
const Phrase& src) const;
std::vector<std::vector<float> > getWeights(size_t numWeights,
bool normalize) const;
void CacheForCleanup(TargetPhraseCollection::shared_ptr tpc);

View File

@ -49,17 +49,17 @@ PhraseDictionaryMemory::PhraseDictionaryMemory(const std::string &line)
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryMemory::
GetOrCreateTargetPhraseCollection(const Phrase &source,
const TargetPhrase &target,
const Word *sourceLHS)
{
PhraseDictionaryNodeMemory &currNode = GetOrCreateNode(source, target, sourceLHS);
return currNode.GetTargetPhraseCollection();
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryMemory::
GetTargetPhraseCollectionLEGACY(const Phrase& sourceOrig) const
{
@ -74,7 +74,7 @@ GetTargetPhraseCollectionLEGACY(const Phrase& sourceOrig) const
const Word& word = source.GetWord(pos);
currNode = currNode->GetChild(word);
if (currNode == NULL)
return TargetPhraseCollection::shared_ptr();
}
return currNode->GetTargetPhraseCollection();
@ -171,8 +171,8 @@ GetTargetPhraseCollectionBatch(const InputPathList &inputPathQueue) const
const PhraseDictionaryNodeMemory *ptNode = prevPtNode->GetChild(lastWord);
TargetPhraseCollection::shared_ptr targetPhrases;
if (ptNode) {
targetPhrases = ptNode->GetTargetPhraseCollection();
}
inputPath.SetTargetPhrases(*this, targetPhrases, ptNode);
}
}

View File

@ -56,10 +56,10 @@ public:
std::size_t);
// only used by multi-model phrase table, and other meta-features
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollectionLEGACY(const Phrase& src) const;
void
GetTargetPhraseCollectionBatch(const InputPathList &inputPathQueue) const;
TO_STRING();
@ -70,9 +70,9 @@ protected:
(const Phrase &source, const TargetPhrase &target, const Word *sourceLHS);
PhraseDictionaryNodeMemory &
GetOrCreateNode(const Phrase &source, const TargetPhrase &target,
const Word *sourceLHS);
void SortAndPrune();
PhraseDictionaryNodeMemory m_collection;

View File

@ -59,7 +59,7 @@ PhraseDictionaryMultiModel(int type, const std::string &line)
}
}
void
PhraseDictionaryMultiModel::
SetParameter(const std::string& key, const std::string& value)
{
@ -93,7 +93,7 @@ void PhraseDictionaryMultiModel::Load()
}
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryMultiModel::
GetTargetPhraseCollectionLEGACY(const Phrase& src) const
{
@ -107,7 +107,7 @@ GetTargetPhraseCollectionLEGACY(const Phrase& src) const
CollectSufficientStatistics(src, allStats);
ret = CreateTargetPhraseCollectionLinearInterpolation(src, allStats, multimodelweights);
RemoveAllInMap(*allStats);
delete allStats; // ??? Why the detour through malloc? UG
ret->NthElement(m_tableLimit); // sort the phrases for pruning later
const_cast<PhraseDictionaryMultiModel*>(this)->CacheForCleanup(ret);
@ -115,7 +115,7 @@ GetTargetPhraseCollectionLEGACY(const Phrase& src) const
return ret;
}
void
PhraseDictionaryMultiModel::
CollectSufficientStatistics
(const Phrase& src, std::map<std::string, multiModelStats*>* allStats) const
@ -172,11 +172,11 @@ CollectSufficientStatistics
}
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryMultiModel::
CreateTargetPhraseCollectionLinearInterpolation
( const Phrase& src,
std::map<std::string,multiModelStats*>* allStats,
std::vector<std::vector<float> > &multimodelweights) const
{
TargetPhraseCollection::shared_ptr ret(new TargetPhraseCollection);
@ -204,7 +204,7 @@ CreateTargetPhraseCollectionLinearInterpolation
}
//TODO: is it worth caching the results as long as weights don't change?
std::vector<std::vector<float> >
PhraseDictionaryMultiModel::
getWeights(size_t numWeights, bool normalize) const
{
@ -255,7 +255,7 @@ getWeights(size_t numWeights, bool normalize) const
return multimodelweights;
}
std::vector<float>
PhraseDictionaryMultiModel::
normalizeWeights(std::vector<float> &weights) const
{
@ -270,15 +270,15 @@ normalizeWeights(std::vector<float> &weights) const
ChartRuleLookupManager *
PhraseDictionaryMultiModel::
CreateRuleLookupManager(const ChartParser &, const ChartCellCollectionBase&,
std::size_t)
{
UTIL_THROW(util::Exception, "Phrase table used in chart decoder");
}
//copied from PhraseDictionaryCompact; free memory allocated to TargetPhraseCollection (and each TargetPhrase) at end of sentence
void
PhraseDictionaryMultiModel::
CacheForCleanup(TargetPhraseCollection::shared_ptr tpc)
{
@ -286,7 +286,7 @@ CacheForCleanup(TargetPhraseCollection::shared_ptr tpc)
}
void
PhraseDictionaryMultiModel::
CleanUpAfterSentenceProcessing(const InputType &source)
{
@ -306,7 +306,7 @@ CleanUpAfterSentenceProcessing(const InputType &source)
}
void
PhraseDictionaryMultiModel::
CleanUpComponentModels(const InputType &source)
{
@ -315,7 +315,7 @@ CleanUpComponentModels(const InputType &source)
}
}
const std::vector<float>*
PhraseDictionaryMultiModel::
GetTemporaryMultiModelWeightsVector() const
{
@ -331,7 +331,7 @@ GetTemporaryMultiModelWeightsVector() const
#endif
}
void
PhraseDictionaryMultiModel::
SetTemporaryMultiModelWeightsVector(std::vector<float> weights)
{
@ -344,7 +344,7 @@ SetTemporaryMultiModelWeightsVector(std::vector<float> weights)
}
#ifdef WITH_DLIB
vector<float>
PhraseDictionaryMultiModel::
MinimizePerplexity(vector<pair<string, string> > &phrase_pair_vector)
{
@ -418,7 +418,7 @@ MinimizePerplexity(vector<pair<string, string> > &phrase_pair_vector)
}
vector<float>
PhraseDictionaryMultiModel::
Optimize(OptimizationObjective *ObjectiveFunction, size_t numModels)
{

View File

@ -72,29 +72,29 @@ public:
~PhraseDictionaryMultiModel();
void Load();
virtual void
CollectSufficientStatistics
(const Phrase& src, std::map<std::string,multiModelStats*>* allStats)
const;
virtual TargetPhraseCollection::shared_ptr
CreateTargetPhraseCollectionLinearInterpolation
(const Phrase& src, std::map<std::string,multiModelStats*>* allStats,
std::vector<std::vector<float> > &multimodelweights) const;
std::vector<std::vector<float> >
getWeights(size_t numWeights, bool normalize) const;
std::vector<float>
normalizeWeights(std::vector<float> &weights) const;
void
CacheForCleanup(TargetPhraseCollection::shared_ptr tpc);
void
CleanUpAfterSentenceProcessing(const InputType &source);
virtual void
CleanUpComponentModels(const InputType &source);
#ifdef WITH_DLIB
@ -103,26 +103,26 @@ public:
#endif
// functions below required by base class
virtual TargetPhraseCollection::shared_ptr
GetTargetPhraseCollectionLEGACY(const Phrase& src) const;
virtual void
InitializeForInput(ttasksptr const& ttask) {
// Don't do anything source specific here as this object is shared
// between threads.
}
ChartRuleLookupManager*
CreateRuleLookupManager(const ChartParser &, const ChartCellCollectionBase&,
std::size_t);
void
SetParameter(const std::string& key, const std::string& value);
const std::vector<float>*
GetTemporaryMultiModelWeightsVector() const;
void
SetTemporaryMultiModelWeightsVector(std::vector<float> weights);
protected:

View File

@ -134,8 +134,8 @@ TargetPhraseCollection::shared_ptr PhraseDictionaryMultiModelCounts::GetTargetPh
CollectSufficientStats(src, fs, allStats);
TargetPhraseCollection::shared_ptr ret
= CreateTargetPhraseCollectionCounts(src, fs, allStats, multimodelweights);
ret->NthElement(m_tableLimit); // sort the phrases for pruning later
const_cast<PhraseDictionaryMultiModelCounts*>(this)->CacheForCleanup(ret);
@ -149,8 +149,8 @@ void PhraseDictionaryMultiModelCounts::CollectSufficientStats(const Phrase& src,
for(size_t i = 0; i < m_numModels; ++i) {
const PhraseDictionary &pd = *m_pd[i];
TargetPhraseCollection::shared_ptr ret_raw
= pd.GetTargetPhraseCollectionLEGACY(src);
if (ret_raw != NULL) {
TargetPhraseCollection::const_iterator iterTargetPhrase;
@ -202,7 +202,7 @@ void PhraseDictionaryMultiModelCounts::CollectSufficientStats(const Phrase& src,
}
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryMultiModelCounts::
CreateTargetPhraseCollectionCounts(const Phrase &src, vector<float> &fs, map<string,multiModelCountsStats*>* allStats, vector<vector<float> > &multimodelweights) const
{

View File

@ -134,9 +134,9 @@ private:
public:
PhraseDictionaryNodeMemory()
: m_targetPhraseCollection(new TargetPhraseCollection) { }
bool IsLeaf() const {
return m_sourceTermMap.empty() && m_nonTermMap.empty();
}
@ -153,11 +153,11 @@ public:
const PhraseDictionaryNodeMemory *GetChild(const Word &sourceNonTerm, const Word &targetNonTerm) const;
#endif
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection() const {
return m_targetPhraseCollection;
}
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection() {
return m_targetPhraseCollection;
}

View File

@ -54,7 +54,7 @@ void PhraseDictionaryTransliteration::GetTargetPhraseCollectionBatch(const Input
}
}
void
PhraseDictionaryTransliteration::
GetTargetPhraseCollection(InputPath &inputPath) const
{
@ -92,8 +92,8 @@ GetTargetPhraseCollection(InputPath &inputPath) const
UTIL_THROW_IF2(ret != 0, "Transliteration script error");
TargetPhraseCollection::shared_ptr tpColl(new TargetPhraseCollection);
vector<TargetPhrase*> targetPhrases
= CreateTargetPhrases(sourcePhrase, outDir.path());
vector<TargetPhrase*>::const_iterator iter;
for (iter = targetPhrases.begin(); iter != targetPhrases.end(); ++iter) {
TargetPhrase *tp = *iter;

View File

@ -59,7 +59,7 @@ public:
// get translation candidates for a given source phrase
// returns null pointer if nothing found
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollectionNonCacheLEGACY(Phrase const &src) const;
void InitializeForInput(ttasksptr const& ttask);
@ -75,8 +75,8 @@ public:
// legacy
TargetPhraseCollectionWithSourcePhrase::shared_ptr
GetTargetPhraseCollectionLEGACY(InputType const& src,
WordsRange const & srcRange) const;
};

View File

@ -49,14 +49,13 @@ protected:
// Provide access to RuleTableTrie's private
// GetOrCreateTargetPhraseCollection function.
TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection(RuleTableTrie &ruleTable,
const Phrase &source,
const TargetPhrase &target,
const Word *sourceLHS) {
return ruleTable.GetOrCreateTargetPhraseCollection(source, target,
sourceLHS);
}
};

View File

@ -225,8 +225,8 @@ bool RuleTableLoaderCompact::LoadRuleSection(
// Insert rule into table.
TargetPhraseCollection::shared_ptr coll;
coll = GetOrCreateTargetPhraseCollection(ruleTable, sourcePhrase,
*targetPhrase, &sourceLHS);
coll->Add(targetPhrase);
}

View File

@ -242,9 +242,9 @@ bool RuleTableLoaderStandard::Load(FormatType format
targetPhrase->GetScoreBreakdown().Assign(&ruleTable, scoreVector);
targetPhrase->EvaluateInIsolation(sourcePhrase, ruleTable.GetFeaturesToApply());
TargetPhraseCollection::shared_ptr phraseColl
= GetOrCreateTargetPhraseCollection(ruleTable, sourcePhrase,
*targetPhrase, sourceLHS);
phraseColl->Add(targetPhrase);
// not implemented correctly in memory pt. just delete it for now

View File

@ -282,9 +282,9 @@ void PhraseDictionaryFuzzyMatch::InitializeForInput(ttasksptr const& ttask)
targetPhrase->GetScoreBreakdown().Assign(this, scoreVector);
targetPhrase->EvaluateInIsolation(sourcePhrase, GetFeaturesToApply());
TargetPhraseCollection::shared_ptr phraseColl
= GetOrCreateTargetPhraseCollection(rootNode, sourcePhrase,
*targetPhrase, sourceLHS);
phraseColl->Add(targetPhrase);
count++;
@ -303,12 +303,12 @@ void PhraseDictionaryFuzzyMatch::InitializeForInput(ttasksptr const& ttask)
//removedirectoryrecursively(dirName);
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryFuzzyMatch::
GetOrCreateTargetPhraseCollection(PhraseDictionaryNodeMemory &rootNode
, const Phrase &source
, const TargetPhrase &target
, const Word *sourceLHS)
{
PhraseDictionaryNodeMemory &currNode = GetOrCreateNode(rootNode, source, target, sourceLHS);
return currNode.GetTargetPhraseCollection();

View File

@ -60,11 +60,11 @@ public:
TO_STRING();
protected:
TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection(PhraseDictionaryNodeMemory &rootNode
, const Phrase &source
, const TargetPhrase &target
, const Word *sourceLHS);
PhraseDictionaryNodeMemory &GetOrCreateNode(PhraseDictionaryNodeMemory &rootNode
, const Phrase &source

View File

@ -158,13 +158,13 @@ void PhraseDictionaryOnDisk::GetTargetPhraseCollectionBatch(InputPath &inputPath
ptNode = prevPtNode->GetChild(*lastWordOnDisk, wrapper);
if (ptNode) tpc = GetTargetPhraseCollection(ptNode);
inputPath.SetTargetPhrases(*this, tpc, ptNode);
delete lastWordOnDisk;
}
}
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryOnDisk::
GetTargetPhraseCollection(const OnDiskPt::PhraseNode *ptNode) const
{
@ -192,21 +192,21 @@ GetTargetPhraseCollection(const OnDiskPt::PhraseNode *ptNode) const
return ret;
}
TargetPhraseCollection::shared_ptr
PhraseDictionaryOnDisk::
GetTargetPhraseCollectionNonCache(const OnDiskPt::PhraseNode *ptNode) const
{
OnDiskPt::OnDiskWrapper& wrapper
= const_cast<OnDiskPt::OnDiskWrapper&>(GetImplementation());
vector<float> weightT = StaticData::Instance().GetWeights(this);
OnDiskPt::Vocab &vocab = wrapper.GetVocab();
OnDiskPt::TargetPhraseCollection::shared_ptr targetPhrasesOnDisk
= ptNode->GetTargetPhraseCollection(m_tableLimit, wrapper);
TargetPhraseCollection::shared_ptr targetPhrases
= targetPhrasesOnDisk->ConvertToMoses(m_input, m_output, *this,
weightT, vocab, false);
// delete targetPhrasesOnDisk;

View File

@ -78,10 +78,10 @@ public:
virtual void InitializeForInput(ttasksptr const& ttask);
void GetTargetPhraseCollectionBatch(const InputPathList &inputPathQueue) const;
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollection(const OnDiskPt::PhraseNode *ptNode) const;
TargetPhraseCollection::shared_ptr
GetTargetPhraseCollectionNonCache(const OnDiskPt::PhraseNode *ptNode) const;
void SetParameter(const std::string& key, const std::string& value);

View File

@ -51,10 +51,10 @@ public:
private:
friend class RuleTableLoader;
virtual TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection(const Phrase &source,
const TargetPhrase &target,
const Word *sourceLHS) = 0;
virtual void SortAndPrune() = 0;

View File

@ -38,11 +38,11 @@
namespace Moses
{
TargetPhraseCollection::shared_ptr
RuleTableUTrie::
GetOrCreateTargetPhraseCollection(const Phrase &source,
const TargetPhrase &target,
const Word *sourceLHS)
{
UTrieNode &currNode = GetOrCreateNode(source, target, sourceLHS);
return currNode.GetOrCreateTargetPhraseCollection(target);

View File

@ -57,10 +57,10 @@ public:
const ChartCellCollectionBase &, std::size_t);
private:
TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection(const Phrase &source,
const TargetPhrase &target,
const Word *sourceLHS);
UTrieNode &GetOrCreateNode(const Phrase &source, const TargetPhrase &target,
const Word *sourceLHS);

View File

@ -51,7 +51,7 @@ public:
TerminalEqualityPred> TerminalMap;
typedef boost::unordered_map<std::vector<int>,
TargetPhraseCollection::shared_ptr> LabelMap;
#else
typedef std::map<Word, UTrieNode> TerminalMap;
typedef std::map<std::vector<int>, TargetPhraseCollection::shared_ptr> LabelMap;
@ -78,7 +78,7 @@ public:
UTrieNode *GetOrCreateTerminalChild(const Word &sourceTerm);
UTrieNode *GetOrCreateNonTerminalChild(const Word &targetNonTerm);
TargetPhraseCollection::shared_ptr
GetOrCreateTargetPhraseCollection(const TargetPhrase &);
bool IsLeaf() const {

View File

@ -47,8 +47,8 @@ void Scope3Parser::GetChartRuleCollection(
const size_t start = range.GetStartPos();
const size_t end = range.GetEndPos();
std::vector<std::pair<const UTrieNode *, const VarSpanNode *> > &pairVec
= m_ruleApplications[start][end-start+1];
MatchCallback matchCB(range, outColl);
for (std::vector<std::pair<const UTrieNode *, const VarSpanNode *> >::const_iterator p = pairVec.begin(); p != pairVec.end(); ++p) {
@ -71,7 +71,7 @@ void Scope3Parser::GetChartRuleCollection(
UTrieNode::LabelMap::const_iterator p = labelMap.begin();
for (; p != labelMap.end(); ++p) {
const std::vector<int> &labels = p->first;
TargetPhraseCollection::shared_ptr tpc = p->second;
assert(labels.size() == varSpanNode.m_rank);
bool failCheck = false;
for (size_t i = 0; i < varSpanNode.m_rank; ++i) {

View File

@ -67,7 +67,7 @@ private:
struct MatchCallback {
public:
MatchCallback(const WordsRange &range, ChartParserCallback &out)
: m_range(range) , m_out(out) // , m_tpc(NULL)
{ }
void operator()(const StackVec &stackVec) {

View File

@ -37,8 +37,8 @@ void SkeletonPT::GetTargetPhraseCollectionBatch(const InputPathList &inputPathQu
// add target phrase to phrase-table cache
size_t hash = hash_value(sourcePhrase);
std::pair<TargetPhraseCollection::shared_ptr, clock_t>
value(tpColl, clock());
cache[hash] = value;
inputPath.SetTargetPhrases(*this, tpColl, NULL);

View File

@ -61,8 +61,8 @@ TranslationOptionCollectionLattice
const ScorePair &scores = col[i].second;
ScorePair *inputScore = new ScorePair(scores);
InputPath *path
= new InputPath(ttask, subphrase, labels, range, NULL, inputScore);
path->SetNextNode(nextNode);
m_inputPathQueue.push_back(path);
@ -114,8 +114,8 @@ void TranslationOptionCollectionLattice::Extend(const InputPath &prevPath, const
ScorePair *inputScore = new ScorePair(*prevInputScore);
inputScore->PlusEquals(scores);
InputPath *path = new InputPath(prevPath.ttask, subphrase, labels,
range, &prevPath, inputScore);
path->SetNextNode(nextNode);
m_inputPathQueue.push_back(path);
@ -142,8 +142,8 @@ void TranslationOptionCollectionLattice::CreateTranslationOptions()
for (size_t i = 0; i < m_inputPathQueue.size(); ++i) {
const InputPath &path = *m_inputPathQueue[i];
TargetPhraseCollection::shared_ptr tpColl
= path.GetTargetPhrases(phraseDictionary);
const WordsRange &range = path.GetWordsRange();
if (tpColl && tpColl->GetSize()) {

View File

@ -428,7 +428,7 @@ inline float CalcTranslationScore(const std::vector<float> &probVector,
out << *this; \
return out.str(); \
} \
//! delete and remove every element of a collection object such as set, list etc
template<class COLL>
void RemoveAllInColl(COLL &coll)