added thread-safe cache to GlobalLexiconModel
parent f2046da6aa
commit 33e72473f0
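Summary of the change as visible in the hunks below: GlobalLexicalModel's per-sentence state moves out of the shared members m_cache and m_input into a ThreadLocalStorage struct (holding the LexiconCache and the pointer to the input Sentence), accessed through boost::thread_specific_ptr when built with WITH_THREADS and through std::auto_ptr otherwise. TranslationSystem::CleanUpAfterSentenceProcessing() additionally calls CleanUp() on each global lexical model.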
@@ -29,8 +29,6 @@ GlobalLexicalModel::GlobalLexicalModel(const string &filePath,
 	m_bias = new Word();
 	const Factor* factor = factorCollection.AddFactor( Input, inFactors[0], "**BIAS**" );
 	m_bias->SetFactor( inFactors[0], factor );
-
-	m_cache = NULL;
 }
 
 GlobalLexicalModel::~GlobalLexicalModel()
@@ -44,7 +42,7 @@ GlobalLexicalModel::~GlobalLexicalModel()
 		}
 		delete iter->first; // delete output word
 	}
-	if (m_cache != NULL) delete m_cache;
+	// if (m_cache != NULL) delete m_cache;
 }
 
 void GlobalLexicalModel::LoadData(const string &filePath,
@@ -112,11 +110,19 @@ void GlobalLexicalModel::LoadData(const string &filePath,
 
 void GlobalLexicalModel::InitializeForInput( Sentence const& in )
 {
-	m_input = &in;
-	if (m_cache != NULL) delete m_cache;
-	m_cache = new map< const TargetPhrase*, float >;
+	m_local.reset(new ThreadLocalStorage);
+	m_local->input = &in;
+	// m_input = &in;
+	// m_input.reset(&in);
+	// m_cache.reset(new _LexiconCache);
 }
 
+//void GlobalLexicalModel::CleanUp()
+//{
+//  m_input.release(); // don't delete input sentence
+//  m_cache.reset();
+//}
+
 float GlobalLexicalModel::ScorePhrase( const TargetPhrase& targetPhrase ) const
 {
 	float score = 0;
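With this change, all per-sentence state (the score cache and the pointer to the input sentence) is recreated for the calling thread at the start of each sentence: resetting the thread-specific pointer deletes whatever that thread stored for the previous sentence, so no explicit delete is needed. A minimal standalone sketch of that reset behaviour, with a hypothetical State struct standing in for ThreadLocalStorage:

#include <boost/thread/tss.hpp>
#include <cassert>

struct State { int sentenceId; };  // stand-in for ThreadLocalStorage

int main() {
	boost::thread_specific_ptr<State> local;

	// sentence 1: fresh per-thread state
	local.reset(new State());
	local->sentenceId = 1;
	assert(local->sentenceId == 1);

	// sentence 2: reset() deletes this thread's previous State
	// before installing the new one, so nothing leaks across sentences
	local.reset(new State());
	local->sentenceId = 2;
	assert(local->sentenceId == 2);

	return 0;
}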
@@ -133,8 +139,8 @@ float GlobalLexicalModel::ScorePhrase( const TargetPhrase& targetPhrase ) const
 		}
 
 		set< const Word*, WordComparer > alreadyScored; // do not score a word twice
-		for(size_t inputIndex = 0; inputIndex < m_input->GetSize(); inputIndex++ ) {
-			const Word& inputWord = m_input->GetWord( inputIndex );
+		for(size_t inputIndex = 0; inputIndex < m_local->input->GetSize(); inputIndex++ ) {
+			const Word& inputWord = m_local->input->GetWord( inputIndex );
 			if ( alreadyScored.find( &inputWord ) == alreadyScored.end() ) {
 				SingleHash::const_iterator inputWordHash = targetWordHash->second.find( &inputWord );
 				if( inputWordHash != targetWordHash->second.end() ) {
@@ -154,13 +160,14 @@ float GlobalLexicalModel::ScorePhrase( const TargetPhrase& targetPhrase ) const
 
 float GlobalLexicalModel::GetFromCacheOrScorePhrase( const TargetPhrase& targetPhrase ) const
 {
-	map< const TargetPhrase*, float >::const_iterator query = m_cache->find( &targetPhrase );
-	if ( query != m_cache->end() ) {
+	LexiconCache& m_cache = m_local->cache;
+	map< const TargetPhrase*, float >::const_iterator query = m_cache.find( &targetPhrase );
+	if ( query != m_cache.end() ) {
 		return query->second;
 	}
 
 	float score = ScorePhrase( targetPhrase );
-	m_cache->insert( pair<const TargetPhrase*, float>(&targetPhrase, score) );
+	m_cache.insert( pair<const TargetPhrase*, float>(&targetPhrase, score) );
 	std::cerr << "add to cache " << targetPhrase << ": " << score << endl;
 	return score;
 }
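The local reference named m_cache keeps the body of the function almost unchanged (only -> becomes .) while routing every lookup and insert through the calling thread's own cache. The lookup-or-compute pattern itself stays the same; a small self-contained illustration of it, with a hypothetical Score() function and int keys standing in for ScorePhrase() and TargetPhrase (note the map is keyed by address, as in the original):

#include <iostream>
#include <map>
#include <utility>

typedef std::map<const int*, float> Cache;  // stand-in for LexiconCache

// hypothetical stand-in for GlobalLexicalModel::ScorePhrase
static float Score(const int &phrase) { return phrase * 0.5f; }

static float GetFromCacheOrScore(Cache &cache, const int &phrase) {
	Cache::const_iterator query = cache.find(&phrase);
	if (query != cache.end()) {
		return query->second;                      // cache hit: reuse the earlier score
	}
	float score = Score(phrase);
	cache.insert(std::make_pair(&phrase, score));  // cache miss: compute once, then store
	return score;
}

int main() {
	Cache cache;
	int phrase = 4;
	std::cout << GetFromCacheOrScore(cache, phrase) << std::endl;  // computed
	std::cout << GetFromCacheOrScore(cache, phrase) << std::endl;  // served from cache
	return 0;
}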
@@ -3,6 +3,7 @@
 
 #include <string>
 #include <vector>
+#include <memory>
 #include "Factor.h"
 #include "Phrase.h"
 #include "TypeDef.h"
@@ -13,6 +14,10 @@
 #include "FactorTypeSet.h"
 #include "Sentence.h"
 
+#ifdef WITH_THREADS
+#include <boost/thread/tss.hpp>
+#endif
+
 namespace Moses
 {
 
@@ -31,10 +36,27 @@ class GlobalLexicalModel : public StatelessFeatureFunction
 {
 	typedef std::map< const Word*, std::map< const Word*, float, WordComparer >, WordComparer > DoubleHash;
 	typedef std::map< const Word*, float, WordComparer > SingleHash;
+	typedef std::map< const TargetPhrase*, float > LexiconCache;
+
+	struct ThreadLocalStorage
+	{
+		LexiconCache cache;
+		const Sentence *input;
+	};
+
 private:
 	DoubleHash m_hash;
-	std::map< const TargetPhrase*, float > *m_cache;
-	const Sentence *m_input;
+#ifdef WITH_THREADS
+	boost::thread_specific_ptr<ThreadLocalStorage> m_local;
+	//boost::thread_specific_ptr<_LexiconCache> m_cache;
+	//boost::thread_specific_ptr<const Sentence*> m_input;
+#else
+	std::auto_ptr<ThreadLocalStorage> m_local;
+	//std::auto_ptr<_LexiconCache> m_cache;
+	// std::auto_ptr<const Sentence> m_input;
+#endif
+
+	// const Sentence *m_input;
 	Word *m_bias;
 
 	FactorMask m_inputFactors;
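For reference, a minimal standalone sketch of the mechanism the header introduces: each thread that touches a boost::thread_specific_ptr sees only its own instance, so two decoding threads can cache scores for different sentences without locking. The main(), the worker function, and the simplified member types below are illustrative only and are not part of the Moses code:

#include <boost/thread/thread.hpp>
#include <boost/thread/tss.hpp>
#include <iostream>
#include <map>

// simplified stand-in for GlobalLexicalModel::ThreadLocalStorage
struct ThreadLocalStorage {
	std::map<const int*, float> cache;  // stand-in for LexiconCache
	const int *input;                   // stand-in for const Sentence*
};

// one slot per thread; each thread's reset() installs its own instance
static boost::thread_specific_ptr<ThreadLocalStorage> g_local;

static void ProcessSentence(const int *sentence) {
	g_local.reset(new ThreadLocalStorage);  // fresh state for this thread and sentence
	g_local->input = sentence;
	g_local->cache[sentence] = static_cast<float>(*sentence);
	std::cout << "thread-local cache size: " << g_local->cache.size() << std::endl;
}

int main() {
	int s1 = 1, s2 = 2;
	boost::thread t1(ProcessSentence, &s1);  // each thread gets its own storage
	boost::thread t2(ProcessSentence, &s2);
	t1.join();
	t2.join();
	return 0;
}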
@@ -67,6 +89,7 @@ public:
 	};
 
 	void InitializeForInput( Sentence const& in );
+	void CleanUp() {};
 
 	void Evaluate(const TargetPhrase&, ScoreComponentCollection* ) const;
 };
@@ -154,6 +154,11 @@ void TranslationSystem::CleanUpAfterSentenceProcessing() const
     LanguageModel &languageModel = **iterLM;
     languageModel.CleanUpAfterSentenceProcessing();
   }
+
+  for(size_t i=0; i<m_globalLexicalModels.size(); ++i) {
+    m_globalLexicalModels[i]->CleanUp();
+  }
+
 }
 
 float TranslationSystem::GetWeightWordPenalty() const
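Note that GlobalLexicalModel::CleanUp() is now an empty inline body in the header, so this loop is effectively a no-op; the per-sentence state held in m_local is instead released when the thread-specific pointer is reset for the next sentence.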