Conflicts:
	moses/LM/IRST.cpp
This commit is contained in:
Ulrich Germann 2015-11-01 15:37:36 +00:00
commit 8424fb6e2a
6 changed files with 27 additions and 27 deletions

View File

@@ -125,25 +125,25 @@ public:
return m_context_weights;
}
#endif
bool
SetContextWeights(std::string const& spec) {
if (m_context_weights) return false;
boost::unique_lock<boost::shared_mutex> lock(m_lock);
SPTR<std::map<std::string,float> > M(new std::map<std::string, float>);
// TO DO; This needs to be done with StringPiece.find, not Tokenize
// PRIORITY: low
std::vector<std::string> tokens = Tokenize(spec,":");
for (std::vector<std::string>::iterator it = tokens.begin();
it != tokens.end(); it++) {
for (std::vector<std::string>::iterator it = tokens.begin();
it != tokens.end(); it++) {
std::vector<std::string> key_and_value = Tokenize(*it, ",");
(*M)[key_and_value[0]] = atof(key_and_value[1].c_str());
}
m_context_weights = M;
return true;
}
bool
SetContextWeights(SPTR<std::map<std::string,float> const> const& w) {
if (m_context_weights) return false;

View File

@@ -228,10 +228,10 @@ batch_run()
if (context_window)
task->SetContextWindow(context_window);
if (context_weights != "" && !task->GetScope()->GetContextWeights())
task->GetScope()->SetContextWeights(context_weights);
// Allow for (sentence-)context-specific processing prior to
// decoding. This can be used, for example, for context-sensitive
// phrase lookup.

View File

@@ -39,16 +39,16 @@ void read_ini(const char *inifile, string &model, string &words, string &wordstx
namespace Moses
{
class Murmur: public DALM::State::HashFunction
class Murmur: public DALM::State::HashFunction
{
public:
Murmur(std::size_t seed=0): seed(seed){
}
virtual std::size_t operator()(const DALM::VocabId *words, std::size_t size) const{
return util::MurmurHashNative(words, sizeof(DALM::VocabId) * size, seed);
}
Murmur(std::size_t seed=0): seed(seed) {
}
virtual std::size_t operator()(const DALM::VocabId *words, std::size_t size) const {
return util::MurmurHashNative(words, sizeof(DALM::VocabId) * size, seed);
}
private:
std::size_t seed;
std::size_t seed;
};
class DALMState : public FFState
@@ -173,13 +173,13 @@ public:
virtual bool operator==(const FFState& other) const {
const DALMChartState &o = static_cast<const DALMChartState &>(other);
// check left state.
if(prefixLength != o.prefixLength) return false;
const DALM::Fragment &f = prefixFragments[prefixLength-1];
const DALM::Fragment &of = o.prefixFragments[prefixLength-1];
if(DALM::compare_fragments(f, of) != 0) return false;
// check right state.
if(rightContext.get_count() != o.rightContext.get_count()) return false;
return rightContext.compare(o.rightContext) == 0;
@@ -301,7 +301,7 @@ void LanguageModelDALM::CalcScore(const Phrase &phrase, float &fullScore, float
}
currPos++;
if (currPos >= m_ContextSize){
if (currPos >= m_ContextSize) {
break;
}
}
@@ -564,7 +564,7 @@ void LanguageModelDALM::EvaluateTerminal(
} else {
hypoScore += score;
prefixLength++;
if(state.get_count() < std::min(prevLen+1, (int)m_ContextSize)){
if(state.get_count() < std::min(prevLen+1, (int)m_ContextSize)) {
newState->SetAsLarge();
}
if(prefixLength >= m_ContextSize) newState->SetAsLarge();
@@ -626,8 +626,8 @@ void LanguageModelDALM::EvaluateNonTerminal(
state = prevState->GetRightContext();
return;
} else if(state.get_count() <= prefixPos+1) {
if(state.get_count() == prefixPos+1 && !gap.is_finalized()){
prefixLength++;
if(state.get_count() == prefixPos+1 && !gap.is_finalized()) {
prefixLength++;
}
newState->SetAsLarge();
state = prevState->GetRightContext();
@@ -636,10 +636,10 @@ void LanguageModelDALM::EvaluateNonTerminal(
newState->SetAsLarge();
} else {
prefixLength++;
if(state.get_count() < std::min(prevLen+1, (int)m_ContextSize)){
if(state.get_count() < std::min(prevLen+1, (int)m_ContextSize)) {
newState->SetAsLarge();
}
if(prefixLength >= m_ContextSize) newState->SetAsLarge();
}
}
@@ -651,7 +651,7 @@ void LanguageModelDALM::EvaluateNonTerminal(
if (prevState->LargeEnough()) {
newState->SetAsLarge();
//if(prevPrefixLength < prevState->GetHypoSize()) {
hypoScore += m_lm->sum_bows(state, prevPrefixLength, state.get_count());
hypoScore += m_lm->sum_bows(state, prevPrefixLength, state.get_count());
//}
// copy language model state
state = prevState->GetRightContext();

View File

@@ -69,11 +69,11 @@ public:
virtual bool HasScope() const {
return false;
}
virtual SPTR<ContextScope> GetScope() const {
return SPTR<ContextScope>();
}
/** No longer does anything as not using mem pool for Phrase class anymore */
static void InitializeMemPool();

View File

@@ -68,7 +68,7 @@ TargetPhrase::TargetPhrase(ttasksptr& ttask, std::string out_string, const Phras
, m_container(pt)
{
if (ttask) m_scope = ttask->GetScope();
//ACAT
const StaticData &staticData = StaticData::Instance();
// XXX should this really be InputFactorOrder???

View File

@@ -167,7 +167,7 @@ interpret_dlt()
m_scope->SetContextWeights(j->second);
}
}
void TranslationTask::Run()
{