diff --git a/contrib/other-builds/moses/.project b/contrib/other-builds/moses/.project
index 6081d2d3f..e69282e0e 100644
--- a/contrib/other-builds/moses/.project
+++ b/contrib/other-builds/moses/.project
@@ -1081,6 +1081,16 @@
1
PARENT-3-PROJECT_LOC/moses/FF/FFState.h
+
+ FF/Factory.cpp
+ 1
+ PARENT-3-PROJECT_LOC/moses/FF/Factory.cpp
+
+
+ FF/Factory.h
+ 1
+ PARENT-3-PROJECT_LOC/moses/FF/Factory.h
+
FF/FeatureFunction.cpp
1
diff --git a/moses/FF/Factory.cpp b/moses/FF/Factory.cpp
new file mode 100644
index 000000000..33f3519fc
--- /dev/null
+++ b/moses/FF/Factory.cpp
@@ -0,0 +1,185 @@
+#include "moses/FF/Factory.h"
+#include "moses/StaticData.h"
+
+#include "moses/TranslationModel/PhraseDictionaryTreeAdaptor.h"
+#include "moses/TranslationModel/RuleTable/PhraseDictionaryOnDisk.h"
+#include "moses/TranslationModel/PhraseDictionaryMemory.h"
+#include "moses/TranslationModel/CompactPT/PhraseDictionaryCompact.h"
+#include "moses/TranslationModel/PhraseDictionaryMultiModel.h"
+#include "moses/TranslationModel/PhraseDictionaryMultiModelCounts.h"
+#include "moses/TranslationModel/RuleTable/PhraseDictionaryALSuffixArray.h"
+#include "moses/TranslationModel/PhraseDictionaryDynSuffixArray.h"
+
+#include "moses/LexicalReordering.h"
+
+#include "moses/FF/BleuScoreFeature.h"
+#include "moses/FF/TargetWordInsertionFeature.h"
+#include "moses/FF/SourceWordDeletionFeature.h"
+#include "moses/FF/GlobalLexicalModel.h"
+#include "moses/FF/GlobalLexicalModelUnlimited.h"
+#include "moses/FF/UnknownWordPenaltyProducer.h"
+#include "moses/FF/WordTranslationFeature.h"
+#include "moses/FF/TargetBigramFeature.h"
+#include "moses/FF/TargetNgramFeature.h"
+#include "moses/FF/PhraseBoundaryFeature.h"
+#include "moses/FF/PhrasePairFeature.h"
+#include "moses/FF/PhraseLengthFeature.h"
+#include "moses/FF/DistortionScoreProducer.h"
+#include "moses/FF/WordPenaltyProducer.h"
+#include "moses/FF/InputFeature.h"
+#include "moses/FF/PhrasePenalty.h"
+#include "moses/FF/OSM-Feature/OpSequenceModel.h"
+
+#include "moses/LM/Ken.h"
+#ifdef LM_IRST
+#include "moses/LM/IRST.h"
+#endif
+
+#ifdef LM_SRI
+#include "moses/LM/SRI.h"
+#endif
+
+#ifdef LM_RAND
+#include "moses/LM/Rand.h"
+#endif
+
+#ifdef HAVE_SYNLM
+#include "moses/SyntacticLanguageModel.h"
+#endif
+
+#include "util/exception.hh"
+
+#include <vector>
+
+namespace Moses
+{
+
+class FeatureFactory
+{
+public:
+ virtual ~FeatureFactory() {}
+
+ virtual void Create(const std::string &line) = 0;
+
+protected:
+ template <class F> static void DefaultSetup(F *feature);
+
+ FeatureFactory() {}
+};
+
+template <class F> void FeatureFactory::DefaultSetup(F *feature)
+{
+ StaticData &static_data = StaticData::InstanceNonConst();
+ std::vector<float> &weights = static_data.GetParameter()->GetWeights(feature->GetScoreProducerDescription());
+
+ if (feature->IsTuneable() || weights.size()) {
+ // if it's tuneable, ini file MUST have weights
+ // even it it's not tuneable, people can still set the weights in the ini file
+ static_data.SetWeights(feature, weights);
+ } else {
+ std::vector<float> defaultWeights = feature->DefaultWeights();
+ static_data.SetWeights(feature, defaultWeights);
+ }
+}
+
+namespace
+{
+
+template <class F> class DefaultFeatureFactory : public FeatureFactory
+{
+public:
+ void Create(const std::string &line) {
+ DefaultSetup(new F(line));
+ }
+};
+
+class KenFactory : public FeatureFactory
+{
+public:
+ void Create(const std::string &line) {
+ DefaultSetup(ConstructKenLM(line));
+ }
+};
+
+#ifdef LM_RAND
+class RandFactory : public FeatureFactory
+{
+public:
+ void Create(const std::string &line) {
+ DefaultSetup(NewRandLM());
+ }
+};
+#endif
+
+} // namespace
+
+FeatureRegistry::FeatureRegistry()
+{
+// Feature with same name as class
+#define MOSES_FNAME(name) Add(#name, new DefaultFeatureFactory< name >());
+// Feature with different name than class.
+#define MOSES_FNAME2(name, type) Add(name, new DefaultFeatureFactory< type >());
+ MOSES_FNAME(GlobalLexicalModel);
+ //MOSES_FNAME(GlobalLexicalModelUnlimited); This was commented out in the original
+ MOSES_FNAME(SourceWordDeletionFeature);
+ MOSES_FNAME(TargetWordInsertionFeature);
+ MOSES_FNAME(PhraseBoundaryFeature);
+ MOSES_FNAME(PhraseLengthFeature);
+ MOSES_FNAME(WordTranslationFeature);
+ MOSES_FNAME(TargetBigramFeature);
+ MOSES_FNAME(TargetNgramFeature);
+ MOSES_FNAME(PhrasePairFeature);
+ MOSES_FNAME(LexicalReordering);
+ MOSES_FNAME2("Generation", GenerationDictionary);
+ MOSES_FNAME(BleuScoreFeature);
+ MOSES_FNAME2("Distortion", DistortionScoreProducer);
+ MOSES_FNAME2("WordPenalty", WordPenaltyProducer);
+ MOSES_FNAME(InputFeature);
+ MOSES_FNAME2("PhraseDictionaryBinary", PhraseDictionaryTreeAdaptor);
+ MOSES_FNAME(PhraseDictionaryOnDisk);
+ MOSES_FNAME(PhraseDictionaryMemory);
+ MOSES_FNAME(PhraseDictionaryCompact);
+ MOSES_FNAME(PhraseDictionaryMultiModel);
+ MOSES_FNAME(PhraseDictionaryMultiModelCounts);
+ MOSES_FNAME(PhraseDictionaryALSuffixArray);
+ MOSES_FNAME(PhraseDictionaryDynSuffixArray);
+ MOSES_FNAME(OpSequenceModel);
+ MOSES_FNAME(PhrasePenalty);
+ MOSES_FNAME2("UnknownWordPenalty", UnknownWordPenaltyProducer);
+
+#ifdef HAVE_SYNLM
+ MOSES_FNAME(SyntacticLanguageModel);
+#endif
+#ifdef LM_IRST
+ MOSES_FNAME2("IRSTLM", LanguageModelIRST);
+#endif
+#ifdef LM_SRI
+ MOSES_FNAME2("SRILM", LanguageModelSRI);
+#endif
+#ifdef LM_RAND
+ Add("RANDLM", new RandFactory());
+#endif
+ Add("KENLM", new KenFactory());
+}
+
+FeatureRegistry::~FeatureRegistry() {}
+
+void FeatureRegistry::Add(const std::string &name, FeatureFactory *factory)
+{
+ std::pair<std::string, boost::shared_ptr<FeatureFactory> > to_ins(name, boost::shared_ptr<FeatureFactory>(factory));
+ UTIL_THROW_IF(!registry_.insert(to_ins).second, util::Exception, "Duplicate feature name " << name);
+}
+
+namespace
+{
+class UnknownFeatureException : public util::Exception {};
+}
+
+void FeatureRegistry::Construct(const std::string &name, const std::string &line)
+{
+ Map::iterator i = registry_.find(name);
+ UTIL_THROW_IF(i == registry_.end(), UnknownFeatureException, "Feature name " << name << " is not registered.");
+ i->second->Create(line);
+}
+
+} // namespace Moses
diff --git a/moses/FF/Factory.h b/moses/FF/Factory.h
new file mode 100644
index 000000000..4fc93d061
--- /dev/null
+++ b/moses/FF/Factory.h
@@ -0,0 +1,30 @@
+#pragma once
+
+#include <string>
+
+#include <boost/shared_ptr.hpp>
+#include <boost/unordered_map.hpp>
+
+namespace Moses
+{
+
+class FeatureFactory;
+
+class FeatureRegistry
+{
+public:
+ FeatureRegistry();
+
+ ~FeatureRegistry();
+
+ void Construct(const std::string &name, const std::string &line);
+
+private:
+ void Add(const std::string &name, FeatureFactory *factory);
+
+ typedef boost::unordered_map<std::string, boost::shared_ptr<FeatureFactory> > Map;
+
+ Map registry_;
+};
+
+} // namespace Moses
diff --git a/moses/FF/FeatureFunction.cpp b/moses/FF/FeatureFunction.cpp
index c137c8294..86f22cdad 100644
--- a/moses/FF/FeatureFunction.cpp
+++ b/moses/FF/FeatureFunction.cpp
@@ -109,5 +109,10 @@ void FeatureFunction::ReadParameters()
}
}
+std::vector<float> FeatureFunction::DefaultWeights() const
+{
+ UTIL_THROW(util::Exception, "No default weights");
+}
+
}
diff --git a/moses/FF/FeatureFunction.h b/moses/FF/FeatureFunction.h
index 264f3f38b..e435e52c6 100644
--- a/moses/FF/FeatureFunction.h
+++ b/moses/FF/FeatureFunction.h
@@ -78,6 +78,7 @@ public:
virtual bool IsTuneable() const {
return m_tuneable;
}
+ virtual std::vector<float> DefaultWeights() const;
//! Called before search and collecting of translation options
virtual void InitializeForInput(InputType const& source) {
diff --git a/moses/FF/UnknownWordPenaltyProducer.cpp b/moses/FF/UnknownWordPenaltyProducer.cpp
index 4ba033e58..fcb91a430 100644
--- a/moses/FF/UnknownWordPenaltyProducer.cpp
+++ b/moses/FF/UnknownWordPenaltyProducer.cpp
@@ -13,5 +13,11 @@ UnknownWordPenaltyProducer::UnknownWordPenaltyProducer(const std::string &line)
ReadParameters();
}
+std::vector<float> UnknownWordPenaltyProducer::DefaultWeights() const
+{
+  std::vector<float> ret(1, 1.0f);
+ return ret;
+}
+
}
diff --git a/moses/FF/UnknownWordPenaltyProducer.h b/moses/FF/UnknownWordPenaltyProducer.h
index 3cfaefef4..15880f698 100644
--- a/moses/FF/UnknownWordPenaltyProducer.h
+++ b/moses/FF/UnknownWordPenaltyProducer.h
@@ -20,6 +20,7 @@ public:
bool IsUseable(const FactorMask &mask) const {
return true;
}
+ std::vector<float> DefaultWeights() const;
};
diff --git a/moses/Jamfile b/moses/Jamfile
index e423517d8..20ac3cabf 100644
--- a/moses/Jamfile
+++ b/moses/Jamfile
@@ -30,6 +30,25 @@ if $(have-clock[2]) = 0 {
alias rt ;
}
+#This is a kludge to force rebuilding if different --with options are passed.
+#Could have used features like on but getting these to apply only to
+#linking was ugly and it still didn't trigger an install (since the install
+#path doesn't encode features). It stores a file lm.log with the previous
+#options and forces a rebuild if the current options differ.
+local current = ;
+for local i in srilm irstlm randlm {
+ local optval = [ option.get "with-$(i)" ] ;
+ if $(optval) {
+ current += "--with-$(i)=$(optval)" ;
+ }
+}
+current = $(current:J=" ") ;
+current ?= "" ;
+path-constant LM-LOG : bin/lm.log ;
+update-if-changed $(LM-LOG) $(current) ;
+
+obj FF_Factory.o : FF/Factory.cpp LM//macros headers ../lm//kenlm : $(LM-LOG) ;
+
lib moses :
[ glob
*.cpp
@@ -45,8 +64,9 @@ lib moses :
ThreadPool.cpp
SyntacticLanguageModel.cpp
*Test.cpp Mock*.cpp
+ FF/Factory.cpp
]
-headers LM//LM TranslationModel/CompactPT//CompactPT synlm ThreadPool rt
+headers FF_Factory.o LM//LM TranslationModel/CompactPT//CompactPT synlm ThreadPool rt
..//search ../util/double-conversion//double-conversion ..//z ../OnDiskPt//OnDiskPt ;
alias headers-to-install : [ glob-tree *.h ] ;
diff --git a/moses/LM/Jamfile b/moses/LM/Jamfile
index c9f9007d4..770648aed 100644
--- a/moses/LM/Jamfile
+++ b/moses/LM/Jamfile
@@ -7,26 +7,8 @@
import option path ;
-#This is a kludge to force rebuilding if different --with options are passed.
-#Could have used features like on but getting these to apply only to
-#linking was ugly and it still didn't trigger an install (since the install
-#path doesn't encode features). It stores a file lm.log with the previous
-#options and forces a rebuild if the current options differ.
-local current = ;
-for local i in srilm irstlm randlm {
- local optval = [ option.get "with-$(i)" ] ;
- if $(optval) {
- current += "--with-$(i)=$(optval)" ;
- }
-}
-current = $(current:J=" ") ;
-current ?= "" ;
-
-path-constant LM-LOG : bin/lm.log ;
-update-if-changed $(LM-LOG) $(current) ;
-
-
local dependencies = ;
+local lmmacros = ;
#IRSTLM
local with-irstlm = [ option.get "with-irstlm" ] ;
@@ -35,6 +17,7 @@ if $(with-irstlm) {
obj IRST.o : IRST.cpp ..//headers : $(with-irstlm)/include $(with-irstlm)/include/irstlm ;
alias irst : IRST.o irstlm : : : LM_IRST ;
dependencies += irst ;
+ lmmacros += LM_IRST ;
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" ;
echo "!!! You are linking the IRSTLM library; be sure the release is >= 5.70.02 !!!" ;
echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" ;
@@ -63,6 +46,7 @@ if $(with-srilm) {
obj ParallelBackoff.o : ParallelBackoff.cpp ..//headers : $(with-srilm)/include $(with-srilm)/include/srilm off ;
alias sri : SRI.o ParallelBackoff.o sri-libs : : : LM_SRI ;
dependencies += sri ;
+ lmmacros += LM_SRI ;
}
#RandLM
@@ -72,6 +56,7 @@ if $(with-randlm) {
obj Rand.o : Rand.cpp RandLM ..//headers : $(with-randlm)/include $(with-randlm)/include/RandLM ;
alias rand : Rand.o RandLM : : : LM_RAND ;
dependencies += rand ;
+ lmmacros += LM_RAND ;
}
# LDHTLM
@@ -82,6 +67,7 @@ if $(with-ldhtlm) {
obj LDHT.o : LDHT.cpp LDHT ..//headers : $(with-ldhtlm)/include $(with-ldhtlm)/include/LDHT ;
alias ldht : LDHT.o LDHT ticpp : : : LM_LDHT ;
dependencies += ldht ;
+ lmmacros += LM_LDHT ;
}
#ORLM is always compiled but needs special headers
@@ -92,4 +78,4 @@ obj ORLM.o : ORLM.cpp ..//headers ../TranslationModel/DynSAInclude//dynsa : : :
alias LM : Base.cpp Implementation.cpp Joint.cpp Ken.cpp MultiFactor.cpp Remote.cpp SingleFactor.cpp ORLM.o
../../lm//kenlm ..//headers $(dependencies) ;
-
+alias macros : : : : $(lmmacros) ;
diff --git a/moses/LM/Ken.cpp b/moses/LM/Ken.cpp
index edfbc7f75..cf5e0d061 100644
--- a/moses/LM/Ken.cpp
+++ b/moses/LM/Ken.cpp
@@ -66,7 +66,7 @@ struct KenLMState : public FFState {
template <class Model> class LanguageModelKen : public LanguageModel
{
public:
- LanguageModelKen(const std::string &description, const std::string &line, const std::string &file, FactorType factorType, bool lazy);
+ LanguageModelKen(const std::string &line, const std::string &file, FactorType factorType, bool lazy);
const FFState *EmptyHypothesisState(const InputType &/*input*/) const {
KenLMState *ret = new KenLMState();
@@ -137,8 +137,8 @@ private:
std::vector<lm::WordIndex> &m_mapping;
};
-template <class Model> LanguageModelKen<Model>::LanguageModelKen(const std::string &description, const std::string &line, const std::string &file, FactorType factorType, bool lazy)
-  :LanguageModel(description, line)
+template <class Model> LanguageModelKen<Model>::LanguageModelKen(const std::string &line, const std::string &file, FactorType factorType, bool lazy)
+  :LanguageModel("KENLM", line)
,m_factorType(factorType)
{
lm::ngram::Config config;
@@ -351,7 +351,7 @@ bool LanguageModelKen::IsUseable(const FactorMask &mask) const
} // namespace
-LanguageModel *ConstructKenLM(const std::string &description, const std::string &line)
+LanguageModel *ConstructKenLM(const std::string &line)
{
FactorType factorType;
string filePath;
@@ -375,10 +375,10 @@ LanguageModel *ConstructKenLM(const std::string &description, const std::string
}
}
- return ConstructKenLM(description, line, filePath, factorType, lazy);
+ return ConstructKenLM(line, filePath, factorType, lazy);
}
-LanguageModel *ConstructKenLM(const std::string &description, const std::string &line, const std::string &file, FactorType factorType, bool lazy)
+LanguageModel *ConstructKenLM(const std::string &line, const std::string &file, FactorType factorType, bool lazy)
{
try {
lm::ngram::ModelType model_type;
@@ -386,23 +386,23 @@ LanguageModel *ConstructKenLM(const std::string &description, const std::string
switch(model_type) {
case lm::ngram::PROBING:
- return new LanguageModelKen<lm::ngram::ProbingModel>(description, line, file, factorType, lazy);
+ return new LanguageModelKen<lm::ngram::ProbingModel>(line, file, factorType, lazy);
case lm::ngram::REST_PROBING:
- return new LanguageModelKen<lm::ngram::RestProbingModel>(description, line, file, factorType, lazy);
+ return new LanguageModelKen<lm::ngram::RestProbingModel>(line, file, factorType, lazy);
case lm::ngram::TRIE:
- return new LanguageModelKen<lm::ngram::TrieModel>(description, line, file, factorType, lazy);
+ return new LanguageModelKen<lm::ngram::TrieModel>(line, file, factorType, lazy);
case lm::ngram::QUANT_TRIE:
- return new LanguageModelKen<lm::ngram::QuantTrieModel>(description, line, file, factorType, lazy);
+ return new LanguageModelKen<lm::ngram::QuantTrieModel>(line, file, factorType, lazy);
case lm::ngram::ARRAY_TRIE:
- return new LanguageModelKen<lm::ngram::ArrayTrieModel>(description, line, file, factorType, lazy);
+ return new LanguageModelKen<lm::ngram::ArrayTrieModel>(line, file, factorType, lazy);
case lm::ngram::QUANT_ARRAY_TRIE:
- return new LanguageModelKen<lm::ngram::QuantArrayTrieModel>(description, line, file, factorType, lazy);
+ return new LanguageModelKen<lm::ngram::QuantArrayTrieModel>(line, file, factorType, lazy);
default:
std::cerr << "Unrecognized kenlm model type " << model_type << std::endl;
abort();
}
} else {
- return new LanguageModelKen<lm::ngram::ProbingModel>(description, line, file, factorType, lazy);
+ return new LanguageModelKen<lm::ngram::ProbingModel>(line, file, factorType, lazy);
}
} catch (std::exception &e) {
std::cerr << e.what() << std::endl;
diff --git a/moses/LM/Ken.h b/moses/LM/Ken.h
index 360ac7be8..7df38af75 100644
--- a/moses/LM/Ken.h
+++ b/moses/LM/Ken.h
@@ -31,10 +31,10 @@ namespace Moses
class LanguageModel;
-LanguageModel *ConstructKenLM(const std::string &description, const std::string &line);
+LanguageModel *ConstructKenLM(const std::string &line);
//! This will also load. Returns a templated KenLM class
-LanguageModel *ConstructKenLM(const std::string &description, const std::string &line, const std::string &file, FactorType factorType, bool lazy);
+LanguageModel *ConstructKenLM(const std::string &line, const std::string &file, FactorType factorType, bool lazy);
} // namespace Moses
diff --git a/moses/StaticData.cpp b/moses/StaticData.cpp
index 2404411ff..81284afef 100644
--- a/moses/StaticData.cpp
+++ b/moses/StaticData.cpp
@@ -22,14 +22,11 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include <string>
#include "util/check.hh"
-#include "moses/TranslationModel/PhraseDictionaryTreeAdaptor.h"
-#include "moses/TranslationModel/RuleTable/PhraseDictionaryOnDisk.h"
-#include "moses/TranslationModel/PhraseDictionaryMemory.h"
-#include "moses/TranslationModel/CompactPT/PhraseDictionaryCompact.h"
-#include "moses/TranslationModel/PhraseDictionaryMultiModel.h"
-#include "moses/TranslationModel/PhraseDictionaryMultiModelCounts.h"
-#include "moses/TranslationModel/RuleTable/PhraseDictionaryALSuffixArray.h"
-#include "moses/TranslationModel/PhraseDictionaryDynSuffixArray.h"
+
+#include "moses/FF/Factory.h"
+#include "moses/FF/WordPenaltyProducer.h"
+#include "moses/FF/UnknownWordPenaltyProducer.h"
+#include "moses/FF/InputFeature.h"
#include "DecodeStepTranslation.h"
#include "DecodeStepGeneration.h"
@@ -46,37 +43,6 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include "InputFileStream.h"
#include "ScoreComponentCollection.h"
-#include "moses/FF/BleuScoreFeature.h"
-#include "moses/FF/TargetWordInsertionFeature.h"
-#include "moses/FF/SourceWordDeletionFeature.h"
-#include "moses/FF/GlobalLexicalModel.h"
-#include "moses/FF/GlobalLexicalModelUnlimited.h"
-#include "moses/FF/UnknownWordPenaltyProducer.h"
-#include "moses/FF/WordTranslationFeature.h"
-#include "moses/FF/TargetBigramFeature.h"
-#include "moses/FF/TargetNgramFeature.h"
-#include "moses/FF/PhraseBoundaryFeature.h"
-#include "moses/FF/PhrasePairFeature.h"
-#include "moses/FF/PhraseLengthFeature.h"
-#include "moses/FF/DistortionScoreProducer.h"
-#include "moses/FF/WordPenaltyProducer.h"
-#include "moses/FF/InputFeature.h"
-#include "moses/FF/PhrasePenalty.h"
-#include "moses/FF/OSM-Feature/OpSequenceModel.h"
-
-#include "LM/Ken.h"
-#ifdef LM_IRST
-#include "LM/IRST.h"
-#endif
-
-#ifdef LM_SRI
-#include "LM/SRI.h"
-#endif
-
-#ifdef HAVE_SYNLM
-#include "SyntacticLanguageModel.h"
-#endif
-
#ifdef WITH_THREADS
#include
#endif
@@ -567,6 +533,7 @@ bool StaticData::LoadData(Parameter *parameter)
map<string, int> featureIndexMap;
const vector<string> &features = m_parameter->GetParam("feature");
+ FeatureRegistry registry;
for (size_t i = 0; i < features.size(); ++i) {
const string &line = Trim(features[i]);
cerr << "line=" << line << endl;
@@ -576,151 +543,8 @@ bool StaticData::LoadData(Parameter *parameter)
vector<string> toks = Tokenize(line);
const string &feature = toks[0];
- //int featureIndex = GetFeatureIndex(featureIndexMap, feature);
- if (feature == "GlobalLexicalModel") {
- GlobalLexicalModel *model = new GlobalLexicalModel(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "GlobalLexicalModelUnlimited") {
- GlobalLexicalModelUnlimited *model = NULL; //new GlobalLexicalModelUnlimited(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "SourceWordDeletionFeature") {
- SourceWordDeletionFeature *model = new SourceWordDeletionFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- //SetWeights(model, weights);
- } else if (feature == "TargetWordInsertionFeature") {
- TargetWordInsertionFeature *model = new TargetWordInsertionFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- //SetWeights(model, weights);
- } else if (feature == "PhraseBoundaryFeature") {
- PhraseBoundaryFeature *model = new PhraseBoundaryFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- //SetWeights(model, weights);
- } else if (feature == "PhraseLengthFeature") {
- PhraseLengthFeature *model = new PhraseLengthFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- //SetWeights(model, weights);
- } else if (feature == "WordTranslationFeature") {
- WordTranslationFeature *model = new WordTranslationFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- //SetWeights(model, weights);
- } else if (feature == "TargetBigramFeature") {
- TargetBigramFeature *model = new TargetBigramFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- //SetWeights(model, weights);
- } else if (feature == "TargetNgramFeature") {
- TargetNgramFeature *model = new TargetNgramFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- //SetWeights(model, weights);
- } else if (feature == "PhrasePairFeature") {
- PhrasePairFeature *model = new PhrasePairFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- //SetWeights(model, weights);
- } else if (feature == "LexicalReordering") {
- LexicalReordering *model = new LexicalReordering(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "KENLM") {
- LanguageModel *model = ConstructKenLM(feature, line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- }
-#ifdef LM_IRST
- else if (feature == "IRSTLM") {
- LanguageModelIRST *model = new LanguageModelIRST(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- }
-#endif
-#ifdef LM_SRI
- else if (feature == "SRILM") {
- LanguageModelSRI *model = new LanguageModelSRI(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- }
-#endif
- else if (feature == "Generation") {
- GenerationDictionary *model = new GenerationDictionary(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "BleuScoreFeature") {
- BleuScoreFeature *model = new BleuScoreFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "Distortion") {
- DistortionScoreProducer *model = new DistortionScoreProducer(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "WordPenalty") {
- WordPenaltyProducer *model = new WordPenaltyProducer(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "UnknownWordPenalty") {
- UnknownWordPenaltyProducer *model = new UnknownWordPenaltyProducer(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- if (weights.size() == 0)
- weights.push_back(1.0f);
- SetWeights(model, weights);
- } else if (feature == "InputFeature") {
- InputFeature *model = new InputFeature(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
-
- } else if (feature == "PhraseDictionaryBinary") {
- PhraseDictionaryTreeAdaptor* model = new PhraseDictionaryTreeAdaptor(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "PhraseDictionaryOnDisk") {
- PhraseDictionaryOnDisk* model = new PhraseDictionaryOnDisk(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "PhraseDictionaryMemory") {
- PhraseDictionaryMemory* model = new PhraseDictionaryMemory(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "PhraseDictionaryCompact") {
- PhraseDictionaryCompact* model = new PhraseDictionaryCompact(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "PhraseDictionaryMultiModel") {
- PhraseDictionaryMultiModel* model = new PhraseDictionaryMultiModel(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "PhraseDictionaryMultiModelCounts") {
- PhraseDictionaryMultiModelCounts* model = new PhraseDictionaryMultiModelCounts(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "PhraseDictionaryALSuffixArray") {
- PhraseDictionaryALSuffixArray* model = new PhraseDictionaryALSuffixArray(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "PhraseDictionaryDynSuffixArray") {
- PhraseDictionaryDynSuffixArray* model = new PhraseDictionaryDynSuffixArray(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "OpSequenceModel") {
- OpSequenceModel* model = new OpSequenceModel(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- } else if (feature == "PhrasePenalty") {
- PhrasePenalty* model = new PhrasePenalty(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- }
-
-#ifdef HAVE_SYNLM
- else if (feature == "SyntacticLanguageModel") {
- SyntacticLanguageModel *model = new SyntacticLanguageModel(line);
- vector weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
- SetWeights(model, weights);
- }
-#endif
- else {
- UserMessage::Add("Unknown feature function:" + feature);
- return false;
- }
+ registry.Construct(feature, line);
}
OverrideFeatures();