Mirror of https://github.com/moses-smt/mosesdecoder.git (synced 2024-12-25 21:03:22 +03:00)
beautify
This commit is contained in:
parent 3b76143072
commit 21ff6d4831
@@ -58,7 +58,7 @@ int main(int argc, char **argv)
   PhraseDictionaryCompact pdc("input-factor=0 output-factor=0 num-features=5 path=" + ttable);
   pdc.Load();
 
   std::string line;
   while(getline(std::cin, line)) {
     Phrase sourcePhrase;
@@ -45,18 +45,18 @@ LanguageModelIRST::LanguageModelIRST(const std::string &line)
   const StaticData &staticData = StaticData::Instance();
   int threadCount = staticData.ThreadCount();
   if (threadCount != 1) {
-    throw runtime_error("Error: " + SPrint(threadCount) + " number of threads specified but IRST LM is not threadsafe.");
+    throw runtime_error("Error: " + SPrint(threadCount) + " number of threads specified but IRST LM is not threadsafe.");
   }
 
   for (size_t i = 0; i < m_args.size(); ++i) {
     const vector<string> &args = m_args[i];
 
     if (args[0] == "factor") {
-      m_factorType = Scan<FactorType>(args[1]);
+      m_factorType = Scan<FactorType>(args[1]);
     } else if (args[0] == "order") {
-      m_nGramOrder = Scan<size_t>(args[1]);
+      m_nGramOrder = Scan<size_t>(args[1]);
     } else if (args[0] == "path") {
-      m_filePath = args[1];
+      m_filePath = args[1];
     } else {
       throw "Unknown argument " + args[0];
     }
@@ -80,7 +80,7 @@ void Manager::ProcessSentence()
 {
   // reset statistics
   ResetSentenceStats(m_source);
 
   // check if alternate weight setting is used
   // this is not thread safe! it changes StaticData
   if (StaticData::Instance().GetHasAlternateWeightSettings()) {
@@ -89,8 +89,7 @@ void Manager::ProcessSentence()
     std::cerr << "sentence specifies weight setting\n";
     std::cerr << "calling SetWeightSetting( " << m_source.GetWeightSetting() << ")\n";
     StaticData::Instance().SetWeightSetting(m_source.GetWeightSetting());
-  }
-  else {
+  } else {
     StaticData::Instance().SetWeightSetting("default");
   }
 }
@@ -113,8 +113,7 @@ int Sentence::Read(std::istream& in,const std::vector<FactorType>& factorOrder)
   if (meta.find("weight-setting") != meta.end()) {
     this->SetWeightSetting(meta["weight-setting"]);
     this->SetSpecifiesWeightSetting(true);
-  }
-  else {
+  } else {
     this->SetSpecifiesWeightSetting(false);
   }
 
@@ -691,9 +691,9 @@ bool StaticData::LoadData(Parameter *parameter)
     SetWeights(model, weights);
   } else if (feature == "PhraseDictionaryDynSuffixArray") {
     PhraseDictionaryDynSuffixArray* model = new PhraseDictionaryDynSuffixArray(line);
-    vector<float> weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
-    SetWeights(model, weights);
-  }
+    vector<float> weights = m_parameter->GetWeights(model->GetScoreProducerDescription());
+    SetWeights(model, weights);
+  }
 
 #ifdef HAVE_SYNLM
   else if (feature == "SyntacticLanguageModel") {
@@ -1150,8 +1150,7 @@ void StaticData::LoadFeatureFunctions()
     PhraseDictionary *pt = dynamic_cast<PhraseDictionary*>(ff);
     if (pt) {
       m_phraseDictionary.push_back(pt);
-    }
-    else {
+    } else {
       // load phrase table last. They can depend on other features
       ff->Load();
     }
@@ -1159,8 +1158,8 @@ void StaticData::LoadFeatureFunctions()
 
   // load phrase table
   for (size_t i = 0; i < m_phraseDictionary.size(); ++i) {
-    PhraseDictionary *pt = m_phraseDictionary[i];
-    pt->Load();
+    PhraseDictionary *pt = m_phraseDictionary[i];
+    pt->Load();
   }
 
 }
@@ -1194,9 +1193,10 @@ bool StaticData::CheckWeights() const
   return true;
 }
 
-void StaticData::ProcessAlternateWeightSettings() {
+void StaticData::ProcessAlternateWeightSettings()
+{
   const vector<string> &weightSpecification = m_parameter->GetParam("alternate-weight-setting");
 
   // get mapping from feature names to feature functions
   map<string,FeatureFunction*> nameToFF;
   const std::vector<FeatureFunction*> &ffs = FeatureFunction::GetFeatureFunctions();
@@ -1229,8 +1229,8 @@ void StaticData::ProcessAlternateWeightSettings() {
         }
       }
     }
-
-    // weight lines
+
+    // weight lines
     else {
       CHECK(currentId != "");
       vector<string> tokens = Tokenize(weightSpecification[i]);
@@ -1248,11 +1248,10 @@ void StaticData::ProcessAlternateWeightSettings() {
         // check if a valid nane
         map<string,FeatureFunction*>::iterator ffLookUp = nameToFF.find(name);
         if (ffLookUp == nameToFF.end()) {
-          cerr << "ERROR: alternate weight setting " << currentId << " specifies weight(s) for " << name << " but there is no such feature function" << endl;
-          hasErrors = true;
-        }
-        else {
-          m_weightSetting[ currentId ]->Assign( nameToFF[name], weights);
+          cerr << "ERROR: alternate weight setting " << currentId << " specifies weight(s) for " << name << " but there is no such feature function" << endl;
+          hasErrors = true;
+        } else {
+          m_weightSetting[ currentId ]->Assign( nameToFF[name], weights);
         }
       }
     }
@@ -206,7 +206,7 @@ protected:
 
   int m_threadCount;
   long m_startTranslationId;
 
   // alternate weight settings
   std::map< std::string, ScoreComponentCollection* > m_weightSetting;
 
@@ -52,7 +52,7 @@ void PhraseDictionaryCompact::Load()
   std::string suffix = ".minphr";
   if(tFilePath.substr(tFilePath.length() - suffix.length(), suffix.length()) == suffix) {
     if(!FileExists(tFilePath)) {
-      throw runtime_error("Error: File " + tFilePath + " does not exit.");
+      throw runtime_error("Error: File " + tFilePath + " does not exit.");
       exit(1);
     }
   } else {
@@ -9,22 +9,19 @@ using namespace std;
 namespace Moses
 {
 PhraseDictionaryDynSuffixArray::PhraseDictionaryDynSuffixArray(const std::string &line)
-  :PhraseDictionary("PhraseDictionaryDynSuffixArray", line)
-  ,m_biSA(new BilingualDynSuffixArray())
+  :PhraseDictionary("PhraseDictionaryDynSuffixArray", line)
+  ,m_biSA(new BilingualDynSuffixArray())
 {
 
   for (size_t i = 0; i < m_args.size(); ++i) {
     const vector<string> &args = m_args[i];
     if (args[0] == "source") {
       m_source = args[1];
-    }
-    else if (args[0] == "target") {
+    } else if (args[0] == "target") {
       m_target = args[1];
-    }
-    else if (args[0] == "alignment") {
+    } else if (args[0] == "alignment") {
       m_alignments = args[1];
-    }
-    else {
+    } else {
       //throw "Unknown argument " + args[0];
     }
   }
@@ -52,8 +52,8 @@ namespace Moses
 {
 
 PhraseDictionaryFuzzyMatch::PhraseDictionaryFuzzyMatch(const std::string &line)
-  :PhraseDictionary("PhraseDictionaryFuzzyMatch", line)
-  ,m_FuzzyMatchWrapper(NULL)
+  :PhraseDictionary("PhraseDictionaryFuzzyMatch", line)
+  ,m_FuzzyMatchWrapper(NULL)
 {}
 
 PhraseDictionaryFuzzyMatch::~PhraseDictionaryFuzzyMatch()
@@ -46,7 +46,7 @@ void RuleTableTrie::Load()
   bool ret = loader->Load(m_input, m_output, m_filePath, m_tableLimit,
                           *this);
   if (!ret) {
-    throw runtime_error("Error: Loading " + m_filePath);
+    throw runtime_error("Error: Loading " + m_filePath);
   }
 }
 
@@ -389,8 +389,8 @@ void TranslationOptionCollection::CreateTranslationOptions()
             (endPos-startPos+1 > decodeGraphBackoff[graphInd] || // size exceeds backoff limit or ...
              m_collection[startPos][endPos-startPos].size() > 0)) { // already covered
           VERBOSE(3,"No backoff to graph " << graphInd << " for span [" << startPos << ";" << endPos << "]");
-          VERBOSE(3,", length limit: " << decodeGraphBackoff[graphInd]);
-          VERBOSE(3,", found so far: " << m_collection[startPos][endPos-startPos].size() << endl);
+          VERBOSE(3,", length limit: " << decodeGraphBackoff[graphInd]);
+          VERBOSE(3,", found so far: " << m_collection[startPos][endPos-startPos].size() << endl);
           // do not create more options
           continue;
         }
@@ -510,7 +510,7 @@ void TranslationOptionCollection::CreateTranslationOptionsForRange(
   int indexStep = 1;
 
   for (++iterStep; iterStep != decodeGraph.end() ; ++iterStep, ++indexStep) {
-    const DecodeStep &decodeStep = **iterStep;
+    const DecodeStep &decodeStep = **iterStep;
     PartialTranslOptColl* newPtoc = new PartialTranslOptColl;
 
     // go thru each intermediate trans opt just created