verbose=0: nothing goes to stderr except for real, aborting errors

git-svn-id: https://mosesdecoder.svn.sourceforge.net/svnroot/mosesdecoder/trunk@1232 1f5c12ca-751b-0410-a591-d2e778427230
hieuhoang1972 2007-02-22 23:44:38 +00:00
parent 6c5cb3a6ec
commit c58393a4b4
15 changed files with 54 additions and 98 deletions
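For context: the rule in the commit message is enforced through a small set of stderr macros. The sketch below is a minimal, self-contained illustration of that plumbing, not the verbatim moses/src definitions; in the real code the level check goes through StaticData's configured verbose level rather than the g_verboseLevel stand-in used here.

	// Minimal sketch of the verbosity plumbing this commit assumes (illustrative only).
	#include <cstdlib>
	#include <iostream>

	static int g_verboseLevel = 0;  // stand-in for the verbosity StaticData holds

	// Unconditional write to stderr; at verbose=0 this is reserved for real,
	// aborting errors (paths that end in abort()).
	#define TRACE_ERR(str) do { std::cerr << str; } while (0)

	// Run the following statement/block only at the given verbosity or higher.
	#define IFVERBOSE(level) if (g_verboseLevel >= (level))

	// Verbosity-gated trace: silent unless the configured level reaches `level`.
	#define VERBOSE(level, str) IFVERBOSE(level) { TRACE_ERR(str); }

	int main()
	{
		VERBOSE(1, "Translating: example sentence" << std::endl); // progress, silent at verbose=0
		VERBOSE(2, "adding unk source phrase\n");                 // debug chatter, needs verbose>=2
		TRACE_ERR("fatal: offending hypothesis" << std::endl);    // fatal diagnostics always print
		std::abort();                                             // real errors abort, not exit(0)
	}

Under this scheme, verbose=0 leaves stderr empty except for the TRACE_ERR diagnostics that precede an abort(); verbose=1 adds per-sentence progress and the IFVERBOSE(1)-guarded PrintUserTime reports seen in the diffs below; verbose=2 restores the chattier debug output such as "adding unk source phrase".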

View File

@@ -187,7 +187,7 @@ void IOStream::OutputBestHypo(const Hypothesis *hypo, long /*translationId*/, bo
 {
 	if (hypo != NULL)
 	{
-		VERBOSE(2,"BEST TRANSLATION: " << *hypo << endl);
+		VERBOSE(1,"BEST TRANSLATION: " << *hypo << endl);
 		VERBOSE(3,"Best path: ");
 		Backtrack(hypo);
 		VERBOSE(3,"0" << std::endl);
@@ -200,7 +200,7 @@ void IOStream::OutputBestHypo(const Hypothesis *hypo, long /*translationId*/, bo
 	}
 	else
 	{
-		TRACE_ERR("NO BEST TRANSLATION" << endl);
+		VERBOSE(1, "NO BEST TRANSLATION" << endl);
 	}
 }

View File

@@ -71,10 +71,13 @@ bool readInput(IOStream &ioStream, int inputType, InputType*& source)
 int main(int argc, char* argv[])
 {
+	IFVERBOSE(1)
+	{
 	TRACE_ERR("command: ");
 	for(int i=0;i<argc;++i) TRACE_ERR(argv[i]<<" ");
 	TRACE_ERR(endl);
+	}
 	// load data structures
 	Parameter *parameter = new Parameter();
@@ -115,6 +118,7 @@ int main(int argc, char* argv[])
 	while(readInput(*ioStream,staticData.GetInputType(),source))
 	{
 		// note: source is only valid within this while loop!
+		IFVERBOSE(1)
 		ResetUserTime();
 		VERBOSE(2,"\nTRANSLATING(" << ++lineCount << "): " << *source);
@@ -154,6 +158,7 @@ int main(int argc, char* argv[])
 	delete ioStream;
+	IFVERBOSE(1)
 	PrintUserTime("End.");
 #ifdef HACK_EXIT
#ifdef HACK_EXIT
@@ -191,6 +196,7 @@ IOStream *GetIODevice(const StaticData &staticData)
 	}
 	ioStream->ResetTranslationId();
+	IFVERBOSE(1)
 	PrintUserTime("Created input-output object");
 	return ioStream;

View File

@@ -42,16 +42,13 @@ void PrintTranslationAnalysis(std::ostream &os, const Hypothesis* hypo)
 	std::vector<std::vector<unsigned int> >& lmstats = *(*tpi)->GetLMStats();
 	std::vector<std::vector<unsigned int> >::iterator i = lmstats.begin();
 	std::vector<unsigned int>::iterator acc = lmAcc.begin();
-	// TRACE_ERR("\n");
 	for (; i != lmstats.end(); ++i, ++acc) {
 		std::vector<unsigned int>::iterator j = i->begin();
 		lmCalls += i->size();
-		// TRACE_ERR("lm: ");
 		for (; j != i->end(); ++j) {
-			// TRACE_ERR(*j << " ");
 			(*acc) += *j;
 		}
-		// TRACE_ERR(" (total=" << *acc << ", lmcalls=" << lmCalls << ")" << std::endl);
 	}
 }

View File

@@ -66,7 +66,7 @@ bool ConfusionNet::ReadF(std::istream& in,
 			const std::vector<FactorType>& factorOrder,
 			int format)
 {
-	TRACE_ERR( "read confusion net with format "<<format<<"\n");
+	VERBOSE(1, "read confusion net with format "<<format<<"\n");
 	switch(format)
 	{
 	case 0: return ReadFormat0(in,factorOrder);
@@ -113,12 +113,12 @@ bool ConfusionNet::ReadFormat0(std::istream& in,
 		String2Word(word,w,factorOrder);
 		if(prob<0.0)
 		{
-			TRACE_ERR("WARN: negative prob: "<<prob<<" ->set to 0.0\n");
+			VERBOSE(1, "WARN: negative prob: "<<prob<<" ->set to 0.0\n");
 			prob=0.0;
 		}
 		else if (prob>1.0)
 		{
-			TRACE_ERR("WARN: prob > 1.0 : "<<prob<<" -> set to 1.0\n");
+			VERBOSE(1, "WARN: prob > 1.0 : "<<prob<<" -> set to 1.0\n");
 			prob=1.0;
 		}
 		col.push_back(std::make_pair(w,std::max(static_cast<float>(log(prob)),
@@ -151,7 +151,7 @@ bool ConfusionNet::ReadFormat1(std::istream& in,
 	if(is>>word>>prob) {
 		data[i][j].second = (float) log(prob);
 		if(data[i][j].second<0) {
-			TRACE_ERR("WARN: neg costs: "<<data[i][j].second<<" -> set to 0\n");
+			VERBOSE(1, "WARN: neg costs: "<<data[i][j].second<<" -> set to 0\n");
 			data[i][j].second=0.0;}
 		String2Word(word,data[i][j].first,factorOrder);
 	} else return 0;

View File

@@ -117,7 +117,7 @@ void HypothesisCollection::AddPrune(Hypothesis *hypo)
 	{
 		iterExisting = m_hypos.find(hypo);
 		TRACE_ERR("Offending hypo = " << **iterExisting << endl);
-		assert(false);
+		abort();
 	}
 	return;
 }
@@ -162,7 +162,6 @@ void HypothesisCollection::PruneToSize(size_t newSize)
 	// and remember the threshold
 	float scoreThreshold = bestScores.top();
-	// TRACE_ERR( "threshold: " << scoreThreshold << endl);
 	// delete all hypos under score threshold
 	iter = m_hypos.begin();

View File

@@ -3,6 +3,7 @@
 #include "FactorCollection.h"
 #include "NGramNode.h"
 #include "InputFileStream.h"
+#include "StaticData.h"
 using namespace std;
@@ -17,7 +18,7 @@ bool LanguageModelInternal::Load(const std::string &filePath
 				, size_t nGramOrder)
 {
 	assert(nGramOrder <= 3);
-	TRACE_ERR( "Loading Internal LM: " << filePath << endl);
+	VERBOSE(1, "Loading Internal LM: " << filePath << endl);
 	FactorCollection &factorCollection = FactorCollection::Instance();
@@ -34,7 +35,7 @@ bool LanguageModelInternal::Load(const std::string &filePath
 	m_sentenceEndArray[m_factorType] = m_sentenceEnd;
 	// read in file
-	TRACE_ERR( filePath << endl);
+	VERBOSE(1, filePath << endl);
 	InputFileStream inFile(filePath);

View File

@@ -87,11 +87,6 @@ public:
 		{
 			return 0;
 		}
-		/*
-		for (size_t i = 0 ; i < contextFactor.size() ; ++i)
-			TRACE_ERR( contextFactor[i] << " ";
-		TRACE_ERR( std::endl;
-		*/
 		// joint context for internal LM
 		std::vector<const Word*> jointContext;
@@ -120,11 +115,6 @@ public:
 			jointContext.push_back(jointWord);
 		}
-		/*
-		for (size_t i = 0 ; i < chunkContext.size() ; ++i)
-			TRACE_ERR(chunkContext[i] << " ");
-		TRACE_ERR(std::endl);
-		*/
 		// calc score on chunked phrase
 		float ret = m_lmImpl->GetValue(jointContext, finalState, len);

View File

@@ -78,11 +78,7 @@ public:
 		{
 			return 0;
 		}
-		/*
-		for (size_t i = 0 ; i < contextFactor.size() ; ++i)
-			TRACE_ERR( contextFactor[i] << " ";
-		TRACE_ERR( std::endl;
-		*/
 		// only process context where last word is a word we want
 		const Factor *factor = (*contextFactor.back())[m_factorType];
 		std::string strWord = factor->GetString();
@@ -113,11 +109,7 @@ public:
 		// create context factor the right way round
 		std::reverse(chunkContext.begin(), chunkContext.end());
-		/*
-		for (size_t i = 0 ; i < chunkContext.size() ; ++i)
-			TRACE_ERR( chunkContext[i] << " ";
-		TRACE_ERR( std::endl;
-		*/
 		// calc score on chunked phrase
 		float ret = m_lmImpl->GetValue(chunkContext, finalState, len);

View File

@@ -87,7 +87,7 @@ void LexicalReordering::LoadFile()
 	if (probs.size() != m_direction.size() * m_numOrientationTypes) {
 		TRACE_ERR( "found " << probs.size() << " probabilities, expected "
 			<< m_direction.size() * m_numOrientationTypes << endl);
-		exit(0);
+		abort();
 	}
 	std::vector<float> scv(probs.size());
 	std::transform(probs.begin(),probs.end(),probs.begin(),TransformScore);
@@ -234,14 +234,6 @@ std::vector<float> LexicalReordering::CalcScore(Hypothesis *hypothesis)
 		else {
 			score[ orientation + i * m_numOrientationTypes ] = value;
 		}
-		// IFVERBOSE(3) {
-		//	TRACE_ERR( "\tdistortion type " << orientation << " =>");
-		//	for(unsigned int j=0;j<score.size();j++) {
-		//		TRACE_ERR( " " << score[j]);
-		//	}
-		//	TRACE_ERR( endl);
-		// }
 	}
 	}
 	return score;

View File

@@ -47,7 +47,7 @@ Manager::Manager(InputType const& source)
 {
 	const StaticData &staticData = StaticData::Instance();
-	TRACE_ERR("Translating: " << m_source << endl);
+	VERBOSE(1, "Translating: " << m_source << endl);
 	std::vector < HypothesisCollection >::iterator iterStack;
 	for (iterStack = m_hypoStack.begin() ; iterStack != m_hypoStack.end() ; ++iterStack)
 	{
@@ -60,7 +60,7 @@ Manager::Manager(InputType const& source)
 Manager::~Manager()
 {
 	delete m_possibleTranslations;
-	TRACE_ERR("Finished translating" << endl);
+	VERBOSE(1, "Finished translating" << endl);
 }
 /**

View File

@@ -100,13 +100,11 @@ template<typename T> class ObjectPool {
 	// the block size is doubled every time
 	// if allocation fails, block size is reduced by 1/4
 	void allocate() {
-		// TRACE_ERR("start "<<name<<" - objectpool allocate "<<N<<"\n");
 		try {
 			if(dataSize.empty()) dataSize.push_back(N);
 			else dataSize.push_back(dataSize.back()*2);
 			void *m=malloc(sizeof(Object)*dataSize.back());
 			while(!m) {
-				// TRACE_ERR("malloc failed for size "<<dataSize.back()<<"!\n");
 				dataSize.back()=static_cast<size_t>(dataSize.back()*0.75);
 				m=malloc(sizeof(Object)*dataSize.back());
 			}

View File

@@ -115,7 +115,8 @@ public:
 	void AddEquivPhrase(const Phrase &source, const TargetPhrase &targetPhrase)
 	{
 		assert(GetTargetPhraseCollection(source)==0);
-		TRACE_ERR( "adding unk source phrase "<<source<<"\n");
+		VERBOSE(2, "adding unk source phrase "<<source<<"\n");
 		std::pair<MapSrc2Tgt::iterator,bool> p
 			=m_cache.insert(std::make_pair(source,static_cast<TargetPhraseCollection const*>(0)));
 		if(p.second || p.first->second==0)
@@ -125,7 +126,7 @@ public:
 			p.first->second=ptr;
 			m_tgtColls.push_back(ptr);
 		}
-		else TRACE_ERR("WARNING: you added an already existing phrase!\n");
+		else VERBOSE(2, "WARNING: you added an already existing phrase!\n");
 	}
 	TargetPhraseCollection const*
@@ -352,8 +353,6 @@ public:
 			State curr(stack.back());
 			stack.pop_back();
-			//TRACE_ERR("processing state "<<curr<<" stack size: "<<stack.size()<<"\n");
 			assert(curr.end()<srcSize);
 			const ConfusionNet::Column &currCol=src[curr.end()];
 			// in a given column, loop over all possibilities

View File

@@ -68,7 +68,6 @@ public:
 	void DetachAll()
 	{
 		m_list.clear();
-		// TRACE_ERR( "clearing out list of " << m_list.size() << " partial translation options\n";
 	}
 	/** return number of pruned partial hypotheses */

View File

@@ -91,9 +91,6 @@ public:
 	//! produced by sp
 	void PlusEquals(const ScoreProducer* sp, const std::vector<float>& scores)
 	{
-		if(scores.size() != sp->GetNumScoreComponents()) TRACE_ERR("ERROR: "<<scores.size()<<" "<<sp->GetNumScoreComponents()<<"\n");
 		assert(scores.size() == sp->GetNumScoreComponents());
 		size_t i = m_sim->GetBeginIndex(sp->GetScoreBookkeepingID());
 		for (std::vector<float>::const_iterator vi = scores.begin();

View File

@@ -161,7 +161,7 @@ bool StaticData::LoadData(Parameter *parameter)
 	SetBooleanParameter( &m_computeLMBackoffStats, "lmstats", false );
 	if (m_computeLMBackoffStats &&
 	    ! m_isDetailedTranslationReportingEnabled) {
-		TRACE_ERR( "-lmstats implies -translation-details, enabling" << std::endl);
+		VERBOSE(1, "-lmstats implies -translation-details, enabling" << std::endl);
 		m_isDetailedTranslationReportingEnabled = true;
 	}
@@ -188,7 +188,6 @@ bool StaticData::LoadData(Parameter *parameter)
 		: -1;
 	m_useDistortionFutureCosts = (m_parameter->GetParam("use-distortion-future-costs").size() > 0)
 		? Scan<bool>(m_parameter->GetParam("use-distortion-future-costs")[0]) : false;
-	//TRACE_ERR( "using distortion future costs? "<<UseDistortionFutureCosts()<<"\n");
 	m_beamThreshold = (m_parameter->GetParam("beam-threshold").size() > 0) ?
 		TransformScore(Scan<float>(m_parameter->GetParam("beam-threshold")[0]))
@@ -196,7 +195,6 @@ bool StaticData::LoadData(Parameter *parameter)
 	m_maxNoTransOptPerCoverage = (m_parameter->GetParam("max-trans-opt-per-coverage").size() > 0)
 		? Scan<size_t>(m_parameter->GetParam("max-trans-opt-per-coverage")[0]) : DEFAULT_MAX_TRANS_OPT_SIZE;
-	//TRACE_ERR( "max translation options per coverage span: "<<m_maxNoTransOptPerCoverage<<"\n");
 	m_maxNoPartTransOpt = (m_parameter->GetParam("max-partial-trans-opt").size() > 0)
 		? Scan<size_t>(m_parameter->GetParam("max-partial-trans-opt")[0]) : DEFAULT_MAX_PART_TRANS_OPT_SIZE;
@@ -414,15 +412,10 @@ bool StaticData::LoadLexicalReorderingModel()
 		}
 		assert(m_lexWeights.size() == numWeightsInTable); //the end result should be a weight vector of the same size as the user configured model
-		// TRACE_ERR( "distortion-weights: ");
-		//for(size_t weight=0; weight<m_lexWeights.size(); weight++)
-		//{
-		//	TRACE_ERR( m_lexWeights[weight] << "\t");
-		//}
-		//TRACE_ERR( endl);
 		// loading the file
 		std::string filePath= specification[3];
+		IFVERBOSE(1)
 		PrintUserTime(string("Start loading distortion table ") + filePath);
 		m_reorderModels.push_back(new LexicalReordering(filePath, orientation, direction, condition, m_lexWeights, input, output));
 	}
@@ -437,15 +430,10 @@ bool StaticData::LoadLanguageModels()
 	// weights
 	vector<float> weightAll = Scan<float>(m_parameter->GetParam("weight-l"));
-	//TRACE_ERR( "weight-l: ");
-	//
 	for (size_t i = 0 ; i < weightAll.size() ; i++)
 	{
-		// TRACE_ERR( weightAll[i] << "\t");
 		m_allWeights.push_back(weightAll[i]);
 	}
-	//TRACE_ERR( endl);
 	// initialize n-gram order for each factor. populated only by factored lm
 	const vector<string> &lmVector = m_parameter->GetParam("lmodel-file");
@@ -469,6 +457,7 @@ bool StaticData::LoadLanguageModels()
 		string &languageModelFile = token[3];
+		IFVERBOSE(1)
 		PrintUserTime(string("Start loading LanguageModel ") + languageModelFile);
 		LanguageModel *lm = LanguageModelFactory::CreateLanguageModel(
@@ -490,6 +479,7 @@ bool StaticData::LoadLanguageModels()
 	// flag indicating that language models were loaded,
 	// since phrase table loading requires their presence
 	m_fLMsLoaded = true;
+	IFVERBOSE(1)
 	PrintUserTime("Finished loading LanguageModels");
 	return true;
 }
@@ -501,12 +491,15 @@ bool StaticData::LoadGenerationTables()
 	const vector<string> &generationVector = m_parameter->GetParam("generation-file");
 	const vector<float> &weight = Scan<float>(m_parameter->GetParam("weight-generation"));
+	IFVERBOSE(1)
+	{
 	TRACE_ERR( "weight-generation: ");
 	for (size_t i = 0 ; i < weight.size() ; i++)
 	{
 		TRACE_ERR( weight[i] << "\t");
 	}
 	TRACE_ERR(endl);
+	}
 	size_t currWeightNum = 0;
 	for(size_t currDict = 0 ; currDict < generationVector.size(); currDict++)
@@ -525,7 +518,7 @@ bool StaticData::LoadGenerationTables()
 			filePath += ".gz";
 		}
-		TRACE_ERR( filePath << endl);
+		VERBOSE(1, filePath << endl);
 		m_generationDictionary.push_back(new GenerationDictionary(numFeatures, m_scoreIndexManager));
 		assert(m_generationDictionary.back() && "could not create GenerationDictionary");
@@ -562,16 +555,8 @@ bool StaticData::LoadPhraseTables()
 	// weights
 	vector<float> weightAll = Scan<float>(m_parameter->GetParam("weight-t"));
-	//TRACE_ERR("weight-t: ");
-	//for (size_t i = 0 ; i < weightAll.size() ; i++)
-	//{
-	//	TRACE_ERR(weightAll[i] << "\t");
-	//}
-	//TRACE_ERR( endl;
 	const vector<string> &translationVector = m_parameter->GetParam("ttable-file");
 	vector<size_t> maxTargetPhrase = Scan<size_t>(m_parameter->GetParam("ttable-limit"));
-	//TRACE_ERR("ttable-limits: ";copy(maxTargetPhrase.begin(),maxTargetPhrase.end(),ostream_iterator<size_t>(cerr," "));cerr<<"\n");
 	size_t index = 0;
 	size_t weightAllOffset = 0;
@@ -622,7 +607,7 @@ bool StaticData::LoadPhraseTables()
 			std::copy(weight.begin(),weight.end(),std::back_inserter(m_allWeights));
+			IFVERBOSE(1)
 			PrintUserTime(string("Start loading PhraseTable ") + filePath);
 			if (!FileExists(filePath+".binphr.idx"))
 			{
@@ -644,7 +629,7 @@ bool StaticData::LoadPhraseTables()
 			}
 			else
 			{
-				TRACE_ERR( "using binary phrase tables for idx "<<currDict<<"\n");
+				VERBOSE(1, "using binary phrase tables for idx "<<currDict<<"\n");
 				PhraseDictionaryTreeAdaptor *pd=new PhraseDictionaryTreeAdaptor(numScoreComponent,(currDict==0 ? m_numInputScores : 0));
 				if (!pd->Load(input,output,filePath,weight,
 						maxTargetPhrase[index],
@@ -661,6 +646,7 @@ bool StaticData::LoadPhraseTables()
 		}
 	}
+	IFVERBOSE(1)
 	PrintUserTime("Finished loading phrase tables");
 	return true;
 }
}