make util::StringStream more like std::stringstream

This commit is contained in:
Hieu Hoang 2015-10-03 00:33:38 +01:00
parent dcea021cd7
commit b6231e8c73
30 changed files with 91 additions and 132 deletions

View File

@ -225,8 +225,7 @@ Word *OnDiskWrapper::ConvertFromMoses(const std::vector<Moses::FactorType> &fact
bool isNonTerminal = origWord.IsNonTerminal();
Word *newWord = new Word(isNonTerminal);
string str;
util::StringStream strme(str);
util::StringStream strme;
size_t factorType = factorsVec[0];
const Moses::Factor *factor = origWord.GetFactor(factorType);
@ -246,7 +245,7 @@ Word *OnDiskWrapper::ConvertFromMoses(const std::vector<Moses::FactorType> &fact
} // for (size_t factorType
bool found;
uint64_t vocabId = m_vocab.GetVocabId(str, found);
uint64_t vocabId = m_vocab.GetVocabId(strme.str(), found);
if (!found) {
// factor not in phrase table -> phrase definitely not in. exit
delete newWord;

View File

@ -33,10 +33,9 @@ template <class Stream> void WriteCounts(Stream &out, const std::vector<uint64_t
}
size_t SizeNeededForCounts(const std::vector<uint64_t> &number) {
std::string buf;
util::StringStream stream(buf);
util::StringStream stream;
WriteCounts(stream, number);
return buf.size();
return stream.str().size();
}
bool IsEntirelyWhiteSpace(const StringPiece &line) {

View File

@ -132,13 +132,12 @@ void GlobalLexicalModelUnlimited::EvaluateWhenApplied(const Hypothesis& cur_hypo
}
if (m_biasFeature) {
string str;
util::StringStream feature(str);
util::StringStream feature;
feature << "glm_";
feature << targetString;
feature << "~";
feature << "**BIAS**";
accumulator->SparsePlusEquals(str, 1);
accumulator->SparsePlusEquals(feature.str(), 1);
}
boost::unordered_set<uint64_t> alreadyScored;
@ -167,14 +166,13 @@ void GlobalLexicalModelUnlimited::EvaluateWhenApplied(const Hypothesis& cur_hypo
if (m_sourceContext) {
if (sourceIndex == 0) {
// add <s> trigger feature for source
string str;
util::StringStream feature(str);
util::StringStream feature;
feature << "glm_";
feature << targetString;
feature << "~";
feature << "<s>,";
feature << sourceString;
accumulator->SparsePlusEquals(str, 1);
accumulator->SparsePlusEquals(feature.str(), 1);
alreadyScored.insert(sourceHash);
}
@ -186,15 +184,14 @@ void GlobalLexicalModelUnlimited::EvaluateWhenApplied(const Hypothesis& cur_hypo
contextExists = FindStringPiece(m_vocabSource, contextString ) != m_vocabSource.end();
if (m_unrestricted || contextExists) {
string str;
util::StringStream feature(str);
util::StringStream feature;
feature << "glm_";
feature << targetString;
feature << "~";
feature << sourceString;
feature << ",";
feature << contextString;
accumulator->SparsePlusEquals(str, 1);
accumulator->SparsePlusEquals(feature.str(), 1);
alreadyScored.insert(sourceHash);
}
}
@ -308,13 +305,12 @@ void GlobalLexicalModelUnlimited::EvaluateWhenApplied(const Hypothesis& cur_hypo
}
}
} else {
string str;
util::StringStream feature(str);
util::StringStream feature;
feature << "glm_";
feature << targetString;
feature << "~";
feature << sourceString;
accumulator->SparsePlusEquals(str, 1);
accumulator->SparsePlusEquals(feature.str(), 1);
alreadyScored.insert(sourceHash);
}
@ -328,8 +324,7 @@ void GlobalLexicalModelUnlimited::AddFeature(ScoreComponentCollection* accumulat
StringPiece sourceTrigger, StringPiece sourceWord,
StringPiece targetTrigger, StringPiece targetWord) const
{
string str;
util::StringStream feature(str);
util::StringStream feature;
feature << "glm_";
feature << targetTrigger;
feature << ",";
@ -338,7 +333,7 @@ void GlobalLexicalModelUnlimited::AddFeature(ScoreComponentCollection* accumulat
feature << sourceTrigger;
feature << ",";
feature << sourceWord;
accumulator->SparsePlusEquals(str, 1);
accumulator->SparsePlusEquals(feature.str(), 1);
}

View File

@ -27,8 +27,7 @@ const std::string& SparseReorderingFeatureKey::Name (const string& wordListId)
{
static string kSep = "-";
static string name;
std::string str;
util::StringStream buf(str);
util::StringStream buf;
// type side position id word reotype
if (type == Phrase) {
buf << "phr";
@ -56,7 +55,7 @@ const std::string& SparseReorderingFeatureKey::Name (const string& wordListId)
buf << word->GetString();
buf << kSep;
buf << reoType;
name = str;
name = buf.str();
return name;
}
@ -90,10 +89,9 @@ SparseReordering::SparseReordering(const map<string,string>& config, const Lexic
ReadWeightMap(i->second);
m_useWeightMap = true;
for (int reoType=0; reoType<=LRModel::MAX; ++reoType) {
std::string str;
util::StringStream buf(str);
util::StringStream buf;
buf << reoType;
m_featureMap2.push_back(m_producer->GetFeatureName(str));
m_featureMap2.push_back(m_producer->GetFeatureName(buf.str()));
}
} else if (fields[0] == "phrase") {

View File

@ -62,8 +62,7 @@ void PhraseBoundaryFeature::AddFeatures(
ScoreComponentCollection* scores) const
{
for (size_t i = 0; i < factors.size(); ++i) {
std::string str;
util::StringStream name(str);
util::StringStream name;
name << side << ":";
name << factors[i];
name << ":";

View File

@ -26,16 +26,13 @@ void PhraseLengthFeature::EvaluateInIsolation(const Phrase &source
size_t sourceLength = source.GetSize();
// create feature names
string nameSourceStr;
util::StringStream nameSource(nameSourceStr);
util::StringStream nameSource;
nameSource << "s" << sourceLength;
string nameTargetStr;
util::StringStream nameTarget(nameTargetStr);
util::StringStream nameTarget;
nameTarget << "t" << targetLength;
string nameBothStr;
util::StringStream nameBoth(nameBothStr);
util::StringStream nameBoth;
nameBoth << sourceLength << "," << targetLength;
// increase feature counts

View File

@ -127,8 +127,7 @@ void PhrasePairFeature::EvaluateWithSourceContext(const InputType &input
const bool use_topicid_prob = isnt.GetUseTopicIdAndProb();
// compute pair
string pairStr;
util::StringStream pair(pairStr);
util::StringStream pair;
pair << ReplaceTilde( source.GetWord(0).GetFactor(m_sourceFactorId)->GetString() );
for (size_t i = 1; i < source.GetSize(); ++i) {
@ -148,8 +147,7 @@ void PhrasePairFeature::EvaluateWithSourceContext(const InputType &input
if(use_topicid) {
// use topicid as trigger
const long topicid = isnt.GetTopicId();
string featureStr;
util::StringStream feature(featureStr);
util::StringStream feature;
feature << m_description << "_";
if (topicid == -1)
@ -164,15 +162,13 @@ void PhrasePairFeature::EvaluateWithSourceContext(const InputType &input
// use topic probabilities
const vector<string> &topicid_prob = *(isnt.GetTopicIdAndProb());
if (atol(topicid_prob[0].c_str()) == -1) {
string featureStr;
util::StringStream feature(featureStr);
util::StringStream feature;
feature << m_description << "_unk_";
feature << pair.str();
scoreBreakdown.SparsePlusEquals(feature.str(), 1);
} else {
for (size_t i=0; i+1 < topicid_prob.size(); i+=2) {
string featureStr;
util::StringStream feature(featureStr);
util::StringStream feature;
feature << m_description << "_";
feature << topicid_prob[i];
feature << "_";
@ -186,8 +182,7 @@ void PhrasePairFeature::EvaluateWithSourceContext(const InputType &input
const long docid = isnt.GetDocumentId();
for (set<string>::const_iterator p = m_vocabDomain[docid].begin(); p != m_vocabDomain[docid].end(); ++p) {
string sourceTrigger = *p;
string str;
util::StringStream namestr(str);
util::StringStream namestr;
namestr << m_description << "_";
namestr << sourceTrigger;
namestr << "_";
@ -215,8 +210,7 @@ void PhrasePairFeature::EvaluateWithSourceContext(const InputType &input
sourceTriggerExists = FindStringPiece(m_vocabSource, sourceTrigger ) != m_vocabSource.end();
if (m_unrestricted || sourceTriggerExists) {
string str;
util::StringStream namestr(str);
util::StringStream namestr;
namestr << m_description << "_";
namestr << sourceTrigger;
namestr << "~";
@ -246,8 +240,7 @@ void PhrasePairFeature::EvaluateInIsolation(const Phrase &source
, ScoreComponentCollection &estimatedFutureScore) const
{
if (m_simple) {
string str;
util::StringStream namestr(str);
util::StringStream namestr;
namestr << m_description << "_";
namestr << ReplaceTilde( source.GetWord(0).GetFactor(m_sourceFactorId)->GetString() );
for (size_t i = 1; i < source.GetSize(); ++i) {

View File

@ -58,8 +58,7 @@ void RulePairUnlexicalizedSource::EvaluateInIsolation(const Phrase &source
}
}
string str;
util::StringStream namestr(str);
util::StringStream namestr;
for (size_t posT=0; posT<targetPhrase.GetSize(); ++posT) {
const Word &wordT = targetPhrase.GetWord(posT);

View File

@ -205,10 +205,9 @@ void FVector::save(const string& filename) const
{
ofstream out(filename.c_str());
if (!out) {
std::string str;
util::StringStream msg(str);
util::StringStream msg;
msg << "Unable to open " << filename;
throw runtime_error(str);
throw runtime_error(msg.str());
}
write(out);
out.close();

View File

@ -85,11 +85,10 @@ void GenerationDictionary::Load()
size_t numFeaturesInFile = token.size() - 2;
if (numFeaturesInFile < numFeatureValuesInConfig) {
std::string str;
util::StringStream strme(str);
util::StringStream strme;
strme << m_filePath << ":" << lineNum << ": expected " << numFeatureValuesInConfig
<< " feature values, but found " << numFeaturesInFile << "\n";
throw str;
throw strme.str();
}
std::vector<float> scores(numFeatureValuesInConfig, 0.0f);
for (size_t i = 0; i < numFeatureValuesInConfig; i++)

View File

@ -488,8 +488,7 @@ LanguageModel *ConstructKenLM(const std::string &lineOrig)
util::TokenIter<util::SingleCharacter, true> argument(lineOrig, ' ');
++argument; // KENLM
string str;
util::StringStream line(str);
util::StringStream line;
line << "KENLM";
for (; argument; ++argument) {
@ -512,7 +511,7 @@ LanguageModel *ConstructKenLM(const std::string &lineOrig)
}
}
return ConstructKenLM(str, filePath, factorType, lazy);
return ConstructKenLM(line.str(), filePath, factorType, lazy);
}
LanguageModel *ConstructKenLM(const std::string &line, const std::string &file, FactorType factorType, bool lazy)

View File

@ -97,8 +97,7 @@ LMResult LanguageModelRemote::GetValue(const std::vector<const Word*> &contextFa
cur->boState = *reinterpret_cast<const State*>(&m_curId);
++m_curId;
std::string out;
util::StringStream os(out);
util::StringStream os;
os << "prob ";
if (event_word == NULL) {
os << "</s>";
@ -114,7 +113,7 @@ LMResult LanguageModelRemote::GetValue(const std::vector<const Word*> &contextFa
}
}
os << "\n";
write(sock, out.c_str(), out.size());
write(sock, os.str().c_str(), os.str().size());
char res[6];
int r = read(sock, res, 6);
int errors = 0;

View File

@ -702,8 +702,7 @@ ConvertWeightArgsPhraseModel(const string &oldWeightName)
size_t currOldInd = 0;
for(size_t currDict = 0 ; currDict < translationVector.size(); currDict++) {
string ptLineStr;
util::StringStream ptLine(ptLineStr);
util::StringStream ptLine;
vector<string> token = Tokenize(translationVector[currDict]);
@ -799,7 +798,7 @@ ConvertWeightArgsPhraseModel(const string &oldWeightName)
ptLine << "alignment-path=" << token[6] << " ";
}
AddFeature(ptLineStr);
AddFeature(ptLine.str());
} // for(size_t currDict = 0 ; currDict < translationVector.size(); currDict++) {
} // if (GetParam("ttable-file").size() > 0) {
@ -862,8 +861,7 @@ ConvertWeightArgsDistortion()
}
SetWeight("LexicalReordering", indTable, weights);
string str;
util::StringStream strme(str);
util::StringStream strme;
strme << "LexicalReordering "
<< "type=" << toks[1] << " ";
@ -877,7 +875,7 @@ ConvertWeightArgsDistortion()
strme << "num-features=" << toks[2] << " ";
strme << "path=" << toks[3];
AddFeature(str);
AddFeature(strme.str());
}
}
@ -1010,14 +1008,13 @@ ConvertWeightArgsGeneration(const std::string &oldWeightName, const std::string
}
SetWeight(newWeightName, indTable, weights);
string str;
util::StringStream strme(str);
util::StringStream strme;
strme << "Generation "
<< "input-factor=" << modelToks[0] << " "
<< "output-factor=" << modelToks[1] << " "
<< "num-features=" << modelToks[2] << " "
<< "path=" << modelToks[3];
AddFeature(str);
AddFeature(strme.str());
}
}

View File

@ -118,8 +118,7 @@ std::string Phrase::GetStringRep(const vector<FactorType> factorsToPrint) const
{
bool markUnknown = StaticData::Instance().GetMarkUnknown();
string str;
util::StringStream strme(str);
util::StringStream strme;
for (size_t pos = 0 ; pos < GetSize() ; pos++) {
if (markUnknown && GetWord(pos).IsOOV()) {
strme << StaticData::Instance().GetUnknownWordPrefix();
@ -130,7 +129,7 @@ std::string Phrase::GetStringRep(const vector<FactorType> factorsToPrint) const
}
}
return str;
return strme.str();
}
Word &Phrase::AddWord()

View File

@ -640,21 +640,19 @@ void StaticData::LoadDecodeGraphsOld(const vector<string> &mappingVector, const
switch (decodeType) {
case Translate:
if(index>=pts.size()) {
string str;
util::StringStream strme(str);
util::StringStream strme;
strme << "No phrase dictionary with index "
<< index << " available!";
UTIL_THROW(util::Exception, str);
UTIL_THROW(util::Exception, strme.str());
}
decodeStep = new DecodeStepTranslation(pts[index], prev, *featuresRemaining);
break;
case Generate:
if(index>=gens.size()) {
string str;
util::StringStream strme(str);
util::StringStream strme;
strme << "No generation dictionary with index "
<< index << " available!";
UTIL_THROW(util::Exception, str);
UTIL_THROW(util::Exception, strme.str());
}
decodeStep = new DecodeStepGeneration(gens[index], prev, *featuresRemaining);
break;

View File

@ -56,8 +56,7 @@ TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
TargetPhrase *targetPhrase = new TargetPhrase();
std::string alignmentSSStr;
util::StringStream alignmentSS(alignmentSSStr);
util::StringStream alignmentSS;
for (std::size_t i = 0; i < e.tail.size(); ++i) {
const Word &symbol = e.tail[i]->pvertex.symbol;
if (symbol.IsNonTerminal()) {
@ -77,7 +76,7 @@ TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
targetPhrase->EvaluateInIsolation(m_dummySourcePhrase);
Word *targetLhs = new Word(staticData.GetOutputDefaultNonTerminal());
targetPhrase->SetTargetLHS(targetLhs);
targetPhrase->SetAlignmentInfo(alignmentSSStr);
targetPhrase->SetAlignmentInfo(alignmentSS.str());
return targetPhrase;
}

View File

@ -47,8 +47,7 @@ TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
TargetPhrase *targetPhrase = new TargetPhrase();
std::string alignmentSSStr;
util::StringStream alignmentSS(alignmentSSStr);
util::StringStream alignmentSS;
for (std::size_t i = 0; i < node.children.size(); ++i) {
const Word &symbol = node.children[i]->pvertex.symbol;
if (symbol.IsNonTerminal()) {
@ -68,7 +67,7 @@ TargetPhrase *GlueRuleSynthesizer::SynthesizeTargetPhrase(
targetPhrase->EvaluateInIsolation(sourceRhs);
Word *targetLhs = new Word(staticData.GetOutputDefaultNonTerminal());
targetPhrase->SetTargetLHS(targetLhs);
targetPhrase->SetAlignmentInfo(alignmentSSStr);
targetPhrase->SetAlignmentInfo(alignmentSS.str());
return targetPhrase;
}

View File

@ -41,11 +41,9 @@ void PrintTranslationAnalysis(std::ostream &os, const Hypothesis* hypo)
if (doLMStats)
lmAcc.resize((*tpi)->GetLMStats()->size(), 0);
for (; tpi != translationPath.end(); ++tpi) {
std::string smsStr;
util::StringStream sms(smsStr);
util::StringStream sms;
std::string tmsStr;
util::StringStream tms(tmsStr);
util::StringStream tms;
std::string target = (*tpi)->GetTargetPhraseStringRep();
std::string source = (*tpi)->GetSourcePhraseStringRep();
WordsRange twr = (*tpi)->GetCurrTargetWordsRange();
@ -93,8 +91,8 @@ void PrintTranslationAnalysis(std::ostream &os, const Hypothesis* hypo)
for (; swr_i <= swr.GetEndPos() && swr.GetEndPos() != NOT_FOUND; swr_i++) {
tms << '-' << swr_i;
}
if (!epsilon) targetMap.push_back(smsStr);
sourceMap.push_back(tmsStr);
if (!epsilon) targetMap.push_back(sms.str());
sourceMap.push_back(tms.str());
}
std::vector<std::string>::iterator si = sourceMap.begin();
std::vector<std::string>::iterator ti = targetMap.begin();

View File

@ -368,12 +368,11 @@ void BlockHashIndex::CalcHash(size_t current, void* source_void)
if(lastKey > temp) {
if(source->nkeys != 2 || temp != "###DUMMY_KEY###") {
std::string str;
util::StringStream strme(str);
util::StringStream strme;
strme << "ERROR: Input file does not appear to be sorted with LC_ALL=C sort\n";
strme << "1: " << lastKey << "\n";
strme << "2: " << temp << "\n";
UTIL_THROW2(str);
UTIL_THROW2(strme.str());
}
}
lastKey = temp;

View File

@ -146,12 +146,11 @@ public:
size_t current = m_landmarks.size();
if(m_landmarks.size() && m_landmarks.back().str() >= keys[0]) {
std::string str;
util::StringStream strme(str);
util::StringStream strme;
strme << "ERROR: Input file does not appear to be sorted with LC_ALL=C sort\n";
strme << "1: " << m_landmarks.back().str() << "\n";
strme << "2: " << keys[0] << "\n";
UTIL_THROW2(str);
UTIL_THROW2(strme.str());
}
m_landmarks.push_back(keys[0]);

View File

@ -39,10 +39,9 @@ PhraseDictionaryMultiModel::PhraseDictionaryMultiModel(const std::string &line)
} else if (m_mode == "all" || m_mode == "all-restrict") {
UTIL_THROW2("Implementation has moved: use PhraseDictionaryGroup with restrict=true/false");
} else {
string str;
util::StringStream msg(str);
util::StringStream msg;
msg << "combination mode unknown: " << m_mode;
throw runtime_error(str);
throw runtime_error(msg.str());
}
}
@ -212,10 +211,9 @@ std::vector<std::vector<float> > PhraseDictionaryMultiModel::getWeights(size_t n
raw_weights.push_back(1.0/m_numModels); //uniform weights created online
}
} else if(weights_ptr->size() != m_numModels && weights_ptr->size() != m_numModels * numWeights) {
string str;
util::StringStream strme(str);
util::StringStream strme;
strme << "Must have either one multimodel weight per model (" << m_numModels << "), or one per weighted feature and model (" << numWeights << "*" << m_numModels << "). You have " << weights_ptr->size() << ".";
UTIL_THROW(util::Exception, str);
UTIL_THROW(util::Exception, strme.str());
} else {
raw_weights = *weights_ptr;
}

View File

@ -57,10 +57,9 @@ void PhraseDictionaryMultiModelCounts::SetParameter(const std::string& key, cons
else if (m_mode == "interpolate")
m_combineFunction = LinearInterpolationFromCounts;
else {
string str;
util::StringStream msg(str);
util::StringStream msg;
msg << "combination mode unknown: " << m_mode;
throw runtime_error(str);
throw runtime_error(msg.str());
}
} else if (key == "lex-e2f") {
m_lexE2FStr = Tokenize(value, ",");

View File

@ -53,8 +53,7 @@ void PhraseDictionaryTreeAdaptor::InitializeForInput(ttasksptr const& ttask)
vector<float> weight = staticData.GetWeights(this);
if(m_numScoreComponents!=weight.size()) {
string str;
util::StringStream strme(str);
util::StringStream strme;
UTIL_THROW2("ERROR: mismatch of number of scaling factors: " << weight.size()
<< " " << m_numScoreComponents);
}

View File

@ -126,22 +126,20 @@ void ReformatHieroRule(const string &lineOrig, string &out)
ReformatHieroRule(1, targetPhraseString, ntAlign);
ReformateHieroScore(scoreString);
std::string alignStr;
util::StringStream align(alignStr);
util::StringStream align;
map<size_t, pair<size_t, size_t> >::const_iterator iterAlign;
for (iterAlign = ntAlign.begin(); iterAlign != ntAlign.end(); ++iterAlign) {
const pair<size_t, size_t> &alignPoint = iterAlign->second;
align << alignPoint.first << "-" << alignPoint.second << " ";
}
std::string str;
util::StringStream ret(str);
util::StringStream ret;
ret << sourcePhraseString << " ||| "
<< targetPhraseString << " ||| "
<< scoreString << " ||| "
<< alignStr;
<< align.str();
out = str;
out = ret.str();
}
bool RuleTableLoaderStandard::Load(FormatType format

View File

@ -14,13 +14,12 @@ namespace tmmt
{
std::string SentenceAlignment::getTargetString(const Vocabulary &vocab) const
{
std::string str;
util::StringStream strme(str);
util::StringStream strme;
for (size_t i = 0; i < target.size(); ++i) {
const WORD &word = vocab.GetWord(target[i]);
strme << word << " ";
}
return str;
return strme.str();
}
}

View File

@ -28,13 +28,12 @@ struct SentenceAlignment {
std::string getTargetString(const Vocabulary &vocab) const;
std::string getAlignmentString() const {
std::string str;
util::StringStream strme(str);
util::StringStream strme;
for (size_t i = 0; i < alignment.size(); ++i) {
const std::pair<int,int> &alignPair = alignment[i];
strme << alignPair.first << "-" << alignPair.second << " ";
}
return str;
return strme.str();
}
};

View File

@ -80,8 +80,7 @@ void Word::Merge(const Word &sourceWord)
std::string Word::GetString(const vector<FactorType> factorType,bool endWithBlank) const
{
string str;
util::StringStream strme(str);
util::StringStream strme;
const std::string& factorDelimiter = StaticData::Instance().GetFactorDelimiter();
bool firstPass = true;
unsigned int stop = min(max_fax(),factorType.size());
@ -101,7 +100,7 @@ std::string Word::GetString(const vector<FactorType> factorType,bool endWithBlan
}
}
if(endWithBlank) strme << " ";
return str;
return strme.str();
}
StringPiece Word::GetString(FactorType factorType) const

View File

@ -25,7 +25,8 @@ void Exception::SetLocation(const char *file, unsigned int line, const char *fun
*/
std::string old_text;
std::swap(old_text, what_);
StringStream stream(what_);
StringStream stream;
stream << what_;
stream << file << ':' << line;
if (func) stream << " in " << func << " threw ";
if (child_name) {

View File

@ -11,9 +11,13 @@ namespace util {
class StringStream : public FakeOStream<StringStream> {
public:
// Semantics: appends to string. Remember to clear first!
explicit StringStream(std::string &out)
: out_(out) {}
explicit StringStream()
{}
/*
explicit StringStream(std::string &out)
: out_(out) {}
*/
StringStream &flush() { return *this; }
StringStream &write(const void *data, std::size_t length) {
@ -39,7 +43,7 @@ class StringStream : public FakeOStream<StringStream> {
}
private:
std::string &out_;
std::string out_;
};
} // namespace

View File

@ -11,9 +11,9 @@
namespace util { namespace {
template <class T> void TestEqual(const T value) {
std::string str;
StringStream(str) << value;
BOOST_CHECK_EQUAL(boost::lexical_cast<std::string>(value), str);
StringStream strme;
strme << value;
BOOST_CHECK_EQUAL(boost::lexical_cast<std::string>(value), strme.str());
}
template <class T> void TestCorners() {