L2 norm of diff

This commit is contained in:
Hieu Hoang 2016-09-21 21:49:01 +01:00
parent fbe0126c07
commit 587dffb91e
3 changed files with 14 additions and 6 deletions

View File

@ -95,18 +95,25 @@ void Node::calc_numeric_grad(
//output("numericalGrad", numericalGrad);
// print out diff between origGrad and numericalGrad
/*
std::vector<float> diff(inputSize);
for (size_t i = 0; i < diff.size(); ++i) {
diff[i] = (origGrad[i] - numericalGrad[i]) ;
for (size_t i = 0; i < origGrad.size(); ++i) {
diff[i] = origGrad[i] - numericalGrad[i];
}
output("diff", diff.begin(), diff.end());
*/
cerr << "L2-norm of difference=" << L2Norm(diff) << endl << endl;
// put back origGrad
thrust::copy(origGrad.begin(), origGrad.end(), grad.begin());
}
// Euclidean (L2) norm of vec: sqrt of the sum of squared elements.
// Used to report how far the analytic gradient is from the numerical one.
float Node::L2Norm(const std::vector<float> &vec) const
{
  float sumSq = 0.0f;
  for (const float &val : vec) {
    sumSq += val * val;
  }
  return sqrt(sumSq);
}
std::vector<float> Node::StoreTensorInVec(Tensor tensor)
{
size_t totSize = GetTotalSize(tensor.shape());

View File

@ -130,6 +130,7 @@ class Node : public Chainable<Tensor>,
const std::vector<float> &prevCalcGrad
);
void broadcast(const std::vector<float> &largeVec, std::vector<float> &smallVec);
float L2Norm(const std::vector<float> &vec) const;
};

View File

@ -48,7 +48,7 @@ int main(int argc, char** argv)
expr.emplace_back(relu(expr.back()));
expr.emplace_back(log(expr.back()));
expr.emplace_back(exp(expr.back()));
//expr.emplace_back(softmax(expr.back()));
expr.emplace_back(softmax(expr.back()));
Expr ceExpr = cross_entropy(expr.back(), labelExpr);
Expr cost = mean(ceExpr, axis=0);