From f05d17e7ae179ac5d94bf12e443c28121ec7ab23 Mon Sep 17 00:00:00 2001
From: Hieu Hoang <hieuhoang@gmail.com>
Date: Wed, 14 Sep 2016 14:51:49 +0200
Subject: [PATCH] output tensors in shape

---
 src/tensor.h | 9 +++++++--
 src/test.cu  | 6 +++---
 2 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/src/tensor.h b/src/tensor.h
index 499e1ae0..83965508 100644
--- a/src/tensor.h
+++ b/src/tensor.h
@@ -176,8 +176,13 @@ class TensorImpl {
     std::vector<Float> values(totSize);
     thrust::copy(data_.begin(), data_.end(), values.begin());
 
-    for (size_t i = 0; i < totSize; ++i) {
-      strm << values[i] << " ";
+    size_t ind = 0;
+    for (size_t i = 0; i < shape()[0]; ++i) {
+      for (size_t j = 0; j < shape()[1]; ++j) {
+        strm << values[ind] << " ";
+        ++ind;
+      }
+      strm << std::endl;
     }
     return strm.str();
   }
diff --git a/src/test.cu b/src/test.cu
index bd417cee..9eb9b498 100644
--- a/src/test.cu
+++ b/src/test.cu
@@ -21,8 +21,8 @@ int main(int argc, char** argv) {
   Expr w = param(shape={IMAGE_SIZE, LABEL_SIZE}, name="W0");
   Expr b = param(shape={1, LABEL_SIZE}, name="b0");
 
-  Expr scores = dot(x, w) + b;
-  Expr lr = softmax(scores, axis=1, name="pred");
+  Expr z = dot(x, w) + b;
+  Expr lr = softmax(z, axis=1, name="pred");
   Expr graph = -mean(sum(y * log(lr), axis=1), axis=0, name="cost");
 
   //cerr << "lr=" << Debug(lr.val().shape()) << endl;
@@ -46,7 +46,7 @@ int main(int argc, char** argv) {
 
   graph.forward(500);
 
-  std::cerr << "scores: " << Debug(scores.val().shape()) << endl;
+  std::cerr << "z: " << Debug(z.val().shape()) << endl;
   std::cerr << "lr: " << Debug(lr.val().shape()) << endl;
   std::cerr << "Log-likelihood: " << Debug(graph.val().shape()) << endl ;
 