std::accumulate -> GetTotalSize()

Hieu Hoang 2016-09-14 14:25:03 +02:00
commit 81f6f51f6f
3 changed files with 23 additions and 31 deletions


@@ -59,8 +59,7 @@ inline std::vector<T> Tokenize( const std::string &input
 void Tensor::Load(const std::string &path)
 {
-  size_t totSize = std::accumulate(pimpl_->shape().begin(), pimpl_->shape().end(),
-                                   1, std::multiplies<int>());
+  size_t totSize = GetTotalSize(pimpl_->shape());
   cerr << "totSize=" << totSize << endl;
   std::vector<float> hostData(totSize);

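The GetTotalSize() helper that this hunk switches to (added in the second file below) is simply the product of the shape's dimensions. A minimal standalone sketch of that computation, assuming Shape is a std::vector<int> of dimension sizes (the real typedef may differ) and using the name GetTotalSizeSketch to mark it as illustration:

    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <numeric>
    #include <vector>

    using Shape = std::vector<int>;   // assumption for this sketch

    // Product of all dimensions, e.g. {500, 784} -> 392000 elements.
    inline size_t GetTotalSizeSketch(const Shape &shape) {
      return std::accumulate(shape.begin(), shape.end(),
                             size_t(1), std::multiplies<size_t>());
    }

    int main() {
      std::cout << GetTotalSizeSketch({500, 784}) << std::endl;  // prints 392000
      return 0;
    }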

@@ -48,6 +48,13 @@ inline std::string Debug(const Shape &shape)
   return strm.str();
 }
 
+inline size_t GetTotalSize(const Shape &shape)
+{
+  size_t ret = std::accumulate(shape.begin(), shape.end(),
+                               1, std::multiplies<int>());
+  return ret;
+}
+
 template<class Float>
 class TensorImpl {
   private:
@@ -81,8 +88,7 @@ class TensorImpl {
       std::cerr << "Allocating : " << shape[0] << " " << shape[1] << std::endl;
 
-      int size = std::accumulate(shape_.begin(), shape_.end(),
-                                 1, std::multiplies<int>());
+      int size = GetTotalSize(shape_);
       data_.resize(size, value);
       cudnnCreateTensorDescriptor(&desc_);
       switch (shape_.size()) {
@@ -153,8 +159,7 @@ class TensorImpl {
     }
 
     void set(const std::vector<Float> &values) {
-      size_t totSize = std::accumulate(shape().begin(), shape().end(),
-                                       1, std::multiplies<int>());
+      size_t totSize = GetTotalSize(shape());
       std::cerr << "tensor size=" << totSize << " vector size=" << values.size() << std::endl;
       assert(totSize == values.size());
       thrust::copy(values.begin(), values.end(), data_.begin());
@@ -164,7 +169,14 @@ class TensorImpl {
     {
       std::stringstream strm;
       assert(shape_.size());
-      strm << "shape=" << marian::Debug(shape_);
+      strm << "shape=" << marian::Debug(shape_) << std::endl;
+
+      // values
+      /*
+      size_t totSize = GetTotalSize(shape());
+      std::vector<Float> values(totSize);
+      thrust::copy(data_.begin(), data_.end(), values.begin());
+      */
       return strm.str();
     }
 };

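One aside on the helper added above: std::accumulate carries out the multiplication in the type of its initial value, so passing a plain 1 accumulates in int. The shapes used in this code are far too small for that to matter, but a size_t initial value keeps the product exact even for very large tensors. A minimal sketch with a hypothetical oversized shape, for illustration only:

    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <numeric>
    #include <vector>

    int main() {
      // Hypothetical shape whose element count exceeds what a 32-bit int can hold.
      std::vector<int> shape = {70000, 70000};

      // With a plain `1` as the initial value the product is computed in int,
      // which overflows here (undefined behaviour), so it stays commented out:
      // int bad = std::accumulate(shape.begin(), shape.end(), 1, std::multiplies<int>());

      // Accumulating with a size_t initial value keeps the product exact.
      size_t total = std::accumulate(shape.begin(), shape.end(),
                                     size_t(1), std::multiplies<size_t>());
      std::cout << total << std::endl;  // 4900000000 with a 64-bit size_t
      return 0;
    }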

@@ -22,11 +22,10 @@ int main(int argc, char** argv) {
   Expr b = param(shape={1, LABEL_SIZE}, name="b0");
 
   auto scores = dot(x, w) + b;
-  auto lr = softmax_fast(scores, axis=1, name="pred");
+  auto lr = softmax(scores, axis=1, name="pred");
   auto graph = -mean(sum(y * log(lr), axis=1), axis=0, name="cost");
   cerr << "lr=" << lr.Debug() << endl;
 
 #if 0
   int numofdata;
   vector<float> images = datasets::mnist::ReadImages("../examples/mnist/t10k-images-idx3-ubyte", numofdata, IMAGE_SIZE);
   vector<float> labels = datasets::mnist::ReadLabels("../examples/mnist/t10k-labels-idx1-ubyte", numofdata, LABEL_SIZE);
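For context on the softmax line above: lr = softmax(scores, axis=1) normalizes each row of the score matrix into a probability distribution over the LABEL_SIZE classes. A minimal CPU sketch of that row-wise operation, for illustration only; the operator used by the graph runs on the GPU and its internals are not part of this diff:

    #include <algorithm>
    #include <cmath>
    #include <iostream>
    #include <vector>

    // Row-wise softmax over a row-major rows x cols matrix.
    void SoftmaxRows(std::vector<float> &m, size_t rows, size_t cols) {
      for (size_t r = 0; r < rows; ++r) {
        float *row = m.data() + r * cols;
        float maxVal = *std::max_element(row, row + cols);  // subtract max for stability
        float sum = 0.f;
        for (size_t c = 0; c < cols; ++c) {
          row[c] = std::exp(row[c] - maxVal);
          sum += row[c];
        }
        for (size_t c = 0; c < cols; ++c)
          row[c] /= sum;
      }
    }

    int main() {
      std::vector<float> scores = {1.f, 2.f, 3.f,
                                   0.f, 0.f, 0.f};
      SoftmaxRows(scores, 2, 3);
      for (float v : scores) std::cout << v << " ";  // each row of 3 now sums to 1
      std::cout << std::endl;
      return 0;
    }
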
@@ -41,33 +40,15 @@ int main(int argc, char** argv) {
   cerr << "tx=" << tx.Debug() << endl;
   cerr << "ty=" << ty.Debug() << endl;
 #else
   Tensor tx({500, 784}, 1);
   Tensor ty({500, 10}, 1);
 #endif
 
   x = tx;
   y = ty;
 
   graph.forward(500);
 
-  std::cerr << "Result: ";
-  for (auto val : scores.val().shape()) {
-    std::cerr << val << " ";
-  }
-  std::cerr << std::endl;
-
-  std::cerr << "Result: ";
-  for (auto val : lr.val().shape()) {
-    std::cerr << val << " ";
-  }
-  std::cerr << std::endl;
-  lr.val().Print();
-
-  std::cerr << "Log-likelihood: ";
-  for (auto val : graph.val().shape()) {
-    std::cerr << val << " ";
-  }
-  std::cerr << std::endl;
-  graph.val().Print();
+  std::cerr << "scores: " << Debug(scores.val().shape()) << endl;
+  std::cerr << "lr: " << Debug(lr.val().shape()) << endl;
+  std::cerr << "Log-likelihood: " << Debug(graph.val().shape()) << endl;
 
   graph.backward();
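
The final hunk replaces the hand-written shape-printing loops with the Debug(const Shape &) helper declared in the second file. Its exact output format is not visible in this diff; a sketch of what such a formatter might look like, with DebugSketch and the Shape alias as assumptions for illustration:

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    using Shape = std::vector<int>;  // assumption for this sketch

    // Same signature shape as the Debug() helper used above; the real output
    // format is not shown in this diff.
    inline std::string DebugSketch(const Shape &shape) {
      std::stringstream strm;
      for (size_t i = 0; i < shape.size(); ++i) {
        if (i) strm << "x";
        strm << shape[i];
      }
      return strm.str();
    }

    int main() {
      std::cout << DebugSketch({500, 10}) << std::endl;  // prints 500x10
      return 0;
    }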