more coolness

Marcin Junczys-Dowmunt 2016-05-03 23:43:39 +01:00
parent d19f1f610f
commit e5fcec663c
4 changed files with 44 additions and 33 deletions

View File

@@ -1,3 +1,5 @@
-# MAD
-Massively (Parallel) Automatic Differentiation
+# Marian*
+Parallel Automatic Differentiation Library
+
+* = in honour of Marian Rejewski, a Polish mathematician and cryptologist.

BIN src/a.out
Binary file not shown.

View File

@@ -40,7 +40,7 @@ class Vimpl : public Chainable {
     virtual void set_zero_adjoint() { adj_ = 0; }
     const Tensor& val() const { return val_; };
-    Tensor& adj() { return adj_; };
+    Tensor& grad() { return adj_; };
 
   protected:
     const Tensor val_;
@@ -72,15 +72,15 @@ class Var {
      return vimpl_->val();
    }
 
-   Tensor& adj() {
-     return vimpl_->adj();
+   Tensor& grad() {
+     return vimpl_->grad();
    }
 
    VimplPtr vimpl() const {
      return vimpl_;
    }
 
-   void grad() {
+   void calc_gradients() {
      mad::grad(vimpl_);
    }
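The rename splits what used to be two meanings of grad(): grad() is now the accessor for the stored adjoint, while calc_gradients() triggers the backward pass via mad::grad(vimpl_). A minimal usage sketch, assuming the declarations above and that set_zero_all_adjoints() lives in namespace mad (the snippet itself is not part of the commit):

    mad::Var x = 2.0;
    mad::Var y = log(x);                 // builds the LogVimpl node from the next hunk
    mad::set_zero_all_adjoints();        // assumed namespace; clears stale adjoints
    y.calc_gradients();                  // was y.grad(); runs mad::grad(vimpl_)
    std::cerr << x.grad() << std::endl;  // was x.adj(); reads the accumulated adjoint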
@@ -99,7 +99,7 @@ struct LogVimpl : public OpVimpl {
   LogVimpl(VimplPtr a) : OpVimpl(std::log(a->val()), a) { }
 
   void chain() {
-    a_->adj() += adj_ / a_->val();
+    a_->grad() += adj_ / a_->val();
   }
 };
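chain() here is the reverse-mode rule for y = log(a): dy/da = 1/a, so the parent adjoint is scaled by 1/a before accumulating into the operand. Worked through on the sketch above (hedged: this assumes calc_gradients() seeds the output adjoint with 1, as is standard for this style of tape):

    // x.val() == 2, y = log(x)
    // after y.calc_gradients(): y.grad() == 1
    // LogVimpl::chain(): x.grad() += 1 / 2  =>  x.grad() == 0.5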
@@ -122,8 +122,8 @@ struct PlusVimplVV : public OpVimplVV {
   PlusVimplVV(VimplPtr a, VimplPtr b) : OpVimplVV(a->val() + b->val(), a, b) { }
 
   void chain() {
-    a_->adj() += adj_;
-    b_->adj() += adj_;
+    a_->grad() += adj_;
+    b_->grad() += adj_;
   }
 };
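For y = a + b both partials are 1, so chain() passes the parent adjoint through unchanged; a variable used several times accumulates one contribution per use, which is what makes the repeated y = y + xi updates in the test file below work. A tiny sketch under the same assumptions as above (not part of the commit):

    mad::Var a = 3.0;
    mad::Var y = a + a;            // a occupies both PlusVimplVV slots
    mad::set_zero_all_adjoints();
    y.calc_gradients();
    // a.grad() == 2.0: one "+= adj_" from each use of a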

View File

@@ -3,32 +3,41 @@
 #include "mad.h"
 
-int main(int argc, char** argv) {
-  using namespace mad;
-  {
-    srand(time(NULL));
-    size_t max = rand() % 20 + 1;
-
-    Var x0 = 1, x1 = 2, x2 = 3;
-    std::vector<Var> x = { x0, x1, x2 };
-
-    Var y = 0.0;
-    for(int i = 0; i < max; i++) {
-      Var xi = i;
-      y = y + x0 + log(x2) + x1;
-      for(int j = 0; j < i; ++i) {
-        y = y + xi;
-        x.push_back(xi);
-      }
-    }
-
-    set_zero_all_adjoints();
-    y.grad();
-
-    std::cerr << "y = " << y.val() << std::endl;
-    for(int i = 0; i < x.size(); ++i)
-      std::cerr << "dy/dx_" << i << " = " << x[i].adj() << std::endl;
-  }
+mad::Var layer(size_t max) {
+  using namespace mad;
+
+  Var x0 = 1, x1 = 2, x2 = 3;
+  Var y = 0.0;
+  for(int i = 0; i < max; i++) {
+    Var xi = i;
+    y = y + x0 + log(x2) + x1;
+    for(int j = 0; j < i; ++j) {
+      y = y + xi;
+    }
+  }
+  return y;
+}
+
+int main(int argc, char** argv) {
+  srand(time(NULL));
+  using namespace mad;
+
+  Var y1 = layer(10);
+  Var y2 = layer(5);
+  Var y = y1 + log(y2);
+
+  set_zero_all_adjoints();
+  y.calc_gradients();
+
+  std::cerr << "y1 = " << y1.val() << std::endl;
+  std::cerr << "y2 = " << y2.val() << std::endl;
+  std::cerr << "y = " << y.val() << std::endl;
+  std::cerr << "dy/dy1 = " << y1.grad() << std::endl;
+  std::cerr << "dy/dy2 = " << y2.grad() << std::endl;
 }
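A quick check of what the new printout should show, from y = y1 + log(y2) and the chain rules above (hedged: assumes Tensor prints as a scalar here):

    // dy/dy1 = 1         =>  y1.grad() == 1.0
    // dy/dy2 = 1 / y2    =>  y2.grad() == 1.0 / y2.val()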