From adf731218cd6e997da2936193a25f8647bf4c449 Mon Sep 17 00:00:00 2001
From: Huw Campbell
Date: Mon, 10 Apr 2017 12:06:50 +1000
Subject: [PATCH] Docs

---
 src/Grenade.hs                |  2 +-
 src/Grenade/Layers/Elu.hs     |  7 +++++++
 src/Grenade/Layers/Logit.hs   |  8 ++++++++
 src/Grenade/Layers/Pad.hs     |  9 +++++++++
 src/Grenade/Layers/Pooling.hs |  8 ++++++++
 src/Grenade/Layers/Relu.hs    |  7 +++++++
 src/Grenade/Layers/Reshape.hs | 13 ++++++++++---
 src/Grenade/Layers/Softmax.hs | 11 +++++++++++
 src/Grenade/Layers/Tanh.hs    |  7 +++++++
 src/Grenade/Layers/Trivial.hs | 12 +++++++++++-
 10 files changed, 79 insertions(+), 5 deletions(-)

diff --git a/src/Grenade.hs b/src/Grenade.hs
index 31829cf..a63281b 100644
--- a/src/Grenade.hs
+++ b/src/Grenade.hs
@@ -46,7 +46,7 @@ is a simple example which runs a logistic regression.
 
 > randomMyNet = randomNetwork
 
 The function `randomMyNet` witnesses the `CreatableNetwork`
-constraint of the neural network, that is, it ensures the network
+constraint of the neural network, and in doing so, ensures the network
 can be built, and hence, that the architecture is sound.
 -}
diff --git a/src/Grenade/Layers/Elu.hs b/src/Grenade/Layers/Elu.hs
index ae56513..1d2f06f 100644
--- a/src/Grenade/Layers/Elu.hs
+++ b/src/Grenade/Layers/Elu.hs
@@ -2,6 +2,13 @@
 {-# LANGUAGE TypeOperators #-}
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Elu
+Description : Exponential linear unit layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Elu (
     Elu (..)
   ) where
diff --git a/src/Grenade/Layers/Logit.hs b/src/Grenade/Layers/Logit.hs
index 5871072..9097ee5 100644
--- a/src/Grenade/Layers/Logit.hs
+++ b/src/Grenade/Layers/Logit.hs
@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE FlexibleInstances #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Logit
+Description : Sigmoid nonlinear layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Logit (
     Logit (..)
   ) where
@@ -14,6 +21,7 @@ import Data.Singletons
 import Grenade.Core
 
 -- | A Logit layer.
+--
 --   A layer which can act between any shape of the same dimension, performing a sigmoid function.
 --   This layer should be used as the output layer of a network for logistic regression (classification)
 --   problems.
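A concrete network type makes the Logit documentation and the `randomMyNet` passage above easier to follow: `Logit` sits at the end of the layer list, and the shape list pins down every intermediate activation. This is only a sketch in the style of the example in Grenade.hs; the alias `MyNet`, the layer sizes, and the `IO`-specialised signature are illustrative, not something this patch adds.

> {-# LANGUAGE DataKinds #-}
> import Grenade
>
> -- A two-input logistic regression: one fully connected layer
> -- squashed through a sigmoid.
> type MyNet = Network '[ FullyConnected 2 1, Logit ]
>                      '[ 'D1 2, 'D1 1, 'D1 1 ]
>
> randomMyNet :: IO MyNet
> randomMyNet = randomNetwork

Because `randomNetwork` requires `CreatableNetwork`, this definition only compiles when each layer can map its input shape to the next shape in the list, which is the soundness check the Grenade.hs hunk describes.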
diff --git a/src/Grenade/Layers/Pad.hs b/src/Grenade/Layers/Pad.hs
index 5908ee9..d200569 100644
--- a/src/Grenade/Layers/Pad.hs
+++ b/src/Grenade/Layers/Pad.hs
@@ -5,6 +5,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE FlexibleContexts #-}
+{-|
+Module      : Grenade.Layers.Pad
+Description : Padding layer for 2D and 3D images
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Pad (
     Pad (..)
   ) where
@@ -22,6 +29,8 @@
 
 import Numeric.LinearAlgebra (konst, subMatrix, diagBlock)
 import Numeric.LinearAlgebra.Static (extract, create)
 
 -- | A padding layer for a neural network.
+--
+--   Pads on the X and Y dimensions of an image.
 data Pad :: Nat -> Nat -> Nat
diff --git a/src/Grenade/Layers/Pooling.hs b/src/Grenade/Layers/Pooling.hs
index dce2065..6016400 100644
--- a/src/Grenade/Layers/Pooling.hs
+++ b/src/Grenade/Layers/Pooling.hs
@@ -6,6 +6,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE FlexibleContexts #-}
+{-|
+Module      : Grenade.Layers.Pooling
+Description : Max pooling layer for 2D and 3D images
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Pooling (
     Pooling (..)
   ) where
@@ -22,6 +29,7 @@ import Grenade.Layers.Internal.Pooling
 
 import Numeric.LinearAlgebra.Static as LAS hiding ((|||), build, toRows)
 
 -- | A pooling layer for a neural network.
+--
 --   Does a max pooling, sliding a kernel over the image similarly to a convolution, but returning
 --   only the maximum value of each window. This layer is often used to provide minor amounts of translational invariance.
 --
diff --git a/src/Grenade/Layers/Relu.hs b/src/Grenade/Layers/Relu.hs
index e48f9a0..3db4670 100644
--- a/src/Grenade/Layers/Relu.hs
+++ b/src/Grenade/Layers/Relu.hs
@@ -2,6 +2,13 @@
 {-# LANGUAGE TypeOperators #-}
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Relu
+Description : Rectifying linear unit layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Relu (
     Relu (..)
   ) where
diff --git a/src/Grenade/Layers/Reshape.hs b/src/Grenade/Layers/Reshape.hs
index 0793341..887f6fa 100644
--- a/src/Grenade/Layers/Reshape.hs
+++ b/src/Grenade/Layers/Reshape.hs
@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE FlexibleContexts #-}
+{-|
+Module      : Grenade.Layers.Reshape
+Description : Multipurpose reshaping layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Reshape (
     Reshape (..)
   ) where
@@ -19,9 +26,9 @@ import Grenade.Core
 
 -- | Reshape Layer
 --
---   Flattens input down to D1 from either 2D or 3D data.
---
---   Casts input D1 up to either 2D or 3D data if the shapes are good.
+--   The Reshape layer can flatten any 2D or 3D image to a 1D vector with the
+--   same number of activations, as well as cast up from 1D to a 2D or 3D
+--   shape.
 --
 --   Can also be used to turn a 3D image with only one channel into a 2D image
 --   or vice versa.
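The Reshape hunk above is easiest to check with concrete sizes, since the layer is only accepted when the activation counts agree. A sketch under the same illustrative conventions as before (the alias `Flattened` and the 28x28 input are assumptions, not part of this patch):

> {-# LANGUAGE DataKinds #-}
> import Grenade
>
> -- Pooling with a 2x2 kernel and 2x2 stride halves each spatial
> -- dimension, and Reshape then flattens 14 * 14 = 196 activations.
> type Flattened = Network '[ Pooling 2 2 2 2, Relu, Reshape ]
>                          '[ 'D2 28 28, 'D2 14 14, 'D2 14 14, 'D1 196 ]

Swapping 'D1 196 for, say, 'D1 200 is rejected at compile time, which is the "same number of activations" condition stated in the comment.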
diff --git a/src/Grenade/Layers/Softmax.hs b/src/Grenade/Layers/Softmax.hs
index ce786b9..27ca4b6 100644
--- a/src/Grenade/Layers/Softmax.hs
+++ b/src/Grenade/Layers/Softmax.hs
@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE FlexibleInstances #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Softmax
+Description : Softmax loss layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Softmax (
     Softmax (..)
   , softmax
   ) where
@@ -17,8 +24,12 @@ import Grenade.Core
 
 import Numeric.LinearAlgebra.Static as LAS
 
 -- | A Softmax layer
+--
 --   This layer is like a logit layer, but normalises
 --   a set of matrices to be probabilities.
+--
+--   One can use this layer as the last layer of a network
+--   when normalised probabilities are required.
 data Softmax = Softmax
   deriving Show
diff --git a/src/Grenade/Layers/Tanh.hs b/src/Grenade/Layers/Tanh.hs
index caa8600..b298f43 100644
--- a/src/Grenade/Layers/Tanh.hs
+++ b/src/Grenade/Layers/Tanh.hs
@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE FlexibleInstances #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Tanh
+Description : Hyperbolic tangent nonlinear layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Tanh (
     Tanh (..)
   ) where
diff --git a/src/Grenade/Layers/Trivial.hs b/src/Grenade/Layers/Trivial.hs
index 646c07a..0d756ca 100644
--- a/src/Grenade/Layers/Trivial.hs
+++ b/src/Grenade/Layers/Trivial.hs
@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE FlexibleInstances #-}
+{-|
+Module      : Grenade.Layers.Trivial
+Description : Trivial layer which performs no operations on the data
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Trivial (
     Trivial (..)
   ) where
@@ -11,7 +18,10 @@
 import Data.Serialize
 
 import Grenade.Core
 
--- | A trivial layer.
+-- | A Trivial layer.
+--
+--   This can be used to pass an unchanged value up one side of a
+--   graph, in a residual network for example.
 data Trivial = Trivial
   deriving Show
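To tie the Softmax note about final layers to something concrete, a classification head might end as in the sketch below; the alias `Classifier` and the 10-to-3 sizes are illustrative assumptions.

> {-# LANGUAGE DataKinds #-}
> import Grenade
>
> -- Softmax as the last layer normalises the three outputs into
> -- probabilities which sum to one.
> type Classifier = Network '[ FullyConnected 10 3, Softmax ]
>                           '[ 'D1 10, 'D1 3, 'D1 3 ]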