Huw Campbell 2017-04-10 12:06:50 +10:00
parent f4e1f2899b
commit adf731218c
10 changed files with 79 additions and 5 deletions

View File

@@ -46,7 +46,7 @@ is a simple example which runs a logistic regression.
 > randomMyNet = randomNetwork
 
 The function `randomMyNet` witnesses the `CreatableNetwork`
-constraint of the neural network, that is, it ensures the network
+constraint of the neural network, and in doing so, ensures the network
 can be built, and hence, that the architecture is sound.
 -}
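To make the quoted fragment concrete, here is a minimal sketch of how such a network and its random initialiser fit together, assuming Grenade's README-style API; `MyNet`, its layer list, and its shape list are illustrative, not taken from this commit.

{-# LANGUAGE DataKinds #-}

import Control.Monad.Random (MonadRandom)
import Grenade

-- An illustrative feed-forward network for logistic regression.
type MyNet = Network '[ FullyConnected 2 40, Tanh, FullyConnected 40 1, Logit ]
                     '[ 'D1 2, 'D1 40, 'D1 40, 'D1 1, 'D1 1 ]

-- randomNetwork only type-checks here because MyNet satisfies the
-- CreatableNetwork constraint, i.e. the layers and shapes line up.
randomMyNet :: MonadRandom m => m MyNet
randomMyNet = randomNetwork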

View File

@@ -2,6 +2,13 @@
 {-# LANGUAGE TypeOperators #-}
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Elu
+Description : Exponential linear unit layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Elu (
     Elu (..)
   ) where
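For reference, the activation this module implements, as a standalone sketch (ELU with α fixed at 1, matching the usual definition; the commit itself does not restate the formula):

-- ELU: identity for positive inputs, exp x - 1 for non-positive ones.
elu :: Double -> Double
elu x = if x > 0 then x else exp x - 1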

View File

@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE FlexibleInstances #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Logit
+Description : Sigmoid nonlinear layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Logit (
     Logit (..)
   ) where
@@ -14,6 +21,7 @@ import Data.Singletons
 import Grenade.Core
 
 -- | A Logit layer.
+--
 -- A layer which can act between any shape of the same dimension, performing the sigmoid function.
 -- This layer should be used as the output layer of a network for logistic regression (classification)
 -- problems.
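A sketch of the use this comment describes, with the same assumed imports as the earlier sketch (the type is illustrative):

-- A binary classifier: the final Logit squashes the single output
-- activation into (0, 1), so it can be read as a probability.
type BinaryNet = Network '[ FullyConnected 2 1, Logit ]
                         '[ 'D1 2, 'D1 1, 'D1 1 ]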

View File

@@ -5,6 +5,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE FlexibleContexts #-}
+{-|
+Module      : Grenade.Layers.Pad
+Description : Padding layer for 2D and 3D images
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Pad (
     Pad (..)
   ) where
@@ -22,6 +29,8 @@ import Numeric.LinearAlgebra (konst, subMatrix, diagBlock)
 import Numeric.LinearAlgebra.Static (extract, create)
 
 -- | A padding layer for a neural network.
+--
+-- Pads on the X and Y dimension of an image.
 data Pad :: Nat
          -> Nat
          -> Nat
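A sketch of how the four type-level Nats might be used; the left/top/right/bottom reading is an assumption from the layer's kind, not stated in this diff:

-- Pad a 28x28 image by one pixel on every side, giving 30x30.
type PadOne = Network '[ Pad 1 1 1 1 ] '[ 'D2 28 28, 'D2 30 30 ]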

View File

@@ -6,6 +6,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE FlexibleContexts #-}
+{-|
+Module      : Grenade.Layers.Pooling
+Description : Max pooling layer for 2D and 3D images
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Pooling (
     Pooling (..)
   ) where
@@ -22,6 +29,7 @@ import Grenade.Layers.Internal.Pooling
 import Numeric.LinearAlgebra.Static as LAS hiding ((|||), build, toRows)
 
 -- | A pooling layer for a neural network.
+--
 -- Performs max pooling, looking over a kernel similarly to the convolution network, but returning
 -- the maximum value only. This layer is often used to provide minor amounts of translational invariance.
 --
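A sketch of a common configuration; the Nat order (kernel rows, kernel columns, row stride, column stride) is an assumption from the layer's kind:

-- Non-overlapping 2x2 max pooling over a 28x28 image yields 14x14.
type Halve = Network '[ Pooling 2 2 2 2 ] '[ 'D2 28 28, 'D2 14 14 ]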

View File

@@ -2,6 +2,13 @@
 {-# LANGUAGE TypeOperators #-}
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Relu
+Description : Rectifying linear unit layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Relu (
     Relu (..)
   ) where
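For reference, the activation this module implements, as a standalone sketch (the commit itself does not restate the formula):

-- ReLU: clamp negative activations to zero, pass positives through.
relu :: Double -> Double
relu x = max 0 x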

View File

@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE FlexibleContexts #-}
+{-|
+Module      : Grenade.Layers.Reshape
+Description : Multipurpose reshaping layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Reshape (
     Reshape (..)
   ) where
@@ -19,9 +26,9 @@ import Grenade.Core
 
 -- | Reshape Layer
 --
--- Flattens input down to D1 from either 2D or 3D data.
---
--- Casts input D1 up to either 2D or 3D data if the shapes are good.
+-- The Reshape layer can flatten any 2D or 3D image to a 1D vector with the
+-- same number of activations, as well as cast up from 1D to a 2D or 3D
+-- shape.
 --
 -- Can also be used to turn a 3D image with only one channel into a 2D image
 -- or vice versa.
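A sketch of the flattening direction (illustrative type, same assumed imports as the earlier sketches):

-- Flatten a 28x28 image into a vector of its 784 activations;
-- the reverse direction works whenever the sizes agree.
type Flatten = Network '[ Reshape ] '[ 'D2 28 28, 'D1 784 ]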

View File

@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE FlexibleInstances #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Softmax
+Description : Softmax loss layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Softmax (
     Softmax (..)
   , softmax
@@ -17,8 +24,12 @@ import Grenade.Core
 import Numeric.LinearAlgebra.Static as LAS
 
 -- | A Softmax layer
+--
 -- This layer is like a logit layer, but normalises
 -- a set of matrices to be probabilities.
+--
+-- One can use this layer as the last layer in a network
+-- when normalised probabilities are required.
 data Softmax = Softmax
   deriving Show
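A sketch of the use suggested by the new comment (illustrative type):

-- A three-class classifier: Softmax turns the final activations
-- into a probability distribution over the classes.
type Classifier = Network '[ FullyConnected 10 3, Softmax ]
                          '[ 'D1 10, 'D1 3, 'D1 3 ]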

View File

@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE FlexibleInstances #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
+{-|
+Module      : Grenade.Layers.Tanh
+Description : Hyperbolic tangent nonlinear layer
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Tanh (
     Tanh (..)
   ) where
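For completeness, the same kind of sketch for this layer (tanh here is the Prelude function; the shape-preserving type is illustrative):

-- Tanh squashes each activation into (-1, 1) and preserves the shape.
type Squash = Network '[ Tanh ] '[ 'D1 10, 'D1 10 ]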

View File

@@ -3,6 +3,13 @@
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE MultiParamTypeClasses #-}
 {-# LANGUAGE FlexibleInstances #-}
+{-|
+Module      : Grenade.Layers.Trivial
+Description : Trivial layer which performs no operations on the data
+Copyright   : (c) Huw Campbell, 2016-2017
+License     : BSD2
+Stability   : experimental
+-}
 module Grenade.Layers.Trivial (
     Trivial (..)
   ) where
@@ -11,7 +18,10 @@ import Data.Serialize
 import Grenade.Core
 
--- | A trivial layer.
+-- | A Trivial layer.
+--
+-- This can be used to pass an unchanged value up one side of a
+-- graph, in a residual network for example.
 data Trivial = Trivial
   deriving Show
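A sketch of what "no operation" means at the type level (illustrative):

-- Trivial is the identity layer: input and output shapes coincide
-- and the value passes through untouched.
type Identity = Network '[ Trivial ] '[ 'D1 10, 'D1 10 ]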