Huw Campbell 2017-04-10 12:06:50 +10:00
parent f4e1f2899b
commit adf731218c
10 changed files with 79 additions and 5 deletions

View File

@@ -46,7 +46,7 @@ is a simple example which runs a logistic regression.
> randomMyNet = randomNetwork
The function `randomMyNet` witnesses the `CreatableNetwork`
-constraint of the neural network, that is, it ensures the network
+constraint of the neural network, and in doing so, ensures the network
can be built, and hence, that the architecture is sound.
-}
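
As a concrete illustration of the comment above, here is a minimal sketch; the `MyNet` architecture and shapes are illustrative, not part of this commit:

> {-# LANGUAGE DataKinds, TypeOperators #-}
> import Control.Monad.Random (MonadRandom)
> import Grenade
>
> -- A small logistic-regression style network: 2 inputs, 3 outputs.
> type MyNet = Network '[ FullyConnected 2 3, Logit ]
>                      '[ 'D1 2, 'D1 3, 'D1 3 ]
>
> -- The type signature alone obliges GHC to solve the CreatableNetwork
> -- constraint, proving the architecture is sound before anything runs.
> randomMyNet :: MonadRandom m => m MyNet
> randomMyNet = randomNetwork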

View File

@@ -2,6 +2,13 @@
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : Grenade.Layers.Elu
Description : Exponential linear unit layer
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Elu (
Elu (..)
) where
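
For reference, the exponential linear unit this layer applies pointwise, sketched on scalars with the common alpha = 1 (an illustration, not the module's vectorised code):

> -- Identity for positive inputs; exp x - 1 smoothly saturates below zero.
> eluScalar :: Double -> Double
> eluScalar x = if x > 0 then x else exp x - 1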

View File

@@ -3,6 +3,13 @@
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : Grenade.Layers.Logit
Description : Sigmoid nonlinear layer
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Logit (
Logit (..)
) where
@@ -14,6 +21,7 @@ import Data.Singletons
import Grenade.Core
-- | A Logit layer.
--
-- A layer which maps any shape to the same shape, applying the sigmoid function pointwise.
-- This layer should be used as the output layer of a network for logistic regression (classification)
-- problems.
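
The map behind this layer, sketched on plain scalars (an illustration, not the module's internals):

> -- Sigmoid squashes any real input into (0, 1), which is why the layer
> -- suits the output of a classifier.
> sigmoid :: Double -> Double
> sigmoid x = 1 / (1 + exp (-x))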

View File

@@ -5,6 +5,13 @@
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-|
Module : Grenade.Layers.Pad
Description : Padding layer for 2D and 3D images
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Pad (
Pad (..)
) where
@@ -22,6 +29,8 @@ import Numeric.LinearAlgebra (konst, subMatrix, diagBlock)
import Numeric.LinearAlgebra.Static (extract, create)
-- | A padding layer for a neural network.
--
-- Pads on the X and Y dimensions of an image.
data Pad :: Nat
-> Nat
-> Nat
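
The declaration continues beyond this hunk. A hedged usage sketch, assuming (from the shape arithmetic) four Nat parameters for the left, top, right, and bottom padding, with DataKinds enabled:

> -- Padding a 28x28 image by one pixel on every side yields a 30x30 image.
> type PadExample = Network '[ Pad 1 1 1 1 ] '[ 'D2 28 28, 'D2 30 30 ]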

View File

@@ -6,6 +6,13 @@
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-|
Module : Grenade.Layers.Pooling
Description : Max Pooling layer for 2D and 3D images
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Pooling (
Pooling (..)
) where
@@ -22,6 +29,7 @@ import Grenade.Layers.Internal.Pooling
import Numeric.LinearAlgebra.Static as LAS hiding ((|||), build, toRows)
-- | A pooling layer for a neural network.
--
-- Performs max pooling: a kernel is slid over the input, much as in a
-- convolution layer, but only the maximum value in each window is kept.
-- This layer is often used to provide minor amounts of translational invariance.
--
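
A hedged usage sketch, assuming the four Nat parameters are kernel rows, kernel columns, row stride, and column stride, with DataKinds enabled:

> -- A 2x2 kernel with stride 2 halves each spatial dimension:
> -- (28 - 2) / 2 + 1 = 14.
> type PoolExample = Network '[ Pooling 2 2 2 2 ] '[ 'D2 28 28, 'D2 14 14 ]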

View File

@@ -2,6 +2,13 @@
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : Grenade.Layers.Relu
Description : Rectifying linear unit layer
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Relu (
Relu (..)
) where
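
The rectifier this layer applies pointwise, sketched on scalars (illustrative only):

> -- ReLU zeroes negative activations and passes positive ones through.
> reluScalar :: Double -> Double
> reluScalar = max 0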

View File

@@ -3,6 +3,13 @@
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-|
Module : Grenade.Layers.Reshape
Description : Multipurpose reshaping layer
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Reshape (
Reshape (..)
) where
@@ -19,9 +26,9 @@ import Grenade.Core
-- | Reshape Layer
--
--- Flattens input down to D1 from either 2D or 3D data.
---
--- Casts input D1 up to either 2D or 3D data if the shapes are good.
+-- The Reshape layer can flatten any 2D or 3D image to a 1D vector with the
+-- same number of activations, as well as cast up from 1D to a 2D or 3D
+-- shape.
--
-- Can also be used to turn a 3D image with only one channel into a 2D image
-- or vice versa.
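
A hedged sketch of the flattening case (the concrete shapes are illustrative, with DataKinds enabled):

> -- A 4x5 image flattens to a vector of the same 20 activations.
> type FlattenExample = Network '[ Reshape ] '[ 'D2 4 5, 'D1 20 ]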

View File

@@ -3,6 +3,13 @@
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : Grenade.Layers.Softmax
Description : Softmax loss layer
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Softmax (
Softmax (..)
, softmax
@@ -17,8 +24,12 @@ import Grenade.Core
import Numeric.LinearAlgebra.Static as LAS
-- | A Softmax layer
--
-- This layer is like a logit layer, but normalises
-- a set of matrices to be probabilities.
--
-- One can use this layer as the last layer in a network
-- when normalised probabilities are required.
data Softmax = Softmax
deriving Show
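
The exported `softmax` works on static vectors; the following list-based sketch only illustrates the same normalisation:

> -- Exponentiate each entry, then divide by the total so the result
> -- is non-negative and sums to one.
> softmaxList :: [Double] -> [Double]
> softmaxList xs = map (/ total) exps
>   where
>     exps  = map exp xs
>     total = sum exps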

View File

@@ -3,6 +3,13 @@
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : Grenade.Layers.Tanh
Description : Hyperbolic tangent nonlinear layer
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Tanh (
Tanh (..)
) where
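
A scalar sketch of the activation and of the derivative backpropagation needs (illustrative names, not the module's internals):

> tanhScalar, tanhGrad :: Double -> Double
> tanhScalar x = tanh x
> -- d/dx tanh x = 1 - tanh^2 x
> tanhGrad   x = 1 - tanh x * tanh x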

View File

@@ -3,6 +3,13 @@
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-|
Module : Grenade.Layers.Trivial
Description : Trivial layer which performs no operations on the data
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Trivial (
Trivial (..)
) where
@@ -11,7 +18,10 @@ import Data.Serialize
import Grenade.Core
--- | A trivial layer.
+-- | A Trivial layer.
--
-- This can be used to pass an unchanged value up one side of a
-- graph, in a residual network for example.
data Trivial = Trivial
deriving Show
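
Behaviourally the layer is the identity; a sketch of what running it amounts to (not the actual Layer instance):

> -- Trivial passes its input through untouched, which is exactly what the
> -- skip connection of a residual block needs.
> trivialForward :: x -> x
> trivialForward = id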