Copyright | (c) Huw Campbell 2016-2017 |
---|---|
License | BSD2 |
Stability | experimental |
Safe Haskell | None |
Language | Haskell98 |
This module defines the core data types and functions for non-recurrent neural networks.
- data Network :: [*] -> [Shape] -> * where
- data Gradients :: [*] -> * where
- data Tapes :: [*] -> [Shape] -> * where
- runNetwork :: forall layers shapes. Network layers shapes -> S (Head shapes) -> (Tapes layers shapes, S (Last shapes))
- runGradient :: forall layers shapes. Network layers shapes -> Tapes layers shapes -> S (Last shapes) -> (Gradients layers, S (Head shapes))
- applyUpdate :: LearningParameters -> Network layers shapes -> Gradients layers -> Network layers shapes
- randomNetwork :: (CreatableNetwork xs ss, MonadRandom m) => m (Network xs ss)
Documentation
data Network :: [*] -> [Shape] -> * where Source #
Type of a network.
The [*]
type specifies the types of the layers.
The [Shape]
type specifies the shapes of data passed between the layers.
Can be considered to be a heterogeneous list of layers which are able to transform the data shapes of the network.
NNil :: SingI i => Network '[] '[i] | |
(:~>) :: (SingI i, SingI h, Layer x i h) => !x -> !(Network xs (h ': hs)) -> Network (x ': xs) (i ': (h ': hs)) infixr 5 |
(Show x, Show (Network xs rs)) => Show (Network ((:) * x xs) ((:) Shape i rs)) Source # | |
Show (Network ([] *) ((:) Shape i ([] Shape))) Source # | |
(SingI Shape i, SingI Shape o, Layer x i o, Serialize x, Serialize (Network xs ((:) Shape o rs))) => Serialize (Network ((:) * x xs) ((:) Shape i ((:) Shape o rs))) Source # | |
SingI Shape i => Serialize (Network ([] *) ((:) Shape i ([] Shape))) Source # | Add very simple serialisation to the network |
CreatableNetwork sublayers subshapes => UpdateLayer (Network sublayers subshapes) Source # | Ultimate composition. This allows a complete network to be treated as a layer in a larger network. |
(CreatableNetwork sublayers subshapes, (~) Shape i (Head Shape subshapes), (~) Shape o (Last Shape subshapes)) => Layer (Network sublayers subshapes) i o Source # | Ultimate composition. This allows a complete network to be treated as a layer in a larger network. |
type Gradient (Network sublayers subshapes) Source # | |
type Tape (Network sublayers subshapes) i o Source # | |
data Gradients :: [*] -> * where Source #
Gradient of a network.
Parameterised on the layers of the network.
data Tapes :: [*] -> [Shape] -> * where Source #
Wengert Tape of a network.
Parameterised on the layers and shapes of the network.
runNetwork :: forall layers shapes. Network layers shapes -> S (Head shapes) -> (Tapes layers shapes, S (Last shapes)) Source #
Running a network forwards with some input data.
This gives the output, and the Wengert tape required for back propagation.
runGradient :: forall layers shapes. Network layers shapes -> Tapes layers shapes -> S (Last shapes) -> (Gradients layers, S (Head shapes)) Source #
Running a loss gradient back through the network.
This requires a Wengert tape, generated with the appropriate input for the loss.
Gives the gradients for the layer, and the gradient across the input (which may not be required).
applyUpdate :: LearningParameters -> Network layers shapes -> Gradients layers -> Network layers shapes Source #
Apply one step of stochastic gradient descent across the network.
randomNetwork :: (CreatableNetwork xs ss, MonadRandom m) => m (Network xs ss) Source #
Create a network with randomly initialised weights.
Calls to this function will not compile if the type of the neural network is not sound.