concat-examples-0.3.0.0: Some examples of compiling to categories
Safe Haskell: Safe-Inferred
Language: Haskell2010

ConCat.Deep

Description

Simple feed-forward deep learning

Documentation

type (--*) p q = q :.: p infixr 1 Source #

Generalized matrix
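
A small illustration of the representation (V2 and V3 here are hypothetical 2- and 3-element functors, not part of this module):

  type M23 s = (V3 --* V2) s        -- = (V2 :.: V3) s, a V2 of V3-rows
  -- Such a "matrix" stores one length-3 row of weights per output
  -- coordinate and is applied to a V3 s with 'linear' below.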

scaleV :: (Functor a, Num s) => s -> Unop (a s) Source #

Scale a vector

(*^) :: (Functor a, Num s) => s -> Unop (a s) infixl 7 Source #

Scale a vector

negateV :: (Functor a, Num s) => Unop (a s) Source #

Negate a vector elementwise

(^-^) :: (Zip a, Num s) => Binop (a s) infixl 6 Source #

Elementwise vector subtraction

(^/) :: (Functor a, Fractional s) => a s -> s -> a s infixl 7 Source #

Divide a vector by a scalar
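
A quick GHCi sketch of these operators, using plain lists as the vector functor (this assumes a Zip instance for lists, e.g. the one from Data.Key, is in scope):

  >>> 2 *^ [1,2,3] :: [Double]
  [2.0,4.0,6.0]
  >>> [5,7] ^-^ [1,2] :: [Double]
  [4.0,5.0]
  >>> [2,4] ^/ 2 :: [Double]
  [1.0,2.0]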

dotV :: (Foldable a, Zip a, Additive s, Num s) => a s -> a s -> s Source #

Inner product

(<.>) :: (Foldable a, Zip a, Additive s, Num s) => a s -> a s -> s infixl 7 Source #

Inner product
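
For example, again with lists (assuming the list Zip instance and the Additive instance for Double):

  >>> dotV [1,2,3] [4,5,6] :: Double
  32.0
  >>> [1,2,3] <.> [4,5,6] :: Double
  32.0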

outerV :: (Functor a, Functor b, Num s) => a s -> b s -> a (b s) Source #

Outer product. (Do we want this order of functor composition?)

(>.<) :: (Functor a, Functor b, Num s) => a s -> b s -> a (b s) infixl 7 Source #

Outer product. (Do we want this order of functor composition?)
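
The result pairs every element of the first vector with every element of the second, outermost index first (values shown assume the usual a_i * b_j convention):

  >>> [1,2] >.< [10,20,30] :: [[Double]]
  [[10.0,20.0,30.0],[20.0,40.0,60.0]]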

linear :: (Foldable a, Zip a, Functor b, Additive s, Num s) => (a --* b) s -> a s -> b s Source #

Apply a linear map
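
A sketch with lists as both functors, wrapping the rows with the Comp1 constructor from GHC.Generics (and assuming the list Zip instance as above):

  >>> import GHC.Generics
  >>> linear (Comp1 [[1,2,3],[4,5,6]]) [1,1,1] :: [Double]
  [6.0,15.0]

Each output component is the inner product of the corresponding row with the argument.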

type Bump h = h :*: Par1 Source #

A functor h extended with one extra scalar component, used to hold the constant input of an affine map

bump :: Num s => a s -> Bump a s Source #

Extend a vector with a constant 1, so that a linear map on the result acts as an affine map on the original

type (--+) a b = Bump a --* b infixr 1 Source #

Affine map representation

affine :: (Foldable a, Zip a, Functor b, Additive s, Num s) => (a --+ b) s -> a s -> b s Source #

Affine application
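
Schematically (a sketch consistent with the types and descriptions above, not necessarily the literal definition), affine is linear precomposed with bump: each row of an (a --+ b) s holds its weights for a plus one extra bias entry.

  affine m xs  =  linear m (bump xs)                           -- (schematic)
  -- for a row ws :*: Par1 b of m, the output component is ws <.> xs + b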

normSqr :: (Foldable n, Zip n, Additive s, Num s) => n s -> s Source #

Norm squared

distSqr :: (Foldable n, Zip n, Additive s, Num s) => n s -> n s -> s Source #

Distance squared

relus :: (Functor f, Ord a, Num a) => Unop (f a) Source #

Elementwise ReLU (replace negative entries with zero)

affRelu :: (Foldable a, Zip a, Functor b, Ord s, Additive s, Num s) => (a --+ b) s -> a s -> b s Source #

Affine followed by RELUs.

logistics :: (Functor f, Floating a) => Unop (f a) Source #

Elementwise logistic (sigmoid) function

affLog :: (Foldable a, Zip a, Functor b, Floating s, Additive s) => (a --+ b) s -> a s -> b s Source #

Affine followed by logistics.
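
Presumably (a sketch implied by the descriptions, not quoted from the source) these are the affine map post-composed with the corresponding pointwise nonlinearity:

  affRelu l  =  relus     . affine l
  affLog  l  =  logistics . affine l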

errSqr :: (Foldable b, Zip b, Additive s, Num s) => (a s :* b s) -> (a s -> b s) -> s Source #

Squared error of a candidate function on a single (input, target) sample

errSqrSampled :: (Foldable b, Zip b, Additive s, Num s) => (p s -> a s -> b s) -> (a s :* b s) -> p s -> s Source #

Squared error of a parameterized network on a sample, as a function of the parameters

errGrad :: (Foldable b, Zip b, Additive s, Num s) => (p s -> a s -> b s) -> (a s :* b s) -> Unop (p s) Source #

Gradient of the sampled squared error with respect to the parameters

(@.) :: (q s -> b -> c) -> (p s -> a -> b) -> (q :*: p) s -> a -> c infixr 9 Source #

Compose parameterized functions, splitting the paired parameters between them
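
For example, a two-layer stack with the same parameter shape as lr2 below can be assembled like this (a sketch; whether lr2 itself is defined this way is not shown here):

  twoLayer :: (Foldable a, Zip a, Foldable b, Zip b, Functor b, Functor c, Floating s, Additive s)
           => ((b --+ c) :*: (a --+ b)) s -> a s -> c s
  twoLayer = affLog @. affLog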

step :: forall s p a b. (Foldable b, Zip b, Functor p, Zip p, Additive1 p, Additive s, Num s) => (p s -> a s -> b s) -> s -> (a s :* b s) -> Unop (p s) Source #

A single gradient-descent update of the parameters from one training sample, scaled by the learning rate

steps :: (Foldable b, Zip b, Zip p, Additive1 p, Functor f, Foldable f, Additive s, Num s) => (p s -> a s -> b s) -> s -> f (a s :* b s) -> Unop (p s) Source #

Gradient-descent updates from a collection of training samples (one pass over the data)
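
Roughly (the standard stochastic-gradient-descent update implied by the types, not quoted from the source):

  step net rate sample p  =  p ^-^ rate *^ errGrad net sample p   -- (schematic)
  -- 'steps' then applies this update once per sample in the collection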

trainNTimes Source #

Arguments

:: (Foldable b, Zip b, Zip p, Additive1 p, Functor f, Foldable f, Additive s, Num s) 
=> Int

number of epochs

-> s

learning rate

-> (p s -> a s -> b s)

The "network" just converts a set of parameters into a function from input to output functors.

-> p s

initial guess for learnable parameters

-> f (a s :* b s)

the training pairs

-> [p s]

initial parameters + those after each training epoch

Train a network on several epochs of the training data, keeping track of its parameters after each:

  trainNTimes n rate net ps prs = take (n+1) $ iterate (steps net rate prs) ps
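
A usage sketch, training a single affine layer (V3 and V2 are hypothetical fixed-size functors assumed to carry the instances the constraints above require; p0 is an initial parameter guess and samples :: [V3 R :* V2 R] a list of training pairs; as elsewhere in concat, evaluating the gradients may require building with the ConCat plugin):

  net :: (V3 --+ V2) R -> V3 R -> V2 R
  net = affine

  history :: [(V3 --+ V2) R]
  history = trainNTimes 100 0.01 net p0 samples   -- p0 plus one snapshot per epoch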

err1 :: (R -> R) -> (R :* R) -> R Source #

Error of a scalar function on a single (input, target) sample (scalar analogue of errSqr)

err1Grad :: (p -> R -> R) -> (R :* R) -> Unop p Source #

Gradient of err1 with respect to the parameters of a parameterized scalar function (scalar analogue of errGrad)

type (-->) a b s = a s -> b s infixr 1 Source #

Function between functor applications; the shape of a network's input-to-output behavior

lr1 :: (C2 Foldable a b, Zip a, Functor b) => (a --+ b) R -> (a --> b) R Source #

Single-layer example network over R

lr2 :: (C2 Foldable a b, C2 Zip a b, C2 Functor b c) => ((b --+ c) :*: (a --+ b)) R -> (a --> c) R Source #

Two-layer example network over R, with the layers' parameters paired

lr3 :: (C3 Foldable a b c, C3 Zip a b c, C3 Functor b c d) => ((c --+ d) :*: ((b --+ c) :*: (a --+ b))) R -> (a --> d) R Source #

Three-layer example network over R

lr3' :: (C3 Foldable a b c, C3 Zip a b c, C3 Functor b c d) => ((c --+ d) :*: ((b --+ c) :*: (a --+ b))) R -> (a --> d) R Source #

An alternative formulation of lr3, with the same type