-- | Construction, feedforward evaluation, and (de)serialization of
--   feedforward neural networks built from 'Layer's.
module Network.Network
  ( Network(..)
  , TrainingData
  , createNetwork
  , loadNetwork
  , predict
  , apply
  , saveNetwork
  ) where

import Network.Neuron
import Network.Layer

import System.Random
import Numeric.LinearAlgebra
import qualified Data.ByteString.Lazy as B
import Data.Binary (encode, decode, Binary(..))

-- | A network is an ordered list of layers; input flows through them in
--   list order during feedforward.
data Network a = Network { layers :: [Layer a] }

-- | A single training example: an input vector paired with its expected
--   output vector.
type TrainingData a = (Vector a, Vector a)
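
-- An illustrative sample (not from the original source): a single XOR-style
-- training pair, using 'fromList' from Numeric.LinearAlgebra.
--
-- > xorSample :: TrainingData Double
-- > xorSample = (fromList [0.0, 1.0], fromList [1.0])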

-- | Build a network from a list of layer definitions by pairing each
--   definition with its successor, so n definitions yield n - 1 layers.
--   Fewer than two definitions produce an empty network.
createNetwork :: (RandomGen g, Random a, Floating a, Floating (Vector a), Container Vector a)
              => RandomTransform a -> g -> [LayerDefinition a] -> Network a
createNetwork _ _ []  = Network []
createNetwork _ _ [_] = Network []
createNetwork t g (layerDef : layerDef' : otherLayerDefs) =
  Network (layer : layers restOfNetwork)
  where layer         = createLayer t g layerDef layerDef'
        restOfNetwork = createNetwork t g (layerDef' : otherLayerDefs)
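
-- A minimal usage sketch (illustrative only): it assumes a 'LayerDefinition'
-- constructor, a neuron such as 'sigmoidNeuron', a connectivity function such
-- as 'connectFully', and a random transform such as 'normals' are exported by
-- Network.Layer and Network.Neuron; the exact names may differ there.
--
-- > exampleNetwork :: IO (Network Double)
-- > exampleNetwork = do
-- >   g <- newStdGen
-- >   let defs = [ LayerDefinition sigmoidNeuron 2 connectFully
-- >              , LayerDefinition sigmoidNeuron 3 connectFully
-- >              , LayerDefinition sigmoidNeuron 1 connectFully
-- >              ]
-- >   return (createNetwork normals g defs)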

-- | Feed an input vector forward through every layer of the network,
--   returning the activation of the final layer.
predict :: (Floating (Vector a), Container Vector a, Product a)
        => Vector a -> Network a -> Vector a
predict input network = foldl apply input (layers network)
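
-- Illustrative use, continuing the sketch above: feed a two-element input
-- through a network whose first layer expects two inputs.
--
-- > output :: Network Double -> Vector Double
-- > output net = predict (fromList [0.0, 1.0]) net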

-- | Propagate a vector through a single layer: for weight matrix W, bias b,
--   and input x, compute the elementwise activation sigma (W x + b).
apply :: (Floating (Vector a), Container Vector a, Product a)
      => Vector a -> Layer a -> Vector a
apply vector layer = mapVector sigma (weights <> vector + bias)
  where sigma   = activation (neuron layer)
        weights = weightMatrix layer
        bias    = biasVector layer

-- | Serialize a network's weights and biases to a file. Layer definitions
--   (activation functions, connectivity) are not stored and must be supplied
--   again when loading.
saveNetwork :: (Binary (ShowableLayer a), Floating a, Floating (Vector a), Container Vector a)
            => FilePath -> Network a -> IO ()
saveNetwork file n = B.writeFile file (encode $ map layerToShowable (layers n))

-- | Read serialized layers back from a file and rebuild a network, using the
--   given layer definitions to restore each layer's neuron type and shape.
loadNetwork :: (Binary (ShowableLayer a), Floating a, Floating (Vector a), Container Vector a)
            => FilePath -> [LayerDefinition a] -> IO (Network a)
loadNetwork file defs = B.readFile file >>= \sls ->
  return $ Network (map showableToLayer (zip (decode sls) defs))
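
-- Round-trip sketch (illustrative): because only weights and biases are
-- serialized, the same layer definitions used to build the network must be
-- passed to 'loadNetwork' to reconstruct it.
--
-- > roundTrip :: Network Double -> [LayerDefinition Double] -> IO (Network Double)
-- > roundTrip net defs = do
-- >   saveNetwork "network.bin" net
-- >   loadNetwork "network.bin" defs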