module Neural.FeedForward (makeNet, standartActivations) where

import Neural.Neural
import Data.Graph.Inductive

-- | Re-label every edge of @net@ with the corresponding entry of @weights@,
-- keeping the node labels untouched.  Edges are paired with weights in the
-- order 'edges' yields them; surplus weights (or surplus edges) are dropped
-- by 'zip'.
initNet :: [a] -> Gr a a -> Gr a a
initNet weights net = mkGraph (labNodes net) weightedEdges
    where
    weightedEdges = [(src, dst, w) | ((src, dst), w) <- zip (edges net) weights]

-- | Hyperbolic tangent activation function.
--
-- Explicit signatures are added here: these bindings are exported and were
-- previously typed only by the monomorphism restriction / numeric
-- defaulting; pinning them to 'Double' matches the type 'makeNet' declares
-- for its activation arguments.
activation :: Double -> Double
activation = tanh

-- | Derivative of 'activation', expressed in terms of the activation
-- /output/ @y = tanh x@, using d\/dx tanh x = 1 - tanh^2 x.
activation' :: Double -> Double
activation' y = 1 - y ^ (2 :: Int)

-- | Default activation\/derivative pair, in the order the network code
-- expects: @[forward, derivative]@.  (The name keeps the original
-- "standart" spelling for API compatibility.)
standartActivations :: [Double -> Double]
standartActivations = [activation, activation']

-- | Build the forward pass, the error function and the learning step for a
-- network with @layers@ hidden layers.
--
-- Returns @(forward, err, learnStep)@ where @forward@ runs the net on input
-- @i@, @err net@ measures the net's output against target @o@, and
-- @learnStep rate net@ performs one weight update.
--
-- NOTE(review): @activations@ must contain exactly two functions
-- (activation and its derivative) — the irrefutable 2-element list pattern
-- below fails at runtime otherwise; 'zipWith' silently ignores any extras.
-- A pair would make this total — confirm before changing the interface.
makeCalcErrAndLearn layers activations i o = (calc_f i, err, (\rate net -> learn calc_f_rev rate (err net) net)) 
    where
    -- 'fErrors', 'calculateNet', 'step', 'step_backward' and 'learn' come
    -- from Neural.Neural (not visible in this file); semantics assumed from
    -- their names — presumably forward/backward propagation primitives.
    err = fErrors (activation') (calc_f i) o
    -- Forward and backward evaluators, each stepping the net (layers + 1)
    -- times: once per hidden layer plus the output layer.
    [calc_f, calc_f_rev] = map (flip calculateNet (layers+1)) $ zipWith ($) [step, step_backward] activations
    
-- | Pair every element with an initial label of 0.0.
zeroIt xs = [(x, 0.0) | x <- xs]

-- | Insert @inputs@ fresh, zero-labelled input nodes into @net@.
addInputs inputs net = insNodes freshInputs net
    where
    freshInputs = zeroIt (newNodes inputs net)

-- | Grow the network by @layers@ fully connected hidden layers of @hidden@
-- neurons each, by repeatedly appending a layer with 'addOutputs'.
addHidden layers hidden net = head (drop layers (iterate growOneLayer net))
    where
    growOneLayer = addOutputs hidden

-- | Append a layer of @n@ fresh, zero-labelled neurons, fully connected
-- (with zero-weight edges) from the current output layer of @net@.
addOutputs n net = insEdges newLinks (insNodes newLNodes net)
    where
    -- fresh node ids, allocated against the *original* net
    freshIds = newNodes n net
    newLNodes = zeroIt freshIds
    -- one zero-weight edge from every current output to every fresh node
    newLinks = [(out, fresh, 0.0) | out <- getOutputs net, fresh <- freshIds]

-- | make simple FF network with fully connected layers
--
-- Builds an empty graph up through input, hidden and output layers, loads
-- the supplied edge weights, and returns the weighted net together with a
-- partially applied 'makeCalcErrAndLearn' awaiting activations, input and
-- target values.
makeNet :: [Double] -> Int -> Int -> Int -> Int -> ([Double -> Double] -> [Double] -> [Double] ->
           (Gr Double Double -> Gr Double Double, Gr Double Double ->
           [Double], Double -> Gr Double Double -> Gr Double Double), Gr Double Double)
makeNet weights inputs hidden layers outputs = (makeCalcErrAndLearn layers, weightedNet)
    where
    weightedNet = initNet weights topology
    topology = addOutputs outputs . addHidden layers hidden . addInputs inputs $ empty
                        

