module Neural.Neural 
(
  calculateNet,
  learn,
  fErrors,
  getLOutputs,
  getOutputs,  
  step,
  step_backward
)
where

import Data.Graph.Inductive
import Data.List

-- | Nodes that have no successors are the net's output nodes.
getOutputs :: (Graph gr) => gr a b -> [Node]
getOutputs net = [n | n <- nodes net, null (suc net n)]

-- | Nodes that have no predecessors are the net's input nodes.
getInputs :: (Graph gr) => gr a b -> [Node]
getInputs net = filter (null . pre net) $ nodes net

-- | Labels of the net's output nodes (nodes that have no successors).
getLOutputs :: (Graph gr) => gr a b -> [a]
getLOutputs net = [label | (node, label) <- labNodes net, null (suc net node)]

setLabel label (inp, node, _, out) = (inp, node, label, out)  

-- | Update net nodes using the given inputs, number of layers and step
-- function: load the inputs into the input nodes, then apply the step
-- function once per layer and take the resulting net.
calculateNet :: (DynGraph gr) => (gr a b -> gr a b) -> Int -> [a] -> gr a b -> gr a b
calculateNet stepFn layers inputs net = head (drop layers states)
    where
    -- Lazy list of nets after 0, 1, 2, ... applications of stepFn.
    states = iterate stepFn (setInputs inputs net)

-- | Step function for forward propagation: every node that has
-- predecessors is relabelled with the activation of the weighted sum of
-- its predecessors' labels; nodes without predecessors keep their label.
step :: Fractional a => (a -> a) -> Gr a a -> Gr a a
step activation net = gmap recompute net
    where
    recompute (adjIn, node, value, adjOut) = (adjIn, node, fire node value, adjOut)
    -- New label for a node: unchanged when it has no incoming edges,
    -- otherwise the activated weighted sum over all labelled predecessors.
    -- A predecessor with a missing label contributes 0.0 to the sum.
    fire node value =
        case lpre net node of
             []      -> value
             parents -> activation $ sum [maybe 0.0 (* weight) (lab net parent) | (parent, weight) <- parents]

-- | Step function for backward propagation.
--
-- The original body was a byte-for-byte duplicate of 'step': both apply
-- the supplied function to the weighted sum of predecessor labels; only
-- the call site differs (the activation derivative is passed here
-- instead of the activation).  Reuse 'step' instead of duplicating it.
step_backward :: Fractional a => (a -> a) -> Gr a a -> Gr a a
step_backward = step

-- | Error for the given target outputs: target minus the actual output
-- labels of the net produced by 'fCalc'.
-- NOTE(review): the first argument (an activation derivative?) is never
-- used by this implementation; it is kept only so the arity and type
-- stay compatible with callers — confirm whether it should be applied.
fErrors :: (Num a, Graph gr) => (a -> a) -> (a1 -> gr a b) -> [a] -> a1 -> [a]
fErrors _dactivation fCalc targets net = zipWith (-) targets (getLOutputs (fCalc net))

-- | learning function
--
-- Reverses the net, runs the supplied calculation ('fCalc', seeded with
-- the output errors) over the reversed net, rebuilds every edge weight
-- from the result, and finally reverses the net back to its original
-- orientation.
learn :: (Fractional a, Ord a) => ([a] -> Gr a a -> Gr a a) -> a -> [a] -> Gr a a -> Gr a a
learn fCalc learning_rate errors net = grev $ adjustWeights $ (fCalc errors) $ grev net
    where
    -- Rebuild the graph keeping the ORIGINAL node labels of 'net' but
    -- with every edge weight adjusted; 'err_net' is the net produced by
    -- 'fCalc' and carries the per-node error labels.
    adjustWeights err_net = mkGraph (labNodes net) newEdges
        where
        newEdges = map adjust $ labEdges net
        adjust (from, to, w) = (from, to, new_w)
            where
            -- Clamp the updated weight into [-1, 1].
            new_w = case (w+dw) of
                         x | (x>1) -> 1
                           | (x<(-1)) -> -1
                           |otherwise -> x
            -- dw = learning_rate * (label of 'to' in net) * (label of 'to'
            -- in err_net); a node missing from either graph contributes
            -- 0.0, zeroing the update.
            -- NOTE(review): both factors look up node 'to'. The usual
            -- delta rule multiplies the error at 'to' by the activation
            -- of 'from' — confirm using 'to' twice is intentional.
            dw = (*learning_rate) $ product $ map (maybe 0.0 id . (`lab` to)) [net, err_net]

-- | Replace the labels of the net's input nodes (nodes with no
-- predecessors) by the given input values: delete the input nodes, then
-- re-insert their original contexts with the new labels.
--
-- Fixed: the original composed the re-insertions with @foldr1 (.)@,
-- which crashes on an empty list (a net with no input nodes, or an
-- empty inputs list).  @foldr (&) base@ performs the same right-to-left
-- sequence of insertions and is total.
-- NOTE(review): 'zipWith' truncates — if fewer input values than input
-- nodes are supplied, the surplus nodes are deleted and never
-- re-inserted; confirm callers always pass one value per input node.
setInputs :: (DynGraph gr) => [a] -> gr a b -> gr a b
setInputs inputs net = foldr (&) (delNodes inputNodes net) newInputs
                         where
                         inputNodes = getInputs net
                         newInputs = zipWith setLabel inputs $ map (context net) inputNodes

