module Neural.Neural 
(
  updateNet,
  getLOutputs,
  getOutputs,  
  Net
)
where

import Data.Graph.Inductive
import Data.List

type Net a = Gr a a

-- | A node with no successors acts as an output of the network.
getOutputs :: Net a -> [Node]
getOutputs net = [n | n <- nodes net, null (suc net n)]

-- | A node with no predecessors acts as an input of the network.
getInputs :: Net a -> [Node]
getInputs net = filter (null . pre net) $ nodes net

-- | Labels of the output nodes, i.e. of every node with no successors.
-- (The previous haddock was copy-pasted from 'getOutputs' and described
-- nodes rather than labels.)
getLOutputs :: Net a -> [a]
getLOutputs net = [l | (n, l) <- labNodes net, null (suc net n)]

setLabel label (inp, node, _, out) = (inp, node, label, out)  

-- | Install the given labels on the net's input nodes.
--
-- NOTE(review): forward propagation via 'step' is not wired in yet
-- (TBD) — currently this only sets the inputs.
updateNet :: [a] -> Net a -> Net a
updateNet = setInputs

-- | One forward-propagation step: every node's label is recomputed as
-- the activation of the weighted sum of its predecessors' labels, all
-- read from the /original/ net.  Nodes with no predecessors (the
-- inputs) keep their current label.  A missing predecessor label
-- contributes 0.0 to the sum.
step :: Fractional a => (a -> a) -> Net a -> Net a
step activation net = gmap recompute net
    where
    recompute (ins, n, v, outs) = (ins, n, propagate v (lpre net n), outs)
    -- No predecessors: the label is left as-is.
    propagate v []    = v
    propagate _ preds = activation (sum (map weighted preds))
    -- Predecessor label times edge weight; 0.0 when the label is absent.
    weighted (p, w) = maybe 0.0 (* w) (lab net p)

-- | Step function for backward propagation.
--
-- NOTE(review): the original body was a byte-for-byte duplicate of
-- 'step' — the derivative-specific logic is evidently not implemented
-- yet — so it now delegates to 'step' to avoid the duplication.
-- Replace with a real backward pass when implemented.
step_backward :: Fractional a => (a -> a) -> Net a -> Net a
step_backward = step


-- | Replace the labels of the net's input nodes with the given values:
-- the input contexts are deleted and re-inserted (via fgl's '(&)')
-- carrying the new labels.
--
-- Fixes a crash: the original used the partial 'foldr1', which throws
-- on an empty list of contexts (no input nodes, or an empty @inputs@
-- list).  'foldr' with the pruned graph as the base case is total and
-- inserts the contexts in the same order.
--
-- NOTE(review): when @length inputs /= length inputNodes@, 'zipWith'
-- truncates, so surplus input nodes are deleted without being
-- re-inserted — confirm callers always supply one value per input node.
setInputs :: [a] -> Net a -> Net a
setInputs inputs net = foldr (&) (delNodes inputNodes net) newContexts
                         where
                         inputNodes  = getInputs net
                         newContexts = zipWith setLabel inputs (map (context net) inputNodes)

