;;;; Neural Networks
;;; TODO: add normalization
;;; TODO: add what Berry suggested at the workout about adding input
;;;findAnswer is the main function of these neural networks; it returns its guess for the given input
;;; @input: ([number]^a) the input that the system is going to guess
;;; @keys: ([value]^a) the keys that the input is taken from
;;; @out: 'value
;;; @library: ([hash-table]*) for the previous tests and for creating tests for nn
(defun nn.findAnswer (input keys out library &optional (hiddenLayers '()))
  "Train a fresh network on LIBRARY and return its guess for INPUT.
INPUT  - list of numbers, one per entry in KEYS.
KEYS   - hash-table keys used to turn each library entry into a 0/1 vector.
OUT    - the hash-table key whose presence is the value being predicted.
LIBRARY - list of hash tables; each one becomes one training example.
HIDDENLAYERS - list of hidden-layer sizes handed to CREATE-NN.
Returns the first (only) output of the trained network applied to INPUT."
  (let ((m-nn (create-nn (cons (list-length keys)
                               hiddenLayers))));input width = number of keys
    (let ((trainingsets))
      (loop for htable in library do
            (let ((trainingSet)
                  (input));shadows the parameter: per-example 0/1 input vector
              (do ((k keys (cdr k)))
                  ((not k))
                (let ((key (car k)))
                  (setq input (cons (if (gethash key htable)
                                        1 ;key present (non-nil) in this example
                                      0);key absent -> 0
                                    input))))
              (setq input (reverse input));consing built it back-to-front; restore KEYS order
              (setq trainingSet (list (if (gethash out htable)
					  1
					  0)));expected output: 1 iff OUT is present
              (setq trainingSet (cons input
                                      trainingSet));set is (input expected-output)
              (setq trainingSets (cons trainingSet
                                       trainingsets))));add to the beginning of trainingsets
      ;; threshold 0: OPT-TIL trains until its smoothed error stops changing
      (setq m-nn (opt-til m-nn trainingSets 0));train the network
      (car (calc-net input m-nn));outer INPUT again: the trained net's guess
)))

(defun nn.averageAnswer (input keys library out &optional (outof 3) (hiddenLayers '()))
  "Average the guesses of OUTOF freshly trained networks for INPUT.
Each call to NN.FINDANSWER builds and trains a new randomly initialised
network, so averaging several runs smooths out the randomness.
Fix: the original iterated only OUTOF-1 times but still divided by
OUTOF, biasing the average low (and always returning 0 when OUTOF was 1)."
  (let ((sm 0))
    ;; sum exactly OUTOF guesses, then divide by that same count
    (dotimes (i outof)
      (setq sm (+ sm
                  (nn.findAnswer input keys out library hiddenLayers))))
    (/ sm outof)))


;;;Also going to take in data from a student and, from that student's previous tests, create a network

(defvar *prev-tests* '()
  "All the previous tests; each test should be a hash table of key -> value.")
(defvar *noise-corr* 0
  "Constant subtracted from every node's weighted sum in CALC-NET,
intended to cancel input noise (0 disables the correction).")
(defun sq (x) (* x x))
(defun activation (x)
  "Logistic sigmoid 1/(1+e^-x): squashes X into the interval (0, 1)."
  (let ((neg-exp (exp (- x))))
    (/ 1 (+ 1 neg-exp))))
;(defun activation (x)
;  (- (/ 3.4 
;        (+ 1 (exp (- (* .7 x)))))
;     1.7))
    
;;;Going to run the training till the output changes less than threshold
;;; @neural-net: Should have been created with create-nn
;;; @training:  ((([0-1]*) [0-1])*) which is (((input) output)*)
(defun opt-til (neural-net training &optional (threshold 0) (old-close -1))
  "Repeatedly train NEURAL-NET on TRAINING until the smoothed error
settles: return the network once the change in the running-average
error, times 3, is <= THRESHOLD.
TRAINING is (((input) output)*) as documented above.
OLD-CLOSE seeds the running average (callers normally leave the default).
NOTE(review): with the default THRESHOLD of 0 the loop only exits when
the smoothed error stops changing entirely; this can iterate for a
very long time."
  (do ()(nil)
    (progn 
      (let ((difference 0)
            (alpha (/ 1
                      (+ 0
                         (* 2 
			    (list-length training))))));learning rate shrinks with set size
        (do ((part training 
		   (cdr part)))
            ((not part))
          (progn
            (setq difference 
                  (+ (sq (- (cadar part)
			    (car (calc-net (caar part)
					   neural-net))));squared error of this example
                     difference);accumulated over the whole set
		  )
            (setq neural-net (train (caar part)
                                    neural-net  
                                    (cadar part)
                                    (* alpha
                                      ; (random 1.0))))));Train network
				       1)))));one backprop pass per example
        (setq difference 
	      (sqrt difference)
	      );RMS-style error for this pass over the set
	;(format t "Error: ~a~%" difference)
        (setq difference 
	      (abs (- old-close
		      (setq old-close 
			    (/ (+ difference
				  old-close
				  old-close)
			       3)))));exponential smoothing: how much the average moved
        (if (<= (* difference
                  3) 
               threshold)
            (return neural-net))))))  
;;;Going to create a neural network with random starting points
;;; @construction: ([n in naturalNumbers]*)
(defun create-nn (construction)
  "Build a randomly weighted network from CONSTRUCTION, a list of layer
sizes.  Layer i of the result has (nth (1+ i) construction) nodes, each
holding (nth i construction) weights drawn uniformly from [0, 1).  A
final output layer with a single node (fed by (car (last construction))
inputs) is always appended."
  (let ((layers '()))
    ;; one hidden layer per adjacent pair of sizes in CONSTRUCTION
    (do ((dims construction (rest dims)))
        ((null (rest dims)))
      (push (loop repeat (second dims)
                  collect (loop repeat (first dims)
                                collect (random 1.0)))
            layers))
    ;; single-node output layer sized by the last listed width
    (push (list (loop repeat (car (last construction))
                      collect (random 1.0)))
          layers)
    (reverse layers)))
;;; Calculates the network's output; extra inputs beyond a node's weights are ignored, and missing inputs act as zeros
;;; @input: ([number]^a) list of numerical values
;;; @network: ((([number]^a)^b)(([number]^b)^c)...(([number]*))) The neural network that the values are going to go through
(defun calc-net (input network)
  "Feed INPUT forward through NETWORK and return the final activations.
NETWORK is a list of layers, each layer a list of nodes, each node a
list of weights; an empty NETWORK returns INPUT unchanged.  The dot
product stops at the shorter of input/weights, so extra inputs are
dropped and missing ones act as zeros.
Note: each layer's outputs come out in reverse node order, exactly as
in the original push-based implementation."
  (if (null network)
      input
      (let ((activations '()))
        (dolist (node (first network))
          (let ((total 0))
            ;; weighted sum, truncated to the shorter list
            (loop for in-val in input
                  for weight in node
                  do (incf total (* in-val weight)))
            ;; PUSH keeps the original node-reversed output order
            (push (activation (- total *noise-corr*)) activations)))
        (if (rest network)
            (calc-net activations (rest network))
            activations))))

;;;Going to try and train the network using back-propogation
;;; @input: list the values that the networks is going to get in
;;; @network: list list list for the network representation
;;; @expected-value: the value that the network should get
(defun train (input nn &optional (expected-value 1) (alpha 1/2) (beta 1/10))
 "Run one backpropagation pass over NN for a single example and return
the updated network (NN itself is not mutated; a new list is built).
INPUT - the example's input vector.
NN - network as built by CREATE-NN.
EXPECTED-VALUE - target output for this example.
ALPHA - learning rate forwarded to ADJUST-LAYER.
BETA - NOTE(review): squared once per layer below but never passed to
ADJUST-LAYER, which therefore always uses its own default decay;
confirm whether BETA was meant to be forwarded."
 (let (n-nn
       (delta (list (- expected-value
                            (car (calc-net input nn)))));output-layer error
       (w_above '((1))));identity pseudo-weights above the output layer
   (do ((layer (reverse nn) (rest layer)))
       ((not layer));walk layers from output back toward input
     (let ((output (calc-net input (reverse layer))))
       (setq delta (fdelta output
                           w_above
                           delta));error terms for the current layer
       (setq output (calc-net input (reverse (rest layer))));activations feeding this layer (INPUT itself at the first layer)
       (setq n-nn (cons (adjust-layer (car layer)
                                      output
                                      delta
                                      alpha
				      )
                        n-nn))
       (setq beta (sq beta));NOTE(review): result unused -- see docstring
       (setq w_above (car layer))))
   n-nn))                                     

         
;;; Going to take a layer and adjust it by delta
;;;  @layer: (([number]^a)*)layer that we are going to adjust
;;;  @input: ([number]^b) input that this node will be recieving
;;;  @delta: ([number]^b) The error of this node
(defun adjust-layer (layer input delta &optional (alpha 1/2) (beta -1/100))
  "Return LAYER with every weight updated:
  new-w = w + BETA*w + ALPHA * input_i * delta_j
LAYER - list of nodes, each node a list of weights.
INPUT - activations feeding this layer (one per weight).
DELTA - per-node error terms (one per node).
ALPHA - learning rate; BETA - weight-decay factor.
Rewritten from nested NTH indexing (accidentally O(n^2)) to direct
list traversal; results are identical for well-formed, matching-length
arguments, and mismatched lengths now truncate instead of erroring."
  (mapcar (lambda (node dj)
            ;; one node: walk its weights alongside the inputs
            (mapcar (lambda (w xi)
                      (+ w
                         (* beta w)         ; decay toward zero
                         (* alpha xi dj)))  ; gradient step
                    node
                    input))
          layer
          delta))
            
                                                  
  
(defun fdelta (output weights delta)
  "Back-propagated error terms: delta_j = y_j (1 - y_j) sum_k w_kj delta_k.
OUTPUT - this layer's activations (the y_j).
WEIGHTS - the layer above: one weight list per node above (w_kj is the
j-th weight of the k-th node).
DELTA - error terms of the layer above (delta_k).
Returns the new deltas in the same order as OUTPUT.
Rewritten from nested NTH indexing over the outer lists (accidentally
quadratic in list traversal) to LOOP iteration; identical results for
well-formed, matching-length arguments."
  (loop for j from 0
        for yj in output
        collect (* yj
                   (- 1 yj)                 ; sigmoid derivative factor
                   (loop for node in weights
                         for dk in delta
                         sum (* (nth j node) dk)))))

          
