;;; Based on The (New) Turing Omnibus, Chapter 36

;; A two-layer feed-forward network.  SYNONE is the input->hidden
;; synapse matrix (rows = input nodes incl. bias, columns = hidden
;; neurons); SYNTWO is the hidden->output synapse matrix.
(defstruct neural-net synone syntwo)

(defun make-random-array (n m)
  "Return an N x M array whose entries are doubles drawn
uniformly from the interval [-0.1d0, 0.1d0)."
  (let ((weights (make-array (list n m))))
    (loop for row from 0 below n
          do (loop for col from 0 below m
                   do (setf (aref weights row col)
                            (- (random 0.2d0) 0.1d0))))
    weights))

(defun init (input hidden output)
  "Build a fresh two-layer neural network with small random synapses.
INPUT, HIDDEN and OUTPUT give the layer sizes; INPUT is complemented
with one extra bias node, so the first matrix has (1+ INPUT) rows."
  (let ((input-to-hidden (make-random-array (1+ input) hidden))
        (hidden-to-output (make-random-array hidden output)))
    (make-neural-net :synone input-to-hidden
                     :syntwo hidden-to-output)))

(defun eval-layer (weights input &optional (sigmoidp t))
  "Propagate INPUT through the synapse matrix WEIGHTS.
Column I of WEIGHTS holds the incoming weights of neuron I; the
result is a list with one activation per column.  When SIGMOIDP is
true (the default) each weighted sum is squashed with TANH,
otherwise the raw sums are returned."
  (let ((sums (loop for neuron from 0 below (array-dimension weights 1)
                    collect (loop for k from 0 below (length input)
                                  sum (* (aref weights k neuron)
                                         (elt input k))))))
    (if sigmoidp
        (mapcar #'tanh sums)
        sums)))

;; Value fed into the bias node that TRAIN and EVAL-NET prepend to
;; every input vector.  Unbound by default; bind it before use.
(defvar *bias*)
;; Learning rate (eta) used by TRAIN's weight updates.  Unbound by
;; default; bind it before use.
(defvar *rate*)

(defun train (net input target)
  "Trains the neural net `NET' using a given `INPUT' - `TARGET' pair.
Learning rate (eta) is specified in the variable `*RATE*'.
Bias value is controlled by the variable `*BIAS*'.
Returns the Euclidean norm of the output error vector."
  ;; Forward pass: prepend the bias node, run the hidden layer with
  ;; the tanh activation (EVAL-LAYER's default) and the output layer
  ;; linearly (SIGMOIDP nil).
  (let* ((input (cons *bias* input))
         (medout (eval-layer (neural-net-synone net) input))
         (output (eval-layer (neural-net-syntwo net) medout nil))
         (errors (mapcar #'- target output)))
    (with-accessors ((s1 neural-net-synone) (s2 neural-net-syntwo)) net
      ;; SIGMA: error back-propagated to each hidden neuron, i.e. the
      ;; error vector dotted with that neuron's outgoing weights.  It
      ;; must be computed BEFORE s2 is updated below.
      ;; SIGMOID: derivative of tanh at each hidden activation, 1 - x^2.
      (let ((sigma (iter (for i from 0 below (array-dimension s2 0))
                         (collect (iter (for j from 0 below (length target))
                                        (sum (* (elt errors j) (aref s2 i j)))))))
            (sigmoid (mapcar (lambda (x) (- 1 (* x x))) medout)))
        ;; Gradient step on the hidden->output weights.  The output
        ;; layer is linear, so the delta is just the raw error.
        (iter (for i from 0 below (length target))
              (iter (for j from 0 below (array-dimension s2 0))
                    (incf (aref s2 j i)
                          (* *rate* (elt medout j) (elt errors i)))))
        ;; Gradient step on the input->hidden weights, scaling the
        ;; back-propagated error by the tanh derivative.
        (iter (for i from 0 below (length input))
              (iter (for j from 0 below (array-dimension s1 1))
                    (incf (aref s1 i j)
                          (* *rate* (elt input i) (elt sigmoid j) (elt sigma j)))))))
    ;; sqrt of the sum of squared errors, for progress monitoring.
    (sqrt (reduce #'+ (mapcar (lambda (x) (* x x)) errors)))))

(defun eval-net (net input)
  "Feed INPUT (with the `*BIAS*' node prepended) through NET and
return the raw, un-squashed activations of the output layer."
  (eval-layer (neural-net-syntwo net)
              (eval-layer (neural-net-synone net) (cons *bias* input))
              nil))

;;; Test

;; Read-time-suppressed (#+nil) demo: train the net to map polar
;; coordinates (r, theta) of random points in the unit disk to their
;; Cartesian coordinates (x, y), writing the mean error of each
;; 200-sample epoch to /tmp/neural for plotting.
#+nil
(let ((net (init 2 40 2))               ; 5 hidden neurons work almost as well
      (*bias* -2.0d0)                   ; a good bias value seems to be very important
      (*rate* 0.01d0))
  (with-open-file (s "/tmp/neural" :direction :output :if-exists :supersede)
    (iter (for i from 0 below 250)
          (for errors =
               (iter (repeat 200)
                     (for r = (random 1.0d0))
                     (for theta = (random (* 2 pi)))
                     (for x = (* (cos theta) r))
                     (for y = (* (sin theta) r))
                     (collect (train net (list r theta) (list x y)))))
          ;; x-axis rescaled so 250 epochs span [0, 5] in the plot
          (format s "~f ~f~%" (/ i 50) (/ (reduce #'+ errors) 200)))))

;;; plot [0:5] [0:0.5] "/tmp/neural"