(ns lastfm-music-recommendation.crawler
  (:refer-clojure :exclude [extend])
  (:use [jsoup.soup])
  (:require [lastfm-music-recommendation.db.datomic :as datomic]))

;Counts how meny agents has finished in each iteration,
;when reaches 25, new iteration is started, or crawler is stopped
(def counter (atom 0))

;true when crawler is running, when set to false crawling stops
(def crawling (ref false))

(defn get-links
  "Returns the set of all links on the page whose absolute href
   starts with 'http://www.last.fm/music'."
  [doc]
  (let [hrefs ($ doc "a[abs:href^=http://www.last.fm/music]" (attr "abs:href"))]
    (set hrefs)))
 
(defn get-song
  "Returns the og:title meta content (song title and artist) when the
   page's og:type meta marks it as a song; otherwise returns nil."
  [doc]
  ;; (when (seq coll) ...) is the idiomatic form of
  ;; (if-not (empty? coll) ...) with no else branch.
  (when (seq ($ doc "meta[property=og:type][content=song]" (attr "content")))
    (first ($ doc "meta[property=og:title]" (attr "content")))))

(defn process-result
  "Saves the song (when one was found), saves the newly discovered
   links, and marks the current url as visited."
  [song links url]
  ;; doseq replaces (dorun (map ...)) — we only want the side effect,
  ;; so there is no need to build and force a lazy sequence.
  (doseq [link links]
    (datomic/save-link link))
  (datomic/set-visited! url)
  ;; when is the idiomatic single-branch conditional
  (when song
    (datomic/insert-song song url)))

(defn crawl-page
  "Crawls the given url, processes it, then follows the first url found
   on that page. Repeats this up to 20 times, then increments `counter`
   to signal that this agent has finished. Also stops early (and still
   increments the counter) when a page yields no further links —
   previously that case passed nil to get! and crashed the agent."
  [url]
  (loop [uri url
         depth 20]
    (if (or (< depth 1) (nil? uri))
      ;; done: depth exhausted or dead end — report this agent as finished
      (swap! counter inc)
      (let [doc (get! uri :ignore-http-errors true)
            song (get-song doc)
            links (get-links doc)]
        (process-result song links uri)
        ;; links is a set, so (first links) picks an arbitrary next link
        (recur (first links) (dec depth))))))

(defn agent-err-handler
  "If an error happens in an agent, the finished-agents counter is
   incremented, as if that agent had finished crawling.
   Fixes the original (swap! agent inc): the parameter `agent` shadowed
   the intended global `counter` atom, so the counter never advanced
   (and swap! on an agent would itself throw)."
  [failed-agent ex]
  (swap! counter inc))

(defn crawl
  "Starts a new iteration of crawling agents. Gets 30 links from the db
   to be crawled. If fewer than 30 are available, increments the counter
   accordingly. Sends off one agent per link."
  []
  ;; Only the ref write belongs inside the transaction: swap! is a side
  ;; effect and would be applied again if the dosync retried.
  (dosync (ref-set crawling true))
  (let [links-to-visit (datomic/get-links-to-visit)
        missing        (- 30 (count links-to-visit))]
    (when (pos? missing)
      (swap! counter #(+ % missing)))
    (doseq [link links-to-visit]
      ;; named crawler-agent to avoid shadowing clojure.core/agent
      (let [crawler-agent (agent link)]
        (set-error-handler! crawler-agent agent-err-handler)
        (send-off crawler-agent crawl-page)))))

(defn counter-watcher
  "Watch fn for `counter`. When the counter reaches the value 25 it is
   reset (25 is subtracted, preserving any overshoot from the next
   iteration). If `crawling` is still true a new iteration is started,
   otherwise crawling is finished."
  ;; `watched-atom` avoids shadowing clojure.core/atom
  [watch-key watched-atom oldv newv]
  (when (= 25 newv)
    (swap! watched-atom #(- % 25))
    (if @crawling
      (crawl)
      (println "stopped crawling!"))))

(add-watch counter :key counter-watcher)

(defn stop-crawling
  "Stops crawling once the current iteration is finished."
  []
  (dosync
    (ref-set crawling false)))

