package ezi

/**
 * Simple multi-threaded web crawler that collects ezi.Link objects starting
 * from a base URL, traversing either breadth-first (BFS) or depth-first (DFS).
 *
 * A pool of {@code startThread} worker threads pulls links from the shared
 * {@code links} list (cursor: {@code index}) and appends newly discovered
 * ones; idle workers park in wait() on this object and are woken via
 * notifyAll() when new work arrives or the search ends.
 *
 * NOTE(review): ezi.Link is assumed to expose a (parent, url, depth)
 * constructor plus url/depth properties — inferred from usage below, confirm
 * against the ezi.Link source.
 */
class Crawler implements Serializable {

  ArrayList<ezi.Link> links = [] as ArrayList<ezi.Link>  // shared work queue and result list
  Thread main                // thread driving the current search session
  int index = 0              // cursor: next position in links to hand to a worker
  int waitingThread = 0      // how many workers are currently parked in wait()
  int startThread = 5        // number of worker threads to spawn
  boolean searchInsideOnly = true  // only fetch pages whose URL starts with root
  boolean showInsideOnly = true    // only record links that stay under root
  String root                // base URL the crawl started from
  String domain              // root without its "http://" / "http://www." prefix
  boolean endSearch = false  // set to true to make all workers stop
  int maxDepth = 3           // links at this depth or deeper are not expanded
  int time = 10000 // total time budget per search: 10 s
  int timeLeft               // remaining crawl budget in milliseconds
  def threads                // list of spawned worker threads

  /**
   * Registers the calling worker as idle. When every worker is idle, no work
   * remains anywhere, so the search is ended and all waiters are woken.
   *
   * @return -1 when this call ended the search, the new idle count otherwise,
   *         or null when the search had already ended. NOTE(review): callers
   *         test "addWaitingThread() < 0"; Groovy evaluates null < 0 as true
   *         (null sorts below numbers), so the null return also terminates
   *         their loops — confirm this shutdown protocol is intended.
   */
  synchronized addWaitingThread() {
    if (endSearch) return null
    if (++waitingThread == startThread) {
      endSearch = true;
      notifyAll()
      return -1
    }
    else return waitingThread;
  }

  /**
   * Marks the calling worker as busy again after it was woken.
   * NOTE(review): implicitly returns the pre-decrement waitingThread value,
   * which is never negative at this point, so callers' "< 0" check on this
   * method cannot fire here — verify against the intended protocol.
   */
  synchronized removeWaitingThread() {
    waitingThread--;
  }

  /** Parks the calling worker until another thread calls notifyAll(). */
  synchronized waitSomeTime() {
    wait()
  }

  /** Signals every worker (and waiter) that the search is over. */
  synchronized endSearching() {
    endSearch = true
    notifyAll()
  }

  /**
   * Fetches baseLink's page and extracts its anchor (&lt;a href&gt;) targets.
   *
   * @param baseLink link whose page is downloaded and scanned
   * @param depth    depth of baseLink; discovered links are created at depth + 1
   * @return a Set of new ezi.Link objects; empty when the page lies outside
   *         root (with searchInsideOnly set) or when fetching/parsing fails
   */
  def searchLinks(ezi.Link baseLink, int depth) {
    if (searchInsideOnly && !baseLink.url.startsWith(root)) { return []}
    // TagSoup lets XmlParser cope with real-world, malformed HTML
    def parser = new XmlParser(new org.ccil.cowan.tagsoup.Parser())
    def html
    try {
      html = parser.parse(baseLink.url)
    // NOTE(review): a lone identifier in a Groovy catch clause is the variable
    // NAME, not a type — this swallows every Exception, not only
    // FileNotFoundException. Confirm whether that is intended.
    } catch (FileNotFoundException) {return []}

    def linksLoc = [] as Set

    html.body.'**'.a.@href.each {

      // skip missing hrefs and self-links
      if (it != null && it.trim() != baseLink.url.trim()) {

        if (showInsideOnly) {

          // keep only links that stay under root:
          // absolute in-site URL / site-absolute path / relative path
          // (hrefs containing ":" — other schemes such as mailto: — are dropped)
          if (it.startsWith(root)) linksLoc << new ezi.Link(baseLink, it.trim(), depth + 1)
          else if (it.startsWith("/")) linksLoc << new ezi.Link(baseLink, root + it.trim(), depth + 1)
          else if (!it.contains(":")) linksLoc << new ezi.Link(baseLink, baseLink.url + it, depth + 1)

        } else {

          // keep any http URL; resolve site-absolute and relative paths
          if (it.startsWith("http://")) linksLoc << new ezi.Link(baseLink, it.trim(), depth + 1)
          else if (it.startsWith("/")) linksLoc << new ezi.Link(baseLink, root + it.trim(), depth + 1)
          else linksLoc << new ezi.Link(baseLink, baseLink.url + it.trim(), depth + 1)

        }
      }
    }
    return linksLoc
  }

  /**
   * Blocks until the time budget runs out (polled in 200 ms slices) or the
   * search ends early, then stops and joins all workers and prints the
   * collected links. ("Wydobyło" is Polish for "extracted".)
   */
  def waitToEndOfWork() {
    while (timeLeft > 0) {
      sleep(200)
      timeLeft -= 200
      if (endSearch) break;
    }
    println("END TIME")
    endSearching()
    threads*.join()

    println "Wydobyło :" + links.size()
    links.each {
      println it
    }
  }

  /** Suspends the crawl: stops the workers and waits for the driver thread. */
  def pauseSearching() {
    println "==============PAUSE"
    endSearching()
    main.join()
  }

  /**
   * Resumes a paused crawl on the calling thread with the given strategy,
   * reusing the links collected so far.
   *
   * @param type SearchType.DFS for depth-first, anything else breadth-first
   */
  def resumeSearching(SearchType type) {
    println "==============RESUME"
    endSearch = false
    main = Thread.currentThread()

    if (type == SearchType.DFS) depthSearch()
    else breadthSearch()

    waitToEndOfWork()
  }

  /**
   * Derives {@code domain} from root by stripping the "http://" scheme and,
   * when present, the leading "www." prefix.
   */
  def takeDomain(root) {
    String out = root - "http://"
    if(out.startsWith("www")) out = root - "http://www."
    domain = out
    println domain
  }

  // BFS

  /** Appends the links not yet seen and wakes idle workers if any were added. */
  synchronized addToLinksBFS(l) {
    l.each {
      if (!links.contains(it)) links << it
    }
    if (l.size() > 0) notifyAll()
  }

  /**
   * Hands out the next queued link shallower than maxDepth, advancing the
   * shared cursor past any links that are too deep.
   *
   * @return the next link to expand, or null when the queue is drained or the
   *         search has ended
   */
  synchronized ezi.Link getNextLinkBFS() {
    if (endSearch) return null
    while (links.size() != index) {
      index++
      def l = links[index - 1]
      if (l.depth < maxDepth) return l
    }
    return null
  }


  /**
   * Spawns the BFS worker pool. Each worker repeatedly takes a link, expands
   * it and appends the results; with no work available it registers as idle
   * and waits, terminating once all workers are idle or the search ends.
   */
  def breadthSearch() {
    threads = []
    startThread.times {

      threads << Thread.start {
        while (true) {

          def aktLink = getNextLinkBFS()

          if (aktLink == null) {
            // no work: park until new links arrive or the search ends
            if (addWaitingThread() < 0) {
              println "End thread work"
              break
            }
            println "No work, waiting..."
            waitSomeTime()

            if (removeWaitingThread() < 0) {
              println "End thread work"
              break
            }
          }
          else {
            println "Working "
            addToLinksBFS(searchLinks(aktLink, aktLink.depth))
            println "After work: " + links.size()
          }
        }
      }
    }
  }

  /**
   * Entry point for a breadth-first crawl from baseLink: seeds the queue,
   * expands the base page on a fresh driver thread, then starts the worker
   * pool and waits for the time budget to expire.
   */
  def breadthFirstSearch(ezi.Link baseLink) {


    timeLeft = time // refresh the time budget
    links << baseLink
    index = 1
    main = Thread.start {
      root = baseLink.url
      takeDomain(root)

      // seed the queue with the base page's links (skipping self-references)
      searchLinks(baseLink, 0).each {
        if(it.url!= root) links << it
      }

      breadthSearch()
      waitToEndOfWork()
    }
  }

  // DFS

  /**
   * Hands out the link at the shared cursor, or null when the list is
   * exhausted or the search has ended. (Unlike the BFS variant, depth
   * filtering is done by the worker, not here.)
   */
  synchronized ezi.Link getNextLinkDFS() {
    if (endSearch) {
      return null
    }

    if (index < links.size()) {
      return links[index++]
    }

    return null
  }

  /**
   * Inserts the unseen links at the current cursor position so they are
   * processed before the remaining queued links — this is what makes the
   * traversal depth-first. Wakes idle workers if anything was added.
   */
  synchronized addToLinksDFS(l) {
    def currentIndex = index;
    l.each {
      if (!links.contains(it)) {
        links.add(currentIndex++, it)
      }
    }

    if (l.size() > 0) {
      notifyAll()
    }
  }

  /**
   * Spawns the DFS worker pool; mirrors breadthSearch() but checks maxDepth
   * before expanding and inserts results at the cursor (depth-first order).
   */
  def depthSearch() {

    threads = []
    startThread.times {

      threads << Thread.start {
        while (true) {

          def aktUrl = getNextLinkDFS()

          if (aktUrl == null) {
            // no work: park until new links arrive or the search ends
            if (addWaitingThread() < 0) {
              println "End thread work"
              break
            }
            println "No work, waiting..."
            waitSomeTime()

            if (removeWaitingThread() < 0) {
              println "End thread work"
              break
            }
          }
          else {
            println "Working "
            if (aktUrl.depth < maxDepth) {
              addToLinksDFS(searchLinks(aktUrl, aktUrl.depth))
            }
            println "After work: " + links.size()
          }
        }
      }
    }
  }

  /**
   * Entry point for a depth-first crawl from baseLink: seeds the list,
   * expands the base page on a fresh driver thread, then starts the workers
   * and waits for the time budget to expire.
   */
  def depthFirstSearch(ezi.Link baseLink) {

    timeLeft = time // refresh the time budget
    links << baseLink
    index = 1
    main = Thread.start {
      root = baseLink.url
      takeDomain(root)

      searchLinks(baseLink, 0).each {
        println "root:"+root
        println "link:"+it
        if(it.url!= root) links << it
      }
      depthSearch()
      waitToEndOfWork()
    }
  }

}
/**
 * Traversal strategy selected by Crawler.resumeSearching().
 */
enum SearchType {
  /** Breadth-first traversal (queue order). */
  BFS,
  /** Depth-first traversal (insert-at-cursor order). */
  DFS
}