exutils = require("express/lib/utils")
utils = require("./utils")
fs = require("fs")
path = require "path"
vm = require "vm"
request = require "request"
jquery = require "jQuery"

# Bundled jQuery source, read once at startup.
# NOTE(review): JQUERY is not referenced in this chunk — presumably injected
# into a server-side DOM elsewhere; confirm before removing.
JQUERY = fs.readFileSync("public/js/jquery.min.js").toString()
# Root directory of the on-disk HTTP response cache used by cfetch.
CACHE_PATH = "/tmp/tucao_cache"
# Encoding used both for the HTTP body and the cache files.
CACHE_ENCODING = "utf-8"
# When true, cfetch bypasses the cache and always refetches.
DEBUG = false

# Error type thrown on malformed crawl URLs (see `single` / `pages`).
# Subclassing Error in CoffeeScript 1.x does not set `name` or capture a
# stack trace by itself, so do both explicitly to make failures debuggable.
class CrawlError extends Error
  constructor : (@message="")->
    @name = "CrawlError"
    # V8-only helper; guarded so other engines still work.
    Error.captureStackTrace? @, CrawlError

crawl = module.exports =
  CrawlError : CrawlError
  # cfetch(url, cb, timeout) — fetch `url` through an on-disk cache.
  # The cache file lives under CACHE_PATH, sharded by the sha1 of the URL,
  # and is considered fresh for `timeout` ms (default 24h).
  # cb is node-style: (err, body).
  cfetch : do()->
    (url, cb, timeout=86400000)->
      skey = utils.sha1 url
      # Shard by the first 4 hex chars of the digest to keep dirs small.
      spath = "#{CACHE_PATH}/#{skey[...4]}/#{skey[4..]}"
      sfetch = ()->
        request {url: url, encoding: CACHE_ENCODING}, (err, resp, body)->
          if !err and resp.statusCode == 200
            utils.mkdirs path.dirname(spath), ()->
              fs.writeFile spath, body, (e)->
                # A failed cache write must not lose the fetched body:
                # report it but still deliver the data. (Previously this
                # threw inside the async callback, crashing the process
                # and never invoking cb.)
                console.error "cfetch: cache write failed: #{e}" if e
                cb null, body
          else
            cb err, null
      fs.stat spath, (err, stats)->
        if err or DEBUG
          # No cache entry (or DEBUG forces refresh): hit the network.
          sfetch()
        else
          if (new Date).getTime() < stats.ctime.getTime() + timeout
            # Entry still fresh: serve straight from disk.
            fs.readFile spath, CACHE_ENCODING, (err, data)->
              cb err, data
          else
            sfetch()
            
  # index(cb) — fetch the category index page and return [{url, title}].
  # Anchors come in pairs (comic link, latest-chapter link); the list is
  # walked two at a time and each pair is merged into one entry.
  index : do()->
    URL_INDEX = "http://manhua.178.com/tags/category_search/0-0-0-all-0-0-0-1.shtml"
    (cb)->
      crawl.cfetch URL_INDEX, (err, body)->
        # Throw like the sibling `single`/`pages` do; the old
        # `return err if err` silently discarded the error.
        throw err if err

        dom = jquery(body)
        # Bind the node list to a real variable instead of relying on
        # `_ref`, a CoffeeScript-compiler-internal name that is not part
        # of the language and breaks across compiler versions.
        links = dom.find(".zxgxbox li a")
        ret = []
        for i, n in links by 2
          ret.push
            url: links[n+1].href
            title: "#{i.innerHTML}_#{links[n+1].innerHTML}"

        cb ret
  
  # single(url, cb) — list a comic's chapters from its main page.
  # `url` must be a 178-style comic URL, e.g. http://manhua.178.com/shenhai6000/
  single : do()->
    (url, cb)->
      unless (/http:\/\/manhua.178.com\/[^/]+?\//gi).test url
        throw new crawl.CrawlError("Error URL #{url}")
      crawl.cfetch url, (err, body)->
        throw err if err

        anchors = jquery(body).find(".cartoon_online_border li a")
        chapters = ({url: a.href, title: a.innerHTML} for a in anchors)
        cb chapters

  # pages(url, cb) — resolve the image URLs of one chapter.
  # The chapter page embeds `pages = '...'` in a script tag; that snippet is
  # evaluated in a sandboxed vm context and each entry prefixed with IMG_URL.
  pages : do()->
    # These regexes live in the closure and are reused across calls, so they
    # must NOT carry the /g flag: a global regex keeps `lastIndex` between
    # .test/.exec calls, making the second invocation spuriously fail.
    RE_SCRIPT = /pages = '(.+?)';/i
    # Dots escaped so "www.178.com" / ".shtml" match literally.
    RE_PAGES = /http:\/\/www\.178\.com\/mh\/\w+\/\d+\.shtml/i
    scope_context = vm.createContext({})
    IMG_URL = "http://imgfast.manhua.178.com/"
    (url, cb)->
      throw new crawl.CrawlError("Error URL #{url}") if not RE_PAGES.test url
      crawl.cfetch url, (err, body)->
        throw err if err

        cdata = RE_SCRIPT.exec(body)
        # Guard the match: the old code dereferenced cdata[1] and would
        # crash with a TypeError on pages without the expected script.
        throw new crawl.CrawlError("no pages script in #{url}") if not cdata
        vm.runInContext("pages=" + cdata[1], scope_context)
        cb (IMG_URL + i for i in scope_context["pages"])

# Ad-hoc smoke tests, run only when this file is executed directly.
if require.main == module
  #crawl.index (ret)->
  #  #console.log ret

  #SINGLE
  #crawl.single "http://manhua.178.com/huoyingrenzhe/", (ret)->
  #  console.log ret
  
  #PAGES
  # Currently exercises only `pages`; the index/single checks above are
  # kept for manual toggling.
  crawl.pages "http://www.178.com/mh/huoyingrenzhe/2056.shtml", (ret)->
    console.log ret
    
