Crawler = require "simplecrawler"
iconv = require 'iconv-lite'
events = (require 'events').EventEmitter
url = require 'url'
http = require 'http'
# tools = require './lib/tools'

# Spider: an event-forwarding facade over a simplecrawler instance.
# Each simplecrawler lifecycle event is re-emitted on the Spider itself
# under an 'onXxx' name, so consumers never touch the crawler directly.
exports?.Spider = class Spider extends events
    # _domain: host the crawler is rooted at (e.g. 'jd.com').
    constructor: ( @_domain ) ->
        @_crawler = new Crawler @_domain
        # One request at a time, subdomains included, no automatic link
        # discovery — URLs are queued manually via addQueue.
        @_crawler.userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:24.0) Gecko/20100101 Firefox/24.0'
        @_crawler.maxConcurrency = 1
        @_crawler.discoverResources = false
        @_crawler.scanSubdomains = true
        @_crawler.on "crawlstart", =>
            @emit 'onStart'
        @_crawler.on "complete", =>
            console.log '********** crawl website has been completed **********'
            @emit 'onComplete'
        @_crawler.on "fetchcomplete", (item, buffer, res) =>
            # Close the timing banner opened in the fetchstart handler.
            logEnd item.url
            @emit 'onFetchComplete', item, buffer, res
        @_crawler.on "queueadd", (item) =>
            @emit 'onQueueadd', item
        @_crawler.on "queueduplicate", (urlData) =>
            console.log 'queueduplicate', urlData
            @emit 'onQueueduplicate', urlData
        @_crawler.on "queueerror", (errData, urlData) =>
            console.log 'queueerror', urlData
            @emit 'onQueueerror', errData, urlData
        @_crawler.on "fetchstart", (item, reqOpts) =>
            # Open a timing banner; logEnd on completion/redirect reports the elapsed time.
            log item.url
            @emit 'onFetchstart', item, reqOpts
        @_crawler.on "fetchheaders", (item, res) =>
            @emit 'onFetchheaders', item, res
        @_crawler.on "fetchdataerror", (item, res) =>
            console.log "fetchdataerror", item
            @emit 'onFetchdataerror', item, res
        @_crawler.on "fetchredirect", (item, parsedURL, res) =>
            console.log "fetchredirect", item, parsedURL
            # A redirect also ends the pending fetch timer for this URL.
            logEnd item.url
            @emit 'onFetchredirect', item, parsedURL, res
        @_crawler.on "fetch404", (item, res) =>
            console.log "fetch404", item
            @emit 'onFetch404', item, res
        @_crawler.on "fetcherror", (item, res) =>
            console.log "fetcherror", item
            @emit 'onFetcherror', item, res
        @_crawler.on "fetchtimeout", (info) =>
            console.log "fetchtimeout", info
            @emit 'onFetchtimeout', info
        @_crawler.on "fetchclienterror", (item, errData) =>
            console.log "fetchclienterror", errData
            @emit 'onFetchclienterror', item, errData
        @_crawler.on "discoverycomplete", (item, resources) =>
            console.log "discoverycomplete"
            @emit 'onDiscoverycomplete', item, resources

    # Start crawling everything queued so far.
    crawl: ->
        @_crawler.start()

    # Queue one URL for fetching; defaults to the site root when
    # called with no argument (null/undefined).
    addQueue: (_url) ->
        @_crawler.queueURL _url ? '/'
        # console.log @_crawler.queue



# Smoke test: crawl a couple of jd.com pages and print timing banners
# for each fetch. Exported with the same `exports?` guard as the other
# exports in this file so it is safe where `exports` is undefined.
exports?.test = test = ->
    console.log 'test'
    domain = 'jd.com'
    spider = new Spider domain
    # NOTE(review): Spider's own fetchstart/fetchcomplete handlers already
    # call log/logEnd, so these handlers produce duplicate banners —
    # confirm whether the double logging is intentional.
    spider.on 'onFetchstart', (queueItem, requestOptions) ->
        log queueItem.url
    spider.on 'onFetchComplete', (queueItem, responseBuffer, response) ->
        logEnd queueItem.url
    # A category listing page plus the site root.
    ulist = 'http://list.jd.com/737-794-798-0-0-0-0-0-0-0-1-1-1-1-1-72-4137-33.html'
    spider.addQueue 'http://www.jd.com'
    spider.addQueue ulist
    spider.crawl()



# Print an END banner for `msg` with the elapsed seconds since the
# matching `log msg` call, then forget the recorded start time.
exports?.logEnd = logEnd = (msg) ->
    # The timing table lives as a property on the `log` function itself.
    log?.map ?= {}
    now = new Date().getTime()
    # No matching `log msg`? Fall back to a zero elapsed time.
    log.map[msg] ?= now
    dashes = ('-' for _ in [0..30])
    elapsed = (now - log.map[msg]) / 1000 + 'S'
    console.log (dashes.concat [msg], dashes, ['[END]'], ['[Time ', elapsed, ']']).join ''
    delete log.map[msg]

# Print a BEGIN banner for `msg` and remember its start time (ms epoch)
# so a later `logEnd msg` can report the elapsed seconds.
exports?.log = log = (msg) ->
    # Lazily create the timing table stored on the function itself.
    log?.map ?= {}
    log.map[msg] = new Date().getTime()
    dashes = ('-' for _ in [0..30])
    console.log (dashes.concat [msg], dashes, ['[BEGIN]']).join ''

# test()
# myCrawler = new Crawler domain
# myCrawler.userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36'
# conditionID = myCrawler.addFetchCondition(
#     (parsedURL)->
#         reg = /(\d+|-+)+\.html$/ #123-123.html
#         # console.log parsedURL.path.match(reg)
#         parsedURL.path.match(reg);
# )
# domain = 'www.jd.com'
# myCrawler.initialProtocol = "http"
# # myCrawler.initialPath = "/"
# myCrawler.initialPort = 80
# myCrawler.scanSubdomains = true
# myCrawler.maxConcurrency = 1
# myCrawler.discoverResources= false
# # myCrawler.discoverResources = (html,arg...)->
# #     # console.log iconv.decode(html,'GBK')
# #     console.log arg...
# #     ['http://www.jd.com/products/737-794-880-0-0-0-0-0-0-0-1-1-1-1-72-33.html www.jd.com']




# myCrawler
#     .on "crawlstart" , () ->
#         console.log "crawlstart"
#     .on "queueadd" , (queueItem) ->
#         ''
#         # console.log "queueadd"
#         # console.log queueItem
#     .on "queueduplicate" , (URLData) ->
#         console.log "queueduplicate"
#         # console.log URLData
#     .on "queueerror" , ( errorData , URLData ) ->
#         console.log "queueerror"
#     .on "fetchstart" , (queueItem , requestOptions) ->
#         ''
#         # console.log "fetchstart"
#         # console.log queueItem.url,queueItem.host
#     .on "fetchheaders", (queueItem , responseObject) ->
#         console.log "fetchheaders"
#         # console.log responseObject
#     .on "fetchcomplete", (queueItem, responseBuffer, response) ->
#         console.log "Completed fetching resource:"
#         console.log  queueItem.url,queueItem.host
#         # console.log 'responseBuffer: ',iconv.decode(responseBuffer,'GBK')
#         # console.log 'response: ',response
#     .on "fetchdataerror" , ( queueItem, response) ->
#         console.log "fetchdataerror"
#     .on "fetchredirect" , (queueItem, parsedURL, response ) ->
#         console.log "fetchredirect"
#         # console.log queueItem.stateData.headers.location
#         # console.log queueItem
#         console.log parsedURL.path,parsedURL.host
#     .on "fetch404", (queueItem, response) ->
#         console.log "fetch404"
#     .on "fetcherror", ( queueItem, response) ->
#         console.log "fetcherror"
#     .on "fetchtimeout" , (fetchtimeout) ->
#         console.log "fetchtimeout"
#     .on "fetchclienterror", (queueItem, errorData ) ->
#         console.log "fetchclienterror"
#     .on "discoverycomplete", ( queueItem, resources ) ->
#         console.log "discoverycomplete"
#     .on "complete" , () ->
#         console.log "-------------complete-------------"

