Crawler = require "simplecrawler"
iconv = require 'iconv-lite'
events = (require 'events').EventEmitter
url = require 'url'
http = require 'http'
https = require 'https'
qs = require 'querystring'
# tools = require './lib/tools'

exports?.cookieFormat = cookieFormat = (cookieObj)->
    # Serialize a {key: value} object into a "k1=v1;k2=v2" cookie string.
    pairs = []
    for key of cookieObj
        pairs.push key + '=' + cookieObj[key]
    return pairs.join ';'
exports?.cookieParse = cookieParse = (cookieStr)->
    # Parse a "k1=v1; k2=v2" cookie string into a {key: value} object.
    # Fix: the old split('')[0..idx-1]/join('') char juggling broke when the
    # '=' was at position 0 ([0..-1] is the WHOLE array in CoffeeScript, so
    # "=v" produced "=v" as the key) and fragments without '=' produced
    # garbage entries. Use plain string slicing and skip malformed pieces.
    cookieObj = {}
    for c in cookieStr.split ';'
        _c = c.trim()
        idx = _c.indexOf '='
        # Skip empty fragments and fragments with no '=' separator.
        continue if idx < 0
        key = _c[0...idx]
        value = _c[idx+1..]
        cookieObj[key] = value
    return cookieObj
exports?.getCookieFromHeaders = getCookieFromHeaders = (res)->
        # Collect every Set-Cookie header of a response, keep only the
        # "name=value" part of each (attributes like Path/Expires are
        # dropped), and return the result as a {key: value} object.
        rawCookies = res.headers['set-cookie'] ? []
        pairs = (c.split(';')[0] for c in rawCookies)
        return cookieParse pairs.join(';')

# Minimal HTTP/HTTPS fetcher that follows 301/302 redirects and hands the
# complete response body to a callback as a single Buffer.
exports?.Request = class Request extends events
    constructor:()->
        # Last redirect target seen (absolute URL), kept for inspection.
        @u2 = null
    # Fetch `urlS` (a URL string or a parsed url object; a parsed object may
    # also carry `method` and `postData`) and invoke
    # callback(parsedUrl, responseBuffer, res) once the body has arrived.
    request:(urlS,callback)=>
        if typeof urlS is 'string'
            urlObj = url.parse(urlS)
        else
            urlObj = urlS
        if urlObj.protocol is 'http:'
            protocolReq = http.request
            urlObj.port = 80
        else if urlObj.protocol is 'https:'
            protocolReq = https.request
            urlObj.port = 443
        else
            # Fix: an unsupported protocol used to leave protocolReq
            # undefined and crash with a TypeError below.
            console.log 'unsupported protocol: ' + urlObj.protocol
            return
        urlObj.method ?= 'GET'
        # Fix: the original assigned the no-op to a misspelled `callbck`,
        # so a missing callback stayed undefined and the end handler threw.
        callback ?= ()->
            return
        log url.format urlS
        req = protocolReq(urlObj, (res)=>
            if res.statusCode is 301 or res.statusCode is 302
                @u2 = res.headers.location
                if (@u2.indexOf 'http://') isnt 0 and (@u2.indexOf 'https://') isnt 0
                    # Relative Location: resolve against the current host.
                    @u2 = urlObj.protocol+'//'+ urlObj.host + decodeURIComponent(res.headers.location)
                console.log (JSON.stringify urlS )+ '  forword to  '+ @u2
                @request @u2,callback
                # Fix: stop here so the redirect response body is not also
                # delivered (the original could fire the callback twice).
                return
            buffers = []
            size = 0
            res
            .on('data', (buffer)=>
                buffers.push(buffer)
                size += buffer.length
            )
            .on('end', ()=>
                logEnd url.format urlS
                try
                    if buffers.length isnt 0
                        # Detect the charset from Content-Type; fall back to
                        # utf-8 when the header is absent OR carries no
                        # charset (fix: a null regex match used to throw).
                        charsetMatch = res.headers['content-type']?.match /charset=(\w|\s|-|\d)*/
                        if charsetMatch
                            @charset = charsetMatch[0].toLowerCase().split('=')[1]
                        else
                            @charset = 'utf-8'
                        # Buffer.concat replaces the deprecated
                        # `new Buffer` + manual copy loop.
                        responseBuffer = Buffer.concat buffers, size
                        if typeof urlS is 'string'
                            callback url.parse(urlS),responseBuffer,res
                        else
                            callback urlS,responseBuffer,res
                catch e
                    console.log e
            )
        )
        req.on('error', (e)=>
            # Network-level failure: log and give up on this URL.
            console.log("Got error: " + e.message)
            console.log urlS
        )
        if urlObj.method.toLowerCase() is 'post'
            # POST bodies travel in urlObj.postData; consumed once sent.
            req.write (qs.stringify urlObj.postData)
            delete urlObj.postData
        req.end()

# Thin wrapper around simplecrawler: configures a single-concurrency crawler
# for a domain and re-emits every crawler event under an 'on'-prefixed
# camelCase name (e.g. "fetchcomplete" -> 'onFetchComplete'), logging
# begin/end banners for each fetched URL along the way.
exports?.Spider = class Spider extends events
    # _domain: hostname (e.g. 'jd.com') handed straight to simplecrawler.
    constructor: ( @_domain ) ->
        # console.log 'constructor'
        @_crawler = new Crawler @_domain
        # Pose as desktop Firefox; some sites block unknown user agents.
        @_crawler.userAgent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:24.0) Gecko/20100101 Firefox/24.0'
        @_crawler.maxConcurrency = 1
        # Only crawl URLs queued explicitly via addQueue; do not auto-discover
        # links inside fetched pages.
        @_crawler.discoverResources = false
        @_crawler.scanSubdomains = true
        @_crawler.on "crawlstart" , ()=>
            @emit 'onStart'
        @_crawler.on "complete" , ()=>
            console.log '********** crawl website has been completed **********'
            @emit 'onComplete'
        @_crawler.on "fetchcomplete", (queueItem, responseBuffer, response)=>
            # Print the end banner with elapsed time for this URL.
            logEnd queueItem.url
            @emit 'onFetchComplete' ,queueItem,responseBuffer,response
        @_crawler.on "queueadd" , (queueItem) =>
            @emit 'onQueueadd',queueItem
        @_crawler.on "queueduplicate" , (URLData) =>
            console.log 'queueduplicate',URLData
            @emit 'onQueueduplicate',URLData
        @_crawler.on "queueerror" , ( errorData , URLData ) =>
            console.log 'queueerror',URLData
            @emit 'onQueueerror',errorData,URLData
        @_crawler.on "fetchstart" , (queueItem , requestOptions) =>
            # Print the begin banner and start the timer for this URL.
            log queueItem.url
            @emit 'onFetchstart',queueItem,requestOptions
        @_crawler.on "fetchheaders", (queueItem , responseObject) =>
            # console.log "fetchheaders"
            @emit 'onFetchheaders',queueItem,responseObject
        @_crawler.on "fetchdataerror" , ( queueItem, response) =>
            console.log "fetchdataerror",queueItem
            @emit 'onFetchdataerror',queueItem,response
        @_crawler.on "fetchredirect" , (queueItem, parsedURL, response ) =>
            console.log "fetchredirect",queueItem,parsedURL
            # A redirect also ends the timing for the original URL.
            logEnd queueItem.url
            # console.log parsedURL.path,parsedURL.host
            @emit 'onFetchredirect',queueItem, parsedURL, response
        @_crawler.on "fetch404", (queueItem, response) =>
            console.log "fetch404",queueItem
            @emit 'onFetch404',queueItem, response
        @_crawler.on "fetcherror", ( queueItem, response) =>
            console.log "fetcherror",queueItem
            @emit 'onFetcherror',queueItem, response
        @_crawler.on "fetchtimeout" , (fetchtimeout) =>
            console.log "fetchtimeout",fetchtimeout
            @emit 'onFetchtimeout',fetchtimeout
        @_crawler.on "fetchclienterror", (queueItem, errorData ) =>
            console.log "fetchclienterror",errorData
            @emit 'onFetchclienterror',queueItem, errorData
        @_crawler.on "discoverycomplete", ( queueItem, resources ) =>
            console.log "discoverycomplete"
            @emit 'onDiscoverycomplete', queueItem, resources

    # Begin crawling everything queued so far.
    crawl: () ->
        # console.log 'getHtml'
        @_crawler.start()
    # Queue one URL for crawling; defaults to '/' when none is given.
    addQueue:(_url)->
        # console.log 'addQueue',_url
        _url?= '/'
        @_crawler.queueURL _url
        # console.log @_crawler.queue


exports.test = test = () ->
    # Manual smoke test: crawl two jd.com pages and print fetch banners.
    console.log 'test'
    spider = new Spider 'jd.com'
    spider.on 'onFetchstart', (queueItem, requestOptions)->
        log queueItem.url
    spider.on 'onFetchComplete', (queueItem, responseBuffer, response)->
        logEnd queueItem.url
    spider.on 'onFetchredirect', (queueItem, parsedURL, response)->
        # Redirects are already logged by the Spider wiring itself.
        return
    listUrl = 'http://list.jd.com/737-794-798-0-0-0-0-0-0-0-1-1-1-1-1-72-4137-33.html'
    spider.addQueue 'http://www.jd.com'
    spider.addQueue listUrl
    spider.crawl()



exports?.logEnd = logEnd = (msg)->
    # Print an end banner for msg with the seconds elapsed since log(msg),
    # then forget the start time. Timing state lives on log.map.
    log?.map ?= {}
    finished = new Date().getTime()
    # If log(msg) was never called the elapsed time reads as 0S.
    log.map[msg] ?= finished
    elapsed = (finished - log.map[msg]) / 1000 + 'S'
    dashes = ('-' for i in [0..30])
    console.log dashes.concat([msg], dashes, ['[END]'], ['[Time ', elapsed, ']']).join('')
    delete log.map[msg]

exports?.log = log = (msg)->
    # Print a begin banner for msg and record its start time on log.map
    # so that a later logEnd(msg) can report the elapsed seconds.
    log?.map ?= {}
    log.map[msg] = new Date().getTime()
    dashes = ('-' for i in [0..30])
    console.log dashes.concat([msg], dashes, ['[BEGIN]']).join('')


exports?.encodeURIComponent_GBK = encodeURIComponent_GBK = (str)->
    # Percent-encode str using the GBK charset (like encodeURIComponent,
    # but for GBK-encoded pages). Returns '' for null/undefined/empty input.
    # Example: '金玫瑰古堡2002' -> '%BD%F0%C3%B5%B9%E5%B9%C5%B1%A42002'.
    if str is null or typeof str is 'undefined' or str is ''
        return ''
    chars = str.toString().split('')
    for i in [0...chars.length]
        c = chars[i]
        # Unreserved ASCII characters pass through unencoded.
        if (c >= '0' and c <= '9') or (c >= 'A' and c <= 'Z') or (c >= 'a' and c <= 'z') or c is '.' or c is '-' or c is '_'
            continue
        bytes = iconv.encode(c, 'gbk')
        # Seed with '' so join('%') puts a '%' before every hex byte.
        hex = ['']
        # Fix: a stray `j++` inside the original range loop was dead code
        # (CoffeeScript reassigns the loop variable each iteration); it has
        # been removed so the loop reads as what it actually does.
        for j in [0...bytes.length]
            hex.push bytes.toString('hex', j, j+1).toUpperCase()
        chars[i] = hex.join('%')
    return chars.join('')

# %BD%F0%C3%B5%B9%E5%B9%C5%B1%A42002
# %BD%F0%C3%B5%B9%E5%B9%C5%B1%A42002
# console.log encodeURIComponent_GBK ('金玫瑰古堡2002')