csv = require 'csv'
T = require './lib/tools.coffee'
iconv = require 'iconv-lite'
fs = require 'fs'
url = require 'url'
events = (require 'events').EventEmitter
http = require 'http'
jsdom = (require 'jsdom').jsdom
cheerio = require 'cheerio'
Spider = T.Spider 
log = T.log
logEnd = T.logEnd
# Crawl configuration: target domain plus input/output file paths.
domain = 'jd.com'
path = './data/keyword_disc.csv'
# path = './data/k2.csv'
productPath = './data/product.csv'
errorPath = './data/error.log'
# Queue of listing-page URLs still to crawl (filled from the keyword CSV).
UrlArr = []
# Start each run with a fresh product file.
# Fix: the original line read `delFile if (...)` where `delFile` is an
# undefined identifier — it only avoided a ReferenceError because
# fs.unlinkSync returns undefined (falsy). The unlink is all that's needed.
fs.unlinkSync productPath if fs.existsSync productPath
# fs.unlinkSync errorPath if fs.existsSync errorPath
# Per-URL retry counters, used by the fetch-error handlers below.
retryMap = {}
spider = new Spider(domain)
spider.crawl()

    # console.log queueItem

# request('http://list.jd.com/737-794-1300-0-0-0-0-0-0-0-1-1-53-1-1-72-4137-33.html',(urlObj,responseBuffer,response)->
#     # str = iconv.decode responseBuffer,'GBK'
#     # console.log url.format(urlObj)
#     )
# Feed the next pending keyword URL (if any) into the spider's queue.
# No-op when the queue is empty.
iter = ()->
    return if UrlArr.length is 0
    nextUrl = UrlArr.shift()
    spider.addQueue nextUrl
# Called when the current listing chain is exhausted (no next page, fetch
# timeout, or duplicate URL). Logs progress, then advances to the next
# queued keyword via iter().
fetchEnd = ()->
    if UrlArr.length isnt 0
        console.log '======================== next keyword ========================'
    else
        # Fix: log message typo — was "fetch competed".
        console.log '======================== fetch completed ========================'
    iter()
# Parse one JD listing page: append each product (category, description,
# URL) to productPath, queue the "next page" link if present, otherwise
# advance to the next keyword. Pages with no matching products or a JD
# error page are recorded in errorPath.
fetchComplete = (queueItem, responseBuffer, response)->
    # JD list pages are GBK-encoded; decode before parsing.
    str = iconv.decode responseBuffer,'GBK'
    $ = cheerio.load(str)
    next = $('#filter').find(' a.next')
    cat = $('#select').find('h1').text().split(' - ')[0]
    plist = $('#plist').find('ul>li')
    for li in plist
        purl = $(li).find('a').attr 'href'
        pdesc = $(li).find('img').attr 'alt'
        if pdesc?.length > 0
            # Fix: a comma/quote/newline inside a description used to corrupt
            # the CSV row; quote such fields (others are emitted unchanged).
            row = (csvField f for f in [cat,pdesc,purl])
            fs.appendFileSync productPath , row.join(',') + '\n'
    switch next.length
        # when 0: no next page — fall through to the else branch.
        when 1
            # NOTE(review): assumes queueItem.protocol carries no trailing ':'
            # — confirm against T.Spider's queueItem shape.
            urlStr = [queueItem.protocol,'://',queueItem.host,'/',next.attr 'href'].join('')
            spider.addQueue urlStr
        else
            # Fix: the original guarded with `if fetchEnd`, which tests the
            # truthiness of the function object and is always true.
            fetchEnd()
    if (str.indexOf ('没有找到符合条件的商品')) isnt -1 or (str.indexOf ('error2.aspx')) isnt -1
        fs.appendFileSync errorPath ,queueItem.url+'\n'

# Escape a single CSV field: wrap in double quotes (doubling embedded
# quotes) only when the value contains a delimiter, quote, or newline, so
# ordinary rows keep the exact old format.
csvField = (s)->
    s = String(s ? '')
    if /[",\n]/.test s then '"' + s.replace(/"/g,'""') + '"' else s
# Wire spider events (event-name casing kept exactly as T.Spider emits them).
spider.on 'onFetchComplete',fetchComplete
spider.on 'onFetchtimeout',fetchEnd
spider.on 'onQueueduplicate',fetchEnd

# Shared retry bookkeeping: re-queue a failed URL at most twice.
# Fix: the original duplicated this logic in both handlers, and its
# `!retryMap[url]` test also matched a stored count of 0, resetting the
# counter on every failure so the URL retried forever.
retryFetch = (queueItem)->
    retryMap[queueItem.url] ?= 0
    retryMap[queueItem.url]++
    if retryMap[queueItem.url] < 3
        spider.addQueue queueItem.url

spider.on 'onFetchclienterror',(queueItem, errorData)->
    fs.appendFileSync errorPath,errorData+'\n'
    retryFetch queueItem
spider.on 'onFetcherror',(queueItem, response)->
    # Fix: was appending `queueItem+'\n'`, which logs '[object Object]'.
    fs.appendFileSync errorPath ,queueItem.url+'\n'
    retryFetch queueItem

# Entry point: stream the keyword CSV at `path`, collect the URL column
# (index 5) of each record into UrlArr, then start crawling via iter().
run = (msg)->
    return unless msg is 'main'
    reader = csv()
        .from.path(path,{ delimiter: ',', escape: '' })
    reader.on 'record', (row,index)->
        keywordUrl = row[5]
        UrlArr.push(keywordUrl) if keywordUrl
    reader.on 'end', (count)->
        console.log 'count',count
        iter()
    reader.on 'error', (error)->
        console.log error

run 'main'



