# JD.com category crawler: walks the front page and the category menu
# service, writing flattened category tables into ./data/*.csv.
T = require './lib/tools.coffee'
$ = require 'jquery'
iconv = require 'iconv-lite'
fs = require 'fs'
ent = require 'ent'
Spider = T.Spider 
log = T.log
logEnd = T.logEnd
# Output directory for all generated CSV files.
path = './data/'
domain = 'jd.com'
spider = new Spider domain
# Accumulates category-name -> url pairs across all handlers;
# dumped to cat.csv by jsonSpider's onComplete handler.
urlMap = {}
# T.test()
# Turn an <a> element into a "text,href\n" CSV row.
# Returns '' for anchors that should be skipped: blank caption,
# blank or missing href, or a javascript: pseudo-link.
# Fix: the original called (adom.attr 'href').indexOf directly, which
# throws a TypeError when the anchor has no href attribute (attr
# returns undefined, and `undefined isnt ''` passed the old guard);
# the `not _href` guard below covers that case.
afilter = (a)->
    adom = $(a)
    [_text, _href] = [adom.text(), adom.attr 'href']
    # caption and href must both be non-empty
    return '' if _text.trim() is '' or not _href
    # skip javascript: pseudo-links
    return '' if (_href.indexOf 'javascript') isnt -1
    [_text, _href].join(',') + '\n'

# Fetch-start hook: intentionally empty (per-url logging left
# commented out for debugging).
spider.on 'onFetchstart',(queueItem , requestOptions)->
    # log queueItem.url

# On each fetched page, extract every usable category link under the
# #_JD_ALLSORT menu and append it to ./data/l1.csv as "text,href".
# Fix: the original loop body also assigned adom/_arr/_text/_href that
# were never read (afilter recomputes them itself) — dead code removed.
spider.on 'onFetchComplete' ,(queueItem, responseBuffer, response)->
    # logEnd queueItem.url
    # JD pages are GBK-encoded; decode before handing to jQuery.
    dom = $(iconv.decode responseBuffer,'GBK')
    _fileName = path+'l1.csv'
    # start from a fresh file on every fetch
    fs.unlinkSync _fileName if fs.existsSync _fileName
    for a in dom.find('#_JD_ALLSORT').find('a')
        # afilter returns '' for anchors that should be skipped
        fs.appendFileSync _fileName ,afilter(a)

# Log redirect responses so unexpected 3xx hops are visible.
spider.on 'onFetchredirect' ,(queueItem, parsedURL, response )->
    console.log response
# ulist = 'http://list.jd.com/737-794-798-0-0-0-0-0-0-0-1-1-1-1-1-72-4137-33.html'
# spider.addQueue('http://www.jd.com')
# spider.addQueue(ulist)
_u = 'http://www.jd.com'
# Menu service endpoint; its response is handled by jsonSpider below.
_menuUrl = 'http://www.jd.com/ajaxservice.aspx?stype=SortJson'
spider.addQueue _u
# spider.addQueue _menuUrl
spider.crawl()
###
    Fetch the category menu
###
jsonSpider = new Spider domain
# jsonSpider.onFetchComplete
# Top-level menu sections whose third-level entries link to
# channel.jd.com rather than list.jd.com (see the HM check below).
HM = {'文艺':true,'人文社科':true,'经管励志':true,'生活':true,'科技':true,'少儿':true,'教育':true,'其它':true}
# Parse the menu-service response — a three-level category tree — and
# flatten it into ./data/menu.csv, one row per leaf:
#   "l1name,l1url,l2name,l2url,l3name,l3url"
# Side effect: every (name, url) pair is also recorded in the
# module-level urlMap for the onComplete dump.
jsonSpider.on 'onFetchComplete', (queueItem, responseBuffer, response)->
    str = iconv.decode responseBuffer,'GBK'
    # The payload is presumably JS of the form
    # `category.getDataService({...})` — TODO confirm against the live
    # endpoint; `category` is defined locally so the eval below can
    # resolve it, and the identity getDataService hands the raw object
    # straight back.
    category = {}
    category.getDataService = (_json)->
        return  _json
    # SECURITY NOTE(review): eval of remote content — acceptable only
    # while the menu endpoint is trusted; never point this at
    # untrusted URLs.
    _list = (eval str).data
    _fileName = path+'menu.csv'
    # start from a fresh file on every fetch
    fs.unlinkSync _fileName if fs.existsSync _fileName
    for obj in _list
        _str = []
        # Level 1: obj.n is an HTML string of anchors separated by
        # '、'; mark the boundaries with NUL so they can be split.
        _n = obj.n.replace />、</g,'>\0<'
        for _a in _n.split '\0'
            _arr = [$(_a).text(),$(_a).attr 'href']
            urlMap[_arr[0]] = _arr[1]
            _str.push (_arr.join ',')
        str1 = _str.join ','
        # Level 2: obj.i lists sub-categories with name (n), url (u)
        # and their own children (i).
        i1 = obj.i

        if i1.length isnt 0
            for _j in i1
                # Relative urls are rebuilt against channel.jd.com
                # (the -000 suffix is stripped first).
                if (_j.u.indexOf 'http://') is -1 and (_j.u isnt '') 
                    _j.u = ['http://channel.jd.com/',_j.u.replace(/-000/,'') ,'.html'].join ''
                _arr = [_j.n,_j.u]
                urlMap[_arr[0]] = _arr[1]
                str2 = [str1 , _arr.join ','].join ','
                # Level 3: each entry is a 'url|name' string.
                i2 = _j.i
                for _k_i2 in i2
                    [_u,_n] = _k_i2.split '|'
                    # names may contain HTML entities
                    _n = ent.decode _n
                    if (_u.indexOf 'http://') is -1 and (_u isnt '')
                        # HM sections resolve to channel pages, all
                        # others to list pages.
                        if !HM[_j.n]
                            _u = ['http://list.jd.com/',_u.replace(/-000/,'') ,'.html'].join ''
                        else 
                            _u = ['http://channel.jd.com/',_u.replace(/-000/,'') ,'.html'].join ''
                    _arr = [_n,_u]
                    urlMap[_arr[0]] = _arr[1]
                    # one full CSV row per level-3 leaf
                    str3 = [str2,_arr.join ','].join ','
                    fs.appendFileSync _fileName ,str3 + '\n'
# When the menu crawl finishes, persist the accumulated
# name -> url map as ./data/cat.csv, one "name,url" row per entry.
jsonSpider.on 'onComplete',()->
    catFile = path + 'cat.csv'
    # overwrite any previous run's output
    if fs.existsSync catFile
        fs.unlinkSync catFile
    for name of urlMap
        fs.appendFileSync catFile, "#{name},#{urlMap[name]}\n"
# The menu crawl runs independently of the front-page crawl above.
jsonSpider.addQueue _menuUrl
jsonSpider.crawl()




# spider.getHtml()
###s
spider = new Spider(domain)
spider.getHtml(url)//String
spider.getUrlList=
###


