# -*- coding: utf-8 -*-
# NOTE(review): Python 2-only module -- relies on reload(sys)/
# setdefaultencoding below, and on xrange / print statements later on.
import scrapy,sys,os,json,pymongo,time
# HACK: force the process-wide default encoding to UTF-8 so implicit
# str<->unicode coercions of the scraped Chinese text don't raise
# UnicodeDecodeError. Strongly discouraged in general, but load-bearing
# for this spider under Python 2.
reload(sys).setdefaultencoding('UTF-8')
import logging
from pyquery import PyQuery as pq


class bkSpider(scrapy.Spider):
        name = "bk"
        allowed_domains = ["www.tmkoo.com"]
        '''start_urls = [
                "http://www.crca.com.cn/list.aspx?cid=20&page=1",
                ]'''
        
        def __init__(self):
            self.results = {}
            self.regNoList = []
            self.headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
            "Referer": "http://www.tmkoo.com/searchmore/zch.php",
            "Accept-Language": "zh-cn",
            "User-Agent": "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET4.0C; .NET4.0E)",
            "Host": "www.tmkoo.com",
            "Content-Type":"application/x-www-form-urlencoded",
            "Pragma": "no-cache",
            "Connection":"keep-alive",
            "Cookie": "JSESSIONID=657F80F8821239BB779ABF7B9F72BAEC;" ,
            }

            '''self.APIkey = 'UYT856h09TQurw8rsW'
            self.APIpassword = 'Tdwh58dkP04w'
            connection = pymongo.MongoClient('localhost',27017)
            db = connection['shangbiao']
            self.collection = db['shangdun']'''

        def store(self,filename,data):
            with open(filename, 'w') as f:  
                json.dump(data,f)

        def closed(self,reason):
            self.store('r.json',self.results)
            self.store('f.json',self.regNoList[0])

        def start_requests(self):#17000000
            for x in xrange(1,19000000):
                if x%400==0:
                    self.regNoList.append(str(x))
                    pdata = {}
                    pdata['zch'] = ' '.join(self.regNoList)
                    del self.regNoList[:]
                    url = 'http://www.tmkoo.com/searchmore/zch.php'
                    yield scrapy.FormRequest(url,formdata=pdata,cookies={'JSESSIONID':'657F80F8821239BB779ABF7B9F72BAEC'},meta={'regNo':x},callback=self.parse)
                else:
                    self.regNoList.append(str(x))

        def parse(self,response):
            regNo = response.meta['regNo']
            if response.status == 200:
                doc = pq(response.body)
                for line in doc('#messageList dt'):
                    regNo = pq(line).find('.zch a').text()
                    intCls = pq(line).find('.operate a').text()
                    self.results[regNo] = intCls
                    print 'No.%s TM is exisit,Cls is %s,count is %s' %(regNo,intCls,len(self.results))
                    if len(self.results)>100000:
                        self.store('r'+str(regNo)+'.json',self.results)
                        self.results.clear()
            else:
                logging.info("access faild code :%s" %(response.status))
                print "failNo.%s,results:%s,regNo:%s" %(regNo,len(self.results),len(self.regNoList))
                with open('fe.json', 'a') as f:  
                    f.write(regNo+"\n");
                time.sleep(60)
