#淘宝店铺信息采集

import requests,re
from config import *
from urllib.parse import quote
from Helper import Helper
from Helper import r as redis
import json,time,random
from pprint import pprint
from threading import Thread

class Taobao(object):
    """Taobao shop-information crawler.

    Tasks (a search keyword plus a result-page offset) live in the Redis
    list ``'shop_task'``. Worker threads pop tasks via ``Helper.getPage``,
    fetch the corresponding shop-search result page, and persist each
    parsed shop record through ``Helper.saveRedis``.
    """

    # Compiled once for all pages: extracts the embedded ``g_page_config``
    # JSON blob from the search-result HTML.
    _PAGE_CONFIG_RE = re.compile(r'g_page_config = (.*);')

    def __init__(self):
        # Ask the operator for a search keyword, then seed the Redis task
        # queue only when it is empty — a restarted crawler resumes the
        # pending tasks instead of generating duplicates.
        keyword = input('请输入想要搜索的关键词:')

        # Number of tasks still pending in Redis.
        length = Helper.lenRedis('shop_task')

        if length <= 0:
            Helper.savePage(keyword)

        print('*----------------任务生成成功,开始执行爬虫程序-------------*')

    def requestHtml(self, data, timeout=10):
        """Fetch and process one search-result page.

        :param data: task dict with ``'keyword'`` and ``'page'`` (result
            offset used as the ``s=`` query parameter).
        :param timeout: request timeout in seconds (defaulted, so existing
            callers passing only ``data`` are unaffected).
        :return: ``None``; results or re-queued tasks are side effects.
        """
        resp = {}

        try:
            # BUGFIX: the original issued ``requests.delete``, which cannot
            # return the search HTML — the page must be fetched with GET.
            # A timeout is set so a stalled response cannot hang the worker.
            r = requests.get(
                url=domain['taobao_url'] + quote('{0}'.format(data['keyword'])) + '&s={0}'.format(data['page']),
                headers=header,
                timeout=timeout,
            )

            if r.status_code == 200 and domain['login_url'] in r.text:
                # Session cookie expired: push the task back so another
                # attempt happens once a fresh cookie is configured.
                redis.lpush('shop_task', json.dumps(data, ensure_ascii=False))
                resp['code'] = 4003
                resp['msg'] = '会话过期'
                pprint(resp)
            elif r.status_code == 200 and '亲，小二正忙，滑动一下马上回来' in r.text:
                # Slider captcha detected: re-queue the task for a retry.
                redis.lpush('shop_task', json.dumps(data, ensure_ascii=False))
                resp['code'] = 4001
                resp['msg'] = '滑块出现'
                pprint(resp)
            else:
                self.parseHtml(r.text)
        except Exception as e:
            # ``parseHtml`` raises AttributeError when the page embeds no
            # ``g_page_config`` blob (typically past the last result page).
            # Log the exception instead of discarding it silently.
            print('此页码暂无店铺信息:', data['page'], e)

    def parseHtml(self, html):
        """Extract shop records from a result page and persist them.

        Raises ``AttributeError`` (caught by ``requestHtml``) when the page
        does not contain the ``g_page_config`` JSON blob.
        """
        raw = self._PAGE_CONFIG_RE.search(html).group(1)
        items = json.loads(raw)

        # Renamed from ``list`` — the original shadowed the builtin.
        shops = items['mods']['shoplist']['data']['shopItems']

        for item in shops:
            record = {
                'title': item['title'],
                'uid': item['uid'],
                'nick': item['nick'],
                'city': item['provcity'],
                # ``shopUrl`` embeds the numeric shop id; take the first
                # run of digits.
                'shopId': re.search(r'\d+', item['shopUrl']).group(0),
            }
            Helper.saveRedis(record)

    def main(self):
        """Worker loop: consume tasks until the queue is drained."""
        while True:
            data = Helper.getPage()

            # BUGFIX: identity check for the sentinel, not ``== None``.
            if data is None:
                break

            self.requestHtml(data)

    def run(self):
        """Start ``max_threads`` worker threads and wait for them all."""
        threads = []

        for _ in range(max_threads):
            t = Thread(target=self.main, args=())
            t.start()
            threads.append(t)

        for t in threads:
            t.join()
