# -*- coding: utf-8 -*-
import random

import scrapy
import redis
import re
from stock.items import StockItem
import time

# Redis connections: db=1 caches Shanghai stock code->name, db=2 Shenzhen,
# db=3 counts successful scrapes per history-page URL (used for dedupe).
redis_conn_shang = redis.StrictRedis(db=1)
redis_conn_shen = redis.StrictRedis(db=2)
redis_conn_success_url = redis.StrictRedis(db=3)
# Raw string: "\(" is an invalid escape in a plain string literal
# (SyntaxWarning on modern Python). Captures the digits inside
# parentheses, e.g. "...(600000)" -> "600000".
re_com = re.compile(r"\((\d*)\)")


class EastmoneySpider(scrapy.Spider):
    """Crawl the Eastmoney stock list, then fetch each stock's quarterly
    price-history table from money.finance.sina.com.cn.

    Stock codes are cached in redis (db=1 Shanghai, db=2 Shenzhen); db=3
    records how many times each history URL was scraped successfully, so
    already-visited URLs are skipped on later runs.
    """

    name = 'eastmoney'
    allowed_domains = ['eastmoney.com', 'money.finance.sina.com.cn']
    start_urls = ['http://quote.eastmoney.com/stocklist.html']

    def parse(self, response):
        """Cache all stock codes to redis, then schedule one request per
        (stock, year, quarter) history page not yet marked as scraped.
        """
        # ul[1] holds the Shanghai listings, ul[2] the Shenzhen ones.
        self.get_list(response, 1, 0)
        self.get_list(response, 2, 0)
        origin_url = ("http://money.finance.sina.com.cn/"
                      "corp/go.php/vMS_MarketHistory/stockid/{}.phtml?year={}&jidu={}")
        year_list = ["2017", "2018", "2019"]
        quater_list = ["1", "2", "3", "4"]
        # Both exchanges go through one helper instead of two copy-pasted loops.
        yield from self._schedule(redis_conn_shang, origin_url, year_list, quater_list)
        yield from self._schedule(redis_conn_shen, origin_url, year_list, quater_list)
        print("success!")

    def _schedule(self, conn, origin_url, year_list, quater_list):
        """Yield one history-page request per (stock, year, quarter) whose
        URL is not yet recorded in the success db.

        conn: redis connection whose keys are the stock codes to schedule.
        """
        for stock_key in conn.keys("*"):
            for year in year_list:
                for quater in quater_list:
                    url = origin_url.format(stock_key.decode(), year, quater)
                    if not redis_conn_success_url.get(url):
                        yield scrapy.Request(url=url, callback=self.parse_info)
                    else:
                        # Already scraped on a previous run; skip it.
                        print("*" * 50, "link fail")

    def get_list(self, response, temp, position):
        """Extract stock entries from ul[temp] in windows of 200 <li> items,
        storing code -> display text in redis.

        temp: 1 = Shanghai (db=1), anything else = Shenzhen (db=2).
        position: window offset; xpath position() is 1-based, so the window
            covers items (position, position + 200].
        """
        conn = redis_conn_shang if temp == 1 else redis_conn_shen
        # Non-overlapping window. The previous ">= position ... < position+201"
        # form re-selected the boundary item of every batch (e.g. item 200
        # appeared in both the 0- and 200-offset windows).
        li_text_list = response.xpath(
            '//*[@id="quotesearch"]/ul[' + str(temp) + ']/li[position() > ' + str(position) +
            ' and position() <= ' + str(position + 200) + ']/a/text()').extract()
        for li_text in li_text_list:
            stock_num = re_com.search(li_text)
            if stock_num:
                conn.set(stock_num.group(1), li_text)
                print("数据存储完成：{}".format(li_text))
            else:
                print("数据错误：{}".format(li_text))
        # A (nearly) full window suggests more entries remain; recurse for
        # the next 200-item window.
        if len(li_text_list) > 190:
            self.get_list(response, temp, position + 200)

    def parse_info(self, response):
        """Parse one quarterly history table and yield one StockItem per
        data row.

        Also increments this URL's visit counter in the success db so the
        scheduler skips it next run.
        """
        # Data rows exclude the header tr[1].
        stock_length = len(response.xpath(
            '//*[@id="FundHoldSharesTable"]/tr[position() > 1]').extract())
        prev = redis_conn_success_url.get(response.request.url)
        temp = 1 if not prev else int(prev.decode()) + 1
        redis_conn_success_url.set(response.request.url, temp)
        # Stock code is the numeric path segment of the request URL.
        sname = re.search(r"/(\d*)\.", response.request.url).group(1)
        # Data rows live at tr[2] .. tr[stock_length + 1]; the previous
        # range(2, stock_length + 1) dropped the last row.
        for i in range(2, stock_length + 2):
            stock_info_list = response.xpath(
                '//*[@id="FundHoldSharesTable"]/tr[' + str(i) + ']//text()').extract()
            if len(stock_info_list) < 16:
                # Malformed/short row: skip instead of raising IndexError.
                continue
            # Fresh item per row: the old code mutated and re-yielded a
            # single item shared across rows (and across concurrent
            # callbacks via meta), letting later rows clobber earlier ones.
            item = StockItem()
            item["sname"] = sname
            item["date"] = stock_info_list[2].strip()
            item["open"] = stock_info_list[5].strip()
            item["top"] = stock_info_list[7].strip()
            item["close"] = stock_info_list[9].strip()
            item["lowest"] = stock_info_list[11].strip()
            item["volume"] = stock_info_list[13].strip()
            item["amount"] = stock_info_list[15].strip()
            yield item


