import scrapy

from StockPro.items import StockproItem


class StockcurveSpider(scrapy.Spider):
    """Crawl NetEase (163) A-share quotes and download daily history data.

    Flow:
      1. ``parse`` reads one page of the ranked-quotes JSON feed and, for
         each stock, requests its historical-trade page.
      2. ``get_params`` scrapes the available begin/end dates from that
         page and posts a download request for the daily K-line CSV.
      3. ``download_data`` wraps the raw CSV bytes into a ``StockproItem``.
    """

    name = 'stockcurve'
    # allowed_domains = ['www.xxx.com']

    # Ranked-quotes JSON endpoint; ``{}`` is the zero-based page number.
    # 24 rows per page, sorted by PERCENT descending.
    _RANK_URL = 'http://quotes.money.163.com/hs/service/diyrank.php?host=http%3A%2F%2Fquotes.money.163.com%2Fhs%2Fservice%2Fdiyrank.php&page={}&query=STYPE%3AEQA&fields=NO%2CSYMBOL%2CNAME%2CPRICE%2CPERCENT%2CUPDOWN%2CFIVE_MINUTE%2COPEN%2CYESTCLOSE%2CHIGH%2CLOW%2CVOLUME%2CTURNOVER%2CHS%2CLB%2CWB%2CZF%2CPE%2CMCAP%2CTCAP%2CMFSUM%2CMFRATIO.MFRATIO2%2CMFRATIO.MFRATIO10%2CSNAME%2CCODE%2CANNOUNMT%2CUVSNEWS&sort=PERCENT&order=desc&count=24&type=query'

    # Last page already scheduled; page 0 comes from ``start_urls``.
    page = 0

    start_urls = [_RANK_URL.format(0)]

    def parse(self, response, **kwargs):
        """Parse one page of the ranked-quotes JSON feed.

        Yields a detail-page request per stock, then schedules the next
        feed page (0..179 total, matching the original coverage).  The
        original version burst all remaining pages from the first
        callback via a ``while`` loop on the shared class counter;
        scheduling one page per callback covers the same pages with
        less scheduler pressure and no dead loop on later callbacks.
        """
        for stock in response.json()['list']:
            code = stock['CODE']
            name = stock['NAME']
            # code looks like '0600000'/'1000001'; the leading digit is a
            # market marker and is dropped for the history-page URL.
            detail_url = 'http://quotes.money.163.com/trade/lsjysj_' + code[1:] + '.html#06f01'
            yield scrapy.Request(detail_url, callback=self.get_params,
                                 meta={'code': code, 'name': name})

        if self.page < 179:
            self.page += 1
            yield scrapy.Request(self._RANK_URL.format(self.page), callback=self.parse)

    def _extract_date(self, response, row):
        """Return the value of a date <input> in the ``dropBox1`` form.

        ``row`` is the 1-based <tr> index (1 = begin date, 2 = end date).
        The served HTML may or may not contain the ``<tbody>`` element
        that browsers insert when rendering tables, so both XPath
        variants are tried.  Returns ``None`` when neither matches.
        """
        for template in (
            '//div[@id="dropBox1"]/div[2]/form/table[2]/tbody/tr[{}]/td[2]/input[1]/@value',
            '//div[@id="dropBox1"]/div[2]/form/table[2]/tr[{}]/td[2]/input[1]/@value',
        ):
            value = response.xpath(template.format(row)).extract_first()
            if value:
                return value
        return None

    def get_params(self, response):
        """Scrape the available date range and request the CSV download.

        Skips the stock (with a warning) when the date inputs cannot be
        found instead of crashing on ``None`` — the original called
        ``.split('-')`` on the raw ``extract_first()`` result.
        """
        code = response.meta["code"]
        begin_date = self._extract_date(response, 1)
        end_date = self._extract_date(response, 2)
        if not begin_date or not end_date:
            # Layout change, anti-scraping page, or delisted stock.
            self.logger.warning('date range not found for %s (%s)', code, response.url)
            return
        # Dates arrive as 'YYYY-MM-DD'; the download API wants 'YYYYMMDD'.
        begin_date = begin_date.replace('-', '')
        end_date = end_date.replace('-', '')
        download_url = 'http://quotes.money.163.com/service/chddata.html'
        params = {
            "code": code,
            "start": begin_date,
            "end": end_date,
            "fields": "TCLOSE;HIGH;LOW",
        }
        yield scrapy.FormRequest(download_url, formdata=params, callback=self.download_data, meta=response.meta)

    def download_data(self, response):
        """Wrap the downloaded daily-history CSV bytes into an item."""
        item = StockproItem()
        item['all_data'] = response.body  # raw CSV bytes; pipeline decodes/stores
        item['code'] = response.meta['code']
        item['name'] = response.meta['name']
        yield item
