 # -*- coding: utf-8 -*-
import MySQLdb
import time
from scrapy import Request

from scrapy.spiders import CrawlSpider
from scrapy.selector import Selector
from spider.db import Session
from sqlalchemy import desc
from spider.models import CurrListedCorp, PeriodList
from spider.items import StockHistoryPriceItem


class CnInfoComCnBalanceSpider(CrawlSpider):
    """Historical stock prices.

    For every listed corporation (``CurrListedCorp``) and every reporting
    period (``PeriodList``) this spider requests the 163.com daily-trade
    history page and writes each parsed table row into the
    ``greatwall.stock_price_before`` MySQL table.
    """
    name = "stock_price"
    # Bug fix: OffsiteMiddleware matches bare domain names; a value with an
    # "http://" scheme never matches, so every request would be filtered.
    allowed_domains = ['quotes.money.163.com']
    monthList = ['-03-31', '-06-30', '-09-30', '-12-31']

    zcfz_url_pattern = 'http://quotes.money.163.com/trade/lsjysj_{}.html?&year={}&season={}'

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.104 Safari/537.36 Core/1.53.1708.400 QQBrowser/9.5.9635.400'
    }

    # Script entry point.
    def start_requests(self):
        """Yield one Request per (stock, year, season) combination.

        Reads the period list and the listed-corporation codes from the
        SQLAlchemy session; the session is always closed via ``finally``.
        """
        session = Session()
        try:
            year_period_list = session.query(
                PeriodList.year, PeriodList.period
            ).all()

            stock_cd_list = session.query(
                CurrListedCorp.stock_cd
            ).all()

            for (stock_cd,) in stock_cd_list:
                for year_period in year_period_list:
                    year = int(year_period[0])
                    # DB stores a 0-based period; the 163.com "season"
                    # query parameter is 1-based — TODO confirm against data.
                    period = int(year_period[1]) + 1

                    yield Request(
                        url=self.zcfz_url_pattern.format(
                            stock_cd, year, period
                        ),
                        meta={
                            'stock_cd': stock_cd,
                            'year': year,
                            'period': period,
                        },
                        callback=self.parse
                    )
        finally:
            session.close()

    def parse(self, response):
        """Parse the price table and persist every data row to MySQL.

        Row layout assumption (from the split indices below): cell 1 is the
        trade date, cell 5 is the price — verify against the live page.
        """
        arr_rows = response.selector.xpath(
            '//table[@class ="table_bg001 border_box limit_sale"]//tr[*]'
        ).extract()

        if len(arr_rows) <= 1:
            # Header-only (or empty) table: nothing to store.
            return

        stock_cd = response.meta['stock_cd']

        # Bug fix: the original opened, committed and closed a fresh MySQL
        # connection for EVERY table row (plus a compensating sleep).  One
        # connection per response is enough; try/finally guarantees release.
        # NOTE(review): credentials are hard-coded — move to settings/env.
        conn = MySQLdb.connect(host='localhost', user='root',
                               passwd='123456', port=3306, db='greatwall')
        try:
            cur = conn.cursor()
            try:
                for raw_row in arr_rows[1:]:  # skip the header row
                    cells = str(raw_row).replace("</td>", "").split("<td")
                    day_time = cells[1].replace(">", "")
                    price = (cells[5]
                             .replace(' class="cRed">', "")
                             .replace(' class="cGreen">', "")
                             .replace(' class="">', ""))

                    self.logger.debug("%s %s %s", stock_cd, day_time, price)

                    # Parameterized query — values are never interpolated
                    # into the SQL string.
                    cur.execute(
                        'insert into stock_price_before values(%s,%s,%s)',
                        [stock_cd, day_time, price]
                    )
                # Single commit for the whole page instead of one per row.
                conn.commit()
            finally:
                cur.close()
        finally:
            conn.close()



