"""
从tushare获取日K数据，保存到本地的MongoDB数据库中
"""

from pev2.util.DataBase import DB_CONN
import tushare as ts

class DailyCrawler:
    def __init__(self):
        """Bind the three MongoDB collections used to store daily bars.

        Collections: raw prices ('daily'), forward-adjusted ('daily_qfq'),
        and backward-adjusted ('daily_hfq').
        """
        # attribute name -> MongoDB collection name
        for attr_name, collection_name in (
            ('daily', 'daily'),
            ('dailyQfq', 'daily_qfq'),
            ('dailyHfq', 'daily_hfq'),
        ):
            setattr(self, attr_name, DB_CONN[collection_name])

    """
    获取10年的K线数据，保存到数据库中
    """
    def crawl(self):

        startDate = '2018-01-01'
        endDate = '2018-09-20'

        # 因为上证指数没有停牌不会缺数，所以用它作为交易日历
        szzsKData = ts.get_k_data('000001', index=True, start=startDate, end=endDate)
        allDates = list(szzsKData['date'])

        # 获取所有股票代码
        stockData = ts.get_stock_basics()
        codes = list(stockData['code'])

        for code in codes:
            # 抓取不复权的价格
            dailies = ts.get_k_data(code, autype=None, start=startDate, end=endDate)

            # 抓取前复权的价格
            dailiesQfq = ts.get_k_data(code, autype='qfq', start=startDate, end=endDate)

            # 抓取后复权的价格
            dailiesHfq = ts.get_k_data(code, autype='hfq', start=startDate, end=endDate)

            dailies['adjfactor'] = dailiesHfq['close'] / dailies['close']

            # 将date日期作为索引
            dailies.set_index(['date'], inplace=True)
            dailiesQfq.set_index(['date'], inplace=True)
            dailiesHfq.set_index(['date'], inplace=True)
