
from pymongo import UpdateOne
from pev3.util.DataBase import DB_CONN
import tushare as ts
from datetime import datetime

"""
从tushare获取日K数据，保存到本地的MongoDB数据库中
"""

class DailyCrawler:
    """Crawl daily K-line bars from tushare and upsert them into local MongoDB collections."""

    # Default beginning of the crawl window when the caller does not provide one.
    DEFAULT_START_DATE = '2008-01-01'

    def __init__(self):
        # Target collections: unadjusted, forward-adjusted (qfq) and
        # backward-adjusted (hfq) daily bars.
        self.daily = DB_CONN['daily']
        self.dailyQfq = DB_CONN['daily_qfq']
        # Backward-compatible alias for the historical misspelling of the
        # attribute name; external code may still reference it.
        self.dailQfq = self.dailyQfq
        self.dailyHfq = DB_CONN['daily_hfq']

    @staticmethod
    def dailyObj2Doc(code, dailyObj):
        """Convert one daily bar into a MongoDB document.

        :param code: stock code
        :param dailyObj: one daily bar; any object supporting
                         ``obj['field']`` access (a DataFrame row or a dict)
        :return: dict ready to be upserted into MongoDB
        """
        return {
            'code': code,
            'date': dailyObj['date'],
            'close': dailyObj['close'],
            'open': dailyObj['open'],
            'high': dailyObj['high'],
            'low': dailyObj['low'],
            'volume': dailyObj['volume']
        }

    @classmethod
    def _resolveDateRange(cls, startDate, endDate):
        """Fill in the default crawl window: DEFAULT_START_DATE through today.

        :param startDate: 'YYYY-MM-DD' or None for the default start
        :param endDate: 'YYYY-MM-DD' or None for today
        :return: (startDate, endDate) tuple with defaults applied
        """
        if startDate is None:
            startDate = cls.DEFAULT_START_DATE
        if endDate is None:
            endDate = datetime.now().strftime('%Y-%m-%d')
        return startDate, endDate

    def saveData(self, code, dfDaily, collection, extraFields=None):
        """Save crawled daily bars into the local MongoDB.

        :param code: stock code
        :param dfDaily: DataFrame holding the daily bars; may be None or
                        empty (tushare returns None for unknown codes)
        :param collection: target MongoDB collection
        :param extraFields: extra fields merged into every document,
                            in addition to the K-line fields
        """
        # tushare occasionally returns None or an empty frame — nothing to save.
        if dfDaily is None or len(dfDaily.index) == 0:
            return

        updateRequests = []
        for dfIndex in dfDaily.index:
            doc = self.dailyObj2Doc(code, dfDaily.loc[dfIndex])
            if extraFields is not None:
                doc.update(extraFields)

            # Upsert keyed on (code, date) so re-crawling a range updates
            # existing rows instead of creating duplicates.
            updateRequests.append(
                UpdateOne(
                    {'code': doc['code'], 'date': doc['date']},
                    {'$set': doc},
                    upsert=True)
            )

        # Unordered bulk write for efficiency.
        updateResult = collection.bulk_write(updateRequests, ordered=False)
        print('保存日线数据,代码：%s, 插入： %4d条， 更新: %4d' % (code, updateResult.upserted_count, updateResult.modified_count), flush=True)

    def crawlIndex(self, startDate=None, endDate=None):
        """Crawl daily bars for the major indexes and store them locally.

        Defaults to the range DEFAULT_START_DATE .. today.
        """
        indexCodes = ['000001', '000300', '399001', '399005', '399006']
        startDate, endDate = self._resolveDateRange(startDate, endDate)

        for code in indexCodes:
            dfDaily = ts.get_k_data(code, index=True, start=startDate, end=endDate)
            self.saveData(code, dfDaily, self.daily, {'index': True})

    def crawl(self, startDate=None, endDate=None):
        """Crawl daily bars for all stocks (unadjusted, qfq and hfq) and store them.

        Defaults to the range DEFAULT_START_DATE .. today.
        """
        # All stock codes from tushare's basics listing.
        codes = list(ts.get_stock_basics().index)
        startDate, endDate = self._resolveDateRange(startDate, endDate)

        for code in codes:
            # Unadjusted prices.
            dfDaily = ts.get_k_data(code, autype=None, start=startDate, end=endDate)
            self.saveData(code, dfDaily, self.daily, {'index': False})

            # Forward-adjusted (qfq) prices.
            dfDailyQfq = ts.get_k_data(code, autype='qfq', start=startDate, end=endDate)
            self.saveData(code, dfDailyQfq, self.dailyQfq, {'index': False})

            # Backward-adjusted (hfq) prices.
            dfDailyHfq = ts.get_k_data(code, autype='hfq', start=startDate, end=endDate)
            self.saveData(code, dfDailyHfq, self.dailyHfq, {'index': False})


if __name__ == '__main__':
    crawler = DailyCrawler()
    # crawler.crawlIndex('2018-10-01', '2018-10-23')
    crawler.crawl('2018-10-01', '2018-10-23')