# -*- coding: utf-8 -*-

from  pymongo import UpdateOne
from pe.util.DataBase import DB_CONN
import tushare as ts
from datetime import datetime

"""
从tushare获取日K数据，保存到本地的MongoDB数据库中
"""

class DailyCrawler:
    """Crawl daily K-line (candlestick) data from tushare and persist it to local MongoDB.

    Three collections are written: raw prices (``daily``), forward-adjusted
    prices (``daily_qfq``) and backward-adjusted prices (``daily_hfq``).
    Documents are upserted keyed on ``(code, date)``, so re-crawling a range
    is idempotent.
    """

    def __init__(self):
        # One MongoDB collection per price series (raw / qfq / hfq).
        self.daily = DB_CONN['daily']
        self.dailyQfq = DB_CONN['daily_qfq']
        self.dailyHfq = DB_CONN['daily_hfq']

    @staticmethod
    def _defaultDates(startDate, endDate):
        """Return (startDate, endDate) with defaults filled in.

        Defaults: start at 2019-01-01, end today.  Both inputs/outputs are
        'YYYY-MM-DD' strings, the format tushare's ``get_k_data`` expects.
        """
        if startDate is None:
            startDate = '2019-01-01'
        if endDate is None:
            endDate = datetime.now().strftime('%Y-%m-%d')
        return startDate, endDate

    def crawlIndex(self, startDate=None, endDate=None):
        """Crawl daily data for the major market indexes and save it locally.

        :param startDate: first day to fetch ('YYYY-MM-DD'); defaults to 2019-01-01
        :param endDate: last day to fetch ('YYYY-MM-DD'); defaults to today
        """
        # SSE composite, CSI 300, SZSE component, SME board, ChiNext.
        indexCodes = ['000001', '000300', '399001', '399005', '399006']

        startDate, endDate = self._defaultDates(startDate, endDate)

        for code in indexCodes:
            # index=True tells tushare the code is an index, not a stock.
            dfDaily = ts.get_k_data(code, index=True, start=startDate, end=endDate)
            self.saveData(code, dfDaily, self.daily, {'index': True})

    def saveData(self, code, dfDaily, collection, extra_fields=None):
        """Save crawled K-line rows to a MongoDB collection via bulk upsert.

        :param code: security code the rows belong to
        :param dfDaily: DataFrame of daily bars (columns: date/open/close/high/low/volume)
        :param collection: target pymongo collection
        :param extra_fields: optional extra key/values merged into every document
        """
        updateRequests = []

        for dfIndex in dfDaily.index:
            doc = self.dailyObj2Doc(code, dfDaily.loc[dfIndex])

            if extra_fields is not None:
                doc.update(extra_fields)

            # Upsert on (code, date) so repeated crawls never duplicate rows.
            updateRequests.append(
                UpdateOne(
                    {'code': doc['code'], 'date': doc['date']},
                    {'$set': doc},
                    upsert=True
                )
            )

        # Single unordered bulk write: one round-trip instead of N.
        if updateRequests:
            updateResult = collection.bulk_write(updateRequests, ordered=False)
            print('保存日线数据， 代码:%s,插入：%4d条, 更新：%4d条' % (code, updateResult.upserted_count, updateResult.modified_count), flush=True)

    def crawl(self, startDate=None, endDate=None):
        """Crawl daily K-line data for every listed stock and save it locally.

        Fetches three price series per stock — unadjusted, forward-adjusted
        (qfq) and backward-adjusted (hfq) — into their respective collections.

        :param startDate: first day to fetch ('YYYY-MM-DD'); defaults to 2019-01-01
        :param endDate: last day to fetch ('YYYY-MM-DD'); defaults to today
        """
        # The stock-basics index holds every listed stock code.
        dfStocks = ts.get_stock_basics()
        codes = list(dfStocks.index)

        startDate, endDate = self._defaultDates(startDate, endDate)

        # (tushare autype, target collection) for each price series.
        series = (
            (None, self.daily),    # unadjusted
            ('qfq', self.dailyQfq),  # forward-adjusted
            ('hfq', self.dailyHfq),  # backward-adjusted
        )

        for code in codes:
            for autype, collection in series:
                dfDaily = ts.get_k_data(code, autype=autype, start=startDate, end=endDate)
                self.saveData(code, dfDaily, collection, {'index': False})

    @staticmethod
    def dailyObj2Doc(code, dailyObj):
        """Convert one K-line row into a MongoDB document dict.

        :param code: security code to stamp on the document
        :param dailyObj: mapping-like row with date/open/close/high/low/volume
        :return: plain dict ready for insertion/upsert
        """
        return {
            'code': code,
            'date': dailyObj['date'],
            'close': dailyObj['close'],
            'open': dailyObj['open'],
            'high': dailyObj['high'],
            'low': dailyObj['low'],
            'volume': dailyObj['volume']
        }

if __name__ == '__main__':
    dailyCrawler = DailyCrawler()
    # Back-fill index candles from 2019-01-01 through today.
    dailyCrawler.crawlIndex(startDate='2019-01-01', endDate=None)
    # Back-fill per-stock candles for a fixed window.
    dailyCrawler.crawl(startDate='2019-07-01', endDate='2019-07-22')