#  -*- coding: utf-8 -*-

from pymongo import UpdateOne,InsertOne
from util.database import DB_CONN
from util.stock_util import get_trading_dates
import tushare as ts
from datetime import datetime, timedelta
import traceback

"""
从tushare获取股票基础数据，保存到本地的MongoDB数据库中
"""


class BasicCrawler:
    """Crawl daily stock basic data from Tushare and persist it to MongoDB.

    One document per (ts_code, trade_date) is upserted into the ``basic``
    collection, so the crawler is safe to re-run over the same date range.
    """

    def __init__(self):
        # 'basic' collection of the project-wide MongoDB connection.
        self.db = DB_CONN['basic']
        # NOTE(review): hard-coded API token — consider loading it from an
        # environment variable or config file instead of committing it.
        ts.set_token('d5e0fba592c038962a64edbab79d84131361b754ffd5a36ea63340c7')
        self.pro = ts.pro_api()

    def crawl_basic(self, begin_date=None, end_date=None):
        """
        Crawl stock basic info for every trading day in [begin_date, end_date].

        :param begin_date: start date 'YYYY-MM-DD'; defaults to yesterday
        :param end_date: end date 'YYYY-MM-DD'; defaults to yesterday
        """
        # Both bounds default to the previous calendar day.
        yesterday = (datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d')
        if begin_date is None:
            begin_date = yesterday
        if end_date is None:
            end_date = yesterday

        for date in get_trading_dates(begin_date, end_date):
            try:
                self.crawl_basic_at_date(date)
            except Exception:
                # A single bad day must not abort the whole backfill; keep the
                # full stack trace instead of just str(e) for diagnosis.
                traceback.print_exc()
                print('抓取股票基本信息时出错，日期：%s' % date, flush=True)

    def crawl_basic_at_date(self, date):
        """
        Crawl the stock basic info of one trading day from Tushare.

        :param date: trading day as 'YYYY-MM-DD'
        """
        # Tushare expects dates in 'YYYYMMDD' form.
        df_basics = self.pro.daily_basic(ts_code='', trade_date=date.replace('-', ''))

        # Nothing to do when the API returned no data for that day
        # (Tushare may return an empty DataFrame rather than None).
        if df_basics is None or df_basics.empty:
            return

        update_requests = []
        for doc in df_basics.to_dict('records'):
            # Upsert keyed on (ts_code, trade_date) so re-running a date does
            # not create duplicate documents (plain InsertOne would).
            update_requests.append(UpdateOne(
                {'ts_code': doc['ts_code'], 'trade_date': doc['trade_date']},
                {'$set': doc},
                upsert=True))

        if update_requests:
            self.db.bulk_write(update_requests, ordered=False)
            print('抓取股票基本信息，日期：%s, 插入：%4d条' %
                  (date, len(update_requests)), flush=True)


if __name__ == '__main__':
    # Backfill the fixed historical range when executed as a script.
    crawler = BasicCrawler()
    crawler.crawl_basic(begin_date='2017-12-31', end_date='2018-10-08')



