from AStock.ASCrawler import ASResearchReportCrawler, ASResearchReportSaver
from AStock.ASEvent import Engine, Task
import random
from datetime import datetime
import argparse


def _crawl_result_handler(report, **kwargs):
    """Callback for a crawl task: queue the next page (if any) and save this page.

    Expects ``report`` to carry ``page_no``, ``pages`` and ``data``, and
    ``kwargs`` to carry ``engine`` and ``crawl_event_data`` (the mutable dict
    shared across successive crawl tasks for pagination state).
    """
    # The crawler signals failure by embedding an 'error' key in its result.
    if 'error' in report:
        print(report)
        return

    event_type = 'research_report'

    try:
        engine = kwargs['engine']
        current_page = report['page_no']
        total_pages = report['pages']
        payload = report['data']
        crawl_data = kwargs['crawl_event_data']
    except KeyError as missing:
        print(missing)
        return

    # While pages remain, advance the shared pagination state and schedule
    # the next crawl, re-registering this function as its callback.
    if current_page < total_pages:
        crawl_data['rand'] += 1
        crawl_data['page_no'] = current_page + 1
        engine.run_task(Task(
            crawl_data,
            ASResearchReportCrawler(),
            bind_event='crawl_research_report',
            name='task_crawl',
            callback=_crawl_result_handler,
            engine=engine,
            crawl_event_data=crawl_data,
        ))

    # Persist this page's data regardless of whether more pages follow.
    engine.run_task(Task(
        {'event_type': event_type, 'data': payload},
        ASResearchReportSaver(),
        bind_event='save_research_report',
        name='task_save',
        callback=print,
    ))


def _main_(args):
    """Start the engine, seed the first crawl, and wait for completion.

    ``args`` must provide ``begin_date`` and ``end_date`` (yyyymmdd strings).
    """
    engine = Engine()
    engine.start()
    # Register and activate both event channels used by the tasks.
    for channel in ('crawl_research_report', 'save_research_report'):
        engine.create_event(channel)
        engine.start_event(channel)

    print('begin date: {}, end date: {}'.format(args.begin_date, args.end_date))

    # Seed request; this same dict is mutated by the callback to page forward.
    seed_data = {
        'event_type': 'research_report',
        'begin_date': args.begin_date,
        'end_date': args.end_date,
        'page_no': 1,
        'page_size': 1000,
        'rand': random.randint(50000000, 57000000),
        'timestamp': int(datetime.now().timestamp()),
    }
    engine.run_task(Task(
        seed_data,
        ASResearchReportCrawler(),
        bind_event='crawl_research_report',
        name='task_crawl',
        callback=_crawl_result_handler,
        engine=engine,
        crawl_event_data=seed_data,
    ))

    # Block until all queued tasks have drained, then shut the engine down.
    engine.safe_quit()
    print('done')


if __name__ == '__main__':
    # CLI entry point: both date bounds are mandatory yyyymmdd strings.
    parser = argparse.ArgumentParser(description='save research report')
    parser.add_argument('-b', '--begin-date', required=True,
                        help='begin date of research report, yyyymmdd format')
    parser.add_argument('-e', '--end-date', required=True,
                        help='end date of research report, yyyymmdd format')
    _main_(parser.parse_args())