import argparse

import pandas as pd

from AStock.ASCrawler import ASPePercentageSaver, ASPePercentageCrawler
from AStock.ASEvent import Engine, Task
from AStock.ASQuery import ASQuery_stock_info, ASQuery_valuation


def _crawl_result_handler(data, **kwargs):
    """Crawl-task callback: hand the crawled PE-percentage record to a save task.

    Skips records that report an error, then schedules an
    ``ASPePercentageSaver`` task on the engine passed via ``kwargs['engine']``.
    """
    if 'error' in data:
        print(data)
        return

    try:
        engine = kwargs['engine']
    except KeyError as e:
        # Without an engine there is nowhere to schedule the save task.
        print(e)
        return

    # Persist the crawled record; crawl payload wins on key collisions.
    payload = {'event_type': 'pe_percentage', **data}
    save_task = Task(
        payload,
        ASPePercentageSaver(),
        bind_event='save_pe_percentage',
        name='task_save',
        callback=print,
    )
    engine.run_task(save_task)


def _main_(args):
    """Crawl and save PE-percentage data for the requested stock codes.

    Args:
        args: parsed CLI namespace with ``stock_codes`` (list of codes or
            ``None``) and ``force_re_save`` (bool).

    When no codes are given on the command line, every known stock code is
    queried; unless ``force_re_save`` is set, codes that already have a
    saved ``pe30`` value are excluded.
    """
    force_re_save = args.force_re_save
    codes = args.stock_codes
    print('force_re_save: {} codes: {}'.format(force_re_save, codes))

    if codes:
        # Explicit codes from the command line; normalise a bare string.
        # BUG FIX: previously the DataFrame path below ran unconditionally and
        # raised NameError (undefined `df`) whenever codes were supplied.
        if isinstance(codes, str):
            codes = [codes]
    else:
        # No codes given: start from every known stock code.
        df = ASQuery_stock_info(fields=['code'])
        if not force_re_save:
            # Exclude codes that already have a record in the valuation table.
            df_excludes = ASQuery_valuation(fields=['code', 'pe30'])
            # NOTE(review): this tests the *column* count; presumably it guards
            # against an empty/degenerate query result — confirm the intent.
            if df_excludes.shape[1] != 1:
                df_excludes = df_excludes.dropna(axis=0, how='any')
                # Concatenate then drop both copies of any duplicated code
                # (keep=False), leaving only codes not yet saved.
                # (pd.concat replaces DataFrame.append, removed in pandas 2.0.)
                df = pd.concat([df, df_excludes[['code']]])
                df = df.drop_duplicates(subset=['code'], keep=False)
        codes = df['code'].to_list()

    print('total codes: {}'.format(len(codes)))

    engine = Engine()
    engine.start()
    engine.create_event('crawl_pe_percentage')
    engine.start_event('crawl_pe_percentage')
    engine.create_event('save_pe_percentage')
    engine.start_event('save_pe_percentage')

    event_type = 'pe_percentage'
    for code in codes:
        event_data = {
            'event_type': event_type,
            'code': code
        }
        crawl_task = Task(
            event_data,
            ASPePercentageCrawler(),
            bind_event='crawl_pe_percentage',
            name='task_crawl',
            callback=_crawl_result_handler,
            engine=engine
        )
        engine.run_task(crawl_task)

    # Block until all scheduled tasks have finished.
    engine.safe_quit()
    print('done')


if __name__ == '__main__':
    # CLI entry point: collect the target codes / re-save flag and run.
    parser = argparse.ArgumentParser(description='save pe percentage')
    parser.add_argument(
        '-c', '--stock-codes',
        nargs='+',
        help='stock codes for pe percentage',
    )
    parser.add_argument(
        '--force-re-save',
        action='store_true',
        default=False,
        help='force re-save pe percentage even if it already exists',
    )
    _main_(parser.parse_args())
