import os
import sys
import time
import datetime

import pandas as pd

from multiprocessing import Pool

sys.path.append('..')

from tools.setting import DATA_DIR
from API.crypto_news import (get_aicoin_bulletin_by_last_id,
                             get_aicoin_bulletin_last_id,
                             get_jinse_bulletin_by_id,
                             get_social_metrics_list,
                             get_indicators_via_http)
from tools.logger import logger

# Root directory for all scraped social/sentiment data files; created eagerly
# at import time so the update functions can write into it unconditionally.
SOCIAL_DATA_DIR = os.path.join(DATA_DIR, 'social_data')
os.makedirs(SOCIAL_DATA_DIR, exist_ok=True)


def update_social_data_jinse_bulletin():
    """Incrementally fetch Jinse Finance (金色财经) bulletins into a local CSV.

    Resumes from the highest ``id`` already stored in
    ``SOCIAL_DATA_DIR/jinse_bulletin.csv`` (or id 170 on first run — the
    assumed earliest available bulletin; TODO confirm), fetches bulletins
    one id at a time, and appends them in batches of 1000. The crawl stops
    after more than 30 consecutive ids return no data. A final pass drops
    blank and duplicate rows from the CSV.

    Side effects: reads/writes the CSV file and logs progress. Returns None.
    """
    file_name = os.path.join(SOCIAL_DATA_DIR, 'jinse_bulletin.csv')
    if os.path.exists(file_name):
        historical_data = pd.read_csv(file_name, encoding='gb18030')
        last_update = historical_data['id'].astype(int).max()
    else:
        last_update = 170
    logger.info(f'金色财经快讯已更新到id为{last_update}的快讯')

    def _flush(rows):
        # Append a batch to the CSV; write the header only when the file is
        # being created (the original wrote a headerless file on the final
        # flush of a fresh run, which corrupted later read_csv parses).
        df = pd.DataFrame(rows)
        if os.path.exists(file_name):
            df.to_csv(file_name, index=False, header=False, mode='a', encoding='gb18030')
        else:
            df.to_csv(file_name, index=False, encoding='gb18030')

    _id = int(last_update) + 1
    continuous_error_count = 0
    save_count = 0
    data = []
    while True:
        _data = get_jinse_bulletin_by_id(_id)
        _id += 1
        if not _data:
            continuous_error_count += 1
            if continuous_error_count > 30:
                # Too many consecutive missing ids: assume we reached the end.
                if data:
                    _flush(data)
                break
            continue
        data.append(_data)
        save_count += 1
        continuous_error_count = 0
        if save_count >= 1000:
            _flush(data)
            data = []
            save_count = 0

    if not os.path.exists(file_name):
        # First run fetched nothing — no file to clean up.
        return
    historical_data = pd.read_csv(file_name, encoding='gb18030', low_memory=False)
    historical_data.dropna(how='all', inplace=True)
    historical_data.drop_duplicates(inplace=True)
    historical_data.to_csv(file_name, index=False, encoding='gb18030')
    logger.info('金色财经快讯已更新完毕，最新id为%d' % historical_data['id'].max())


def update_social_data_aicoin_bulletin():
    """Incrementally fetch AICoin bulletins into a local CSV.

    Walks backwards from the newest bulletin id towards the highest id
    already stored in ``SOCIAL_DATA_DIR/aicoin_bulletin.csv`` (or id 1 on
    first run), appending pages in batches of 50. A final pass drops blank
    and duplicate rows from the CSV.

    Side effects: reads/writes the CSV file and logs progress. Returns None.
    """
    logger.info('更新aicoin快讯数据')
    file_name = os.path.join(SOCIAL_DATA_DIR, 'aicoin_bulletin.csv')
    if os.path.exists(file_name):
        historical_data = pd.read_csv(file_name, encoding='gb18030')
        last_update = historical_data['id'].astype(int).max()
    else:
        last_update = 1
    logger.info(f'AICOIN快讯已更新到id为{last_update}的快讯')

    def _flush(frames):
        # Append accumulated pages to the CSV; write the header only when the
        # file is being created (the original appended headerless data on the
        # break path of a fresh run).
        df = pd.concat(frames, axis=0)
        df.drop_duplicates(inplace=True)
        if os.path.exists(file_name):
            df.to_csv(file_name, index=False, header=False, mode='a', encoding='gb18030')
        else:
            df.to_csv(file_name, index=False, encoding='gb18030')

    newest_id = get_aicoin_bulletin_last_id()
    _id = newest_id
    save_count = 0
    data = []
    while _id >= last_update:
        _data = get_aicoin_bulletin_by_last_id(_id, dataframe=True)
        if _data.empty:
            break
        data.append(_data)
        _id = _data['id'].min()
        save_count += 1
        if save_count >= 50:
            logger.info(f'已经更新到{_id}')
            _flush(data)
            data = []
            save_count = 0
    # Flush whatever remains. The original lost up to 49 pages when the loop
    # exited via the while-condition (caught up to last_update) instead of
    # via the empty-page break.
    if data:
        _flush(data)

    if not os.path.exists(file_name):
        # First run fetched nothing — no file to clean up.
        return
    historical_data = pd.read_csv(file_name, encoding='gb18030')
    historical_data.dropna(how='all', inplace=True)
    historical_data.drop_duplicates(inplace=True)
    historical_data.to_csv(file_name, index=False, encoding='gb18030')
    logger.info('AICOIN快讯已更新完毕，最新id为%d' % historical_data['id'].max())


def update_social_metric(asset='bitcoin', metric=None, start_date=None, end_date=None):
    """Fetch/refresh one Sanbase social metric and persist it as a CSV.

    Parameters
    ----------
    asset : str
        Sanbase asset slug (e.g. 'bitcoin').
    metric : str
        Metric/indicator name; also used as the CSV file name and the
        non-zero-filter column on first download.
    start_date, end_date : str or None
        ISO dates. ``end_date`` defaults to today; ``start_date`` is
        overridden by the last stored date when a CSV already exists.

    Side effects: reads/writes ``DATA_DIR/social_data/sanbase/<metric>.csv``
    and logs progress. Returns None (early-returns when already up to date).
    """
    logger.info(f'获取{metric}数据')
    if end_date is None:
        end_date = datetime.datetime.now().strftime('%Y-%m-%d')
    # Build the path portably; the original hard-coded a Windows backslash
    # ('social_data\\sanbase'), which produced a wrong path on POSIX systems.
    folder_name = os.path.join(DATA_DIR, 'social_data', 'sanbase')
    os.makedirs(folder_name, exist_ok=True)

    file_name = os.path.join(folder_name, f'{metric}.csv')
    if os.path.exists(file_name):
        # Incremental update: resume from the last stored date.
        historical_df = pd.read_csv(file_name, index_col='end_date')
        start_date = historical_df.index[-1]
        df = get_indicators_via_http(indic_name=metric, asset=asset, start_date=start_date, end_date=end_date)
        # Drop today's (still-incomplete) data point if present.
        if end_date in df.index:
            df.drop(index=end_date, inplace=True)
        if start_date == df.index.max():
            # Nothing new beyond what we already have.
            return
        # Overlap by one row so the (possibly revised) last stored date is
        # replaced by the freshly fetched value.
        df = pd.concat([historical_df.iloc[:-1, ], df.loc[historical_df.index[-1]:, ]])
        df.sort_index(inplace=True)
        df.to_csv(file_name)
    else:
        # First download: fetch the full range, then trim the leading run of
        # all-zero values (period before the metric started being recorded).
        df = get_indicators_via_http(indic_name=metric, asset=asset, start_date=start_date, end_date=end_date)
        if end_date in df.index:
            df.drop(index=end_date, inplace=True)
        nonzero = df[df[metric] != 0]
        if not nonzero.empty:
            df = df.loc[nonzero.index[0]:, :]
        df.sort_index(inplace=True)
        df.to_csv(file_name)


def update_social_metrics_btc():
    """Refresh every Bitcoin social-sentiment metric reported by the API.

    Queries the list of available metrics for the 'bitcoin' asset and
    updates each one from 2015-01-01 up to today. Returns None.
    """
    logger.info('获取BTC舆情统计数据')
    for metric_name in get_social_metrics_list(asset='bitcoin'):
        update_social_metric(asset='bitcoin', metric=metric_name,
                             start_date='2015-01-01', end_date=None)


if __name__ == '__main__':
    # Script entry point: refresh AICoin bulletins and BTC social metrics.
    # The Jinse bulletin update is currently disabled.
    # update_social_data_jinse_bulletin()
    update_social_data_aicoin_bulletin()
    update_social_metrics_btc()
