import os
import json
import time
import pandas as pd
from datetime import datetime

try:
    import tushare as ts
except ImportError:
    ts = None
try:
    import akshare as ak
except ImportError:
    ak = None

DATA_DIR = '../data'

def fetch_tushare_daily(code, start_date, end_date, token):
    """Fetch daily OHLCV bars for one stock from the tushare pro API.

    Args:
        code: tushare stock code, e.g. '000001.SZ'.
        start_date: inclusive lower bound, 'YYYYMMDD'.
        end_date: inclusive upper bound, 'YYYYMMDD'.
        token: tushare API token.

    Returns:
        DataFrame with columns date/open/high/low/close/volume,
        'date' as 'YYYY-MM-DD' strings, sorted ascending by date.

    Raises:
        ImportError: if the tushare package is not installed.
    """
    if ts is None:
        raise ImportError('tushare 未安装')
    ts.set_token(token)
    api = ts.pro_api()
    raw = api.daily(ts_code=code, start_date=start_date, end_date=end_date)
    # Normalize tushare's column names to the project-wide schema.
    frame = raw.rename(columns={'ts_code': 'code', 'trade_date': 'date', 'vol': 'volume'})
    frame['date'] = pd.to_datetime(frame['date']).dt.strftime('%Y-%m-%d')
    wanted = ['date', 'open', 'high', 'low', 'close', 'volume']
    return frame[wanted].sort_values('date')

def fetch_akshare_daily(code, start_date, end_date):
    """Fetch daily OHLCV bars for one stock via akshare.

    akshare's daily endpoint returns the full listing history, so the
    date range is filtered locally after download.

    Args:
        code: akshare symbol (e.g. 'sh600000').
        start_date: inclusive lower bound; 'YYYYMMDD' or 'YYYY-MM-DD'.
        end_date: inclusive upper bound; 'YYYYMMDD' or 'YYYY-MM-DD'.

    Returns:
        DataFrame with columns date/open/high/low/close/volume,
        'date' as 'YYYY-MM-DD' strings, sorted ascending by date.

    Raises:
        ImportError: if the akshare package is not installed.
    """
    if ak is None:
        raise ImportError('akshare 未安装')
    df = ak.stock_zh_a_daily(symbol=code)
    df = df.reset_index()
    df['date'] = pd.to_datetime(df['date']).dt.strftime('%Y-%m-%d')
    # BUG FIX: callers pass bounds as 'YYYYMMDD' (see update_all_data), but
    # df['date'] is 'YYYY-MM-DD'; lexically '2020-01-01' < '20200101', so the
    # old comparison silently produced an empty frame. Normalize the bounds
    # to the same 'YYYY-MM-DD' form before comparing.
    lo = pd.to_datetime(start_date).strftime('%Y-%m-%d')
    hi = pd.to_datetime(end_date).strftime('%Y-%m-%d')
    df = df[(df['date'] >= lo) & (df['date'] <= hi)]
    df = df[['date', 'open', 'high', 'low', 'close', 'volume']]
    return df.sort_values('date')

def get_all_codes(source='tushare', tushare_token=None):
    """Return the list of all listed A-share codes for the given source.

    Results are cached on disk under DATA_DIR and read back on subsequent
    calls, so the remote API is hit at most once per source.

    Args:
        source: 'tushare' (codes like '000001.SZ') or 'akshare'
            (codes like '000001').
        tushare_token: API token, required only when source == 'tushare'
            and no cache exists yet.

    Returns:
        list[str] of stock codes in the source's native format.

    Raises:
        ImportError: if the selected source's package is not installed.
        ValueError: for an unknown source.
    """
    # Cache writes below need the directory to exist even if
    # update_all_data() was never called first.
    os.makedirs(DATA_DIR, exist_ok=True)
    # BUG FIX: the cache used to live in a single 'all_codes.json' shared by
    # both sources, so a cache written by tushare ('000001.SZ') was served
    # verbatim to akshare callers (which expect '000001'). Key the cache by
    # source instead.
    cache_file = os.path.join(DATA_DIR, f'all_codes_{source}.json')
    # Prefer the local cache; hit the API only on a cache miss.
    if os.path.exists(cache_file):
        with open(cache_file, 'r', encoding='utf-8') as f:
            return json.load(f)
    if source == 'tushare':
        if ts is None:
            raise ImportError('tushare 未安装')
        ts.set_token(tushare_token)
        pro = ts.pro_api()
        df = pro.stock_basic(exchange='', list_status='L', fields='ts_code')
        codes = df['ts_code'].tolist()
    elif source == 'akshare':
        if ak is None:
            raise ImportError('akshare 未安装')
        codes = ak.stock_info_a_code_name()['code'].tolist()
    else:
        raise ValueError('不支持的数据源')
    with open(cache_file, 'w', encoding='utf-8') as f:
        json.dump(codes, f)
    return codes

def update_all_data(start_date, end_date, source='tushare', tushare_token=None):
    """Download daily bars for every listed A-share and write one CSV each.

    A code whose CSV already exists under DATA_DIR is skipped, so an
    interrupted run can simply be restarted and resumes where it left off.
    Per-code failures are logged and do not abort the batch. On the
    tushare source the loop sleeps periodically to stay under the API
    rate limit.

    Args:
        start_date: inclusive lower bound, 'YYYYMMDD'.
        end_date: inclusive upper bound, 'YYYYMMDD'.
        source: 'tushare' or 'akshare'.
        tushare_token: API token, used only for the tushare source.
    """
    os.makedirs(DATA_DIR, exist_ok=True)
    codes = get_all_codes(source, tushare_token)
    print(f'共获取到 {len(codes)} 只股票，开始批量下载...')
    processed = 0
    for code in codes:
        out_path = os.path.join(DATA_DIR, f'{code}.csv')
        # Resume support: never re-download a file that is already there.
        if os.path.exists(out_path):
            print(f'已存在，跳过 {code}')
            continue
        print(f'更新 {code} 数据...')
        try:
            if source == 'tushare':
                frame = fetch_tushare_daily(code, start_date, end_date, tushare_token)
            elif source == 'akshare':
                frame = fetch_akshare_daily(code, start_date, end_date)
            else:
                raise ValueError('不支持的数据源')
            frame.to_csv(out_path, index=False)
            print(f'已保存到 {out_path}')
        except Exception as e:
            # Best-effort batch: log the failure and move on; the missing
            # CSV means the code will be retried on the next run.
            print(f'更新 {code} 失败: {e}')
        processed += 1
        # Throttle: pause after every 50 attempted downloads (tushare only).
        if source == 'tushare' and processed % 50 == 0:
            print('达到50只，限速休眠61秒...')
            time.sleep(61)

if __name__ == '__main__':
    # Batch-update daily data for all A-shares.
    # SECURITY FIX: the tushare API token used to be hard-coded in source
    # (a leaked secret). Read it from the environment instead; fail fast
    # with a clear message when it is missing.
    token = os.environ.get('TUSHARE_TOKEN')
    if not token:
        raise SystemExit('请先设置环境变量 TUSHARE_TOKEN')
    update_all_data('20200101', '20241231', source='tushare', tushare_token=token)
