from machine_lib import *
import pymysql
import pytz
import time
import json
import requests
from datetime import datetime, timedelta

# MySQL connection settings for the WQ database.
# NOTE(review): credentials are hardcoded in source — consider moving to
# environment variables or a config file.
mysql_host = "localhost"
mysql_port = 3306
mysql_user = "root"
mysql_password = "runfor1s"
mysql_db = "WQ"
# One table of data fields per region; table name format is <REGION>_D1.
table_names = ['ASI_D1', 'USA_D1', 'EUR_D1', 'GLB_D1', 'HKG_D1', 'JPN_D1', 'KOR_D1', 'TWN_D1', 'CHN_D1', 'AMR_D1']

def get_last_fetch_time():
    """Return the timestamp of the last successful fetch recorded in WQ.logs.

    Returns None when no log row exists, and a hard-coded fallback timestamp
    string when the database query itself fails.
    """
    query = "SELECT log_value FROM WQ.logs WHERE log_type='get_alpha_info' LIMIT 1"
    try:
        connection = pymysql.connect(
            host=mysql_host,
            port=mysql_port,
            user=mysql_user,
            password=mysql_password,
            database=mysql_db,
            charset='utf8',
        )
        with connection:
            with connection.cursor() as cur:
                cur.execute(query)
                row = cur.fetchone()
        return row[0] if row else None
    except pymysql.MySQLError as err:
        print(f"Error while fetching last fetch time:{err}")
        return '2024-10-01T00:00:00+00:00'

def get_sharpe_2022(s, alpha_id):
    """Return the latest yearly Sharpe ratio for *alpha_id*, or None on failure.

    Reads the last row of the alpha's yearly-stats recordset; column 6 is
    assumed to hold the Sharpe value — TODO confirm against the recordset's
    schema field.
    """
    url = f"https://api.worldquantbrain.com/alphas/{alpha_id}/recordsets/yearly-stats"
    result = request_with_retry(s, url)
    if result:
        try:
            response_json = result.json()
            sharpe_2022 = response_json["records"][-1][6]
            return float(sharpe_2022)
        # FIX: also catch IndexError (empty "records" list) and
        # TypeError/ValueError (non-numeric Sharpe, e.g. None) — previously
        # these escaped the handler and crashed the whole run.
        except (json.decoder.JSONDecodeError, KeyError, IndexError, TypeError, ValueError):
            print(f"Error parsing Sharpe ratio for alpha_id {alpha_id}")
            return None
    return None

def request_with_retry(s, url, retries=3, wait_time=30, params=None):
    """GET *url* through session *s*, retrying when rate limited.

    Honours the server-supplied ``Retry-After`` header when present (FIX:
    previously a 429 response that also carried ``Retry-After`` slept the
    fixed *wait_time* instead of the server's hint). Falls back to sleeping
    *wait_time* seconds on a bare HTTP 429. Returns the response object, or
    None when every attempt was rate limited.
    """
    for _ in range(retries):
        result = s.get(url, params=params)
        # requests exposes headers case-insensitively, so the lowercase key
        # matches any server spelling of Retry-After.
        retry_after = result.headers.get("retry-after")
        if retry_after is not None:
            time.sleep(float(retry_after))
        elif result.status_code == 429:
            print(f"API rate limit exceeded. Retrying in {wait_time} seconds...")
            time.sleep(wait_time)
        else:
            return result
    return None

def fetch_field_list(region):
    """Return all field names stored for *region* (e.g. 'USA' -> WQ.USA_D1).

    Raises ValueError for an unknown region code.
    """
    # Table names cannot be bound as SQL parameters, so validate the region
    # against the known tables before interpolating it into the identifier —
    # the old code passed arbitrary strings straight into the query.
    valid_regions = {name.split('_')[0] for name in table_names}
    if region not in valid_regions:
        raise ValueError(f"Unknown region: {region!r}")
    with pymysql.connect(host=mysql_host, port=mysql_port, user=mysql_user, password=mysql_password, database=mysql_db, charset='utf8') as conn:
        with conn.cursor() as cursor:
            cursor.execute(f"SELECT field FROM WQ.{region}_D1")
            return [row[0] for row in cursor.fetchall()]

def update_log(date_end):
    """Record *date_end* in WQ.logs as the latest successfully fetched window."""
    with pymysql.connect(host=mysql_host, port=mysql_port, user=mysql_user, password=mysql_password, database=mysql_db, charset='utf8') as conn:
        with conn.cursor() as cursor:
            log_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            # FIX: use parameterized placeholders instead of f-string
            # interpolation, which was vulnerable to quoting bugs / injection.
            cursor.execute(
                "UPDATE WQ.logs SET log_value=%s, log_time=%s WHERE log_type='get_alpha_info'",
                (date_end, log_time),
            )
        conn.commit()

def alpha_info_to_mysql():
    """Fetch newly simulated alphas from the WorldQuant BRAIN API into MySQL.

    Scans from ``date_begin`` up to "now" (UTC) in one-hour windows so each
    API query stays within pagination limits; after each window the progress
    is recorded via ``update_log`` so a later run can resume.
    """
    s = login()
    # Resume point for the incremental fetch.
    # date_begin = get_last_fetch_time()  # TODO(review): re-enable once the logs bootstrap is verified
    date_begin = '2024-10-01T00:00:00+00:00'

    if not date_begin:
        date_begin = '2024-10-01T00:00:00+00:00'

    # Region code ('ASI', 'USA', ...) -> list of data fields for that region.
    all_field_list = {region.split('_')[0]: fetch_field_list(region.split('_')[0]) for region in table_names}

    base_url = "https://api.worldquantbrain.com/users/self/alphas"
    date_now = datetime.now().replace(microsecond=0).astimezone(pytz.utc)
    total_count = 0

    while True:
        date_begin_dt = datetime.fromisoformat(date_begin).astimezone(pytz.utc)
        if date_begin_dt >= date_now:
            break
        # FIX: keep the window end in UTC. The old code appended a hard-coded
        # '+08:00' offset to a UTC wall time, making date_end *earlier* than
        # date_begin and sending the loop walking backwards forever.
        date_end = (date_begin_dt + timedelta(hours=1)).strftime('%Y-%m-%dT%H:%M:%S+00:00')

        params = {"limit": 100, "dateCreated>": date_begin, "dateCreated<": date_end, "order": "dateCreated"}
        response = request_with_retry(s, base_url, retries=5, params=params)
        if response is None:
            # FIX: request_with_retry returns None after exhausting retries;
            # the old code called .json() on it and crashed with AttributeError.
            print("Request failed after retries; skipping this window.")
            date_begin = date_end
            continue
        result = response.json()

        print(json.dumps(result, indent=2))
        if 'count' not in result or result['count'] == 0:
            print("No new data found for the current period.")
            # FIX: advance the window before continuing. The old code left
            # date_begin unchanged, re-querying the same empty window forever.
            update_log(date_end)
            date_begin = date_end
            continue

        count = result['count']
        total_count += count
        print(f"total_count:{total_count}")

        # Page through the window's results 100 at a time.
        for offset in range(0, count, 100):
            print(f"offset: {offset}")
            params['offset'] = offset
            result_page = request_with_retry(s, base_url, params=params)
            if not result_page:
                print("No results in offset stage!")
                continue
            results = result_page.json()["results"]
            print(f"API Response: {json.dumps(results, indent=2)}")
            print(f"Fetched {len(results)} results.")
            # Distinct name so the window-level `result` dict is not shadowed.
            for alpha in results:
                print("handle_alpha_data>>>")
                handle_alpha_data(alpha, all_field_list, s)

        update_log(date_end)
        date_begin = date_end

    print(f"Total count: {total_count}")

def handle_alpha_data(result, all_field_list, s):
    """Upsert one alpha record from the API *result* dict into WQ.simulated_alpha.

    Skips records without a 'regular' expression. *all_field_list* maps region
    code -> list of known data fields; *s* is the authenticated API session
    used to fetch the yearly Sharpe check.
    """
    alpha_id = result['id']
    if 'regular' not in result:
        return
    exp = result['regular']['code']
    settings = result['settings']
    region = settings['region']
    field_list = all_field_list[region]
    # Longest known data field that occurs in the expression, if any.
    matches = [field for field in field_list if field in exp]
    original_field = max(matches, key=len) if matches else None

    sharpe_2022 = get_sharpe_2022(s, alpha_id)
    check_status = 'FAIL' if sharpe_2022 is None or sharpe_2022 < 1 else 'PENDING'

    alpha_info = result['is']
    # Default to 1 when a check is absent from the in-sample report.
    # FIX(consistency): read the checks from alpha_info instead of re-indexing
    # result['is'] that it already holds.
    low_2y_sharpe = next((item['value'] for item in alpha_info['checks'] if item['name'] == 'LOW_2Y_SHARPE'), 1)
    is_ladder_sharpe = next((item['value'] for item in alpha_info['checks'] if item['name'] == 'IS_LADDER_SHARPE'), 1)

    sql = """INSERT INTO WQ.simulated_alpha (id, dateCreated, longCount, shortCount, turnover, returns, drawdown, margin, fitness, sharpe, region, universe, delay, decay, neutralization, truncation, pasteurization, unitHandling, visualization, exp, check_status, original_field, low_2y_sharpe, is_ladder_sharpe)
             VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
             ON DUPLICATE KEY UPDATE dateCreated=%s, longCount=%s, shortCount=%s, turnover=%s, returns=%s, drawdown=%s, margin=%s, fitness=%s, sharpe=%s, region=%s, universe=%s, delay=%s, decay=%s, neutralization=%s, truncation=%s, pasteurization=%s, unitHandling=%s, visualization=%s, exp=%s, check_status=%s, original_field=%s, low_2y_sharpe=%s, is_ladder_sharpe=%s"""

    # FIX(DRY): build the 23 non-key values once and reuse them for both the
    # VALUES clause and the ON DUPLICATE KEY UPDATE clause — the old code
    # duplicated the tuple by hand, an easy place for the halves to drift.
    row = (
        result['dateCreated'], alpha_info['longCount'], alpha_info['shortCount'], alpha_info['turnover'],
        alpha_info['returns'], alpha_info['drawdown'], alpha_info['margin'], alpha_info['fitness'], alpha_info['sharpe'],
        region, settings['universe'], settings['delay'], settings['decay'], settings['neutralization'],
        settings['truncation'], settings['pasteurization'], settings['unitHandling'], settings['visualization'], exp,
        check_status, original_field, low_2y_sharpe, is_ladder_sharpe,
    )
    params = (alpha_id, *row, *row)

    print(f"Executing SQL:{sql}")
    print(f"With params: {params}")

    with pymysql.connect(host=mysql_host, port=mysql_port, user=mysql_user, password=mysql_password, database=mysql_db, charset='utf8') as conn:
        with conn.cursor() as cursor:
            cursor.execute(sql, params)
            conn.commit()
            print(f"Rows affected: {cursor.rowcount}")

if __name__ == "__main__":
    # Script entry point: run the incremental alpha-info sync.
    alpha_info_to_mysql()