import uuid
import re
import json
from datetime import datetime
from python_depend.http_client_utils import http_post
from python_depend.mysql_pool import MySQLPool
from python_depend.time_urtils import get_current_date, get_current_time

# AAII Investor Sentiment Survey page used as the crawl target.
crawler_path = "https://www.aaii.com/sentimentsurvey"

def fetch_data():
    """Fetch the AAII sentiment survey page text via the internal search-engine service.

    Returns:
        str: the raw page content on success, or an empty string on any
        failure. (The original returned ``{}`` on failure, which crashed the
        downstream regex parser — ``re`` requires str/bytes, not dict.)
    """
    url = "http://iwc-index-search-engine:8993/search_engine/v1/page_content"

    headers = {
        "X-Arsenal-Auth": "dolphinscheduler-data-product",
        "Content-Type": "application/json",
    }
    body = {
        "app_id": "dolphinscheduler-data-product",
        "channel": "web",
        "trace_id": str(uuid.uuid1()),
        "timeout": 10000,  # service-side timeout, milliseconds
        "uid_list": [
            {
                "url": crawler_path
            }
        ]
    }
    data = http_post(url=url, params=body, header=headers, timeout=120)
    print(f"data is :{data}")
    if data is not None:
        # Guard against a missing or empty "data" list before indexing —
        # the original `data.get("data", [{}])[0]` raised IndexError on `[]`.
        items = data.get("data") or [{}]
        content = items[0].get("content")
        if not content:
            print("未获取到数据")
            return ""
        return content
    return ""

def _pct_to_float(value):
    """Convert a percentage string such as '37.5%' to a float (37.5)."""
    return float(value.rstrip('%'))


def _to_iso_date(date_str):
    """Convert a US-style 'M/D/YYYY' date string to ISO 'YYYY-MM-DD'."""
    return datetime.strptime(date_str, '%m/%d/%Y').strftime('%Y-%m-%d')


def _parse_high(pattern, text):
    """Parse one '1-Year ... High' entry into {'value', 'date'}; {} if absent."""
    match = pattern.search(text)
    if not match:
        return {}
    value, date_str = match.groups()
    return {
        'value': _pct_to_float(value),
        'date': _to_iso_date(date_str),
    }


def parse_aaii_investor_sentiment(text):
    """Parse AAII sentiment survey page text into a structured dict.

    Args:
        text: raw page text containing weekly readings, historical averages,
            and the 1-year bullish/neutral/bearish highs.

    Returns:
        dict with keys 'weekly_data' (list of per-week dicts with 'date',
        'bullish', 'neutral', 'bearish'), 'historical_averages',
        'bullish_high', 'neutral_high', 'bearish_high'. Sections missing
        from the text yield an empty list/dict rather than raising.
    """
    print(f"text is :{text}")
    weekly_data_pattern = re.compile(r'(\d{1,2}/\d{1,2}/\d{4})\s+([\d.]+%)\s+([\d.]+%)\s+([\d.]+%)')
    historical_avg_pattern = re.compile(r'Historical Averages\s+([\d.]+%)\s+([\d.]+%)\s+([\d.]+%)')
    # NOTE(review): only the bullish pattern expects a trailing colon after
    # "High" — the neutral/bearish patterns do not. Confirm against the live
    # page markup; patterns kept byte-identical to preserve behavior.
    bullish_high_pattern = re.compile(r'1-Year Bullish High:\s+([\d.]+%)\s+Week Ending (\d{1,2}/\d{1,2}/\d{4})')
    neutral_high_pattern = re.compile(r'1-Year Neutral High\s+([\d.]+%)\s+Week Ending (\d{1,2}/\d{1,2}/\d{4})')
    bearish_high_pattern = re.compile(r'1-Year Bearish High\s+([\d.]+%)\s+Week Ending (\d{1,2}/\d{1,2}/\d{4})')

    # Weekly rows: one dict per "date bullish% neutral% bearish%" match.
    weekly_data = [
        {
            'date': _to_iso_date(date_str),
            'bullish': _pct_to_float(bullish),
            'neutral': _pct_to_float(neutral),
            'bearish': _pct_to_float(bearish),
        }
        for date_str, bullish, neutral, bearish in
        (m.groups() for m in weekly_data_pattern.finditer(text))
    ]

    # Long-run average sentiment readings, if present.
    historical_avg = {}
    match = historical_avg_pattern.search(text)
    if match:
        bullish, neutral, bearish = match.groups()
        historical_avg = {
            'bullish': _pct_to_float(bullish),
            'neutral': _pct_to_float(neutral),
            'bearish': _pct_to_float(bearish),
        }

    return {
        'weekly_data': weekly_data,
        'historical_averages': historical_avg,
        'bullish_high': _parse_high(bullish_high_pattern, text),
        'neutral_high': _parse_high(neutral_high_pattern, text),
        'bearish_high': _parse_high(bearish_high_pattern, text),
    }

def store_data(parsed_data):
    """Upsert the parsed sentiment payload into `ads_Investor_sentiment_index`.

    Each section is serialized to a JSON string column; conflicts on the
    table's unique key update the JSON columns and `update_time`. Errors are
    logged (best-effort, matching the rest of this job) rather than raised.

    Args:
        parsed_data: dict produced by ``parse_aaii_investor_sentiment``.
    """
    print(f"parsed_data is :{parsed_data}")
    mysql_pool = MySQLPool.get_pool('feature_quote_data')

    insert_query = (
        "INSERT INTO ads_Investor_sentiment_index (weekly_data, historical_averages, year_bullish_high, year_neutral_high, year_bearish_high, data_time, create_time) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s) "
        "ON DUPLICATE KEY UPDATE weekly_data=VALUES(weekly_data), historical_averages=VALUES(historical_averages), "
        "year_bullish_high=VALUES(year_bullish_high), year_neutral_high=VALUES(year_neutral_high), year_bearish_high=VALUES(year_bearish_high), update_time=NOW()"
    )

    # ensure_ascii=False keeps any non-ASCII text readable in the DB.
    weekly_data = json.dumps(parsed_data["weekly_data"], ensure_ascii=False)
    historical_averages = json.dumps(parsed_data["historical_averages"], ensure_ascii=False)
    year_bullish_high = json.dumps(parsed_data["bullish_high"], ensure_ascii=False)
    year_neutral_high = json.dumps(parsed_data["neutral_high"], ensure_ascii=False)
    year_bearish_high = json.dumps(parsed_data["bearish_high"], ensure_ascii=False)
    data_time = get_current_date()
    create_time = get_current_time()

    connection = None
    try:
        connection = mysql_pool.connection()
        with connection.cursor() as cursor:
            cursor.execute(insert_query, (
                weekly_data,
                historical_averages,
                year_bullish_high,
                year_neutral_high,
                year_bearish_high,
                data_time,
                create_time
            ))
        connection.commit()
        print("[SUCCESS] 写入数据记录success")
    except Exception as e:
        print(f"[ERROR] 执行SQL异常: {e}")
    finally:
        # Original leaked the pooled connection when execute/commit raised;
        # always return it to the pool. The `with` block already closes the
        # cursor, so the explicit cursor.close() was redundant.
        if connection is not None:
            connection.close()

def execute_job():
    """Run one end-to-end cycle: fetch the survey page, parse it, persist it."""
    raw_text = fetch_data()
    sentiment = parse_aaii_investor_sentiment(raw_text)
    store_data(sentiment)
    print("Data successfully stored.")


if __name__ == "__main__":
    # Initialize the shared MySQL connection pool before the job runs,
    # and release all pooled connections once it finishes.
    MySQLPool.initialize()
    execute_job()
    MySQLPool.close_all()