import uuid
from datetime import datetime
from tokenize import String
import json

import pytz

from python_depend.http_client_utils import http_post
from python_depend.mysql_pool import MySQLPool

# US Eastern time zone (used to convert timestamps to trading-day dates)
eastern = pytz.timezone('America/New_York')
# CNN Fear & Greed Index data source URL
crawler_path = "https://production.dataviz.cnn.io/index/fearandgreed/graphdata"

def fetch_data():
    """Fetch the CNN Fear & Greed graph data through the internal crawler service.

    Returns:
        dict: the parsed JSON payload on success, or {} on any failure
        (transport error, missing content, malformed JSON) so that callers
        such as parse_and_save can always treat the result as a dict.
    """
    url = "http://iwc-index-search-engine:8993/search_engine/v1/page_content"

    headers = {
        "X-Arsenal-Auth": "dolphinscheduler-data-product",
        "Content-Type": "application/json",
    }
    body = {
        "app_id": "dolphinscheduler-data-product",
        "channel": "web",
        "trace_id": str(uuid.uuid1()),
        "timeout": 10000,  # service-side timeout in milliseconds
        "uid_list": [
            {
                "url": crawler_path
            }
        ]
    }
    data = http_post(url=url, params=body, header=headers, timeout=120)
    print(f"data is :{data}")
    # FIX: previously fell through and returned None when http_post failed,
    # which crashed parse_and_save with AttributeError on None.get(...).
    if data is None:
        return {}
    # FIX: `or [{}]` also guards against data["data"] being an empty list,
    # which would have raised IndexError with a plain .get(..., [{}]) default.
    content = (data.get("data") or [{}])[0].get("content")
    if not content:
        print("未获取到数据")
        return {}
    try:
        return json.loads(content)
    except json.JSONDecodeError as e:
        print(f"JSON解析失败: {e}")
        return {}

def parse_and_save(data):
    # 插入最新一条
    fg = data.get("fear_and_greed", {})
    score = fg.get("score")
    rating = fg.get("rating")
    timestamp = fg.get("timestamp")
    rows = []
    if score and rating and timestamp:
        dt_utc = datetime.fromisoformat(timestamp.replace('Z', '+00:00'))
        dt_eastern = dt_utc.astimezone(eastern)
        date_str = dt_eastern.strftime('%Y-%m-%d')
        rows.append((str(score), date_str, rating))
    # 批量插入历史数据
    historical = data.get("fear_and_greed_historical", {}).get("data", [])
    for item in historical:
        x = item.get("x")
        y = item.get("y")
        rating = item.get("rating")
        if x and y and rating:
            # x为毫秒时间戳
            dt_utc = datetime.utcfromtimestamp(x / 1000).replace(tzinfo=pytz.utc)
            dt_eastern = dt_utc.astimezone(eastern)
            date_str = dt_eastern.strftime('%Y-%m-%d')
            rows.append((str(y), date_str, rating))
    if not rows:
        print("无可插入数据")
        return
    sql = """
        INSERT INTO ads_market_fear_and_greed (fear_greed_value, data_time, fear_greed_level)
        VALUES (%s, %s, %s)
        ON DUPLICATE KEY UPDATE fear_greed_value=VALUES(fear_greed_value)
                , fear_greed_level=VALUES(fear_greed_level), update_time=NOW()
    """
    mysql_pool = MySQLPool.get_pool('feature_quote_data')
    try:
        connection = mysql_pool.connection()
        with connection.cursor() as cursor:
            cursor.executemany(sql, rows)
            connection.commit()
            print(f"成功写入 {len(rows)} 条数据")
    except Exception as e:
        print(f"写入失败: {e}")

def execute_job():
    """Run one full crawl-and-persist cycle for the Fear & Greed index."""
    payload = fetch_data()
    parse_and_save(payload)

if __name__ == "__main__":
    MySQLPool.initialize()
    try:
        execute_job()
    finally:
        # FIX: always release pooled connections, even when the job raises;
        # previously close_all() was skipped on any exception.
        MySQLPool.close_all()