import requests
from bs4 import BeautifulSoup
import pandas as pd
from datetime import datetime, timedelta
import json
from typing import List, Dict, Optional, Union
import logging

# Configure module-level logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Shared utility constants
# Browser-like User-Agent header used as the default for HTTP requests below.
DEFAULT_HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'
}

from datetime import date as datetime_date

def get_previous_weekday(target_weekday: int) -> datetime_date:
    """Return the most recent date (today or earlier) falling on the given weekday.

    Args:
        target_weekday: Day of week, 0 (Monday) through 6 (Sunday).

    Returns:
        The closest past date with that weekday; today itself if it matches.

    Raises:
        ValueError: If target_weekday is outside the 0-6 range.
    """
    if target_weekday < 0 or target_weekday > 6:
        raise ValueError("target_weekday必须是0（周一）到6（周日）之间的整数")

    today = datetime.now().date()
    # Days to step back; 0 when today already has the requested weekday.
    offset = (today.weekday() - target_weekday) % 7
    return today - timedelta(days=offset)

def fetch_ssq_data(url: str, headers: Optional[Dict] = None, timeout: int = 10) -> Optional[List[Dict]]:
    """Fetch double-color-ball draw records as JSON from the given URL.

    Args:
        url: Endpoint expected to return a JSON object with a 'result' list.
        headers: Optional HTTP headers; falls back to DEFAULT_HEADERS when
            omitted or falsy.
        timeout: Request timeout in seconds.

    Returns:
        The 'result' list from the payload, or None on any failure
        (network error, HTTP error status, bad JSON, missing/empty 'result').
    """
    request_headers = headers or DEFAULT_HEADERS

    try:
        response = requests.get(url, headers=request_headers, timeout=timeout)
        response.raise_for_status()
        payload = response.json()
    except requests.RequestException as e:
        logger.error(f"请求数据时发生异常：{e}")
        return None
    except json.JSONDecodeError as e:
        logger.error(f"JSON解析失败：{e}")
        return None

    result = payload.get('result')
    if not result:
        logger.warning("API返回数据格式异常")
        return None
    return result

def process_ssq_data(raw_data: List[Dict]) -> pd.DataFrame:
    """Convert raw draw records into a tidy, typed DataFrame.

    Each record's comma-separated 'red' string is split into six integer
    columns red1..red6 and 'blue' is cast to int.

    Args:
        raw_data: List of dicts containing at least 'code', 'date',
            'red' and 'blue' keys.

    Returns:
        DataFrame with columns code, date, red1..red6, blue; an empty
        DataFrame on empty input or any processing error (error is logged).
    """
    if not raw_data:
        return pd.DataFrame()

    try:
        frame = pd.DataFrame(raw_data)[["code", "date", "red", "blue"]]

        # Split "01,05,..." into six integer red-ball columns red1..red6.
        rename_map = {i: f'red{i + 1}' for i in range(6)}
        reds = (frame["red"]
                .str.split(',', expand=True)
                .rename(columns=rename_map)
                .astype(int))

        frame = pd.concat([frame.drop(columns=["red"]), reds], axis=1)
        frame["blue"] = frame["blue"].astype(int)

        # Fix the output column order.
        ordered = ['code', 'date',
                   'red1', 'red2', 'red3', 'red4', 'red5', 'red6',
                   'blue']
        return frame.reindex(columns=ordered)

    except Exception as e:
        logger.error(f"数据处理失败：{e}")
        return pd.DataFrame()

def get_latest_ssq_data(days: int = 30) -> pd.DataFrame:
    """Fetch double-color-ball draws from the last `days` days.

    Draws occur on Tuesday, Thursday and Sunday; the query window ends at the
    most recent draw day and extends `days` days back from it.

    Args:
        days: Size of the lookback window in days.

    Returns:
        Processed DataFrame of draws, or an empty DataFrame on failure.
    """
    # Fix: the original had two consecutive docstrings — the second was a
    # dead string-expression statement; merged into the single docstring above.
    try:
        # Most recent draw date = latest of the previous Tue/Thu/Sun.
        end_date = max(
            get_previous_weekday(1),  # Tuesday
            get_previous_weekday(3),  # Thursday
            get_previous_weekday(6),  # Sunday
        )

        end_str = end_date.strftime("%Y-%m-%d")
        start_str = (end_date - timedelta(days=days)).strftime("%Y-%m-%d")

        url = (
            "https://www.cwl.gov.cn/cwl_admin/front/cwlkj/search/kjxx/findDrawNotice"
            f"?name=ssq&dayStart={start_str}&dayEnd={end_str}"
            "&pageNo=1&pageSize=10000&systemType=PC"
        )

        raw_data = fetch_ssq_data(url)
        if raw_data is None:
            return pd.DataFrame()
        return process_ssq_data(raw_data)

    except Exception as e:
        logger.error(f"获取数据失败：{e}")
        return pd.DataFrame()

def save_to_csv(df: pd.DataFrame, filepath: str) -> bool:
    """Write `df` to `filepath` as CSV, without the index column.

    Args:
        df: DataFrame to persist.
        filepath: Destination CSV path.

    Returns:
        True on success; False if writing failed (the error is logged).
    """
    try:
        df.to_csv(filepath, index=False)
    except Exception as e:
        logger.error(f"保存文件失败：{e}")
        return False
    return True

if __name__ == "__main__":
    # Example usage: pull a large historical window and persist it to CSV.
    lottery_df = get_latest_ssq_data(days=5000)
    if lottery_df.empty:
        print("未能获取有效数据")
    else:
        save_to_csv(lottery_df, "lottery_data.csv")
        print("数据获取并保存成功")
        print(lottery_df.head())
