import datetime
import time
import requests
from bs4 import BeautifulSoup
import pandas as pd
import yaml


# Module-level accumulator for scraped rows, shared via `global` in
# scrape_lottery_data; the first entry is the CSV header row.
data = [['draw_date','red1','red2','red3','red4','red5','red6','blue']]
def load_config(config_path):
    """Read the YAML file at *config_path* and return its parsed contents."""
    with open(config_path, 'r') as fh:
        parsed = yaml.safe_load(fh)
    return parsed
    
def init_config():
    """Load the SSQ scraper settings and derive the initial fetch date.

    Returns a ``(url, output_csv_path, current_date)`` tuple, where
    ``current_date`` is the ``"year=<end_year>-12-31"`` query fragment used
    for the first request.
    """
    scraper_cfg = load_config('config/ssq_config.yaml')['scraper']
    first_date = f"year={scraper_cfg['end_year']}-12-31"
    return scraper_cfg['url'], scraper_cfg['output_file'], first_date
def scrape_lottery_data(url, output_csv_path, current_date):
    """Scrape the historical SSQ draw data page by page and save it as CSV.

    :param url: base API URL; the date fragment is appended directly to it
    :param output_csv_path: path of the CSV file to write
    :param current_date: date fragment for the first request
                         (``"year=YYYY-MM-DD"`` as produced by init_config)
    """
    global data  # accumulator defined at module level; first row is the header

    # The original implementation recursed once per page, which risks hitting
    # Python's recursion limit (~1000) on a long history; iterate instead.
    while True:
        full_fetch_url = f"{url}{current_date}"
        response = requests.get(full_fetch_url, timeout=30)
        response.raise_for_status()  # fail loudly on HTTP errors before .json()
        res_data = response.json()['result']['data']

        if not res_data:
            # No more pages: dump everything collected so far.
            print(f"当前双色球截止到今天，历史期数为{len(data)-1}期")
            df = pd.DataFrame(data[1:], columns=data[0])  # 第一行是表头
            df.to_csv(output_csv_path, index=False)
            print(f"数据保存路径为： {output_csv_path}")
            return

        for item in res_data:
            # One row: draw date followed by the six red balls and the blue ball.
            row = [item['preDrawTime']]
            row.extend(item['preDrawCode'].split(','))
            data.append(row)

        # Continue from the day before the oldest draw on this page.
        last_date = res_data[-1]['preDrawTime'].split(' ')[0]
        prev_day = datetime.datetime.strptime(last_date, '%Y-%m-%d') - datetime.timedelta(days=1)
        # Bug fix: the original recursion passed the raw datetime object, which
        # an f-string renders as "YYYY-MM-DD HH:MM:SS"; format it explicitly.
        current_date = prev_day.strftime('%Y-%m-%d')
        # NOTE(review): the first request uses "year=YYYY-MM-DD" while follow-up
        # requests use a bare "YYYY-MM-DD" — confirm against the API which form
        # (or prefix) it actually expects.

def start_scrapy():
    """Entry point: read the config and run the full scrape."""
    base_url, csv_path, first_date = init_config()
    scrape_lottery_data(base_url, csv_path, first_date)


def scrape_ssq_data(url, output_csv_path):
    """Scrape with a caller-supplied URL and output path.

    The start date is still taken from the config file; the configured URL
    and output path are ignored in favour of the arguments.
    """
    *_, first_date = init_config()
    scrape_lottery_data(url, output_csv_path, first_date)

if __name__ == '__main__':
    # Run the full scrape when executed as a script.
    start_scrapy()