import os
import json
from datetime import datetime, timedelta
from tqdm import tqdm

# Directory layout: preprocessed inputs and analysis outputs.
preprocessed_dir = 'fujian/fujian1/preprocessed'
date_test_dir = 'fujian/fujian1/date_test'
details_dir = os.path.join(date_test_dir, 'details')
for output_dir in (date_test_dir, details_dir):
    os.makedirs(output_dir, exist_ok=True)

# Collect the JSON files to analyse.
json_files = [name for name in os.listdir(preprocessed_dir) if name.endswith('.json')]

# Earliest and latest dates seen across every file (filled in by the loop below).
global_earliest = None
global_latest = None

# One summary row per JSON file, plus a flag that stays True only if
# no group has any missing date.
all_dates_data = []
all_groups_complete = True

# Analyse each JSON file: compute the group's date range and any gaps in coverage.
for json_file in tqdm(json_files, desc="Processing JSON files"):
    file_path = os.path.join(preprocessed_dir, json_file)
    # Explicit encoding: JSON files are UTF-8 regardless of platform default.
    with open(file_path, 'r', encoding='utf-8') as f:
        records = json.load(f)

    # Guard: an empty file has no dates (the original code crashed here with
    # IndexError on dates[0]); skip it rather than abort the whole run.
    if not records:
        continue

    # Parse the record dates into datetime objects, sorted ascending.
    dates = sorted(datetime.strptime(record['date'], '%Y-%m-%d') for record in records)

    earliest_date = dates[0]
    latest_date = dates[-1]

    # Fold this file's range into the global earliest/latest dates.
    if global_earliest is None or earliest_date < global_earliest:
        global_earliest = earliest_date
    if global_latest is None or latest_date > global_latest:
        global_latest = latest_date

    # Check continuity: every consecutive pair must be exactly one day apart.
    # Duplicated dates also mark the group as non-continuous (no missing days
    # are emitted for them), matching the original behaviour.
    continuous = True
    missing_dates = []
    for prev_date, curr_date in zip(dates, dates[1:]):
        if curr_date != prev_date + timedelta(days=1):
            continuous = False
            gap_days = (curr_date - prev_date).days
            missing_dates += [
                (prev_date + timedelta(days=offset)).strftime('%Y-%m-%d')
                for offset in range(1, gap_days)
            ]

    # Group identity (seller/product/warehouse) comes from the first record;
    # presumably all records in one file share it — TODO confirm upstream.
    seller = records[0]['seller_no']
    product = records[0]['product_no']
    warehouse = records[0]['warehouse_no']

    # One summary line per group: identity, range, continuity verdict.
    all_dates_data.append(
        f"{seller}, {product}, {warehouse}, {earliest_date.date()}, "
        f"{latest_date.date()}, {'Yes' if continuous else 'No'}"
    )

    # Any gap in any group makes the overall run incomplete.
    if missing_dates:
        all_groups_complete = False

    # Persist this group's missing dates (possibly an empty list) to its own file.
    details_output_path = os.path.join(details_dir, f"date_details_{seller}_{product}_{warehouse}.json")
    with open(details_output_path, 'w', encoding='utf-8') as details_file:
        json.dump(missing_dates, details_file, indent=4)

# Write the per-group summary lines to all_dates_data.txt.
all_dates_data_path = os.path.join(date_test_dir, 'all_dates_data.txt')
with open(all_dates_data_path, 'w', encoding='utf-8') as all_data_file:
    all_data_file.write('\n'.join(all_dates_data))

# Write the global date range and the completeness verdict to all_dates_conclusion.txt.
conclusion_output_path = os.path.join(date_test_dir, 'all_dates_conclusion.txt')
with open(conclusion_output_path, 'w', encoding='utf-8') as conclusion_file:
    # Guard: if no JSON files were processed, the globals are still None and
    # calling .date() on them would raise AttributeError.
    if global_earliest is not None and global_latest is not None:
        conclusion_file.write(f"Begin Date: {global_earliest.date()}\n")
        conclusion_file.write(f"End Date: {global_latest.date()}\n")
    else:
        conclusion_file.write("No JSON files were processed; no date range available.\n")

    # Record whether every group had a gap-free date range.
    if all_groups_complete:
        conclusion_file.write("All groups have continuous dates and no missing dates.\n")
        print("All groups have continuous dates and no missing dates.")
    else:
        conclusion_file.write("Some groups have missing dates.\n")
        print("Some groups have missing dates.")

print("Date analysis completed and saved.")
