#中科
import requests
import time
import os 
import datetime
import re
import logging

# Logging: execution.log records files whose paths do not match the expected
# layout (see the __main__ block below).
logging.basicConfig(filename='execution.log', level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
# Base URL of the upload server, e.g. "http://host:port" — empty here, must
# be filled in before running.  TODO(review): confirm deployment value.
SERVER_ADDRESS = ''
#LOCAL_DIRECTORY = "D://JSONData//H_SCAN"
# Root of the local data tree; expected layout is <year>/<month>/<day>/...
LOCAL_DIRECTORY = "D://JSONData"
# Identifier of this client; used as the first path component on the server.
CLIENT_NAME = "123456789"
# Checkpoint file holding the last successfully synced date as YYYYMMDD.
file_path = "current_date.txt"

def upload_file_with_auth(url, username, password, files_to_sync):
    """Upload every file in ``files_to_sync`` to ``url`` with HTTP basic auth.

    Each file is POSTed in 800 KB chunks under a normalized server-side path
    ``CLIENT_NAME/<y/m/d>/<group digits>/<digits>.json``.  A file gets up to
    3 attempts (30 s pause between attempts); network errors count as a
    failed attempt instead of crashing.

    Args:
        url: upload endpoint.
        username / password: basic-auth credentials.
        files_to_sync: paths relative to LOCAL_DIRECTORY, grouped so that
            files from the same leaf directory are adjacent (the first file
            of a new leaf directory seeds the virtual directory name).

    Returns:
        True when every file synced (and the YYYYMMDD checkpoint of
        yesterday's date was written to current_date.txt); False as soon as
        one file still fails after 3 attempts.
    """
    real_datadir_two = ""
    virtual_datadir_two = ""
    for filename in files_to_sync:
        # One authenticated session per file (NOTE(review): could be hoisted
        # out of the loop, kept per-file to match existing behavior).
        session = requests.Session()
        session.auth = (username, password)
        file_synced = False

        base_file_tmp_split = os.path.split(filename)
        base_dir_tmp_split = os.path.split(base_file_tmp_split[0])
        base_file_tmp_splitext = os.path.splitext(base_file_tmp_split[1])

        # Entering a new leaf directory: derive its server-side ("virtual")
        # name from the digits of the first file seen inside it.
        if base_dir_tmp_split[1] != real_datadir_two:
            real_datadir_two = base_dir_tmp_split[1]
            virtual_datadir_two = re.sub(r'\D', '', base_file_tmp_splitext[0])

        # Normalize the date part of the path to zero-padded y/m/d.
        base_dir_tmp_split_unify = base_dir_tmp_split[0].replace('\\', '/').split('/')
        formatted_date = '/'.join([part.zfill(2) for part in base_dir_tmp_split_unify])

        # Naming differs between client and server: keep digits only.
        filename_re = re.sub(r'\D', '', base_file_tmp_split[1]) + '.json'
        filename_ = os.path.join(CLIENT_NAME, formatted_date, virtual_datadir_two, filename_re)

        for _ in range(3):
            filename_path = os.path.join(LOCAL_DIRECTORY, filename)
            file_size = os.path.getsize(filename_path)
            uploaded_bytes = 0

            response = None
            try:
                with open(filename_path, 'rb') as file:
                    while True:
                        # read() returns b'' exactly once at EOF; no second
                        # read is needed.
                        chunk = file.read(819200)
                        if not chunk:
                            break
                        response = session.post(url, files={'file': (filename_, chunk)})
                        uploaded_bytes += len(chunk)
                        progress = uploaded_bytes / file_size * 100
                        print(f"{filename} 上传进度：{progress:.2f}%")
            except requests.RequestException as exc:
                # A dropped connection is a failed attempt, not a crash.
                print('文件上传失败 %s, 正在重试..' % exc)
                time.sleep(30)
                continue

            if response is None:
                # Zero-byte file: nothing was posted, treat as synced.
                file_synced = True
                print(f'空文件忽略{filename}')
                break

            if response.status_code == 200:
                print('文件上传成功')
                file_synced = True
                break
            else:
                print('文件上传失败 %s, 正在重试..' % response)
                time.sleep(30)

        if not file_synced:
            print(f"文件 {filename} 在三次尝试后同步失败。停止同步过程。")
            return False

        time.sleep(1)

    # Checkpoint: record yesterday's date so the next run resumes from there.
    yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
    current_date = yesterday.strftime("%Y%m%d")
    with open("current_date.txt", "w") as file:
        file.write(current_date)
    return True
        

# Fetch the server-side file structure
def get_server_files(continue_key_value):
    """Fetch the server-side file list for this client.

    Args:
        continue_key_value: checkpoint date string (YYYYMMDD) read from
            ``file_path``, or "" when no checkpoint exists.  When set, the
            server is asked only for files from that date onward.

    Returns:
        The decoded JSON body (a list of server file paths — see the
        ``__main__`` block, which calls ``os.path.split`` on each entry).

    Raises:
        requests.RequestException on network failure / timeout,
        ValueError if the response body is not valid JSON.
    """
    if continue_key_value:
        params = {"file_sync_time": continue_key_value, "client_name": CLIENT_NAME}
        # timeout guards against the call hanging forever on a dead server.
        response = requests.get(f"{SERVER_ADDRESS}/list_files", params=params, timeout=60)
    else:
        print(f"文件 {file_path} 不存在。")
        response = requests.get(f"{SERVER_ADDRESS}/list_files", timeout=60)
    return response.json()

# Collect the local directory structure
def get_local_files(continue_key_value):
    """List local files under LOCAL_DIRECTORY, relative to LOCAL_DIRECTORY.

    The tree is expected to be laid out as <year>/<month>/<day>/...; when a
    checkpoint date is given, only day directories on or after that date are
    walked.  Non-numeric directory names are skipped instead of crashing.

    Args:
        continue_key_value: "" for a full scan, or a YYYYMMDD string.

    Returns:
        List of file paths relative to LOCAL_DIRECTORY.
    """
    file_list = []
    if continue_key_value:
        continue_key_value = int(continue_key_value)
        cont_year = continue_key_value // 10000
        cont_month = (continue_key_value // 100) % 100
        cont_day = continue_key_value % 100

        print("读取到本地继续文件")
        for year_dir in os.listdir(LOCAL_DIRECTORY):
            print("继续年份%s,目录年份%s" % (cont_year, year_dir))
            # Skip stray non-date directories rather than raising ValueError.
            if not year_dir.isdigit() or int(year_dir) < cont_year:
                continue
            year = int(year_dir)
            for month_dir in os.listdir(os.path.join(LOCAL_DIRECTORY, year_dir)):
                print("继续月份%s,目录月份%s" % (cont_month, month_dir))
                if not month_dir.isdigit():
                    continue
                month = int(month_dir)
                # Only the checkpoint year restricts the month.
                if year == cont_year and month < cont_month:
                    continue
                for day_dir in os.listdir(os.path.join(LOCAL_DIRECTORY, year_dir, month_dir)):
                    print("继续天%s,目录天%s" % (cont_day, day_dir))
                    if not day_dir.isdigit():
                        continue
                    # Only the exact checkpoint month restricts the day.
                    if year == cont_year and month == cont_month and int(day_dir) < cont_day:
                        continue
                    print(f"本地比较文件:{year_dir}{month_dir}{day_dir}")
                    day_path = os.path.join(LOCAL_DIRECTORY, year_dir, month_dir, day_dir)
                    for root, dirs, files in os.walk(day_path):
                        for name in files:
                            file_list.append(os.path.relpath(os.path.join(root, name), LOCAL_DIRECTORY))
    else:
        # No checkpoint: walk the whole tree.
        for root, dirs, files in os.walk(LOCAL_DIRECTORY):
            for name in files:
                file_list.append(os.path.relpath(os.path.join(root, name), LOCAL_DIRECTORY))
    return file_list

if __name__ == "__main__":

    continue_key_value = ""

    # Resume checkpoint: last synced date (YYYYMMDD), written by
    # upload_file_with_auth at the end of a successful run.
    if os.path.exists(file_path):
        with open(file_path, "r") as file:
            continue_key_value = file.read()
    
    local_files = get_local_files(continue_key_value)
    server_files = get_server_files(continue_key_value)
    
    #files_to_sync = [file for file in local_files if file not in server_files]

    files_to_sync = []
    # Comparison mode: compare file by file and sync only the differences.
    # Server entries are compared by bare file name only, not full path.
    server_files_names = []
    for server_files_one in server_files:
        server_files_one_split = os.path.split(server_files_one)
        server_files_names.append(server_files_one_split[1])
    
    # Group local files by the trailing number of their H_SCAN0_* directory.
    group_dir_local_dict = {}
    for file in local_files:
        file_split = os.path.split(file)

        # Server names are digits-only; normalize the local name the same
        # way before comparing.
        file_split_sub = re.sub(r'\D', '', file_split[1]) + '.json' 

        try:
            group_dir = re.search(r'H_SCAN0_\d+_\d+_\d+', file_split[0]).group()
        except:
            # Path does not match the expected layout; log and skip it.
            logging.info(f"【ERROR】错误路径格式文件：{file}")
            continue
            
        group_dir = group_dir.split('_')[-1]

        # NOTE(review): the first file of each new group is appended here
        # unconditionally — even when it already exists on the server — and
        # is appended AGAIN below when it is missing on the server, so it can
        # be uploaded twice.  Possibly intentional (the first file seeds
        # virtual_datadir_two in the uploader) — confirm before changing.
        if not group_dir_local_dict.get(group_dir):
            group_dir_local_dict[group_dir] = []
            group_dir_local_dict[group_dir].append(file)

        if file_split_sub not in server_files_names:
            # files_to_sync.append(file)
            # group_dir = re.search(r'\\\d+\\', file).group(0)[1:-1]
            try:
                group_dir_local_dict[group_dir].append(file)
            except KeyError:
                group_dir_local_dict[group_dir] = []
                group_dir_local_dict[group_dir].append(file)

    # Upload group by group; a failure stops only that group's remaining files.
    for group_dir_dict_one in group_dir_local_dict:
        upload_file_with_auth(f'{SERVER_ADDRESS}/sysnc_upload', 'sumz', 'Sumz', group_dir_local_dict[group_dir_dict_one])
