# coding=utf-8

"""
文件: sync_dir.py
时间: 2024-11-9
描述：将本地目录下的所有文件同步到泛播R2
"""

import os
import sys
import magic
import configparser as cpr
import boto3


def load_settings(file_setting = '配置.ini'):
    """Load the INI configuration file and return its values as a dict.

    Args:
        file_setting: Path (or bare filename, resolved against the current
            working directory) of the configuration file.

    Returns:
        dict with keys: debug, aws_access_key_id, aws_secret_access_key,
        region_name, bucket_name, endpoint_url, cover_exist_object,
        sync_base_dir.

    Exits the process with status 1 when the file cannot be found.
    """
    # Fall back to resolving the bare name against the working directory.
    if not os.path.isfile(file_setting):
        file_setting = os.path.join(os.getcwd(), file_setting)
    if not os.path.exists(file_setting):
        print('找不到配置文件：' + file_setting)
        sys.exit(1)

    parser = cpr.ConfigParser()
    # utf-8-sig tolerates a UTF-8 BOM written by some Windows editors.
    parser.read(file_setting, encoding='utf-8-sig')

    # Build the result directly from the two sections of the file.
    return {
        'debug': parser.getint('开发', 'debug'),
        'aws_access_key_id': parser.get('泛播', 'aws_access_key_id'),
        'aws_secret_access_key': parser.get('泛播', 'aws_secret_access_key'),
        'region_name': parser.get('泛播', 'region_name'),
        'bucket_name': parser.get('泛播', 'bucket_name'),
        'endpoint_url': parser.get('泛播', 'endpoint_url'),
        'cover_exist_object': parser.getint('泛播', 'cover_exist_object'),
        'sync_base_dir': parser.getint('泛播', 'sync_base_dir'),
    }


def sync_dir(local_path: str, remote_base: str, s3_client, bucket_name, cover_exist = 1):
    """Recursively upload every file under *local_path* to the bucket.

    Args:
        local_path: Local directory to walk.
        remote_base: Key prefix for uploaded objects ('' or 'dir/' style).
        s3_client: boto3 S3 client (R2 speaks the S3-compatible API).
        bucket_name: Name of the target bucket.
        cover_exist: 1 to always upload (overwriting existing objects);
            any other value skips files whose key already exists remotely.
    """
    items = os.listdir(local_path)
    mime = magic.Magic(mime=True)
    for item in items:
        # os.listdir never yields '.'/'..' but keep the original guard.
        if item == '.' or item == '..':
            continue
        full_path = os.path.join(local_path, item)
        if os.path.isdir(full_path):
            # Recurse into the sub-directory, extending the remote prefix.
            dirname = remote_base + item + '/'
            sync_dir(full_path, dirname, s3_client, bucket_name, cover_exist)
            continue
        # Object keys must not start with '/'.
        object_key = (remote_base + item).lstrip('/')
        # Skip files that are already present remotely (unless overwriting).
        if cover_exist != 1:
            try:
                # head_object raises ClientError (404) when the key is absent.
                object_information = s3_client.head_object(Bucket=bucket_name, Key=object_key)
                print('上传日期：', object_information['LastModified'])
                continue
            except s3_client.exceptions.ClientError:
                # Was a bare `except:` — that swallowed network failures and
                # even KeyboardInterrupt. Only the API error means "absent,
                # upload it"; anything else now propagates.
                pass
        # Sniff the MIME type from the first 2 KiB of the file.
        with open(full_path, 'rb') as fs:
            content_type = mime.from_buffer(fs.read(2048))
        if not content_type:
            content_type = 'application/octet-stream'
        # Upload the file with the detected content type.
        print('上传文件：', object_key)
        with open(full_path, 'rb') as data:
            s3_client.put_object(Bucket=bucket_name, Key=object_key, Body=data, ContentType=content_type)


def main():
    """Entry point: prompt for a local directory and mirror it to the bucket."""
    # Ask the user which directory to sync and validate it.
    local_dir = input('请输入要同步的文件夹：')
    if not os.path.isdir(local_dir):
        print('目录不存在或无权访问')
        sys.exit(1)

    setting = load_settings()

    # Build an S3-compatible client pointed at the configured endpoint.
    s3_client = boto3.client(
        service_name='s3',
        endpoint_url=setting['endpoint_url'],
        aws_access_key_id=setting['aws_access_key_id'],
        aws_secret_access_key=setting['aws_secret_access_key'],
        region_name=setting['region_name'],
    )

    # Optionally nest everything under the local directory's own name.
    remote_base = ''
    if setting['sync_base_dir'] == 1:
        remote_base = f'/{os.path.basename(local_dir)}/'

    print('开始同步...')
    sync_dir(
        local_dir,
        remote_base,
        s3_client,
        setting['bucket_name'],
        setting['cover_exist_object'],
    )
    print('所有文件同步完毕')


# Run the interactive sync only when executed as a script, not on import.
if __name__ == '__main__':
    main()
