"""
@Author    : ghenyar
@Time      : 2025/8/28 11:11
@File      : __init__.py
@Desc      : oss存储
"""

import base64
import hashlib
import json
import multiprocessing
import os
import time

from concurrent.futures import ThreadPoolExecutor, as_completed

import oss2
import requests
# from baidubce.auth.bce_credentials import BceCredentials
# from baidubce.bce_client_configuration import BceClientConfiguration
# from baidubce.services.bos.bos_client import BosClient
from oss2 import SizedFileAdapter
from oss2.models import PartInfo
from qiniu import Auth, put_file, BucketManager, etag
from pathlib import Path
from .utils import logger, stream_file, determine_part_size_internal, content_md5, get_file_start_end
from qcloud_cos import CosConfig
from qcloud_cos import CosS3Client


class StorageFile:
    """Unified facade over multiple cloud object-storage backends.

    Dispatches upload/download/delete calls to the provider selected by
    ``config["types"]`` ("tencent", "aliyun" or "qiniu").
    """

    def __init__(self, config: dict, range_header: str = None):
        # Provider key, e.g. "tencent" / "aliyun" / "qiniu".
        self.types = config["types"]
        self.storage = self._initialize_storage(config)
        # Optional HTTP Range header forwarded to streaming downloads.
        self.range_header = range_header

    def _initialize_storage(self, config: dict):
        """Instantiate the backend selected by ``self.types``.

        Raises:
            ValueError: if the configured storage type is not supported.
                (The previous fallback ``lambda: None`` took no argument but
                was called with ``config``, so an unknown type crashed with a
                confusing ``TypeError`` instead of a clear error.)
        """
        storage_classes = {
            "tencent": Tencent,
            "aliyun": AliYun,
            # "baidu": Baidu,
            "qiniu": QiNiu,
        }
        storage_cls = storage_classes.get(self.types)
        if storage_cls is None:
            raise ValueError(f"Unsupported storage type: {self.types!r}")
        return storage_cls(config)

    def upload(self, file_path: str):
        """Simple (single-request) file upload."""
        return self.storage.upload(file_path)

    def binary_upload(self, file_path: str):
        """Upload the file as a binary stream.

        Qiniu has no separate binary path, so it falls back to ``upload``.
        """
        return self.storage.upload(file_path) if self.types == "qiniu" else self.storage.binary_upload(file_path)

    def chunked_file_upload(self, file_path: str):
        """Multipart (chunked) upload."""
        return self.storage.chunked_file_upload(file_path)

    def download(self, file_name: str):
        """Return a (signed) URL for downloading the file."""
        return self.storage.download_file(file_name)

    def download_stream(self, file_name: str):
        """Return a streaming response for the file, honoring ``range_header``."""
        stream, headers = self.storage.download_stream(file_name, self.range_header)
        return stream_file(file_name, stream, headers) if stream else None

    def delete(self, file_name: str):
        """Delete the file from the storage bucket."""
        return self.storage.delete_file(file_name)


class Tencent:
    """Tencent Cloud COS storage backend.

    Credentials and bucket location come from the config dict; a fresh COS
    client is constructed for every operation.
    """

    def __init__(self, config: dict):
        self._scheme = "https"  # always talk to COS over TLS
        self._bucket = config["bucket"]
        self._secret_id = config["access_key"]
        self._secret_key = config["secret"]
        self._region = config["region"]
        self._endpoint = config["endpoint"]

    def upload(self, file_path: str):
        """Plain upload of a local file; True on success, False on error."""
        try:
            cos = self._get_client()
            cos.upload_file(
                Bucket=self._bucket,
                LocalFilePath=file_path,
                Key=Path(file_path).name,
                EnableMD5=False
            )
            return True
        except Exception as exc:
            logger("Tencent", str(exc))
            return False

    def binary_upload(self, file_path: str):
        """Upload the file contents from an open binary stream."""
        try:
            cos = self._get_client()
            object_key = Path(file_path).name
            with open(file_path, "rb") as stream:
                cos.put_object(Bucket=self._bucket, Body=stream, Key=object_key)
            return True
        except Exception as exc:
            logger("Tencent", str(exc))
            return False

    def chunked_file_upload(self, file_path: str):
        """Multipart upload: initiate, upload parts sequentially, complete."""
        try:
            cos = self._get_client()
            object_key = Path(file_path).name
            upload_id = cos.create_multipart_upload(Bucket=self._bucket, Key=object_key)["UploadId"]
            total_size = os.path.getsize(file_path)
            part_size = determine_part_size_internal(total_size, preferred_size=5 * 1024 * 1024)

            uploaded = []
            with open(file_path, "rb") as stream:
                part_number = 1
                remaining = total_size
                while remaining > 0:
                    chunk_len = min(part_size, remaining)
                    part_resp = cos.upload_part(
                        Bucket=self._bucket,
                        Key=object_key,
                        Body=stream.read(chunk_len),
                        PartNumber=part_number,
                        UploadId=upload_id
                    )
                    uploaded.append({'PartNumber': part_number, 'ETag': part_resp['ETag']})
                    remaining -= chunk_len
                    part_number += 1

            cos.complete_multipart_upload(
                Bucket=self._bucket,
                Key=object_key,
                UploadId=upload_id,
                MultipartUpload={'Part': uploaded}
            )
            return True
        except Exception as exc:
            logger("Tencent", str(exc))
            return False

    def download_file(self, file_name: str):
        """Return the object's access URL."""
        return self._get_client().get_object_url(Bucket=self._bucket, Key=file_name)

    def download_stream(self, file_name: str, range_header):
        """Return (body stream, Content-Range headers) for a ranged GET."""
        cos = self._get_client()
        meta = cos.head_object(Bucket=self._bucket, Key=file_name)
        file_size = int(meta["Content-Length"])
        start, end = get_file_start_end(range_header, file_size)
        ranged = cos.get_object(Bucket=self._bucket, Key=file_name, Range=f"bytes={start}-{end}")
        return ranged["Body"], {"Content-Range": f"bytes {start}-{end}/{file_size}"}

    def delete_file(self, file_name: str):
        """Delete the object from the bucket; True on success."""
        try:
            self._get_client().delete_object(Bucket=self._bucket, Key=file_name)
            return True
        except Exception as exc:
            logger("Tencent", str(exc))
            return False

    def _get_client(self):
        """Build a fresh COS client from the stored credentials."""
        cfg = CosConfig(Region=self._region, SecretId=self._secret_id,
                        SecretKey=self._secret_key, Scheme=self._scheme)
        return CosS3Client(cfg)


class AliYun(object):
    """Alibaba Cloud OSS storage backend."""

    def __init__(self, config: dict):
        self._access_id = config["access_key"]
        self._secret_key = config["secret"].encode("utf-8")
        self._bucket_name = config["bucket"]
        self._endpoint = "https://{}".format(config["endpoint"])
        self._region = config["region"]

    def upload(self, file_path: str):
        """Simple upload of a local file; True on success, False on error."""
        try:
            bucket = self._bucket()
            key = Path(file_path).name
            bucket.put_object_from_file(key, file_path)
        except Exception as e:
            logger("AliYun", str(e))
            return False
        return True

    def binary_upload(self, file_path: str):
        """Upload the file from an open binary stream."""
        try:
            bucket = self._bucket()
            file_new_name = Path(file_path).name
            # Open the file in binary mode and stream it to OSS.
            with open(file_path, "rb") as fileObj:
                bucket.put_object(file_new_name, fileObj)
        except Exception as e:
            logger("AliYun", str(e))
            return False
        return True

    def chunked_file_upload(self, file_path: str):
        """Multipart upload with concurrent part uploads.

        Parts are uploaded by a thread pool and finish out of order; OSS
        requires the part list passed to CompleteMultipartUpload to be in
        ascending part-number order, so the list is sorted before completing.
        """
        try:
            # Determine file size and per-part size.
            total_size = os.path.getsize(file_path)
            part_size = determine_part_size_internal(total_size, preferred_size=100 * 1024)
            bucket = self._bucket()
            key = Path(file_path).name
            upload_id = bucket.init_multipart_upload(key).upload_id
            parts = []

            def upload_part(part_num, offset, num_to_upload):
                # Each worker opens its own handle so concurrent seeks don't clash.
                with open(file_path, "rb") as fileObj:
                    fileObj.seek(offset)
                    response = bucket.upload_part(key, upload_id, part_num,
                                                  SizedFileAdapter(fileObj, num_to_upload))
                    return PartInfo(part_num, response.etag)

            # Upload parts concurrently.
            with ThreadPoolExecutor(max_workers=10) as executor:
                futures = []
                offset = 0
                part_number = 1
                while offset < total_size:
                    num_to_upload = min(part_size, total_size - offset)
                    futures.append(executor.submit(upload_part, part_number, offset, num_to_upload))
                    offset += num_to_upload
                    part_number += 1
                # Collect results as they finish (completion order, not part order).
                for future in as_completed(futures):
                    parts.append(future.result())

            # BUG FIX: as_completed yields in completion order, but the OSS
            # CompleteMultipartUpload API rejects part lists that are not in
            # ascending part-number order (InvalidPartOrder).
            parts.sort(key=lambda p: p.part_number)
            bucket.complete_multipart_upload(key, upload_id, parts)
        except Exception as e:
            logger("AliYun", f"Error during chunked upload: {str(e)}")
            return False
        return True

    def download_file(self, file_name: str):
        """Return a signed GET URL valid for 600 seconds."""
        bucket = self._bucket()
        params = dict()
        return bucket.sign_url('GET', file_name, 600, slash_safe=True, params=params)

    def download_stream(self, file_name: str, range_header: str):
        """Return (object stream, Content-Range headers) for a ranged GET."""
        bucket = self._bucket()
        simplifiedmeta = bucket.get_object_meta(file_name)
        file_size = int(simplifiedmeta.headers["Content-Length"])
        start, end = get_file_start_end(range_header, file_size)
        # "standard" range behavior: an out-of-range end is clamped and the
        # service replies HTTP 206 with the valid sub-range.
        object_stream = bucket.get_object(file_name, byte_range=(start, end),
                                          headers={"x-oss-range-behavior": "standard"})
        headers = {"Content-Range": f"bytes {start}-{end}/{file_size}"}
        return object_stream, headers

    def delete_file(self, file_name: str):
        """Delete the object from the bucket; True on success."""
        try:
            bucket = self._bucket()
            bucket.delete_object(file_name)
        except Exception as e:
            logger("AliYun", str(e))
            return False
        return True

    def _bucket(self):
        """Build an oss2.Bucket bound to the configured endpoint/region."""
        return oss2.Bucket(oss2.Auth(self._access_id, self._secret_key), self._endpoint, self._bucket_name,
                           region=self._region)


class QiNiu:
    """Qiniu Kodo storage backend."""

    def __init__(self, config: dict):
        self._access_key = config["access_key"]
        self._secret_key = config["secret"].encode("utf-8")
        self._bucket_name = config["bucket"]
        self._endpoint = config["endpoint"]
        # Region-specific upload entry host (z2 = South China); presumably
        # matches the bucket's region — TODO confirm against the account.
        self._host = "up-z2.qiniup.com"

    def upload(self, file_path: str):
        """Upload via the SDK (resumable v2 upload handled internally)."""
        return self._upload_file(file_path)

    def chunked_file_upload(self, file_path: str):
        """Multipart upload via Qiniu's v2 resumable-upload REST API.

        Steps: initiate an upload (get uploadId), PUT each part with its
        Content-MD5, then POST the collected parts to complete. Returns
        True on success, False on any failure.
        """
        key = Path(file_path).name
        # Object name must be URL-safe base64 inside the REST paths.
        encoded_file_name = base64.urlsafe_b64encode(key.encode()).decode()
        chunk_size = 2 * 1024 * 1024  # 2MB per part
        try:
            # Initiate the multipart upload.
            upload_id = self._get_upload_id(encoded_file_name)
            parts = []
            with open(file_path, 'rb') as f:
                part_number = 1
                while True:
                    chunk = f.read(chunk_size)
                    if not chunk:
                        break
                    # Content-MD5 is the base64 of the raw MD5 digest.
                    part_md5 = self._calculate_md5(chunk)
                    part_md5_base64 = base64.b64encode(bytes.fromhex(part_md5)).decode('utf-8')
                    url = f"buckets/{self._bucket_name}/objects/{encoded_file_name}/uploads/{upload_id}/{part_number}"
                    headers = {
                        "Content-Type": "application/octet-stream",
                        "Content-MD5": part_md5_base64,
                        "Content-Length": str(len(chunk)),
                    }
                    response = self._request(url, data=chunk, headers=headers, method="PUT")
                    if not response:
                        # _request returns None on any non-200 answer; the old
                        # code crashed with TypeError when subscripting it.
                        logger("QiNiu", f"part {part_number} upload failed for {key}")
                        return False
                    parts.append({"etag": response["etag"], "partNumber": part_number})
                    part_number += 1

            # Complete the multipart upload by posting the collected parts.
            complete_url = f"buckets/{self._bucket_name}/objects/{encoded_file_name}/uploads/{upload_id}"
            comp_headers = {
                "Content-Type": "application/json",
            }
            data = {
                "parts": parts,
                "fname": key,
            }
            # BUG FIX: _request returns the parsed JSON dict (or None), never a
            # Response object, so the old `response.status_code == 200` check
            # raised AttributeError and this method always returned False.
            response = self._request(complete_url, data=json.dumps(data), headers=comp_headers, method="POST")
            return response is not None
        except Exception as e:
            logger("QiNiu", str(e))
            return False

    def download_file(self, file_name: str):
        """Return a signed private download URL."""
        return self._download_url(file_name)

    def download_stream(self, file_name: str, range_header: str):
        """Return (chunk iterator, Content-Range headers) for a ranged GET."""
        bucket = BucketManager(self._init_auth())
        # Stat the object to learn its size.
        ret, _ = bucket.stat(self._bucket_name, file_name)
        if not ret:
            # Object not found (or stat failed) -> nothing to stream.
            return None, None
        file_size = int(ret["fsize"])
        private_url = self._download_url(file_name)

        start, end = get_file_start_end(range_header, file_size)

        headers = {"Content-Range": f"bytes {start}-{end}/{file_size}"}
        # Fetch only the requested byte range from the signed URL.
        response = requests.get(private_url, headers={"Range": f"bytes={start}-{end}"})
        return response.iter_content(chunk_size=1024), headers

    def delete_file(self, file_name: str):
        """Delete the object; True when the SDK reports an empty result."""
        try:
            bucket = BucketManager(self._init_auth())
            ret, _ = bucket.delete(self._bucket_name, file_name)
            return not ret
        except Exception as e:
            logger("QiNiu", f"Error deleting file {file_name}: {str(e)}")
            return False

    def _download_url(self, file_name: str):
        """Build a private download URL valid for one hour."""
        base_url = f"http://{self._endpoint}/{file_name}"
        return self._init_auth().private_download_url(base_url, expires=3600)

    def _get_upload_id(self, encoded_file_name):
        """Initiate a multipart upload and return its uploadId."""
        url = f"buckets/{self._bucket_name}/objects/{encoded_file_name}/uploads"
        response = self._request(url, None)
        return response["uploadId"]

    @staticmethod
    def _calculate_md5(data):
        """Return the hex MD5 digest of a bytes chunk."""
        md5 = hashlib.md5()
        md5.update(data)
        return md5.hexdigest()

    def _upload_file(self, file_path: str):
        """SDK upload (v2 resumable, 2 MB parts); verify key and etag after."""
        try:
            key = Path(file_path).name
            token = self._get_token()
            ret, info = put_file(token, key, file_path, version="v2", part_size=2)
            return ret['key'] == key and ret['hash'] == etag(file_path)
        except Exception as e:
            logger("QiNiu", f"Error uploading file {file_path}: {str(e)}")
            return False

    def _init_auth(self):
        """Create a qiniu Auth from the stored key pair."""
        return Auth(self._access_key, self._secret_key)

    def _get_token(self):
        """Generate a bucket-scoped upload token valid for one hour."""
        return self._init_auth().upload_token(self._bucket_name, None, 3600)

    def _request(self, url: str, data, headers=None, method="POST"):
        """Signed request against the upload host.

        Returns the parsed JSON body on HTTP 200, otherwise None (also for
        unsupported methods, instead of crashing on a None response).
        """
        full_url = f"https://{self._host}/{url}"
        up_token = self._get_token()
        header = {
            "Authorization": f"UpToken {up_token}"
        }
        response = None
        if headers:
            header.update(headers)
        if method == "POST":
            response = requests.post(full_url, data=data, headers=header)
        elif method == "PUT":
            response = requests.put(full_url, data=data, headers=header)
        if response is None or response.status_code != 200:
            return None
        return response.json()

# class Baidu(object):
#     # 百度云存储(BOS)
#     def __init__(self, config: dict):
#         self._bucket_name = config["bucket"]
#         self._access_key = config["access_key"]
#         self._secret_key = config["secret"]
#         self._endpoint = config["endpoint"]
#
#     def upload(self, file_path: str):
#         """普通上传"""
#         try:
#             object_key = Path(file_path).name
#             bos_client = self._bos_client()
#             bos_client.put_object_from_file(self._bucket_name, object_key, file_path)
#         except Exception as e:
#             logger("Baidu", str(e))
#             return False
#         return True
#
#     def binary_upload(self, file_path: str):
#         """二进制方式上传"""
#         try:
#             data = open(file_path, "rb")
#             cont_md5 = content_md5(file_path)
#             object_key = Path(file_path).name
#             total_size = os.path.getsize(file_path)
#             bos_client = self._bos_client()
#             bos_client.put_object(self._bucket_name, object_key, data, total_size, cont_md5)
#         except Exception as e:
#             logger("Baidu", str(e))
#             return False
#         return True
#
#     def chunked_file_upload(self, file_path: str):
#         """分片上传文件"""
#         try:
#             bos_client = self._bos_client()
#             object_key = Path(file_path).name
#             bos_client.put_super_obejct_from_file(self._bucket_name, object_key, file_path,
#                                                   chunk_size = 1, thread_num = multiprocessing.cpu_count())
#         except Exception as e:
#             logger("Baidu", str(e))
#             return False
#         return True
#
#     def download_file(self, file_name: str):
#         """获取文件下载链接"""
#         bos_client = self._bos_client()
#         timestamp = int(time.time())
#         return bos_client.generate_pre_signed_url(self._bucket_name, file_name, timestamp, expiration = 3600)
#
#     def download_stream(self, file_name: str, range_header: str):
#         """返回文件流"""
#         bos_client = self._bos_client()
#         object_info = bos_client.get_object_meta_data(self._bucket_name, file_name)
#         file_size = int(object_info.metadata.content_length)
#         start, end = get_file_start_end(range_header, file_size)
#         headers = {"Content-Range": f"bytes {start}-{end}/{file_size}"}
#         response = bos_client.get_object(self._bucket_name, file_name, range = [start, end])
#         return response.data, headers
#
#     def delete_file(self, file_name: str):
#         """删除文件"""
#         try:
#             bos_client = self._bos_client()
#             bos_client.delete_object(self._bucket_name, file_name)
#         except Exception as e:
#             logger("Baidu", str(e))
#             return False
#         return True
#
#     def _bos_client(self):
#         # BosClient
#         config = BceClientConfiguration(credentials = BceCredentials(access_key_id = self._access_key, secret_access_key = self._secret_key), endpoint = self._endpoint)
#         return BosClient(config)
