# copyright (c) 2024 tencent inc. all rights reserved.
# nrwu@tencent.com

import os
import time
from typing_extensions import override
import logging
from typing import Dict
import traceback
import urllib3
import requests
from datetime import datetime

try:
    import qcloud_cos
    from qcloud_cos import CosConfig
    from qcloud_cos import CosS3Client
    from qcloud_cos.cos_client import logger
    logger.setLevel(logging.WARNING)
except ImportError:
    pass
try:
    from gdataset.utils.service_discovery import ServiceInstanceCache
except ImportError:
    ServiceInstanceCache = None

from gdataset.store.base import CliBase


class CosClient(CliBase):
    """Tencent COS (Cloud Object Storage) read client.

    Caches one ``CosS3Client`` per region — the underlying SDK keeps a
    process-wide connection pool and its constructor is lightweight, so
    per-region caching is cheap and safe — and wraps downloads in an
    exponential-backoff retry loop (see :meth:`get`).
    """

    def __init__(self, metadata, **kwargs):
        """Configure the client from a *metadata* dict.

        Required keys: ``cos_secret_id``, ``cos_secret_key``.
        Optional keys: ``cos_region`` (default region; a client for it is
        created eagerly), ``service_domain``, ``cos_endpoint`` (explicit
        endpoint override), ``cos_service`` (when set, endpoints are
        resolved through service discovery and plain http is used).
        """
        self.cos_region = metadata.get('cos_region', None)
        self.cos_secret_id = metadata['cos_secret_id']
        self.cos_secret_key = metadata['cos_secret_key']
        self.service_domain = metadata.get("service_domain", None)
        self.cos_endpoint = metadata.get('cos_endpoint', None)

        self.cos_service = metadata.get('cos_service', None)
        # Lazily constructed in _get() the first time cos_service routing
        # is actually needed.
        self.svr_discovery = None

        # Per the cos client source, the SDK maintains a process-level
        # connection pool internally and the client ctor is very
        # lightweight, so one cached client per region is enough.
        self.cos_clis: Dict[str, CosS3Client] = {}
        if self.cos_region is not None:
            self.cos_clis[self.cos_region] = self.create_client(self.cos_region)

    def create_client(self, cos_region) -> CosS3Client:
        """Build a ``CosS3Client`` bound to *cos_region*.

        Honors an explicit ``cos_endpoint`` from the metadata; otherwise
        falls back to the Tencent intranet endpoint for the region.
        """
        scheme = 'https' if self.cos_service is None else 'http'
        # BUGFIX: the original assigned the local endpoint only inside the
        # None branch and never read self.cos_endpoint, so configuring a
        # custom endpoint raised UnboundLocalError at CosConfig() below.
        cos_endpoint = self.cos_endpoint
        if cos_endpoint is None:
            # Tencent data-center internal network endpoint.
            cos_endpoint = 'cos-internal.%s.tencentcos.cn' % cos_region
        cos_cli_config = CosConfig(
            Region=cos_region,
            SecretId=self.cos_secret_id,
            SecretKey=self.cos_secret_key,
            Token=None,
            Endpoint=cos_endpoint,
            Scheme=scheme,
            ServiceDomain=self.service_domain,
        )
        return CosS3Client(cos_cli_config)

    def _get(self, cos_url='', url='', cos_bucket_name='', **kwargs):
        """Single-shot download of object ``url`` from ``cos_bucket_name``.

        Returns the fully-materialized object body as bytes. ``cos_url``
        is accepted for backward compatibility with data ingested by the
        old multimedia pipeline and must not be combined with ``url``.
        An optional ``cos_region`` kwarg overrides the default region.
        """
        if cos_url != '':
            # Backward compat with data previously ingested via the
            # multimedia cos pipeline.
            assert url == ''
            url = cos_url

        if self.cos_service is not None and self.svr_discovery is None:
            assert ServiceInstanceCache is not None
            self.svr_discovery = ServiceInstanceCache()

        # NOTE: reasons for NOT retrying at this level:
        #  1. the cos cli already retries internally;
        #  2. the default is timeout=None — blind retries could worsen a
        #     server-side avalanche.
        # If it still fails, letting it crash is the better option. If a
        # crash is unacceptable and a retry is required, prefer configuring
        # retry on the client; failing that, always use exponential
        # backoff — which is what get() does.
        cos_region = kwargs.get("cos_region", self.cos_region)
        if cos_region is None:
            # The client cache is keyed by region and never holds a None
            # key, so the old code would die here with an opaque
            # KeyError(None). Fail with an actionable message instead.
            raise ValueError(
                'no cos region available: pass cos_region=... or set '
                "'cos_region' in the metadata"
            )
        if cos_region not in self.cos_clis:
            self.cos_clis[cos_region] = self.create_client(cos_region)
        cos_client = self.cos_clis[cos_region]
        if self.cos_service is not None:
            cos_instance = self.svr_discovery.get_instance("Production", self.cos_service)
            cos_client.get_conf().set_ip_port(cos_instance['host'], cos_instance['port'])
        cos_resp = cos_client.get_object(
            Bucket=cos_bucket_name,
            Key=url,
        )
        # Reach into the SDK's wrapped requests response for the raw bytes.
        body = cos_resp['Body']._rt.content
        return body

    def get(self, cos_url='', url='', cos_bucket_name='', **kwargs):
        """Download with retry: exponential backoff, up to 60 attempts.

        Raises ``RuntimeError`` once every attempt has failed. When the
        environment variable ``GDATASET_V4_PERF=1`` is set, fetches slower
        than 0.1s are reported on stdout.
        """
        perf = int(os.environ.get("GDATASET_V4_PERF", "0"))
        if perf == 1:
            begin_t = time.time()

        # TODO(parkeychen): debug
        # Retry configuration.
        base_delay = 0.016  # 16ms initial delay
        max_retries = 60
        for attempt in range(max_retries):
            try:
                data = self._get(
                    cos_url=cos_url, url=url, cos_bucket_name=cos_bucket_name, **kwargs
                )

                if perf == 1:
                    end_t = time.time()
                    during_t = end_t - begin_t
                    if during_t > 0.1:
                        print(f"cos get {url} cost {during_t:.5f} seconds")
                return data

            # TODO(parkeychen): an uncaught network exception keeps showing
            # up, so catch the Exception base class for now to let the
            # thousand-GPU stress test run the full 48 hours.
            # BUGFIX: the old except tuple named qcloud_cos.cos_exception;
            # when the guarded `import qcloud_cos` at module top failed,
            # evaluating that tuple at exception time raised NameError
            # instead of handling the error. `except Exception` subsumes
            # every type the tuple listed.
            except Exception as e:
                time_str = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                print(f'Warning(cos) {time_str=} cos get error: {e} retry {attempt}', flush=True)
                if attempt >= max_retries - 1:
                    break
                time.sleep(self._calc_backoff(attempt, base_delay))

        raise RuntimeError(f'Failed to get {url} after {max_retries} retries')

    def _calc_backoff(self, attempt, base_delay):
        """Exponential backoff delay for *attempt* (0-based)."""
        return min(
            base_delay * (2**attempt), 16
        )  # Maximum interval 16 seconds (total retry about 15min)