import json
import os
import random
import string

from addict import Dict
from utils.logger import get_logger
from storage import Boto3Storage
# from pymongo import MongoClient

# Module-level logger shared by this module (project-local factory).
logger = get_logger()


class Predictor:
    """Storage-backed helper for prediction tasks.

    Wraps a Boto3-compatible object store and provides utilities for
    generating unique object names, parsing ``s3://`` URLs, and reading
    task metadata stored as JSON under the ``autocut_meta/`` prefix.
    """

    def __init__(self, cfg: dict):
        """Initialize the storage client from a nested config dict.

        Args:
            cfg: configuration mapping; ``cfg["storage"]`` must provide
                ``addr``, ``accessKey``, ``secretKey``, ``bucket_name``
                and ``expiration``.
        """
        self.cfg = Dict(cfg)
        self.storage_cfg = self.cfg.storage
        self.storage = Boto3Storage(self.storage_cfg.addr,
                                    self.storage_cfg.accessKey,
                                    self.storage_cfg.secretKey)

        self.bucket_name = self.storage_cfg.bucket_name
        self.expiration = self.storage_cfg.expiration
        self.s3_prefix = f"s3://{self.bucket_name}"
        # MongoDB / bucket bootstrap kept for reference; currently disabled.
        # self.mongodb_uri = self.storage_cfg.mongodb_uri
        # self.mongo_client = MongoClient(self.mongodb_uri)
        # self.db = self.mongo_client.get_database()
        # self.storage.create_bucket(self.bucket_name)
        # self.storage.set_bucket_lifecycle(self.bucket_name, self.expiration)

    @staticmethod
    def generate_name(path: str, suffix: str) -> str:
        """Return a unique object name derived from *path*.

        The name is the file's basename truncated at its first dot,
        followed by "-", a 13-character alphanumeric token, and *suffix*.

        Bug fix: the basename is taken BEFORE splitting on ".", so paths
        whose directories contain dots (e.g. "/a.b/clip.mp4") no longer
        produce a truncated stem ("a" instead of "clip").
        """
        chars = string.ascii_letters + string.digits
        stem = os.path.basename(path).split(".")[0]
        token = "".join(random.choice(chars) for _ in range(13))
        return f"{stem}-{token}{suffix}"

    @staticmethod
    def parse_s3url(url: str):
        """Split an ``s3://bucket/key`` URL into (bucket_name, object_name).

        Assumes *url* starts with the literal "s3://" prefix and contains
        at least one "/" after the bucket name.
        """
        bucket_name, object_name = url[len("s3://"):].split('/', 1)
        return bucket_name, object_name

    @staticmethod
    def read_silent_points(silent_points: "str | list", storage) -> list:
        """Return silent points, fetching them from storage when given a URL.

        Args:
            silent_points: either the list itself (returned unchanged) or an
                ``s3://bucket/key`` URL pointing at a JSON array.
            storage: object store exposing ``get_object(bucket, key)`` whose
                result supports ``.read()``.
        """
        if isinstance(silent_points, str):
            # Delegate URL parsing to parse_s3url instead of re-slicing here
            # (previously duplicated the same "s3://" prefix handling inline).
            bucket_name, object_name = Predictor.parse_s3url(silent_points)
            data = storage.get_object(bucket_name, object_name)
            silent_points = json.loads(data.read())

        return silent_points

    @staticmethod
    def read_metadata(task_id: str, storage, bucket_name):
        """Load and decode ``autocut_meta/<task_id>.json`` from *bucket_name*.

        Assumes the object exists: a missing object propagates whatever
        *storage* raises or returns (no None check, unlike get_metadata).
        """
        object_name = f"autocut_meta/{task_id}.json"
        data = storage.get_object(bucket_name, object_name)
        metadata = json.loads(data.read())
        return metadata

    def get_metadata(self, noid, task_id):
        """Find the metadata entry whose ``"noid"`` equals *noid* for a task.

        Returns:
            ``(index, metadata_list)`` for the first matching entry;
            ``(None, None)`` when the metadata object is missing or no
            entry matches.
        """
        object_name = f"autocut_meta/{task_id}.json"
        data = self.storage.get_object(self.bucket_name, object_name)
        if data is None:
            return None, None
        metadata = json.loads(data.read())
        for i, meta in enumerate(metadata):
            if meta["noid"] == noid:
                return i, metadata

        return None, None

    # def update_metadata(self, task_id: str, data: dict, meta_id=None):
    #     if not self.storage.check_object_exists(self.bucket_name, f'autocut_meta/{task_id}.json'):
    #         self.storage.fput_json(self.bucket_name, f"{task_id}.json", [data], f'autocut_meta')
    #     else:
    #         metadata = self.read_metadata(task_id, self.storage, self.bucket_name)
    #         if meta_id is not None:
    #             metadata[meta_id] = data
    #         else:
    #             metadata.append(data)
    #         self.storage.fput_json(self.bucket_name, f"{task_id}.json", metadata, f'autocut_meta')
