# -*- coding: utf-8 -*-
# ==========================
# @Time    : 2023/12/6
# @Author  : zhoutengwei
# @File    : storage.py
# ==========================
import os.path

import pandas as pd
from minio import Minio, S3Error
from minio.commonconfig import ENABLED, Filter
from minio.lifecycleconfig import LifecycleConfig, Rule, Expiration
from datetime import datetime, timedelta
# from utils.logger import get_logger

# logger = get_logger()

class MinioStorage:
    """Thin convenience wrapper around the MinIO S3 client.

    Stores connection parameters and builds a fresh ``Minio`` client for
    every operation via :meth:`new_session`.
    """

    def __init__(self, addr, access_key, secret_key):
        # Connection parameters only; no connection is opened here.
        self.addr = addr
        self.access_key = access_key
        self.secret_key = secret_key

    def new_session(self):
        """Return a new ``Minio`` client for this endpoint."""
        return Minio(
            self.addr,
            access_key=self.access_key,
            secret_key=self.secret_key,
            # NOTE(review): plain-HTTP transport — confirm TLS is really
            # not wanted for this endpoint.
            secure=False,
        )

    def create_bucket(self, bucket_name):
        """Create *bucket_name* if it does not already exist (idempotent)."""
        client = self.new_session()
        # Guard prevents BucketAlreadyOwnedByYou errors on repeat calls.
        if not client.bucket_exists(bucket_name):
            client.make_bucket(bucket_name)

    def set_bucket_lifecycle(self, bucket_name, days):
        """Expire objects under the ``logs/`` prefix of *bucket_name* after *days* days.

        Silently does nothing when the bucket does not exist.
        """
        client = self.new_session()
        if client.bucket_exists(bucket_name):
            config = LifecycleConfig(
                [
                    Rule(
                        ENABLED,
                        rule_filter=Filter(prefix="logs/"),
                        rule_id="rule2",
                        expiration=Expiration(days=days),
                    ),
                ],
            )
            client.set_bucket_lifecycle(bucket_name, config)

    def list_buckets(self):
        """Return all buckets visible to these credentials."""
        client = self.new_session()
        return client.list_buckets()

    def list_objects(self, bucket_name):
        """Return the objects at the top level of *bucket_name* as a list.

        The original printed the lazy generator object and returned None;
        materialize and return it so callers can consume the listing.
        """
        client = self.new_session()
        return list(client.list_objects(bucket_name))

    def get_object(self, bucket_name, object_name):
        """Return a streaming response for *object_name*.

        The caller is responsible for closing/releasing the response.
        """
        client = self.new_session()
        return client.get_object(bucket_name, object_name)

    def fget_object(self, bucket_name, object_name, file_name):
        """Download *object_name* to the local path *file_name*."""
        client = self.new_session()
        return client.fget_object(bucket_name, object_name, file_name)

    def fput_object(self, bucket_name, object_name, file_path):
        """Upload the local file at *file_path* as *object_name*."""
        client = self.new_session()
        return client.fput_object(bucket_name, object_name, file_path)

    def fput_npy(self, bucket_name, object_name, arr, sub_dir_name=None):
        """Serialize *arr* with ``np.save`` and upload it.

        Returns the final object name (prefixed with *sub_dir_name* when given).
        """
        import tempfile
        import numpy as np

        with tempfile.TemporaryDirectory() as tmpdir:
            file_path = os.path.join(tmpdir, os.path.basename(object_name))
            np.save(file_path, arr)
            # np.save appends ".npy" when the target lacks that suffix;
            # track the real on-disk name or the upload below would fail
            # with FileNotFoundError.
            if not file_path.endswith(".npy"):
                file_path += ".npy"
            if sub_dir_name is not None:
                object_name = f"{sub_dir_name}/{object_name}"
            self.fput_object(bucket_name, object_name, file_path)
            return object_name

    def fput_json(self, bucket_name, object_name, data, sub_dir_name=None):
        """Dump *data* as UTF-8 JSON and upload it; return the object name."""
        import tempfile
        import json

        with tempfile.TemporaryDirectory() as tmpdir:
            file_path = os.path.join(tmpdir, os.path.basename(object_name))
            with open(file_path, "w", encoding="utf-8") as f:
                f.write(json.dumps(data, ensure_ascii=False, indent=4))
            if sub_dir_name is not None:
                object_name = f"{sub_dir_name}/{object_name}"
            self.fput_object(bucket_name, object_name, file_path)
            return object_name

    def fput_csv(self, bucket_name, object_name, df: pd.DataFrame, sub_dir_name=None):
        """Write *df* to CSV (index included, pandas default) and upload it.

        Returns the final object name.
        """
        import tempfile

        with tempfile.TemporaryDirectory() as tmpdir:
            file_path = os.path.join(tmpdir, os.path.basename(object_name))
            df.to_csv(file_path)
            if sub_dir_name is not None:
                object_name = f"{sub_dir_name}/{object_name}"
            self.fput_object(bucket_name, object_name, file_path)
            return object_name

    def put_object(self, bucket_name, object_name, buffer):
        """Upload an in-memory bytes-like *buffer* as *object_name*.

        ``Minio.put_object`` requires a file-like stream with ``.read()``,
        so raw ``bytes``/``bytearray`` input is wrapped; anything else is
        passed through unchanged (must then support both ``len()`` and
        ``.read()``).
        """
        import io

        client = self.new_session()
        data = io.BytesIO(buffer) if isinstance(buffer, (bytes, bytearray)) else buffer
        return client.put_object(bucket_name, object_name, data, len(buffer))

    def presigned_get_object(self, bucket_name, object_name, days: int):
        """Return a presigned download URL valid for *days* days."""
        client = self.new_session()
        return client.presigned_get_object(
            bucket_name, object_name, expires=timedelta(days=days)
        )

    def check_object_exists(self, bucket_name, object_name):
        """Return True when *object_name* exists in *bucket_name*, else False.

        ``stat_object`` raises ``S3Error`` (e.g. NoSuchKey) for a missing
        object; without the handler this method could never return False.
        """
        client = self.new_session()
        try:
            client.stat_object(bucket_name, object_name)
            return True
        except S3Error:
            return False

class Config:
    """Static connection settings for MinioStorage (endpoint + credentials)."""

    # NOTE(review): credentials and endpoint are hard-coded in source —
    # move to environment variables or a secrets store before deploying.
    # addr = "play.min.io:9000"
    addr = "6wdgirmlu.shenzhuo.vip:43129"
    access_key = 'admin'
    secret_key = '12345678'
