import sys
import os

from boto3 import Session



sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import boto3
from config import FORMAL_S3_CONFIG

class Operation_aws_s3():
    """Thin wrapper around a low-level boto3 S3 *client* for listing,
    renaming and deleting objects.

    NOTE(review): class name kept as-is for backward compatibility
    (PEP 8 would suggest ``OperationAwsS3``).
    """

    def __init__(self):
        # Credentials/region come from the project-level FORMAL_S3_CONFIG dict.
        session = Session(FORMAL_S3_CONFIG['access_key'], FORMAL_S3_CONFIG['secret_key'],
                          region_name=FORMAL_S3_CONFIG['region_name'])
        # Low-level client: all methods below must use the client API,
        # not the resource API (no .Bucket()/.Object() here).
        self.s3 = session.client('s3')

    @staticmethod
    def _split_path(path):
        """Split an ``s3://bucket/key`` style string into ``(bucket, key)``.

        The bucket is the 3rd '/'-separated component; everything after it
        is the key/prefix (may be empty).
        """
        parts = path.split('/')
        return parts[2], '/'.join(parts[3:])

    def list_object(self, bucket, dir_path):
        """List objects in *bucket* whose key contains *dir_path*.

        :param bucket: bucket name.
        :param dir_path: substring filter (also sent as the server-side
            ``Prefix``); an empty string lists everything.
        :return: list of ``{'path': key, 'path_size': size}`` dicts.
            Always a list — the old code returned ``None`` when the bucket
            held no matching objects.

        NOTE(review): ``list_objects_v2`` returns at most 1000 keys per call;
        use :meth:`ls` when full pagination is required.
        """
        response = self.s3.list_objects_v2(
            Bucket=bucket,
            Prefix=dir_path
        )
        file_list = []
        for obj in response.get('Contents') or []:
            # Prefix already narrows server-side; the substring check keeps
            # the original (slightly stricter) client-side filter semantics.
            if not dir_path or dir_path in obj['Key']:
                file_list.append({'path': obj['Key'], 'path_size': obj['Size']})
        return file_list

    def empty_object(self, bucket, path):
        """Delete a single object and return the raw ``delete_object`` response."""
        return self.s3.delete_object(Bucket=bucket, Key=path)

    def ls(self, bucket_and_path):
        """List every object under an ``s3://bucket/prefix`` style path.

        :param bucket_and_path: e.g. ``s3://my-bucket/some/dir``.
        :return: list of ``{'bucket': ..., 'object': key}`` dicts, fully
            paginated (no 1000-key cap).
        """
        bucket, prefix = self._split_path(bucket_and_path)
        # Normalise to a directory-style prefix. Only append '/' when a
        # prefix exists: the old code turned '' into '/', which matched
        # nothing instead of listing the whole bucket.
        if prefix and not prefix.endswith('/'):
            prefix += '/'
        # v2 paginator, consistent with list_object.
        paginator = self.s3.get_paginator('list_objects_v2')
        rs = []
        for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
            for content in page.get('Contents', []):
                rs.append({'bucket': bucket, 'object': content['Key']})
        return rs

    def rename(self, old_path, new_path):
        """Rename an object: server-side copy to the new key, then delete
        the old one. Both paths use the ``s3://bucket/key`` layout.

        BUG FIX: the previous implementation called ``self.s3.Bucket(...)``,
        a *resource*-API method that does not exist on the client created in
        ``__init__``, so every call raised ``AttributeError``. Rewritten with
        client-API ``copy_object`` / ``delete_object``.
        """
        old_bucket, old_key = self._split_path(old_path)
        new_bucket, new_key = self._split_path(new_path)
        self.s3.copy_object(
            Bucket=new_bucket,
            Key=new_key,
            CopySource={'Bucket': old_bucket, 'Key': old_key},
        )
        self.s3.delete_object(Bucket=old_bucket, Key=old_key)

    def delete_data(self, buck_and_path):
        """Delete every object under the given ``s3://bucket/prefix`` path."""
        for entry in self.ls(buck_and_path) or []:
            if entry:
                self.empty_object(entry.get('bucket'), entry.get("object"))
