import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import s3fs
from config import FORMAL_S3_CONFIG,region_name
from boto3.session import Session
from botocore.exceptions import ClientError
from base_class.inspect_logging import print_to_log


class Operation_aws_s3():
    """Thin wrapper around boto3/s3fs for basic operations on the project's S3 bucket.

    Credentials, region and the default bucket come from the project-level
    config (``FORMAL_S3_CONFIG`` / ``region_name``); the constructor opens a
    boto3 session and keeps both a resource and a low-level client handle.
    """

    def __init__(self):
        # url = "https://{}".format(FORMAL_S3_CONFIG['s3_endpoint'])  # could also be a self-hosted endpoint
        session = Session(FORMAL_S3_CONFIG['access_key'], FORMAL_S3_CONFIG['secret_key'], region_name=region_name)
        self.s3 = session.resource('s3')
        self.s3_client = session.client('s3')

    def download_file(self, objectName, fileName):
        """
        Download an object from the configured bucket.
        :param objectName: key (path) of the object inside the bucket
        :param fileName: local destination file name -- NOTE: a relative path
                         resolves against the current working directory
        :return: None
        """
        self.s3_client.download_file(FORMAL_S3_CONFIG['bucket'], objectName, fileName)

    def upload_file(self, file_name, object_name=None):
        """
        Upload a local file to the configured bucket.
        :param file_name: path of the local file to upload
        :param object_name: destination key, e.g. file/localfile/test;
                            defaults to file_name when omitted
        :return: True on success, False when the upload fails with a ClientError
        """
        # Default the key to the local file name; the original code tested for
        # None but gave the parameter no default, making that path unreachable
        # unless callers passed None explicitly.
        if object_name is None:
            object_name = file_name
        try:
            self.s3_client.upload_file(file_name, FORMAL_S3_CONFIG['bucket'], object_name)
            print('文件推送至s3成功')
        except ClientError as e:
            print('aws_s3文件上传出错{}'.format(e))
            return False
        return True

    def list_object(self, bucket, dir_path):
        """
        List all objects under a prefix in the given bucket.
        :param bucket: bucket to list
        :param dir_path: key prefix to filter on (empty string lists everything)
        :return: list of {'path': key, 'path_size': size} dicts; an empty list
                 when nothing matches (the previous version returned None)
        """
        # A paginator is required: a single list_objects_v2 call is capped at
        # 1000 keys, silently truncating larger listings. The Prefix filter
        # already guarantees every returned key starts with dir_path, so no
        # additional substring re-check is needed.
        file_list = []
        paginator = self.s3_client.get_paginator('list_objects_v2')
        for page in paginator.paginate(Bucket=bucket, Prefix=dir_path):
            for f in page.get('Contents', []):
                file_list.append({'path': f['Key'], 'path_size': f['Size']})
        return file_list

    def add_file(self, data_list, filepath, mode):
        '''
        Write data to a file stored on S3 (via s3fs).
        :param data_list: data to write -- must be a string; it is encoded to
                          bytes before writing
        :param filepath: destination file (including its path inside the bucket)
        :param mode: file-open mode used for the write (e.g. 'wb' / 'ab')
        :return: None -- failures are logged to stdout, not raised (best effort)
        '''
        bytes_to_write = data_list.encode()
        try:
            fs = s3fs.S3FileSystem(client_kwargs={'endpoint_url': 'https://{}'.format(FORMAL_S3_CONFIG['s3_endpoint'])},
                                   key=FORMAL_S3_CONFIG['access_key'], secret=FORMAL_S3_CONFIG['secret_key'])
            with fs.open('s3://{}/{}'.format(FORMAL_S3_CONFIG['bucket'], filepath), str(mode)) as f:
                f.write(bytes_to_write)
            content = '往》{}《文件中写入数据成功'.format(filepath)
            print(content)
        except Exception as e:
            # Deliberately broad: this is a best-effort logging write and must
            # not crash the caller on any s3fs/network failure.
            content = '往》{}《文件中写入数据出错》》{}'.format(filepath, e)
            print(content)

    def empty_object(self, bucket, path):
        """
        Delete a single object from a bucket and log the deletion.
        :param bucket: bucket containing the object
        :param path: key of the object to delete
        :return: the boto3 delete() response dict
        """
        res = self.s3.Object(bucket, path).delete()
        print_to_log(f"delete:bucket->{bucket},path->{path}")
        return res