import boto3
import json
import sys
import os

class S3Operation:
    """Thin convenience wrapper around the boto3 S3 client/resource APIs.

    SECURITY NOTE(review): the default configuration below embeds AWS
    access keys directly in source. These keys should be rotated and the
    configuration moved to environment variables / IAM roles; the embedded
    defaults are kept only for backward compatibility with existing callers.
    """

    def __init__(self, conf=None):
        """Create S3 client and resource handles.

        :param conf: optional dict with keys ``region``,
            ``aws_access_key_id`` and ``aws_secret_access_key``. When
            omitted, falls back to the historical embedded configuration
            so existing ``S3Operation()`` callers keep working.
        """
        if conf is None:
            # FIXME(security): hard-coded credentials — move to env/IAM.
            conf = {
                "aws_access_key_id": "AKIAP3M5E74QM2FNBZWQ",
                "output": "json",
                "region": "cn-north-1",
                "aws_secret_access_key": "JoztMS/9pKoigcGINzSDSUrr1gKjwSxaLJ15Ltlb",
            }
        credentials = {
            "region_name": conf["region"],
            "aws_access_key_id": conf["aws_access_key_id"],
            "aws_secret_access_key": conf["aws_secret_access_key"],
        }
        self.client = boto3.client("s3", **credentials)
        self.resource = boto3.resource("s3", **credentials)

    def upload_file(self, file_path, s3_bucket, s3_key):
        """Upload a single local file to ``s3://s3_bucket/s3_key``."""
        # ExtraArgs = {'ACL': 'public-read'}
        print("upload file: " + file_path + " => " + "s3://" + s3_bucket + "/" + s3_key)

        response = self.client.upload_file(
            file_path,
            s3_bucket,
            s3_key,
            # ExtraArgs
        )
        return response

    def upload_dir(self, dir_path, s3_bucket, s3_key):
        """Recursively upload a local directory tree under ``s3_key``."""
        for file_name in os.listdir(dir_path):
            sub_path = os.path.join(dir_path, file_name)
            # S3 keys always use '/' — using os.sep here would produce
            # backslash-separated keys on Windows.
            sub_s3_key = s3_key + "/" + file_name
            if os.path.isdir(sub_path):
                self.upload_dir(sub_path, s3_bucket, sub_s3_key)
            else:
                self.upload_file(sub_path, s3_bucket, sub_s3_key)

    def download_file(self, s3_bucket, s3_key, file_path):
        """Download ``s3://s3_bucket/s3_key`` to a local path."""
        response = self.client.download_file(
            s3_bucket,
            s3_key,
            file_path)
        return response

    def copy(self, src_backet, src_key, tar_backet, tar_key):
        """Server-side copy of an object, making the target public-read."""
        copy_source = {
            'Bucket': src_backet,
            'Key': src_key
        }
        extra_args = {'ACL': 'public-read'}
        response = self.client.copy(
            copy_source,
            tar_backet,
            tar_key,
            ExtraArgs=extra_args,
        )
        return response

    def read_s3_file(self, s3_bucket, file_key):
        """Read an S3 object and parse it as JSON.

        Best-effort: returns ``{}`` on any failure (missing object,
        network error, malformed JSON) rather than raising.
        """
        try:
            obj = self.resource.Object(bucket_name=s3_bucket, key=file_key)
            response = obj.get()
            data = json.loads(response['Body'].read().decode())
        except Exception:
            data = {}
        return data

    def md5_file(self, tar_backet, tar_key):
        """Return the object's ETag with surrounding quotes stripped.

        NOTE(review): the ETag equals the MD5 only for non-multipart,
        unencrypted uploads — treat it as an opaque checksum otherwise.
        """
        md5sum = self.client.head_object(Bucket=tar_backet,
                                         Key=tar_key)['ETag'][1:-1]
        return md5sum

    def info_file(self, tar_backet, tar_key):
        """Return the full head_object metadata for an object."""
        info = self.client.head_object(Bucket=tar_backet,
                                       Key=tar_key)
        return info

    def locus_test(self):
        """Trivial liveness probe; always returns 1111."""
        return 1111

    def s3_file_info(self, s3_bucket, s3_key):
        """Return {'md5': etag, 'last': 'YYYY-mm-dd HH:MM:SS'} for an object.

        Best-effort: on any failure both fields are empty strings.
        (The previous ``try/finally`` + ``return`` swallowed *all*
        exceptions, including KeyboardInterrupt; ``except Exception``
        keeps the best-effort contract without that hazard.)
        """
        md5sum = ''
        last_modified = ''
        try:
            info = self.client.head_object(Bucket=s3_bucket, Key=s3_key)
            md5sum = info['ETag'][1:-1]
            last_modified = info['LastModified'].strftime('%Y-%m-%d %H:%M:%S')
        except Exception:
            pass
        return {"md5": md5sum, 'last': last_modified}

    def s3_file_info_online(self, s3_bucket, s3_key):
        """Same as :meth:`s3_file_info` but against the us-east-1 account.

        FIXME(security): hard-coded online credentials — move to env/IAM.
        """
        md5sum = ''
        last_modified = ''
        try:
            online_client = boto3.client('s3', region_name='us-east-1', aws_access_key_id='AKIAJTAPWNGELBAIQ3EA', aws_secret_access_key='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')
            info = online_client.head_object(Bucket=s3_bucket, Key=s3_key)
            md5sum = info['ETag'][1:-1]
            last_modified = info['LastModified'].strftime('%Y-%m-%d %H:%M:%S')
        except Exception:
            pass
        return {"md5": md5sum, 'last': last_modified}

# AWS_OFFLINE = {
#     "aws_access_key_id": "AKIAP3M5E74QM2FNBZWQ",
#     "output": "json",
#     "region": "cn-north-1",
#     "aws_secret_access_key": "JoztMS/9pKoigcGINzSDSUrr1gKjwSxaLJ15Ltlb",
# }

# S3Operation(AWS_OFFLINE)
