import boto
import boto.s3.connection
# pip install filechunkio
from filechunkio import FileChunkIO
import math
import threading
import os
import queue
import json
import time


class Chunk(object):
    """Descriptor for one slice of a file, for chunked/multipart upload.

    Attributes:
        num:    part number of the chunk (presumably 1-based — confirm with caller).
        offset: byte offset of the chunk within the source file.
        length: number of bytes in the chunk (also exposed as ``len``).
    """
    # Class-level defaults kept for backward compatibility with any code
    # that reads the attributes before/without calling __init__.
    num = 0
    offset = 0
    len = 0

    def __init__(self, n, o, l):
        self.num = n
        self.offset = o
        # BUGFIX: the original assigned only self.length, leaving the
        # class attribute `len` stuck at 0.  Set both spellings so
        # either name works for existing callers.
        self.len = l
        self.length = l


class CONNECTION(object):
    """Convenience wrapper around a boto (v2) S3 connection.

    Targets an S3-compatible endpoint (e.g. Ceph RGW) addressed by raw
    host:port.  All methods print diagnostics instead of raising,
    preserving the original script-style error handling.
    """

    # Accumulated upload time in milliseconds across upload_file() calls.
    time_count = 0.0

    def __init__(self, access_key, secret_key, ip, port, is_secure=False, chrunksize=8 << 20):
        # Chunk size must be at least 8 MB, otherwise the upload process
        # errors out (translated from the original comment).  NOTE: the
        # parameter name keeps the original 'chrunksize' spelling for
        # backward compatibility with keyword callers.
        self.conn = boto.connect_s3(
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            host=ip, port=port,
            is_secure=is_secure,
            calling_format=boto.s3.connection.OrdinaryCallingFormat()
        )
        self.chrunksize = chrunksize
        self.port = port

    # ---- queries ----------------------------------------------------
    def list_all(self):
        """Print every bucket, then each key's mode/owner/size/mtime/name."""
        all_buckets = self.conn.get_all_buckets()
        for bucket in all_buckets:
            print('容器名: %s' % (bucket.name))
            for key in bucket.list():
                print(' '*5, "%-20s%-20s%-20s%-40s%-20s" % (key.mode, key.owner.id,
                      key.size, key.last_modified.split('.')[0], key.name))

    def list_single(self, bucket_name):
        """Print the metadata of every key in *bucket_name*."""
        try:
            single_bucket = self.conn.get_bucket(bucket_name)
        except Exception:
            print('bucket %s is not exist' % bucket_name)
            return
        print('容器名: %s' % (single_bucket.name))
        for key in single_bucket.list():
            print(' ' * 5, "%-20s%-20s%-20s%-40s%-20s" % (key.mode,
                  key.owner.id, key.size, key.last_modified.split('.')[0], key.name))

    # ---- file download ----------------------------------------------
    def dowload_file(self, filepath, key_name, bucket_name):
        """Download *key_name* from *bucket_name* to local *filepath*.

        The method name keeps the original 'dowload' spelling so
        existing callers keep working.
        """
        all_bucket_name_list = [i.name for i in self.conn.get_all_buckets()]
        if bucket_name not in all_bucket_name_list:
            print('Bucket %s is not exist,please try again' % (bucket_name))
            return
        bucket = self.conn.get_bucket(bucket_name)

        all_key_name_list = [i.name for i in bucket.get_all_keys()]
        if key_name not in all_key_name_list:
            print('File %s is not exist,please try again' % (key_name))
            return
        key = bucket.get_key(key_name)

        if not os.path.exists(os.path.dirname(filepath)):
            print('Filepath %s is not exists, sure to create and try again' % (filepath))
            return

        # BUGFIX: the original os.remove() + os.mknod() pair was
        # removed — get_contents_to_filename() creates/truncates the
        # file itself, and os.mknod is Linux-only (fails elsewhere).
        try:
            key.get_contents_to_filename(filepath)
        except Exception:
            print('Get contents to file error')

    # ---- file upload ------------------------------------------------
    def upload_file(self, filepath, key_name, bucket_name):
        """Upload local *filepath* as *key_name*, creating the bucket if
        missing, and append the elapsed time to "<filepath>_write_time.txt".

        NOTE(review): the source file is read in text mode (utf8), so
        binary payloads will fail to decode — confirm callers only
        upload text files.
        """
        try:
            bucket = self.conn.get_bucket(bucket_name)
        except Exception:
            # Bucket missing: create it unconditionally (the original
            # interactive Y/N prompt was already commented out).
            print('bucket %s is not exist, create bucket' % bucket_name)
            self.conn.create_bucket(bucket_name)
            bucket = self.conn.get_bucket(bucket_name)
        key = bucket.new_key(key_name)
        if not os.path.exists(filepath):
            print('File %s does not exist, please make sure you want to upload file path and try again' % (
                key_name))
            return
        with open(filepath, 'r', encoding='utf8') as f:
            data = f.read()
            start_t = time.time()
            key.set_contents_from_string(data)
            end_t = time.time()
            elapsed_ms = (end_t - start_t) * 1000
            self.time_count += elapsed_ms
            content = "upload a " + \
                str(os.path.getsize(filepath)) + "B file cost time: " + \
                str(elapsed_ms) + "ms\n"
            out_path = filepath + "_write_time.txt"
            print(content)
            with open(out_path, "a", encoding='utf8') as f1:
                f1.write(content)

    def delete_file(self, key_name, bucket_name):
        """Delete *key_name* from *bucket_name* (no-op with a message if
        either does not exist)."""
        all_bucket_name_list = [i.name for i in self.conn.get_all_buckets()]
        if bucket_name not in all_bucket_name_list:
            print('Bucket %s is not exist,please try again' % (bucket_name))
            return
        bucket = self.conn.get_bucket(bucket_name)

        all_key_name_list = [i.name for i in bucket.get_all_keys()]
        if key_name not in all_key_name_list:
            print('File %s is not exist,please try again' % (key_name))
            return
        key = bucket.get_key(key_name)

        try:
            bucket.delete_key(key.name)
        except Exception:
            print('Delete file %s error' % (key_name))

    def generate_object_download_urls(self, key_name, bucket_name, valid_time=0):
        """Make *key_name* public-read and print its download URL.

        *valid_time* is forwarded to boto's generate_url(); with
        query_auth=False the expiry is not part of the URL.
        """
        all_bucket_name_list = [i.name for i in self.conn.get_all_buckets()]
        if bucket_name not in all_bucket_name_list:
            print('Bucket %s is not exist,please try again' % (bucket_name))
            return
        bucket = self.conn.get_bucket(bucket_name)

        all_key_name_list = [i.name for i in bucket.get_all_keys()]
        if key_name not in all_key_name_list:
            print('File %s is not exist,please try again' % (key_name))
            return
        key = bucket.get_key(key_name)

        try:
            key.set_canned_acl('public-read')
            download_url = key.generate_url(
                valid_time, query_auth=False, force_http=True)
            # generate_url omits the non-standard port; splice ':port'
            # back in after the scheme://host part.  NOTE: as in the
            # original, nothing is printed when port == 80.
            if self.port != 80:
                parts = download_url.split('/')
                head = u'/'.join(parts[0:3])
                tail = u'/'.join(parts[3:])
                download_url = head + ':%s/' % (str(self.port)) + tail
                print(download_url)
        except Exception as e:
            # BUGFIX: was a silent `except: pass`, hiding ACL/URL errors.
            print('Generate download url for %s error: %s' % (key_name, e))

    def delete_bucket(self, bucket_name):
        """Empty *bucket_name*, then delete the bucket itself.

        BUGFIX: the original file defined delete_bucket twice; the
        second definition shadowed the first and ended by calling
        self.delete_bucket(bucket_name) — i.e. itself — so it recursed
        forever and conn.delete_bucket() was never reached.  The two
        versions are merged here: delete every key, then the bucket.
        """
        all_bucket_name_list = [i.name for i in self.conn.get_all_buckets()]
        if bucket_name not in all_bucket_name_list:
            print('Bucket %s is not exist, no need to delete' % (bucket_name))
            return
        bucket = self.conn.get_bucket(bucket_name)
        # S3 buckets must be empty before they can be deleted.
        bucket.delete_keys([i.name for i in bucket.get_all_keys()])
        try:
            self.conn.delete_bucket(bucket.name)
        except Exception as e:
            # BUGFIX: was a silent `except: pass`.
            print('Delete bucket %s error: %s' % (bucket_name, e))


def set_key(file_path):
    """Load the first access/secret key pair from a JSON credentials file.

    The file must contain ``{"keys": [{"access_key": ..., "secret_key": ...}, ...]}``.
    Returns an ``(access_key, secret_key)`` tuple.
    """
    with open(file_path, 'r', encoding='utf8') as fp:
        first_entry = json.load(fp)['keys'][0]
    return first_entry['access_key'], first_entry['secret_key']


if __name__ == '__main__':
    # Conventions (translated from the original Chinese comments):
    # 1: filepath is the local file path (upload or download target), absolute.
    # 2: bucket_name acts as the directory/index name in the object store.
    # 3: key_name is the object's file name / index inside the bucket.
    access_key, secret_key = set_key(
        '/home/mcloud/ceph_fabric_config/secret.json')
    print(access_key)
    print(secret_key)

    # NOTE(review): 'nmdl' looks like a hostname/placeholder for the
    # S3 gateway — confirm before running.
    ip = 'nmdl'
    port = 8000
    conn = CONNECTION(access_key, secret_key, ip, port)

    # Delete an object, then list the bucket to verify it is gone.
    conn.delete_file('test_4m', 'bucket1')
    conn.list_single('bucket1')
