# -*- coding: utf-8 -*-
import csv
import json
import os
from wt.connector.db_manager.db_untils_tools import DBUntilsTools
from wt.connector.data_connect import DataConnect


class DBS3Manager(DataConnect, DBUntilsTools):
    """S3 test-data helper.

    Generates local csv/json fixture files from random row data, uploads and
    downloads them against an S3 bucket, and cleans up both the bucket and the
    local staging directory.  The S3 client itself comes from
    ``DataConnect.exec_s3_sql()``; random-value helpers come from
    ``DBUntilsTools``.
    """

    # Local staging directory shared by generate/upload/download/cleanup.
    LOCAL_DIR = "../resources/file/s3file"

    def __init__(self, s3_bucket='wt-auto-bucket', region='cn-north-1'):
        """
        :param s3_bucket: name of the working bucket (created if missing)
        :param region: region used as the bucket's LocationConstraint
        """
        super().__init__()
        # NOTE(review): exec_s3_sql() presumably returns a boto3 S3 client —
        # confirm in DataConnect; all methods below assume that interface.
        self.s3_client = self.exec_s3_sql()
        self.s3_bucket = s3_bucket
        self.region = region
        self.create_bucket()  # ensure the working bucket exists up front

    def generate_document(self):
        """
        Generate 1000 row dictionaries covering the test table's column types.

        :return: list of 1000 dicts; ``int_col`` is a 1-based sequence number,
                 the other columns are random values from DBUntilsTools.
        """
        data = []
        for i in range(1, 1001):
            document = {
                "int_col": i,
                # BUGFIX: was 2147483648 (one past INT max). Sibling columns
                # pass the inclusive maximum, so use 2147483647.
                "c_02_integer_col": self.get_random(-2147483648, 2147483647),
                "c_03_double_col": self.get_random_float(-500, 500),
                "c_04_float_col": self.get_random_float(-500, 500),
                "c_05_smallint_col": self.get_random(-32768, 32767),
                "c_06_bigint_col": self.get_random(-9223372036854775808, 9223372036854775807),
                # BUGFIX: was 123; TINYINT's inclusive maximum is 127.
                "c_07_tinyint_col": self.get_random(-128, 127),
                "c_08_decimal_col": self.get_random_float(-500, 500),
                "c_09_varchar_col": self.get_random_string(8),
                "c_10_date_col": self.get_random_date(),
                "c_11_datetime_col": self.get_random_date()
            }
            data.append(document)
        return data

    def generate_file(self, collection_name, text_type):
        """
        Write the generated rows to a local file of the requested type.

        :param collection_name: base name of the file (without extension)
        :param text_type: "csv" or "json"; anything else is rejected
        :return: True on success, False for unsupported types or I/O errors
        """
        data = self.generate_document()

        file_path = f"../resources/file/s3file/{collection_name}.{text_type}"
        path = "../resources/file/s3file"

        try:
            os.makedirs(path, exist_ok=True)

            if text_type == "csv":
                # All rows share the same keys, so the first row's keys
                # double as the CSV header.
                fieldnames = data[0].keys()
                with open(file_path, 'w', newline='', encoding='utf-8') as csvfile:
                    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
                    writer.writeheader()
                    writer.writerows(data)
                return True
            elif text_type == "json":
                with open(file_path, 'w', encoding='utf-8') as jsonfile:
                    json.dump(data, jsonfile, ensure_ascii=False, indent=4)
                return True
            else:
                return False
        except Exception as e:
            print(f"An error occurred: {e}")
            return False

    def create_bucket(self):
        """
        Create the configured S3 bucket if it does not already exist.

        :return: True if the bucket already exists or was created,
                 False if creation failed.
        """
        # BUGFIX: head_bucket RAISES (404/403) when the bucket is missing or
        # inaccessible rather than returning a falsy value, so the original
        # unguarded call crashed __init__ on a fresh environment.  An
        # exception here simply means "fall through and create the bucket".
        try:
            self.s3_client.head_bucket(Bucket=self.s3_bucket)
            return True
        except Exception:
            pass

        try:
            location = {'LocationConstraint': self.region}
            self.s3_client.create_bucket(Bucket=self.s3_bucket, CreateBucketConfiguration=location)
            print(f"Bucket {self.s3_bucket} created successfully")
            return True
        except Exception as e:
            print(f"An error occurred: {e}")
            return False

    def upload_file(self, tableName, text_type):
        """
        Upload the local staged file to S3, keyed by its file name.

        :param tableName: base name of the local file (without extension)
        :param text_type: file extension, e.g. "csv" or "json"
        :return: True on success, False on any error
        """
        try:
            local_path = "../resources/file/s3file/{}.{}".format(tableName, text_type)
            file_name = local_path.split('/')[-1]  # object key = bare file name

            self.s3_client.upload_file(local_path, self.s3_bucket, file_name)
            print(f"Uploaded {local_path} to {file_name}")
            return True
        except Exception as e:
            print(f"An error occurred: {e}")
            return False

    def download_latest_file(self, tableName, text_type):
        """
        Download the most recently modified S3 object whose key starts with
        ``<tableName>.<text_type>`` into the local staging directory.

        :return: True on success, False if no matching object or on error
        """
        try:
            local_path = f"../resources/file/s3file/{tableName}.{text_type}"
            file_name = local_path.split('/')[-1]

            response = self.s3_client.list_objects_v2(Bucket=self.s3_bucket, Prefix=file_name)
            if 'Contents' not in response:
                print("No files found.")
                return False

            files = response['Contents']
            # Pick the newest object among everything matching the prefix.
            latest_file = max(files, key=lambda x: x['LastModified'])
            latest_file_key = latest_file['Key']

            self.s3_client.download_file(self.s3_bucket, latest_file_key, local_path)
            print(f"Downloaded {latest_file_key} to {local_path}")
            return True
        except Exception as e:
            print(f"An error occurred: {e}")
            return False

    def clear_bucket_folder(self, prefix):
        """
        Delete every object in the bucket whose key starts with ``prefix``.

        :return: True when the listing/deletes succeed (also when the prefix
                 matched nothing), False on error
        """
        try:
            response = self.s3_client.list_objects_v2(Bucket=self.s3_bucket, Prefix=prefix)
            if 'Contents' in response:
                for obj in response['Contents']:
                    self.s3_client.delete_object(Bucket=self.s3_bucket, Key=obj['Key'])
                    print(f"Deleted {obj['Key']}")
            return True
        except Exception as e:
            print(f"An error occurred: {e}")
            return False

    def clear_folder(self, tableName, text_type):
        """
        Remove the single staged local file ``<tableName>.<text_type>``.

        :return: True if the file existed and was removed, False otherwise
        """
        try:
            file_path = "../resources/file/s3file/{}.{}".format(tableName, text_type)
            if os.path.exists(file_path):
                os.remove(file_path)
                print(f"Cleared file {file_path}")
                return True
            else:
                print(f"Folder {file_path} does not exist")
                return False
        except Exception as e:
            print(f"An error occurred: {e}")
            return False

    def clear_server_folder(self, tableName, text_type):
        """
        Delete every S3 object whose key starts with ``<tableName>.<text_type>``.

        :return: True on success (including zero matches), False on error
        """
        try:
            prefix = f"{tableName}.{text_type}"
            response = self.s3_client.list_objects_v2(Bucket=self.s3_bucket, Prefix=prefix)
            if 'Contents' in response:
                for obj in response['Contents']:
                    self.s3_client.delete_object(Bucket=self.s3_bucket, Key=obj['Key'])
                    print(f"Deleted {obj['Key']}")
            return True
        except Exception as e:
            print(f"An error occurred: {e}")
            return False

    def get_file_count(self, tableName, text_type):
        """
        Count the lines in the LOCAL staged file ``<tableName>.<text_type>``.

        NOTE(review): despite the original docstring, this never touches S3;
        it reads the local copy.  For csv files the header line is included
        in the count — callers comparing against row counts should subtract 1.

        :return: number of lines, or 0 if the file cannot be read
        """
        try:
            local_path = f"../resources/file/s3file/{tableName}.{text_type}"
            with open(local_path, 'r', encoding='utf-8') as file:
                line_count = sum(1 for line in file)
            return line_count
        except Exception as e:
            print(f"An error occurred: {e}")
            return 0

    def count_documents(self, collection_name):
        """
        Count documents in a collection.

        NOTE(review): this is a MongoDB leftover — ``self.s3_client`` is an
        S3 client and does not support subscripting, so the call raises and
        is swallowed below; in practice this always prints an error and
        returns None.  Kept for interface compatibility; needs a real
        document-store client to work.
        """
        try:
            collection = self.s3_client[collection_name]
            count = collection.count_documents({})
            print(f"Total documents in collection {collection_name}: {count}")
            return count
        except Exception as e:
            print(f"An error occurred: {e}")
            return None

    def drop_collection(self, collection_name):
        """
        Drop a collection.

        NOTE(review): MongoDB leftover like count_documents — the S3 client
        has no ``drop_collection``; the exception is swallowed and this
        always returns False.  Kept for interface compatibility.
        """
        try:
            self.s3_client.drop_collection(collection_name)
            print(f"Dropped collection {collection_name}")
            return True
        except Exception as e:
            print(f"An error occurred: {e}")
            return False


if __name__ == '__main__':
    # BUGFIX: the demo calls previously passed bucket names and full paths to
    # methods whose signatures are (tableName, text_type) — every call after
    # generate_file raised TypeError.  All methods build their own local path
    # and S3 key from the table name + extension, so pass exactly those.
    s3 = DBS3Manager()
    s3.generate_file("test_collection", "csv")        # generate the local CSV file
    s3.upload_file("test_collection", "csv")          # upload it to the bucket
    s3.download_latest_file("test_collection", "csv")  # fetch the newest matching object
    s3.clear_server_folder("test_collection", "csv")  # remove matching objects from S3
    s3.clear_folder("test_collection", "csv")         # remove the local file
    s3.get_file_count("test_collection", "csv")       # count lines in the local file
