import pymysql
from pymysql import cursors
from dbutils.pooled_db import PooledDB
from argon2 import PasswordHasher
import subprocess
import os
from datetime import datetime, timedelta
import time
import threading
import logging
# Module-wide Argon2 hasher shared by hash_password/verify_password.
# Cost parameters (time_cost=2, 16 MiB memory, 1 lane, 32-byte digest)
# trade hashing latency against brute-force resistance.
ph = PasswordHasher(time_cost=2,memory_cost=2**14,parallelism=1,hash_len=32)

# Create the connection pool shared by all query helpers below.
# NOTE(review): credentials are hard-coded; move host/user/password into
# configuration or environment variables before deploying.
pool2 = PooledDB(
    creator = pymysql, # DB driver used to create raw connections
    maxconnections = 6, # maximum connections the pool will hand out
    mincached = 2, # idle connections created at pool start-up
    maxcached = 2, # maximum idle connections kept in the pool
    blocking = True, # block (rather than raise) when the pool is exhausted
    setsession = [], # SQL commands executed at the start of each session
    ping = 0,  # 0 = never ping the server to check liveness
    host = 'localhost', # database host
    port = 3306, # database port
    user = 'root', # database user
    password = '123456', # database password
    database = 'bill_note', # database (schema) name
    charset = 'utf8mb4', # connection character set
    cursorclass = pymysql.cursors.DictCursor, # rows returned as dicts
)



def get_one(sql, args=None):
    '''Execute *sql* with optional *args* and return the first row.

    Returns a dict (DictCursor) or None when the query matches nothing.
    The cursor is closed deterministically instead of being leaked
    until garbage collection (the original never closed it).
    '''
    with pool2.connection() as conn:
        with conn.cursor(cursor=cursors.DictCursor) as cursor:
            cursor.execute(sql, args)
            return cursor.fetchone()
    
def get_all(sql, args=None):
    '''Execute *sql* with optional *args* and return all rows.

    Returns a list/tuple of dicts (DictCursor); empty when the query
    matches nothing. The cursor is closed deterministically instead of
    being leaked until garbage collection (the original never closed it).
    '''
    with pool2.connection() as conn:
        with conn.cursor(cursor=cursors.DictCursor) as cursor:
            cursor.execute(sql, args)
            return cursor.fetchall()

def execute(sql, args=None):
    '''Execute an INSERT/UPDATE/DELETE statement and commit.

    Returns the number of affected rows (backward-compatible: previous
    callers ignored the implicit None return). On any error the
    transaction is rolled back before re-raising, so a failed statement
    does not leave the pooled connection stuck mid-transaction.
    '''
    with pool2.connection() as conn:
        try:
            with conn.cursor(cursor=cursors.DictCursor) as cursor:
                affected = cursor.execute(sql, args)
            conn.commit()
            return affected
        except Exception:
            conn.rollback()
            raise

def hash_password(password):
    '''Return an Argon2 hash string (salted) for *password*.'''
    hashed = ph.hash(password)
    return hashed

def verify_password(password, hash):
    '''Return True if *password* matches the Argon2 *hash*, else False.

    Any verification failure (mismatch, malformed hash, bad input type)
    is reported as False rather than raised.
    '''
    # NOTE: the parameter name shadows the builtin ``hash``; kept as-is
    # for backward compatibility with keyword-argument callers.
    try:
        return bool(ph.verify(hash, password))
    except Exception:
        return False

def backup_database(backup_dir='C:\\bill_system\\backup\\directory'):
    '''Dump the configured MySQL database to a timestamped .sql file.

    Creates *backup_dir* if needed, runs mysqldump, then prunes backups
    older than 30 days via clean_old_backups. Returns a human-readable
    status string for both success and failure; failures are logged and
    never raised.
    '''
    try:
        # Ensure the backup directory exists.
        os.makedirs(backup_dir, exist_ok=True)

        # Timestamped file name, e.g. backup_20240101_120000.sql
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        backup_file = os.path.join(backup_dir, f'backup_{timestamp}.sql')

        # NOTE(review): passing the password via -p exposes it in the
        # process list; prefer the MYSQL_PWD env var or a defaults file.
        command = [
            'mysqldump',
            '-h', pool2._kwargs['host'],
            '-P', str(pool2._kwargs['port']),
            '-u', pool2._kwargs['user'],
            f"-p{pool2._kwargs['password']}",
            pool2._kwargs['database'],
            f'--result-file={backup_file}'
        ]

        # check=True raises CalledProcessError on a non-zero exit, so the
        # original `if result.returncode != 0` branch was dead code.
        subprocess.run(command, check=True, capture_output=True, text=True)

        # Prune backups older than 30 days.
        clean_old_backups(backup_dir)

        logging.info(f'数据库备份成功: {backup_file}')
        return f'数据库备份成功: {backup_file}'
    except subprocess.CalledProcessError as e:
        # Surface mysqldump's stderr — the original lost it on this path.
        error_msg = f'数据库备份失败: {e.stderr or e}'
        logging.error(error_msg)
        return error_msg
    except Exception as e:
        error_msg = f'数据库备份失败: {str(e)}'
        logging.error(error_msg)
        return error_msg

def clean_old_backups(backup_dir, days=30):
    '''Delete backup_*.sql files in *backup_dir* older than *days* days.

    File age comes from os.path.getctime — creation time on Windows but
    inode-change time on Unix; acceptable for pruning, noted for review.
    Uses logging (not print) for consistency with backup_database, and a
    file disappearing mid-scan no longer aborts the whole sweep.
    '''
    cutoff = datetime.now() - timedelta(days=days)
    for filename in os.listdir(backup_dir):
        # Only touch files this module itself created.
        if not (filename.startswith('backup_') and filename.endswith('.sql')):
            continue
        file_path = os.path.join(backup_dir, filename)
        try:
            file_time = datetime.fromtimestamp(os.path.getctime(file_path))
            if file_time < cutoff:
                os.remove(file_path)
                logging.info(f'已删除旧备份文件: {file_path}')
        except OSError as e:
            # Best-effort cleanup: log and continue with the next file.
            logging.warning(f'清理备份文件失败: {file_path}: {e}')

def scheduled_backup_and_cleanup(backup_dir='/bill_system/backup/directory', interval=86400):
    '''Run backup_database forever, sleeping *interval* seconds between runs.

    Default interval is 86400 s (24 h). NOTE(review): this default
    backup_dir differs from backup_database's own default
    ('C:\\bill_system\\backup\\directory') — confirm which is intended.
    '''
    while True:
        backup_database(backup_dir)
        # Sleep until the next scheduled run (24 h by default).
        time.sleep(interval)

def init_backup_thread():
    '''Start the periodic backup loop in a background daemon thread.

    The daemon flag lets the process exit without waiting for the
    infinite backup loop to finish.
    '''
    worker = threading.Thread(
        target=scheduled_backup_and_cleanup,
        daemon=True,
    )
    worker.start()
