import json
import random
import threading
import queue
import time
import redis
import hashlib
import requests
from dotenv import load_dotenv
import os
from minio import Minio
from minio.error import S3Error
from utils.customlogger import CustomLogger

# Load environment variables from a .env file (Redis/MinIO connection
# settings, thread counts, proxy configuration) before any os.getenv reads.
load_dotenv()


class DownAndUpload:
    """Threaded download/upload worker.

    Pops JSON tasks (containing at least a ``"url"`` key) from a Redis list,
    downloads each file over HTTP (optionally through rotating proxies cached
    from Redis) and uploads the result to MinIO. Download and upload run in
    separate thread pools connected by an in-process queue.
    """

    def __init__(self):
        # Shared client used by the main/proxy paths; download worker threads
        # build their own client so blocking brpop calls are not shared.
        self.redis_client = self._make_redis_client()
        self.redis_key = os.getenv("REDIS_KEY", "download_list")
        self.files_dir = self.get_file_dir()
        self.upload_queue = queue.Queue()
        self.num_download_threads = int(os.getenv("DOWNLOAD", 2))
        self.num_upload_threads = int(os.getenv("UPLOAD", 2))
        # Cached proxy list, refreshed from Redis at most every
        # refresh_interval seconds (see get_proxy / refresh_proxies).
        self.proxies = []
        self.refresh_interval = int(os.getenv('PROXY_REFRESH_INTERVAL', 180))  # refresh interval (seconds)
        self.last_refresh = 0
        self.proxy_redis_key = os.getenv("PROXY_REDIS_KEY", "pandas_proxy")
        self.logger = CustomLogger('main').get_logger()
        self.minio_secure = os.getenv("MINIO_SECURE", "False") == "True"
        self.client = Minio(
                os.getenv("MINIO_ENDPOINT", "localhost:9000"),
                access_key=os.getenv("MINIO_ACCESS_KEY", "minioadmin"),
                secret_key=os.getenv("MINIO_SECRET_KEY", "minioadmin"),
                secure=self.minio_secure
            )
        self.download_type = os.getenv("DOWNLOAD_TYPE", "1")
        self.upload_type = os.getenv("UPLOAD_TYPE", "1")
        # Created here (not in start_threads) so download()/upload() can never
        # hit an AttributeError if they run before start_threads().
        self.shutdown_event = threading.Event()

    @staticmethod
    def _make_redis_client():
        """Build a Redis client from environment variables."""
        return redis.Redis(
            host=os.getenv("REDIS_HOST", "localhost"),
            port=int(os.getenv("REDIS_PORT", 6379)),
            db=int(os.getenv("REDIS_DB", 0)),
            password=os.getenv("REDIS_PASSWORD", None),
            decode_responses=True  # decode returned values to str automatically
        )

    def get_file_dir(self):
        """Return the absolute path of the ``files`` directory next to this module."""
        current_dir = os.path.dirname(os.path.abspath(__file__))
        return os.path.join(current_dir, 'files')

    def download(self):
        """Worker loop: pop a task from Redis, download it, queue it for upload.

        Failed downloads are pushed back onto ``self.redis_key`` for retry;
        tasks that raise unexpectedly are parked on ``error_list``. The loop
        exits when ``self.shutdown_event`` is set.
        """
        # One client per worker thread; the blocking brpop below must not be
        # shared with other threads.
        redis_client1 = self._make_redis_client()
        while not self.shutdown_event.is_set():
            try:
                # Blocking pop with a timeout so the loop can observe
                # shutdown_event periodically instead of hanging forever.
                result = redis_client1.brpop(self.redis_key, timeout=30)
            except redis.ConnectionError:
                time.sleep(60)  # wait before retrying the connection
                continue
            if result is None:
                continue  # timed out, poll again
            redis_data = result[1]
            try:
                data = json.loads(redis_data)
                filepath = self.download_file(data)
                if filepath:
                    data["filepath"] = filepath
                    self.upload_queue.put(data)
                else:
                    # Requeue on the configured list (was hard-coded to
                    # "download_list", which broke custom REDIS_KEY setups).
                    redis_client1.lpush(self.redis_key, redis_data)
                    self.logger.error(f"下载失败，重新推送{redis_data}到redis，key为{self.redis_key}")
            except redis.ConnectionError:
                time.sleep(60)  # wait before retrying the connection
                continue
            except Exception as e:
                redis_client1.lpush("error_list", redis_data)
                self.logger.error(f"下载失败，报错: {e},重新推送{redis_data}到redis，key为error_list")

    def upload(self):
        """Worker loop: take downloaded files off the queue and upload them.

        Successfully uploaded files are deleted locally. The loop exits when
        ``self.shutdown_event`` is set.
        """
        while not self.shutdown_event.is_set():
            if self.upload_type == "1":
                try:
                    data = self.upload_queue.get(timeout=30)  # timeout so shutdown is observed
                except queue.Empty:
                    continue  # loop condition re-checks shutdown_event
                file_path = data.get("filepath")
                try:
                    status = self.upload_file_to_minio(file_path)
                    if status:
                        self.logger.info(f"上传 {file_path} 成功, 并删除文件")
                        os.remove(file_path)
                    else:
                        self.logger.error(f"上传 {file_path} 失败")
                finally:
                    self.upload_queue.task_done()
            else:
                # upload_type "2" (and any other value) is not implemented;
                # sleep so the loop does not busy-spin at 100% CPU.
                time.sleep(1)

    def start_threads(self):
        """Spawn the download/upload worker threads, then block until
        KeyboardInterrupt; on interrupt, signal shutdown and join workers."""
        os.makedirs(self.files_dir, exist_ok=True)

        # Allow restarting after a previous shutdown.
        self.shutdown_event.clear()

        # Start download threads.
        self.logger.info(f"开启 {self.num_download_threads} 个线程下载")
        download_threads = []
        for i in range(self.num_download_threads):
            t = threading.Thread(target=self.download, name=f"Download-{i}")
            t.daemon = False  # non-daemon: let workers drain before exit
            t.start()
            download_threads.append(t)

        # Start upload threads.
        self.logger.info(f"开始 {self.num_upload_threads} 线程上传")
        upload_threads = []
        for i in range(self.num_upload_threads):
            t = threading.Thread(target=self.upload, name=f"Upload-{i}")
            t.daemon = False  # non-daemon: let workers drain before exit
            t.start()
            upload_threads.append(t)

        # Wait for a termination signal.
        try:
            while not self.shutdown_event.is_set():
                time.sleep(1)
        except KeyboardInterrupt:
            self.logger.error("Shutting down...")
            self.shutdown_event.set()

        # Workers poll with 30s timeouts, so they may outlive this bounded
        # join; the interpreter still waits on non-daemon threads at exit.
        for t in download_threads + upload_threads:
            t.join(timeout=5)

    def generate_md5(self, url):
        """Return the hex MD5 digest of *url* (used as a stable file name)."""
        md5 = hashlib.md5()
        # hashlib requires bytes input.
        md5.update(url.encode('utf-8'))
        return md5.hexdigest()

    def download_file(self, data):
        """Download ``data["url"]`` into ``self.files_dir``.

        The local file name is the MD5 of the URL plus the URL's extension
        (falling back to ``DOWNLOAD_EXTENSION`` when the URL has none).
        Returns the local file path on success, ``None`` on any failure.
        """
        if self.download_type == "1":
            try:
                self.logger.info(f"开始下载文件 {data.get('url')}")
                url = data.get("url")
                # Strip fragment and query string before taking the extension,
                # so "a.pdf?token=x" yields ".pdf" rather than ".pdf?token=x".
                filename = os.path.basename(url.split("#")[0].split("?")[0])
                # os.path.splitext never raises; it returns "" for an
                # extension-less URL, so fall back to the configured default.
                extension = os.path.splitext(filename)[1] or os.getenv("DOWNLOAD_EXTENSION", ".txt")

                filepath = os.path.join(self.files_dir, self.generate_md5(url) + extension)
                headers = {
                    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
                    "accept-language": "zh-CN,zh;q=0.9",
                    "cache-control": "no-cache",
                    "pragma": "no-cache",
                    "priority": "u=0, i",
                    "upgrade-insecure-requests": "1",
                    "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36"
                }
                # Context manager closes the streamed response (and releases
                # the connection) on every path, including non-200 and errors.
                with requests.get(url, headers=headers, stream=True,
                                  proxies=self.get_proxy(), timeout=30) as response:
                    if response.status_code != 200:
                        return None
                    with open(filepath, 'wb') as f:
                        for chunk in response.iter_content(chunk_size=8192):
                            # Skip keep-alive chunks, write the rest.
                            if chunk:
                                f.write(chunk)
                self.logger.info(f"下载成功，返回{filepath}")
                return filepath
            except Exception as e:
                self.logger.error(f"报错{e}")
                return None
        elif self.download_type == "2":
            # TODO: alternative download strategy not implemented yet.
            pass
        else:
            pass

    def get_proxy(self):
        """Return a requests-style proxies dict with a random cached proxy.

        Refreshes the cache from Redis when it is empty or older than
        ``refresh_interval``. Returns ``{}`` (direct connection) when no
        proxies are available.
        """
        username = os.getenv("PROXY_IP_USERNAME", "")
        password = os.getenv("PROXY_IP_PASSWORD", "")

        # NOTE(review): self.proxies/last_refresh are touched from multiple
        # download threads without a lock (see the commented-out proxy_lock);
        # list replacement is atomic in CPython, but confirm this is intended.
        if not self.proxies or (time.time() - self.last_refresh) > self.refresh_interval:
            self.refresh_proxies()

        if self.proxies:
            # Pick a random proxy and embed the credentials in the URL.
            proxy = f"http://{username}:{password}@" + random.choice(self.proxies)
            self.logger.info(f"成功返回代理：{proxy}")
            return {"http": proxy, "https": proxy}
        return {}

    def refresh_proxies(self):
        """Reload the proxy list from the Redis sorted set."""
        # zrange already yields decoded strings (decode_responses=True);
        # list() is enough — no comprehension needed.
        self.proxies = list(self.redis_client.zrange(self.proxy_redis_key, 0, -1))
        self.last_refresh = time.time()
        if not self.proxies:
            self.logger.error("No proxies found in Redis")

    def upload_file_to_minio(self, file_path, bucket_name="crawler", object_name=None):
        """Upload a local file to MinIO.

        Args:
            file_path: local file path.
            bucket_name: MinIO bucket name (created if missing).
            object_name: object name in MinIO (defaults to the file's basename).

        Returns:
            True on success, False on any MinIO or unexpected error.
        """
        try:
            # Default the object name to the file name.
            if object_name is None:
                object_name = os.path.basename(file_path)

            # Create the bucket if it does not exist yet.
            if not self.client.bucket_exists(bucket_name):
                self.client.make_bucket(bucket_name)
                self.logger.info(f"创建存储桶: {bucket_name}")

            # Upload the file.
            self.client.fput_object(
                bucket_name,
                object_name,
                file_path
            )

            self.logger.info(f"文件{file_path}成功上传")

            # # Optionally fetch a presigned URL for the object:
            # file_url = client.presigned_get_object(bucket_name, object_name)
            # self.logger.info(f"文件访问 URL: {file_url}")

            return True

        except S3Error as e:
            self.logger.error(f"MinIO 错误: {e}")
            return False
        except Exception as e:
            self.logger.error(f"上传失败: {str(e)}")
            return False

if __name__ == "__main__":
    # Script entry point: build the worker and run until interrupted.
    DownAndUpload().start_threads()