from concurrent.futures import ThreadPoolExecutor
from sqlalchemy import or_, and_
from collections import defaultdict
import redis
import logging
import os
import sys
import requests

# Make the project root importable when this script is run directly.
project_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(project_path)

from apscheduler.schedulers.blocking import BlockingScheduler
from config import REDIS_DATABASE_KEY
from enumobjects.general_enum import YesOrNoEnum
from libs.redis_wrapper import RedisWrapper
from models.annotation_models.annotation_model import Annotation
from models.data_models.data_model import Data
from models.project_models.project_model import Project
from utils.common_util import CommonUtil
from utils.db_util import CreateEngine

# --- Database engine and session ------------------------------------------
engine = CreateEngine()
db_session = engine.get_engine(method="label")

# --- Logging ---------------------------------------------------------------
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# --- APScheduler executor configuration ------------------------------------
# APScheduler accepts plain config dicts with a 'type' key, so there is no
# need to instantiate real concurrent.futures ThreadPoolExecutor objects just
# to read their private `_max_workers` attribute (the previous code created
# two pools that were never used and never shut down).
DEFAULT_MAX_WORKERS = 20
PROCESS_MAX_WORKERS = 5

executors = {
    'default': {'type': 'threadpool', 'max_workers': DEFAULT_MAX_WORKERS},
    'processpool': {'type': 'processpool', 'max_workers': PROCESS_MAX_WORKERS},
}
job_defaults = {
    'coalesce': False,   # do not merge missed runs into a single invocation
    'max_instances': 5,  # allow up to 5 concurrent instances of the same job
}

def data_rescan():
    """Re-scan recent projects for unlabeled images and refresh their state.

    For every non-deleted project created within the last week (excluding the
    special project 12):

    * set ``project.state`` to 0 if the project still has conforming images
      without any annotation, or 1 if everything is labeled;
    * push unlabeled data IDs that are not already queued onto that project's
      Redis list (key: ``REDIS_DATABASE_KEY + project_id``).

    Runs as a scheduled job; all errors are logged rather than raised.
    """
    one_week_ago = CommonUtil.get_timestamp() - (7 * 24 * 60 * 60)

    # Projects created within the last week (project 12 is excluded here).
    projects = db_session.query(Project).filter(
        Project.is_delete == YesOrNoEnum.NO.value,
        Project.create_time >= one_week_ago,
        Project.project_id != 12,
    ).all()

    if not projects:
        logger.info("No projects found within the last week.")
        return

    project_ids = [project.project_id for project in projects]

    # Subquery: data IDs that already carry a non-deleted annotation.
    annotated_subquery = db_session.query(Annotation.data_id).filter(
        Annotation.project_id.in_(project_ids),
        Annotation.is_delete == YesOrNoEnum.NO.value,
    ).distinct()

    # Main query: conforming images in those projects with no annotation yet.
    unlabeled_images = db_session.query(Data.data_id, Data.project_id).filter(
        Data.project_id.in_(project_ids),
        Data.conform == 1,
        ~Data.data_id.in_(annotated_subquery),
    ).all()

    # Group unlabeled data IDs by project for the state update and Redis push.
    images_by_project = defaultdict(list)
    for data_id, project_id in unlabeled_images:
        images_by_project[project_id].append(data_id)

    # Update project states in one transaction:
    # 0 = still has unlabeled images, 1 = fully labeled.
    try:
        # NOTE(review): begin() after the queries above assumes the session is
        # not already in an implicit transaction — confirm db_util's session
        # configuration (would raise InvalidRequestError on SQLAlchemy 1.4+
        # default sessions).
        db_session.begin()
        for project in projects:
            project.state = 0 if images_by_project[project.project_id] else 1
        db_session.commit()
    except Exception as e:
        logger.error("Error updating project states: %s", e)
        db_session.rollback()

    # Queue newly discovered unlabeled data IDs in Redis, skipping IDs that
    # are already present in each project's list.
    try:
        with RedisWrapper.r.pipeline() as pipe:
            for project_id, data_ids in images_by_project.items():
                key = REDIS_DATABASE_KEY + str(project_id)
                # Build a set for O(1) membership tests (previously an
                # O(n^2) scan over a list per data_id).
                existing_ids = {
                    int(elem.decode('utf-8'))
                    for elem in RedisWrapper.lrange(key, 0, -1)
                }
                logger.debug("project %s pending data_ids: %s", project_id, data_ids)

                new_data_ids = [d for d in data_ids if d not in existing_ids]
                if new_data_ids:
                    pipe.rpush(key, *new_data_ids)
            pipe.execute()
    except Exception as e:
        logger.error("Error pushing data IDs to Redis: %s", e)

# def periodic_upload():
#     project_id = 12
#     path = "/nfs/imgs/label/data/koutu-1"
#     url = 'https://label.idimage.cn/api/label/update/project'
#
#     params = {
#         'project_id': project_id,
#         'path': path
#     }
#     # 验证 project_id 是否存在于数据库中
#     project = db_session.query(Project).filter(Project.project_id == project_id,
#                                                Project.is_delete == YesOrNoEnum.NO.value).first()
#     if not project:
#         logger.error(f"项目 {project_id} 不存在或已被删除")
#         return
#
#     # 验证路径是否为绝对路径
#     if not os.path.isabs(path):
#         logger.error(f"路径 {path} 不是绝对路径")
#         return
#
#     # 验证路径是否存在
#     if not os.path.exists(path):
#         logger.error(f"路径 {path} 不存在")
#         return
#
#     # 验证路径是否有读取权限
#     if not os.access(path, os.R_OK):
#         logger.error(f"路径 {path} 没有读取权限")
#         return
#
#     # Set the request headers.
#     # SECURITY: this JWT bearer token is hardcoded in source control —
#     # revoke it and load credentials from config/env before re-enabling
#     # this function.
#     headers = {
#         'Content-Type': 'application/json',
#         'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3MjY5OTYxNDcsIm5hbWUiOiJcdTllYzRcdTRmOWRcdTY4NTAiLCJ1c2VybmFtZSI6InlpdG9uZy5odWFuZyIsIm1vYmlsZSI6IjE4NTMzNjM4NDg0IiwicHJvamVjdF9saXN0IjpbIjExMiJdLCJtYWlsIjoieWl0b25nLmh1YW5nQGtpbmdzaGEuY29tIiwiYXBwIjoiMTEyIiwic3ViIjoiMTEyIn0.Dqbj-4eDvnkNQwsUWLN3psRl1LA2Fmv24-2RqPZxREE'
#     }
#
#     try:
#         logger.info(f'准备发送请求到 {url}, 参数: {params}')
#         response = requests.post(url, json=params, headers=headers)
#         logger.info(f'定时上传结果: {response.status_code}, 响应: {response.text}')
#     except Exception as e:
#         logger.error(f'定时上传失败: {e}')
#
#     # 调试信息: 打印数据库查询结果
#     if project:
#         logger.info(f"项目 {project_id} 存在于数据库中,项目名称: {project.name}")
#     else:
#         logger.error(f"项目 {project_id} 不存在或已被删除")


# Run data_rescan every 10 minutes on a blocking scheduler; start() blocks
# the main thread until the process is interrupted.
scheduler = BlockingScheduler(executors=executors, job_defaults=job_defaults)
scheduler.add_job(data_rescan, 'interval', minutes=10)

try:
    scheduler.start()
except (KeyboardInterrupt, SystemExit):
    # Normal shutdown path (Ctrl-C / service stop); removed the redundant
    # `pass` that followed the log call.
    logger.info("Scheduler stopped.")
