import asyncio
import logging
from datetime import datetime, timezone, timedelta # Added timedelta for date trigger
from typing import Dict, List, Any, Optional

import pika # For RabbitMQ publisher
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel, Field
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from apscheduler.triggers.date import DateTrigger # Added DateTrigger
from apscheduler.jobstores.base import JobLookupError
from contextlib import asynccontextmanager

# --- Configuration ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
APS_SCHEDULER_TIMEZONE = "Asia/Shanghai"  # target timezone for all APScheduler triggers

RABBITMQ_HOST = 'localhost'
# BUG FIX: 15672 is the RabbitMQ management-UI (HTTP) port; AMQP client
# connections made by pika use port 5672 by default.
RABBITMQ_PORT = 5672
RABBITMQ_USER = 'guest'
RABBITMQ_PASS = '123456'
RABBITMQ_MAIN_EXCHANGE = 'tasks_main_exchange'  # main exchange all task IDs are published to
RABBITMQ_EXCHANGE_TYPE = 'topic'

# --- RabbitMQ publisher ---
class RabbitMQPublisher:
    """Lazily-connecting wrapper around a blocking pika connection.

    Declares a durable exchange on (re)connect and publishes persistent
    messages. All methods are blocking; callers running on an asyncio
    event loop must offload them to a thread (see asyncio.to_thread
    usage elsewhere in this module).
    """

    def __init__(self, host, port, exchange_name, exchange_type):
        self.host = host
        self.port = port
        self.exchange_name = exchange_name
        self.exchange_type = exchange_type
        self._connection = None
        self._channel = None
        # Credentials come from module-level constants; adjust if the broker
        # uses different auth.
        self.credentials = pika.PlainCredentials(RABBITMQ_USER, RABBITMQ_PASS)

    def _connect(self):
        """Ensure both connection AND channel are open; re-declare the exchange when re-opened."""
        reconnected = False
        if not self._connection or self._connection.is_closed:
            parameters = pika.ConnectionParameters(self.host, self.port, '/', self.credentials)
            self._connection = pika.BlockingConnection(parameters)
            reconnected = True
        # BUG FIX: the original only checked the connection, so a channel
        # closed by the broker (while the TCP connection stayed open) was
        # never re-opened and every subsequent publish failed.
        if reconnected or not self._channel or self._channel.is_closed:
            self._channel = self._connection.channel()
            self._channel.exchange_declare(
                exchange=self.exchange_name,
                exchange_type=self.exchange_type,
                durable=True
            )
            logger.info(f"RabbitMQ Publisher：连接和交换机 '{self.exchange_name}' 声明.")

    def publish_message(self, routing_key: str, message_body: str):
        """Publish a persistent UTF-8 message; re-raises on failure after discarding the stale connection."""
        try:
            self._connect()  # ensure connection + channel
            self._channel.basic_publish(
                exchange=self.exchange_name,
                routing_key=routing_key,
                body=message_body.encode('utf-8'),
                properties=pika.BasicProperties(
                    delivery_mode=pika.spec.PERSISTENT_DELIVERY_MODE,  # survive broker restarts
                )
            )
            logger.info(f"RabbitMQ Publisher: Message sent to exchange '{self.exchange_name}' with routing key '{routing_key}'. Body: '{message_body}'")
        except Exception as e:
            logger.error(f"RabbitMQ Publisher: Failed to send message. Error: {e}", exc_info=True)
            # Drop references so the next publish attempt reconnects cleanly
            # instead of reusing a half-broken connection/channel pair.
            self._connection = None
            self._channel = None
            raise

    def close(self):
        """Close channel and connection if still open (safe to call repeatedly)."""
        if self._channel and self._channel.is_open:
            self._channel.close()
        if self._connection and self._connection.is_open:
            self._connection.close()
        logger.info("RabbitMQ Publisher: Connection closed.")

# Single module-level publisher instance shared by all scheduled jobs.
mq_publisher = RabbitMQPublisher(
    host=RABBITMQ_HOST,
    port=RABBITMQ_PORT,
    exchange_name=RABBITMQ_MAIN_EXCHANGE,
    exchange_type=RABBITMQ_EXCHANGE_TYPE,
)

# --- Task-configuration data models ---
class DBConnectionConfig(BaseModel):
    """Database connection parameters.

    Not referenced by any active code in this module — it appears only in
    the commented-out source/target fields of TaskConfig; consumers are
    expected to own DB access.
    """
    host: str
    port: int
    user: str
    password: str
    database: str

class TaskConfig(BaseModel):
    """Publisher-side description of one schedulable task.

    The publisher only needs enough information to build an APScheduler
    trigger and an MQ routing key; consumers resolve execution details
    (DB connections, SQL) from the task id they receive.
    """
    id: str = Field(..., description="任务的唯一ID, 例如 'erp_to_tpl_sales_orders'")
    description: Optional[str] = None
    # DB/SQL details intentionally left to consumers (keyed by task id):
    # source_db: DBConnectionConfig
    # target_db: DBConnectionConfig
    # source_sql: str
    # target_insert_sql: str
    # target_update_sql: Optional[str] = None
    trigger_type: str = Field(..., description="APScheduler触发器类型: 'interval', 'cron', or 'date'")
    trigger_args: Dict[str, Any] = Field(..., description="触发器参数")
    is_enabled_on_startup: bool = True  # if False, the job is only created via the API or triggered manually
    mq_routing_key_prefix: str = Field(..., description="消息队列路由键前缀, e.g., 'erp.tpl.sales'")  # used as the full routing key by the publish wrapper

# --- Mock task-configuration store (in a real app this would come from a DB or config file) ---
# NOTE: to keep things simple the publisher only triggers and sends task ids;
# consumers are responsible for actual execution.
TASK_CONFIGURATIONS: Dict[str, TaskConfig] = {
    "erp_to_tpl_sales_orders": TaskConfig(
        id="erp_to_tpl_sales_orders",
        description="从ERP同步销售订单到TPL系统",
        trigger_type="interval",
        trigger_args={"seconds": 300}, # every 5 minutes
        is_enabled_on_startup=True,
        mq_routing_key_prefix="erp.tpl.sales_orders" # full prefix for this task type
    ),
    "tpl_to_wms_shipments": TaskConfig(
        id="tpl_to_wms_shipments",
        description="从TPL同步发货信息到WMS系统",
        trigger_type="cron",
        trigger_args={"hour": "2", "minute": "15"}, # daily at 02:15
        is_enabled_on_startup=True,
        mq_routing_key_prefix="tpl.wms.shipments"
    ),
    "wms_to_tpl_inventory_update": TaskConfig(
        id="wms_to_tpl_inventory_update",
        description="从WMS回写库存到TPL系统 (手动触发或一次性)",
        trigger_type="date", # one-shot task example
        trigger_args={"run_date": (datetime.now(timezone.utc) + timedelta(minutes=1)).isoformat()}, # runs 1 minute after module import (UTC)
        is_enabled_on_startup=False, # disabled at startup; enable via API or trigger manually
        mq_routing_key_prefix="wms.tpl.inventory"
    ),
     "other_adhoc_task": TaskConfig(
        id="other_adhoc_task",
        description="一个其他的临时任务",
        trigger_type="date",
        trigger_args={"run_date": "2024-12-31T23:59:00+08:00"}, # fixed datetime with explicit timezone offset
        is_enabled_on_startup=True,
        mq_routing_key_prefix="other.adhoc"
    )
}

# --- APScheduler instance (all triggers are interpreted in APS_SCHEDULER_TIMEZONE) ---
scheduler = AsyncIOScheduler(timezone=APS_SCHEDULER_TIMEZONE)

# --- Function executed by the scheduler ---
async def send_task_id_to_mq_async_wrapper(task_config_id: str):
    """Async job body invoked by APScheduler on FastAPI's event loop.

    Looks up the task configuration, then offloads the blocking pika
    publish to a worker thread via asyncio.to_thread so the event loop
    is never blocked. Failures are logged, not raised, so the scheduler
    keeps running.
    """
    task_config = TASK_CONFIGURATIONS.get(task_config_id)
    if task_config is None:
        # Unknown id: log and bail out rather than raising inside the scheduler.
        logger.error(f"APScheduler: 尝试为不存在的任务配置ID '{task_config_id}' 发送消息")
        return

    # The configured prefix serves directly as the routing key; append
    # f".{task_config.id}" here if finer-grained routing is ever needed.
    routing_key = f"{task_config.mq_routing_key_prefix}"

    logger.info(f"APScheduler 触发: 准备发送任务ID '{task_config_id}' 到 MQ (Exchange: {RABBITMQ_MAIN_EXCHANGE}, Key: {routing_key})")
    try:
        # pika's BlockingConnection must not run on the event-loop thread.
        await asyncio.to_thread(mq_publisher.publish_message, routing_key, task_config_id)
    except Exception as e:
        logger.error(f"APScheduler 发送任务ID '{task_config_id}' 到 MQ 失败: {e}", exc_info=True)
    else:
        logger.info(f"APScheduler 成功发送任务ID '{task_config_id}' 到 MQ.")


# --- Helper: load and schedule tasks ---
def load_and_schedule_tasks_from_config(task_configs: Dict[str, TaskConfig]):
    """Register every enabled task configuration as an APScheduler job.

    Any existing job with the same derived id is removed first so reloading
    always reflects the latest configuration. Unsupported trigger types and
    malformed 'date' run_date strings are logged and skipped.
    """
    logger.info("开始加载和调度任务...")
    for task_id, config in task_configs.items():
        aps_job_id = f"scheduled_task_{task_id}"

        existing_job = scheduler.get_job(aps_job_id)
        if existing_job:
            logger.info(f"任务 '{aps_job_id}' 已存在，将先移除再添加以确保配置最新。")
            try:
                scheduler.remove_job(aps_job_id)
            except JobLookupError:
                logger.warning(f"尝试移除任务 '{aps_job_id}' 时未找到。")

        if not config.is_enabled_on_startup:
            logger.info(f"任务 '{task_id}' 配置为启动时不启用，跳过调度。")
            continue

        trigger_args = config.trigger_args.copy()  # copy so the stored config is never mutated

        if config.trigger_type == "interval":
            trigger_instance = IntervalTrigger(**trigger_args, timezone=APS_SCHEDULER_TIMEZONE)
        elif config.trigger_type == "cron":
            trigger_instance = CronTrigger(**trigger_args, timezone=APS_SCHEDULER_TIMEZONE)
        elif config.trigger_type == "date":
            run_date = trigger_args.get("run_date")
            if isinstance(run_date, str):
                # BUG FIX: the original try/except wrapped a bare `pass`, so the
                # ValueError branch was unreachable dead code and invalid strings
                # reached DateTrigger unvalidated. Validate the ISO-8601 string
                # up front. (NOTE: fromisoformat on Python < 3.11 rejects a
                # trailing 'Z'; the configs here use explicit UTC offsets.)
                try:
                    datetime.fromisoformat(run_date)
                except ValueError:
                    logger.error(f"任务 '{task_id}': 'run_date' 字符串 '{trigger_args['run_date']}' 格式无效。跳过。")
                    continue
            trigger_instance = DateTrigger(**trigger_args, timezone=APS_SCHEDULER_TIMEZONE)
        else:
            logger.warning(f"任务 '{task_id}' 的触发器类型 '{config.trigger_type}' 不支持。跳过。")
            continue

        scheduler.add_job(
            send_task_id_to_mq_async_wrapper,
            trigger_instance,
            kwargs={'task_config_id': task_id},
            id=aps_job_id,
            name=config.description or task_id,
            replace_existing=True
        )
        logger.info(f"已调度任务: '{task_id}' (APS ID: {aps_job_id}) | {config.trigger_type} | Args: {config.trigger_args}")
    logger.info("任务加载和调度完成。")


# --- FastAPI lifespan ---
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Start the scheduler when the app boots; tear down scheduler and MQ on exit."""
    logger.info("FastAPI 应用启动...")
    # Schedule everything from the static config. The publisher connects
    # lazily on first publish, so no eager mq_publisher._connect() is needed.
    load_and_schedule_tasks_from_config(TASK_CONFIGURATIONS)
    if scheduler.running:
        logger.info("APScheduler 已在运行中.")
    else:
        scheduler.start()
        logger.info("APScheduler 已启动.")
    try:
        yield
    finally:
        logger.info("FastAPI 应用关闭...")
        if scheduler.running:
            scheduler.shutdown()
            logger.info("APScheduler 已关闭.")
        # Release the RabbitMQ connection last.
        mq_publisher.close()

app = FastAPI(lifespan=lifespan, title="APScheduler Task Publisher API")

# --- API endpoints ---
class JobStatus(BaseModel):
    """API response shape describing one scheduled job."""
    job_id: str  # APScheduler job id ("scheduled_task_" + task config id)
    task_config_id: str  # original task configuration id, or "N/A" if unrecognized
    name: str
    trigger: str  # stringified trigger description
    next_run_time: Optional[datetime] = None  # None when the job has no upcoming run (e.g. paused)
    is_paused: bool

@app.get("/scheduler/jobs", response_model=List[JobStatus], summary="获取所有调度任务的状态")
async def get_all_jobs():
    """Return the status of every job currently known to the scheduler."""
    if not scheduler.running:
        return []
    prefix = "scheduled_task_"
    statuses: List[JobStatus] = []
    for job in scheduler.get_jobs():
        # Recover the task-config id from the APScheduler job id.
        cfg_id = job.id.replace(prefix, "", 1) if job.id.startswith(prefix) else "N/A"
        statuses.append(JobStatus(
            job_id=job.id,
            task_config_id=cfg_id,
            name=job.name,
            trigger=str(job.trigger),
            # Normalize the next run time to UTC for the API response.
            next_run_time=job.next_run_time.astimezone(timezone.utc) if job.next_run_time else None,
            # A trigger with no next_run_time indicates a paused job (basic check).
            is_paused=job.next_run_time is None and job.trigger is not None,
        ))
    return statuses

def get_aps_job_id(task_config_id: str) -> str:
    """Map a task-configuration id to its APScheduler job id."""
    return "scheduled_task_" + task_config_id

@app.post("/scheduler/tasks/{task_config_id}/pause", summary="暂停指定任务")
async def pause_task_schedule(task_config_id: str):
    """Pause the scheduled job for the given task configuration.

    Returns 404 when no job exists for the id; 500 on unexpected
    scheduler errors. Pausing an already-paused job is a no-op.
    """
    aps_job_id = get_aps_job_id(task_config_id)
    try:
        job = scheduler.get_job(aps_job_id)
        if not job:
            raise HTTPException(status_code=404, detail=f"Job for task '{task_config_id}' not found.")
        # A job with a trigger but no next_run_time is already paused.
        if job.next_run_time is None and job.trigger is not None:
            return {"message": f"Task '{task_config_id}' is already paused."}
        scheduler.pause_job(aps_job_id)
        return {"message": f"Task '{task_config_id}' paused."}
    except JobLookupError:
        raise HTTPException(status_code=404, detail=f"Job for task '{task_config_id}' not found.")
    except HTTPException:
        # BUG FIX: HTTPException subclasses Exception, so the generic handler
        # below used to swallow the intended 404 and re-raise it as a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/scheduler/tasks/{task_config_id}/resume", summary="恢复指定任务")
async def resume_task_schedule(task_config_id: str):
    """Resume a paused job and report its next run time.

    Returns 404 when no job exists for the id; a friendly message when the
    job was not paused; 500 on unexpected scheduler errors.
    """
    aps_job_id = get_aps_job_id(task_config_id)
    try:
        job = scheduler.get_job(aps_job_id)
        if not job:
            raise HTTPException(status_code=404, detail=f"Job for task '{task_config_id}' not found.")
        # A non-None next_run_time means the job is active, not paused.
        if job.next_run_time is not None:
            return {"message": f"Task '{task_config_id}' is not paused."}
        scheduler.resume_job(aps_job_id)
        new_next_run = scheduler.get_job(aps_job_id).next_run_time
        return {"message": f"Task '{task_config_id}' resumed. Next run: {new_next_run.isoformat() if new_next_run else 'N/A'}"}
    except JobLookupError:
        raise HTTPException(status_code=404, detail=f"Job for task '{task_config_id}' not found.")
    except HTTPException:
        # BUG FIX: without this clause the generic Exception handler below
        # converted the intended 404 into a 500 response.
        raise
    except Exception as e:
        if "is not paused" in str(e).lower():
            return {"message": f"Task '{task_config_id}' is not paused."}
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/scheduler/tasks/{task_config_id}/trigger", summary="立即触发指定任务")
async def trigger_task_now(task_config_id: str):
    """Fire a task immediately.

    If the task is registered with APScheduler, move its next run to now;
    otherwise (or if rescheduling fails) publish its id straight to MQ.
    """
    if TASK_CONFIGURATIONS.get(task_config_id) is None:
        raise HTTPException(status_code=404, detail=f"Task configuration ID '{task_config_id}' not found.")

    aps_job_id = get_aps_job_id(task_config_id)
    job = scheduler.get_job(aps_job_id)

    if job is None:
        # Not scheduled (e.g. is_enabled_on_startup=False or removed):
        # bypass the scheduler and publish directly.
        logger.info(f"任务 '{task_config_id}' 未在调度器中, 直接发送到MQ.")
        await send_task_id_to_mq_async_wrapper(task_config_id)
        return {"message": f"Task '{task_config_id}' (not scheduled) triggered directly via MQ."}

    logger.info(f"立即触发已调度的任务: {task_config_id} (APS ID: {aps_job_id})")
    try:
        scheduler.modify_job(aps_job_id, next_run_time=datetime.now(scheduler.timezone))
    except Exception as e:
        logger.error(f"Failed to modify job '{aps_job_id}' to run now: {e}")
        # Fall back to publishing the task id directly to MQ.
        await send_task_id_to_mq_async_wrapper(task_config_id)
        return {"message": f"Failed to modify job, but directly triggered task '{task_config_id}' via MQ."}
    return {"message": f"Scheduled task '{task_config_id}' set to run now."}


@app.post("/scheduler/reload_tasks", summary="重新加载并调度所有任务配置")
async def reload_all_tasks():
    """Re-apply the current in-memory task configuration to the scheduler."""
    logger.info("收到重新加载任务的请求...")
    # In a real deployment TASK_CONFIGURATIONS would be re-read from a
    # database or file here before rescheduling, e.g.:
    #   global TASK_CONFIGURATIONS
    #   TASK_CONFIGURATIONS = load_new_configs()
    load_and_schedule_tasks_from_config(TASK_CONFIGURATIONS)
    return {"message": "任务配置已重新加载和调度。"}

@app.get("/")
async def root():
    """Simple liveness probe for the publisher service."""
    status_payload = {"message": "APScheduler Task Publisher API is running."}
    return status_payload

# uvicorn publisher_main:app --reload