import queue

from fastapi import FastAPI, HTTPException
from typing import List, Dict, Optional, Tuple
from queue import Queue
import time
import threading
import json
import random
import numpy as np

from config.base_config import NODE_CONFIG
from task import Task
from protocol import (MessageType, NodeInfo, TaskResult,
                     serialize_task_with_path, deserialize_task_with_path,
                     serialize_result, deserialize_result)
import requests
from pydantic import BaseModel
from typing import List, Dict, Optional
import uvicorn

import logging

# Configure logging
# NOTE(review): logging writes to logs/app.log — the logs/ directory must
# already exist or FileHandler creation fails at import time; confirm the
# deployment creates it.
logging.basicConfig(
    level=logging.INFO,
    filename="logs/app.log",
    format='%(asctime)s - %(levelname)s - %(message)s'
)
# Module-wide logger shared by the FastAPI routes and ComputeNode below.
logger = logging.getLogger(__name__)

# FastAPI application. The routes below read the node via app.state.node,
# which is presumably assigned by startup code outside this chunk — verify.
app = FastAPI()

class QueueInfo(BaseModel):
    """Response model for GET /queues: one task-summary list per queue."""
    arrival_queue: List[Dict]  # tasks awaiting a scheduling decision
    local_queue: List[Dict]    # tasks queued for local execution
    forward_queue: List[Dict]  # tasks queued for forwarding to a neighbor

class NodeStatus(BaseModel):
    """Response model for GET /status (subset of ComputeNode.get_status())."""
    cpu_usage: float       # simplified proxy: local queue length / 10.0
    memory_usage: float    # placeholder constant in get_status()
    queue_length: int      # current local queue size
    # BUG FIX: get_status() fills this key with str(self.current_task) (or
    # None), so the original Optional[Dict] annotation made response-model
    # validation fail whenever a task was running.
    current_task: Optional[str]

class TaskDecision(BaseModel):
    """A controller's placement decision for a single task."""
    task_id: str        # id of a task currently in the arrival queue
    target_node: str    # node that should execute the task (may be this node)
    path: List[str]     # forwarding path handed to the SDN controller

class TaskDecisions(BaseModel):
    """Batch wrapper for POST /decide_tasks."""
    decisions: List[TaskDecision]  # applied independently, in order

@app.get("/queues", response_model=QueueInfo)
async def get_queues():
    """Return a snapshot of this node's arrival, local and forward queues."""
    node = app.state.node
    queue_info = dict(
        arrival_queue=node.get_arrival_queue_info(),
        local_queue=node.get_local_queue_info(),
        forward_queue=node.get_forward_queue_info(),
    )
    logger.info(f"Queue information requested: {queue_info}")
    return queue_info

@app.get("/status", response_model=NodeStatus)
async def get_status():
    """Return the node's current load and task state."""
    status = app.state.node.get_status()
    logger.info(f"Node status requested: {status}")
    return status

@app.post("/decide_task")
async def decide_task(decision: TaskDecision):
    """Apply a scheduling decision for a single task."""
    node = app.state.node
    accepted = node.process_task_decision(decision.task_id, decision.target_node, decision.path)
    if accepted:
        logger.info(f"Task {decision.task_id} decided to be processed on {decision.target_node}")
        return {"status": "success"}
    logger.warning(f"Invalid decision received for task_id: {decision.task_id}, target_node: {decision.target_node}")
    raise HTTPException(status_code=400, detail="Invalid decision")

@app.post("/decide_tasks")
async def decide_tasks(decisions: TaskDecisions):
    """Apply a batch of scheduling decisions; each outcome is reported per task."""
    node = app.state.node
    results = []
    for decision in decisions.decisions:
        accepted = node.process_task_decision(
            decision.task_id, decision.target_node, decision.path
        )
        if accepted:
            logger.info(f"Task {decision.task_id} decided to be processed on {decision.target_node}")
        else:
            logger.warning(f"Invalid decision received for task_id: {decision.task_id}, target_node: {decision.target_node}")
        results.append({"task_id": decision.task_id, "success": accepted})
    return {"results": results}

# FastAPI routes
@app.post("/submit_task")
async def submit_task(message: Dict):
    """Accept a task submission and delegate it to the node's arrival queue."""
    logger.info("Received submit task request")
    result = await app.state.node.handle_submit_task(message)
    # BUG FIX: the original f-string had no placeholder, so the result value
    # was never actually logged.
    logger.info("Submit task processed, result=%s", result)
    return result

@app.post("/forward_task")
async def forward_task(message: Dict):
    """Receive a task forwarded from another node and enqueue it locally."""
    logger.info("Received forward task request")
    result = await app.state.node.handle_forward_task(message)
    # BUG FIX: the original f-string had no placeholder, so the result value
    # was never actually logged.
    logger.info("Forward task processed, result=%s", result)
    return result

class ComputeNode:
    """A compute node in a distributed task-offloading system.

    Tasks submitted over HTTP land in ``arrival_queue``. A controller then
    posts per-task decisions (``process_task_decision``) that move each task
    either to ``local_queue`` (executed here) or to ``forward_queue``
    (relayed to a neighbor after programming the SDN flow table). Two daemon
    worker threads drain the local and forward queues.
    """

    def __init__(self,
                 node_id: str,
                 node_type: str,
                 host: str,
                 real_host: str,
                 port: int,
                 compute_capacity: float,    # GHz
                 memory_capacity: float,     # GB
                 neighbors: Dict[str, NodeInfo] = None):  # neighbor node info
        self.node_id = node_id
        self.node_type = node_type
        self.host = host
        self.real_host = real_host
        self.port = port
        self.compute_capacity = compute_capacity
        self.memory_capacity = memory_capacity
        self.neighbors = neighbors or {}

        # SDN controller endpoint used by _update_flow_table.
        self.sdn_host = NODE_CONFIG["sdn_host"]
        self.sdn_port = NODE_CONFIG["sdn_port"]
        # Shared capacity bound for all three queues.
        self.max_queue_length = NODE_CONFIG["queue_length"]

        # Queue initialization.
        self.arrival_queue = Queue(maxsize=self.max_queue_length)
        self.local_queue = Queue(maxsize=self.max_queue_length)
        self.forward_queue = Queue(maxsize=self.max_queue_length)

        self.current_task: Optional[Task] = None
        self.is_running = True

        logger.info(f"ComputeNode {node_id} initialized with host={host}, port={port}, capacity={compute_capacity}")

        # Start the worker threads. They are daemons so the process can exit
        # even if stop() is never called (the original used non-daemon
        # threads, which kept the interpreter alive).
        self._local_worker = threading.Thread(target=self._process_local_tasks, daemon=True)
        self._local_worker.start()
        self._forward_worker = threading.Thread(target=self._process_forward_tasks, daemon=True)
        self._forward_worker.start()

    def get_status(self) -> Dict:
        """Return a snapshot of this node's load for the /status route."""
        status = {
            'node_id': self.node_id,
            'node_type': self.node_type,
            'cpu_usage': self.local_queue.qsize() / 10.0,  # simplified CPU-usage proxy
            'memory_usage': 0.5,  # placeholder value
            'queue_length': self.local_queue.qsize(),
            'active_tasks': 1 if self.current_task else 0,
            'compute_capacity': self.compute_capacity,
            'current_task': str(self.current_task) if self.current_task else None
        }
        return status

    def _process_local_tasks(self):
        """Worker loop: pull tasks off the local queue and execute them.

        Runs until stop() clears ``is_running``; all exceptions are logged so
        the thread never dies.
        """
        while self.is_running:
            try:
                if self.current_task is None and not self.local_queue.empty():
                    try:
                        task, client_addr = self.local_queue.get_nowait()
                    except queue.Empty:
                        # Queue drained between the empty() check and get_nowait().
                        pass
                    else:
                        logger.info(f"Processing task {task.id} from local queue.")
                        self.current_task = task  # mark task as in progress
                        try:
                            self._execute_task(task, client_addr)
                        except Exception as e:
                            logger.exception(f"Error processing local task: {e}")
                        finally:
                            self.current_task = None  # clear in-progress marker
            except Exception as e:
                logger.exception(f"Unexpected error in _process_local_tasks loop: {e}")
            finally:
                # Single pacing sleep. BUG FIX: the original slept both inside
                # the try body and in this finally, pausing 0.2 s per cycle.
                time.sleep(0.1)

    def _process_forward_tasks(self):
        """Worker loop: drain the forward queue, programming the SDN flow
        table before relaying each task to its target neighbor."""
        while self.is_running:
            try:
                while not self.forward_queue.empty():
                    try:
                        task, target_node, path, client_addr = self.forward_queue.get_nowait()
                        logger.info(f"Forwarding task {task.id} to {target_node} with path {path}.")

                        # Program the path first; if that fails the task is
                        # skipped (dropped) and we move on to the next one.
                        try:
                            self._update_flow_table(task, target_node, path)
                        except Exception as e:
                            logger.error(f"Error updating flow table for task {task.id}: {e}")
                            continue

                        try:
                            self._forward_task(task, target_node, path, client_addr)
                        except Exception as e:
                            logger.error(f"Error forwarding task {task.id} to {target_node}: {e}")

                    except queue.Empty:
                        # Queue drained between the empty() check and get_nowait().
                        pass
                    except Exception as e:
                        logger.exception(f"Error processing forward task: {e}")
            except Exception as e:
                logger.exception(f"Unexpected error in _process_forward_tasks loop: {e}")
            finally:
                time.sleep(0.1)  # pacing between drain sweeps

    def _update_flow_table(self, task: Task, target_node: str, path: List[str]):
        """Ask the SDN controller to install the forwarding path for *task*.

        Equivalent request:
            curl -X POST -H "Content-Type: application/json" -d '{
                "src_ip": "10.0.0.1", "dst_ip": "10.0.0.2",
                "dst_port": 80, "path": [1,2,11,10,3]
            }' http://<sdn_host>:<sdn_port>/flow

        Raises KeyError if *target_node* is not a known neighbor; the caller
        (_process_forward_tasks) logs it and skips the task.
        """
        if len(path) == 0:
            logger.warning(f"Path is empty for task {task.id}.")
            return

        # Source node IP.
        src_node_ip = self.real_host
        # Target node IP / port (KeyError here is handled by the caller).
        target_node_ip = self.neighbors[target_node].host
        target_node_port = self.neighbors[target_node].port
        data = {
            "src_ip": src_node_ip,
            "dst_ip": target_node_ip,
            "dst_port": target_node_port,
            "path": path
        }
        url = f"http://{self.sdn_host}:{self.sdn_port}/flow"
        logger.info(f"Update flow table url: {url}, data: {data}")
        try:
            # Timeout added so a hung controller cannot block the forward
            # worker thread forever.
            response = requests.post(url, json=data, timeout=10)
            logger.info(f"Update flow table response: {response.text}")
        except Exception as e:
            logger.error(f"fail to update flow table : {e}")

    def _execute_task(self, task: Task, client_addr: tuple):
        """Execute *task* locally and send the TaskResult back to the client."""
        task.start_time = time.time()
        result_data, execution_details = self._simulate_computation(task)
        task.completion_time = time.time()

        # A task counts as failed when it exceeded its deadline.
        if task.is_failed():
            success = False
            logger.warning(f"Task {task.id} failed due to timeout.")
        else:
            success = True
            logger.info(f"Task {task.id} completed successfully.")

        result = TaskResult(
            task_id=task.id,
            success=success,
            submit_time=task.submit_time,
            max_delay=task.max_delay,
            result_data=result_data,
            completion_time=task.completion_time,
            execution_path=[self.node_id],
            execution_details=execution_details
        )

        self._send_result_to_client(result, client_addr)

    def _forward_task(self, task: Task, target_node: str, path: List[str], client_addr: tuple):
        """Relay *task* to a neighbor node over its /forward_task route."""
        if target_node not in self.neighbors:
            logger.error(f"Target node {target_node} not in neighbors list.")
            return

        node_info = self.neighbors[target_node]
        try:
            # Timeout added so an unresponsive neighbor cannot block the
            # forward worker thread forever.
            response = requests.post(
                f"http://{node_info.host}:{node_info.port}/forward_task",
                json={
                    'type': MessageType.FORWARD_TASK,
                    'data': serialize_task_with_path(task, self.node_id, path, client_addr)
                },
                timeout=30
            )
            if response.status_code != 200:
                logger.error(f"Forwarding task failed: {response.text}")
            else:
                logger.info(f"Task {task.id} forwarded successfully to {target_node}.")
        except Exception as e:
            logger.exception(f"Error forwarding task {task.id} to node {target_node}: {e}")
            return

    def _send_result_to_client(self, result: TaskResult, client_addr: tuple):
        """POST the serialized *result* back to the client at *client_addr*."""
        try:
            response = requests.post(
                f"http://{client_addr[0]}:{client_addr[1]}/result",
                json={
                    'type': MessageType.TASK_RESULT,
                    'data': serialize_result(result)
                },
                timeout=10  # don't let a dead client hang the worker thread
            )
            if response.status_code == 200:
                logger.info(f"Result for task {result.task_id} sent to client successfully.")
            else:
                logger.error(f"Failed to send result for task {result.task_id} to client. Status code: {response.status_code}")
        except Exception as e:
            logger.exception(f"Error sending result for task {result.task_id} to client: {e}")
            return

    async def handle_submit_task(self, message: Dict):
        """Deserialize a submitted task and place it on the arrival queue.

        Raises HTTPException(503) when the arrival queue is full.
        """
        task, source_node, path, client_addr = deserialize_task_with_path(message['data'])
        logger.info(f"Received task submission from {client_addr}. Task ID: {task.id}")

        if self.arrival_queue.full():
            logger.warning(f"Arrival queue is full. Task {task.id} rejected.")
            raise HTTPException(status_code=503, detail="Arrival queue is full")
        self.arrival_queue.put((task, client_addr))
        logger.info(f"Task {task.id} added to arrival queue.")
        return {"status": "success"}

    async def handle_forward_task(self, message: Dict):
        """Deserialize a forwarded task and place it on the local queue.

        Raises HTTPException(503) when the local queue is full.
        """
        task, _, _, client_addr = deserialize_task_with_path(message['data'])
        logger.info(f"Received forwarded task {task.id} from an unknown source.")

        if self.local_queue.full():
            logger.warning(f"Local queue is full. Task {task.id} rejected.")
            raise HTTPException(status_code=503, detail="Queue is full")

        self.local_queue.put((task, client_addr))
        logger.info(f"Task {task.id} added to local queue.")
        return {"status": "success"}

    def stop(self) -> None:
        """Stop the node: the worker loops observe the flag within ~0.1 s."""
        self.is_running = False
        logger.info(f"ComputeNode {self.node_id} stopped.")

    def _simulate_computation(self, task: Task) -> Tuple[bytes, Dict]:
        """Run the actual computation for *task*: count primes below a bound
        proportional to the task's compute load.

        Returns:
            A ``(result_bytes, execution_details)`` pair: the JSON-encoded
            result payload and a small dict describing the execution.

        BUG FIX: the original returned only ``result_bytes``, but
        _execute_task unpacks two values from this call, which raised a
        ValueError on every locally executed task.
        """
        start_time = time.time()

        # Higher compute load -> larger search range.
        computation_range = int(task.compute_load * 1000000)

        def is_prime(n: int) -> bool:
            # 6k +/- 1 trial division.
            if n <= 1:
                return False
            if n <= 3:
                return True
            if n % 2 == 0 or n % 3 == 0:
                return False
            i = 5
            while i * i <= n:
                if n % i == 0 or n % (i + 2) == 0:
                    return False
                i += 6
            return True

        primes = []
        count = 0
        for num in range(2, computation_range):
            if is_prime(num):
                count += 1
                if len(primes) < 100:  # keep only the first 100 as samples
                    primes.append(num)

        execution_time = time.time() - start_time

        result_data = {
            "prime_count": count,
            "sample_primes": primes[:10],  # only the first 10 in the payload
            "computation_range": computation_range,
            "execution_time": execution_time
        }
        result_bytes = json.dumps(result_data).encode()

        # Metadata consumed by _execute_task when building the TaskResult.
        execution_details = {
            "node_id": self.node_id,
            "computation_range": computation_range,
            "execution_time": execution_time,
        }

        logger.info(f"Computed {count} primes in range [2, {computation_range}] in {execution_time:.2f} seconds")

        return result_bytes, execution_details

    @staticmethod
    def _task_brief(task: Task) -> Dict:
        """Common per-task summary fields used by the queue-info endpoints."""
        return {
            'id': task.id,
            'task_type': task.task_type,
            'compute_load': task.compute_load,
            'data_size': task.data_size,
            'submit_time': task.submit_time,
            'max_delay': task.max_delay
        }

    def get_arrival_queue_info(self) -> List[Dict]:
        """Summarize the tasks currently waiting in the arrival queue."""
        return [self._task_brief(task) for task, _ in list(self.arrival_queue.queue)]

    def get_local_queue_info(self) -> List[Dict]:
        """Summarize the tasks currently waiting in the local queue."""
        return [self._task_brief(task) for task, _ in list(self.local_queue.queue)]

    def get_forward_queue_info(self) -> List[Dict]:
        """Summarize the tasks currently waiting in the forward queue,
        including their forwarding target and path."""
        tasks = []
        for task, target, path, _ in list(self.forward_queue.queue):
            info = self._task_brief(task)
            info['target_node'] = target
            info['path'] = path
            tasks.append(info)
        return tasks

    def process_task_decision(self, task_id: str, target_node: str, path: List[str]) -> bool:
        """Move the task *task_id* from the arrival queue to either the local
        queue (target is this node) or the forward queue.

        Returns True on success; False when the task is unknown, was already
        removed, or the destination queue is full.
        """
        try:
            # Iterate a snapshot so concurrent puts don't disturb the scan.
            for task, client_addr in list(self.arrival_queue.queue):
                if task.id != task_id:
                    continue
                try:
                    # Direct deque mutation bypasses Queue's API, so hold the
                    # queue's own mutex while removing (the original did not).
                    with self.arrival_queue.mutex:
                        self.arrival_queue.queue.remove((task, client_addr))
                except ValueError as e:
                    logger.error(f"Error removing task {task_id} from arrival queue: {e}")
                    return False

                if target_node == self.node_id:
                    logger.info(f"Moving task {task_id} to local queue.")
                    try:
                        self.local_queue.put_nowait((task, client_addr))
                    except queue.Full as e:
                        logger.error(f"Local queue is full, cannot add task {task_id}: {e}")
                        return False
                else:
                    logger.info(f"Moving task {task_id} to forward queue, target: {target_node}.")
                    try:
                        self.forward_queue.put_nowait((task, target_node, path, client_addr))
                    except queue.Full as e:
                        logger.error(f"Forward queue is full, cannot add task {task_id}: {e}")
                        return False
                return True

            # Loop completed without a match (the original also had an
            # unreachable trailing `return True` here — removed).
            logger.warning(f"Task {task_id} not found in arrival queue.")
            return False

        except Exception as e:
            logger.exception(f"Unexpected error processing task decision for task {task_id}: {e}")
            return False