import json
import logging
import os
import queue
import random
import threading
import time
from queue import Queue
from typing import List, Dict, Optional, Tuple
from typing import List, Dict, Optional

import numpy as np
import requests
import uvicorn
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

from config.base_config import NODE_CONFIG
from protocol import (MessageType, NodeInfo, TaskResult,
                     serialize_task_with_path, deserialize_task_with_path,
                     serialize_result, deserialize_result)
from task import Task

# Configure logging. Create the log directory first: logging.basicConfig
# with a filename raises FileNotFoundError if "logs/" does not exist
# (e.g. on a fresh checkout or clean deployment).
os.makedirs("logs", exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    filename="logs/app.log",
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

app = FastAPI()

class QueueInfo(BaseModel):
    """Response model for ``GET /queues``: tasks waiting in the arrival queue."""
    # One dict per queued task, as produced by ComputeNode.get_arrival_queue_info().
    arrival_queue: List[Dict]

class NodeStatus(BaseModel):
    """Response model for ``GET /status``: point-in-time load metrics for a node."""
    # presumably a 0..1 utilization fraction — confirm with the producer
    cpu_usage: float
    # presumably a 0..1 utilization fraction — confirm with the producer
    memory_usage: float
    # Number of tasks currently waiting in the arrival queue.
    queue_length: int

class TaskDecision(BaseModel):
    """A single scheduling decision: route one task to a target node."""
    task_id: str
    # Node id of the node chosen to execute the task.
    target_node: str
    # presumably the ordered node ids the task should traverse — confirm with the scheduler
    path: List[str]

class TaskDecisions(BaseModel):
    """A batch of scheduling decisions, one ``TaskDecision`` per task."""
    decisions: List[TaskDecision]

@app.get("/queues", response_model=QueueInfo)
async def get_queues():
    """Return the contents of this node's arrival queue."""
    info = {"arrival_queue": app.state.node.get_arrival_queue_info()}
    logger.info(f"Queue information requested: {info}")
    return info

@app.get("/status", response_model=NodeStatus)
async def get_status():
    """Return a snapshot of this node's current status."""
    current = app.state.node.get_status()
    logger.info(f"Node status requested: {current}")
    return current

# FastAPI路由
# FastAPI route
@app.post("/submit_task")
async def submit_task(message: Dict):
    """Accept a serialized task submission and hand it to this node.

    Returns whatever ``ComputeNode.handle_submit_task`` returns
    (``{"status": "success"}`` on enqueue; raises 503 when the queue is full).
    """
    # Original line was an f-string with no placeholder; plain string is correct.
    logger.info("Received submit task request")
    result = await app.state.node.handle_submit_task(message)
    # Original message promised the result but never interpolated it.
    logger.info(f"Submit task processed, result: {result}")
    return result

class ComputeNode:
    """A compute node that queues incoming tasks and executes them on a
    background worker thread.

    Tasks arrive through ``handle_submit_task`` (the ``/submit_task`` route),
    wait in ``arrival_queue`` as ``(task, client_addr)`` tuples, and are
    drained by ``_process_local_tasks``. Results are POSTed back to the
    submitting client.
    """

    def __init__(self,
                 node_id: str,
                 node_type: str,
                 host: str,
                 real_host: str,
                 port: int,
                 compute_capacity: float,    # GHz
                 memory_capacity: float):    # GB
        self.node_id = node_id
        self.node_type = node_type
        self.host = host
        self.real_host = real_host
        self.port = port
        self.compute_capacity = compute_capacity
        self.memory_capacity = memory_capacity

        # Upper bound on how many tasks may wait for execution.
        self.max_queue_length = NODE_CONFIG["queue_length"]

        # Arrival queue of (Task, client_addr) tuples.
        self.arrival_queue = Queue(maxsize=self.max_queue_length)

        # Cleared by stop() to terminate the worker loop.
        self.is_running = True

        logger.info(f"ComputeNode {node_id} initialized with host={host}, port={port}, capacity={compute_capacity}")

        # daemon=True so a (possibly blocked) worker thread cannot keep the
        # interpreter alive after the main thread exits.
        threading.Thread(target=self._process_local_tasks, daemon=True).start()

    def get_status(self) -> Dict:
        """Return a snapshot of this node's state.

        Includes ``cpu_usage`` so the payload satisfies the ``NodeStatus``
        response model of the ``/status`` route; the original dict omitted
        it, which made FastAPI's response validation fail.
        """
        return {
            'node_id': self.node_id,
            'node_type': self.node_type,
            'cpu_usage': 0.5,     # placeholder value; real CPU sampling not implemented
            'memory_usage': 0.5,  # placeholder value
            'queue_length': self.arrival_queue.qsize(),
            'compute_capacity': self.compute_capacity,
        }

    def _process_local_tasks(self):
        """Worker loop: take tasks from the arrival queue and execute them.

        Runs until ``stop()`` clears ``is_running``. Every exception is
        caught and logged so a single bad task cannot kill the thread.
        A blocking ``get(timeout=...)`` replaces the original
        ``empty()``-poll, which also slept twice per iteration (once in the
        try body and once more in the ``finally`` clause).
        """
        while self.is_running:
            try:
                try:
                    # Block briefly; waking up regularly keeps stop() responsive.
                    task, client_addr = self.arrival_queue.get(timeout=0.1)
                except queue.Empty:
                    continue
                logger.info(f"Processing task {task.id} from local queue.")
                self._execute_task(task, client_addr)
            except Exception as e:
                logger.exception(f"Unexpected error in _process_local_tasks loop: {e}")
                # Back off so a persistent failure cannot spin the CPU.
                time.sleep(0.1)

    def _execute_task(self, task: Task, client_addr: tuple):
        """Run *task* locally and report the outcome to the client."""
        task.start_time = time.time()
        result_data = self._simulate_computation(task)
        task.completion_time = time.time()

        # A task that finished after its deadline counts as failed.
        if task.is_failed():
            success = False
            logger.warning(f"Task {task.id} failed due to timeout.")
        else:
            success = True
            logger.info(f"Task {task.id} completed successfully.")

        # Build and send the result message.
        result = TaskResult(
            task_id=task.id,
            success=success,
            submit_time=task.submit_time,
            max_delay=task.max_delay,
            result_data=result_data,
            completion_time=task.completion_time,
            execution_path=[self.node_id]
        )

        self._send_result_to_client(result, client_addr)

    def _send_result_to_client(self, result: TaskResult, client_addr: tuple):
        """POST *result* to the client at ``(host, port)``.

        Best-effort: failures are logged, never raised, so the worker
        thread keeps processing subsequent tasks.
        """
        try:
            # requests has no default timeout; without one an unreachable
            # client would hang the worker thread forever.
            response = requests.post(
                f"http://{client_addr[0]}:{client_addr[1]}/result",
                json={
                    'type': MessageType.TASK_RESULT,
                    'data': serialize_result(result)
                },
                timeout=10,
            )
            if response.status_code == 200:
                logger.info(f"Result for task {result.task_id} sent to client successfully.")
            else:
                logger.error(f"Failed to send result for task {result.task_id} to client. Status code: {response.status_code}")
        except Exception as e:
            logger.exception(f"Error sending result for task {result.task_id} to client: {e}")

    async def handle_submit_task(self, message: Dict):
        """Deserialize a submitted task and enqueue it for execution.

        Raises:
            HTTPException: 503 when the arrival queue is full.
        """
        task, source_node, path, client_addr = deserialize_task_with_path(message['data'])
        logger.info(f"Received task submission from {client_addr}. Task ID: {task.id}")

        try:
            # put_nowait avoids the check-then-act race of full() + put()
            # (put() could block if another producer filled the queue
            # between the two calls).
            self.arrival_queue.put_nowait((task, client_addr))
        except queue.Full:
            logger.warning(f"Arrival queue is full. Task {task.id} rejected.")
            raise HTTPException(status_code=503, detail="Arrival queue is full")
        logger.info(f"Task {task.id} added to arrival queue.")
        return {"status": "success"}

    def stop(self) -> None:
        """Signal the worker loop to exit after its current iteration."""
        self.is_running = False
        logger.info(f"ComputeNode {self.node_id} stopped.")

    def _simulate_computation(self, task: Task) -> bytes:
        """Simulate work by counting primes in ``[2, task.compute_load)``.

        Returns:
            JSON-encoded bytes containing the prime count, a sample of the
            first primes found, the search range, and the wall-clock
            execution time.
        """
        start_time = time.time()

        # The compute load directly determines the search range:
        # heavier tasks scan a larger interval.
        computation_range = int(task.compute_load)

        def is_prime(n: int) -> bool:
            # Trial division over 6k +/- 1 candidates.
            if n <= 1:
                return False
            if n <= 3:
                return True
            if n % 2 == 0 or n % 3 == 0:
                return False
            i = 5
            while i * i <= n:
                if n % i == 0 or n % (i + 2) == 0:
                    return False
                i += 6
            return True

        # Count all primes in range; keep at most the first 100 as samples.
        primes = []
        count = 0
        for num in range(2, computation_range):
            if is_prime(num):
                count += 1
                if len(primes) < 100:
                    primes.append(num)

        execution_time = time.time() - start_time

        result_data = {
            "prime_count": count,
            "sample_primes": primes[:10],  # only return the first 10 primes
            "computation_range": computation_range,
            "execution_time": execution_time
        }

        logger.info(f"Computed {count} primes in range [2, {computation_range}] in {execution_time:.2f} seconds")

        return json.dumps(result_data).encode()

    def get_arrival_queue_info(self) -> List[Dict]:
        """Return a serializable summary of every queued task.

        Reads ``Queue.queue`` directly: a best-effort snapshot taken
        without holding the queue's internal lock.
        """
        return [
            {
                'id': task.id,
                'task_type': task.task_type,
                'compute_load': task.compute_load,
                'data_size': task.data_size,
                'submit_time': task.submit_time,
                'max_delay': task.max_delay,
            }
            for task, _ in list(self.arrival_queue.queue)
        ]



