"""
推理请求相关数据模型
"""
import time
from enum import Enum
from typing import Optional, List, Dict, Any, Union
from dataclasses import dataclass
from datetime import datetime
from pydantic import BaseModel, Field

from motor.resources.endpoint import Endpoint
from motor.resources.instance import Instance

class RequestType(Enum):
    """Wire formats of the inference backends a request can target."""

    OPENAI = "openai"    # OpenAI-compatible API
    TRITON = "triton"    # NVIDIA Triton inference server
    TGI = "tgi"          # HuggingFace Text Generation Inference
    VLLM = "vllm"        # vLLM server
    MINDIE = "mindie"    # MindIE server

class ReqState(Enum):
    """States a request passes through in the P/D scheduling lifecycle.

    The happy path is roughly:
    ARRIVE -> P_SCHEDULING -> P_ALLOCATED -> PREFILL_END
    -> D_SCHEDULING -> D_ALLOCATED -> FIRST_TOKEN_FINISH -> DECODE_END.
    INVALID / TIMEOUT / EXCEPTION / RECOMPUTE are off-path terminal or
    recovery states.
    """

    ARRIVE = 'Arrive'                           # request has arrived
    P_SCHEDULING = 'P_Scheduling'               # selecting a prefill (P) instance
    P_ALLOCATED = 'P_Allocated'                 # prefill instance assigned
    PREFILL_END = 'Prefill End'                 # prefill stage finished
    FIRST_TOKEN_FINISH = 'First Token Finish'   # first token decoded
    D_SCHEDULING = 'D_Scheduling'               # selecting a decode (D) instance
    D_ALLOCATED = 'D_Allocated'                 # decode instance assigned
    DECODE_END = 'Decode End'                   # decode stage finished
    INVALID = 'Invalid'                         # not yet in a valid state
    TIMEOUT = 'Timeout'                         # request timed out
    EXCEPTION = 'Exception'                     # request failed with an error
    RECOMPUTE = 'Recompute'                     # request must be recomputed


class DeploymentMode(Enum):
    """How the serving stack is deployed."""

    SINGLE_NODE = "single_node"              # everything on one node
    # NOTE(review): "seperate" is a misspelling of "separate", but the value
    # is kept as-is — it may appear in configs or on the wire; confirm before
    # renaming.
    PD_SEPERATE = "pd_seperate"              # prefill/decode split deployment
    PD_DISAGGREGATION = "pd_disaggregation"  # prefill/decode disaggregated


class OpenAIMessage(BaseModel):
    """A single chat message in the OpenAI format (role + text content)."""

    # e.g. "system" / "user" / "assistant" — not validated here
    role: str
    content: str


class OpenAICompletionRequest(BaseModel):
    """Request body for the OpenAI-style /v1/completions endpoint.

    Defaults mirror the OpenAI API: sampling knobs default to their neutral
    values and streaming is off.
    """

    model: str                                       # target model name
    prompt: str                                      # raw prompt text
    max_tokens: Optional[int] = None                 # cap on generated tokens
    temperature: Optional[float] = 1.0               # sampling temperature
    top_p: Optional[float] = 1.0                     # nucleus sampling mass
    stream: Optional[bool] = False                   # stream tokens back if True
    stop: Optional[Union[str, List[str]]] = None     # stop sequence(s)
    presence_penalty: Optional[float] = 0.0
    frequency_penalty: Optional[float] = 0.0
    user: Optional[str] = None                       # end-user identifier


class OpenAIChatCompletionRequest(BaseModel):
    """Request body for the OpenAI-style /v1/chat/completions endpoint.

    Identical sampling parameters to the plain completion request, but the
    input is a list of role-tagged messages instead of a single prompt.
    """

    model: str                                       # target model name
    messages: List[OpenAIMessage]                    # conversation history
    max_tokens: Optional[int] = None                 # cap on generated tokens
    temperature: Optional[float] = 1.0               # sampling temperature
    top_p: Optional[float] = 1.0                     # nucleus sampling mass
    stream: Optional[bool] = False                   # stream tokens back if True
    stop: Optional[Union[str, List[str]]] = None     # stop sequence(s)
    presence_penalty: Optional[float] = 0.0
    frequency_penalty: Optional[float] = 0.0
    user: Optional[str] = None                       # end-user identifier


class RequestInfo(BaseModel):
    """Lifecycle record for a single inference request.

    Holds the request payload plus per-stage timestamps that are stamped as
    the request moves through the P/D scheduling state machine via
    ``update_state``.
    """

    req_id: str = Field(..., description="Request ID generated by RequestManager")
    req_data: dict = Field(..., description="Request json content")
    req_len: int = Field(..., description="Request body length")
    api: str = Field(..., description="API need to be forwarded")
    state: ReqState = Field(default=ReqState.INVALID, description="Request status")
    # default_factory (not default=time.time()) so each instance gets its own
    # start time; the previous default was evaluated once at import and shared
    # by every request, and stored a float under a datetime annotation.
    start_time: datetime = Field(default_factory=datetime.now, description="Request start time")
    # Stage timestamps are unset until the matching state transition occurs,
    # hence Optional.
    p_scheduled_time: Optional[datetime] = Field(default=None, description="P instance scheduled time")
    prefill_end_time: Optional[datetime] = Field(default=None, description="Prefill end time")
    d_scheduled_time: Optional[datetime] = Field(default=None, description="D instance scheduled time")
    first_token_time: Optional[datetime] = Field(default=None, description="First token decoded time")
    decode_end_time: Optional[datetime] = Field(default=None, description="Decode end time")
    input_tokens: int = Field(default=0, description="Input tokens")
    output_tokens: int = Field(default=0, description="Output tokens")

    def update_state(self, new_state: ReqState) -> None:
        """Transition to ``new_state`` and stamp the matching timestamp.

        Args:
            new_state: State the request is entering. Transitions without an
                associated timestamp field only update ``state``.
        """
        self.state = new_state
        # datetime.now() (not time.time()) so stored values match the
        # declared datetime field types.
        now = datetime.now()
        if new_state == ReqState.P_ALLOCATED:
            self.p_scheduled_time = now
        elif new_state == ReqState.PREFILL_END:
            self.prefill_end_time = now
        elif new_state == ReqState.D_ALLOCATED:
            self.d_scheduled_time = now
        elif new_state == ReqState.FIRST_TOKEN_FINISH:
            self.first_token_time = now
        elif new_state == ReqState.DECODE_END:
            self.decode_end_time = now


class RequestResponse(BaseModel):
    """Non-streaming response envelope returned for a request."""

    request_id: str                         # id of the request being answered
    status: str                             # outcome indicator
    message: Optional[str] = None           # optional human-readable detail
    data: Optional[Dict[str, Any]] = None   # optional structured payload


class StreamResponse(BaseModel):
    """One chunk of a streaming response."""

    request_id: str              # id of the request this chunk belongs to
    chunk: str                   # chunk payload
    is_final: bool = False       # True on the last chunk of the stream
    error: Optional[str] = None  # set if the stream failed
    
    
class ScheduledResource(BaseModel):
    """Represents a scheduled resource with an instance and endpoint"""
    # Both fields start unset until the scheduler assigns them, so the
    # annotations are Optional to match the None defaults.
    instance: Optional[Instance] = None
    endpoint: Optional[Endpoint] = None