from __future__ import annotations

# Global constants for special participant IDs and nicknames
DEFAULT_HOST_USER_ID = "human_host_admin"
DEFAULT_HOST_USER_NICKNAME = "HostAdmin"

import uuid
from datetime import datetime, timezone
from typing import List, Optional, Dict, Any
from pydantic import BaseModel, Field

# Default factory for IDs
def default_uuid():
    """Return a freshly generated random UUID4 in canonical string form.

    Used as the ``default_factory`` for primary-key style fields in the
    models below, so each instance gets a unique identifier.
    """
    fresh = uuid.uuid4()
    return str(fresh)

class ChatMessage(BaseModel):
    """A single message posted to the shared chat transcript."""

    # Primary key; a unique id is generated per instance via the factory.
    message_id: str = Field(default_factory=default_uuid)
    # presumably references ParticipantConfig.participant_id — confirm with callers
    participant_id: str
    sender_nickname: str
    content: str
    # Captured in UTC at construction time.
    timestamp: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))
    tags: List[str] = Field(default_factory=list)
    observed_tags: Optional[List[str]] = None
    # Raw model output kept alongside the rendered content when the sender is an agent.
    raw_llm_output_if_agent: Optional[str] = None

class WhiteboardItem(BaseModel):
    """One artifact pinned to the shared whiteboard."""

    # Primary key; a unique id is generated per instance via the factory.
    reference_id: str = Field(default_factory=default_uuid)
    display_name: str
    full_local_path: str
    # presumably references ParticipantConfig.participant_id — confirm with callers
    uploader_participant_id: str
    # Captured in UTC at construction time.
    timestamp_added: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))

class GlobalTag(BaseModel):
    """Aggregate record for a tag observed anywhere in the conversation."""

    # Primary key. Callers are expected to normalize the name before storing.
    tag_name: str
    # Both timestamps default to "now" (UTC) at construction time.
    first_seen_timestamp: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))
    last_seen_timestamp: datetime = Field(default_factory=lambda: datetime.now(tz=timezone.utc))
    # Starts at 1: a record only exists once the tag has been seen at least once.
    occurrence_count: int = 1

class LLMLogEntry(BaseModel):
    """Audit record for one LLM invocation made on behalf of an agent."""

    # Primary key; a unique id is generated per instance via the factory.
    log_entry_id: str = Field(default_factory=default_uuid)
    agent_id: str
    # Start/end are required — the caller records wall-clock bounds of the call.
    timestamp_call_start: datetime
    timestamp_call_end: datetime
    # Free-form category, e.g. "active_listening", "public_speech".
    llm_call_type: str
    raw_llm_input_context_summary: Optional[str] = None
    raw_llm_output: str
    # Optional fields parsed out of raw_llm_output; None when absent/unparsed.
    parsed_thinking_process: Optional[str] = None
    parsed_desire_to_speak: Optional[bool] = None
    parsed_spoken_tags: Optional[List[str]] = None
    parsed_spoken_message: Optional[str] = None
    # FK to ChatMessage.message_id when the call produced a public message.
    associated_public_message_id: Optional[str] = None

class ParticipantConfig(BaseModel):
    """Static configuration for one chat participant (human or LLM agent)."""

    # Primary key; a unique id is generated per instance via the factory.
    participant_id: str = Field(default_factory=default_uuid)
    nickname: str
    # Expected values: "human" or "llm_agent" (free string, not validated here).
    role: str
    is_mvp_host: bool = False
    # NOTE(review): the llm_* fields look relevant only when role == "llm_agent" —
    # confirm against callers.
    llm_persona_prompt: Optional[str] = None
    llm_config_listening: Optional[Dict[str, Any]] = None
    llm_config_speaking: Optional[Dict[str, Any]] = None

    class Config:
        # Re-run pydantic validation when attributes are assigned after construction.
        validate_assignment = True