# services/db_service.py
# Encapsulates SQLite database CRUD operations for public chat history, whiteboard
# references, global tags, LLM agent private logs, and participant configuration.

import json
import sqlite3
import uuid  # retained from original imports; not referenced in this module's visible code
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional

from core.data_models import (
    DEFAULT_HOST_USER_ID,
    DEFAULT_HOST_USER_NICKNAME,
    ChatMessage,
    GlobalTag,
    LLMLogEntry,
    ParticipantConfig,
    WhiteboardItem,
)

# Default SQLite database file, created in the current working directory.
DB_NAME = "llm_chatroom_mvp.db"

class DBService:
    """SQLite persistence layer for the LLM chatroom MVP.

    Provides CRUD operations for public chat history, whiteboard file
    references, global tags, LLM-agent private logs and participant
    configuration.

    Connection strategy: for a ``:memory:`` database a single shared
    connection is kept open for the lifetime of the service, because closing
    an in-memory connection would discard all data.  File-based databases
    open and close a fresh connection for every operation.

    NOTE(review): datetime parameters are bound directly and rely on
    sqlite3's default datetime adapter (deprecated since Python 3.12);
    values read back are TEXT and are parsed by the Pydantic models.
    """

    def __init__(self, db_name: Optional[str] = None):
        """Bind the service to *db_name* (a file path or ``:memory:``).

        Defaults to the module-level ``DB_NAME``.  The constant is resolved
        lazily (at instantiation time) rather than at function-definition
        time, so overriding ``DB_NAME`` before constructing a service works.
        """
        self.db_name = db_name if db_name is not None else DB_NAME
        # Shared connection, only populated for in-memory databases.
        self._shared_conn: Optional[sqlite3.Connection] = None
        if self.db_name == ":memory:":
            self._shared_conn = sqlite3.connect(self.db_name)

    def _get_connection(self) -> sqlite3.Connection:
        """Return the shared connection if available, otherwise a new one."""
        return self._shared_conn or sqlite3.connect(self.db_name)

    def _close_connection(self, conn: sqlite3.Connection):
        """Close *conn* unless it is the long-lived shared connection."""
        if conn and conn is not self._shared_conn:
            conn.close()

    @contextmanager
    def _connection(self, *, row_access: bool = False):
        """Yield a connection and release it afterwards (unless shared).

        With ``row_access=True`` the connection's ``row_factory`` is set to
        :class:`sqlite3.Row` so columns can be addressed by name.
        """
        conn = self._get_connection()
        if row_access:
            conn.row_factory = sqlite3.Row
        try:
            yield conn
        finally:
            self._close_connection(conn)

    def close(self):
        """Close the shared connection (primarily for :memory: databases)."""
        if self._shared_conn:
            self._shared_conn.close()
            self._shared_conn = None

    def create_tables_if_not_exist(self):
        """Create every application table that does not exist yet."""
        with self._connection() as conn:
            cursor = conn.cursor()

            cursor.execute("""
            CREATE TABLE IF NOT EXISTS PublicChatHistory (
                message_id TEXT PRIMARY KEY,
                participant_id TEXT NOT NULL,
                sender_nickname TEXT NOT NULL,
                content TEXT NOT NULL,
                timestamp DATETIME NOT NULL,
                tags TEXT, -- JSON list string, e.g., '["tag1", "tag2"]'
                observed_tags TEXT, -- JSON list string, for LLM Agent messages
                raw_llm_output_if_agent TEXT -- Store LLM Agent's raw output for this message
            );
            """)

            cursor.execute("""
            CREATE TABLE IF NOT EXISTS WhiteboardReferences (
                reference_id TEXT PRIMARY KEY,
                display_name TEXT NOT NULL,
                full_local_path TEXT NOT NULL UNIQUE,
                uploader_participant_id TEXT NOT NULL,
                timestamp_added DATETIME NOT NULL
            );
            """)

            cursor.execute("""
            CREATE TABLE IF NOT EXISTS GlobalTags (
                tag_name TEXT PRIMARY KEY, -- Normalized tag name
                first_seen_timestamp DATETIME NOT NULL,
                last_seen_timestamp DATETIME NOT NULL,
                occurrence_count INTEGER DEFAULT 1
            );
            """)

            cursor.execute("""
            CREATE TABLE IF NOT EXISTS LLMAgentPrivateLogs (
                log_entry_id TEXT PRIMARY KEY,
                agent_id TEXT NOT NULL,
                timestamp_call_start DATETIME NOT NULL,
                timestamp_call_end DATETIME NOT NULL,
                llm_call_type TEXT NOT NULL, -- e.g., "active_listening", "public_speech"
                raw_llm_input_context_summary TEXT,
                raw_llm_output TEXT NOT NULL,
                parsed_thinking_process TEXT,
                parsed_desire_to_speak BOOLEAN,
                parsed_spoken_tags TEXT, -- JSON list string
                parsed_spoken_message TEXT,
                associated_public_message_id TEXT -- FK to PublicChatHistory.message_id
            );
            """)

            cursor.execute("""
            CREATE TABLE IF NOT EXISTS ParticipantConfig (
                participant_id TEXT PRIMARY KEY,
                nickname TEXT NOT NULL UNIQUE,
                role TEXT NOT NULL, -- "human" or "llm_agent"
                is_mvp_host BOOLEAN DEFAULT FALSE,
                llm_persona_prompt TEXT,
                llm_config_listening TEXT, -- JSON string for model name, params
                llm_config_speaking TEXT  -- JSON string for model name, params
            );
            """)

            conn.commit()

    # --- ParticipantConfig CRUD ---
    def add_or_update_participant_config(self, config: ParticipantConfig):
        """Insert or replace a participant's configuration row.

        The optional per-direction LLM configs are serialized to JSON text;
        empty/None configs are stored as NULL.
        """
        listening_str = json.dumps(config.llm_config_listening, ensure_ascii=False) if config.llm_config_listening else None
        speaking_str = json.dumps(config.llm_config_speaking, ensure_ascii=False) if config.llm_config_speaking else None
        with self._connection() as conn:
            conn.execute("""
            INSERT OR REPLACE INTO ParticipantConfig (
                participant_id, nickname, role, is_mvp_host,
                llm_persona_prompt, llm_config_listening, llm_config_speaking
            ) VALUES (?, ?, ?, ?, ?, ?, ?)
            """, (
                config.participant_id, config.nickname, config.role, config.is_mvp_host,
                config.llm_persona_prompt, listening_str, speaking_str
            ))
            conn.commit()

    def _db_row_to_participant_config(self, row: sqlite3.Row) -> ParticipantConfig:
        """Convert a ParticipantConfig DB row into its Pydantic model."""
        return ParticipantConfig(
            participant_id=row["participant_id"],
            nickname=row["nickname"],
            role=row["role"],
            is_mvp_host=bool(row["is_mvp_host"]),
            llm_persona_prompt=row["llm_persona_prompt"],
            llm_config_listening=json.loads(row["llm_config_listening"]) if row["llm_config_listening"] else None,
            llm_config_speaking=json.loads(row["llm_config_speaking"]) if row["llm_config_speaking"] else None,
        )

    def get_participant_config_by_id(self, participant_id: str) -> Optional[ParticipantConfig]:
        """Return the configuration for *participant_id*, or None if absent."""
        with self._connection(row_access=True) as conn:
            row = conn.execute(
                "SELECT * FROM ParticipantConfig WHERE participant_id = ?",
                (participant_id,)).fetchone()
        return self._db_row_to_participant_config(row) if row else None

    def get_all_participant_configs(self) -> List[ParticipantConfig]:
        """Return every stored participant configuration."""
        with self._connection(row_access=True) as conn:
            rows = conn.execute("SELECT * FROM ParticipantConfig").fetchall()
        return [self._db_row_to_participant_config(row) for row in rows]

    # --- PublicChatHistory CRUD ---
    def add_chat_message(self, message: ChatMessage):
        """Persist a public chat message.

        ``tags`` / ``observed_tags`` are serialized to JSON text; empty
        lists are stored as NULL (read back as [] / None respectively).
        """
        tags_str = json.dumps(message.tags, ensure_ascii=False) if message.tags else None
        observed_tags_str = json.dumps(message.observed_tags, ensure_ascii=False) if message.observed_tags else None
        with self._connection() as conn:
            conn.execute("""
            INSERT INTO PublicChatHistory (
                message_id, participant_id, sender_nickname, content, timestamp,
                tags, observed_tags, raw_llm_output_if_agent
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            """, (
                message.message_id, message.participant_id, message.sender_nickname, message.content,
                message.timestamp, tags_str, observed_tags_str, message.raw_llm_output_if_agent
            ))
            conn.commit()

    def _db_row_to_chat_message(self, row: sqlite3.Row) -> ChatMessage:
        """Convert a PublicChatHistory DB row into a ChatMessage model."""
        return ChatMessage(
            message_id=row["message_id"],
            participant_id=row["participant_id"],
            sender_nickname=row["sender_nickname"],
            content=row["content"],
            # SQLite stores DATETIME as TEXT; the Pydantic model parses it.
            timestamp=row["timestamp"],
            tags=json.loads(row["tags"]) if row["tags"] else [],
            observed_tags=json.loads(row["observed_tags"]) if row["observed_tags"] else None,
            raw_llm_output_if_agent=row["raw_llm_output_if_agent"],
        )

    def get_recent_chat_messages(self, limit: int = 50) -> List[ChatMessage]:
        """Return the *limit* newest messages, oldest first."""
        with self._connection(row_access=True) as conn:
            rows = conn.execute(
                "SELECT * FROM PublicChatHistory ORDER BY timestamp DESC LIMIT ?",
                (limit,)).fetchall()
        # The query fetches newest-first; reverse into chronological order.
        return [self._db_row_to_chat_message(row) for row in rows][::-1]

    def get_chat_messages_by_tags(self, tags_to_filter: List[str], logic: str = "OR", limit: int = 50) -> List[ChatMessage]:
        """Return messages whose ``tags`` JSON column contains the given tags.

        Tags are matched with ``LIKE '%"<tag>"%'`` against the serialized
        JSON list and combined with AND/OR.  With an empty *tags_to_filter*
        this falls back to :meth:`get_recent_chat_messages`.  Results are
        returned oldest-first.

        Raises:
            ValueError: if *logic* is not "AND" or "OR" (case-insensitive).
        """
        if not tags_to_filter:
            return self.get_recent_chat_messages(limit=limit)

        # Whitelist the SQL connective: it is interpolated into the query
        # text, so it must never come straight from caller input.
        connective = logic.upper()
        if connective not in ("AND", "OR"):
            raise ValueError(f"logic must be 'AND' or 'OR', got {logic!r}")

        where_clause = f" {connective} ".join(["tags LIKE ?"] * len(tags_to_filter))
        params: list = [f'%"{tag}"%' for tag in tags_to_filter]
        params.append(limit)
        query = f"SELECT * FROM PublicChatHistory WHERE {where_clause} ORDER BY timestamp DESC LIMIT ?"

        with self._connection(row_access=True) as conn:
            rows = conn.execute(query, tuple(params)).fetchall()
        return [self._db_row_to_chat_message(row) for row in rows][::-1]

    # --- GlobalTags CRUD ---
    def _db_row_to_global_tag(self, row: sqlite3.Row) -> GlobalTag:
        """Convert a GlobalTags DB row into a GlobalTag model."""
        return GlobalTag(
            tag_name=row["tag_name"],
            first_seen_timestamp=row["first_seen_timestamp"],
            last_seen_timestamp=row["last_seen_timestamp"],
            occurrence_count=row["occurrence_count"],
        )

    def add_or_update_global_tag(self, tag_name: str, timestamp: datetime):
        """Insert *tag_name* or refresh its last-seen time and bump its count."""
        with self._connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT occurrence_count FROM GlobalTags WHERE tag_name = ?", (tag_name,))
            row = cursor.fetchone()
            if row:
                cursor.execute("""
                UPDATE GlobalTags
                SET last_seen_timestamp = ?, occurrence_count = ?
                WHERE tag_name = ?
                """, (timestamp, row[0] + 1, tag_name))
            else:
                cursor.execute("""
                INSERT INTO GlobalTags (tag_name, first_seen_timestamp, last_seen_timestamp, occurrence_count)
                VALUES (?, ?, ?, 1)
                """, (tag_name, timestamp, timestamp))
            conn.commit()

    def get_global_tag(self, tag_name: str) -> Optional[GlobalTag]:
        """Return the GlobalTag named *tag_name*, or None if absent."""
        with self._connection(row_access=True) as conn:
            row = conn.execute("SELECT * FROM GlobalTags WHERE tag_name = ?", (tag_name,)).fetchone()
        return self._db_row_to_global_tag(row) if row else None

    def get_all_global_tags(self) -> List[GlobalTag]:
        """Return all tags, most recently seen first."""
        with self._connection(row_access=True) as conn:
            rows = conn.execute("SELECT * FROM GlobalTags ORDER BY last_seen_timestamp DESC").fetchall()
        return [self._db_row_to_global_tag(row) for row in rows]

    def get_recent_active_tags(self, limit: int = 10) -> List[GlobalTag]:
        """Return up to *limit* tags ordered by recency, then frequency."""
        with self._connection(row_access=True) as conn:
            rows = conn.execute(
                "SELECT * FROM GlobalTags ORDER BY last_seen_timestamp DESC, occurrence_count DESC LIMIT ?",
                (limit,)).fetchall()
        return [self._db_row_to_global_tag(row) for row in rows]

    # --- WhiteboardReferences CRUD ---
    def _db_row_to_whiteboard_item(self, row: sqlite3.Row) -> WhiteboardItem:
        """Convert a WhiteboardReferences DB row into a WhiteboardItem model."""
        return WhiteboardItem(
            reference_id=row["reference_id"],
            display_name=row["display_name"],
            full_local_path=row["full_local_path"],
            uploader_participant_id=row["uploader_participant_id"],
            # DATETIME is stored as TEXT; the Pydantic model parses it.
            timestamp_added=row["timestamp_added"],
        )

    def add_whiteboard_item(self, item: WhiteboardItem):
        """Persist a whiteboard file reference.

        Raises sqlite3.IntegrityError when ``reference_id`` (PRIMARY KEY) or
        ``full_local_path`` (UNIQUE) already exists.
        """
        with self._connection() as conn:
            conn.execute("""
            INSERT INTO WhiteboardReferences (
                reference_id, display_name, full_local_path, uploader_participant_id, timestamp_added
            ) VALUES (?, ?, ?, ?, ?)
            """, (
                item.reference_id, item.display_name, item.full_local_path,
                item.uploader_participant_id, item.timestamp_added
            ))
            conn.commit()

    def get_whiteboard_item_by_id(self, reference_id: str) -> Optional[WhiteboardItem]:
        """Return the whiteboard item with *reference_id*, or None if absent."""
        with self._connection(row_access=True) as conn:
            row = conn.execute(
                "SELECT * FROM WhiteboardReferences WHERE reference_id = ?",
                (reference_id,)).fetchone()
        return self._db_row_to_whiteboard_item(row) if row else None

    def get_all_whiteboard_items(self) -> List[WhiteboardItem]:
        """Return all whiteboard items, newest first."""
        with self._connection(row_access=True) as conn:
            rows = conn.execute(
                "SELECT * FROM WhiteboardReferences ORDER BY timestamp_added DESC").fetchall()
        return [self._db_row_to_whiteboard_item(row) for row in rows]

    def remove_whiteboard_item(self, reference_id: str) -> bool:
        """Delete the item with *reference_id*; True when a row was removed."""
        with self._connection() as conn:
            cursor = conn.cursor()
            cursor.execute("DELETE FROM WhiteboardReferences WHERE reference_id = ?", (reference_id,))
            conn.commit()
            return cursor.rowcount > 0

    # --- LLMAgentPrivateLogs CRUD ---
    def add_llm_agent_log_entry(self, log_entry: LLMLogEntry):
        """Persist a private LLM call log entry.

        ``parsed_spoken_tags`` is serialized to JSON text and
        ``parsed_desire_to_speak`` to 0/1; None values stay NULL.
        """
        spoken_tags_str = (json.dumps(log_entry.parsed_spoken_tags, ensure_ascii=False)
                           if log_entry.parsed_spoken_tags is not None else None)
        desire_int = (int(log_entry.parsed_desire_to_speak)
                      if log_entry.parsed_desire_to_speak is not None else None)
        with self._connection() as conn:
            conn.execute("""
            INSERT INTO LLMAgentPrivateLogs (
                log_entry_id, agent_id, timestamp_call_start, timestamp_call_end,
                llm_call_type, raw_llm_input_context_summary, raw_llm_output,
                parsed_thinking_process, parsed_desire_to_speak, parsed_spoken_tags,
                parsed_spoken_message, associated_public_message_id
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, (
                log_entry.log_entry_id,
                log_entry.agent_id,
                log_entry.timestamp_call_start,
                log_entry.timestamp_call_end,
                log_entry.llm_call_type,
                log_entry.raw_llm_input_context_summary,
                log_entry.raw_llm_output,
                log_entry.parsed_thinking_process,
                desire_int,
                spoken_tags_str,
                log_entry.parsed_spoken_message,
                log_entry.associated_public_message_id,
            ))
            conn.commit()

    def _db_row_to_llm_log_entry(self, row: sqlite3.Row) -> LLMLogEntry:
        """Convert an LLMAgentPrivateLogs DB row into an LLMLogEntry model."""
        desire = row["parsed_desire_to_speak"]
        return LLMLogEntry(
            log_entry_id=row["log_entry_id"],
            agent_id=row["agent_id"],
            timestamp_call_start=row["timestamp_call_start"],
            timestamp_call_end=row["timestamp_call_end"],
            llm_call_type=row["llm_call_type"],
            raw_llm_input_context_summary=row["raw_llm_input_context_summary"],
            raw_llm_output=row["raw_llm_output"],
            parsed_thinking_process=row["parsed_thinking_process"],
            parsed_desire_to_speak=bool(desire) if desire is not None else None,
            parsed_spoken_tags=json.loads(row["parsed_spoken_tags"]) if row["parsed_spoken_tags"] else None,
            parsed_spoken_message=row["parsed_spoken_message"],
            associated_public_message_id=row["associated_public_message_id"],
        )

    def get_logs_for_agent(self, agent_id: str, limit: int = 20) -> List[LLMLogEntry]:
        """Return the newest *limit* log entries for *agent_id*, newest first."""
        with self._connection(row_access=True) as conn:
            rows = conn.execute("""
                SELECT * FROM LLMAgentPrivateLogs
                WHERE agent_id = ?
                ORDER BY timestamp_call_start DESC
                LIMIT ?
            """, (agent_id, limit)).fetchall()
        return [self._db_row_to_llm_log_entry(row) for row in rows]

    def get_llm_log_entry_by_id(self, log_entry_id: str) -> Optional[LLMLogEntry]:
        """Return a specific LLM log entry by its ID, or None if absent."""
        with self._connection(row_access=True) as conn:
            row = conn.execute(
                "SELECT * FROM LLMAgentPrivateLogs WHERE log_entry_id = ?",
                (log_entry_id,)).fetchone()
        return self._db_row_to_llm_log_entry(row) if row else None

    def get_last_successful_speech_tags(self, agent_id: str) -> Optional[List[str]]:
        """Return ``parsed_spoken_tags`` of the agent's latest successful speech.

        A speech is "successful" when its ``parsed_spoken_message`` is
        non-empty.  Returns None when no such entry exists, the stored tags
        are NULL/empty, or the stored JSON is malformed or not a list.
        """
        with self._connection(row_access=True) as conn:
            row = conn.execute("""
                SELECT parsed_spoken_tags
                FROM LLMAgentPrivateLogs
                WHERE agent_id = ?
                  AND llm_call_type = 'public_speech'
                  AND parsed_spoken_message IS NOT NULL
                  AND parsed_spoken_message != ''
                ORDER BY timestamp_call_end DESC
                LIMIT 1
            """, (agent_id,)).fetchone()
        if not row or not row["parsed_spoken_tags"]:
            return None
        try:
            tags = json.loads(row["parsed_spoken_tags"])
        except json.JSONDecodeError:
            return None  # malformed JSON in the column; treat as absent
        return tags if isinstance(tags, list) else None

    def update_log_entry_with_public_message_id(self, log_entry_id: str, public_message_id: str):
        """Link a log entry to the public chat message it produced."""
        with self._connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
            UPDATE LLMAgentPrivateLogs
            SET associated_public_message_id = ?
            WHERE log_entry_id = ?
            """, (public_message_id, log_entry_id))
            conn.commit()
            if cursor.rowcount == 0:
                print(f"Warning: No LLMLogEntry found with log_entry_id {log_entry_id} to update.")

    # Add other CRUD methods for other data models here later

def _main() -> None:
    """Manual smoke test exercising DBService CRUD for every table.

    Creates participant configs, public chat messages, whiteboard items,
    global tags and LLM private-log entries, then verifies reads, updates
    and removals via prints and asserts. Intended to be run directly
    (see the ``__main__`` guard at the bottom of this file).

    NOTE(review): the checks below use bare ``assert`` statements, which
    are stripped when Python runs with ``-O`` — acceptable for a dev
    smoke test, but they are not real input validation.
    """
    import os
    # Ensure imports for testing are available if not already at top level

    # Uses the default file-based DB (DB_NAME), not an in-memory one.
    db_service = DBService()
    db_service.create_tables_if_not_exist()
    # Example CRUD operations for ParticipantConfig
    print("\n--- Testing ParticipantConfig CRUD ---")
    
    # Create dummy configs
    # Ensure the main human user created by this test script matches
    # the ID and nickname expected by the main application logic.
    human_config = ParticipantConfig(
        participant_id=DEFAULT_HOST_USER_ID,    # Use constant
        nickname=DEFAULT_HOST_USER_NICKNAME, # Use constant
        role="human",
        is_mvp_host=True
    )
    # Agent personas below are Chinese-language runtime prompts; they are
    # data fed to the LLM, not comments, so they stay verbatim.
    agent1_config = ParticipantConfig(
        participant_id="llm_agent_01",
        nickname="EchoBot",
        role="llm_agent",
        llm_persona_prompt="""你是一位经验丰富的的讨论参与者和思想促进者。你的核心使命是提升对话的质量、深度和准确性。
在交流中，请你：
1.  **积极参与**: 主动分享你的见解，适时提出有深度的问题，推动讨论向更深层次发展。不要害怕挑战现有观点或引入新的视角。
2.  **建设性批判**: 当你发现他人发言中存在逻辑谬误、事实错误、不完整之处或有待改进的地方时，请清晰、直接、且有理有据地指出。你的目标不是驳倒对方，而是帮助完善观点，共同接近真理。在批判的同时，尽可能提出具体的改进建议或思考方向。
3.  **拥抱辩证思维**: 鼓励从不同角度审视问题，识别和探讨观点间的内在联系、潜在矛盾以及可能的整合点。帮助揭示讨论的复杂性和多面性。
4.  **保持率直与客观**: 你的反馈应基于事实和逻辑，避免不必要的委婉或奉承。评价他人观点时，保持中立和客观，不因个人偏好或发言者身份而有所偏颇。
5.  **尊重并促进成长**: 即使在提出尖锐批评时，也要保持对他人的尊重。相信每一次有效的批判都是促进集体认知进步的机会。
你的语气应是：专业、理性、坦诚、开放，但同时坚定有力。
避免：人身攻击、情绪化表达、无端的奉承或附和、模糊不清的批评。""",
        llm_config_listening={"model": "ollama/gemma3:27b-it-q8_0", "temperature": 0.1},
        llm_config_speaking={"model": "ollama/gemma3:27b-it-q8_0", "temperature": 0.1}
    )
    agent2_config = ParticipantConfig(
        participant_id="llm_agent_02",
        nickname="Summarizer",
        role="llm_agent",
        llm_persona_prompt="""你是一位富有建设精神的批判性伙伴 (Constructive Critical Partner)。你的目标不仅是发现问题，更重要的是推动找到更好的解决方案或更完善的理解。
在互动中，你应：
1.  **以解决方案为导向进行批判**: 在指出他人发言的不足之处时，你的批判应服务于“如何做得更好”这一目标。例如，指出数据不足后，可以建议需要哪些数据；指出逻辑缺陷后，可以尝试重构逻辑链条。
2.  **清晰阐述“为什么是错的/不足的”**: 不只是说“不对”，而是要清晰解释判断的理由、依据的标准或参照的逻辑。
3.  **主动提供替代视角或方案**: 在反驳一个观点后，如果可能，请提供一个你认为更合理或更全面的替代观点，或者至少指出通往更好方案的路径。
4.  **正直不阿**: 你的评价和建议完全基于对话内容本身的质量，不阿谀奉承，不因压力而妥协观点。
5.  **鼓励迭代与改进**: 将讨论视为一个不断迭代和完善的过程，你的每一次发言都旨在推动这个过程。
你的语气应是：务实、有启发性、清晰、坚定、合作性。
避免：空泛的指责、破坏性的批评（只破不立）、居高临下的态度、固执己见不听取他人。""",
        llm_config_listening={"model": "ollama/gemma3:27b-it-q8_0"},
        llm_config_speaking={"model": "ollama/gemma3:27b-it-q8_0"}
    )

    db_service.add_or_update_participant_config(human_config)
    db_service.add_or_update_participant_config(agent1_config)
    db_service.add_or_update_participant_config(agent2_config)

    print("\nFetching all participant configs:")
    all_configs = db_service.get_all_participant_configs()
    for cfg in all_configs:
        print(f"  ID: {cfg.participant_id}, Nickname: {cfg.nickname}, Role: {cfg.role}, Host: {cfg.is_mvp_host}")
        if cfg.llm_config_listening:
            print(f"    Listen Cfg: {cfg.llm_config_listening}")
        if cfg.llm_config_speaking:
            print(f"    Speak Cfg: {cfg.llm_config_speaking}")


    print("\nFetching EchoBot by ID:")
    retrieved_agent = db_service.get_participant_config_by_id("llm_agent_01")
    if retrieved_agent:
        print(f"  Retrieved: {retrieved_agent.nickname}, Persona: {retrieved_agent.llm_persona_prompt}")
        print(f"  Listen Cfg: {retrieved_agent.llm_config_listening}")
    else:
        print("  EchoBot not found.")
        
    print("\nUpdating EchoBot's persona:")
    if retrieved_agent:
        retrieved_agent.llm_persona_prompt = "You are an extremely enthusiastic bot that echoes things with joy!"
        retrieved_agent.llm_config_listening["temperature"] = 0.2 # type: ignore
        db_service.add_or_update_participant_config(retrieved_agent)
        
        updated_agent = db_service.get_participant_config_by_id("llm_agent_01")
        if updated_agent:
            print(f"  Updated Persona: {updated_agent.llm_persona_prompt}")
            print(f"  Updated Listen Cfg: {updated_agent.llm_config_listening}")


    print("\n--- Testing PublicChatHistory CRUD ---")
    from datetime import datetime, timedelta # This is fine here as it's specific to this test block
    import uuid # This is fine here as it's specific to this test block

    # Add some dummy messages
    # Timestamps are spaced one minute apart so recency ordering is testable.
    msg1 = ChatMessage(
        message_id=str(uuid.uuid4()), participant_id="human_user_01", sender_nickname="HumanUser",
        content="Hello everyone! This is the first message.", timestamp=datetime.now(timezone.utc) - timedelta(minutes=5),
        tags=["greeting", "general"]
    )
    msg2 = ChatMessage(
        message_id=str(uuid.uuid4()), participant_id="llm_agent_01", sender_nickname="EchoBot",
        content="Hello HumanUser! This is the first message.", timestamp=datetime.now(timezone.utc) - timedelta(minutes=4),
        tags=["greeting", "response"], observed_tags=["general"], raw_llm_output_if_agent="Raw output for msg2"
    )
    msg3 = ChatMessage(
        message_id=str(uuid.uuid4()), participant_id="llm_agent_02", sender_nickname="Summarizer",
        content="I will summarize this discussion about greetings.", timestamp=datetime.now(timezone.utc) - timedelta(minutes=3),
        tags=["summary", "meta"], observed_tags=["greeting"]
    )
    msg4 = ChatMessage(
        message_id=str(uuid.uuid4()), participant_id="human_user_01", sender_nickname="HumanUser",
        content="Let's talk about Python.", timestamp=datetime.now(timezone.utc) - timedelta(minutes=2),
        tags=["python", "programming"]
    )
    msg5 = ChatMessage(
        message_id=str(uuid.uuid4()), participant_id="llm_agent_01", sender_nickname="EchoBot",
        content="Python is a great language for programming!", timestamp=datetime.now(timezone.utc) - timedelta(minutes=1),
        tags=["python", "positive"], observed_tags=["python", "programming"]
    )

    # Only msg1/msg2 are added here; msg3-5 are added after the whiteboard
    # tests below (interleaved deliberately, it seems — order of add calls
    # does not affect the recency queries, which sort by timestamp).
    db_service.add_chat_message(msg1)
    db_service.add_chat_message(msg2)
    
    print("\n--- Testing WhiteboardReferences CRUD ---")
    
    # NOTE(review): the backslash before a non-ASCII char below is NOT an
    # escape sequence, so it stays a literal backslash — i.e. these are
    # Windows-style relative paths. Confirm this is intended (a raw string
    # or forward slashes would be safer cross-platform).
    wb_item1 = WhiteboardItem(
        reference_id=str(uuid.uuid4()),
        display_name="小明的喜好",
        full_local_path="test_data\小明的喜好.txt",
        uploader_participant_id="human_user_01",
        timestamp_added=datetime.now(timezone.utc) - timedelta(hours=2)
    )
    wb_item2 = WhiteboardItem(
        reference_id=str(uuid.uuid4()),
        display_name="打败大魔王的方法",
        full_local_path="test_data\打败大魔王的方法.md",
        uploader_participant_id="llm_agent_02",
        timestamp_added=datetime.now(timezone.utc) - timedelta(hours=1)
    )
    
    db_service.add_whiteboard_item(wb_item1)
    db_service.add_whiteboard_item(wb_item2)
    
    print("\nFetching all whiteboard items:")
    all_wb_items = db_service.get_all_whiteboard_items()
    for item in all_wb_items:
        print(f"  ID: {item.reference_id}, Name: {item.display_name}, Path: {item.full_local_path}, Uploader: {item.uploader_participant_id}")
    # NOTE(review): this count assumes a fresh DB; re-running against an
    # existing llm_chatroom_mvp.db file may trip these asserts.
    assert len(all_wb_items) == 2
    
    print(f"\nFetching whiteboard item by ID: {wb_item2.reference_id}")
    retrieved_wb_item = db_service.get_whiteboard_item_by_id(wb_item2.reference_id)
    if retrieved_wb_item:
        print(f"  Retrieved: {retrieved_wb_item.display_name}, Path: {retrieved_wb_item.full_local_path}")
        assert retrieved_wb_item.display_name == wb_item2.display_name
    else:
        print(f"  Whiteboard item {wb_item2.reference_id} not found.")
    
    print(f"\nRemoving whiteboard item by ID: {wb_item1.reference_id}")
    remove_success = db_service.remove_whiteboard_item(wb_item1.reference_id)
    print(f"  Removal successful: {remove_success}")
    assert remove_success
    
    print("\nFetching all whiteboard items after removal:")
    all_wb_items_after_removal = db_service.get_all_whiteboard_items()
    for item in all_wb_items_after_removal:
        print(f"  ID: {item.reference_id}, Name: {item.display_name}")
    assert len(all_wb_items_after_removal) == 1
    
    print("\nAttempting to remove non-existent whiteboard item:")
    non_existent_id = str(uuid.uuid4())
    remove_fail_success = db_service.remove_whiteboard_item(non_existent_id)
    print(f"  Removal successful (should be False): {remove_fail_success}")
    assert not remove_fail_success
    
    # Remaining chat messages (see note above where msg1/msg2 were added).
    db_service.add_chat_message(msg3)
    db_service.add_chat_message(msg4)
    db_service.add_chat_message(msg5)
    
    print("\nFetching recent messages (limit 3):")
    recent_messages = db_service.get_recent_chat_messages(limit=3)
    for msg in recent_messages:
        print(f"  MsgID: {msg.message_id}, Sender: {msg.sender_nickname}, Content: '{msg.content[:30]}...', Tags: {msg.tags}")
    
    print(f"  Expected 3, Got: {len(recent_messages)}")
    
    
    print("\nFetching messages tagged 'greeting' OR 'python' (limit 10):")
    tagged_messages = db_service.get_chat_messages_by_tags(tags_to_filter=["greeting", "python"], logic="OR", limit=10)
    for msg in tagged_messages:
        print(f"  MsgID: {msg.message_id}, Sender: {msg.sender_nickname}, Content: '{msg.content[:30]}...', Tags: {msg.tags}")
    
    # Known limitation acknowledged below: AND semantics over a JSON-string
    # tags column via LIKE are not implemented, so OR is used as a stand-in.
    print("\nFetching messages tagged 'python' AND 'positive' (limit 10) - using OR for now as AND is more complex with LIKE:")
    tagged_and_messages = db_service.get_chat_messages_by_tags(tags_to_filter=["python", "positive"], logic="OR", limit=10)
    print("  (Note: Current 'AND' logic with LIKE on JSON string behaves as OR for multiple tags on same message)")
    for msg in tagged_and_messages:
         print(f"  MsgID: {msg.message_id}, Sender: {msg.sender_nickname}, Content: '{msg.content[:30]}...', Tags: {msg.tags}")
    
    print("\n--- Testing GlobalTags CRUD ---")
    now = datetime.now(timezone.utc)
    # "python" is added three times to verify occurrence counting.
    db_service.add_or_update_global_tag("python", now - timedelta(days=1))
    db_service.add_or_update_global_tag("general", now - timedelta(hours=5))
    db_service.add_or_update_global_tag("python", now - timedelta(minutes=30))
    db_service.add_or_update_global_tag("greeting", now - timedelta(minutes=10))
    db_service.add_or_update_global_tag("meta", now - timedelta(minutes=5))
    db_service.add_or_update_global_tag("summary", now - timedelta(minutes=5))
    db_service.add_or_update_global_tag("python", now)
    
    print("\nFetching tag 'python':")
    py_tag = db_service.get_global_tag("python")
    if py_tag:
        print(f"  Tag: {py_tag.tag_name}, Count: {py_tag.occurrence_count}, Last Seen: {py_tag.last_seen_timestamp}")
        assert py_tag.occurrence_count == 3
    
    print("\nFetching all global tags (should be ordered by last_seen desc):")
    all_tags = db_service.get_all_global_tags()
    for tag_obj in all_tags:
        print(f"  Tag: {tag_obj.tag_name}, Count: {tag_obj.occurrence_count}, Last: {tag_obj.last_seen_timestamp}, First: {tag_obj.first_seen_timestamp}")
    
    print("\nFetching 3 most recent active tags:")
    recent_active = db_service.get_recent_active_tags(limit=3)
    for tag_obj in recent_active:
        print(f"  Tag: {tag_obj.tag_name}, Count: {tag_obj.occurrence_count}, Last: {tag_obj.last_seen_timestamp}")
    assert len(recent_active) <= 3
    if recent_active:
        # "python" was last updated at `now`, so it must sort first.
        assert recent_active[0].tag_name == "python"
    
    print("\n--- Testing LLMAgentPrivateLogs CRUD ---")
    
    log_entry1 = LLMLogEntry(
        agent_id="llm_agent_01",
        timestamp_call_start=datetime.now(timezone.utc) - timedelta(seconds=10),
        timestamp_call_end=datetime.now(timezone.utc) - timedelta(seconds=5),
        llm_call_type="active_listening",
        raw_llm_input_context_summary="User said 'hello'",
        raw_llm_output="Thinking about response to 'hello'.",
        parsed_thinking_process="User greeted. Should I respond?",
        parsed_desire_to_speak=True,
        parsed_spoken_tags=["greeting", "internal"],
        parsed_spoken_message=None,
        associated_public_message_id=None
    )
    log_entry2 = LLMLogEntry(
        agent_id="llm_agent_01",
        timestamp_call_start=datetime.now(timezone.utc) - timedelta(seconds=4),
        timestamp_call_end=datetime.now(timezone.utc) - timedelta(seconds=1),
        llm_call_type="public_speech",
        raw_llm_input_context_summary="Decided to say hi back.",
        raw_llm_output="Hello there! I am EchoBot.",
        parsed_thinking_process="Constructed greeting message.",
        parsed_desire_to_speak=None,
        parsed_spoken_tags=["greeting", "introduction"],
        parsed_spoken_message="Hello there! I am EchoBot.",
        associated_public_message_id=msg2.message_id
    )
    log_entry3 = LLMLogEntry(
        agent_id="llm_agent_02",
        timestamp_call_start=datetime.now(timezone.utc) - timedelta(minutes=1),
        timestamp_call_end=datetime.now(timezone.utc) - timedelta(seconds=30),
        llm_call_type="active_listening",
        raw_llm_output="Processing recent conversation for summary.",
        parsed_thinking_process="Identified key points about greetings.",
        parsed_desire_to_speak=False,
        raw_llm_input_context_summary=None,
        parsed_spoken_tags=None,
        parsed_spoken_message=None,
        associated_public_message_id=None
    )
    
    db_service.add_llm_agent_log_entry(log_entry1)
    db_service.add_llm_agent_log_entry(log_entry2)
    db_service.add_llm_agent_log_entry(log_entry3)
    
    print("Finished testing LLMAgentPrivateLogs add_llm_agent_log_entry.")

    print("\n--- Testing get_logs_for_agent ---")
    log_entry4_agent01 = LLMLogEntry(
        agent_id="llm_agent_01",
        timestamp_call_start=datetime.now(timezone.utc) - timedelta(seconds=20),
        timestamp_call_end=datetime.now(timezone.utc) - timedelta(seconds=15),
        llm_call_type="background_processing",
        raw_llm_output="Processing data...",
        parsed_thinking_process="Thinking about data."
    )
    log_entry5_agent01 = LLMLogEntry(
        agent_id="llm_agent_01",
        timestamp_call_start=datetime.now(timezone.utc) - timedelta(seconds=3),
        timestamp_call_end=datetime.now(timezone.utc) - timedelta(seconds=1),
        llm_call_type="quick_check",
        raw_llm_output="All systems nominal.",
        parsed_thinking_process="Final check complete."
    )
    db_service.add_llm_agent_log_entry(log_entry4_agent01)
    db_service.add_llm_agent_log_entry(log_entry5_agent01)
    
    agent01_logs = db_service.get_logs_for_agent(agent_id="llm_agent_01", limit=3)
    print(f"Retrieved {len(agent01_logs)} logs for agent llm_agent_01 (limit 3):")
    for log in agent01_logs:
        print(f"  Log ID: {log.log_entry_id}, Type: {log.llm_call_type}, Start: {log.timestamp_call_start}, DesireSpeak: {log.parsed_desire_to_speak}, Tags: {log.parsed_spoken_tags}")
    
    # Expectation: newest-first ordering, so the most recent call type leads.
    assert len(agent01_logs) <= 3
    if len(agent01_logs) > 0:
        assert agent01_logs[0].llm_call_type == "quick_check"
    if len(agent01_logs) > 1:
        assert agent01_logs[0].timestamp_call_start > agent01_logs[1].timestamp_call_start

    agent02_logs = db_service.get_logs_for_agent(agent_id="llm_agent_02", limit=5)
    print(f"\nRetrieved {len(agent02_logs)} logs for agent llm_agent_02 (limit 5):")
    for log in agent02_logs:
        print(f"  Log ID: {log.log_entry_id}, Type: {log.llm_call_type}, Start: {log.timestamp_call_start}")
    assert len(agent02_logs) == 1
    if agent02_logs:
        assert agent02_logs[0].agent_id == "llm_agent_02"

    non_existent_agent_logs = db_service.get_logs_for_agent(agent_id="non_existent_agent", limit=5)
    print(f"\nRetrieved {len(non_existent_agent_logs)} logs for non_existent_agent (limit 5):")
    assert len(non_existent_agent_logs) == 0
    print("Finished testing get_logs_for_agent.")

    print("\n--- Testing update_log_entry_with_public_message_id ---")
    # Use log_entry1 and a new public message ID for testing
    test_public_msg_id = str(uuid.uuid4())
    print(f"Attempting to update log_entry_id: {log_entry1.log_entry_id} with public_message_id: {test_public_msg_id}")
    db_service.update_log_entry_with_public_message_id(log_entry1.log_entry_id, test_public_msg_id)
    
    # Verify the update by fetching the log entry again using the new method
    updated_log_entry = db_service.get_llm_log_entry_by_id(log_entry1.log_entry_id)
    if updated_log_entry:
        print(f"  Verified Log ID: {updated_log_entry.log_entry_id}, Associated Public Msg ID: {updated_log_entry.associated_public_message_id}")
        assert updated_log_entry.associated_public_message_id == test_public_msg_id
    else:
        assert False, f"Log entry {log_entry1.log_entry_id} not found after update attempt."
        
    # The non-existent id path only prints a warning; nothing to assert.
    print("Attempting to update a non-existent log_entry_id:")
    db_service.update_log_entry_with_public_message_id("non_existent_log_id", "some_public_id")
    print("Finished testing update_log_entry_with_public_message_id.")


    print("\nDBService example run complete with ParticipantConfig, PublicChatHistory, GlobalTags, WhiteboardReferences, and LLMAgentPrivateLogs tests.")
    
    # Cleanup: delete the DB file only if a non-default name was used;
    # the shared development database (DB_NAME) is deliberately preserved.
    if db_service.db_name != DB_NAME and os.path.exists(db_service.db_name):
        print(f"Cleaning up test database: {db_service.db_name}")
        os.remove(db_service.db_name)
    elif db_service.db_name == DB_NAME:
        print(f"Test run used the main database ({DB_NAME}). Not deleting automatically.")
# Run the manual smoke test only when this module is executed directly,
# never on import.
if __name__ == '__main__':
    _main()