import logging
import random
from typing import Any, List

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
from backend.data.model import SchemaField
from backend.blocks.llm import AIConversationBlock, AIStructuredResponseGeneratorBlock, LlmModel, APIKeyCredentials, AICredentials, AICredentialsField, TEST_CREDENTIALS, TEST_CREDENTIALS_INPUT


# Module-level logger following the standard `__name__` convention.
# NOTE(review): unused in the visible portion of this file — may be used elsewhere.
logger = logging.getLogger(__name__)


class NotificationFacadeBlock(AIConversationBlock):
    """Facade block that forwards a notification payload through an LLM.

    Formats the incoming notification text into a fixed prompt template and
    performs a single ``llm_call`` via the parent block, yielding the model's
    response as ``notification_output``.
    """

    # Prompt sent to the model; the placeholder is filled with the raw
    # notification text. The instruction text is intentionally in Chinese
    # ("please return the content below") — it is a runtime string and must
    # not be altered.
    PROMPT_TEMPLATE: str = """
    请返回下面的内容:
    {notification_input}
    """

    class Input(BlockSchema):
        # Raw notification text to be passed through the model.
        notification_input: str = SchemaField(
            description="notification input"
        )
        # Which LLM to route the conversation through.
        model: LlmModel = SchemaField(
            title="LLM Model",
            default=LlmModel.GPT4_TURBO,
            description="The language model to use for the conversation.",
        )
        # Credentials for the selected LLM provider.
        credentials: AICredentials = AICredentialsField()
        # Optional cap on completion length; None lets the provider decide.
        max_tokens: int | None = SchemaField(
            advanced=True,
            default=None,
            description="The maximum number of tokens to generate in the chat completion.",
        )

    class Output(BlockSchema):
        # The model's response to the formatted notification prompt.
        notification_output: str = SchemaField(
            description="notification output"
        )
        error: str = SchemaField(description="Error message if the API call failed.")

    def __init__(self):
        # Deliberately invoke Block.__init__ directly rather than
        # super().__init__(): this registers THIS block's own id and
        # schemas instead of inheriting the parent block's registration.
        Block.__init__(
            self,
            id="6ea39467-743d-4d20-bc21-b0ab0664eb76",
            description="Advanced LLM call that takes a prompt and sends them to the language model.",
            categories={BlockCategory.AI},
            input_schema=NotificationFacadeBlock.Input,
            output_schema=NotificationFacadeBlock.Output
        )

    def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        """Render the prompt, make one LLM call, and yield the response.

        Yields:
            ("notification_output", <model response>) on success; any
            exception from ``llm_call`` propagates to the framework.
        """
        rendered_prompt = self.PROMPT_TEMPLATE.format(
            notification_input=input_data.notification_input
        )
        # Single-turn conversation: the rendered prompt as one user message.
        conversation = [{"role": "user", "content": rendered_prompt}]
        response = self.llm_call(
            AIStructuredResponseGeneratorBlock.Input(
                prompt="",
                credentials=input_data.credentials,
                model=input_data.model,
                conversation_history=conversation,
                max_tokens=input_data.max_tokens,
                # Empty format: no structured-output constraint is imposed.
                expected_format={},
            ),
            credentials=credentials,
        )
        yield "notification_output", response
