import os
import boto3

from .base_llm import BaseLLM


class AwsLLM(BaseLLM):
    """LLM backend for AWS Bedrock using the Converse streaming API.

    Credentials and configuration are read from the environment:
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_DEFAULT_REGION,
    and AWS_MODEL_ID.
    """

    def __init__(self):
        super().__init__("AWS")
        # Keep the session so other service clients (e.g. the 'bedrock'
        # control-plane client used by the connection check) can be created
        # with the same credentials.
        self._session = boto3.Session(
            aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
            aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY'),
            region_name=os.getenv('AWS_DEFAULT_REGION')
        )
        # NOTE(review): verify=False disables TLS certificate verification --
        # presumably needed for a corporate proxy, but it exposes traffic to
        # man-in-the-middle attacks. Confirm this is intentional.
        self.client = self._session.client('bedrock-runtime', verify=False)
        self.model_id = os.getenv('AWS_MODEL_ID')

    def converse_stream(self, history):
        """Stream model output for *history*, yielding text chunks.

        Parameters:
            history: list of {"role": ..., "content": ...} dicts; only
                user/assistant messages with truthy content are forwarded.

        Yields:
            str: text (or reasoning-text) deltas as they arrive. Any
            exception is yielded as a single "ERROR: ..." string instead
            of being raised, so callers can render it inline.
        """
        # Bedrock rejects empty text blocks, so blank content becomes " ".
        conversation = [
            {
                "role": msg["role"],
                "content": [{"text": msg["content"].strip() or " "}],
            }
            for msg in history
            if msg["role"] in ("user", "assistant") and msg.get("content")
        ]

        try:
            response = self.client.converse_stream(
                modelId=self.model_id,
                messages=conversation,
                inferenceConfig={
                    "maxTokens": 4096,
                    "temperature": 0.5,
                    "topP": 0.9
                }
            )
            if stream := response.get('stream'):
                for event in stream:
                    if 'contentBlockDelta' in event:
                        delta = event['contentBlockDelta'].get('delta', {})
                        # Surface either plain-text or reasoning-content deltas.
                        if text := delta.get('text') or delta.get('reasoningContent', {}).get('text'):
                            yield text
        except Exception as e:
            yield f"ERROR: {str(e)}"

    def _perform_connection_check(self):
        """Verify Bedrock connectivity by listing foundation models.

        Returns:
            dict: {"status": bool, "detail": str} describing the result.
        """
        try:
            # BUG FIX: ListFoundationModels belongs to the 'bedrock'
            # control-plane service, not 'bedrock-runtime' -- the old code
            # called it on self.client and always raised AttributeError, so
            # the check could never succeed. Also dropped maxResults: the
            # operation accepts only by* filters, no pagination parameter.
            bedrock = self._session.client('bedrock', verify=False)
            response = bedrock.list_foundation_models()
            return {
                "status": True,
                "detail": f"Available models: {len(response.get('modelSummaries', []))}"
            }
        except Exception as e:
            return {"status": False, "detail": str(e)}