import json

import numpy as np
from mlserver import MLModel
from mlserver.types import InferenceRequest, InferenceResponse, ResponseOutput, MetadataModelResponse, MetadataTensor
from transformers import pipeline, AutoTokenizer

class FinancialSentimentModel(MLModel):
    """MLServer runtime serving a DistilRoBERTa model fine-tuned for
    financial-news sentiment analysis.

    Each input tensor is expected to carry one text string as its first
    data element; the response is a BYTES tensor of JSON-encoded
    ``{"label": ..., "score": ...}`` objects, one per input.
    """

    # HuggingFace Hub id of the fine-tuned checkpoint.
    MODEL_NAME = "mrm8488/distilroberta-finetuned-financial-news-sentiment-analysis"
    # Maximum sequence length accepted by the underlying RoBERTa encoder.
    MAX_LENGTH = 512

    async def load(self) -> bool:
        """Load tokenizer and sentiment pipeline; mark the model ready."""
        self.tokenizer = AutoTokenizer.from_pretrained(self.MODEL_NAME)
        self.model = pipeline(
            "sentiment-analysis", model=self.MODEL_NAME, tokenizer=self.tokenizer
        )
        self.ready = True
        return self.ready

    async def predict(self, payload: InferenceRequest) -> InferenceResponse:
        """Run sentiment analysis on each input tensor's first text element.

        Returns an InferenceResponse whose single ``output`` tensor holds one
        JSON string per input text (protocol-compliant BYTES data).
        """
        # NOTE(review): only the first data element of each input tensor is
        # used, preserving the original one-text-per-tensor contract.
        texts = [inp.data[0] for inp in payload.inputs]

        # Truncation is delegated to the pipeline itself (it forwards
        # tokenizer kwargs), avoiding the previous tokenize→decode→re-tokenize
        # round trip. The whole batch is processed in one call; an empty
        # request short-circuits to an empty result list.
        if texts:
            predictions = self.model(texts, truncation=True, max_length=self.MAX_LENGTH)
        else:
            predictions = []

        # BYTES tensors must contain strings/bytes, not raw dicts: serialize
        # each prediction to a JSON string so clients can decode it.
        response = [
            json.dumps({"label": pred["label"], "score": pred["score"]})
            for pred in predictions
        ]

        return InferenceResponse(
            model_name=self.name,
            outputs=[
                ResponseOutput(
                    name="output",
                    shape=[len(response)],
                    datatype="BYTES",
                    data=response
                )
            ]
        )

    async def metadata(self) -> MetadataModelResponse:
        """Describe the model's input/output tensors per the inference protocol."""
        return MetadataModelResponse(
            name=self.name,
            versions=self.versions,
            platform="HuggingFace",
            inputs=[
                # -1: variable batch dimension (one text per element).
                MetadataTensor(name="input", datatype="BYTES", shape=[-1])
            ],
            outputs=[
                MetadataTensor(name="output", datatype="BYTES", shape=[-1])
            ]
        )
