from pydantic import BaseModel
from abc import ABC, abstractmethod
from llmapi.base.types import *
from typing import List, Optional, Union, Generator, Sequence, Iterator, Deque, Tuple
import random
import time
import uuid
from llmapi.base import *

_fake_messages :List[str] = ['Hello', 'How are you?', 'I am fine.']
_fake_max_tokens :int = 2048
class FakeModel(BaseModel, ChatModel, EmbeddingModel):
    """Stub model that fabricates OpenAI-style response payloads.

    Intended for exercising client code without a real LLM backend:
    embeddings are a fixed vector, and chat/text completions are drawn
    at random from ``_fake_messages``.  All sampling parameters
    (temperature, top_p, ...) are accepted for interface compatibility
    but ignored.
    """

    @property
    def name(self) -> str:
        """Model identifier echoed in every response payload."""
        return "fake"

    def embedding(self, input: str, **kwargs) -> Embedding:
        """Return a fixed 3-dimensional embedding payload, ignoring *input*.

        Token usage is reported as zero because nothing is tokenized.
        """
        return {
            "object": "list",
            "data": [
                {
                    "object": "embedding",
                    "embedding": [
                        0.0023064255,
                        -0.009327292,
                        -0.0028842222,
                    ],
                    "index": 0,
                }
            ],
            "model": self.name,
            "usage": {
                "prompt_tokens": 0,
                "total_tokens": 0,
            },
        }

    @staticmethod
    def _fake_logprobs() -> dict:
        """Placeholder logprobs structure shared by both non-streaming paths."""
        return {
            "tokens": [],
            "text_offset": [10],
            "token_logprobs": [],
            "top_logprobs": [],
        }

    def _chat_chunks(self, completion_id: str,
                     created: int) -> Iterator[ChatCompletionChunk]:
        """Yield ten fake chat-completion stream chunks."""
        for _ in range(10):
            yield {
                "id": completion_id,
                "object": "chat.completion.chunk",
                "created": created,
                "model": self.name,
                "choices": [
                    {
                        "delta": {
                            # Fix: completion deltas come from the
                            # assistant, not the user.
                            "role": "assistant",
                            "content": random.choice(_fake_messages),
                        },
                        "index": 0,
                        "finish_reason": "length",
                    }
                ],
            }

    def _text_chunks(self, completion_id: str,
                     created: int) -> Iterator[CompletionChunk]:
        """Yield ten fake text-completion stream chunks."""
        for _ in range(10):
            yield {
                "id": completion_id,
                "object": "text_completion",
                "created": created,
                "model": self.name,
                "choices": [
                    {
                        "text": random.choice(_fake_messages),
                        "index": 0,
                        "logprobs": None,
                        "finish_reason": "length",
                    }
                ],
            }

    def chat_completion(self,
                        messages: List[ChatCompletionMessage],
                        temperature: float = 0.2,
                        top_p: float = 0.95,
                        top_k: int = 40,
                        stream: bool = False,
                        stop: Optional[List[str]] = None,
                        max_tokens: int = 256,
                        verbose: Optional[bool] = False,
                        **kwargs) -> Union[ChatCompletion, Iterator[ChatCompletionChunk]]:
        """Fabricate a chat completion for *messages*.

        Returns an iterator of chunks when ``stream`` is true, otherwise a
        single ``ChatCompletion`` — matching the declared return type.
        (The original was always a generator and, when streaming, also
        yielded a trailing full-completion object after the chunks.)

        ``stop`` now defaults to ``None`` instead of a shared mutable list;
        behavior is unchanged since an absent stop list was already
        normalized to "no stop sequences".
        """
        completion_id = f"cmpl-{uuid.uuid4()}"
        created = int(time.time())
        if stream:
            return self._chat_chunks(completion_id, created)
        # No tokens are actually generated, so completion_tokens is 0 and
        # the (fake) prompt cost of 100 dominates total_tokens.
        return ChatCompletion(**{
            "id": completion_id,
            "object": "chat.completion",
            "created": created,
            "model": self.name,
            "choices": [
                {
                    "message": {
                        "role": "assistant",
                        "content": random.choice(_fake_messages),
                    },
                    "index": 0,
                    "logprobs": self._fake_logprobs(),
                    "finish_reason": "length",
                }
            ],
            "usage": {
                "prompt_tokens": 100,
                "completion_tokens": 0,
                "total_tokens": 100,
            },
        })

    def completion(self,
                   prompt: str,
                   max_tokens: int = 16,
                   temperature: float = 0.7,
                   top_p: float = 1.0,
                   top_k: int = 0,
                   presence_penalty: float = 0.0,
                   frequency_penalty: float = 0.0,
                   stop: Optional[List[str]] = None,
                   echo: bool = False,
                   logprobs: int = 0,
                   stream: bool = False,
                   verbose: Optional[bool] = False,
                   **kwargs) -> Union[Completion, Iterator[CompletionChunk]]:
        """Fabricate a text completion for *prompt*.

        Returns an iterator of chunks when ``stream`` is true, otherwise a
        single completion payload — matching the declared return type (the
        original always returned a generator, mixing chunk and full-payload
        elements when streaming).
        """
        completion_id = f"cmpl-{uuid.uuid4()}"
        created = int(time.time())
        if stream:
            return self._text_chunks(completion_id, created)
        return {
            "id": completion_id,
            "object": "text_completion",
            "created": created,
            "model": self.name,
            "choices": [
                {
                    "text": random.choice(_fake_messages),
                    "index": 0,
                    "logprobs": self._fake_logprobs(),
                    "finish_reason": "length",
                }
            ],
            "usage": {
                "prompt_tokens": 100,
                "completion_tokens": 0,
                "total_tokens": 100,
            },
        }