|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from abc import ABCMeta, abstractmethod |
|
from typing import Any, AsyncIterator, List, Literal, Union, overload |
|
|
|
from erniebot_agent.messages import AIMessage, AIMessageChunk, Message |
|
|
|
|
|
class ChatModel(metaclass=ABCMeta):
    """The base class of chat-optimized LLM.

    Concrete subclasses must implement :meth:`async_chat`.

    Attributes:
        model (str): The name or identifier of the underlying model.
    """

    def __init__(self, model: str):
        """Initialize the chat model.

        Args:
            model (str): The name or identifier of the underlying model.
        """
        self.model = model

    def __repr__(self) -> str:
        # Inherited by all subclasses; makes instances identifiable in logs
        # and debugger output.
        return f"{type(self).__name__}(model={self.model!r})"

    @overload
    async def async_chat(
        self, messages: List[Message], *, stream: Literal[False] = ..., **kwargs: Any
    ) -> AIMessage:
        ...

    @overload
    async def async_chat(
        self, messages: List[Message], *, stream: Literal[True], **kwargs: Any
    ) -> AsyncIterator[AIMessageChunk]:
        ...

    @overload
    async def async_chat(
        self, messages: List[Message], *, stream: bool, **kwargs: Any
    ) -> Union[AIMessage, AsyncIterator[AIMessageChunk]]:
        ...

    @abstractmethod
    async def async_chat(
        self, messages: List[Message], *, stream: bool = False, **kwargs: Any
    ) -> Union[AIMessage, AsyncIterator[AIMessageChunk]]:
        """Asynchronously chats with the LLM.

        Args:
            messages (List[Message]): A list of messages.
            stream (bool): Whether to use streaming generation. Defaults to False.
            **kwargs: Arbitrary keyword arguments.

        Returns:
            If stream is False, returns a single message.
            If stream is True, returns an asynchronous iterator of message chunks.

        Raises:
            NotImplementedError: Always, on this base class; subclasses must
                override this method.
        """
        raise NotImplementedError
|
|