Husnain
committed on
⚡ [Enhance] Use nous-mixtral-8x7b as default model
apis/chat_api.py CHANGED (+9 -3)
@@ -14,12 +14,13 @@ from pydantic import BaseModel, Field
 from sse_starlette.sse import EventSourceResponse, ServerSentEvent
 from tclogger import logger
 
-from constants.models import AVAILABLE_MODELS_DICTS
+from constants.models import AVAILABLE_MODELS_DICTS, PRO_MODELS
 from constants.envs import CONFIG
 
 from messagers.message_composer import MessageComposer
 from mocks.stream_chat_mocker import stream_chat_mock
 from networks.huggingface_streamer import HuggingfaceStreamer
+from networks.huggingchat_streamer import HuggingchatStreamer
 from networks.openai_streamer import OpenaiStreamer
 
 
@@ -58,8 +59,8 @@ class ChatAPIApp:
 
 class ChatCompletionsPostItem(BaseModel):
     model: str = Field(
-        default="mixtral-8x7b",
-        description="(str) `mixtral-8x7b`",
+        default="nous-mixtral-8x7b",
+        description="(str) `nous-mixtral-8x7b`",
     )
     messages: list = Field(
         default=[{"role": "user", "content": "Hello, who are you?"}],
@@ -92,6 +93,11 @@ class ChatAPIApp:
         if item.model == "gpt-3.5-turbo":
             streamer = OpenaiStreamer()
             stream_response = streamer.chat_response(messages=item.messages)
+        elif item.model in PRO_MODELS:
+            streamer = HuggingchatStreamer(model=item.model)
+            stream_response = streamer.chat_response(
+                messages=item.messages,
+            )
         else:
             streamer = HuggingfaceStreamer(model=item.model)
             composer = MessageComposer(model=item.model)
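Note on the field change above: with the updated `Field` default, a request body that omits `model` now resolves to `nous-mixtral-8x7b` instead of `mixtral-8x7b`. A minimal sketch of that behavior, trimmed to the two fields visible in this diff (the real `ChatCompletionsPostItem` defines additional fields not shown here):

```python
# Minimal sketch of the new default resolution, assuming only the two fields
# shown in this diff; the real ChatCompletionsPostItem has more fields.
from pydantic import BaseModel, Field

class ChatCompletionsPostItem(BaseModel):
    model: str = Field(
        default="nous-mixtral-8x7b",
        description="(str) `nous-mixtral-8x7b`",
    )
    messages: list = Field(
        default=[{"role": "user", "content": "Hello, who are you?"}],
    )

# A payload that omits "model" now falls back to the new default:
item = ChatCompletionsPostItem(messages=[{"role": "user", "content": "Hi"}])
print(item.model)  # -> "nous-mixtral-8x7b"
```

The `description` string only affects the generated API docs; the behavioral change here is the `default`.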
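Note on the routing change: `PRO_MODELS` is imported from `constants.models`, and any model name it contains is now served by `HuggingchatStreamer`, while `gpt-3.5-turbo` still goes to `OpenaiStreamer` and everything else falls through to `HuggingfaceStreamer`. The commit does not show what `PRO_MODELS` contains or how the Space is deployed, so the client-side sketch below is a hedged illustration: the base URL, port, and the OpenAI-style `/chat/completions` route are assumptions, not taken from this diff.

```python
# Hedged client-side sketch. Assumptions (not shown in this commit): the app is
# reachable locally and exposes an OpenAI-style POST /chat/completions route.
import requests

BASE_URL = "http://127.0.0.1:23333"  # assumed host and port

# Omitting "model" now falls back to the new default "nous-mixtral-8x7b".
resp = requests.post(
    f"{BASE_URL}/chat/completions",
    json={"messages": [{"role": "user", "content": "Hello, who are you?"}]},
    timeout=60,
)
print(resp.status_code)
print(resp.text[:500])  # body may be JSON or an SSE stream, depending on config

# A request that names a model listed in PRO_MODELS (contents not visible in
# this diff) would instead take the new HuggingchatStreamer branch server-side.
```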