Update api/models.py
api/models.py +2 -22
api/models.py
CHANGED
@@ -1,34 +1,14 @@
-
-
-from typing import List, Optional, Union
+from typing import List, Optional
 from pydantic import BaseModel
 
 class Message(BaseModel):
     role: str
-    content:
+    content: str | list
 
 class ChatRequest(BaseModel):
     model: str
     messages: List[Message]
-    proxy: Optional[str] = None  # Assuming proxy might be needed
     stream: Optional[bool] = False
     temperature: Optional[float] = 0.7
     top_p: Optional[float] = 0.9
     max_tokens: Optional[int] = 99999999
-
-class ImageResponseModel(BaseModel):
-    images: List[str]
-    alt: str
-
-class ChatCompletionChoice(BaseModel):
-    index: int
-    message: dict
-    finish_reason: Optional[str]
-
-class ChatCompletionResponse(BaseModel):
-    id: str
-    object: str
-    created: int
-    model: str
-    choices: List[ChatCompletionChoice]
-    usage: Optional[dict] = None
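For context, a minimal usage sketch (not part of the commit) of the updated models. It assumes Python 3.10+ for the bare `str | list` union syntax and that the project root is on the import path so api/models.py resolves as api.models; the model id and message payloads below are illustrative only.

# Minimal sketch: exercising the widened Message.content annotation.
# Assumes Python 3.10+ and that api/models.py is importable as api.models.
from api.models import ChatRequest

# Plain-string content still validates as before.
req_text = ChatRequest(
    model="example-model",  # hypothetical model id
    messages=[{"role": "user", "content": "Hello"}],
)

# List content (e.g. structured content parts) is now accepted as well,
# because Message.content is annotated as `str | list`.
req_parts = ChatRequest(
    model="example-model",
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "Describe this image"},
                {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
            ],
        }
    ],
)

print(req_text.messages[0].content)         # Hello
print(type(req_parts.messages[0].content))  # <class 'list'>

Note that `str | list` is only valid as a class-level annotation on Python 3.10+; on older interpreters `Union[str, list]` (with the corresponding typing import) would be needed instead.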