Oleg Lavrovsky committed on
Data min max
app.py CHANGED
@@ -104,12 +104,12 @@ def fit_to_length(text, min_length=3, max_length=100):
     return text
 
 
-def get_model_reponse(
+def get_model_reponse(query: str):
     """Process the text content."""
 
     # Prepare the model input
     messages_think = [
-        {"role": "user", "content":
+        {"role": "user", "content": query}
     ]
     text = tokenizer.apply_chat_template(
         messages_think,
@@ -144,7 +144,7 @@ async def completion(data: Completion):
         raise HTTPException(status_code=503, detail="Model not loaded")
 
     try:
-        text = fit_to_length(
+        text = fit_to_length(data.prompt, 3, data.max_tokens)
 
         result = get_model_reponse(text, model)
 
@@ -179,7 +179,7 @@ async def predict(q: str):
 
     input_data = TextInput(text=q)
 
-    text = fit_to_length(input_data.text, input_data.max_length)
+    text = fit_to_length(input_data.text, input_data.min_length, input_data.max_length)
 
     result = get_model_reponse(text, model)
 
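The commit title "Data min max" matches the call-site fix: the old /predict call passed input_data.max_length positionally into the min_length slot of fit_to_length(text, min_length=3, max_length=100), whereas the new calls pass both bounds explicitly. For context, a minimal sketch of what such a helper could look like; only the signature in the first hunk header comes from the commit, the body is an assumption:

# Hedged sketch only: the signature matches the hunk header above; the body is assumed.
def fit_to_length(text, min_length=3, max_length=100):
    """Clamp input text to between min_length and max_length characters."""
    text = text.strip()
    if len(text) < min_length:
        # Rejecting too-short input is one plausible choice; the real behaviour is not shown.
        raise ValueError(f"Input must be at least {min_length} characters long")
    return text[:max_length]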
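The first hunk only shows the start of get_model_reponse (the signature, the messages_think list, and the opening of tokenizer.apply_chat_template); tokenizer and model appear to be module-level globals, and the visible call sites still pass two arguments, get_model_reponse(text, model). A sketch of a typical transformers-style continuation, with everything past the shown lines being an assumption rather than the committed code:

# Hedged sketch: the signature, messages_think and the start of apply_chat_template
# are from the commit; the generation step below is an assumed, typical continuation.
def get_model_reponse(query: str):
    """Process the text content."""
    # Prepare the model input
    messages_think = [
        {"role": "user", "content": query}
    ]
    text = tokenizer.apply_chat_template(
        messages_think,
        tokenize=False,              # assumed: render the chat to a prompt string
        add_generation_prompt=True,  # assumed: append the assistant turn marker
    )
    inputs = tokenizer(text, return_tensors="pt").to(model.device)   # assumed
    output_ids = model.generate(**inputs, max_new_tokens=256)        # assumed
    # Decode only the newly generated tokens
    return tokenizer.decode(output_ids[0][inputs.input_ids.shape[-1]:],
                            skip_special_tokens=True)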