Spaces:
Paused
Paused
Update app.py
Browse files
app.py
CHANGED
@@ -6,13 +6,13 @@ import torch
|
|
6 |
|
7 |
class YourModel:
    """Wraps the Qwen-1.8B chat model behind a simple predict() interface.

    Keeps the running chat history on the instance so successive calls to
    predict() continue the same conversation.
    """

    def __init__(self):
        # Must be instance attributes (self.*), not locals: predict() reads
        # them on later calls. The original assigned bare locals here, which
        # were discarded as soon as __init__ returned.
        self.tokenizer = AutoTokenizer.from_pretrained(
            "Qwen/Qwen-1_8B-Chat", trust_remote_code=True
        )
        self.model = AutoModelForCausalLM.from_pretrained(
            "Qwen/Qwen-1_8B-Chat", device_map="auto", trust_remote_code=True
        ).eval()
        # None signals "no prior turns" to model.chat(); it returns the
        # updated history which we store back each call.
        self.history = None

    @instrument
    def predict(self, user_input, system_prompt):
        """Return the model's reply to *user_input*.

        Side effect: updates self.history with the new conversation state
        returned by the model.
        """
        print(f"Model response for:{user_input} with prompt:{system_prompt}")
        # Use the instance's model/tokenizer — the original referenced the
        # undefined module globals `model` and `tokenizer`.
        response, self.history = self.model.chat(
            self.tokenizer, user_input, history=self.history, system=system_prompt
        )
        return response
|
18 |
|
|
|
6 |
|
7 |
class YourModel:
    """Wraps the Qwen-1.8B chat model behind a simple predict() interface.

    Keeps the running chat history on the instance so successive calls to
    predict() continue the same conversation.
    """

    def __init__(self):
        self.tokenizer = AutoTokenizer.from_pretrained(
            "Qwen/Qwen-1_8B-Chat", trust_remote_code=True
        )
        self.model = AutoModelForCausalLM.from_pretrained(
            "Qwen/Qwen-1_8B-Chat", device_map="auto", trust_remote_code=True
        ).eval()
        # None signals "no prior turns" to model.chat(); it returns the
        # updated history which we store back each call.
        self.history = None

    @instrument
    def predict(self, user_input, system_prompt):
        """Return the model's reply to *user_input*.

        Side effect: updates self.history with the new conversation state
        returned by the model.
        """
        # Same output bytes as the original "+"-concatenated message.
        print(f"Model response for:{user_input} with prompt:{system_prompt}")
        # BUG FIX: the original called `model.chat(tokenizer, ...)` on bare
        # globals that do not exist — __init__ stores both on self, so this
        # raised NameError at runtime. Use the instance attributes.
        response, self.history = self.model.chat(
            self.tokenizer, user_input, history=self.history, system=system_prompt
        )
        return response
|
18 |
|