Spaces:
Sleeping
Sleeping
Akis Giannoukos
committed on
Commit
·
eb07602
1
Parent(s):
9325a21
Updated Gemma chat-template usage
Browse files
app.py
CHANGED
|
@@ -192,9 +192,9 @@ def generate_recording_agent_reply(chat_history: List[Tuple[str, str]]) -> str:
|
|
| 192 |
)
|
| 193 |
pipe = get_textgen_pipeline()
|
| 194 |
tokenizer = pipe.tokenizer
|
|
|
|
| 195 |
messages = [
|
| 196 |
-
{"role": "system", "content": system_prompt},
|
| 197 |
-
{"role": "user", "content": user_prompt},
|
| 198 |
]
|
| 199 |
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
|
| 200 |
gen = pipe(
|
|
@@ -231,9 +231,9 @@ def scoring_agent_infer(chat_history: List[Tuple[str, str]], features: Dict[str,
|
|
| 231 |
)
|
| 232 |
pipe = get_textgen_pipeline()
|
| 233 |
tokenizer = pipe.tokenizer
|
|
|
|
| 234 |
messages = [
|
| 235 |
-
{"role": "system", "content": system_prompt},
|
| 236 |
-
{"role": "user", "content": user_prompt},
|
| 237 |
]
|
| 238 |
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
|
| 239 |
gen = pipe(
|
|
|
|
| 192 |
)
|
| 193 |
pipe = get_textgen_pipeline()
|
| 194 |
tokenizer = pipe.tokenizer
|
| 195 |
+
combined_prompt = system_prompt + "\n\n" + user_prompt
|
| 196 |
messages = [
|
| 197 |
+
{"role": "user", "content": combined_prompt},
|
|
|
|
| 198 |
]
|
| 199 |
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
|
| 200 |
gen = pipe(
|
|
|
|
| 231 |
)
|
| 232 |
pipe = get_textgen_pipeline()
|
| 233 |
tokenizer = pipe.tokenizer
|
| 234 |
+
combined_prompt = system_prompt + "\n\n" + user_prompt
|
| 235 |
messages = [
|
| 236 |
+
{"role": "user", "content": combined_prompt},
|
|
|
|
| 237 |
]
|
| 238 |
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
|
| 239 |
gen = pipe(
|