Spaces:
Runtime error
Runtime error
Update clean.py
Browse files
clean.py
CHANGED
@@ -1,6 +1,7 @@
 from huggingface_hub import InferenceClient
 from pathlib import Path
 import gradio as gr
+import os

 MODEL_NAME = "meta-llama/Meta-Llama-3-70b-Instruct"

@@ -19,7 +20,7 @@ def clean_transcript(audio_file, options, prompt, transcript: str):
     messages = [
         {"role": "user", "content": prompt + "\n" + chunk}
     ]
-    client = InferenceClient(model=MODEL_NAME)
+    client = InferenceClient(model=MODEL_NAME, token=os.getenv("HF_TOKEN"))
     for c in client.chat_completion(messages, max_tokens=1000, stream=True):
         token = c.choices[0].delta.content
         text += token or ""