Spaces:
Runtime error
Runtime error
Commit
•
ce3c9da
1
Parent(s):
15850bc
Update app.py
Browse files
app.py
CHANGED
@@ -1,23 +1,19 @@
|
|
1 |
-
import
|
|
|
|
|
2 |
|
3 |
-
|
4 |
-
|
5 |
|
6 |
-
|
7 |
-
|
8 |
|
9 |
-
|
10 |
-
|
|
|
|
|
|
|
|
|
11 |
|
12 |
-
|
13 |
-
@bot.message_handler(commands=['start'])
|
14 |
-
def send_welcome(message):
|
15 |
-
bot.reply_to(message, "Welcome to the tele-transcribe-chat bot")
|
16 |
|
17 |
-
# Handler for all other messages
|
18 |
-
@bot.message_handler(func=lambda message: True)
|
19 |
-
def echo_all(message):
|
20 |
-
bot.reply_to(message, message.text)
|
21 |
-
|
22 |
-
# Start the bot
|
23 |
-
bot.polling()
|
|
|
import os

# Hugging Face access token from the environment.
# Meta-Llama-3 is a gated model, so an authorized token is required
# to download the weights.
HF_TOKEN = os.getenv('HF_TOKEN')
print("Token loaded")

import transformers
import torch

# Loading llama3 model
model_id = "meta-llama/Meta-Llama-3-8B-Instruct"

# Use the GPU when one is available; otherwise fall back to CPU so the
# script does not crash with a runtime error on CPU-only hardware.
device = "cuda" if torch.cuda.is_available() else "cpu"

pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device=device,
    # Pass the gated-model token explicitly; without it the download can
    # fail with an authorization error when HF_TOKEN is not picked up
    # from the environment by the hub client.
    token=HF_TOKEN,
)

print("llama downloaded successfully")
|
|
|
|
|
|
19 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|