Update app.py
app.py CHANGED
@@ -3,7 +3,8 @@ import os
 from transformers import AutoModelForCausalLM, AutoTokenizer
 from huggingface_hub import login
 
-
+sec_token=os.getenv("HF_TOKEN")
+login(token=sec_token)
 
 import torch
 
@@ -12,8 +13,8 @@ class VietnameseChatbot:
         """
         Initialize the Vietnamese chatbot with a pre-trained model
         """
-        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
-        self.model = AutoModelForCausalLM.from_pretrained(model_name)
+        self.tokenizer = AutoTokenizer.from_pretrained(model_name, token=sec_token)
+        self.model = AutoModelForCausalLM.from_pretrained(model_name, token=sec_token)
 
         # Use GPU if available
         self.device = "cuda" if torch.cuda.is_available() else "cpu"
@@ -49,13 +50,9 @@ class VietnameseChatbot:
         return response
 
 def main():
-    st.set_page_config(
-        page_title="IOGPT",
-        page_icon="🇻🇳",
-        layout="wide"
-    )
+    st.set_page_config(page_title="IOGPT", layout="wide")
 
-    st.title("
+    st.title("Chat với IOGPT")
     st.markdown("""
     ### Trò chuyện với IOGPT
     """)