uploaded new model
Files changed:
- app.py: +6 -3
- tj-fa-3dmzfi52.pt → tj-fa.pt: +2 -2
app.py
CHANGED
@@ -3,15 +3,18 @@ import streamlit as st
 from model import init_model, predict
 from data import Tokenizer, load_config
 
-
+
+MODEL_PATH = 'tj-fa.pt'
+
+config = load_config(MODEL_PATH)
 print('Config:', config)
 tokenizer = Tokenizer(config)
 
 # Load the model
-model = init_model(
+model = init_model(MODEL_PATH)
 
 # Create a text area box where the user can enter their text
-user_input = st.text_area("Enter some text here", value="
+user_input = st.text_area("Enter some text here", value="Он ҷо, ки висоли дӯстон аст,\nВ-оллоҳ, ки миёни хона саҳрост.")
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
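Only the changed hunk of app.py is shown above. As a rough reconstruction of the file after this commit: the streamlit import comes from the hunk header, while the torch import and the closing predict call are assumptions, not part of the diff.

import streamlit as st
import torch  # assumed import: the hunk uses torch.cuda but its import line is outside the hunk

from model import init_model, predict
from data import Tokenizer, load_config

MODEL_PATH = 'tj-fa.pt'

config = load_config(MODEL_PATH)
print('Config:', config)
tokenizer = Tokenizer(config)

# Load the model
model = init_model(MODEL_PATH)

# Create a text area box where the user can enter their text
user_input = st.text_area("Enter some text here", value="Он ҷо, ки висоли дӯстон аст,\nВ-оллоҳ, ки миёни хона саҳрост.")

device = "cuda" if torch.cuda.is_available() else "cpu"

# Assumed continuation: the exact predict() signature is not shown in the diff
if user_input:
    st.write(predict(model, tokenizer, user_input, device))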
tj-fa-3dmzfi52.pt → tj-fa.pt
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size 
+oid sha256:ad48e215fbe386a54877fd2cd3606ea79022273385784bb1c86e444c4aa06169
+size 15589259
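The renamed checkpoint keeps the Git LFS pointer format; only the recorded object hash and byte size change. A minimal sketch for checking a locally downloaded tj-fa.pt against the new pointer values; the local file path is an assumption.

import hashlib
import os

EXPECTED_SHA256 = "ad48e215fbe386a54877fd2cd3606ea79022273385784bb1c86e444c4aa06169"
EXPECTED_SIZE = 15589259   # bytes, from the Git LFS pointer
MODEL_PATH = "tj-fa.pt"    # assumed to sit in the current directory

# Compare the on-disk size with the size recorded in the pointer
assert os.path.getsize(MODEL_PATH) == EXPECTED_SIZE, "size mismatch"

# Hash the file in chunks so the checkpoint never has to fit in memory at once
sha = hashlib.sha256()
with open(MODEL_PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_SHA256, "checksum mismatch"
print("tj-fa.pt matches the LFS pointer")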