Update back.py
back.py CHANGED
@@ -42,14 +42,12 @@ label_mapping = {
 }
 
 
-#@st.cache_resource
 def load_setup(path_to_model: str, path_to_vocab: str) -> tuple[ModelType, TokenizerType]:
     loaded_model = torch.load(path_to_model, map_location=device)
     loaded_tokenizer = DistilBertTokenizer(path_to_vocab)
     return loaded_model, loaded_tokenizer
 
 
-#@st.cache_data
 def predict(model: ModelType, tokenizer: TokenizerType, input_text: str, max_length: int = 512) -> str:
     inputs = tokenizer.encode_plus(
         input_text,
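The hunk deletes the two commented-out Streamlit caching decorators (#@st.cache_resource above load_setup and #@st.cache_data above predict) rather than re-enabling them; the function bodies are unchanged. As a rough usage sketch only, assuming back.py exposes the two functions exactly as shown in the hunk; the file paths and input text below are placeholders, not files from this Space:

    # Hypothetical usage sketch. The module name comes from the diff (back.py);
    # "model.pt", "vocab.txt", and the sample sentence are placeholders.
    from back import load_setup, predict

    model, tokenizer = load_setup("model.pt", "vocab.txt")
    label = predict(model, tokenizer, "Example text to classify")
    print(label)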