laiBatool committed
Commit d65cc8b · verified · 1 Parent(s): 3abcb4d

Update src/streamlit_app.py

Files changed (1)
  1. src/streamlit_app.py +17 -2
src/streamlit_app.py CHANGED
@@ -1,7 +1,22 @@
+import os
+
+# Fix: Set Hugging Face cache to a writable directory
+os.environ["HF_HOME"] = "/tmp/huggingface"
+os.environ["TRANSFORMERS_CACHE"] = "/tmp/huggingface"
+os.environ["HF_DATASETS_CACHE"] = "/tmp/huggingface"
+os.environ["HF_METRICS_CACHE"] = "/tmp/huggingface"
+
+# Optional but safe
+os.makedirs("/tmp/huggingface", exist_ok=True)
+
+
+
+
+
 import streamlit as st
 from transformers import AutoTokenizer, AutoModelForSequenceClassification
 import torch
-import os
+
 
 # Load your model from Hugging Face Hub
 model_name = "laiBatool/laiba-spam-classifier-bert"  # replace with your actual model repo name
@@ -10,7 +25,7 @@ model_name = "laiBatool/laiba-spam-classifier-bert"  # replace with your actual
 @st.cache_resource
 def load_model():
 
-    os.environ['HF_HOME'] = './hf_cache'  # Safe cache location in HF Space
+
     tokenizer = AutoTokenizer.from_pretrained(model_name)
     model = AutoModelForSequenceClassification.from_pretrained(model_name)
     return tokenizer, model
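
For context, below is a minimal sketch of how the changed pieces could fit together in the full app: the cache environment variables are set before transformers is imported (so the default, non-writable cache path in the Space container is never used), and the cached loader feeds a simple inference step. Only the cache setup, load_model(), and model_name come from this diff; the text-input UI, the label mapping, and the inference code are assumptions added here for illustration.

# Sketch only; UI elements and label names below are assumed, not part of the commit.
import os

# Point the Hugging Face cache at a writable directory before importing transformers.
os.environ["HF_HOME"] = "/tmp/huggingface"
os.makedirs("/tmp/huggingface", exist_ok=True)

import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "laiBatool/laiba-spam-classifier-bert"

@st.cache_resource  # load weights once per process, not on every Streamlit rerun
def load_model():
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSequenceClassification.from_pretrained(model_name)
    return tokenizer, model

tokenizer, model = load_model()

text = st.text_area("Enter a message to classify")  # assumed UI element
if st.button("Classify") and text:
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():
        logits = model(**inputs).logits
    pred = int(torch.argmax(logits, dim=-1))
    labels = {0: "ham", 1: "spam"}  # assumed label mapping for this binary classifier
    st.write(f"Prediction: {labels.get(pred, pred)}")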