Jack Wong committed
Commit 1d1fceb (1 parent: e513d6a)

removed cache_dir
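Dropping cache_dir means from_pretrained no longer downloads into the app's TemporaryDirectory; weights land in the default Hugging Face cache instead (~/.cache/huggingface/hub unless overridden via HF_HOME). A minimal sketch of the difference, not part of this commit, using a hypothetical /tmp/hf-cache path:

from transformers import AutoTokenizer

# Before: downloads pinned to an explicit (temporary) directory, re-fetched when it is cleaned up.
tokenizer = AutoTokenizer.from_pretrained('thenlper/gte-large', cache_dir='/tmp/hf-cache')

# After: downloads go to the default Hugging Face cache and are reused across runs.
tokenizer = AutoTokenizer.from_pretrained('thenlper/gte-large')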

Files changed (1): app.py (+2 -2)
app.py CHANGED
@@ -11,10 +11,10 @@ temp_dir = tempfile.TemporaryDirectory()
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 
 def get_tokenizer() -> PreTrainedTokenizer:
-    return AutoTokenizer.from_pretrained('thenlper/gte-large', trust_remote_code=True, cache_dir=temp_dir.name)
+    return AutoTokenizer.from_pretrained('thenlper/gte-large', trust_remote_code=True)
 
 def get_model() -> PreTrainedModel:
-    return AutoModel.from_pretrained('thenlper/gte-large', trust_remote_code=True, cache_dir=temp_dir.name).to(device)
+    return AutoModel.from_pretrained('thenlper/gte-large', trust_remote_code=True).to(device)
 
 def average_pooling(last_hidden_states: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
     last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
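For context, a minimal usage sketch of how these helpers are typically combined to embed text with gte-large. This is an assumption following the GTE model card's average-pooling recipe, not code from this repo, and the return line of average_pooling (truncated in the hunk above) is assumed accordingly:

import torch
import torch.nn.functional as F
from transformers import AutoModel, AutoTokenizer, PreTrainedModel, PreTrainedTokenizer

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

def get_tokenizer() -> PreTrainedTokenizer:
    return AutoTokenizer.from_pretrained('thenlper/gte-large', trust_remote_code=True)

def get_model() -> PreTrainedModel:
    return AutoModel.from_pretrained('thenlper/gte-large', trust_remote_code=True).to(device)

def average_pooling(last_hidden_states: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
    # Zero out padded positions, then average over the sequence dimension.
    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]  # assumed return, per the GTE recipe

tokenizer = get_tokenizer()
model = get_model()
texts = ["what is the capital of France?", "Paris is the capital of France."]
batch = tokenizer(texts, max_length=512, padding=True, truncation=True, return_tensors='pt').to(device)
with torch.no_grad():
    outputs = model(**batch)
embeddings = average_pooling(outputs.last_hidden_state, batch['attention_mask'])
embeddings = F.normalize(embeddings, p=2, dim=1)          # unit-length sentence embeddings
similarity = (embeddings[:1] @ embeddings[1:].T).item()   # cosine similarity of the two texts
print(similarity)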