qnguyen3 committed
Commit 6c81b3d
1 Parent(s): d227c5a

Update app.py

Files changed (1): app.py (+0 -2)
app.py CHANGED
@@ -39,7 +39,6 @@ class KeywordsStoppingCriteria(StoppingCriteria):
         self.tokenizer = tokenizer
         self.start_len = input_ids.shape[1]
 
-    @spaces.GPU
     def call_for_batch(self, output_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
         offset = min(output_ids.shape[1] - self.start_len, self.max_keyword_len)
         self.keyword_ids = [keyword_id.to(output_ids.device) for keyword_id in self.keyword_ids]
@@ -53,7 +52,6 @@ class KeywordsStoppingCriteria(StoppingCriteria):
                 return True
         return False
 
-    @spaces.GPU
     def __call__(self, output_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
         outputs = []
         for i in range(output_ids.shape[0]):
 
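For context: this commit drops the @spaces.GPU decorators from the stopping-criteria methods. On ZeroGPU Spaces, @spaces.GPU is meant for the top-level function that performs generation; call_for_batch and __call__ are invoked inside model.generate and run within that GPU context, so decorating them individually is unnecessary. Below is a minimal sketch of that pattern. The KeywordsStoppingCriteria(keywords, tokenizer, input_ids) constructor signature, the placeholder model ID, the example stop keyword, and the generate() wrapper are illustrative assumptions, not taken from this diff.

import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, StoppingCriteriaList

# KeywordsStoppingCriteria is the class partially shown in the diff above;
# its (keywords, tokenizer, input_ids) constructor is assumed here.

model_id = "Qwen/Qwen1.5-0.5B-Chat"  # placeholder model, not from this Space
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16)

@spaces.GPU  # the only GPU decorator needed: it covers the whole generation call
def generate(prompt: str) -> str:
    model.to("cuda")  # move the model inside the GPU context
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to("cuda")
    stop = KeywordsStoppingCriteria(["<|im_end|>"], tokenizer, input_ids)  # example stop keyword
    output_ids = model.generate(
        input_ids,
        max_new_tokens=128,
        stopping_criteria=StoppingCriteriaList([stop]),  # criteria run inside generate, no extra decorator
    )
    return tokenizer.decode(output_ids[0, input_ids.shape[1]:], skip_special_tokens=True)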