saattrupdan committed
Commit d917126
1 Parent(s): 298f3a3

fix: Don't specify torch.compile backend for model

Files changed (1)
  1. app.py  +3 -2
app.py CHANGED
@@ -3,8 +3,9 @@
 from typing import Dict, Tuple
 import gradio as gr
 from gradio.components import Dropdown, Textbox, Button, Label, Markdown
-from gradio import Row, Column
 from types import MethodType
+from gradio.layouts.column import Column
+from gradio.layouts.row import Row
 from transformers import pipeline, AutoModelForSequenceClassification, AutoTokenizer
 from luga import language as detect_language
 import torch
@@ -22,7 +23,7 @@ def main():
     model_id = "alexandrainst/scandi-nli-large"
     model = AutoModelForSequenceClassification.from_pretrained(model_id)
     tokenizer = AutoTokenizer.from_pretrained(model_id)
-    model = torch.compile(model=model, backend="aot_eager")
+    model = torch.compile(model=model)
     model.eval()
     classifier = pipeline("zero-shot-classification", model=model, tokenizer=tokenizer)
     classifier.get_inference_context = MethodType(
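A minimal sketch of what the change amounts to (not part of the commit): calling torch.compile without a backend argument lets PyTorch pick its default backend ("inductor" in current releases) rather than pinning the "aot_eager" debugging backend. The nn.Linear below is a hypothetical stand-in for the actual alexandrainst/scandi-nli-large model.

```python
import torch
import torch.nn as nn

# Stand-in for AutoModelForSequenceClassification.from_pretrained(model_id)
toy_model = nn.Linear(4, 2)

# Previous code pinned a backend: torch.compile(model=toy_model, backend="aot_eager")
# After the fix, no backend is specified, so torch.compile uses its default.
compiled = torch.compile(model=toy_model)
compiled.eval()

with torch.inference_mode():
    out = compiled(torch.randn(1, 4))  # first call triggers compilation
print(out.shape)
```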