dmariko committed on
Commit
27d9c81
1 Parent(s): c58b413

Update app.py

Files changed (1)
  1. app.py +7 -7
app.py CHANGED
@@ -1,5 +1,5 @@
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoConfig
-#from transformers import T5Tokenizer, T5ForConditionalGeneration, AutoConfig
+
 import gradio as gr
 from torch.nn import functional as F
 import seaborn
@@ -22,13 +22,13 @@ import matplotlib.font_manager as fm
 MODEL_NAME = 'https://huggingface.co/yseop/FNP_T5_D2T_complete'
 tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
 model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
-#config = AutoConfig.from_pretrained(MODEL_NAME)
+config = AutoConfig.from_pretrained(MODEL_NAME)
 
 MODEL_BUF = {
     "name": MODEL_NAME,
     "tokenizer": tokenizer,
-    "model": model
-    #"config": config
+    "model": model,
+    "config": config
 }
 
 font_dir = ['./']
@@ -44,7 +44,7 @@ def change_model_name(name):
     MODEL_BUF["name"] = name
     MODEL_BUF["tokenizer"] = AutoTokenizer.from_pretrained(name)
     MODEL_BUF["model"] = AutoModelForSeq2SeqLM.from_pretrained(name)
-    #MODEL_BUF["config"] = AutoConfig.from_pretrained(name)
+    MODEL_BUF["config"] = AutoConfig.from_pretrained(name)
 
 
 def generate(text, model, tokenizer):
@@ -75,9 +75,9 @@ app = gr.Interface(
 
     examples = [[MODEL_BUF["name"], text]],
 
-    title="FReE",
+    title="FTG",
 
-    description="Financial relations classifier"
+    description="Financial Text Generation"
 
 )
 
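
For context, here is a minimal sketch of the model-loading section of app.py as it reads after this commit, reconstructed from the diff above; unrelated parts of the file are omitted. One assumption: the sketch passes the bare repo id yseop/FNP_T5_D2T_complete to from_pretrained(), which is the form the transformers loaders conventionally expect, whereas the Space itself keeps the full hub URL in MODEL_NAME.

# Post-commit model loading (sketch reconstructed from the diff above).
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoConfig

# Assumption: bare repo id used here; the Space stores the full hub URL in MODEL_NAME.
MODEL_NAME = 'yseop/FNP_T5_D2T_complete'

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
config = AutoConfig.from_pretrained(MODEL_NAME)  # now loaded alongside the model

# The config is cached next to the tokenizer and model so all three stay in sync.
MODEL_BUF = {
    "name": MODEL_NAME,
    "tokenizer": tokenizer,
    "model": model,
    "config": config
}

def change_model_name(name):
    # Reload every cached object, including the config added in this commit,
    # whenever the model is switched.
    MODEL_BUF["name"] = name
    MODEL_BUF["tokenizer"] = AutoTokenizer.from_pretrained(name)
    MODEL_BUF["model"] = AutoModelForSeq2SeqLM.from_pretrained(name)
    MODEL_BUF["config"] = AutoConfig.from_pretrained(name)

The commit also renames the Gradio demo in the gr.Interface call, from title "FReE" with description "Financial relations classifier" to title "FTG" with description "Financial Text Generation".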