DaniRIU committed on
Commit
9c4fc30
1 Parent(s): dbf7e82

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -3,8 +3,8 @@ import gradio as gr
 import time
 
 godel = gr.Interface.load("huggingface/microsoft/GODEL-v1_1-large-seq2seq")
-tokenizer = AutoTokenizer.from_pretrained(godel)
-model = AutoModelForSeq2SeqLM.from_pretrained(godel)
+tokenizer = AutoTokenizer.from_pretrained("microsoft/GODEL-v1_1-large-seq2seq")
+#model = AutoModelForSeq2SeqLM.from_pretrained(godel)
 
 def generate(dialog):
     dialog = [dialog]
@@ -22,7 +22,7 @@ def generate(dialog):
     dialog = ' EOS '.join(dialog)
     query = f"{instruction} [CONTEXT] {dialog} {knowledge}"
     input_ids = tokenizer(f"{query}", return_tensors="pt").input_ids
-    outputs = model.generate(input_ids, max_length=128, min_length=8, top_p=0.9, do_sample=True)
+    outputs = godel.generate(input_ids, max_length=128, min_length=8, top_p=0.9, do_sample=True)
     output = tokenizer.decode(outputs[0], skip_special_tokens=True)
     return output
 
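For reference, a minimal standalone sketch of the GODEL generation flow that app.py wraps, loading the tokenizer and model directly with transformers rather than through gr.Interface.load; the function signature and variable names below are illustrative assumptions, not part of this commit:

from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

checkpoint = "microsoft/GODEL-v1_1-large-seq2seq"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)

def generate(instruction, dialog, knowledge=""):
    # GODEL expects dialog turns joined by ' EOS ' inside a [CONTEXT] block.
    query = f"{instruction} [CONTEXT] {' EOS '.join(dialog)} {knowledge}"
    input_ids = tokenizer(query, return_tensors="pt").input_ids
    # Same sampling parameters as in app.py's generate().
    outputs = model.generate(input_ids, max_length=128, min_length=8,
                             top_p=0.9, do_sample=True)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

print(generate("Instruction: given a dialog context, you need to respond empathically.",
               ["Hi, how are you doing today?"]))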