File size: 864 Bytes
af33862
e02b803
059d35d
e02b803
 
 
 
2f82811
6ce726d
bf6b329
6ce726d
bf6b329
 
 
9caee9d
 
bf6b329
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
import gradio as gr
import torch
import torch.nn as nn
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the fine-tuned QA model and its tokenizer from the Hugging Face Hub.
# NOTE: from_pretrained() already downloads and loads the trained weights,
# so the previous torch.load()/load_state_dict() call was redundant — and it
# was also broken: it passed the repo id unquoted (a bare name), which raised
# a NameError at import time before the app could start.
tokenizer = AutoTokenizer.from_pretrained("juanpasanper/tigo_question_answer")
model = AutoModelForCausalLM.from_pretrained("juanpasanper/tigo_question_answer")
model.eval()  # inference-only app: disable dropout / training-mode layers
def question_answer(context, question):
  """Run the QA model on one (context, question) pair and return its top answer.

  NOTE(review): ``model.predict([...])`` is the simpletransformers
  QuestionAnsweringModel API, not a method of the transformers
  AutoModelForCausalLM loaded above — confirm the ``model`` global
  actually exposes ``.predict()`` before deploying.
  """
  query = [{"context": context, "qas": [{"question": question, "id": "0"}]}]
  predictions, _raw_outputs = model.predict(query)
  top_answer = predictions[0]["answer"][0]
  return top_answer
# Build the Gradio UI: two text inputs (context, question) -> one text output.
# Manual flagging lets reviewers label each prediction; labels land in the
# 'flagged' directory. enable_queue serializes requests through a queue.
iface = gr.Interface(
    fn=question_answer,
    inputs=["text", "text"],
    outputs=["text"],
    allow_flagging="manual",
    flagging_options=["correcto", "incorrecto"],
    flagging_dir='flagged',
    enable_queue=True,
)
iface.launch()