# app.py — Table Question Answering Gradio demo
# (provenance: commit dd9e773, "Update app.py", by Bronco92)
import gradio as gr
import pandas as pd
from transformers import (
    AutoModelForSeq2SeqLM,
    AutoModelForTableQuestionAnswering,
    AutoTokenizer,
)
# Model checkpoint: OmniTab (BART-based, generative) fine-tuned on WikiTableQuestions.
model_name = "neulab/omnitab-large-finetuned-wtq"
# Bug fix: the original assigned the bare (and undefined) class name
# `TapexTokenizer` instead of instantiating a tokenizer for this checkpoint.
tokenizer = AutoTokenizer.from_pretrained(model_name)
# OmniTab generates its answer text autoregressively, so it needs a
# seq2seq head with `.generate()`, not an extractive table-QA head.
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
# Define the question-answering function
def answer_question(csv_file, question):
    """Answer a natural-language question about an uploaded CSV table.

    Args:
        csv_file: Gradio file object; ``.name`` is the path of the uploaded CSV.
        question: Natural-language question about the table's contents.

    Returns:
        The model's answer as a stripped string.
    """
    # Read the CSV into a DataFrame; the table tokenizer requires string cells.
    table = pd.read_csv(csv_file.name).astype(str)
    # Bug fix: the tokenizer has no `table_encode` method — the table and the
    # question are encoded together in a single tokenizer call.
    encoding = tokenizer(table=table, query=question, return_tensors="pt")
    # Bug fix: OmniTab is generative; the original called the model directly
    # with two separate encodings and tried to `batch_decode` raw logits,
    # neither of which works. Generate token ids, then decode them to text.
    output_ids = model.generate(**encoding)
    answers = tokenizer.batch_decode(output_ids, skip_special_tokens=True)
    return answers[0].strip()
# Create the Gradio interface
# Build the Gradio UI.  Bug fix: the `gr.inputs` / `gr.outputs` namespaces
# were removed in modern Gradio — components are now top-level classes.
demo = gr.Interface(
    fn=answer_question,
    inputs=[
        gr.File(label="CSV File"),
        gr.Textbox(lines=2, label="Question"),
    ],
    outputs=gr.Textbox(label="Answer"),
    title="Table Question Answering",
    description="Upload a CSV file and ask a question about the data.",
)

if __name__ == "__main__":
    demo.launch()