from transformers import AutoTokenizer, AutoModelForTableQuestionAnswering
import pandas as pd
from io import StringIO

def initialize_tapas():
    # Load TAPAS (large, fine-tuned on WikiTableQuestions) for table question answering.
    tokenizer = AutoTokenizer.from_pretrained("google/tapas-large-finetuned-wtq")
    model = AutoModelForTableQuestionAnswering.from_pretrained("google/tapas-large-finetuned-wtq")
    return tokenizer, model

def ask_llm_chunk(tokenizer, model, chunk, questions):
    # ... [same as in your code]
    ...  # placeholder body so the module still parses

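# Hedged sketch, not the elided original: `ask_llm_chunk` above is left as a stub
# because its body lives in the caller's code. The hypothetical `ask_llm_chunk_sketch`
# below shows a typical TAPAS pass over one table chunk, following the documented
# TapasTokenizer/TapasForQuestionAnswering usage: TAPAS expects string-valued cells
# and inputs of at most 512 tokens, which is why the table is chunked at all.
def ask_llm_chunk_sketch(tokenizer, model, chunk, questions):
    chunk = chunk.astype(str)  # TAPAS requires every cell to be a string
    inputs = tokenizer(table=chunk, queries=questions,
                       padding="max_length", return_tensors="pt")
    outputs = model(**inputs)
    # Map token-level logits back to cell coordinates in the chunk; the WTQ
    # aggregation head (SUM/AVERAGE/COUNT/NONE) is ignored for simplicity.
    answer_coordinates, _aggregation_indices = tokenizer.convert_logits_to_predictions(
        inputs, outputs.logits.detach(), outputs.logits_aggregation.detach()
    )
    answers = []
    for coordinates in answer_coordinates:  # one list of (row, col) tuples per question
        answers.append(", ".join(chunk.iat[coord] for coord in coordinates))
    return answers
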
def summarize_map_reduce(tokenizer, model, data, questions):
    # ... [same as in your code]
    ...  # placeholder body so the module still parses
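
# Hedged sketch, not the elided original: a minimal map-reduce over the table,
# assuming `data` is raw CSV text (hence the StringIO import above). Map = ask
# every question against each row chunk; reduce = collect the per-chunk answers
# for each question. The 10-row chunk size is an arbitrary assumption, not taken
# from the original code.
def summarize_map_reduce_sketch(tokenizer, model, data, questions, rows_per_chunk=10):
    table = pd.read_csv(StringIO(data))
    chunks = [table.iloc[i:i + rows_per_chunk].reset_index(drop=True)
              for i in range(0, len(table), rows_per_chunk)]
    # Map: query each chunk independently with the sketch above.
    per_chunk_answers = [ask_llm_chunk_sketch(tokenizer, model, chunk, questions)
                         for chunk in chunks]
    # Reduce: gather each question's answers across all chunks.
    return {question: [answers[i] for answers in per_chunk_answers]
            for i, question in enumerate(questions)}

# Example usage (hypothetical CSV):
#   tokenizer, model = initialize_tapas()
#   csv_text = "city,population\nParis,2148000\nBerlin,3645000"
#   print(summarize_map_reduce_sketch(tokenizer, model, csv_text,
#                                     ["Which city has the largest population?"]))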