File size: 725 Bytes
809f9c2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
from transformers import T5Tokenizer, T5ForConditionalGeneration
import torch
import colorama
from colorama import Fore, Back, Style

colorama.init()  # enable ANSI color codes (needed for colors on Windows terminals)

# Load the trained model for inference
# NOTE(review): weights come from a local fine-tuned checkpoint while the
# tokenizer is the stock google/flan-t5-base one — assumes the fine-tune kept
# the base vocabulary unchanged; confirm the checkpoint was trained with this
# exact tokenizer.
model = T5ForConditionalGeneration.from_pretrained("./Ruttoni_AI")
tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")

# Generate a summary using the trained model
def generate_summary(input_text, **generate_kwargs):
    """Generate a summary/answer for ``input_text`` using the loaded T5 model.

    Parameters
    ----------
    input_text : str
        The prompt/question fed to the model.
    **generate_kwargs
        Optional keyword arguments forwarded to ``model.generate`` (e.g.
        ``max_new_tokens=64``). Empty by default, so existing callers keep
        the model's default generation settings.

    Returns
    -------
    str
        The decoded model output with special tokens stripped.
    """
    # truncation=True caps the prompt at the model's maximum input length
    # instead of silently overflowing it.
    input_ids = tokenizer.encode(input_text, return_tensors='pt', truncation=True)
    # Inference only — no_grad skips building the autograd graph.
    with torch.no_grad():
        outputs = model.generate(input_ids, **generate_kwargs)
    summary = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return summary

# Example usage
input_text = "Who is pesce beddo?"
summary = generate_summary(input_text)
# Style.RESET_ALL restores the terminal defaults; without it the green
# background from Back.GREEN bleeds into every subsequent line of output.
print(Back.GREEN + "Answer: " + summary + Style.RESET_ALL)