RuttoniAI / aka.py
lu2000luk's picture
Upload 8 files
809f9c2
raw
history blame
725 Bytes
from transformers import T5Tokenizer, T5ForConditionalGeneration
import torch
import colorama
from colorama import Fore, Back, Style
# Enable ANSI color support (required for colorama colors on Windows terminals)
colorama.init()
# Load the fine-tuned T5 weights from the local ./Ruttoni_AI directory for inference
model = T5ForConditionalGeneration.from_pretrained("./Ruttoni_AI")
# Tokenizer comes from the base checkpoint the model was fine-tuned from
# (assumes the fine-tune did not change the vocabulary — TODO confirm)
tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
# Generate a summary using the trained model
def generate_summary(input_text):
    """Run the fine-tuned T5 model on *input_text* and return the decoded output.

    Encodes the prompt with the module-level tokenizer, generates with the
    module-level model (default generation settings), and decodes the first
    returned sequence with special tokens stripped.
    """
    encoded = tokenizer.encode(input_text, return_tensors='pt')
    generated = model.generate(encoded)
    decoded = tokenizer.decode(generated[0], skip_special_tokens=True)
    return decoded
# Example usage: ask the model a question and print the answer on a green background.
input_text = "Who is pesce beddo?"
summary = generate_summary(input_text)
# Style.RESET_ALL restores the terminal's default colors afterwards;
# without it the green background bleeds into subsequent terminal output.
print(Back.GREEN + "Answer: " + summary + Style.RESET_ALL)