# test_python / app.py
# Author: Leonardo Di Lella (initial commit 431337f)
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load the model and tokenizer (fine-tuned or pre-trained).
# NOTE: loading happens at import time; the 1.3B model download/initialization
# can take a while on first run.
model_name = "EleutherAI/gpt-neo-1.3B" # Replace with your fine-tuned model path
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# Define the function to transform requests
def transform_request(instruction):
    """Generate Python code from a natural-language instruction.

    Args:
        instruction: Free-form text prompt describing the desired code.

    Returns:
        The decoded model output (prompt plus generated continuation) as a string.
    """
    inputs = tokenizer(instruction, return_tensors="pt", truncation=True)
    # max_new_tokens (not max_length) so the generation budget is not consumed
    # by the prompt itself; long instructions previously left no room for output.
    # GPT-Neo has no pad token, so reuse EOS to silence the generate() warning.
    outputs = model.generate(
        **inputs,
        max_new_tokens=100,
        pad_token_id=tokenizer.eos_token_id,
    )
    code = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return code
# Wire the generation function into a simple text-in / text-out Gradio UI.
_DESCRIPTION = "Enter an instruction to generate Python code."

interface = gr.Interface(
    transform_request,
    inputs="text",
    outputs="text",
    title="Code Transformer",
    description=_DESCRIPTION,
)
# Launch the web UI only when run as a script (not when imported).
if __name__ == "__main__":
    interface.launch()