|
import tempfile

import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
|
|
|
# Small (~70M-parameter) Pythia checkpoint, loaded once at import time so the
# Gradio handler can reuse the same model across requests.
# NOTE(review): this is a plain base causal LM — it continues text rather than
# performing a true "code review"; confirm this checkpoint is intentional.
model_name = "EleutherAI/pythia-70m"

tokenizer = AutoTokenizer.from_pretrained(model_name)

# Explicit .to("cpu") keeps inference on CPU; matches the CPU tensors built in
# review_code().
model = AutoModelForCausalLM.from_pretrained(model_name).to("cpu")
|
|
|
|
|
def review_code(code_snippet):
    """Run the causal LM over *code_snippet* and return the decoded text.

    Parameters
    ----------
    code_snippet : str
        Raw Python source text entered by the user.

    Returns
    -------
    str
        The model's decoded generation, or an error message if generation
        produced nothing.
    """
    # Original print statements contained garbled (mojibake) emoji that split
    # the string literal across lines — a syntax error.  Replaced with plain
    # ASCII diagnostics.
    print("Received Code:", code_snippet)

    # Tokenize and run deterministic beam search on CPU.  max_length=80 bounds
    # prompt + generation combined, so long snippets are effectively truncated.
    inputs = tokenizer(code_snippet, return_tensors="pt").to("cpu")
    outputs = model.generate(**inputs, max_length=80, do_sample=False, num_beams=3)

    # generate() normally always returns a tensor; also guard against an empty
    # batch so outputs[0] below cannot raise.
    if outputs is None or len(outputs) == 0:
        print("Model did not generate output!")
        return "Error: Model did not generate output."

    reviewed_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
    print("Generated Code:", reviewed_code)

    return reviewed_code
|
|
|
|
|
def check_code(input_code):
    """Review *input_code* and return (original, reviewed, download_path).

    The third value feeds a ``gr.File`` component, which expects a filesystem
    path — the original code returned the raw reviewed string there, breaking
    the download output.  The reviewed code is now written to a temporary
    ``.py`` file and its path is returned instead.

    Parameters
    ----------
    input_code : str
        Code submitted through the Gradio textbox.

    Returns
    -------
    tuple[str, str, str]
        (original code, reviewed code, path to a temp file holding the review)
    """
    reviewed_code = review_code(input_code)

    # gr.File needs a path, not a string: persist the review to a temp file.
    # delete=False so the file outlives this handler for Gradio to serve.
    with tempfile.NamedTemporaryFile(
        mode="w", suffix=".py", delete=False, encoding="utf-8"
    ) as tmp:
        tmp.write(reviewed_code)
        file_path = tmp.name

    return input_code, reviewed_code, file_path
|
|
|
|
|
# Wire the handler into a one-input, three-output UI.  check_code returns a
# 3-tuple that maps positionally onto the components below.
# NOTE(review): the title/description strings appear mojibake-garbled (likely
# lost emoji) — confirm the intended text before shipping.
interface = gr.Interface(
    fn=check_code,
    inputs=gr.Textbox(label="Enter Python Code"),
    outputs=[
        gr.Textbox(label="Original Code", interactive=False),
        gr.Textbox(label="Reviewed Code", interactive=False),
        # gr.File renders a download link from the handler's third return value.
        gr.File(label="Download Reviewed Code")
    ],
    title="π AI Code Reviewer",
    description="π Enter Python code and get a reviewed version. Download the reviewed code as a file.",
    allow_flagging="never"  # disable Gradio's flagging button/logging
)

# Bind on all interfaces so the app is reachable from outside a container or
# remote host; show_error surfaces handler exceptions in the browser UI.
interface.launch(server_name="0.0.0.0", server_port=7860, show_error=True)
|
|