import gradio as gr
from transformers import pipeline


# Modify the uploaded code based on the prompt and the selected model
def modify_code(file, prompt, model_name):
    # Read the uploaded file
    with open(file.name, "r") as f:
        code = f.read()

    # Initialize the text-generation pipeline for the selected model
    if model_name == "CodeGPT":
        generator = pipeline("text-generation", model="microsoft/CodeGPT-small-py")
    elif model_name == "Codex":
        generator = pipeline("text-generation", model="EleutherAI/gpt-neo-2.7B")
    else:
        return "Model not supported."

    # Generate the modified code by prepending the prompt to the original code
    modified_code = generator(
        f"{prompt}\n{code}",
        max_new_tokens=500,  # generate up to 500 new tokens
        num_return_sequences=1,
    )[0]["generated_text"]

    # Truncate the output to a maximum of 793,833 lines (or characters as a fallback)
    max_lines = 793833
    if isinstance(modified_code, str):
        lines = modified_code.splitlines()
        if len(lines) > max_lines:
            modified_code = "\n".join(lines[:max_lines])
        elif len(modified_code) > max_lines:
            modified_code = modified_code[:max_lines]

    return modified_code
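
# A minimal sketch of how modify_code could be exercised outside the Gradio UI.
# The file path "example.py" and the prompt below are illustrative placeholders,
# and SimpleNamespace is only used to mimic the temp-file object gr.File passes in.
def _smoke_test():
    from types import SimpleNamespace
    sample = SimpleNamespace(name="example.py")  # assumes example.py exists locally
    print(modify_code(sample, "Add docstrings to every function", "CodeGPT"))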

# Define the Gradio interface
with gr.Blocks(theme="Nymbo/Nymbo-theme") as demo:
    gr.Markdown("# Code Modifier")
    with gr.Row():
        file_input = gr.File(label="Upload your code file")
        prompt_input = gr.Textbox(label="Enter your prompt for changes")
        model_selector = gr.Dropdown(label="Select a model", choices=["CodeGPT", "Codex"])
    submit_button = gr.Button("Modify Code")
    output = gr.Textbox(label="Modified Code", lines=10)
    submit_button.click(fn=modify_code, inputs=[file_input, prompt_input, model_selector], outputs=output)

# Launch the interface
demo.launch()
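
# Optional usage note: when running locally, launch() also accepts share=True to
# expose a temporary public URL, e.g.:
#     demo.launch(share=True)
# On Hugging Face Spaces the plain demo.launch() above is sufficient.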