# starcoder/app.py
import os
import gradio as gr
from huggingface_hub import InferenceClient
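
# Read the Hugging Face API token from the environment and create an Inference API
# client pointed at the hosted bigcode/starcoder2-15b model.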
token = os.getenv("HF_TOKEN")
client = InferenceClient("bigcode/starcoder2-15b", token=token)
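

# Generator that streams the model's output token by token so the output textbox
# fills in incrementally instead of waiting for the full completion.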
def generate_code(
    task_description,
    max_tokens,
    temperature,
    top_p,
):
    generated_code = ""
    # Gradio sliders pass floats, so cast the token budget to an int for the API.
    for message in client.text_generation(
        task_description,
        max_new_tokens=int(max_tokens),
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        generated_code += message
        yield generated_code
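

# Build the Gradio UI: a prompt box, sampling controls, and a streaming output box.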
with gr.Blocks() as demo:
    gr.Markdown("# 🚀 Starcoder2-15b Code Generator")

    with gr.Row():
        task_input = gr.Textbox(
            lines=3,
            placeholder="Describe the task in natural language...",
            label="Task Description",
        )

    with gr.Row():
        max_tokens = gr.Slider(1, 2048, value=100, step=1, label="Max Tokens")
        temperature = gr.Slider(0.1, 4.0, value=0.7, step=0.1, label="Temperature")
        top_p = gr.Slider(0.1, 1.0, value=0.95, step=0.05, label="Top-p")

    with gr.Row():
        submit_button = gr.Button("Generate Code 🚀")
        output = gr.Textbox(lines=10, label="Generated Code")

    submit_button.click(
        generate_code,
        inputs=[task_input, max_tokens, temperature, top_p],
        outputs=output,
    )

if __name__ == "__main__":
    demo.launch()
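
# To try it locally (assuming gradio and huggingface_hub are installed and a valid
# HF_TOKEN is exported in the environment): run `python app.py` and open the local
# URL that Gradio prints.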