import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline

title = "Code Complexity Predictor"
description = (
    "This is a space to predict the complexity of Java code with "
    "[CodeParrot-Multi-Complexity](https://huggingface.co/codeparrot/codeparrot-small-multi), "
    "a multilingual model for code generation, finetuned on "
    "[CodeComplex](https://huggingface.co/datasets/codeparrot/codecomplex), "
    "a dataset for complexity prediction of Java code."
)

# Each example must match the Interface inputs (code snippet, number of results to return);
# the expected complexity labels from the placeholders are kept as comments.
examples = [
    ["code example 1", 1],  # expected complexity: nlogn
    ["code example 2", 1],  # expected complexity: constant
]

# model to be changed to the finetuned one
tokenizer = AutoTokenizer.from_pretrained("codeparrot/codeparrot-small-multi")
model = AutoModelForSequenceClassification.from_pretrained("codeparrot/codeparrot-small-multi")

# Build the classification pipeline once at startup instead of on every request.
pipe = pipeline("text-classification", model=model, tokenizer=tokenizer)


def complexity_estimation(gen_prompt, topk):
    # `do_sample` is a generation argument and is not valid for text classification;
    # `top_k` asks the pipeline for the top-k predicted complexity classes.
    predictions = pipe(gen_prompt, top_k=int(topk))
    return "\n".join(f"{pred['label']}: {pred['score']:.3f}" for pred in predictions)


# Note: the legacy `layout="vertical"` and `theme="peach"` arguments from older Gradio
# releases are omitted; current Gradio lays out components automatically and themes
# are configured via `gr.themes`.
iface = gr.Interface(
    fn=complexity_estimation,
    inputs=[
        gr.Textbox(lines=10, label="Input code"),
        gr.Slider(
            minimum=1,
            maximum=3,
            step=1,
            value=1,
            label="Number of results to return",
        ),
    ],
    outputs=gr.Textbox(label="Predicted complexity", lines=10),
    examples=examples,
    description=description,
    title=title,
)
iface.launch()
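
# --- Quick local sanity check (illustrative sketch, not part of the Space UI) ---
# The Java snippet below is an assumption for illustration; the placeholder model
# above is not yet finetuned for complexity prediction, so real runs may return
# arbitrary labels until the finetuned checkpoint is swapped in.
# Because `iface.launch()` blocks the main thread, run something like this before
# launching (or in a separate script) to exercise the pipeline end to end:
#
#   sample_java = "int sum = 0; for (int x : arr) { sum += x; }"
#   print(complexity_estimation(sample_java, topk=2))
#
# With the finetuned CodeComplex model in place, the output would list complexity
# classes such as "constant" or "nlogn" together with their scores.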