incoder-api / app.py
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
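# Load the facebook/incoder-1B code model and its tokenizer.
# low_cpu_mem_usage=True keeps peak RAM lower while the weights are loaded.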
model_name = 'facebook/incoder-1B'
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, low_cpu_mem_usage=True)
print('load ok')
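# Generation callback: continue the prompt with up to `max_tokens` new tokens.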
def completion(prompt, max_tokens, temperature, top_k, top_p):
    # Tokenize once and reuse the length instead of re-tokenizing the prompt.
    input_ids = tokenizer.encode(prompt, return_tensors="pt")
    prompt_len = input_ids.shape[1]
    # do_sample=True is needed for temperature/top_k/top_p to have any effect;
    # without it generate() runs plain beam search and silently ignores them.
    # Slider values arrive as floats, so cast the integer-valued ones.
    out = model.generate(input_ids, do_sample=True,
                         max_length=prompt_len + int(max_tokens),
                         top_p=top_p, top_k=int(top_k), temperature=temperature,
                         num_beams=2, repetition_penalty=2.0)
    return tokenizer.decode(out[0])
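# Gradio UI: a code textbox plus sliders for the sampling parameters.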
demo = gr.Interface(
    fn=completion,
    inputs=[
        gr.inputs.Textbox(lines=10, placeholder='Write some code...'),
        gr.inputs.Slider(minimum=10, maximum=200, step=10, default=100, label='Max Tokens'),
        # Minimum 0.1: temperature must be strictly positive when sampling.
        gr.inputs.Slider(minimum=0.1, maximum=1.0, step=0.1, default=1.0, label='temperature'),
        gr.inputs.Slider(minimum=0, maximum=50, step=1, default=40, label='top_k', optional=True),
        gr.inputs.Slider(minimum=0, maximum=1.0, step=0.1, default=0.9, label='top_p', optional=True),
    ],
    outputs="text",
    allow_flagging=False,
)
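# Start the Gradio server with default host/port settings.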
demo.launch()