Danil committed on
Commit • eba2192
1 Parent(s): a352cad
README.md CHANGED

@@ -1,8 +1,8 @@
 ---
 title: Incoder Api
 emoji: 💻
-colorFrom:
-colorTo:
+colorFrom: blue
+colorTo: green
 sdk: gradio
 sdk_version: 2.9.4
 app_file: app.py
app.py CHANGED

@@ -2,18 +2,18 @@ import requests
 import os
 import gradio as gr
 import json
+from transformers import AutoModelForCausalLM, AutoTokenizer
 
-
-
-
-
-if os.environ.get('SSTART') != "1":
-    start_server()
+model_name = 'facebook/incoder-1B'
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name, low_cpu_mem_usage=True)
+print('load ok')
 
-def completion(prompt,max_tokens,temperature,top_k,top_p):
-
-
-
+def completion(prompt, max_tokens, temperature, top_k, top_p):
+    inpt = tokenizer.encode(prompt, return_tensors="pt")
+    out = model.generate(inpt, max_length=max_tokens, top_p=top_p, top_k=top_k, temperature=temperature)
+    res = tokenizer.decode(out[0])
+    return res
 
 demo = gr.Interface(
     fn=completion,

@@ -32,5 +32,4 @@ demo = gr.Interface(
 
 )
 
-
-demo.launch()
+demo.launch()
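For context, this commit replaces the previous start_server()-based setup (guarded by the SSTART environment variable) with a facebook/incoder-1B model loaded locally via transformers and served through Gradio. Below is a minimal, self-contained sketch of how the new pieces fit together. It is not the Space's exact file: the gr.Interface inputs/outputs (app.py lines 20-31) are not visible in the hunks above, so the string-shortcut components used here are assumptions, and the int() casts are added only to keep the sketch runnable with Gradio's "number" inputs.

# Minimal sketch of the updated app.py flow (assumptions noted in comments).
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the tokenizer and model once at startup, as in the diff.
model_name = 'facebook/incoder-1B'
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, low_cpu_mem_usage=True)

def completion(prompt, max_tokens, temperature, top_k, top_p):
    # Tokenize the prompt and generate a continuation with the UI settings.
    # Note: as written in the diff, generate() defaults to greedy decoding,
    # so temperature/top_k/top_p only take effect if do_sample=True is passed.
    inpt = tokenizer.encode(prompt, return_tensors="pt")
    out = model.generate(inpt, max_length=int(max_tokens), top_p=top_p,
                         top_k=int(top_k), temperature=temperature)
    return tokenizer.decode(out[0])

# Assumed components: the real gr.Interface arguments are outside the
# hunks shown in this diff.
demo = gr.Interface(
    fn=completion,
    inputs=["text", "number", "number", "number", "number"],
    outputs="text",
)

demo.launch()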