import gradio as gr
import requests
from datetime import datetime

def to_md(text):
    # Render newlines as HTML line breaks for markdown/HTML output.
    return text.replace("\n", "<br />")

def infer(
    prompt, 
    model_name, 
    max_new_tokens=10, 
    temperature=0.1, 
    top_p=1.0,
    top_k=40,
    num_completions=1,
    seed=42,
    stop="\n"
):
    # Map the UI-facing model name to the identifier expected by the inference API.
    model_name_map = {
        "GPT-JT-6B-v1": "Together-gpt-JT-6B-v1",
    }
    max_new_tokens = int(max_new_tokens)
    num_completions = int(num_completions)
    temperature = float(temperature)
    top_p = float(top_p)
    top_k = int(top_k)
    seed = int(seed)
    # Multiple stop sequences may be given as a single ";"-separated string.
    stop = stop.split(";")
    
    # Basic sanity checks; ranges mirror the limits exposed by the UI controls.
    assert 1 <= max_new_tokens <= 1000
    assert 1 <= num_completions <= 5
    assert 0.0 <= temperature <= 10.0
    assert 0.0 <= top_p <= 1.0
    assert 1 <= top_k <= 1000

    # Avoid a temperature of exactly 0.0; use a small positive value for near-greedy sampling.
    if temperature == 0.0:
        temperature = 0.01
    # Avoid sending an empty prompt.
    if prompt == "":
        prompt = " "
    # Build the request payload. num_completions and seed are validated above
    # but not forwarded to this endpoint.
    my_post_dict = {
        "model": model_name_map.get(model_name, model_name),
        "prompt": prompt,
        "top_p": top_p,
        "top_k": top_k,
        "temperature": temperature,
        "max_tokens": max_new_tokens,
        "stop": stop,
    }
    print(f"send: {datetime.now()}")
    response = requests.get("https://staging.together.xyz/api/inference", params=my_post_dict).json()
    generated_text = response["output"]["choices"][0]["text"]
    print(f"recv: {datetime.now()}")

    # Truncate the completion at the first occurrence of any stop sequence.
    for stop_word in stop:
        if stop_word != "" and stop_word in generated_text:
            generated_text = generated_text[:generated_text.find(stop_word)]

    return generated_text

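# Illustrative sketch only: calling infer() directly, without the Gradio UI.
# The prompt below is a hypothetical placeholder; the parameter values mirror
# the defaults used by the interface in main().
#
#   text = infer(
#       "Q: What is the capital of France?\nA:",
#       "GPT-JT-6B-v1",
#       max_new_tokens=16,
#       temperature=0.1,
#       top_p=1.0,
#       top_k=40,
#   )
#   print(text)
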
def main():
    # Wire the UI controls to the positional parameters of infer(); the remaining
    # parameters (num_completions, seed, stop) keep their defaults.
    iface = gr.Interface(
        fn=infer,
        inputs=[
            gr.Textbox(lines=20, label="Prompt"),                                # prompt
            gr.Dropdown(["GPT-JT-6B-v1"], value="GPT-JT-6B-v1", label="Model"),  # model_name
            gr.Slider(10, 1000, value=200, label="Max new tokens"),              # max_new_tokens
            gr.Slider(0.0, 1.0, value=0.1, label="Temperature"),                 # temperature
            gr.Slider(0.0, 1.0, value=1.0, label="Top-p"),                       # top_p
            gr.Slider(1, 100, value=40, label="Top-k"),                          # top_k
        ],
        outputs=gr.Textbox(lines=7, label="Generated text"),
    )

    iface.launch(debug=True)

if __name__ == '__main__':
    main()