import json
import os

import gradio as gr
import requests

APIKEY = os.environ.get("APIKEY")
APISECRET = os.environ.get("APISECRET")


def predict(prompt, lang, seed, out_seq_length, temperature, top_k, top_p):
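    """Call the CodeGeeX generation API and return the prompt with the first generated completion appended.

    Returns an error message string if the request fails or the API reports an error.
    """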

    if prompt == '':
        return 'Input should not be empty!'

    url = 'https://tianqi.aminer.cn/api/v2/multilingual_code_generate_block'

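    # Assemble the JSON payload expected by the generation endpoint: credentials, prompt, and sampling parameters.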
    payload = json.dumps({
        "apikey"        : APIKEY,
        "apisecret"     : APISECRET,
        "prompt"        : prompt,
        "lang"          : lang,
        "out_seq_length": out_seq_length,
        "seed"          : seed,
        "temperature"   : temperature,
        "top_k"         : top_k,
        "top_p"         : top_p
    })

    headers = {
        'Content-Type': 'application/json'
    }

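    # POST the request; timeout=(20, 100) allows 20 seconds to connect and 100 seconds to read the response.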
    try:
        response = requests.request("POST", url, headers=headers, data=payload, timeout=(20, 100)).json()
    except Exception:
        return 'Request failed or timed out! Please wait a few minutes and retry.'

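    # A status of 1 signals an API-side error; surface its message instead of generated code.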
    if response['status'] == 1:
        return response['message']

    answer = response['result']['output']['code'][0]

    return prompt + answer


def main():
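    """Build the Gradio Blocks UI and launch the demo."""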
    gr.close_all()
    examples = []
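    # Load example inputs for the Examples widget from a local JSONL file (one JSON object per line).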
    with open("./example_inputs.jsonl", "r") as f:
        for line in f:
            examples.append(list(json.loads(line).values()))

    with gr.Blocks() as demo:
        gr.Markdown(
            """# CodeGeeX: A Multilingual Code Generation Model
            <img src="https://github.com/THUDM/CodeGeeX/blob/main/resources/logo/codegeex_logo.png?raw=true">
            We introduce CodeGeeX, a large-scale multilingual code generation model with 13 billion parameters, pre-trained on a large code corpus covering more than 20 programming languages. CodeGeeX was trained on more than 850 billion tokens on a cluster of 1,536 [Ascend 910 AI Processors](https://e.huawei.com/en/products/servers/ascend). It supports 15+ programming languages for both code generation and code translation. CodeGeeX is open source; please refer to our [GitHub](https://github.com/THUDM/CodeGeeX) for more details. This is a minimal demo; for other demos such as code translation, please visit our [Homepage](https://models.aminer.cn/codegeex/). We also offer a free [VS Code extension](https://marketplace.visualstudio.com/items?itemName=aminer.codegeex) with full functionality.
            """)

        with gr.Row():
            with gr.Column():
                prompt = gr.Textbox(lines=13, placeholder='Input', label='Input')
                with gr.Row():
                    gen = gr.Button("Generate")
                    clr = gr.Button("Clear")

            outputs = gr.Textbox(lines=15, label='Output')

        gr.Markdown(
            """
            Generation Parameters
            """)
        with gr.Row():
            with gr.Column():
                lang = gr.Radio(
                    choices=["C++", "C", "C#", "Python", "Java", "HTML", "PHP", "JavaScript", "TypeScript", "Go",
                             "Rust", "SQL", "Kotlin", "R", "Fortran"],
                    value="Python", label='Programming Language')
            with gr.Column():
                seed = gr.Slider(maximum=10000, value=43, step=1, label='Seed')
                out_seq_length = gr.Slider(maximum=1024, value=256, minimum=1, step=1, label='Output Sequence Length')
                temperature = gr.Slider(maximum=1, value=0.9, minimum=0, label='Temperature')
                top_k = gr.Slider(maximum=40, value=0, minimum=0, step=1, label='Top K')
                top_p = gr.Slider(maximum=1, value=1.0, minimum=0, label='Top P')

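        # Wire the Generate button to the API call; the Clear button simply empties the prompt box.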
        inputs = [prompt, lang, seed, out_seq_length, temperature, top_k, top_p]
        gen.click(fn=predict, inputs=inputs, outputs=outputs)
        clr.click(fn=lambda: gr.update(value=""), inputs=None, outputs=prompt)

        gr_examples = gr.Examples(examples=examples, inputs=[prompt, lang],
                                  label="Example Inputs (Click to insert an examplet it into the input box)",
                                  examples_per_page=20)
        gr.Markdown("![visitors](https://visitor-badge.glitch.me/badge?page_id=THUDM.CodeGeeX)")
        
    demo.launch()

if __name__ == '__main__':
    main()
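
# Usage sketch (assumes this file is saved as app.py and that valid Tianqi API credentials are available):
#   APIKEY=<your key> APISECRET=<your secret> python app.py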