import os

import gradio as gr
from zhipuai import ZhipuAI

# System prompt (Chinese). In English: "You are an intelligent programming assistant named
# CodeGeeX. You answer users' questions about programming, code, and computers, providing
# well-formatted, executable, accurate, and safe code, with detailed explanations when necessary."
SYSTEM_PROMPT = "你是一位智能编程助手,你叫CodeGeeX。你会为用户回答关于编程、代码、计算机方面的任何问题,并提供格式规范、可以执行、准确安全的代码,并在必要时提供详细的解释。"

# ZhipuAI client; the API key is read from the CODEGEEX_API_KEY environment variable.
client = ZhipuAI(api_key=os.getenv("CODEGEEX_API_KEY"))

def respond(message, history: list[tuple[str, str]]):
    """Stream a reply from CodeGeeX-4, replaying the chat history for context."""
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]

    # Replay earlier turns so the model sees the full conversation.
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    messages.append({"role": "user", "content": message})

    response = ""

    # Stream the completion, yielding the accumulated reply after each chunk.
    # The loop variable is named `chunk` to avoid shadowing the `message` parameter,
    # and empty/None deltas (e.g. the final chunk) are skipped.
    for chunk in client.chat.completions.create(
        messages=messages,  # type: ignore
        model="codegeex-4",
        stream=True,
        temperature=0.2,
        max_tokens=1024,
        top_p=0.95,
    ):  # type: ignore
        token = chunk.choices[0].delta.content
        if token:
            response += token
            yield response

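# Example (assumed usage, for local testing outside Gradio): respond() is a generator,
# so the partial reply can be consumed as it streams. The prompt below is illustrative only.
#
#     for partial in respond("Write a quicksort in Python", history=[]):
#         print(partial)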

with gr.Blocks(fill_height=True) as demo:
    gr.Markdown(
        """
        <p align="center" style="margin: 32px 32px 0 0;">
            <img src="https://gist.githubusercontent.com/rojas-diego/0c1b444aff2c6b6420ff635bfd206869/raw/16566317fabce71d35ab3cf8c71adf3b5dc11d87/codegeex.svg" style="width: 30%">
        </p>
        """
    )
    gr.Markdown(
        """
        <p align="center">
            🏠 <a href="https://codegeex.cn" target="_blank">Homepage</a> | 📖 <a href="http://keg.cs.tsinghua.edu.cn/codegeex/" target="_blank">Blog</a> | 🛠 <a href="https://marketplace.visualstudio.com/items?itemName=aminer.codegeex" target="_blank">VS Code</a> or <a href="https://plugins.jetbrains.com/plugin/20587-codegeex" target="_blank">Jetbrains</a> Extensions | 💻 <a href="https://github.com/THUDM/CodeGeeX4" target="_blank">Github</a> | 🤖 <a href="https://huggingface.co/THUDM/codegeex-4-9b" target="_blank">HuggingFace</a>
        </p>
        """
    )
    gr.Markdown(
        """
        <p align="center">
            We introduce CodeGeeX4 9B, a large-scale multilingual code generation model with 9 billion parameters, pre-trained on a large code corpus of more than 300 programming languages. CodeGeeX4 9B is open source, please refer to our <a href="https://github.com/THUDM/CodeGeeX4" target="_blank">GitHub</a> for more details. We also offer free <a href="https://marketplace.visualstudio.com/items?itemName=aminer.codegeex" target="_blank">VS Code</a> and <a href="https://plugins.jetbrains.com/plugin/20587-codegeex" target="_blank">Jetbrains</a> extensions for full functionality.
        </p>
        """
    )

    gr.ChatInterface(respond, fill_height=True)


if __name__ == "__main__":
    demo.launch()
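    # Note: demo.launch() serves the app locally; Gradio's share=True option could be
    # passed instead to create a temporary public URL (not enabled here).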