#!/usr/bin/python3
# -*- coding: utf-8 -*-
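"""
Gradio demo for trying out "Awesome ChatGPT Prompts" against the OpenAI API.

Example invocation (a sketch; it assumes this file is saved as main.py, and the
API key may alternatively be supplied via project_settings):

    python3 main.py \
        --examples_json_file examples.json \
        --openai_api_key YOUR_OPENAI_API_KEY
"""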
import argparse
import json
import platform

import gradio as gr
from langchain.chains.llm import LLMChain
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate

import project_settings as settings


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--examples_json_file",
        default="examples.json",
        type=str
    )
    parser.add_argument(
        "--openai_api_key",
        default=settings.environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    args = parser.parse_args()
    return args


def main():
    args = get_args()

    with open(args.examples_json_file, "r", encoding="utf-8") as f:
        examples = json.load(f)
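    # Each entry in examples.json is expected to look roughly like the record
    # below (a sketch inferred from the keys read when building the Gradio
    # examples list further down); adjust it to match your actual file.
    #
    # {
    #     "title": "best practice",
    #     "description": "description for prompt",
    #     "prompt": "Your prompt text here.",
    #     "model_name": "text-davinci-003",
    #     "temperature": 0.85,
    #     "max_tokens": 1024
    # }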

    def fn(prompt_name: str, prompt_description: str, prompt: str, model_name: str = "text-davinci-003",
           temperature: float = 0.9, max_tokens: int = 1024, openai_api_key: str = None):
        # prompt_name and prompt_description are display-only inputs; only the
        # prompt text itself is sent to the model.
        llm = OpenAI(
            model_name=model_name,
            temperature=temperature,
            openai_api_key=openai_api_key,
            max_tokens=int(max_tokens),
            streaming=False
        )
        # The prompt is used as a complete template with no input variables,
        # so the chain can be run without any arguments.
        prompt_template: PromptTemplate = PromptTemplate.from_template(prompt)

        llm_chain = LLMChain(llm=llm, prompt=prompt_template)

        try:
            outputs = llm_chain.predict()
        except Exception as e:
            # Surface API errors (invalid key, quota, etc.) directly in the UI.
            outputs = str(e)
        return outputs
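
    # Example direct call, bypassing the Gradio UI (a sketch; requires a valid key):
    #   fn("best practice", "description for prompt", "Say hello.",
    #      temperature=0.9, max_tokens=64, openai_api_key="YOUR_OPENAI_API_KEY")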

    description = """
    Awesome ChatGPT Prompts

    A collection of useful prompts to try out.

    You need to provide your own OpenAI API Key.

    You are welcome to post prompts you find valuable in the community.
    """

    demo = gr.Interface(
        fn=fn,
        inputs=[
            gr.Text(label="prompt_name", value="best practice"),
            gr.Text(label="prompt_description", value="description for prompt"),
            gr.Text(label="prompt", lines=4, max_lines=200),
            gr.Text(label="model_name", value="text-davinci-003"),
            gr.Slider(minimum=0, maximum=1, value=0.85, label="temperature"),
            gr.Number(value=1024, label="max_tokens"),
            gr.Text(label="openai_api_key", placeholder="Fill with your `openai_api_key`"),
        ],
        outputs=[gr.Text(label="output", lines=4, max_lines=200)],
        examples=[[
            example["title"],
            example["description"],
            example["prompt"],
            example["model_name"],
            example["temperature"],
            example["max_tokens"],
            args.openai_api_key or "Fill with your `openai_api_key`"
        ] for example in examples],
        cache_examples=False,
        examples_per_page=50,
        title="Awesome ChatGPT Prompts",
        description=description,
    )
    # Bind to localhost on a local Windows machine, and to all interfaces
    # otherwise (e.g. when deployed in a container or on a remote host).
    demo.launch(
        server_name="127.0.0.1" if platform.system() == "Windows" else "0.0.0.0",
        server_port=7860
    )

    return


if __name__ == '__main__':
    main()