import os
import gradio as gr
import openai
import google.generativeai as palm
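# Assumed dependencies for this demo (inferred from the API calls used below, not pinned
# anywhere in this file): gradio, openai<1.0 (legacy openai.ChatCompletion interface), and
# google-generativeai (legacy PaLM SDK). The match/case dispatch further down additionally
# requires Python 3.10+.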

llm_api_options = ["OpenAI API","Azure OpenAI API","Google PaLM API", "Llama 2"]
TEST_MESSAGE = "Write an introductory paragraph to explain Generative AI to the reader of this content."
openai_models = ["gpt-4", "gpt-4-0613", "gpt-4-32k", "gpt-4-32k-0613", "gpt-3.5-turbo",
                 "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613", "text-davinci-003",
                 "text-davinci-002", "text-curie-001", "text-babbage-001", "text-ada-001"]

google_palm_models = ["models/text-bison-001", "models/chat-bison-001","models/embedding-gecko-001"]

# Provider credentials, endpoints, and deployment names are read from environment variables.
azure_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
azure_deployment_name = os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME")
google_palm_key = os.getenv("GOOGLE_PALM_AI_API_KEY")
openai_api_key = os.getenv("OPENAI_API_KEY")
azure_openai_api_key = os.getenv("AZURE_OPENAI_KEY")
temperature = 0.7

# Text completion through the OpenAI chat completions endpoint (legacy openai.ChatCompletion).
# Returns a (status_message, response_text) tuple for the Gradio outputs.
def openai_text_completion(prompt: str, model: str):
    try:
        # The original trailing commas turned these into tuples; they are plain strings here.
        system_prompt = "Explain in detail to help student understand the concept."
        assistant_prompt = None
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": prompt}
        ]
        # Only include an assistant turn when there is actual content for it.
        if assistant_prompt:
            messages.append({"role": "assistant", "content": assistant_prompt})
        openai.api_key = openai_api_key
        # Reset the SDK's global Azure settings in case the Azure code path ran first.
        openai.api_type = "open_ai"
        openai.api_base = "https://api.openai.com/v1"
        openai.api_version = None
        completion = openai.ChatCompletion.create(
            model = model, 
            messages = messages,
            temperature = temperature
        )           
        response = completion["choices"][0]["message"]["content"]
        return "", response
    except Exception as exception:
        print(f"Exception Name: {type(exception).__name__}")
        print(exception)
        return f" openai_text_completion Error - {exception}", ""

# Azure OpenAI variant: the same chat completion call, but routed through an Azure
# resource endpoint and a deployment name rather than a bare model name.
def azure_openai_text_completion(prompt: str, model: str):
    try:
        system_prompt = "Explain in detail to help student understand the concept."
        assistant_prompt = None
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": prompt}
        ]
        if assistant_prompt:
            messages.append({"role": "assistant", "content": assistant_prompt})
        openai.api_key = azure_openai_api_key
        openai.api_type = "azure"
        openai.api_version = "2023-05-15" 
        # AZURE_OPENAI_ENDPOINT is expected to hold only the Azure resource name, not a full URL.
        openai.api_base = f"https://{azure_endpoint}.openai.azure.com"
        completion = openai.ChatCompletion.create(
            model = model, 
            engine = azure_deployment_name,
            messages = messages,
            temperature = temperature
        )           
        response = completion["choices"][0]["message"]["content"]
        return "", response
    except Exception as exception:
        print(f"Exception Name: {type(exception).__name__}")
        print(exception)
        return f" azure_openai_text_completion Error - {exception}", ""


# Text completion through the Google PaLM text generation endpoint.
def palm_text_completion(prompt: str, model: str):
    try:
        candidate_count = 1
        top_k = 40
        top_p = 0.95
        max_output_tokens = 1024
        palm.configure(api_key=google_palm_key)
        defaults = {
                  'model': model,
                  'temperature': temperature,
                  'candidate_count': candidate_count,
                  'top_k': top_k,
                  'top_p': top_p,
                  'max_output_tokens': max_output_tokens,
                  'stop_sequences': [],
                  'safety_settings': [
                      {"category": "HARM_CATEGORY_DEROGATORY", "threshold": 1},
                      {"category": "HARM_CATEGORY_TOXICITY", "threshold": 1},
                      {"category": "HARM_CATEGORY_VIOLENCE", "threshold": 2},
                      {"category": "HARM_CATEGORY_SEXUAL", "threshold": 2},
                      {"category": "HARM_CATEGORY_MEDICAL", "threshold": 2},
                      {"category": "HARM_CATEGORY_DANGEROUS", "threshold": 2},
                  ],
                }
        
        response = palm.generate_text(
          **defaults,
          prompt=prompt
        )
        return "", response.result
    except Exception as exception:
        print(f"Exception Name: {type(exception).__name__}")
        print(exception)
        return f" palm_text_completion Error - {exception}", ""

# Dispatch the UI selection to the matching provider; returns (status_message, response_text).
# The match/case statement below requires Python 3.10+.
def test_handler(optionSelection, prompt: str = TEST_MESSAGE, openai_model_name: str = "gpt-4", google_model_name: str = "models/text-bison-001"):
    match optionSelection:
        case "OpenAI API":
            message, response = openai_text_completion(prompt, openai_model_name)
            return message, response
        case "Azure OpenAI API":
            message, response = azure_openai_text_completion(prompt, openai_model_name)
            return message, response
        case "Google PaLM API":
            message, response = palm_text_completion(prompt, google_model_name)
            return message, response
        case "Llama 2":
            return f"{optionSelection} is not yet implemented!", ""
        case _:
            # Unreachable via the Radio control, but guards against bad programmatic input.
            return f"Invalid choice: {optionSelection}", ""

        

# Gradio UI: one tab for running a prompt against the selected provider, plus an
# "API Settings" tab for choosing provider-specific models.
with gr.Blocks() as LLMDemoTabbedScreen:
    with gr.Tab("Text-to-Text (Text Completion)"):
        llm_options = gr.Radio(llm_api_options, label="Select one", info="Which service do you want to use?", value="OpenAI API")
        with gr.Row():
            with gr.Column(): 
                test_string = gr.Textbox(label="Try String", value=TEST_MESSAGE, lines=2)
                test_string_response = gr.Textbox(label="Response")
                test_string_output_info = gr.Label(value="Output Info", label="Info")
                test_button = gr.Button("Try it")
    with gr.Tab("API Settings"):
        with gr.Tab("Open AI"):
            openai_model = gr.Dropdown(openai_models, value="gpt-4", label="Model", info="Select one, for Natural language")
        with gr.Tab("Google PaLM API"):
            google_model_name = gr.Dropdown(google_palm_models, value="models/text-bison-001",
                                            label="Model", info="Select one, for Natural language")
        
    test_button.click(
            fn=test_handler,
            inputs=[llm_options, test_string, openai_model, google_model_name],
            outputs=[test_string_output_info, test_string_response]
    )
    
if __name__ == "__main__":
    LLMDemoTabbedScreen.launch()
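
# Illustrative environment setup before launching (variable names taken from the
# os.getenv calls above; the values shown are placeholders, not real credentials):
#
#   export OPENAI_API_KEY="sk-..."
#   export AZURE_OPENAI_KEY="..."
#   export AZURE_OPENAI_ENDPOINT="my-azure-resource"         # resource name only, not a full URL
#   export AZURE_OPENAI_DEPLOYMENT_NAME="my-gpt-deployment"
#   export GOOGLE_PALM_AI_API_KEY="..."
#
#   python app.py   # assumed file name; launches the Gradio demo on the default port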