Update app.py
app.py CHANGED
@@ -11,7 +11,7 @@ deepseek_client = OpenAI(api_key=DEEPSEEK_API_KEY, base_url="https://api.deepsee
 def generate_response(prompt, temperature, top_p, max_tokens, repetition_penalty):
     try:
         response = openai_client.chat.completions.create(
-            model="gpt-3.5-turbo", #
+            model="gpt-3.5-turbo", # model="deepseek-chat",
             messages=[{"role": "user", "content": prompt}],
             temperature=temperature,
             top_p=top_p,
@@ -27,6 +27,7 @@ def generate_response(prompt, temperature, top_p, max_tokens, repetition_penalty
 iface = gr.Interface(
     fn=generate_response,
     inputs=[
+        gr.Dropdown(choices=["DeepSeek", "OpenAI"], value="DeepSeek", label="Model Provider"),
         gr.Textbox(label="Prompt", lines=6, placeholder="Ask something..."),
         gr.Slider(minimum=0.1, maximum=1.5, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(minimum=0.1, maximum=1.0, value=0.9, step=0.05, label="Top-p"),
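
For context, a minimal sketch of how the updated generate_response could consume the new "Model Provider" dropdown: because the dropdown is the first entry in inputs, its value arrives as the first positional argument. The extra provider parameter, the routing logic, and the placeholder API keys below are assumptions for illustration only; the diff above shows just the dropdown and the commented-out model swap.

```python
# Sketch only, assuming a provider argument is threaded through from the new
# gr.Dropdown; this is not the committed code.
from openai import OpenAI

OPENAI_API_KEY = "..."    # assumed to be loaded from the environment / Space secrets
DEEPSEEK_API_KEY = "..."  # assumed to be loaded from the environment / Space secrets

openai_client = OpenAI(api_key=OPENAI_API_KEY)
deepseek_client = OpenAI(api_key=DEEPSEEK_API_KEY, base_url="https://api.deepseek.com")

def generate_response(provider, prompt, temperature, top_p, max_tokens, repetition_penalty):
    # Route to the client that matches the dropdown choice (assumed model names).
    if provider == "DeepSeek":
        client, model = deepseek_client, "deepseek-chat"
    else:
        client, model = openai_client, "gpt-3.5-turbo"
    try:
        response = client.chat.completions.create(
            model=model,
            messages=[{"role": "user", "content": prompt}],
            temperature=temperature,
            top_p=top_p,
            max_tokens=max_tokens,
            # repetition_penalty handling is not shown in the diff; omitted here.
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"Error: {e}"
```

With this shape, the existing gr.Interface inputs list works unchanged, since Gradio passes the dropdown, textbox, and slider values to the function in the order they are declared.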