prithivMLmods committed on
Commit 0c1d553
1 Parent(s): 9240e91

Update app.py

Files changed (1)
  1. app.py (+7, -37)
app.py CHANGED
@@ -2,39 +2,11 @@ import gradio as gr
 from openai import OpenAI
 import os
 
-# Custom CSS for the Deadpool theme
 css = '''
-.gradio-container {
-    max-width: 1000px !important;
-    background-color: black !important;
-    color: red !important;
-    font-family: 'Courier New', monospace !important;
-    padding: 20px !important;
-    border-radius: 5px !important;
-}
-h1 {
-    text-align: center;
-    color: red !important;
-    font-family: 'Impact', sans-serif !important;
-}
+.gradio-container{max-width: 1000px !important}
+h1{text-align:center}
 footer {
-    visibility: hidden;
-}
-textarea, input, select, button {
-    background-color: black !important;
-    color: red !important;
-    border: 2px solid blue !important;
-    font-family: 'Courier New', monospace !important;
-}
-button:hover {
-    background-color: blue !important;
-    color: white !important;
-}
-.chatbot {
-    background-color: black !important;
-    color: red !important;
-    border: 2px solid blue !important;
-    font-family: 'Courier New', monospace !important;
+    visibility: hidden
 }
 '''
 
@@ -65,7 +37,7 @@ def respond(
 
     response = ""
 
-    for message in client.chat.completions.create(
+    for message in client.chat.completions.create(
         model="meta-llama/Meta-Llama-3.1-8B-Instruct",
         max_tokens=max_tokens,
         stream=True,
@@ -81,7 +53,7 @@ def respond(
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-        gr.Textbox(value="", label="System message", lines=2),
+        gr.Textbox(value="", label="System message"),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(
@@ -91,11 +63,9 @@ demo = gr.ChatInterface(
             step=0.05,
             label="Top-P",
         ),
+
     ],
-    css=css,
-    title="Deadpool Chat",
-    theme="default"
+    css=css
 )
-
 if __name__ == "__main__":
     demo.launch()
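
For context, a minimal sketch of what app.py could look like after this commit, assembled from the fragments visible in the diff. Only the CSS block, the model name, the streaming call, and the gr.ChatInterface inputs come from the commit itself; the client setup (base_url, HF_TOKEN), the respond signature, the history-to-messages handling, and the Top-P slider bounds are assumptions based on the usual Gradio + OpenAI-client chat template and may differ from the actual file.

import gradio as gr
from openai import OpenAI
import os

# Minimal CSS, as in the new version of the file.
css = '''
.gradio-container{max-width: 1000px !important}
h1{text-align:center}
footer {
    visibility: hidden
}
'''

# Assumption: an OpenAI-compatible endpoint (here the Hugging Face serverless
# Inference API) with the key read from the environment; the real file's
# client setup is not shown in the diff and may differ.
client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1/",
    api_key=os.getenv("HF_TOKEN"),
)

def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Assumption: history arrives as (user, assistant) pairs, the classic
    # gr.ChatInterface format used by this template.
    messages = [{"role": "system", "content": system_message}]
    for user, assistant in history:
        if user:
            messages.append({"role": "user", "content": user})
        if assistant:
            messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})

    response = ""
    # The streaming loop visible in the diff: each streamed chunk carries a
    # delta that is appended to the running response and yielded to Gradio.
    # The loop variable is named "message" to match the diff; it shadows the
    # message argument, which has already been added to messages above.
    for message in client.chat.completions.create(
        model="meta-llama/Meta-Llama-3.1-8B-Instruct",
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
        messages=messages,
    ):
        token = message.choices[0].delta.content or ""
        response += token
        yield response

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        # Only step and label for this slider appear in the diff; the
        # minimum/maximum/value shown here are assumed defaults.
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-P"),
    ],
    css=css
)

if __name__ == "__main__":
    demo.launch()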