kalashshah040 commited on
Commit
2a71d85
·
verified ·
1 Parent(s): 784dbce

Updated list of Working Models

Browse files
Files changed (1) hide show
  1. app.py +88 -128
app.py CHANGED
@@ -1,129 +1,89 @@
1
- from models import *
2
- import gradio as gr
3
-
4
- GPT_4 = "deepseek/deepseek-chat-v3-0324:free" #12
5
- PHI_4 = "microsoft/phi-4" #2
6
- PHI_3 = "microsoft/phi-3-medium-128k-instruct:free" #3
7
- GEMMA_3_27B = "google/gemma-3-27b-it:free" #4
8
- GEMIN_FLASH_THINK = "google/gemini-2.0-flash-thinking-exp:free" #6
9
- GEMIN_FLASH = "google/gemini-flash-1.5-8b-exp" #7
10
- QWEN_32B = "qwen/qwq-32b:free" #8
11
- QWEN_25 = "qwen/qwen2.5-vl-72b-instruct:free" #10 #error
12
- DEEPSEEK_R1 = "deepseek/deepseek-r1:free" #11
13
- DEEPSEEK_R1_ZERO = "deepseek/deepseek-r1-zero:free" #13
14
- META_LLAMA_MODEL = "meta-llama/Llama-3.3-70B-Instruct:free" #14
15
- MISTRAL_SMALL_MODEL = "mistralai/mistral-small-3.1-24b-instruct:free" #15
16
- MISTRAL_NEMO = "mistralai/mistral-nemo:free" #16
17
- ZEPHYR = "huggingfaceh4/zephyr-7b-beta:free" #17
18
- OLYMPIC_CODER = "open-r1/olympiccoder-32b:free" #19
19
- LEARN = "google/learnlm-1.5-pro-experimental:free" #20
20
- REKA_FLASH = "rekaai/reka-flash-3:free" #21
21
- OPEN_CHAT = "openchat/openchat-7b:free" #21
22
- TOPPY = "undi95/toppy-m-7b:free" #23
23
- MOONLIGHT = "moonshotai/moonlight-16b-a3b-instruct:free"
24
-
25
- CONCISE_ENGLISH_PROMPT = "Answer in short and precise English sentences."
26
-
27
- def get_model(title, dev, model, name, user_input, system_prompt):
28
- if user_input.lower() == "data":
29
- df = get_data()
30
- return df
31
-
32
- if user_input.lower() == "text":
33
- text = get_text()
34
- return text
35
-
36
- if name == "" or name is None:
37
- return "Enter Your Name !"
38
-
39
- if model is None or model == "":
40
- return "Select AI Model !"
41
-
42
- chain = ModelChain()
43
- prompt = system_prompt + " " + CONCISE_ENGLISH_PROMPT
44
-
45
- # Check the model and map to the correct model
46
- if "ChatGPT" == model: #1
47
- return chain.generate_response(GPT_4, name, user_input, prompt)
48
- elif "Phi-4" == model: #2
49
- return chain.generate_response(PHI_4, name, user_input, prompt)
50
- elif "Phi-3" == model: #3
51
- return chain.generate_response(PHI_3, name, user_input, prompt)
52
- elif "Gemma-3" == model: #4
53
- return chain.generate_response(GEMMA_3_27B, name, user_input, prompt)
54
- elif "Gemini-2-Flash-Think" == model: #6
55
- return chain.generate_response(GEMIN_FLASH_THINK, name, user_input, prompt)
56
- elif "Gemini-Flash" == model: #7
57
- return chain.generate_response(GEMIN_FLASH, name, user_input, prompt)
58
- elif "QwQ-32B" == model: #8
59
- return chain.generate_response(QWEN_32B, name, user_input, prompt)
60
- elif "Qwen2.5" == model: #10
61
- return chain.generate_response(QWEN_25, name, user_input, prompt)
62
- elif "DeepSeek-R1" == model: #11
63
- return chain.generate_response(DEEPSEEK_R1, name, user_input, prompt)
64
- elif "DeepSeek-R1-Zero" == model: #11
65
- return chain.generate_response(DEEPSEEK_R1, name, user_input, prompt)
66
- elif "Llama-3.3" == model: #14
67
- return chain.generate_response(META_LLAMA_MODEL, name, user_input, prompt)
68
- elif "Mistral-Small" == model: #15
69
- return chain.generate_response(MISTRAL_SMALL_MODEL, name, user_input, prompt)
70
- elif "Mistral-Nemo" == model: #16
71
- return chain.generate_response(MISTRAL_NEMO, name, user_input, prompt)
72
- elif "Zephyr" == model: #17
73
- return chain.generate_response(ZEPHYR, name, user_input, prompt)
74
- elif "Olympic-Coder" == model: #19
75
- return chain.generate_response(OLYMPIC_CODER, name, user_input, prompt)
76
- elif "LearnLM" == model: #20
77
- return chain.generate_response(LEARN, name, user_input, prompt)
78
- elif "Reka-Flash" == model: #21
79
- return chain.generate_response(REKA_FLASH, name, user_input, prompt)
80
- elif "OpenChat" == model: #21
81
- return chain.generate_response(OPEN_CHAT, name, user_input, prompt)
82
- elif "Toppy" == model: #21
83
- return chain.generate_response(TOPPY, name, user_input, prompt)
84
- elif "MoonLight" == model: #21
85
- return chain.generate_response(MOONLIGHT, name, user_input, prompt)
86
- else:
87
- return "Invalid Model Name : " + model
88
-
89
- def main():
90
- view = gr.Interface(
91
- fn= get_model,
92
- inputs = [
93
- gr.Markdown("# Switch AI"),
94
- gr.Markdown("### by Kalash"),
95
- gr.Radio(
96
- [
97
- "ChatGPT", #1
98
- "Phi-4", #2
99
- "Phi-3", #3
100
- "Gemma-3", #4
101
- "Gemini-2-Flash-Think", #6
102
- "Gemini-Flash", #7
103
- "QwQ-32B", #9
104
- "Qwen2.5", #11
105
- "DeepSeek-R1", #12
106
- "DeepSeek-R1-Zero", #12
107
- "Llama-3.3", #15
108
- "Mistral-Small", #16
109
- "Mistral-Nemo", #17
110
- "Zephyr", #18
111
- "Olympic-Coder", #20
112
- "LearnLM", #8
113
- "Reka-Flash", #21
114
- "OpenChat", #22
115
- "Toppy", #22
116
- "MoonLight", #22
117
- ],
118
- label = "Choose AI Model", value = "ChatGPT"),
119
- gr.Textbox(label = "Your Name", placeholder = "Enter Your Name"),
120
- gr.Textbox(label = "Your Query", placeholder = "Enter Your Question"),
121
- gr.Textbox(label = "System Prompt", placeholder = "Enter Custom System Propmt (Optional)"),
122
- ],
123
- outputs = [gr.Textbox(label ="AI Response", lines = 25)],
124
- flagging_mode = "never"
125
- ).launch(share=True)
126
- # ).launch(share=False, server_port=54321)
127
-
128
- if __name__ == '__main__':
129
  main()
 
from models import *
import gradio as gr

# OpenRouter model identifiers, ":free" tier where available.
# Trailing numeric comments are the author's original slot indices.
GPT_4 = "deepseek/deepseek-chat-v3-0324:free" #12  # NOTE(review): name says GPT_4 but the id is a DeepSeek model — confirm intent
PHI_4 = "microsoft/phi-4" #2
GEMMA_3_27B = "google/gemma-3-27b-it:free" #4
QWEN_32B = "qwen/qwq-32b:free" #8
QWEN_25 = "qwen/qwen2.5-vl-72b-instruct:free" #10 #error  # author-flagged as erroring — verify before relying on it
DEEPSEEK_R1 = "deepseek/deepseek-r1:free" #11
DEEPSEEK_R1_ZERO = "deepseek/deepseek-r1-zero:free" #13
META_LLAMA_MODEL = "meta-llama/Llama-3.3-70B-Instruct:free" #14
MISTRAL_SMALL_MODEL = "mistralai/mistral-small-3.1-24b-instruct:free" #15
MISTRAL_NEMO = "mistralai/mistral-nemo:free" #16

# Appended to every system prompt so responses stay short and in English.
CONCISE_ENGLISH_PROMPT = "Answer in short and precise English sentences."
def get_model(title, dev, model, name, user_input, system_prompt):
    """Route a user query to the selected AI model and return its response.

    Parameters mirror the Gradio inputs in declaration order:
        title, dev: Markdown header components (unused placeholders).
        model: display name chosen in the Radio component.
        name: the user's name; required, validated below.
        user_input: the query text; "data"/"text" are admin shortcuts.
        system_prompt: optional custom system prompt (may be empty/None).

    Returns the model's response string, the admin-shortcut payload, or a
    validation/error message string.
    """
    # Admin shortcuts for inspecting stored usage data (helpers from models.py).
    if user_input.lower() == "data":
        return get_data()
    if user_input.lower() == "text":
        return get_text()

    # Guard clauses: reject missing name / model selection ("" or None).
    if not name:
        return "Enter Your Name !"
    if not model:
        return "Select AI Model !"

    chain = ModelChain()
    # Always append the concise-English instruction; tolerate a None prompt.
    prompt = (system_prompt or "") + " " + CONCISE_ENGLISH_PROMPT

    # Map Radio display names to OpenRouter model identifiers.
    model_ids = {
        "ChatGPT": GPT_4,  # 1
        "Phi-4": PHI_4,  # 2
        "Gemma-3": GEMMA_3_27B,  # 4
        "QwQ-32B": QWEN_32B,  # 8
        "Qwen2.5": QWEN_25,  # 10
        "DeepSeek-R1": DEEPSEEK_R1,  # 11
        # Bug fix: this branch previously routed to DEEPSEEK_R1 instead of
        # the Zero variant, leaving DEEPSEEK_R1_ZERO unused.
        "DeepSeek-R1-Zero": DEEPSEEK_R1_ZERO,  # 13
        "Llama-3.3": META_LLAMA_MODEL,  # 14
        "Mistral-Small": MISTRAL_SMALL_MODEL,  # 15
        "Mistral-Nemo": MISTRAL_NEMO,  # 16
    }
    model_id = model_ids.get(model)
    if model_id is None:
        return "Invalid Model Name : " + model
    return chain.generate_response(model_id, name, user_input, prompt)
58
+
def main():
    """Build and launch the Switch AI Gradio interface (public share link)."""
    # The two Markdown components feed get_model's unused title/dev params.
    gr.Interface(
        fn=get_model,
        inputs=[
            gr.Markdown("# Switch AI"),
            gr.Markdown("### by Kalash"),
            gr.Radio(
                [
                    "ChatGPT",
                    "Phi-4",
                    "Gemma-3",
                    "QwQ-32B",
                    "Qwen2.5",
                    "DeepSeek-R1",
                    "DeepSeek-R1-Zero",
                    "Llama-3.3",
                    "Mistral-Small",
                    "Mistral-Nemo",
                ],
                label="Choose AI Model",
                value="ChatGPT",
            ),
            gr.Textbox(label="Your Name", placeholder="Enter Your Name"),
            gr.Textbox(label="Your Query", placeholder="Enter Your Question"),
            # Typo fix: placeholder previously read "Propmt".
            gr.Textbox(label="System Prompt", placeholder="Enter Custom System Prompt (Optional)"),
        ],
        outputs=[gr.Textbox(label="AI Response", lines=25)],
        flagging_mode="never",
    ).launch(share=True)  # use share=False, server_port=... for local debugging


if __name__ == '__main__':
    main()