LennardZuendorf committed
Commit dacf466
1 Parent(s): aaf0c9d

feat: fixing controller to use mistral, adding info, cleanup

Files changed (2):
  1. backend/controller.py +11 -4
  2. main.py +8 -3
backend/controller.py CHANGED
@@ -7,7 +7,11 @@ import gradio as gr
 # internal imports
 from model import godel
 from model import mistral
-from explanation import interpret_shap as shap_int, visualize as viz
+from explanation import (
+    interpret_shap as shap_int,
+    interpret_captum as cpt_int,
+    visualize as viz,
+)
 
 
 # main interference function that that calls chat functions depending on selections
@@ -37,7 +41,10 @@ def interference(
     # matching selection
     match xai_selection.lower():
         case "shap":
-            xai = shap_int
+            if model_selection.lower == "mistral":
+                xai = cpt_int
+            else:
+                xai = shap_int
         case "attention":
             xai = viz
         case _:
@@ -102,9 +109,9 @@ def explained_chat(
     model, xai, message: str, history: list, system_prompt: str, knowledge: str = ""
 ):
     # formatting the prompt using the model's format_prompt function
-    #message, history, system_prompt, knowledge = mdl.prompt_limiter(
+    # message, history, system_prompt, knowledge = mdl.prompt_limiter(
     #    message, history, system_prompt, knowledge
-    #)
+    # )
     prompt = model.format_prompt(message, history, system_prompt, knowledge)
 
     # generating an answer using the methods chat function
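
For reference, a minimal sketch of the routing these controller hunks appear to aim for. The select_xai helper below is hypothetical (in the repo the logic lives inline in interference()), and it assumes the repository's explanation package is importable. Note that model_selection.lower in the added hunk is a bound method, not a string, so the comparison presumably needs the call form model_selection.lower(), which the sketch uses.

# minimal sketch, assuming the repository's explanation package is on the path;
# select_xai is a hypothetical helper mirroring the match block in interference()
from explanation import (
    interpret_shap as shap_int,
    interpret_captum as cpt_int,
    visualize as viz,
)


def select_xai(xai_selection: str, model_selection: str):
    # route to an explanation backend based on the UI selections
    match xai_selection.lower():
        case "shap":
            # Mistral attributions run through Captum, GODEL stays on SHAP;
            # note the call .lower(), not the bound method .lower
            if model_selection.lower() == "mistral":
                return cpt_int
            return shap_int
        case "attention":
            return viz
        case _:
            # no (or unknown) XAI method selected
            return None

Keeping the Mistral/Captum branch inside the "shap" case leaves the UI choice unchanged while swapping the attribution backend per model.
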
main.py CHANGED
@@ -64,6 +64,9 @@ def xai_info(xai_radio):
     else:
         gr.Info("No XAI method was selected.")
 
+def model_info(model_radio):
+    # displays the selected model using the Gradio Info component
+    gr.Info(f"The following model was selected:\n {model_radio} ")
 
 # ui interface based on Gradio Blocks
 # see https://www.gradio.app/docs/interface)
@@ -135,6 +138,7 @@ with gr.Blocks(
     # calling info functions on inputs/submits for different settings
     system_prompt.submit(system_prompt_info, [system_prompt])
     xai_selection.input(xai_info, [xai_selection])
+    model_selection.input(model_info, [model_selection])
 
     # row with chatbot ui displaying "conversation" with the model
     with gr.Row(equal_height=True):
@@ -164,9 +168,10 @@
         )
         # extenable components for extra knowledge
         with gr.Accordion(label="Additional Knowledge", open=False):
-            gr.Markdown(
-                "*Hint:* Add extra knowledge to see GODEL work the best."
-            )
+            gr.Markdown("""
+                *Hint:* Add extra knowledge to see GODEL work the best.
+                Knowledge doesn't work mith Mistral and will be ignored.
+                """)
             # textbox to enter the knowledge
             knowledge_input = gr.Textbox(
                 value="",
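
For context, a self-contained sketch of the info-callback pattern the main.py hunks add, assuming a recent Gradio release that provides gr.Info and the .input() event listener. The Radio choices and labels below are illustrative placeholders, not the app's actual configuration.

import gradio as gr


def model_info(model_radio):
    # display the selected model in a transient Gradio Info toast
    gr.Info(f"The following model was selected:\n {model_radio} ")


with gr.Blocks() as demo:
    # placeholder radio; the real app defines its own model_selection component
    model_selection = gr.Radio(
        choices=["GODEL", "Mistral"],
        value="GODEL",
        label="Model Selection",
    )
    # fire the info toast whenever the selection changes
    model_selection.input(model_info, [model_selection])

if __name__ == "__main__":
    demo.launch()

Because gr.Info only raises a non-blocking toast, wiring it to .input() gives immediate feedback on every selection change without interrupting the rest of the UI.
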