dar-tau committed
Commit 868605b • 1 Parent(s): e45f7c4

Update app.py

Files changed (1): app.py +9 -7
app.py CHANGED
@@ -192,17 +192,19 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
             We will follow the SelfIE implementation in this space for concreteness. Patchscopes are so general that they encompass many other interpretation techniques too!!!
             ''', line_breaks=True)
 
-        gr.Markdown('**👾 The idea is really simple: models are able to understand their own hidden states by nature! 👾**',
-                    # elem_classes=['explanation_accordion']
-                    )
+        # gr.Markdown('**👾 The idea is really simple: models are able to understand their own hidden states by nature! 👾**',
+        #             # elem_classes=['explanation_accordion']
+        #             )
         gr.Markdown(
-            '''According to the residual stream view ([nostalgebraist, 2020](https://www.lesswrong.com/posts/AcKRB8wDpdaN6v6ru/interpreting-gpt-the-logit-lens)), internal representations from different layers are transferable between layers.
+            '''
+            **👾 The idea is really simple: models are able to understand their own hidden states by nature! 👾**
+            According to the residual stream view ([nostalgebraist, 2020](https://www.lesswrong.com/posts/AcKRB8wDpdaN6v6ru/interpreting-gpt-the-logit-lens)), internal representations from different layers are transferable between layers.
             So we can inject an representation from (roughly) any layer to any layer! If I give a model a prompt of the form ``User: [X] Assistant: Sure'll I'll repeat your message`` and replace the internal representation of ``[X]`` *during computation* with the hidden state we want to understand,
             we expect to get back a summary of the information that exists inside the hidden state. Since the model uses a roughly common latent space, it can understand representations from different layers and different runs!! How cool is that! 😯😯😯
             ''', line_breaks=True)
 
-    with gr.Column(scale=1):
-        gr.Markdown('<span style="font-size:180px;">🤔</span>')
+    # with gr.Column(scale=1):
+    #     gr.Markdown('<span style="font-size:180px;">🤔</span>')
 
     with gr.Group('Interpretation'):
         interpretation_prompt = gr.Text(suggested_interpretation_prompts[0], label='Interpretation Prompt')
@@ -233,7 +235,7 @@ with gr.Blocks(theme=gr.themes.Default(), css=css) as demo:
     use_gpu = False  # gr.Checkbox(value=False, label='Use GPU')
     progress_dummy = gr.Markdown('', elem_id='progress_dummy')
 
-    interpretation_bubbles = [gr.Textbox('', container=False, visible=False, elem_classes=['bubble',
+    interpretation_bubbles = [gr.Textbox('', label=f'Layer {i}', container=False, visible=False, elem_classes=['bubble',
                                                                                             'even_bubble' if i % 2 == 0 else 'odd_bubble'])
                               for i in range(model.config.num_hidden_layers)]
 
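
For context on the Markdown this commit reshuffles: the text describes the SelfIE/Patchscopes-style trick of overwriting a placeholder token's hidden state in an "interpretation prompt" with a hidden state captured from another run or layer. Below is a minimal sketch of that idea, assuming a Hugging Face decoder-only model; the model name, placeholder index, and layer choices are illustrative assumptions, not the Space's actual implementation.

```python
# Minimal sketch of hidden-state patching (SelfIE/Patchscopes style).
# Assumptions: a decoder-only HF model, GPT-2-like block layout, and a hand-picked
# placeholder position; none of this is the app.py code from the diff above.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "gpt2"  # assumption: any causal LM with .transformer.h or .model.layers
tok = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).eval()

# 1) Run the source prompt and keep the hidden state we want to interpret.
source_prompt = "The Eiffel Tower is located in"
src_ids = tok(source_prompt, return_tensors="pt").input_ids
with torch.no_grad():
    hidden_states = model(src_ids, output_hidden_states=True).hidden_states
layer_to_read = 8                              # layer whose representation we want to understand
vector = hidden_states[layer_to_read][0, -1]   # last-token hidden state at that layer

# 2) Build an interpretation prompt with a placeholder token to overwrite.
interp_prompt = "User: X\nAssistant: Sure, I'll repeat your message:"
interp_ids = tok(interp_prompt, return_tensors="pt").input_ids
placeholder_pos = 2                            # assumption: index of the "X" token in interp_ids

# 3) Patch the placeholder's residual-stream state at an early layer via a forward pre-hook.
layer_to_write = 2
blocks = model.transformer.h if hasattr(model, "transformer") else model.model.layers

def patch_hook(module, args, kwargs):
    hidden = args[0] if args else kwargs["hidden_states"]
    if hidden.shape[1] > placeholder_pos:      # patch only on the full-prompt forward pass
        hidden = hidden.clone()
        hidden[0, placeholder_pos] = vector
    if args:
        return (hidden,) + args[1:], kwargs
    kwargs["hidden_states"] = hidden
    return args, kwargs

handle = blocks[layer_to_write].register_forward_pre_hook(patch_hook, with_kwargs=True)
try:
    out = model.generate(interp_ids, max_new_tokens=20, do_sample=False)
finally:
    handle.remove()
print(tok.decode(out[0, interp_ids.shape[1]:]))
```

Reading at a late layer and writing at an early one mirrors the "any layer to any layer" claim in the quoted text; in practice the useful read/write layers and the interpretation prompt vary by model, which is why the app exposes one output bubble per layer.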