Clemet committed on
Commit
f3ed291
1 Parent(s): 049e137

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +2 -2
  2. requirements.txt +2 -1
app.py CHANGED
@@ -148,7 +148,7 @@ if btn:
148
  html_action='return'
149
  )._repr_html_(), height=775, width=1000, scrolling=True)
150
 
151
- with st.expander('Attention'):
152
  attention_matrix = attentions[layer][0, head].detach().numpy()
153
  separator_token = roberta_tokenizer.sep_token
154
  sep_token_index = tokens.index(separator_token) if separator_token in tokens else len(tokens) - 1
@@ -181,7 +181,7 @@ if btn:
181
  html_action='return'
182
  )._repr_html_(), height=375, width=1000, scrolling=True)
183
 
184
- with st.expander('Attention'):
185
  attention_matrix = attentions[layer][0, head].detach().numpy()
186
  separator_token = distilbert_tokenizer.sep_token
187
  sep_token_index = tokens.index(separator_token) if separator_token in tokens else len(tokens) - 1
 
148
  html_action='return'
149
  )._repr_html_(), height=775, width=1000, scrolling=True)
150
 
151
+ with st.expander('Attention at selected layer and head'):
152
  attention_matrix = attentions[layer][0, head].detach().numpy()
153
  separator_token = roberta_tokenizer.sep_token
154
  sep_token_index = tokens.index(separator_token) if separator_token in tokens else len(tokens) - 1
 
181
  html_action='return'
182
  )._repr_html_(), height=375, width=1000, scrolling=True)
183
 
184
+ with st.expander('Attention at selected layer and head'):
185
  attention_matrix = attentions[layer][0, head].detach().numpy()
186
  separator_token = distilbert_tokenizer.sep_token
187
  sep_token_index = tokens.index(separator_token) if separator_token in tokens else len(tokens) - 1
requirements.txt CHANGED
@@ -3,4 +3,5 @@ safetensors
3
  transformers
4
  streamlit
5
  plotly
6
- bertviz
 
 
3
  transformers
4
  streamlit
5
  plotly
6
+ bertviz
7
+ ipython