dar-tau committed on
Commit
6ff782d
1 Parent(s): 223968c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -4
app.py CHANGED
@@ -11,14 +11,13 @@ tokenizer = AutoTokenizer.from_pretrained('facebook/opt-350m', add_prefix_space=
11
def analyze_sentence(index):
    """Plot the summed attention map of dataset sentence *index* as a labeled heatmap.

    Reads ``dataset``, ``tokenizer``, ``np``, ``sns`` and ``plt`` from module scope.

    Parameters
    ----------
    index : int
        Row index into ``dataset``.
    """
    row = dataset[index]
    text = row['text']
    # Fix: the original tokenized `text` twice (once per tick axis); do it once.
    tokens = tokenizer.tokenize(text, add_special_tokens=False)
    # Drop the leading axis of the stored shape (presumably a batch axis) — TODO confirm.
    attn_map_shape = row['attention_maps_shape'][1:]
    seq_len = attn_map_shape[1]
    attn_maps = np.array(row['attention_maps']).reshape(*attn_map_shape)
    # Sum over the leading axis and drop the first row/column (presumably a
    # special token position) before plotting — TODO confirm.
    plot = sns.heatmap(attn_maps.sum(0)[1:, 1:])
    ticks = np.arange(seq_len - 1) + 0.5
    plt.xticks(ticks, tokens, rotation=90)
    plt.yticks(ticks, tokens, rotation=0)
    plt.ylabel('TARGET')
    plt.xlabel('SOURCE')
    plt.grid()
 
11
def analyze_sentence(index):
    """Render the summed attention map of one dataset sentence as a heatmap.

    Reads ``dataset``, ``tokenizer``, ``np``, ``sns`` and ``plt`` from module scope.

    Parameters
    ----------
    index : int
        Row index into ``dataset``.
    """
    example = dataset[index]
    sentence = example['text']
    tokenized = tokenizer.tokenize(sentence, add_special_tokens=False)
    # Drop the leading axis of the stored shape (presumably a batch axis) — TODO confirm.
    attn_map_shape = example['attention_maps_shape'][1:]
    seq_len = attn_map_shape[1]
    attn_maps = np.array(example['attention_maps']).reshape(*attn_map_shape)
    # Sum over the leading axis; discard the first row/column (presumably a
    # special token position) — TODO confirm.
    plot = sns.heatmap(attn_maps.sum(0)[1:, 1:])
    tick_positions = np.arange(seq_len - 1) + 0.5
    # NOTE(review): assumes len(tokenized[1:]) == seq_len - 1 — confirm against the dataset.
    plt.xticks(tick_positions, tokenized[1:], rotation=90)
    plt.yticks(tick_positions, tokenized[1:], rotation=0)
    plt.ylabel('TARGET')
    plt.xlabel('SOURCE')
    plt.grid()