# Attention weights: two synthetic Dirichlet-distributed weight vectors.
# A high concentration parameter (alpha=8) yields a near-uniform, low-entropy-spread
# distribution over 20 devices; a low one (alpha=0.5) yields a peaky distribution
# over 30 devices. Seeded for reproducibility.
np.random.seed(42)
fast_layer_weights = np.random.dirichlet(np.ones(20) * 8, 1)[0]
slow_layer_weights = np.random.dirichlet(np.ones(30) * 0.5, 1)[0]

# Merge into a single 2-row matrix, padding the shorter row with NaN so the
# heatmap leaves the missing device cells blank.
max_devices = max(len(fast_layer_weights), len(slow_layer_weights))
weights_matrix = np.full((2, max_devices), np.nan)
# Use the actual vector lengths rather than hard-coded 20/30 so the slices
# stay correct if the Dirichlet sizes above are ever changed.
weights_matrix[0, :len(fast_layer_weights)] = fast_layer_weights
weights_matrix[1, :len(slow_layer_weights)] = slow_layer_weights

# Draw the heatmap: one annotated cell per (row, device) weight.
plt.figure(figsize=(12, 4))
sns.heatmap(weights_matrix, cmap="YlGnBu", annot=True, fmt=".2f",
            yticklabels=['Low Entropy', 'High Entropy'],
            cbar_kws={'label': 'Attention Weight'})

plt.xlabel('Device Index', fontsize=12)
plt.savefig('attention_layer_heatmap_en.png', bbox_inches='tight')
plt.close()  # release the figure so repeated runs don't accumulate open figures