sayakpaul (HF staff) committed
Commit bceef9f
1 Parent(s): 8ab76fa

Upload app.py

Files changed (1)
  1. app.py +30 -0
app.py ADDED
@@ -0,0 +1,30 @@
+ import gradio as gr
+ from huggingface_hub.keras_mixin import from_pretrained_keras
+ from PIL import Image
+
+ import utils
+
+ _MODEL = from_pretrained_keras("probing-vits/vit_b16_patch16_224_i21k_i1k")
+
+
+ def show_rollout(image):
+     _, preprocessed_image = utils.preprocess_image(image, "original_vit")
+     _, attention_scores_dict = _MODEL.predict(preprocessed_image)
+     result = utils.attention_rollout_map(
+         image, attention_scores_dict, "original_vit"
+     )
+     return Image.fromarray(result)
+
+
+ title = "Generate Attention Rollout Plots"
+ article = "Attention Rollout was proposed by [Abnar et al.](https://arxiv.org/abs/2005.00928) to quantify the information that flows through self-attention layers. In the original ViT paper ([Dosovitskiy et al.](https://arxiv.org/abs/2010.11929)), the authors use it to investigate the representations learned by ViTs. The model used in the backend is a ViT B-16 model. For more details about it, refer to [this notebook](https://github.com/sayakpaul/probing-vits/blob/main/notebooks/load-jax-weights-vitb16.ipynb)."
+
+ iface = gr.Interface(
+     show_rollout,
+     gr.inputs.Image(type="pil", label="Input Image"),
+     "image",
+     title=title,
+     article=article,
+     allow_flagging="never",
+ )
+ iface.launch(share=True)
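
For readers unfamiliar with the method named in the `article` string, the following is a minimal NumPy sketch of Attention Rollout as described by Abnar et al. It is not the code behind `utils.attention_rollout_map`; the dictionary layout (layer name mapped to a (batch, heads, tokens, tokens) array) and the layer ordering are assumptions made for illustration only.

import numpy as np


def attention_rollout(attention_scores_dict):
    """Sketch of Attention Rollout (Abnar et al., 2020).

    Assumes `attention_scores_dict` maps layer names to arrays of shape
    (batch, num_heads, num_tokens, num_tokens) and that iterating over
    its values yields layers in order from first to last (assumption).
    """
    # Average the attention weights over heads for each layer.
    attentions = [scores.mean(axis=1) for scores in attention_scores_dict.values()]

    num_tokens = attentions[0].shape[-1]
    rollout = np.eye(num_tokens)
    for attention in attentions:
        # Add the identity to account for the residual (skip) connection,
        # then re-normalize the rows so each remains a distribution.
        attention_with_residual = attention[0] + np.eye(num_tokens)
        attention_with_residual /= attention_with_residual.sum(axis=-1, keepdims=True)
        # Multiply the per-layer matrices to propagate attention across layers.
        rollout = attention_with_residual @ rollout

    # Attention of the [CLS] token over the image patches (drop the CLS column).
    cls_attention = rollout[0, 1:]
    grid = int(np.sqrt(cls_attention.shape[0]))  # 14x14 for ViT-B/16 at 224x224
    return cls_attention.reshape(grid, grid)

The returned grid can then be upsampled to the input resolution and overlaid on the image, which is roughly what the Space's attention-rollout plot shows.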