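# Gradio demo that visualizes UL2-style mixture-of-denoisers span corruption
# (R, S, and X denoisers) with the adamcasson/ul2-tinystories tokenizer.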
import random
from functools import partial
import gradio as gr
import numpy as np
from transformers import AutoTokenizer

# The tokenizer's vocab includes the [R], [S], and [X] mode tokens used as
# prepend ids below.
tokenizer = AutoTokenizer.from_pretrained("adamcasson/ul2-tinystories")


def mask_spans(
    tokens,
    mu,
    r,
    vocab_size,
    eos_id,
    prepend_id=None,
    prefix_lm=False,
):
    masked_tokens = tokens[:]
    encoder_inputs = [prepend_id] if prepend_id is not None else []
    encoder_mask = [1] if prepend_id is not None else []
    targets = []
    targets_mask = []
    # The original T5 code reused ids at the end of the vocab as sentinels,
    # counting down from vocab_size - 1 for each new masked span:
    # https://github.com/google-research/text-to-text-transfer-transformer/blob/258fd30687e6c60d18b7204d009dc5c753142987/t5/data/preprocessors.py#L3106C6-L3106C6
    sentinel_id = vocab_size - 1
    if prefix_lm:
        # S denoiser (prefix LM): mask a single span (n = 1) at the end of the
        # sequence, with mean length mu = r * len(tokens).
        mu = max(1, int(len(tokens) * r))
        # Sample the span length uniformly from [1, 2 * mu]; the max() clamps
        # start at 0 so the span never runs past the start of the sequence.
        start = max(0, len(tokens) - random.randint(1, int(2 * mu)))
        encoder_inputs += tokens[:start] + [sentinel_id]
        encoder_mask += [1] * len(tokens[:start]) + [0]
        targets += [sentinel_id] + tokens[start:]
        targets_mask += [0] + [1] * len(tokens[start:])
        for i in range(start, len(tokens)):
            masked_tokens[i] = -1
    else:
        # R and X denoisers: walk the sequence in spans of mean length mu
        # (roughly n = ceil(len(tokens) / mu) spans in total), masking each
        # span independently with probability r.
        prev_span_unmasked = False
        start = 0
        end = 0
        while start < len(tokens):
            # Sample each span length from a normal distribution, bounded
            # between 1 and 2 * mu. The std of 0.25 * mu is arbitrary (the
            # paper does not specify one) but gives a sane-looking distribution
            # even at the extreme ends of the span-length means (from 3 to 64).
            length = max(1, min(int(2 * mu), int(np.round(np.random.normal(mu, 0.25 * mu)))))
            end = min(start + length, len(tokens))
            # Mask this span with probability r.
            if np.random.binomial(1, p=r):
                encoder_inputs.append(sentinel_id)
                encoder_mask.append(0)
                targets += tokens[start:end]
                targets_mask += [1] * len(tokens[start:end])
                for i in range(start, end):
                    masked_tokens[i] = -1
                prev_span_unmasked = False
                sentinel_id -= 1
            else:
                encoder_inputs += tokens[start:end]
                encoder_mask += [1] * len(tokens[start:end])
                # A run of consecutive unmasked spans collapses to a single
                # sentinel in the targets, so only append one per run.
                if not prev_span_unmasked:
                    targets.append(sentinel_id)
                    targets_mask.append(0)
                prev_span_unmasked = True
            start = end
    targets.append(eos_id)
    targets_mask.append(1)
    # Decoder input is the target sequence shifted right, with eos serving as
    # the start-of-sequence token.
    decoder_inputs = [eos_id] + targets[:-1]
    decoder_mask = [1] + targets_mask[:-1]
    return encoder_inputs, encoder_mask, decoder_inputs, decoder_mask, targets, targets_mask, masked_tokens
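
# Illustrative example (hypothetical ids; real spans are random). For a
# 10-token sequence with vocab_size=100 and eos_id=1:
#   mask_spans(list(range(10, 20)), mu=3, r=0.5, vocab_size=100, eos_id=1)
# might mask tokens 13-15 and 18-19, producing:
#   encoder_inputs: [10, 11, 12, 99, 16, 17, 98]     (99 and 98 are sentinels)
#   targets:        [99, 13, 14, 15, 98, 18, 19, 1]  (masked spans + eos)
#   decoder_inputs: [1, 99, 13, 14, 15, 98, 18, 19]  (targets shifted right)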

# UL2 mixture of denoisers: R (regular span corruption), S (sequential /
# prefix LM), and X (extreme corruption: longer spans or higher rates).
denoiser_map = {
    "R (µ = 3, r = 0.15)": partial(
        mask_spans,
        mu=3,
        r=0.15,
        vocab_size=tokenizer.vocab_size,
        eos_id=tokenizer.eos_token_id,
        prepend_id=tokenizer.vocab["[R]"],
    ),
    "R (µ = 8, r = 0.15)": partial(
        mask_spans,
        mu=8,
        r=0.15,
        vocab_size=tokenizer.vocab_size,
        eos_id=tokenizer.eos_token_id,
        prepend_id=tokenizer.vocab["[R]"],
    ),
    "S (r = 0.25)": partial(
        mask_spans,
        mu=None,
        r=0.25,
        vocab_size=tokenizer.vocab_size,
        eos_id=tokenizer.eos_token_id,
        prefix_lm=True,
        prepend_id=tokenizer.vocab["[S]"],
    ),
    "X (µ = 3, r = 0.5)": partial(
        mask_spans,
        mu=3,
        r=0.5,
        vocab_size=tokenizer.vocab_size,
        eos_id=tokenizer.eos_token_id,
        prepend_id=tokenizer.vocab["[X]"],
    ),
    "X (µ = 8, r = 0.5)": partial(
        mask_spans,
        mu=8,
        r=0.5,
        vocab_size=tokenizer.vocab_size,
        eos_id=tokenizer.eos_token_id,
        prepend_id=tokenizer.vocab["[X]"],
    ),
    "X (µ = 32, r = 0.15)": partial(
        mask_spans,
        mu=32,
        r=0.15,
        vocab_size=tokenizer.vocab_size,
        eos_id=tokenizer.eos_token_id,
        prepend_id=tokenizer.vocab["[X]"],
    ),
    "X (µ = 32, r = 0.5)": partial(
        mask_spans,
        mu=32,
        r=0.5,
        vocab_size=tokenizer.vocab_size,
        eos_id=tokenizer.eos_token_id,
        prepend_id=tokenizer.vocab["[X]"],
    ),
}
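
# During UL2-style pretraining, a denoiser would be sampled per example; a
# minimal sketch (uniform sampling shown, which may not match the paper's
# mixture weights; `ids` is a hypothetical list of token ids):
#   name = random.choice(list(denoiser_map))
#   enc_in, enc_mask, dec_in, dec_mask, tgt, tgt_mask, _ = denoiser_map[name](ids)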

def mask_viz(denoiser, text):
    seq = tokenizer.encode(text)
    # Derive token strings from the encoded ids so the two lists stay aligned
    # even if the tokenizer inserts special tokens during encoding.
    tokens = tokenizer.convert_ids_to_tokens(seq)
    enc_in, enc_mask, dec_in, dec_mask, targets, targets_mask, mask = denoiser_map[denoiser](seq)

    # "Ġ" and "Ċ" are the byte-level BPE markers for a leading space and a
    # newline; map them back for display.
    def to_text(tok):
        return tok.replace("Ġ", " ").replace("Ċ", "\n")

    highlight_tok = [
        (to_text(tok), "masked" if tok_mask == -1 else "unmasked")
        for tok, tok_mask in zip(tokens, mask)
    ]

    # Sentinel positions (mask == 0) are shown as raw ids so they stand out.
    highlight_enc = []
    enc_tok = tokenizer.convert_ids_to_tokens(enc_in)
    for tok_id, tok, tok_mask in zip(enc_in, enc_tok, enc_mask):
        highlight_enc.append((to_text(tok) if tok_mask == 1 else f" {tok_id}", "masked" if tok_mask == 0 else "unmasked"))

    highlight_dec = []
    dec_tok = tokenizer.convert_ids_to_tokens(dec_in)
    for tok_id, tok, tok_mask in zip(dec_in, dec_tok, dec_mask):
        highlight_dec.append((to_text(tok) if tok_mask == 1 else f" {tok_id}", "masked" if tok_mask == 0 else "unmasked"))

    return highlight_tok, highlight_enc, highlight_dec
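
# Gradio UI: a dropdown selects the denoiser configuration and a textbox
# supplies the story text; the three highlighted panes show the corrupted
# tokens, the encoder input, and the (shifted) decoder input.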
iface = gr.Interface(
    fn=mask_viz,
    inputs=[
        gr.Dropdown(
            label="Denoiser",
            choices=[
                "R (µ = 3, r = 0.15)",
                "R (µ = 8, r = 0.15)",
                "S (r = 0.25)",
                "X (µ = 3, r = 0.5)",
                "X (µ = 8, r = 0.5)",
                "X (µ = 32, r = 0.15)",
                "X (µ = 32, r = 0.5)",
            ],
            value="R (µ = 3, r = 0.15)",
        ),
        gr.Textbox(
            value='Once upon a time, there was a clever little dog named Max. Max loved to run and play with his friends in the park. One day, Max was running very fast when he fell and hurt his knee. Max went to his friend, the wise old owl, and said, "Owl, my knee hurts. What can I do?" The owl thought for a moment and said, "Max, you should test your knee. Try to walk slowly and see if it still hurts." So Max tested his knee by walking slowly. At first, it hurt a little, but soon Max felt better. He said, "Thank you, Owl, for your help. Now I can play with my friends again." Max was so happy that he could play with his friends without pain. He learned that sometimes, it was good to slow down and listen to his body. And Max and his friends played happily in the park ever after.'
        ),
    ],
    outputs=[
        gr.HighlightedText(
            label="Corrupted spans",
            combine_adjacent=True,
            show_legend=True,
            color_map={"unmasked": "green", "masked": "red"},
        ),
        gr.HighlightedText(
            label="Encoder input",
            combine_adjacent=True,
            show_legend=True,
            color_map={"unmasked": "green", "masked": "red"},
        ),
        gr.HighlightedText(
            label="Decoder input",
            combine_adjacent=True,
            show_legend=True,
            color_map={"unmasked": "green", "masked": "red"},
        ),
    ],
)

iface.launch()