Annorita's picture
Update utils.py
bfc00c8 verified
raw
history blame
1.2 kB
from transformers import AutoTokenizer
import itertools
def get_color():
    """Return an endless iterator cycling through the fixed highlight palette.

    Each call to ``next()`` yields the next hex color string, wrapping
    around after the sixth color.
    """
    palette = (
        '#df7b55',
        '#2c7482',
        '#2c8234',
        '#5581df',
        '#822c63',
        '#b355df',
    )
    return itertools.cycle(palette)
def get_res(model_name, input_sentence, single_print=True):
    """Tokenize *input_sentence* with *model_name*'s tokenizer and render each
    token as a color-highlighted HTML ``<span>``.

    Parameters
    ----------
    model_name : str
        Model id or path passed to ``AutoTokenizer.from_pretrained``
        (loaded with ``trust_remote_code=True``).
    input_sentence : str
        Text to tokenize. Special tokens are not added.
    single_print : bool, default True
        If True, print the HTML followed by the token count and return None.
        If False, return ``(html, token_num)``.

    Returns
    -------
    tuple[str, int] | None
        ``(html, token_num)`` when ``single_print`` is False, else None.
    """
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    color_iterator = get_color()
    token_ids = tokenizer.encode(input_sentence, add_special_tokens=False)
    token_num = len(token_ids)

    # Decode cumulative prefixes and diff against the previous prefix to get
    # the exact text each token contributes. Decoding tokens one at a time
    # can yield replacement characters for tokens that are partial UTF-8
    # sequences (e.g. byte-level BPE), so this prefix workaround is kept.
    pieces = []
    prev_text = ""
    for i in range(token_num):
        text = tokenizer.decode(token_ids[:i + 1])
        pieces.append(text[len(prev_text):])
        prev_text = text

    # BUG FIX: the original comprehension iterated over the raw integer token
    # ids (`out`) instead of the decoded pieces, so the rendered HTML showed
    # token ids rather than token text. Wrap the decoded pieces instead.
    spans = [
        f'<span style="font-size:1.25em;background-color:{next(color_iterator)}">{piece}</span>'
        for piece in pieces
    ]
    res = ''.join(spans)

    if single_print:
        print(res + str(token_num))
    else:
        return res, token_num