File size: 599 Bytes
751936e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
428b731
 
 
751936e
 
428b731
751936e
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27


import tiktoken
from tiktoken import Encoding

# Module-wide tokenizer instance for the GPT-3.5-turbo model.
tokenizer = tiktoken.encoding_for_model('gpt-3.5-turbo')

# Alias tiktoken's `n_vocab` under the name `vocab_size`.
# NOTE(review): presumably for compatibility with code written against the
# HuggingFace tokenizer interface — confirm with callers.
tokenizer.vocab_size = tokenizer.n_vocab


def decode(self, tokens: list[int], errors: str = "replace") -> str:
    """Decode token ids to text, returning the sentinel "null" on failure.

    Monkey-patch replacement for ``tiktoken.Encoding.decode`` that never
    raises: if the token ids cannot be decoded (e.g. an id outside the
    vocabulary), the string "null" is returned instead of an exception.

    Args:
        tokens: Sequence of token ids to decode.
        errors: Error-handling scheme forwarded to ``bytes.decode``.

    Returns:
        The decoded text, or "null" when decoding fails.
    """
    try:
        return self._core_bpe.decode_bytes(tokens).decode("utf-8", errors=errors)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any decoding error yields the "null" sentinel.
        return "null"

def convert_ids_to_tokens(self, tokens: list[int]) -> list[bytes]:
    """Map token ids to their raw byte-level token representations.

    HuggingFace-style helper monkey-patched onto ``tiktoken.Encoding``.

    Bug fix: the original body called the module-global ``tokenizer``
    instead of ``self``, so every Encoding instance silently used the
    gpt-3.5-turbo vocabulary regardless of its own encoding.

    Args:
        tokens: Sequence of token ids.

    Returns:
        One ``bytes`` object per token id.
    """
    return self.decode_tokens_bytes(tokens)


# Install the helpers on tiktoken's Encoding class itself, so every Encoding
# instance (not just the module-level `tokenizer`) picks them up. This
# replaces tiktoken's own Encoding.decode with the tolerant version above.
Encoding.decode = decode
Encoding.convert_ids_to_tokens = convert_ids_to_tokens