{
    "module": "keras_nlp.src.models.gpt2.gpt2_tokenizer",
    "class_name": "GPT2Tokenizer",
    "config": {
        "name": "gpt2_tokenizer",
        "trainable": true,
        "dtype": "int32",
        "sequence_length": null,
        "add_prefix_space": false
    },
    "registered_name": "keras_nlp>GPT2Tokenizer",
    "assets": [
        "assets/tokenizer/merges.txt",
        "assets/tokenizer/vocabulary.json"
    ],
    "weights": null
}