{
    "module": "keras_hub.src.models.llama3.llama3_tokenizer",
    "class_name": "Llama3Tokenizer",
    "config": {
        "name": "llama3_tokenizer_1",
        "trainable": true,
        "dtype": {
            "module": "keras",
            "class_name": "DTypePolicy",
            "config": {
                "name": "int32"
            },
            "registered_name": null
        },
        "config_file": "tokenizer.json",
        "sequence_length": null,
        "add_prefix_space": false,
        "unsplittable_tokens": [
            "<|begin_of_text|>",
            "<|eot_id|>",
            "<|end_header_id|>",
            "<|finetune_right_pad_id|>",
            "<|end_of_text|>",
            "<|eom_id|>",
            "<|python_tag|>",
            "<|start_header_id|>"
        ]
    },
    "registered_name": "keras_hub>Llama3Tokenizer"
}