{ "module": "keras_hub.src.models.llama3.llama3_causal_lm", "class_name": "Llama3CausalLM", "config": { "backbone": { "module": "keras_hub.src.models.llama3.llama3_backbone", "class_name": "Llama3Backbone", "config": { "name": "llama3_backbone", "trainable": true, "vocabulary_size": 128256, "num_layers": 16, "num_query_heads": 32, "hidden_dim": 2048, "intermediate_dim": 8192, "rope_max_wavelength": 10000, "rope_scaling_factor": 1.0, "num_key_value_heads": 8, "layer_norm_epsilon": 1e-06, "dropout": 0 }, "registered_name": "keras_hub>Llama3Backbone" }, "preprocessor": { "module": "keras_hub.src.models.llama3.llama3_causal_lm_preprocessor", "class_name": "Llama3CausalLMPreprocessor", "config": { "name": "llama3_causal_lm_preprocessor", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "float32" }, "registered_name": null }, "tokenizer": { "module": "keras_hub.src.models.llama3.llama3_tokenizer", "class_name": "Llama3Tokenizer", "config": { "name": "llama3_tokenizer", "trainable": true, "dtype": { "module": "keras", "class_name": "DTypePolicy", "config": { "name": "int32" }, "registered_name": null }, "config_file": "tokenizer.json", "sequence_length": null, "add_prefix_space": false, "unsplittable_tokens": [ "<|eom_id|>", "<|eot_id|>", "<|finetune_right_pad_id|>", "<|begin_of_text|>", "<|python_tag|>", "<|start_header_id|>", "<|end_of_text|>", "<|end_header_id|>" ] }, "registered_name": "keras_hub>Llama3Tokenizer" }, "config_file": "preprocessor.json", "sequence_length": 1024, "add_start_token": true, "add_end_token": true }, "registered_name": "keras_hub>Llama3CausalLMPreprocessor" }, "name": "llama3_causal_lm" }, "registered_name": "keras_hub>Llama3CausalLM" }