{
  "architectures": ["GPT"],
  "block_size": 512,
  "d_model": 768,
  "dropout": 0.0,
  "model_type": "custom_gpt",
  "n_heads": 12,
  "n_layers": 12,
  "torch_dtype": "float32",
  "transformers_version": "4.42.3",
  "vocab_size": 50257
}