{ "architectures": [ "BloomForCausalLM" ], "vocab_size": 50257, "hidden_size": 4096, "tie_word_embeddings": true, "n_layer": 30, "hidden_dropout": 0.0, "layer_norm_epsilon": 1e-05, "n_head": 32, "attention_dropout": 0.0, "model_type": "bloom", "torch_dtype": "float16", "transformers_version": "4.35.2" }