{ "_name_or_path": "ehartford/WizardLM-Uncensored-Falcon-7b", "alibi": false, "apply_residual_connection_post_layernorm": false, "architectures": [ "RWForCausalLM" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "ehartford/WizardLM-Uncensored-Falcon-7b--configuration_RW.RWConfig", "AutoModelForCausalLM": "ehartford/WizardLM-Uncensored-Falcon-7b--modelling_RW.RWForCausalLM" }, "bias": false, "bos_token_id": 1, "eos_token_id": 2, "hidden_dropout": 0.0, "hidden_size": 4544, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "RefinedWebModel", "multi_query": true, "n_head": 71, "n_layer": 32, "parallel_attn": true, "torch_dtype": "bfloat16", "transformers_version": "4.30.0.dev0", "use_cache": true, "vocab_size": 65025 }