gpt-neo-1-3B-orpo / config.json
{
  "model_type": "gpt_neo",
  "transformers_version": "4.10.0",
  "vocab_size": 50257,
  "embedding_size": 1280,
  "num_hidden_layers": 12,
  "num_attention_heads": 16,
  "intermediate_size": 5120,
  "hidden_act": "gelu",
  "max_position_embeddings": 2048,
  "type_vocab_size": 2,
  "initializer_range": 0.02,
  "layer_norm_eps": 1e-5,
  "pad_token_id": 50256,
  "adapter_config": {
    "lora_config": {
      "r": 16,
      "lora_alpha": 32,
      "lora_dropout": 0.05,
      "bias": "none"
    }
  }
}
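
The nested "adapter_config.lora_config" block records LoRA fine-tuning hyperparameters. Below is a minimal sketch of how those four values could map onto a PEFT LoraConfig; the base checkpoint id "EleutherAI/gpt-neo-1.3B" and the task_type are assumptions for illustration only, since this config file does not name the base weights or the task.

# Hedged sketch: rebuild the LoRA settings from the adapter_config block above
# using the PEFT library. Base model id and task_type are assumptions, not
# values taken from this config.json.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

base = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B")  # assumed base checkpoint

lora_cfg = LoraConfig(
    r=16,                   # adapter rank, from lora_config.r
    lora_alpha=32,          # scaling factor, from lora_config.lora_alpha
    lora_dropout=0.05,      # dropout on the LoRA branch, from lora_config.lora_dropout
    bias="none",            # bias terms are not trained, from lora_config.bias
    task_type="CAUSAL_LM",  # assumption: causal LM fine-tuning (ORPO)
)

model = get_peft_model(base, lora_cfg)
model.print_trainable_parameters()  # shows how few parameters the adapter trains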