bibleGPT / config.json
{
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "auto_map": {
    "AutoConfig": "GPT2Config",
    "AutoModel": "GPT2LMHeadModel"
  },
  "batch_size": 12,
  "block_size": 100,
  "device": "cpu",
  "eval_interval": 250,
  "hidden_dropout_prob": 0.0,
  "n_embd": 300,
  "n_head": 6,
  "n_layer": 6,
  "learning_rate": 0.001,
  "max_iters": 6000,
  "torch_dtype": "float16",
  "transformers_version": "4.33.2",
  "vocab_size": 1000
}
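
A minimal sketch of how this config could be instantiated with `transformers`. Assumptions: the training-only keys (`batch_size`, `learning_rate`, `max_iters`, `eval_interval`, `device`) are not standard `GPT2Config` fields and are omitted here, and `block_size` is mapped to GPT-2's `n_positions` context length.

```python
from transformers import GPT2Config, GPT2LMHeadModel

# Rebuild the architecture described by the config above.
config = GPT2Config(
    vocab_size=1000,
    n_embd=300,
    n_head=6,          # 300 / 6 = 50 dims per attention head
    n_layer=6,
    n_positions=100,   # assumption: block_size is the context length
)

# Instantiate a randomly initialized model with this architecture.
model = GPT2LMHeadModel(config)

# Sanity check: print the parameter count of this small model.
print(sum(p.numel() for p in model.parameters()))
```

With a 1,000-token vocabulary, a 300-dimensional embedding, and 6 layers, this is a very small GPT-2 variant suited to character- or word-level training on a single corpus.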