{
    "architectures": [
        "GPT2LMHeadModel"
    ],
    "auto_map": {
        "AutoConfig": "GPT2Config",
        "AutoModel": "GPT2LMHeadModel"
    },
    "batch_size": 12,
    "block_size": 100,
    "device": "cpu",
    "eval_interval": 250,
    "hidden_dropout_prob": 0.0,
    "n_embd": 300,
    "n_head": 6,
    "n_layer": 6,
    "learning_rate": 0.001,
    "max_iters": 6000,
    "torch_dtype": "float16",
    "transformers_version": "4.33.2",
    "vocab_size": 1000
}
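
For context, this config.json mixes standard Hugging Face model fields (architectures, n_embd, n_head, n_layer, vocab_size, torch_dtype, transformers_version) with training hyperparameters (batch_size, block_size, eval_interval, learning_rate, max_iters) that transformers will carry along as extra attributes but not act on. Below is a minimal sketch of consuming the file with the transformers library; the file path "config.json" and the choice to map block_size onto n_positions are assumptions, not specified by the source.

```python
import json

from transformers import GPT2Config, GPT2LMHeadModel

# Read the raw JSON to get both the model fields and the training
# hyperparameters (batch_size, learning_rate, max_iters, ...).
with open("config.json") as f:  # path is an assumption
    raw = json.load(f)

# Build a GPT2Config from the architectural fields. Mapping
# block_size -> n_positions (the GPT-2 context length) is an assumption.
config = GPT2Config(
    vocab_size=raw["vocab_size"],   # 1000
    n_embd=raw["n_embd"],           # 300 (head dim 300 / 6 = 50)
    n_head=raw["n_head"],           # 6
    n_layer=raw["n_layer"],         # 6
    n_positions=raw["block_size"],  # 100
)

# Randomly initialized GPT-2 LM matching the config; trained weights
# would come from an accompanying checkpoint, which this file alone lacks.
model = GPT2LMHeadModel(config)
print(f"{sum(p.numel() for p in model.parameters()):,} parameters")
```

With vocab_size 1000 and n_embd 300 this describes a small model, on the order of a few million parameters, which is consistent with device "cpu" and the short 100-token block_size.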