{
    "architectures": [
        "GPT2LMHeadModel"
    ],
    "auto_map": {
        "AutoConfig": "GPT2Config",
        "AutoModel": "GPT2Model",
        "AutoModelForCausalLM": "GPT2LMHeadModel",
        "AutoModelForQuestionAnswering": "GPT2ForQuestionAnswering"
    },
    "batch_size": 12,
    "block_size": 100,
    "device": "cpu",
    "eval_interval": 250,
    "hidden_dropout_prob": 0.0,
    "hidden_size": 300,
    "learning_rate": 0.001,
    "max_iters": 6000,
    "num_attention_heads": 6,
    "num_hidden_layers": 6,
    "torch_dtype": "float16",
    "transformers_version": "4.33.2",
    "vocab_size": 1000
}