{
  "vocab_size": 50257,
  "block_size": 128,
  "n_layer": 6,
  "n_head": 6,
  "n_embd": 384,
  "dropout": 0.1,
  "bias": true,
  "architectures": [
    "GPT"
  ],
  "model_type": "gpt",
  "tokenizer_class": "GPT2Tokenizer"
}