{ "architectures": [ "sunguk-bert" ], "act_fn": "gelu", "dropout_prob": 0.2, "hidden_size": 256, "initializer_range": 0.02, "ff_dim": 1024, "num_attention_heads": 8, "max_position_embeddings": 512, "num_hidden_layers": 12, "type_vocab_size": 2, "vocab_size": 32000 }