{
"_name_or_path": "AutoNLP",
"_num_labels": 41,
"architectures": [
"BertForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "ABC Studios",
"1": "Blizzard Entertainment",
"2": "Capcom",
"3": "Cartoon Network",
"4": "Clive Barker",
"5": "DC Comics",
"6": "Dark Horse Comics",
"7": "Disney",
"8": "Dreamworks",
"9": "George Lucas",
"10": "George R. R. Martin",
"11": "Hanna-Barbera",
"12": "HarperCollins",
"13": "Hasbro",
"14": "IDW Publishing",
"15": "Ian Fleming",
"16": "Icon Comics",
"17": "Image Comics",
"18": "J. K. Rowling",
"19": "J. R. R. Tolkien",
"20": "Konami",
"21": "Lego",
"22": "Marvel Comics",
"23": "Matt Groening",
"24": "Mattel",
"25": "Microsoft",
"26": "Mortal Kombat",
"27": "NBC - Heroes",
"28": "Namco",
"29": "Nintendo",
"30": "Sega",
"31": "Shueisha",
"32": "Sony Pictures",
"33": "South Park",
"34": "Star Trek",
"35": "Stephen King",
"36": "SyFy",
"37": "Team Epic TV",
"38": "Ubisoft",
"39": "Universal Studios",
"40": "Wildstorm"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"ABC Studios": 0,
"Blizzard Entertainment": 1,
"Capcom": 2,
"Cartoon Network": 3,
"Clive Barker": 4,
"DC Comics": 5,
"Dark Horse Comics": 6,
"Disney": 7,
"Dreamworks": 8,
"George Lucas": 9,
"George R. R. Martin": 10,
"Hanna-Barbera": 11,
"HarperCollins": 12,
"Hasbro": 13,
"IDW Publishing": 14,
"Ian Fleming": 15,
"Icon Comics": 16,
"Image Comics": 17,
"J. K. Rowling": 18,
"J. R. R. Tolkien": 19,
"Konami": 20,
"Lego": 21,
"Marvel Comics": 22,
"Matt Groening": 23,
"Mattel": 24,
"Microsoft": 25,
"Mortal Kombat": 26,
"NBC - Heroes": 27,
"Namco": 28,
"Nintendo": 29,
"Sega": 30,
"Shueisha": 31,
"Sony Pictures": 32,
"South Park": 33,
"Star Trek": 34,
"Stephen King": 35,
"SyFy": 36,
"Team Epic TV": 37,
"Ubisoft": 38,
"Universal Studios": 39,
"Wildstorm": 40
},
"layer_norm_eps": 1e-12,
"max_length": 512,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"padding": "max_length",
"position_embedding_type": "absolute",
"problem_type": "single_label_classification",
"transformers_version": "4.8.0",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 28996
}