{ "_name_or_path": "/home/wukevin/projects/tcr/tcr_models/temp3/checkpoint-805", "architectures": [ "BertForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "LLWNGPMAV", "1": "RPRGEVRFL", "2": "ATDALMTGY", "3": "HSKKKCDEL", "4": "KAFSPEVIPMF", "5": "KRWIILGLNK", "6": "KRWIIMGLNK", "7": "TPQDLNTML", "8": "EIYKRWII", "9": "ISPRTL-W", "10": "FLKEKGGL", "11": "HPKVSSEVHI", "12": "IIKDYGKQM", "13": "LPPIVAKEI", "14": "RFPLTFGWCF", "15": "RYPLTFGWCF", "16": "TPGPGVRYPL", "17": "TQGYFPDWQNY", "18": "FPRPWLHGL", "19": "RYPLTFGW", "20": "ELRRKMMYM", "21": "QIKVRVDMV", "22": "QIKVRVKMV", "23": "VLEETSVML", "24": "FPTKDVAL", "25": "NLVPMVATV", "26": "RPHERNGFTVL", "27": "TPRVTGGGAM", "28": "VTEHDTLLY", "29": "YLAMPFATPMEAELARRSLA", "30": "GLCTLVAML", "31": "YVLDHLIVV", "32": "EPLPQGQLTAY", "33": "RAKFKQLL", "34": "HPVGEADYFEY", "35": "FLRGRAYGL", "36": "AVFDRKSDAK", "37": "IVTDFSVIK", "38": "NFIRMVISNPAAT", "39": "KRGIVEQSSTSISSL", "40": "ENPVVHFFKNIVTPR", "41": "GILGFVFTL", "42": "PQPELPYPQPE", "43": "FWIDLFETIG", "44": "other" }, "initializer_range": 0.02, "intermediate_size": 1536, "label2id": { "LABEL_0": 0, "LABEL_1": 1, "LABEL_10": 10, "LABEL_11": 11, "LABEL_12": 12, "LABEL_13": 13, "LABEL_14": 14, "LABEL_15": 15, "LABEL_16": 16, "LABEL_17": 17, "LABEL_18": 18, "LABEL_19": 19, "LABEL_2": 2, "LABEL_20": 20, "LABEL_21": 21, "LABEL_22": 22, "LABEL_23": 23, "LABEL_24": 24, "LABEL_25": 25, "LABEL_26": 26, "LABEL_27": 27, "LABEL_28": 28, "LABEL_29": 29, "LABEL_3": 3, "LABEL_30": 30, "LABEL_31": 31, "LABEL_32": 32, "LABEL_33": 33, "LABEL_34": 34, "LABEL_35": 35, "LABEL_36": 36, "LABEL_37": 37, "LABEL_38": 38, "LABEL_39": 39, "LABEL_4": 4, "LABEL_40": 40, "LABEL_41": 41, "LABEL_42": 42, "LABEL_43": 43, "LABEL_44": 44, "LABEL_5": 5, "LABEL_6": 6, "LABEL_7": 7, "LABEL_8": 8, "LABEL_9": 9 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 64, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 21, "position_embedding_type": "absolute", "transformers_version": "4.4.2", "type_vocab_size": 2, "use_cache": true, "vocab_size": 26 }