deberta-v3-large-ttc / config.json
{
"_name_or_path": "tner_ckpt/ttc_deberta_v3_large/best_model",
"architectures": [
"DebertaV2ForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"crf_state_dict": {
"_constraint_mask": [
[
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
]
],
"end_transitions": [
-2.021637439727783,
0.026428837329149246,
0.24804314970970154,
-0.720425546169281,
0.34056994318962097,
-0.5343120098114014,
1.3662528991699219
],
"start_transitions": [
2.2282137870788574,
-0.8152527213096619,
0.4186037480831146,
-1.0170772075653076,
1.0050054788589478,
0.7199422717094421,
0.2528919577598572
],
"transitions": [
[
-0.21432749927043915,
-0.25242912769317627,
0.22417113184928894,
-0.503700315952301,
-0.23448672890663147,
0.3160000145435333,
-0.02739592082798481
],
[
-0.19862236082553864,
0.019438333809375763,
0.19056469202041626,
-0.6048277616500854,
-0.13490897417068481,
-0.06498467177152634,
-0.21603785455226898
],
[
-0.2876734435558319,
-0.67124342918396,
0.5191164612770081,
-0.12752971053123474,
-0.49947670102119446,
0.1225116178393364,
0.16178637742996216
],
[
-0.542987048625946,
0.5512953400611877,
-0.07943175733089447,
-0.1929490864276886,
0.8708887696266174,
-0.4959072768688202,
-0.4400121867656708
],
[
0.04234255105257034,
0.8219520449638367,
0.11007998138666153,
0.2554131746292114,
-0.031642839312553406,
-0.22821848094463348,
-0.4816875159740448
],
[
-0.32480350136756897,
0.2353011816740036,
-0.01941823400557041,
0.19781018793582916,
-0.6831340789794922,
-0.2630804181098938,
-0.45709744095802307
],
[
-0.054760802537202835,
0.6546015739440918,
0.1385568380355835,
0.20468878746032715,
0.03414012864232063,
0.37531578540802,
0.21193274855613708
]
]
},
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"id2label": {
"0": "B-LOC",
"1": "B-ORG",
"2": "B-PER",
"3": "I-LOC",
"4": "I-ORG",
"5": "I-PER",
"6": "O"
},
"initializer_range": 0.02,
"intermediate_size": 4096,
"label2id": {
"B-LOC": 0,
"B-ORG": 1,
"B-PER": 2,
"I-LOC": 3,
"I-ORG": 4,
"I-PER": 5,
"O": 6
},
"layer_norm_eps": 1e-07,
"max_position_embeddings": 512,
"max_relative_positions": -1,
"model_type": "deberta-v2",
"norm_rel_ebd": "layer_norm",
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 0,
"pooler_dropout": 0,
"pooler_hidden_act": "gelu",
"pooler_hidden_size": 1024,
"pos_att_type": [
"p2c",
"c2p"
],
"position_biased_input": false,
"position_buckets": 256,
"relative_attention": true,
"share_att_key": true,
"torch_dtype": "float32",
"transformers_version": "4.20.1",
"type_vocab_size": 0,
"vocab_size": 128100
}
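
A note on the `crf_state_dict` block: its layout appears to follow allennlp's `ConditionalRandomField`, where `_constraint_mask` is a `(num_labels + 2) x (num_labels + 2)` matrix over the seven labels in `id2label` plus synthetic START (index 7) and END (index 8) states, with `1.0` marking a transition that constrained Viterbi decoding may use. The sketch below is not tner's actual code; it just rebuilds that mask from `id2label` under standard BIO rules to show where the zeros come from.

```python
# Minimal sketch (not tner's code): rebuild the 9x9 _constraint_mask
# above from id2label under standard BIO transition rules.
id2label = {0: "B-LOC", 1: "B-ORG", 2: "B-PER",
            3: "I-LOC", 4: "I-ORG", 5: "I-PER", 6: "O"}
states = [id2label[i] for i in range(len(id2label))] + ["START", "END"]

def allowed(frm: str, to: str) -> bool:
    """BIO constraints: I-X may only follow B-X or I-X of the same type,
    and a sequence must open with B-* or O."""
    if frm == "END" or to == "START":
        return False                      # nothing leaves END or enters START
    if frm == "START":
        return to != "END" and to[0] in ("B", "O")
    if to == "END":
        return True                       # any real label may end a sequence
    if to[0] == "I":
        return frm[0] in ("B", "I") and frm[2:] == to[2:]  # same entity type
    return True                           # B-* and O are always reachable

mask = [[1.0 if allowed(f, t) else 0.0 for t in states] for f in states]
assert mask[0] == [1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0]  # B-LOC row
assert mask[7] == [1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0]  # START row
assert mask[8] == [0.0] * 9                                      # END row
```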
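A hedged usage sketch follows. Loading the checkpoint with plain `transformers` (`AutoModelForTokenClassification.from_pretrained`) restores the DeBERTa weights and carries `crf_state_dict` along as an unused config attribute, so the CRF transition scores only take effect through tner's own wrapper; the hub repo id below is assumed from the file header.

```python
# Hedged usage sketch; repo id "tner/deberta-v3-large-ttc" is an
# assumption based on the file header above.
from tner import TransformersNER  # pip install tner

model = TransformersNER("tner/deberta-v3-large-ttc")
# predict() runs the token classifier and decodes with the constrained
# CRF whose parameters are stored in crf_state_dict.
out = model.predict(["Jacob Collier is a Grammy awarded artist from London."])
print(out)
```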