bullmount committed
Commit: c1ff5bc
1 parent: 00c9060

Training completed!

Files changed (23)
  1. .gitignore +1 -0
  2. added_tokens.json +1 -0
  3. config.json +25 -0
  4. pytorch_model.bin +3 -0
  5. runs/Feb26_17-08-36_1f658d538c8a/1645895345.0933218/events.out.tfevents.1645895345.1f658d538c8a.84.1 +3 -0
  6. runs/Feb26_17-08-36_1f658d538c8a/events.out.tfevents.1645895345.1f658d538c8a.84.0 +3 -0
  7. runs/Feb26_17-27-19_1f658d538c8a/1645896449.4013433/events.out.tfevents.1645896449.1f658d538c8a.84.3 +3 -0
  8. runs/Feb26_17-27-19_1f658d538c8a/events.out.tfevents.1645896449.1f658d538c8a.84.2 +3 -0
  9. runs/Feb26_17-28-25_1f658d538c8a/1645896517.1839676/events.out.tfevents.1645896517.1f658d538c8a.84.5 +3 -0
  10. runs/Feb26_17-28-25_1f658d538c8a/1645896534.6182578/events.out.tfevents.1645896534.1f658d538c8a.84.6 +3 -0
  11. runs/Feb26_17-28-25_1f658d538c8a/1645897296.0591638/events.out.tfevents.1645897296.1f658d538c8a.84.7 +3 -0
  12. runs/Feb26_17-28-25_1f658d538c8a/events.out.tfevents.1645896517.1f658d538c8a.84.4 +3 -0
  13. runs/Feb26_17-43-12_1f658d538c8a/1645897403.1880443/events.out.tfevents.1645897403.1f658d538c8a.84.9 +3 -0
  14. runs/Feb26_17-43-12_1f658d538c8a/events.out.tfevents.1645897403.1f658d538c8a.84.8 +3 -0
  15. runs/Feb26_17-45-50_1f658d538c8a/1645897560.169575/events.out.tfevents.1645897560.1f658d538c8a.84.11 +3 -0
  16. runs/Feb26_17-45-50_1f658d538c8a/events.out.tfevents.1645897560.1f658d538c8a.84.10 +3 -0
  17. runs/Feb26_17-50-24_1f658d538c8a/1645897835.7440243/events.out.tfevents.1645897835.1f658d538c8a.84.13 +3 -0
  18. runs/Feb26_17-50-24_1f658d538c8a/events.out.tfevents.1645897835.1f658d538c8a.84.12 +3 -0
  19. special_tokens_map.json +1 -0
  20. tokenizer.json +0 -0
  21. tokenizer_config.json +1 -0
  22. training_args.bin +3 -0
  23. vocab.txt +0 -0
.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"DPI": 31140, "idoneità": 31138, "riciclato": 31131, "espletamento": 31148, "pecuniaria": 31118, "riutilizzo": 31114, "rappresentativo": 31147, "VLE": 31136, "sexies": 31128, "punito": 31103, "osservanza": 31153, "albo": 31130, "tempestivamente": 31150, "octies": 31142, "INAIL": 31134, "emanazione": 31129, "gassoso": 31155, "ammenda": 31113, "quinquies": 31126, "VAS": 31137, "reflua": 31112, "quater": 31119, "individuazione": 31117, "bonifica": 31108, "ISPESL": 31132, "applicarsi": 31141, "osservatorio": 31158, "istruttoria": 31123, "semplificato": 31139, "proponente": 31111, "predetto": 31105, "economicità": 31152, "riciclo": 31144, "cancerogeno": 31154, "disciplinato": 31102, "ISPRA": 31121, "amianto": 31120, "committente": 31133, "BAT": 31145, "accertamento": 31104, "depurazione": 31124, "sanzione": 31109, "MW": 31143, "territorialmente": 31122, "differenziato": 31127, "modificazione": 31110, "assoggettabilità": 31125, "coincenerimento": 31116, "adempimento": 31107, "caratterizzazione": 31156, "effettuazione": 31149, "sottosuolo": 31157, "limitatamente": 31151, "incenerimento": 31115, "diffida": 31135, "tracciabilità": 31146, "idrografico": 31106}
config.json ADDED
@@ -0,0 +1,25 @@
+ {
+ "_name_or_path": "dbmdz/bert-base-italian-xxl-cased",
+ "architectures": [
+ "BertForMaskedLM"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "classifier_dropout": null,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 512,
+ "model_type": "bert",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 0,
+ "position_embedding_type": "absolute",
+ "torch_dtype": "float32",
+ "transformers_version": "4.16.2",
+ "type_vocab_size": 2,
+ "use_cache": true,
+ "vocab_size": 32102
+ }
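config.json identifies the checkpoint as a BertForMaskedLM fine-tuned from dbmdz/bert-base-italian-xxl-cased (12 layers, hidden size 768, vocab_size 32102). A minimal usage sketch for the uploaded checkpoint; "path/to/checkpoint" is a placeholder for a local clone of this repository, not a name from the commit:

```python
# Minimal usage sketch, assuming the files from this commit sit in a local directory.
from transformers import pipeline

fill_mask = pipeline("fill-mask", model="path/to/checkpoint")  # placeholder path
for pred in fill_mask("La [MASK] del sito contaminato è a carico del responsabile."):
    print(pred["token_str"], round(pred["score"], 3))
```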
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9fb82f5b3c275c0905263cad900c27d62d532c69a05bb588cdcd5245cd446f5
+ size 443002091
runs/Feb26_17-08-36_1f658d538c8a/1645895345.0933218/events.out.tfevents.1645895345.1f658d538c8a.84.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5748cb6c1fef107723cdd025a08890577b7df5d283b51dc1777bd42480b1f71f
+ size 4757
runs/Feb26_17-08-36_1f658d538c8a/events.out.tfevents.1645895345.1f658d538c8a.84.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2899d67f5ff1c4116be30ff3b2dbc4ad4ab18524bc7a5aae3479aa37c3208797
+ size 3277
runs/Feb26_17-27-19_1f658d538c8a/1645896449.4013433/events.out.tfevents.1645896449.1f658d538c8a.84.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1bac13a9eca978535884ad8064eb8d9f307f6c60e4273c222dd4296b0a7c9f95
+ size 4757
runs/Feb26_17-27-19_1f658d538c8a/events.out.tfevents.1645896449.1f658d538c8a.84.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:523e16d998d5c073344850f8da123ed7d093736652bfbee7bc856bc7ce86a717
+ size 3277
runs/Feb26_17-28-25_1f658d538c8a/1645896517.1839676/events.out.tfevents.1645896517.1f658d538c8a.84.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b01e80ba36fa2944749584d4b0ba3d851ff2fc87e415791af20f25c0bd4ce61f
+ size 4757
runs/Feb26_17-28-25_1f658d538c8a/1645896534.6182578/events.out.tfevents.1645896534.1f658d538c8a.84.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ace997ce4def9c9aa3ce7684f5621e972533b1c04328a350583189dc7bd8d07
+ size 4757
runs/Feb26_17-28-25_1f658d538c8a/1645897296.0591638/events.out.tfevents.1645897296.1f658d538c8a.84.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8768a6a0806cb087f451afc59331db5aed25470bdc35c2005d3fbe78a3a799d9
+ size 4757
runs/Feb26_17-28-25_1f658d538c8a/events.out.tfevents.1645896517.1f658d538c8a.84.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f502e959a17cf3a8c4c3f4903c85e0bd68a9d7d0d2511fd41dc09335713bb6a6
+ size 9751
runs/Feb26_17-43-12_1f658d538c8a/1645897403.1880443/events.out.tfevents.1645897403.1f658d538c8a.84.9 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7868fb42268e82701320a2e6d0fb6ddcb84e1bf8b6525a33d83710775a880567
+ size 4757
runs/Feb26_17-43-12_1f658d538c8a/events.out.tfevents.1645897403.1f658d538c8a.84.8 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c8f1e9a7a3803a177f7631201a8156b5b911f17c273a2c62c6c17127630ac36
+ size 3277
runs/Feb26_17-45-50_1f658d538c8a/1645897560.169575/events.out.tfevents.1645897560.1f658d538c8a.84.11 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6bc0c6d7289f9ade555951625118232b0ffc33c6abb00fcc0d7ede19aa4b5693
+ size 4757
runs/Feb26_17-45-50_1f658d538c8a/events.out.tfevents.1645897560.1f658d538c8a.84.10 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d20c940bfc4cd30371c338e66291573045198d13f0697b87c3bbc13a9966bfbe
+ size 3434
runs/Feb26_17-50-24_1f658d538c8a/1645897835.7440243/events.out.tfevents.1645897835.1f658d538c8a.84.13 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb5681fceca6f54f10982ecf6c11e50c22b92ce3f8880f92c0aa142591b68619
+ size 4757
runs/Feb26_17-50-24_1f658d538c8a/events.out.tfevents.1645897835.1f658d538c8a.84.12 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:402483efad6d656f2cb2d1ef2b45df4aff3b9ba623a4b9ae0874344f2971f354
+ size 4059
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "max_len": 512, "special_tokens_map_file": null, "name_or_path": "dbmdz/bert-base-italian-xxl-cased", "do_basic_tokenize": true, "never_split": null, "tokenizer_class": "BertTokenizer"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5165b2d13b15796972278515e6a8210b6a61949030081ab831603b3e476d47d
+ size 2991
vocab.txt ADDED
The diff for this file is too large to render. See raw diff