Alexander Seifert committed on
Commit
b4e91c1
1 Parent(s): 271d1b4

initial commit

Browse files
config.json ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "activation": "gelu",
3
+ "architectures": [
4
+ "DistilBertForTokenClassification"
5
+ ],
6
+ "attention_dropout": 0.1,
7
+ "dim": 768,
8
+ "dropout": 0.1,
9
+ "hidden_dim": 3072,
10
+ "initializer_range": 0.02,
11
+ "max_position_embeddings": 512,
12
+ "model_type": "distilbert",
13
+ "n_heads": 12,
14
+ "n_layers": 6,
15
+ "output_past": true,
16
+ "pad_token_id": 0,
17
+ "qa_dropout": 0.1,
18
+ "seq_classif_dropout": 0.2,
19
+ "sinusoidal_pos_embds": true,
20
+ "tie_weights_": true,
21
+ "vocab_size": 31102
22
+ }
eval_results.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ eval_loss = 0.02132968286541164
2
+ f1_score = 0.9277463324477302
3
+ precision = 0.9369185453060749
4
+ recall = 0.9187519661395029
5
+ seqeval = precision recall f1-score support
6
+
7
+ COMMA 0.9369 0.9188 0.9277 34967
8
+
9
+ micro avg 0.9369 0.9188 0.9277 34967
10
+ macro avg 0.9369 0.9188 0.9277 34967
11
+
model_args.json ADDED
@@ -0,0 +1 @@
 
1
+ {"classification_report": false, "adam_epsilon": 1e-08, "best_model_dir": "outputs/best_model", "cache_dir": "cache_dir/", "config": {}, "do_lower_case": false, "early_stopping_consider_epochs": false, "early_stopping_delta": 0, "early_stopping_metric": "eval_loss", "early_stopping_metric_minimize": true, "early_stopping_patience": 3, "encoding": null, "eval_batch_size": 8, "evaluate_during_training": false, "evaluate_during_training_steps": 2000, "evaluate_during_training_verbose": false, "fp16": false, "fp16_opt_level": "O1", "gradient_accumulation_steps": 1, "learning_rate": 4e-05, "logging_steps": 50, "manual_seed": null, "max_grad_norm": 1.0, "max_seq_length": 128, "multiprocessing_chunksize": 500, "n_gpu": 1, "no_cache": false, "no_save": false, "num_train_epochs": 1, "output_dir": "outputs/", "overwrite_output_dir": true, "process_count": 1, "reprocess_input_data": true, "save_best_model": true, "save_eval_checkpoints": true, "save_model_every_epoch": true, "save_steps": 2000, "save_optimizer_and_scheduler": true, "silent": false, "tensorboard_dir": null, "train_batch_size": 8, "use_cached_eval_features": false, "use_early_stopping": false, "use_multiprocessing": true, "wandb_kwargs": {}, "wandb_project": null, "warmup_ratio": 0.06, "warmup_steps": 1296, "weight_decay": 0, "block_size": 128, "config_name": null, "dataset_class": null, "dataset_type": "None", "discriminator_config": {}, "discriminator_loss_weight": 50, "generator_config": {}, "max_steps": -1, "min_frequency": 2, "mlm": true, "mlm_probability": 0.15, "sliding_window": false, "special_tokens": ["<s>", "<pad>", "</s>", "<unk>", "<mask>"], "stride": 0.8, "tie_generator_and_discriminator_embeddings": true, "tokenizer_name": "distilbert-base-german-cased", "vocab_size": null, "model_name": "distilbert-base-german-cased-derstandard/", "model_type": "distilbert"}
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:27df42f9333d933cc342423e90016557c978e1ebc204bd693d882a4ad37582ed
3
+ size 267264178
special_tokens_map.json ADDED
@@ -0,0 +1 @@
 
1
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
 
1
+ {"do_lower_case": false, "model_max_length": 512, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7ca4e193b217955b285590d20722775027ce9eb7e08a55df4a1620449b1b5815
3
+ size 2086
vocab.txt ADDED
The diff for this file is too large to render. See raw diff