ShengdingHu committed
Commit: b81d89e
Parent: b3adb23

Training in progress, step 200
config.json CHANGED
@@ -1,37 +1,35 @@
  {
- "_name_or_path": "../../../../plm_cache/roberta-base",
+ "_name_or_path": "/home/hushengding/plm_cache/bigbird-roberta-large",
  "architectures": [
- "RobertaForSequenceClassification"
+ "BigBirdForMaskedLM"
  ],
  "attention_probs_dropout_prob": 0.1,
- "bos_token_id": 0,
+ "attention_type": "block_sparse",
+ "block_size": 64,
+ "bos_token_id": 1,
  "classifier_dropout": null,
+ "dropout_rate": 0.0,
  "eos_token_id": 2,
- "finetuning_task": "sst2",
- "hidden_act": "gelu",
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu_new",
  "hidden_dropout_prob": 0.1,
- "hidden_size": 768,
- "id2label": {
- "0": "negative",
- "1": "positive"
- },
+ "hidden_size": 1024,
  "initializer_range": 0.02,
- "intermediate_size": 3072,
- "label2id": {
- "negative": 0,
- "positive": 1
- },
- "layer_norm_eps": 1e-05,
- "max_position_embeddings": 514,
- "model_type": "roberta",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 1,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 4096,
+ "model_type": "big_bird",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "num_random_blocks": 3,
+ "pad_token_id": 0,
  "position_embedding_type": "absolute",
- "problem_type": "single_label_classification",
+ "rescale_embeddings": false,
+ "sep_token_id": 66,
  "torch_dtype": "float32",
- "transformers_version": "4.16.0.dev0",
- "type_vocab_size": 1,
+ "transformers_version": "4.18.0",
+ "type_vocab_size": 2,
+ "use_bias": true,
  "use_cache": true,
- "vocab_size": 50265
+ "vocab_size": 50358
  }
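
The new config.json describes a BigBird masked-LM backbone with block-sparse attention (block size 64, 3 random blocks, 24 layers of hidden size 1024). As a minimal sketch, the same fields can be inspected with transformers, assuming the public google/bigbird-roberta-large checkpoint matches the local cache path used above:

# Sketch: inspect the fields that changed in this commit; the public
# checkpoint is assumed to mirror /home/hushengding/plm_cache/bigbird-roberta-large.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("google/bigbird-roberta-large")
print(config.model_type)                              # big_bird
print(config.attention_type)                          # block_sparse
print(config.block_size, config.num_random_blocks)    # 64 3
print(config.hidden_size, config.num_hidden_layers)   # 1024 24
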
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:97b85198a4f764a512668a9b6195426e63fa9d5f43bc824065bb21ee5c9213d5
- size 2670813
+ oid sha256:6e8c0a9a0dfda06961c89632395fe183e80424b74556e68eca3cd31379827ead
+ size 3177797
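
The weight file is tracked with Git LFS, so the diff above only replaces a pointer: a small text file holding the object's sha256 and byte size. A purely illustrative parser for such pointer files, assuming the repository was cloned without fetching LFS objects so the pointer text is what sits on disk:

# Sketch: read a Git LFS pointer file ("version", "oid", "size" lines).
# This only inspects the pointer text, not the real binary weights.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = parse_lfs_pointer("pytorch_model.bin")
print(pointer["oid"])   # sha256:6e8c0a9a... after this commit
print(pointer["size"])  # 3177797 (bytes)
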
runs/Apr26_18-58-00_node1/1650971691.1516325/events.out.tfevents.1650971691.node1.3995991.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ad234157e3e21bcd53e6017bfe4b9e35a71c9be65b78e227123c378a37605ea
+ size 5442
runs/Apr26_18-58-00_node1/events.out.tfevents.1650971691.node1.3995991.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:05685bf7a2cc42782bdf3abd651f6f9679ed6ac3aaf2fe1b6dcab520bff0dc52
+ size 4331
special_tokens_map.json CHANGED
@@ -1 +1 @@
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
+ {"bos_token": {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "eos_token": {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "unk_token": {"content": "<unk>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "sep_token": {"content": "[SEP]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "pad_token": {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "cls_token": {"content": "[CLS]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "mask_token": {"content": "[MASK]", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true}}
spiece.model CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86
- size 791656
+ oid sha256:fdc81e1fc9d42e0c08b86d5b280d05d7c5e9747c4231c648f2b56b8e1d893c82
+ size 845731
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "trim_offsets": true, "special_tokens_map_file": null, "name_or_path": "../../../../plm_cache/roberta-base", "tokenizer_class": "RobertaTokenizer"}
+ {"bos_token": {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "unk_token": {"content": "<unk>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "sep_token": {"content": "[SEP]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "pad_token": {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "cls_token": {"content": "[CLS]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "mask_token": {"content": "[MASK]", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "special_tokens_map_file": "/home/hushengding/plm_cache/bigbird-roberta-large/special_tokens_map.json", "name_or_path": "/home/hushengding/plm_cache/bigbird-roberta-large", "sp_model_kwargs": {}, "tokenizer_class": "BigBirdTokenizer"}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b55d0648d1b0a06415f18ffb4570c4700baf8ccd5969e309dd37202ddb1a325d
- size 2991
+ oid sha256:2a5bd23e6c586706fc70dfad4751c8d3173847382439d7afd74d51dfe92ae3c6
+ size 3311
training_config.json CHANGED
@@ -1 +1 @@
- {"dataset_config_name": ["en"], "delta_type": "soft_prompt", "do_eval": true, "do_test": true, "do_train": true, "eval_dataset_config_name": ["en"], "eval_dataset_name": "sst2", "eval_steps": 200, "evaluation_strategy": "steps", "greater_is_better": true, "learning_rate": 0.03, "load_best_model_at_end": true, "max_source_length": 128, "metric_for_best_model": "average_metrics", "model_name_or_path": "../../../../plm_cache/t5-base", "num_train_epochs": 3, "output_dir": "outputs/soft_prompt/t5-base/sst2", "overwrite_output_dir": true, "per_device_eval_batch_size": 32, "per_device_train_batch_size": 32, "predict_with_generate": true, "push_to_hub": true, "save_steps": 200, "save_strategy": "steps", "save_total_limit": 1, "seed": 42, "soft_token_num": 100, "split_validation_test": true, "task_name": "sst2", "test_dataset_config_name": ["en"], "test_dataset_name": "sst2", "token_init": false, "tokenizer_name": "../../../../plm_cache/t5-base", "unfrozen_modules": ["deltas"], "warmup_steps": 0}
+ {"dataset_config_name": ["en"], "delta_type": "lora", "do_eval": true, "do_test": true, "do_train": true, "eval_dataset_config_name": ["en"], "eval_dataset_name": "sst2", "eval_steps": 200, "evaluation_strategy": "steps", "greater_is_better": true, "is_seq2seq": false, "learning_rate": 0.001, "load_best_model_at_end": true, "max_source_length": 128, "metric_for_best_model": "average_metrics", "model_name_or_path": "/home/hushengding/plm_cache/bigbird-roberta-large", "modified_modules": ["query", "key"], "num_train_epochs": 3, "output_dir": "outputs/lora/bigbird-roberta-large/sst2", "overwrite_output_dir": true, "per_device_eval_batch_size": 32, "per_device_train_batch_size": 32, "predict_with_generate": false, "push_to_hub": true, "save_steps": 200, "save_strategy": "steps", "save_total_limit": 1, "seed": 42, "split_validation_test": true, "task_name": "sst2", "test_dataset_config_name": ["en"], "test_dataset_name": "sst2", "tokenizer_name": "/home/hushengding/plm_cache/bigbird-roberta-large", "warmup_steps": 0}