Remove old checkpoints
- checkpoint-3500/config.json +0 -58
- checkpoint-3500/optimizer.pt +0 -3
- checkpoint-3500/pytorch_model.bin +0 -3
- checkpoint-3500/rng_state.pth +0 -3
- checkpoint-3500/scheduler.pt +0 -3
- checkpoint-3500/special_tokens_map.json +0 -1
- checkpoint-3500/tokenizer.json +0 -0
- checkpoint-3500/tokenizer_config.json +0 -1
- checkpoint-3500/trainer_state.json +0 -58
- checkpoint-3500/training_args.bin +0 -3
- checkpoint-500/config.json +0 -58
- checkpoint-500/optimizer.pt +0 -3
- checkpoint-500/pytorch_model.bin +0 -3
- checkpoint-500/rng_state.pth +0 -3
- checkpoint-500/scheduler.pt +0 -3
- checkpoint-500/special_tokens_map.json +0 -1
- checkpoint-500/tokenizer.json +0 -0
- checkpoint-500/tokenizer_config.json +0 -1
- checkpoint-500/trainer_state.json +0 -22
- checkpoint-500/training_args.bin +0 -3
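The two checkpoint folders below are intermediate Trainer snapshots (step 500 and step 3500 of 7500), not the final model. A cleanup like this commit can be scripted, or avoided entirely by capping how many checkpoints Trainer keeps. A minimal sketch, assuming a recent `huggingface_hub` release that provides `HfApi.delete_folder` and using a placeholder repo id:

```python
from huggingface_hub import HfApi

api = HfApi()

# Delete each stale checkpoint folder from the Hub repository.
# "your-username/pegasus-xsum-finetuned" is a placeholder repo id.
for folder in ["checkpoint-500", "checkpoint-3500"]:
    api.delete_folder(
        path_in_repo=folder,
        repo_id="your-username/pegasus-xsum-finetuned",
        commit_message="Remove old checkpoints",
    )
```

During training, `TrainingArguments(save_total_limit=2)` prunes older checkpoints automatically, so cleanup commits like this one are rarely needed.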
checkpoint-3500/config.json
DELETED
@@ -1,58 +0,0 @@
-{
-  "_name_or_path": "google/pegasus-xsum",
-  "activation_dropout": 0.1,
-  "activation_function": "relu",
-  "add_bias_logits": false,
-  "add_final_layer_norm": true,
-  "architectures": [
-    "PegasusForConditionalGeneration"
-  ],
-  "attention_dropout": 0.1,
-  "bos_token_id": 0,
-  "classif_dropout": 0.0,
-  "classifier_dropout": 0.0,
-  "d_model": 1024,
-  "decoder_attention_heads": 16,
-  "decoder_ffn_dim": 4096,
-  "decoder_layerdrop": 0.0,
-  "decoder_layers": 16,
-  "decoder_start_token_id": 0,
-  "do_blenderbot_90_layernorm": false,
-  "dropout": 0.1,
-  "encoder_attention_heads": 16,
-  "encoder_ffn_dim": 4096,
-  "encoder_layerdrop": 0.0,
-  "encoder_layers": 16,
-  "eos_token_id": 1,
-  "extra_pos_embeddings": 0,
-  "force_bos_token_to_be_generated": false,
-  "forced_eos_token_id": 1,
-  "gradient_checkpointing": false,
-  "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1",
-    "2": "LABEL_2"
-  },
-  "init_std": 0.02,
-  "is_encoder_decoder": true,
-  "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1,
-    "LABEL_2": 2
-  },
-  "length_penalty": 0.6,
-  "max_length": 64,
-  "max_position_embeddings": 1024,
-  "model_type": "pegasus",
-  "normalize_before": true,
-  "normalize_embedding": false,
-  "num_beams": 8,
-  "num_hidden_layers": 16,
-  "pad_token_id": 0,
-  "scale_embedding": true,
-  "static_position_embeddings": true,
-  "torch_dtype": "float32",
-  "transformers_version": "4.17.0.dev0",
-  "use_cache": true,
-  "vocab_size": 96103
-}
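Judging by `_name_or_path` and the values above (d_model 1024, 16 encoder/decoder layers, num_beams 8, length_penalty 0.6), this is the unmodified `google/pegasus-xsum` configuration, so deleting the checkpoint copy loses nothing that cannot be regenerated. A minimal sketch:

```python
from transformers import PegasusConfig

# Rebuild the same configuration from the base model card; the checkpoint copy
# was not modified relative to google/pegasus-xsum.
config = PegasusConfig.from_pretrained("google/pegasus-xsum")
print(config.d_model, config.encoder_layers, config.num_beams)  # 1024 16 8
```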
checkpoint-3500/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:a46fe2caae181bcfe9faad65f50227fe350b2b4e357eb7123ab70f0908a00e30
-size 4549979939
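The large binaries in a checkpoint (optimizer state, model weights, RNG state, scheduler state) are tracked with Git LFS, so the repository itself only stores a three-line pointer per file; the actual 4.5 GB optimizer blob lives in LFS storage. A minimal sketch of reading such a pointer:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

# The deleted optimizer.pt pointer from this commit.
pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:a46fe2caae181bcfe9faad65f50227fe350b2b4e357eb7123ab70f0908a00e30\n"
    "size 4549979939\n"
)
info = parse_lfs_pointer(pointer)
print(info["oid"], int(info["size"]))  # sha256 digest, 4549979939 bytes (~4.5 GB)
```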
checkpoint-3500/pytorch_model.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7a4b66b7d6fe51ff02efdb0ae3a0cfed642e96c0059c1f3eac1d108e4854c7ca
-size 2283818289
checkpoint-3500/rng_state.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:226e56293a1d48d342b06418e1449ef4ac6b608f0dbf9f5bea21ca35693e5603
-size 13483
checkpoint-3500/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:29b37186bb750ff5ab4dc66e07d14e3755b091bc5124d0e48401732903799dab
-size 623
checkpoint-3500/special_tokens_map.json
DELETED
@@ -1 +0,0 @@
-{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask_2>", "additional_special_tokens": ["<mask_1>", "<unk_2>", "<unk_3>", "<unk_4>", "<unk_5>", "<unk_6>", "<unk_7>", "<unk_8>", "<unk_9>", "<unk_10>", "<unk_11>", "<unk_12>", "<unk_13>", "<unk_14>", "<unk_15>", "<unk_16>", "<unk_17>", "<unk_18>", "<unk_19>", "<unk_20>", "<unk_21>", "<unk_22>", "<unk_23>", "<unk_24>", "<unk_25>", "<unk_26>", "<unk_27>", "<unk_28>", "<unk_29>", "<unk_30>", "<unk_31>", "<unk_32>", "<unk_33>", "<unk_34>", "<unk_35>", "<unk_36>", "<unk_37>", "<unk_38>", "<unk_39>", "<unk_40>", "<unk_41>", "<unk_42>", "<unk_43>", "<unk_44>", "<unk_45>", "<unk_46>", "<unk_47>", "<unk_48>", "<unk_49>", "<unk_50>", "<unk_51>", "<unk_52>", "<unk_53>", "<unk_54>", "<unk_55>", "<unk_56>", "<unk_57>", "<unk_58>", "<unk_59>", "<unk_60>", "<unk_61>", "<unk_62>", "<unk_63>", "<unk_64>", "<unk_65>", "<unk_66>", "<unk_67>", "<unk_68>", "<unk_69>", "<unk_70>", "<unk_71>", "<unk_72>", "<unk_73>", "<unk_74>", "<unk_75>", "<unk_76>", "<unk_77>", "<unk_78>", "<unk_79>", "<unk_80>", "<unk_81>", "<unk_82>", "<unk_83>", "<unk_84>", "<unk_85>", "<unk_86>", "<unk_87>", "<unk_88>", "<unk_89>", "<unk_90>", "<unk_91>", "<unk_92>", "<unk_93>", "<unk_94>", "<unk_95>", "<unk_96>", "<unk_97>", "<unk_98>", "<unk_99>", "<unk_100>", "<unk_101>", "<unk_102>"]}
checkpoint-3500/tokenizer.json
DELETED
The diff for this file is too large to render.
checkpoint-3500/tokenizer_config.json
DELETED
@@ -1 +0,0 @@
-{"pad_token": "<pad>", "eos_token": "</s>", "unk_token": "<unk>", "mask_token": "<mask_2>", "mask_token_sent": "<mask_1>", "offset": 103, "additional_special_tokens": ["<mask_1>", "<unk_2>", "<unk_3>", "<unk_4>", "<unk_5>", "<unk_6>", "<unk_7>", "<unk_8>", "<unk_9>", "<unk_10>", "<unk_11>", "<unk_12>", "<unk_13>", "<unk_14>", "<unk_15>", "<unk_16>", "<unk_17>", "<unk_18>", "<unk_19>", "<unk_20>", "<unk_21>", "<unk_22>", "<unk_23>", "<unk_24>", "<unk_25>", "<unk_26>", "<unk_27>", "<unk_28>", "<unk_29>", "<unk_30>", "<unk_31>", "<unk_32>", "<unk_33>", "<unk_34>", "<unk_35>", "<unk_36>", "<unk_37>", "<unk_38>", "<unk_39>", "<unk_40>", "<unk_41>", "<unk_42>", "<unk_43>", "<unk_44>", "<unk_45>", "<unk_46>", "<unk_47>", "<unk_48>", "<unk_49>", "<unk_50>", "<unk_51>", "<unk_52>", "<unk_53>", "<unk_54>", "<unk_55>", "<unk_56>", "<unk_57>", "<unk_58>", "<unk_59>", "<unk_60>", "<unk_61>", "<unk_62>", "<unk_63>", "<unk_64>", "<unk_65>", "<unk_66>", "<unk_67>", "<unk_68>", "<unk_69>", "<unk_70>", "<unk_71>", "<unk_72>", "<unk_73>", "<unk_74>", "<unk_75>", "<unk_76>", "<unk_77>", "<unk_78>", "<unk_79>", "<unk_80>", "<unk_81>", "<unk_82>", "<unk_83>", "<unk_84>", "<unk_85>", "<unk_86>", "<unk_87>", "<unk_88>", "<unk_89>", "<unk_90>", "<unk_91>", "<unk_92>", "<unk_93>", "<unk_94>", "<unk_95>", "<unk_96>", "<unk_97>", "<unk_98>", "<unk_99>", "<unk_100>", "<unk_101>", "<unk_102>"], "model_max_length": 512, "special_tokens_map_file": null, "full_tokenizer_file": null, "name_or_path": "google/pegasus-xsum", "tokenizer_class": "PegasusTokenizer"}
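The tokenizer files (special_tokens_map.json, tokenizer_config.json, tokenizer.json) also appear to be straight copies of the base `google/pegasus-xsum` tokenizer (offset 103, model_max_length 512, the `<mask_1>`/`<unk_*>` special tokens), so they too can be regenerated on demand. A minimal sketch, assuming the fast tokenizer is loaded so that tokenizer.json is written:

```python
from transformers import AutoTokenizer

# Reload the base tokenizer and write the same trio of files to a local folder.
tokenizer = AutoTokenizer.from_pretrained("google/pegasus-xsum")
tokenizer.save_pretrained("pegasus-xsum-tokenizer")
```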
checkpoint-3500/trainer_state.json
DELETED
@@ -1,58 +0,0 @@
-{
-  "best_metric": null,
-  "best_model_checkpoint": null,
-  "epoch": 1.4,
-  "global_step": 3500,
-  "is_hyper_param_search": false,
-  "is_local_process_zero": true,
-  "is_world_process_zero": true,
-  "log_history": [
-    {
-      "epoch": 0.2,
-      "learning_rate": 4.666666666666667e-05,
-      "loss": 1.7226,
-      "step": 500
-    },
-    {
-      "epoch": 0.4,
-      "learning_rate": 4.3333333333333334e-05,
-      "loss": 1.27,
-      "step": 1000
-    },
-    {
-      "epoch": 0.6,
-      "learning_rate": 4e-05,
-      "loss": 1.1669,
-      "step": 1500
-    },
-    {
-      "epoch": 0.8,
-      "learning_rate": 3.6666666666666666e-05,
-      "loss": 1.1489,
-      "step": 2000
-    },
-    {
-      "epoch": 1.0,
-      "learning_rate": 3.3333333333333335e-05,
-      "loss": 1.1451,
-      "step": 2500
-    },
-    {
-      "epoch": 1.2,
-      "learning_rate": 3e-05,
-      "loss": 0.9993,
-      "step": 3000
-    },
-    {
-      "epoch": 1.4,
-      "learning_rate": 2.6666666666666667e-05,
-      "loss": 1.0227,
-      "step": 3500
-    }
-  ],
-  "max_steps": 7500,
-  "num_train_epochs": 3,
-  "total_flos": 2.9293040335159296e+16,
-  "trial_name": null,
-  "trial_params": null
-}
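trainer_state.json is the one file here whose contents exist nowhere else: the logged loss curve (1.72 at step 500 down to 1.02 at step 3500 of 7500). A minimal sketch of extracting that history before deleting a checkpoint, assuming a local copy of the folder:

```python
import json

# Trainer writes this file inside every checkpoint-* folder.
with open("checkpoint-3500/trainer_state.json") as f:
    state = json.load(f)

# log_history holds one record per logging step: epoch, learning_rate, loss, step.
for entry in state["log_history"]:
    print(f'step {entry["step"]:>5}  epoch {entry["epoch"]:.1f}  loss {entry["loss"]:.4f}')
```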
checkpoint-3500/training_args.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:790c0dd74ccf769eee75352a8807625e2b2074b043f85cf2ac0b75a9375b5fc8
-size 3119
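training_args.bin is not model weights but a pickled `TrainingArguments` object saved with `torch.save` (hence the 3 KB size). A minimal sketch of inspecting it, assuming a local copy and a PyTorch version that accepts `weights_only` (older versions simply omit the argument):

```python
import torch

# Unpickling an arbitrary Python object requires weights_only=False on recent PyTorch.
args = torch.load("checkpoint-3500/training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.output_dir)
```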
checkpoint-500/config.json
DELETED
@@ -1,58 +0,0 @@
-{
-  "_name_or_path": "google/pegasus-xsum",
-  "activation_dropout": 0.1,
-  "activation_function": "relu",
-  "add_bias_logits": false,
-  "add_final_layer_norm": true,
-  "architectures": [
-    "PegasusForConditionalGeneration"
-  ],
-  "attention_dropout": 0.1,
-  "bos_token_id": 0,
-  "classif_dropout": 0.0,
-  "classifier_dropout": 0.0,
-  "d_model": 1024,
-  "decoder_attention_heads": 16,
-  "decoder_ffn_dim": 4096,
-  "decoder_layerdrop": 0.0,
-  "decoder_layers": 16,
-  "decoder_start_token_id": 0,
-  "do_blenderbot_90_layernorm": false,
-  "dropout": 0.1,
-  "encoder_attention_heads": 16,
-  "encoder_ffn_dim": 4096,
-  "encoder_layerdrop": 0.0,
-  "encoder_layers": 16,
-  "eos_token_id": 1,
-  "extra_pos_embeddings": 0,
-  "force_bos_token_to_be_generated": false,
-  "forced_eos_token_id": 1,
-  "gradient_checkpointing": false,
-  "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1",
-    "2": "LABEL_2"
-  },
-  "init_std": 0.02,
-  "is_encoder_decoder": true,
-  "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1,
-    "LABEL_2": 2
-  },
-  "length_penalty": 0.6,
-  "max_length": 64,
-  "max_position_embeddings": 1024,
-  "model_type": "pegasus",
-  "normalize_before": true,
-  "normalize_embedding": false,
-  "num_beams": 8,
-  "num_hidden_layers": 16,
-  "pad_token_id": 0,
-  "scale_embedding": true,
-  "static_position_embeddings": true,
-  "torch_dtype": "float32",
-  "transformers_version": "4.17.0.dev0",
-  "use_cache": true,
-  "vocab_size": 96103
-}
checkpoint-500/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e6c76003d8d3638079abd34181ef2b354b8b0d9e142870fe69f7a2c41ac24fc6
-size 4549979939
checkpoint-500/pytorch_model.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6d5cda17ec38b4a3c99ebd617d69f3b3804b49b115ec2ee5e780da3e0ac43f74
-size 2283818289
checkpoint-500/rng_state.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ff471bffc94440dc1487cd3206034d1b95182badf9ee9b17699e1906a1dc97bf
-size 13547
checkpoint-500/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6bc047e0bff1c48264e6437483117c31be8505841b45d5837ebfb8a57067f543
-size 623
checkpoint-500/special_tokens_map.json
DELETED
@@ -1 +0,0 @@
-{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask_2>", "additional_special_tokens": ["<mask_1>", "<unk_2>", "<unk_3>", "<unk_4>", "<unk_5>", "<unk_6>", "<unk_7>", "<unk_8>", "<unk_9>", "<unk_10>", "<unk_11>", "<unk_12>", "<unk_13>", "<unk_14>", "<unk_15>", "<unk_16>", "<unk_17>", "<unk_18>", "<unk_19>", "<unk_20>", "<unk_21>", "<unk_22>", "<unk_23>", "<unk_24>", "<unk_25>", "<unk_26>", "<unk_27>", "<unk_28>", "<unk_29>", "<unk_30>", "<unk_31>", "<unk_32>", "<unk_33>", "<unk_34>", "<unk_35>", "<unk_36>", "<unk_37>", "<unk_38>", "<unk_39>", "<unk_40>", "<unk_41>", "<unk_42>", "<unk_43>", "<unk_44>", "<unk_45>", "<unk_46>", "<unk_47>", "<unk_48>", "<unk_49>", "<unk_50>", "<unk_51>", "<unk_52>", "<unk_53>", "<unk_54>", "<unk_55>", "<unk_56>", "<unk_57>", "<unk_58>", "<unk_59>", "<unk_60>", "<unk_61>", "<unk_62>", "<unk_63>", "<unk_64>", "<unk_65>", "<unk_66>", "<unk_67>", "<unk_68>", "<unk_69>", "<unk_70>", "<unk_71>", "<unk_72>", "<unk_73>", "<unk_74>", "<unk_75>", "<unk_76>", "<unk_77>", "<unk_78>", "<unk_79>", "<unk_80>", "<unk_81>", "<unk_82>", "<unk_83>", "<unk_84>", "<unk_85>", "<unk_86>", "<unk_87>", "<unk_88>", "<unk_89>", "<unk_90>", "<unk_91>", "<unk_92>", "<unk_93>", "<unk_94>", "<unk_95>", "<unk_96>", "<unk_97>", "<unk_98>", "<unk_99>", "<unk_100>", "<unk_101>", "<unk_102>"]}
checkpoint-500/tokenizer.json
DELETED
The diff for this file is too large to render.
checkpoint-500/tokenizer_config.json
DELETED
@@ -1 +0,0 @@
-{"pad_token": "<pad>", "eos_token": "</s>", "unk_token": "<unk>", "mask_token": "<mask_2>", "mask_token_sent": "<mask_1>", "offset": 103, "additional_special_tokens": ["<mask_1>", "<unk_2>", "<unk_3>", "<unk_4>", "<unk_5>", "<unk_6>", "<unk_7>", "<unk_8>", "<unk_9>", "<unk_10>", "<unk_11>", "<unk_12>", "<unk_13>", "<unk_14>", "<unk_15>", "<unk_16>", "<unk_17>", "<unk_18>", "<unk_19>", "<unk_20>", "<unk_21>", "<unk_22>", "<unk_23>", "<unk_24>", "<unk_25>", "<unk_26>", "<unk_27>", "<unk_28>", "<unk_29>", "<unk_30>", "<unk_31>", "<unk_32>", "<unk_33>", "<unk_34>", "<unk_35>", "<unk_36>", "<unk_37>", "<unk_38>", "<unk_39>", "<unk_40>", "<unk_41>", "<unk_42>", "<unk_43>", "<unk_44>", "<unk_45>", "<unk_46>", "<unk_47>", "<unk_48>", "<unk_49>", "<unk_50>", "<unk_51>", "<unk_52>", "<unk_53>", "<unk_54>", "<unk_55>", "<unk_56>", "<unk_57>", "<unk_58>", "<unk_59>", "<unk_60>", "<unk_61>", "<unk_62>", "<unk_63>", "<unk_64>", "<unk_65>", "<unk_66>", "<unk_67>", "<unk_68>", "<unk_69>", "<unk_70>", "<unk_71>", "<unk_72>", "<unk_73>", "<unk_74>", "<unk_75>", "<unk_76>", "<unk_77>", "<unk_78>", "<unk_79>", "<unk_80>", "<unk_81>", "<unk_82>", "<unk_83>", "<unk_84>", "<unk_85>", "<unk_86>", "<unk_87>", "<unk_88>", "<unk_89>", "<unk_90>", "<unk_91>", "<unk_92>", "<unk_93>", "<unk_94>", "<unk_95>", "<unk_96>", "<unk_97>", "<unk_98>", "<unk_99>", "<unk_100>", "<unk_101>", "<unk_102>"], "model_max_length": 512, "special_tokens_map_file": null, "full_tokenizer_file": null, "name_or_path": "google/pegasus-xsum", "tokenizer_class": "PegasusTokenizer"}
checkpoint-500/trainer_state.json
DELETED
@@ -1,22 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"best_metric": null,
|
3 |
-
"best_model_checkpoint": null,
|
4 |
-
"epoch": 0.2,
|
5 |
-
"global_step": 500,
|
6 |
-
"is_hyper_param_search": false,
|
7 |
-
"is_local_process_zero": true,
|
8 |
-
"is_world_process_zero": true,
|
9 |
-
"log_history": [
|
10 |
-
{
|
11 |
-
"epoch": 0.2,
|
12 |
-
"learning_rate": 4.666666666666667e-05,
|
13 |
-
"loss": 1.7226,
|
14 |
-
"step": 500
|
15 |
-
}
|
16 |
-
],
|
17 |
-
"max_steps": 7500,
|
18 |
-
"num_train_epochs": 3,
|
19 |
-
"total_flos": 4137735615086592.0,
|
20 |
-
"trial_name": null,
|
21 |
-
"trial_params": null
|
22 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
checkpoint-500/training_args.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:790c0dd74ccf769eee75352a8807625e2b2074b043f85cf2ac0b75a9375b5fc8
-size 3119