0xsuid committed on
Commit 6c7a1dc · 1 Parent(s): ff29165

Upload experiments/2022-11-05-273b9dce0e50fad6fa8b725ac92e9f951cbba613877a10428c0dbf3c200b8d13 with huggingface_hub

experiments/2022-11-05-273b9dce0e50fad6fa8b725ac92e9f951cbba613877a10428c0dbf3c200b8d13/configs.json ADDED
@@ -0,0 +1,25 @@
+ {
+   "output_dir": "./results",
+   "evaluation_strategy": "no",
+   "do_eval": false,
+   "eval_steps": 0,
+   "log_level": "error",
+   "logging_first_step": true,
+   "logging_steps": 5,
+   "logging_dir": "./logs",
+   "save_steps": 200,
+   "save_total_limit": 2,
+   "num_train_epochs": 5,
+   "per_device_train_batch_size": 8,
+   "optim": "adamw_torch",
+   "gradient_accumulation_steps": 8,
+   "dataloader_drop_last": true,
+   "warmup_steps": 400,
+   "weight_decay": 0.01,
+   "learning_rate": 5e-05,
+   "fp16": true,
+   "total_gpus": 1,
+   "v_cpus": 6,
+   "total_memory_in_gb": 24048.78515625,
+   "dataset_limit": 0
+ }
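This configs.json mixes Trainer hyperparameters with machine bookkeeping. A minimal sketch of feeding it to Hugging Face TrainingArguments, assuming the last four keys (total_gpus, v_cpus, total_memory_in_gb, dataset_limit) are experiment metadata that must be filtered out first, since TrainingArguments rejects unknown keyword arguments:

import json

from transformers import TrainingArguments

with open("configs.json") as f:  # path relative to the experiment directory
    config = json.load(f)

# Drop the keys that describe the machine/run rather than training itself.
METADATA_KEYS = {"total_gpus", "v_cpus", "total_memory_in_gb", "dataset_limit"}
args = TrainingArguments(
    **{k: v for k, v in config.items() if k not in METADATA_KEYS}
)

print(args.learning_rate)  # 5e-05

Note the effective batch size implied here: per_device_train_batch_size 8 times gradient_accumulation_steps 8 on 1 GPU gives 64 examples per optimizer step.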
experiments/2022-11-05-273b9dce0e50fad6fa8b725ac92e9f951cbba613877a10428c0dbf3c200b8d13/final_checkpoint/config.json ADDED
@@ -0,0 +1,54 @@
+ {
+   "_name_or_path": "EleutherAI/gpt-neo-125M",
+   "activation_function": "gelu_new",
+   "architectures": [
+     "GPTNeoForCausalLM"
+   ],
+   "attention_dropout": 0,
+   "attention_layers": [
+     "global",
+     "local",
+     "global",
+     "local",
+     "global",
+     "local",
+     "global",
+     "local",
+     "global",
+     "local",
+     "global",
+     "local"
+   ],
+   "attention_types": [
+     [
+       [
+         "global",
+         "local"
+       ],
+       6
+     ]
+   ],
+   "bos_token_id": 50256,
+   "embed_dropout": 0,
+   "eos_token_id": 50256,
+   "gradient_checkpointing": false,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": null,
+   "layer_norm_epsilon": 1e-05,
+   "max_position_embeddings": 2048,
+   "model_type": "gpt_neo",
+   "num_heads": 12,
+   "num_layers": 12,
+   "resid_dropout": 0,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.23.1",
+   "use_cache": true,
+   "vocab_size": 50259,
+   "window_size": 256
+ }
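This config.json describes the saved model: GPT-Neo-125M with the usual alternating global/local attention pattern, and a vocabulary extended from GPT-2's 50257 tokens to 50259. A minimal sketch of reloading the checkpoint; the prompt is an arbitrary placeholder, and it assumes the run's tokenizer (with the two extra tokens) was saved next to the weights, since tokenizer files are not part of this commit:

from transformers import AutoTokenizer, GPTNeoForCausalLM

CKPT = ("experiments/2022-11-05-273b9dce0e50fad6fa8b725ac92e9f951cbba"
        "613877a10428c0dbf3c200b8d13/final_checkpoint")

# Reads config.json and pytorch_model.bin from the checkpoint directory.
model = GPTNeoForCausalLM.from_pretrained(CKPT)
# Assumption: a matching tokenizer lives in CKPT; the stock
# EleutherAI/gpt-neo-125M tokenizer only covers 50257 of the 50259 ids.
tokenizer = AutoTokenizer.from_pretrained(CKPT)

inputs = tokenizer("def add(a, b):", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0]))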
experiments/2022-11-05-273b9dce0e50fad6fa8b725ac92e9f951cbba613877a10428c0dbf3c200b8d13/final_checkpoint/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a6bbc41b5b38da4d26742decfbc2205af3eddfb559e640f451d740408a390bd
+ size 551191249
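The pytorch_model.bin entry above is not the weights themselves but a git-lfs pointer in the standard three-field format: a spec version, the sha256 of the real blob, and its size in bytes (551191249, about 551 MB, i.e. roughly 138M float32 values). A minimal sketch of parsing such a pointer file:

def parse_lfs_pointer(path: str) -> dict:
    """Parse a git-lfs pointer file into its key/value fields."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = parse_lfs_pointer("final_checkpoint/pytorch_model.bin")
print(ptr["oid"])                    # sha256:1a6bbc41...
print(int(ptr["size"]) / 1e6, "MB")  # ~551 MB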
experiments/2022-11-05-273b9dce0e50fad6fa8b725ac92e9f951cbba613877a10428c0dbf3c200b8d13/logs/1667604497.985131/events.out.tfevents.1667604497.recd68n2rbwktkbnt.2943.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d874424d6d09c79d93229a51822b0de75a206f2439680aef9150f5994ccee69b
+ size 5413
experiments/2022-11-05-273b9dce0e50fad6fa8b725ac92e9f951cbba613877a10428c0dbf3c200b8d13/logs/events.out.tfevents.1667604497.recd68n2rbwktkbnt.2943.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f20660ebda82f793e8c2b50856ade5e5dba0dfbf6a2f29606ee27603f1f5ae32
+ size 291940
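The two tfevents entries are TensorBoard logs from the run, also stored via git-lfs; the larger 291940-byte file likely holds the training scalars and the small timestamped one the hyperparameter dump. A minimal sketch of inspecting them once fetched, using tensorboard's EventAccumulator; the "train/loss" tag is an assumption based on how the transformers TensorBoard callback names Trainer logs:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

LOG_DIR = ("experiments/2022-11-05-273b9dce0e50fad6fa8b725ac92e9f951cbba"
           "613877a10428c0dbf3c200b8d13/logs")

acc = EventAccumulator(LOG_DIR)
acc.Reload()  # scan the event files on disk

print(acc.Tags()["scalars"])             # discover which scalar tags were logged
for event in acc.Scalars("train/loss"):  # assumed tag name
    print(event.step, event.value)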
experiments/2022-11-05-273b9dce0e50fad6fa8b725ac92e9f951cbba613877a10428c0dbf3c200b8d13/output.log ADDED
The diff for this file is too large to render. See raw diff