Muennighoff commited on
Commit
c614ad8
1 Parent(s): 71f6ad1
Files changed (28):
  1. 8b7178b13b/latest +1 -0
  2. 8b7178b13b/sbatch_8b7178b13boscar.sh +165 -0
  3. 8b7178b4b/latest +1 -0
  4. 8b7178b4b/sbatch_8b7178b4boscarfast.sh +165 -0
  5. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683106956.nid006707.9593.0 +3 -0
  6. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683107432.nid005787.130144.0 +3 -0
  7. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683109599.nid006707.32393.0 +3 -0
  8. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683281258.nid006707.21192.0 +3 -0
  9. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683281427.nid006894.32971.0 +3 -0
  10. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683454283.nid007164.5319.0 +3 -0
  11. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683592046.nid006149.118844.0 +3 -0
  12. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683592589.nid007164.88634.0 +3 -0
  13. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683593118.nid006098.62436.0 +3 -0
  14. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683612882.nid006741.26554.0 +3 -0
  15. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683785647.nid006323.77490.0 +3 -0
  16. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683958514.nid006323.54758.0 +3 -0
  17. tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1684114780.nid006977.59774.0 +3 -0
  18. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683112977.nid005787.40411.0 +3 -0
  19. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683281427.nid005787.31743.0 +3 -0
  20. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683454283.nid006741.104068.0 +3 -0
  21. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683592046.nid005796.128057.0 +3 -0
  22. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683592589.nid006741.30742.0 +3 -0
  23. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683593118.nid007164.93037.0 +3 -0
  24. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683613299.nid006147.62108.0 +3 -0
  25. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683786124.nid006037.123724.0 +3 -0
  26. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683958970.nid006741.38508.0 +3 -0
  27. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683959527.nid006741.44115.0 +3 -0
  28. tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683996095.nid007217.70647.0 +3 -0
8b7178b13b/latest ADDED
@@ -0,0 +1 @@
 
 
1
+ global_step84877
8b7178b13b/sbatch_8b7178b13boscar.sh ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
#SBATCH --exclude=nid007542
#SBATCH --nodes=32
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=40
#SBATCH --mem=256G
#SBATCH -p standard-g
#SBATCH -t 48:00:00
#SBATCH --gpus-per-node=mi250:8
#SBATCH --exclusive=user
#SBATCH --hint=nomultithread
#SBATCH --account=project_462000119
#SBATCH -o logs/%j.out
#SBATCH -e logs/%j.err

# Pretrain an ~8.7B-parameter GPT model with Megatron-DeepSpeed on the
# 13B-token OSCAR subset. All per-run artifacts (kill switch, checkpoints,
# tensorboard) are namespaced by $VARIANT.
VARIANT=8b7178b13boscarfast

# If run without sbatch, submit this script to the scheduler and exit.
# ${...:-} keeps the test well-formed (and -u safe) when the var is unset;
# the bare unquoted form only worked by accident via one-argument `test`.
if [ -z "${SLURM_JOB_ID:-}" ]; then
    mkdir -p logs
    sbatch "$0"
    exit
fi

set -euo pipefail

# symlink logs/latest.out and logs/latest.err to this job's log files
ln -f -s "$SLURM_JOB_ID.out" logs/latest.out
ln -f -s "$SLURM_JOB_ID.err" logs/latest.err

KILL_SWITCH_PATH=kill-switch-$VARIANT
CHECKPOINT_PATH=checkpoints_$VARIANT
TENSORBOARD_PATH=tensorboard_$VARIANT

# Data
VOCAB_FILE="gpt2/vocab.json"
MERGE_FILE="gpt2/merges.txt"
#DATA_PATH="/scratch/project_462000119/data/pile/megatron_data/meg-gpt2_pile_text_document"

# Weighted-split path files; the expected contents are shown below each.
TRAIN_DATA_PATH=train13boscar.txt
# "train: 1.0 0:1 /scratch/project_462000119/data/oscar_subsampled/gpt2tok_oscar_en_13B_text_document"
VALID_DATA_PATH=valc4oscar.txt
# "validation_c4: 1.0 0:1 /scratch/project_462000119/data/c4_validation/gpt2tok_c4validation_rerun_text_document" "validation_oscar: 1.0 0:1 /scratch/project_462000119/data/oscar_validation/gpt2tok_oscarvalidation_text_document"


# Parallelism layout: 4-way pipeline x 4-way tensor parallel.
PP_SIZE=4
TP_SIZE=4

MICRO_BATCH_SIZE=1
GRADIENT_ACCUMULATION_STEPS=4
# One task per node, 8 GPUs per node -> world size = GPUs/node * nodes.
WORLD_SIZE=$((SLURM_GPUS_ON_NODE*SLURM_JOB_NUM_NODES))
GLOBAL_BATCH_SIZE=$((MICRO_BATCH_SIZE*WORLD_SIZE*GRADIENT_ACCUMULATION_STEPS))

# Model parameters: PARAM_9293M is defined in model_params.sh as
# (NHIDDEN FFN_HIDDEN_SIZE KV_SIZE NHEADS NLAYERS).
source model_params.sh
MODEL_PARAM=("${PARAM_9293M[@]}")
NHIDDEN=${MODEL_PARAM[0]}
FFN_HIDDEN_SIZE=${MODEL_PARAM[1]}
KV_SIZE=${MODEL_PARAM[2]}
NHEADS=${MODEL_PARAM[3]}
NLAYERS=${MODEL_PARAM[4]}
SEQ_LEN=2048

echo "Model parameters: d_model $NHIDDEN ffw_size $FFN_HIDDEN_SIZE kv_size $KV_SIZE n_heads $NHEADS n_layers $NLAYERS"

SAVE_INTERVAL=5000

# Tokens: 178000000000
# -> Samples: 86914062 (tokens / SEQ_LEN; underscores are accepted by
#    Python's int() in Megatron's argparse)
TRAIN_SAMPLES=86_914_062

OPTIMIZER_ARGS=" \
    --optimizer adam \
    --adam-beta1 0.9 \
    --adam-beta2 0.999 \
    --adam-eps 1e-8 \
    --lr 2e-4 \
    --min-lr 2e-5 \
    --lr-decay-style cosine \
    --lr-decay-samples $TRAIN_SAMPLES \
    --lr-warmup-samples 869_140 \
    --clip-grad 1.0 \
    --weight-decay 1e-1 \
    "

# NOTE(review): --clip-grad 1.0 is passed both here and in OPTIMIZER_ARGS;
# the values agree, so the duplicate is harmless (last occurrence wins).
GPT_ARGS=" \
    --num-layers $NLAYERS \
    --hidden-size $NHIDDEN \
    --num-attention-heads $NHEADS \
    --kv-channels $KV_SIZE \
    --ffn-hidden-size $FFN_HIDDEN_SIZE \
    --seq-length $SEQ_LEN \
    --max-position-embeddings $SEQ_LEN \
    --micro-batch-size $MICRO_BATCH_SIZE \
    --global-batch-size $GLOBAL_BATCH_SIZE \
    --train-samples $TRAIN_SAMPLES \
    --vocab-file $VOCAB_FILE \
    --merge-file $MERGE_FILE \
    --clip-grad 1.0 \
    --kill-switch-path $KILL_SWITCH_PATH \
    --bf16 \
    $OPTIMIZER_ARGS \
    "

OUTPUT_ARGS=" \
    --log-interval 10 \
    --save-interval $SAVE_INTERVAL \
    --eval-interval 1000 \
    --eval-iters 1 \
    --tensorboard-dir $TENSORBOARD_PATH \
    --tensorboard-queue-size 5 \
    --log-timers-to-tensorboard \
    --log-batch-size-to-tensorboard \
    --log-validation-ppl-to-tensorboard \
    "

# ZeRO stage 0: DeepSpeed engine without optimizer-state partitioning.
ZERO_STAGE=0

mkdir -p ds_configs
DS_CONFIG_PATH="ds_configs/$SLURM_JOB_ID.json"

# Write a per-job DeepSpeed config (batch sizes must match the CLI args).
cat <<EOF > "$DS_CONFIG_PATH"
{
    "train_micro_batch_size_per_gpu": $MICRO_BATCH_SIZE,
    "train_batch_size": $GLOBAL_BATCH_SIZE,
    "gradient_clipping": 1.0,
    "zero_optimization": {
        "stage": $ZERO_STAGE
    },
    "bf16": {
        "enabled": true
    },
    "steps_per_print": 2000,
    "wall_clock_breakdown": false
}
EOF

DEEPSPEED_ARGS=" \
    --deepspeed \
    --deepspeed_config $DS_CONFIG_PATH \
    --zero-stage $ZERO_STAGE \
    "

CMD=" \
    Megatron-DeepSpeed/pretrain_gpt.py \
    --tensor-model-parallel-size $TP_SIZE \
    --pipeline-model-parallel-size $PP_SIZE \
    $GPT_ARGS \
    $OUTPUT_ARGS \
    --save $CHECKPOINT_PATH \
    --load $CHECKPOINT_PATH \
    --train-weighted-split-paths-path $TRAIN_DATA_PATH \
    --valid-weighted-split-paths-path $VALID_DATA_PATH \
    --data-impl mmap \
    $DEEPSPEED_ARGS \
    "

echo "$CMD"

echo "START $SLURM_JOBID: $(date)"

# bash launch_srun.sh $CMD
# $CMD is intentionally unquoted: it must word-split into separate argv
# entries for launch.sh.
srun --label launch.sh $CMD

echo "END $SLURM_JOBID: $(date)"
8b7178b4b/latest ADDED
@@ -0,0 +1 @@
 
 
1
+ global_step84877
8b7178b4b/sbatch_8b7178b4boscarfast.sh ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
#SBATCH --exclude=nid007542
#SBATCH --nodes=32
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=40
#SBATCH --mem=256G
#SBATCH -p standard-g
#SBATCH -t 48:00:00
#SBATCH --gpus-per-node=mi250:8
#SBATCH --exclusive=user
#SBATCH --hint=nomultithread
#SBATCH --account=project_462000119
#SBATCH -o logs/%j.out
#SBATCH -e logs/%j.err

# Pretrain an ~8.7B-parameter GPT model with Megatron-DeepSpeed on the
# 4B-token OSCAR subset. All per-run artifacts (kill switch, checkpoints,
# tensorboard) are namespaced by $VARIANT.
VARIANT=8b7178b4boscarfast

# If run without sbatch, submit this script to the scheduler and exit.
# ${...:-} keeps the test well-formed (and -u safe) when the var is unset;
# the bare unquoted form only worked by accident via one-argument `test`.
if [ -z "${SLURM_JOB_ID:-}" ]; then
    mkdir -p logs
    sbatch "$0"
    exit
fi

set -euo pipefail

# symlink logs/latest.out and logs/latest.err to this job's log files
ln -f -s "$SLURM_JOB_ID.out" logs/latest.out
ln -f -s "$SLURM_JOB_ID.err" logs/latest.err

KILL_SWITCH_PATH=kill-switch-$VARIANT
CHECKPOINT_PATH=checkpoints_$VARIANT
TENSORBOARD_PATH=tensorboard_$VARIANT

# Data
VOCAB_FILE="gpt2/vocab.json"
MERGE_FILE="gpt2/merges.txt"
#DATA_PATH="/scratch/project_462000119/data/pile/megatron_data/meg-gpt2_pile_text_document"

# Weighted-split path files; the expected contents are shown below each.
TRAIN_DATA_PATH=train4boscar.txt
# "train: 1.0 0:1 /scratch/project_462000119/data/oscar_subsampled/gpt2tok_oscar_en_4B_text_document"
VALID_DATA_PATH=valc4oscar.txt
# "validation_c4: 1.0 0:1 /scratch/project_462000119/data/c4_validation/gpt2tok_c4validation_rerun_text_document" "validation_oscar: 1.0 0:1 /scratch/project_462000119/data/oscar_validation/gpt2tok_oscarvalidation_text_document"


# Parallelism layout: 4-way pipeline x 4-way tensor parallel.
PP_SIZE=4
TP_SIZE=4

MICRO_BATCH_SIZE=1
GRADIENT_ACCUMULATION_STEPS=4
# One task per node, 8 GPUs per node -> world size = GPUs/node * nodes.
WORLD_SIZE=$((SLURM_GPUS_ON_NODE*SLURM_JOB_NUM_NODES))
GLOBAL_BATCH_SIZE=$((MICRO_BATCH_SIZE*WORLD_SIZE*GRADIENT_ACCUMULATION_STEPS))

# Model parameters: PARAM_9293M is defined in model_params.sh as
# (NHIDDEN FFN_HIDDEN_SIZE KV_SIZE NHEADS NLAYERS).
source model_params.sh
MODEL_PARAM=("${PARAM_9293M[@]}")
NHIDDEN=${MODEL_PARAM[0]}
FFN_HIDDEN_SIZE=${MODEL_PARAM[1]}
KV_SIZE=${MODEL_PARAM[2]}
NHEADS=${MODEL_PARAM[3]}
NLAYERS=${MODEL_PARAM[4]}
SEQ_LEN=2048

echo "Model parameters: d_model $NHIDDEN ffw_size $FFN_HIDDEN_SIZE kv_size $KV_SIZE n_heads $NHEADS n_layers $NLAYERS"

SAVE_INTERVAL=5000

# Tokens: 178000000000
# -> Samples: 86914062 (tokens / SEQ_LEN; underscores are accepted by
#    Python's int() in Megatron's argparse)
TRAIN_SAMPLES=86_914_062

OPTIMIZER_ARGS=" \
    --optimizer adam \
    --adam-beta1 0.9 \
    --adam-beta2 0.999 \
    --adam-eps 1e-8 \
    --lr 2e-4 \
    --min-lr 2e-5 \
    --lr-decay-style cosine \
    --lr-decay-samples $TRAIN_SAMPLES \
    --lr-warmup-samples 869_140 \
    --clip-grad 1.0 \
    --weight-decay 1e-1 \
    "

# NOTE(review): --clip-grad 1.0 is passed both here and in OPTIMIZER_ARGS;
# the values agree, so the duplicate is harmless (last occurrence wins).
GPT_ARGS=" \
    --num-layers $NLAYERS \
    --hidden-size $NHIDDEN \
    --num-attention-heads $NHEADS \
    --kv-channels $KV_SIZE \
    --ffn-hidden-size $FFN_HIDDEN_SIZE \
    --seq-length $SEQ_LEN \
    --max-position-embeddings $SEQ_LEN \
    --micro-batch-size $MICRO_BATCH_SIZE \
    --global-batch-size $GLOBAL_BATCH_SIZE \
    --train-samples $TRAIN_SAMPLES \
    --vocab-file $VOCAB_FILE \
    --merge-file $MERGE_FILE \
    --clip-grad 1.0 \
    --kill-switch-path $KILL_SWITCH_PATH \
    --bf16 \
    $OPTIMIZER_ARGS \
    "

OUTPUT_ARGS=" \
    --log-interval 10 \
    --save-interval $SAVE_INTERVAL \
    --eval-interval 1000 \
    --eval-iters 1 \
    --tensorboard-dir $TENSORBOARD_PATH \
    --tensorboard-queue-size 5 \
    --log-timers-to-tensorboard \
    --log-batch-size-to-tensorboard \
    --log-validation-ppl-to-tensorboard \
    "

# ZeRO stage 0: DeepSpeed engine without optimizer-state partitioning.
ZERO_STAGE=0

mkdir -p ds_configs
DS_CONFIG_PATH="ds_configs/$SLURM_JOB_ID.json"

# Write a per-job DeepSpeed config (batch sizes must match the CLI args).
cat <<EOF > "$DS_CONFIG_PATH"
{
    "train_micro_batch_size_per_gpu": $MICRO_BATCH_SIZE,
    "train_batch_size": $GLOBAL_BATCH_SIZE,
    "gradient_clipping": 1.0,
    "zero_optimization": {
        "stage": $ZERO_STAGE
    },
    "bf16": {
        "enabled": true
    },
    "steps_per_print": 2000,
    "wall_clock_breakdown": false
}
EOF

DEEPSPEED_ARGS=" \
    --deepspeed \
    --deepspeed_config $DS_CONFIG_PATH \
    --zero-stage $ZERO_STAGE \
    "

CMD=" \
    Megatron-DeepSpeed/pretrain_gpt.py \
    --tensor-model-parallel-size $TP_SIZE \
    --pipeline-model-parallel-size $PP_SIZE \
    $GPT_ARGS \
    $OUTPUT_ARGS \
    --save $CHECKPOINT_PATH \
    --load $CHECKPOINT_PATH \
    --train-weighted-split-paths-path $TRAIN_DATA_PATH \
    --valid-weighted-split-paths-path $VALID_DATA_PATH \
    --data-impl mmap \
    $DEEPSPEED_ARGS \
    "

echo "$CMD"

echo "START $SLURM_JOBID: $(date)"

# bash launch_srun.sh $CMD
# $CMD is intentionally unquoted: it must word-split into separate argv
# entries for launch.sh.
srun --label launch.sh $CMD

echo "END $SLURM_JOBID: $(date)"
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683106956.nid006707.9593.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7e893c6f8cd6b9254e666a5f56b133c73424d8d29f41c6c894630cd9b9ac4cdb
3
+ size 19755
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683107432.nid005787.130144.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2ad38c17f3d3cb44aa498c57d0327c308634c3b2ed23e9c138d2abde8f190410
3
+ size 40795
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683109599.nid006707.32393.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:94a1a7dbd10819726494aac074a71d42663fc52f0dbf972a185e4e7e2ed5185b
3
+ size 29185345
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683281258.nid006707.21192.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0dd6ba7de32097babe9a321bd7558e80cab6b03dd2209a51552ff8f051358911
3
+ size 16424
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683281427.nid006894.32971.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cb903a5386b75f70944c6d2d0db1229db7fb7f7d2605e9ebf3e0adc2afa8ccd6
3
+ size 29657137
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683454283.nid007164.5319.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8d9457161e93645879cfe2a0ff26a89547cdb78130fcdc94b431913adc9b3242
3
+ size 23308124
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683592046.nid006149.118844.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d333280fe823c28c5f0710d7f3b1110c23c3692d8146f85fa318bb16157de141
3
+ size 40
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683592589.nid007164.88634.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8631896de71a5d7dd32eed453f8dbe5aa656c0e323795d476f91fc58832f994d
3
+ size 40
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683593118.nid006098.62436.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e6ae7c8294ea7a317be67d160b391cb405ed1317317247abc2be0e41d9e41f77
3
+ size 40
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683612882.nid006741.26554.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:af87514c3478c4dfa935a505442797bc60d6b4f5b2e3b37332339149e550d6e8
3
+ size 29699616
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683785647.nid006323.77490.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c9e601a0fe48f8d81e9d59fe272e3a93c277991732f0b46bac8d6dbe36300383
3
+ size 29573988
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1683958514.nid006323.54758.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:99257b670cde553aa7e15ec4509795b10bd1d5fbca3b17f6ae54e15daaa586b8
3
+ size 26786302
tensorboard/tensorboard_8b7178b13boscarfast/events.out.tfevents.1684114780.nid006977.59774.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0a673ebb927f39e4f147335b77256b39232831dca9f56c55ab009343779d8b25
3
+ size 22713
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683112977.nid005787.40411.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3fdba9ef8c91b1f9203eb98e0df5211026ea9370bc4aa0a5ead1cc7c67708acb
3
+ size 28484364
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683281427.nid005787.31743.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:83a939d5c8f70009d6246c1454a3508879b68a10bfe0a9c079bdfa196cc5c24e
3
+ size 29505891
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683454283.nid006741.104068.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6d7babd0282671f7e4885cf67a0ee42aa93b9d8703dcd227be3e342152d9d7e4
3
+ size 23450413
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683592046.nid005796.128057.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e8caeb1e1863c1ed0228f51c150befa3cc22146a0da95d6322a099e35654bd51
3
+ size 40
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683592589.nid006741.30742.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ecb1a6847078336bc126126134fc8a026a933d2eebec66f03ef8555d62ec29d5
3
+ size 40
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683593118.nid007164.93037.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6e464cfbb5b73770536ba57ed5eeaec2d68bbee7b47619c15221ae715bbc1ae1
3
+ size 40
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683613299.nid006147.62108.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3d510dd5300919b4db2aeae70128a34dc6627851457b09d80a3a1d2e776b8abd
3
+ size 29511301
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683786124.nid006037.123724.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3554b82e9078b5db6a43af0a7f87f96bacc3e64f6495c3ac09187f0df9c757b8
3
+ size 29725269
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683958970.nid006741.38508.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4b5f528b7c090981c5575d99b5619a32068396bfc1ebf0c45d3c84be5cd4f152
3
+ size 20660
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683959527.nid006741.44115.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a1b126de44d208063a140ede14bc1bdbebfe7d23f4499367c3f174b57285fe79
3
+ size 1818772
tensorboard/tensorboard_8b7178b4boscarfast/events.out.tfevents.1683996095.nid007217.70647.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bd03c72d5eb7ac001ae3ff3e71943850842d7810831a3e400fcb8be0de9c57ac
3
+ size 26786297