Saving weights and logs of step 2500
- .create_config.py.un~ +0 -0
- .rrain.sh.un~ +0 -0
- .run_train.sh.un~ +0 -0
- .run_train.un~ +0 -0
- events.out.tfevents.1734078421.t1v-n-53cd541d-w-35.1080156.0.v2 +3 -0
- flax_model.msgpack +3 -0
- run_train.sh +2 -2
.create_config.py.un~
ADDED
Binary file (2.25 kB)

.rrain.sh.un~
ADDED
Binary file (523 Bytes)

.run_train.sh.un~
ADDED
Binary file (7.51 kB)

.run_train.un~
ADDED
Binary file (1.81 kB)
events.out.tfevents.1734078421.t1v-n-53cd541d-w-35.1080156.0.v2
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c0a7f6357896579784c2fd683291c679d3bd919af4c0329e41ecd08633ef460a
+size 63038
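This events file holds the TensorBoard log written during training (scalars recorded every 500 steps, per --logging_steps below); the diff only shows its Git LFS pointer. A minimal sketch, assuming the file has been pulled locally through LFS, of how the logged scalars could be inspected:

# Sketch: list the scalar series recorded in the uploaded tfevents file.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator(
    "events.out.tfevents.1734078421.t1v-n-53cd541d-w-35.1080156.0.v2"
)
acc.Reload()
for tag in acc.Tags()["scalars"]:
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)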
flax_model.msgpack
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f1adcfdad8a681e08bfe5535021067e1c876db067eda92f40f92ccf9c4c5e63
+size 1421658229
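flax_model.msgpack is likewise stored through Git LFS, so the diff records only the pointer (version, oid, size), not the roughly 1.4 GB of weights. A minimal sketch of loading such a Flax masked-LM checkpoint with transformers, assuming the usual layout with a config.json beside the msgpack file (the repository id below is a placeholder, not the real repo name):

import jax
from transformers import FlaxAutoModelForMaskedLM

# "user/model" is a placeholder; the actual repository id is not part of this commit.
model = FlaxAutoModelForMaskedLM.from_pretrained("user/model")

# Rough sanity check against the ~1.4 GB checkpoint size.
n_params = sum(p.size for p in jax.tree_util.tree_leaves(model.params))
print(f"{n_params:,} parameters")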
run_train.sh
CHANGED
@@ -10,10 +10,10 @@ python run_mlm_flax.py \
 --weight_decay="0.01" \
 --per_device_train_batch_size="64" \
 --per_device_eval_batch_size="64" \
---learning_rate="
+--learning_rate="5e-4" \
 --warmup_steps="1000" \
 --overwrite_output_dir \
---num_train_epochs="
+--num_train_epochs="100" \
 --adam_beta1="0.9" \
 --adam_beta2="0.98" \
 --logging_steps="500" \
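The change sets --learning_rate to "5e-4" and --num_train_epochs to "100"; the surrounding hyperparameters (1000 warmup steps, Adam betas 0.9/0.98, weight decay 0.01, per-device batch size 64) stay as they were. In run_mlm_flax.py-style Flax scripts these flags typically feed an optax learning-rate schedule and AdamW optimizer; a minimal sketch under that assumption (the total step count is a placeholder, not a value from this commit):

import optax

total_train_steps = 250_000  # placeholder; the real count depends on dataset size, batch size and epochs
warmup_steps = 1_000         # --warmup_steps="1000"
peak_lr = 5e-4               # --learning_rate="5e-4"

# Linear warmup to the peak learning rate, then linear decay back to zero.
warmup_fn = optax.linear_schedule(init_value=0.0, end_value=peak_lr, transition_steps=warmup_steps)
decay_fn = optax.linear_schedule(
    init_value=peak_lr, end_value=0.0, transition_steps=total_train_steps - warmup_steps
)
schedule_fn = optax.join_schedules(schedules=[warmup_fn, decay_fn], boundaries=[warmup_steps])

optimizer = optax.adamw(
    learning_rate=schedule_fn,
    b1=0.9,             # --adam_beta1="0.9"
    b2=0.98,            # --adam_beta2="0.98"
    weight_decay=0.01,  # --weight_decay="0.01"
)

With this schedule the learning rate ramps from 0 to 5e-4 over the first 1000 steps and then decays linearly for the rest of training, which is the warmup/decay behaviour the Flax example scripts typically use.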