save more often...
Browse files
batch_finetune_eu_jav_base.sh
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
PROJECT_DIR=${HOME}"/models/eu-jav-categorisation"
|
2 |
export PYTHONPATH=${PROJECT_DIR}
|
3 |
INITIAL_CHECKPOINT_PATH=\"gs://t5-data/pretrained_models/t5x/mt5_base/checkpoint_1000000\"
|
4 |
-
TRAIN_STEPS=
|
5 |
|
6 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_base_v1\" &&
|
7 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_base_v2\" &&
|
|
|
1 |
PROJECT_DIR=${HOME}"/models/eu-jav-categorisation"
|
2 |
export PYTHONPATH=${PROJECT_DIR}
|
3 |
INITIAL_CHECKPOINT_PATH=\"gs://t5-data/pretrained_models/t5x/mt5_base/checkpoint_1000000\"
|
4 |
+
TRAIN_STEPS=1001000
|
5 |
|
6 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_base_v1\" &&
|
7 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_base.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_base_v2\" &&
|
batch_finetune_eu_jav_large.sh
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
PROJECT_DIR=${HOME}"/models/eu-jav-categorisation"
|
2 |
export PYTHONPATH=${PROJECT_DIR}
|
3 |
INITIAL_CHECKPOINT_PATH=\"gs://t5-data/pretrained_models/t5x/mt5_large/checkpoint_1000000\"
|
4 |
-
TRAIN_STEPS=
|
5 |
|
6 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_large.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_large_v1\" &&
|
7 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_large.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_large_v2\" &&
|
|
|
1 |
PROJECT_DIR=${HOME}"/models/eu-jav-categorisation"
|
2 |
export PYTHONPATH=${PROJECT_DIR}
|
3 |
INITIAL_CHECKPOINT_PATH=\"gs://t5-data/pretrained_models/t5x/mt5_large/checkpoint_1000000\"
|
4 |
+
TRAIN_STEPS=1001000
|
5 |
|
6 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_large.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_large_v1\" &&
|
7 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_large.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_large_v2\" &&
|
batch_finetune_eu_jav_small.sh
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
PROJECT_DIR=${HOME}"/models/eu-jav-categorisation"
|
2 |
export PYTHONPATH=${PROJECT_DIR}
|
3 |
INITIAL_CHECKPOINT_PATH=\"gs://t5-data/pretrained_models/t5x/mt5_small/checkpoint_1000000\"
|
4 |
-
TRAIN_STEPS=
|
5 |
|
6 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_small.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_small_v1\" &&
|
7 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_small.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_small_v2\" &&
|
|
|
1 |
PROJECT_DIR=${HOME}"/models/eu-jav-categorisation"
|
2 |
export PYTHONPATH=${PROJECT_DIR}
|
3 |
INITIAL_CHECKPOINT_PATH=\"gs://t5-data/pretrained_models/t5x/mt5_small/checkpoint_1000000\"
|
4 |
+
TRAIN_STEPS=1001000
|
5 |
|
6 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_small.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_small_v1\" &&
|
7 |
python3 ../../t5x/t5x/train.py --gin_search_paths="./" --gin.TRAIN_STEPS=${TRAIN_STEPS} --gin_file="finetune_classification_small.gin" --gin.INITIAL_CHECKPOINT_PATH=${INITIAL_CHECKPOINT_PATH} --gin.MIXTURE_OR_TASK_NAME=\"classify_tweets\" --gin.MODEL_DIR=\"gs://eu-jav-t5x/finetuned/italian_tweets/classify_tweets_small_v2\" &&
|
finetune_classification_base.gin
CHANGED
@@ -24,7 +24,7 @@ infer_eval/utils.DatasetConfig:
|
|
24 |
|
25 |
#Saving every 1000 steps
|
26 |
utils.SaveCheckpointConfig:
|
27 |
-
period =
|
28 |
|
29 |
|
30 |
# Pere: Only necessary if we load a t5 model. We can start with an t5x model here
|
|
|
24 |
|
25 |
#Saving every 100 steps
|
26 |
utils.SaveCheckpointConfig:
|
27 |
+
period = 100
|
28 |
|
29 |
|
30 |
# Pere: Only necessary if we load a t5 model. We can start with an t5x model here
|
finetune_classification_large.gin
CHANGED
@@ -25,7 +25,7 @@ infer_eval/utils.DatasetConfig:
|
|
25 |
|
26 |
#Saving every 1000 steps
|
27 |
utils.SaveCheckpointConfig:
|
28 |
-
period =
|
29 |
|
30 |
|
31 |
# Pere: Only necessary if we load a t5 model. We can start with an t5x model here
|
|
|
25 |
|
26 |
#Saving every 50 steps
|
27 |
utils.SaveCheckpointConfig:
|
28 |
+
period = 50
|
29 |
|
30 |
|
31 |
# Pere: Only necessary if we load a t5 model. We can start with an t5x model here
|
finetune_classification_small.gin
CHANGED
@@ -24,7 +24,7 @@ infer_eval/utils.DatasetConfig:
|
|
24 |
|
25 |
#Saving every 1000 steps
|
26 |
utils.SaveCheckpointConfig:
|
27 |
-
period =
|
28 |
|
29 |
|
30 |
# Pere: Only necessary if we load a t5 model. We can start with an t5x model here
|
|
|
24 |
|
25 |
#Saving every 50 steps
|
26 |
utils.SaveCheckpointConfig:
|
27 |
+
period = 50
|
28 |
|
29 |
|
30 |
# Pere: Only necessary if we load a t5 model. We can start with an t5x model here
|
tasks.py
CHANGED
@@ -60,7 +60,7 @@ seqio.TaskRegistry.add(
|
|
60 |
categorise_preprocessor,
|
61 |
seqio.preprocessors.tokenize_and_append_eos,
|
62 |
],
|
63 |
-
metric_fns=[metrics.accuracy,my_metrics.f1_macro
|
64 |
output_features=DEFAULT_OUTPUT_FEATURES,
|
65 |
)
|
66 |
|
|
|
60 |
categorise_preprocessor,
|
61 |
seqio.preprocessors.tokenize_and_append_eos,
|
62 |
],
|
63 |
+
metric_fns=[metrics.accuracy,my_metrics.f1_macro],
|
64 |
output_features=DEFAULT_OUTPUT_FEATURES,
|
65 |
)
|
66 |
|