updates

Files changed:
- .gitignore: +161 -0
- __pycache__/tasks.cpython-38.pyc: +0 -0 (binary, deleted)
- finetuning_categorisation_xl.gin: +0 -40
- finetuning_categorisation_xxl.gin: +0 -40
- train_xl.sh: +0 -11
- train_xxl.sh: +0 -11

.gitignore (ADDED)
@@ -0,0 +1,161 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
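This commit pairs the new __pycache__/ rule with the removal of the stray compiled file below. A quick way to confirm the rule now covers that path is git check-ignore; a sketch, where the exact output line assumes the rule sits on line 2 of the new .gitignore as shown above:

    # Ask git which rule (file:line:pattern) ignores the deleted .pyc path.
    git check-ignore -v __pycache__/tasks.cpython-38.pyc
    # expected output, roughly: .gitignore:2:__pycache__/  __pycache__/tasks.cpython-38.pyc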
__pycache__/tasks.cpython-38.pyc (DELETED)
Binary file (1.87 kB)
finetuning_categorisation_xl.gin (DELETED)
@@ -1,40 +0,0 @@
-from __gin__ import dynamic_registration
-import tasks
-
-import __main__ as train_script
-from t5.data import mixtures
-from t5x import models
-from t5x import partitioning
-from t5x import utils
-
-include "t5x/examples/t5/mt5/xl.gin"
-include "t5x/configs/runs/finetune.gin"
-
-MIXTURE_OR_TASK_NAME = "categorise"
-TASK_FEATURE_LENGTHS = {"inputs": 256, "targets": 256}
-TRAIN_STEPS = 1_010_000 # 1000000 pre-trained steps + 10000 fine-tuning steps.
-USE_CACHED_TASKS = False
-DROPOUT_RATE = 0.0
-RANDOM_SEED = 0
-
-
-# Pere: Only necessary if we load a t5 model. We can start with an t5x model here
-# `LOSS_NORMALIZING_FACTOR`: When fine-tuning a model that was pre-trained
-# using Mesh Tensorflow (e.g. the public T5 / mT5 / ByT5 models), this should be
-# set to `pretraining batch_size` * `target_token_length`. For T5 and T5.1.1:
-# `2048 * 114`. For mT5: `1024 * 229`. For ByT5: `1024 * 189`.
-#LOSS_NORMALIZING_FACTOR = 234496
-
-INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/t5x/mt5_xl/checkpoint_1000000"
-
-#train_script.train:
-# eval_period = 500
-# partitioner = @partitioning.ModelBasedPjitPartitioner()
-# partitioning.ModelBasedPjitPartitioner.num_partitions = 2
-
-# `num_decodes` is equivalent to a beam size in a beam search decoding.
-models.EncoderDecoderModel.predict_batch_with_aux.num_decodes = 4
-
-#mesh_transformer.learning_rate_schedules.constant_learning_rate.learning_rate = 0.0005
-#run.learning_rate_schedule = @learning_rate_schedules.constant_learning_rate
-
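For reference, the commented-out LOSS_NORMALIZING_FACTOR = 234496 above is the mT5 case of the formula quoted in the file (pretraining batch_size * target_token_length). A minimal sketch of the arithmetic in shell:

    # pretraining batch_size * target_token_length, per the comment in the gin file:
    echo $((2048 * 114))   # T5 / T5.1.1 -> 233472
    echo $((1024 * 229))   # mT5 -> 234496, the value left commented out above
    echo $((1024 * 189))   # ByT5 -> 193536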
finetuning_categorisation_xxl.gin (DELETED)
@@ -1,40 +0,0 @@
-from __gin__ import dynamic_registration
-import tasks
-
-import __main__ as train_script
-from t5.data import mixtures
-from t5x import models
-from t5x import partitioning
-from t5x import utils
-
-include "t5x/examples/t5/mt5/xxl.gin"
-include "t5x/configs/runs/finetune.gin"
-
-MIXTURE_OR_TASK_NAME = "categorise"
-TASK_FEATURE_LENGTHS = {"inputs": 96, "targets": 4}
-TRAIN_STEPS = 1_010_000 # 1000000 pre-trained steps + 10000 fine-tuning steps.
-USE_CACHED_TASKS = False
-DROPOUT_RATE = 0.0
-RANDOM_SEED = 0
-BATCH_SIZE = 16
-
-# Pere: Only necessary if we load a t5 model. We can start with an t5x model here
-# `LOSS_NORMALIZING_FACTOR`: When fine-tuning a model that was pre-trained
-# using Mesh Tensorflow (e.g. the public T5 / mT5 / ByT5 models), this should be
-# set to `pretraining batch_size` * `target_token_length`. For T5 and T5.1.1:
-# `2048 * 114`. For mT5: `1024 * 229`. For ByT5: `1024 * 189`.
-#LOSS_NORMALIZING_FACTOR = 234496
-
-INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/t5x/mt5_xxl/checkpoint_1000000"
-
-#train_script.train:
-# eval_period = 500
-# partitioner = @partitioning.ModelBasedPjitPartitioner()
-partitioning.PjitPartitioner.num_partitions = 1
-
-# `num_decodes` is equivalent to a beam size in a beam search decoding.
-models.EncoderDecoderModel.predict_batch_with_aux.num_decodes = 4
-
-#mesh_transformer.learning_rate_schedules.constant_learning_rate.learning_rate = 0.0005
-#run.learning_rate_schedule = @learning_rate_schedules.constant_learning_rate
-
train_xl.sh (DELETED)
@@ -1,11 +0,0 @@
-PROJECT_DIR=${HOME}"/models/eu-jav-categorisation"
-T5X_DIR="../../t5x" # directory where the t5x is cloned.
-#Needs to be updated when moving to tpu-v4 it should then be in another zone
-MODEL_DIR="gs://nb-t5x-us-central2/eujav_xl"
-export PYTHONPATH=${PROJECT_DIR}
-
-python3 ${T5X_DIR}/t5x/train.py \
-  --gin_search_paths=${PROJECT_DIR} \
-  --gin_file="finetuning_categorisation_xl.gin" \
-  --gin.MODEL_DIR="'${MODEL_DIR}'"
-
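The script above must be run from a working directory where the relative T5X_DIR resolves, and further gin bindings can be overridden at launch with the same --gin.<BINDING> pattern the script already uses. A sketch under those assumptions, with --gin.TRAIN_STEPS as a purely illustrative override:

    # Assumed layout: the project checkout with t5x cloned two levels up at ../../t5x.
    cd ${HOME}/models/eu-jav-categorisation
    bash train_xl.sh
    # Or append extra overrides to the train.py invocation, e.g.:
    #   --gin.TRAIN_STEPS=1010000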
train_xxl.sh (DELETED)
@@ -1,11 +0,0 @@
-PROJECT_DIR=${HOME}"/models/eu-jav-categorisation"
-T5X_DIR="../../t5x" # directory where the t5x is cloned.
-#Needs to be updated when moving to tpu-v4 it should then be in another zone
-MODEL_DIR="gs://nb-t5x-us-central2/eujav_xxl"
-export PYTHONPATH=${PROJECT_DIR}
-
-python3 ${T5X_DIR}/t5x/train.py \
-  --gin_search_paths=${PROJECT_DIR} \
-  --gin_file="finetuning_categorisation_xxl.gin" \
-  --gin.MODEL_DIR="'${MODEL_DIR}'"
-