#!/bin/bash
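# Build Megatron-DeepSpeed mmap datasets from xP3 Russian JSONL files:
# merge the shards, deduplicate, shuffle, then tokenize inputs and targets separately.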

set -x -e

source /scratch/project_462000119/muennighoff/nov-2022-bettercom/venv/bin/activate
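
# Use only locally cached HF datasets and tokenizers (no hub downloads).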
export HF_DATASETS_OFFLINE=1
export TRANSFORMERS_OFFLINE=1

MEGATRON_DEEPSPEED_REPO=/scratch/project_462000119/muennighoff/nov-2022-mtf/Megatron-DeepSpeed
TOKENIZER_PATH="bigscience/tokenizer"
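
# Full xP3 language list, kept for reference; it is overridden below for this run.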
LANGS=(
ak
ar
as
bm
bn
ca
code
en
es
eu
fon
fr
gu
hi
id
ig
ki
kn
lg
ln
ml
mr
ne
nso
ny
or
pa
pt
rn
rw
sn
st
sw
ta
te
tn
ts
tum
tw
ur
vi
wo
xh
yo
zh
zu
)
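
# Override: this run only preprocesses Russian.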
LANGS=(
ru
)

DATA_PATH=/scratch/project_462000119/muennighoff/nov-2022-mtf/xp3ru/ru
OUTPUT=/scratch/project_462000119/muennighoff/nov-2022-mtf/xp3rumegds

mkdir -p "$OUTPUT"
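
# For each language: merge the raw JSONL shards, deduplicate, shuffle,
# then tokenize inputs and targets into Megatron-DeepSpeed mmap datasets.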
for LANG in "${LANGS[@]}"; do
    cd "$DATA_PATH"

    # Note: merged_*.jsonl files left over from a previous run also match this glob.
    cat *.jsonl > merged_dups_$LANG.jsonl
    sort -u merged_dups_$LANG.jsonl | shuf > merged_$LANG.jsonl

    cd "$MEGATRON_DEEPSPEED_REPO"
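
    # Tokenize the prompt side (JSON key "inputs").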
    python tools/preprocess_data.py \
        --input $DATA_PATH/merged_$LANG.jsonl \
        --output-prefix $OUTPUT/xp3_$LANG \
        --dataset-impl mmap \
        --json-key inputs \
        --tokenizer-type PretrainedFromHF \
        --tokenizer-name-or-path $TOKENIZER_PATH \
        --workers 60
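
    # Tokenize the target side (JSON key "targets"); --append-eod adds an
    # end-of-document token and --prepend-space puts a leading space before each target.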
    python tools/preprocess_data.py \
        --input $DATA_PATH/merged_$LANG.jsonl \
        --output-prefix $OUTPUT/xp3_$LANG \
        --dataset-impl mmap \
        --json-key targets \
        --tokenizer-type PretrainedFromHF \
        --tokenizer-name-or-path $TOKENIZER_PATH \
        --append-eod \
        --prepend-space \
        --workers 60
done