File size: 992 Bytes
6f9afb3
3c50ea6
c3ae139
aa4d81d
 
 
2fe1133
aa4d81d
 
 
 
c15b9df
 
 
aa4d81d
 
a9c5d2c
 
6f9afb3
aa4d81d
 
 
 
 
735daf5
 
 
 
aa4d81d
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
# Environment for the training run below.
# MODEL_DIR doubles as the config/tokenizer source and the checkpoint output dir.
export MODEL_DIR="$(pwd)"                       # $( ) over backticks; quoted against spaces in cwd
export WANDB_ENTITY="wandb"                     # Weights & Biases entity (team/user)
export WANDB_PROJECT="hf-flax-gpt2-indonesian"  # W&B project collecting these runs
export WANDB_LOG_MODEL="true"                   # upload model checkpoints as W&B artifacts

# Train and evaluate a GPT-2 causal LM with the Flax CLM trainer.
# Initial weights are read from flax_model.msgpack; the config and tokenizer
# are loaded from $MODEL_DIR, and checkpoints are written back there
# (and pushed to the Hub via --push_to_hub).
step_interval="5000"  # shared cadence for logging, checkpointing and evaluation

./run_clm_flax.py \
    --model_name_or_path="flax_model.msgpack" \
    --output_dir="${MODEL_DIR}" \
    --model_type="gpt2" \
    --config_name="${MODEL_DIR}" \
    --tokenizer_name="${MODEL_DIR}" \
    --dataset_name="./datasets/id_collection" \
    --dataset_config_name="id_collection" \
    --dataset_data_dir="/data/collection" \
    --do_train \
    --do_eval \
    --block_size="512" \
    --per_device_train_batch_size="24" \
    --per_device_eval_batch_size="24" \
    --learning_rate="0.0024" \
    --warmup_steps="1000" \
    --adam_beta1="0.9" \
    --adam_beta2="0.98" \
    --weight_decay="0.01" \
    --overwrite_output_dir \
    --num_train_epochs="20" \
    --dataloader_num_workers="64" \
    --preprocessing_num_workers="64" \
    --logging_steps="${step_interval}" \
    --save_steps="${step_interval}" \
    --eval_steps="${step_interval}" \
    --validation_split_percentage="2" \
    --push_to_hub