program: train.py
project: dalle-mini
method: random
metric:
  name: eval/loss
  goal: minimize
parameters:
  optim:
    value: distributed_shampoo
  learning_rate:
    distribution: log_uniform
    # from exp(min) to exp(max), i.e. roughly 1e-4 to 1e-3
    min: -9.2
    max: -6.9
  tokenizer_name:
    value: boris/dalle-mini-tokenizer
  config_name:
    value: ./config/mini
  dtype:
    value: bfloat16
  dataset_repo_or_path:
    value: ./data
  per_device_train_batch_size:
    value: 64
  per_device_eval_batch_size:
    value: 64
  gradient_accumulation_steps:
    value: 1
  warmup_steps:
    value: 1000
  num_train_epochs:
    value: 1
  max_train_samples:
    value: 1000000
  logging_steps:
    value: 40
  eval_steps:
    value: 200

command:
  - python3
  - ${program}
  - "--streaming"
  - "--output_dir"
  - "./output"
  - "--overwrite_output_dir"
  - "--do_train"
  - "--do_eval"
  - ${args}
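
# A minimal usage sketch, assuming the standard wandb CLI (the file path and
# sweep id below are placeholders, not taken from the repo):
#   wandb sweep path/to/this_sweep.yaml   # registers the sweep and prints a sweep id
#   wandb agent entity/project/sweep_id   # starts an agent that runs `command` with sampled parameters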