newsyctw committed on
Commit ba4e7b2
1 Parent(s): 22b7291
Files changed (1)
  1. config.json +61 -0
config.json ADDED
@@ -0,0 +1,61 @@
+ {
+     "task": "generate",
+     "project": "default",
+     "sched": "DDIM-SCHED",
+     "batch": 128,
+     "epoch": 15,
+     "ddim_eta": null,
+     "infer_steps": 50,
+     "infer_start": 0,
+     "inpaint_mul": 1.0,
+     "eval_max_batch": 1500,
+     "learning_rate": 0.0002,
+     "clean_rate": 1.0,
+     "poison_rate": 0.7,
+     "ext_poison_rate": 0.0,
+     "trigger": "SM_STOP_SIGN",
+     "target": "FEDORA_HAT",
+     "dataset_load_mode": "FIXED",
+     "solver_type": "ode",
+     "sde_type": "SDE-VP",
+     "psi": 1.0,
+     "ve_scale": 1.0,
+     "vp_scale": 1.0,
+     "gpu": "1",
+     "ckpt": "DDPM-CIFAR10-32",
+     "overwrite": true,
+     "postfix": "new-set",
+     "fclip": "o",
+     "save_image_epochs": 1,
+     "save_model_epochs": 5,
+     "is_save_all_model_epochs": false,
+     "sample_ep": null,
+     "result": "/work/u2941379/workspace/exp_baddiffusion_sde",
+     "eval_sample_n": 16,
+     "measure_sample_n": 10000,
+     "batch_32": 128,
+     "batch_256": 64,
+     "gradient_accumulation_steps": 1,
+     "learning_rate_32_scratch": 0.0002,
+     "learning_rate_256_scratch": 2e-05,
+     "lr_warmup_steps": 500,
+     "mixed_precision": "fp16",
+     "push_to_hub": false,
+     "hub_private_repo": false,
+     "overwrite_output_dir": true,
+     "seed": 0,
+     "dataset_path": "datasets",
+     "ckpt_dir": "ckpt",
+     "data_ckpt_dir": "data.ckpt",
+     "ep_model_dir": "epochs",
+     "ckpt_path": null,
+     "data_ckpt_path": null,
+     "mode": "train",
+     "dataset": "CIFAR10",
+     "R_trigger_only": false,
+     "device_ids": [
+         0
+     ],
+     "clip": false,
+     "output_dir": "/work/u2941379/workspace/exp_baddiffusion_sde/res_DDPM-CIFAR10-32_CIFAR10_ep15_ode_c1.0_p0.7_epr0.0_SM_STOP_SIGN-FEDORA_HAT_psi1.0_lr0.0002_vp1.0_ve1.0_new-set"
+ }
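
The config above appears to describe a BadDiffusion-style backdoor fine-tuning run on a DDPM-CIFAR10-32 checkpoint (70% poison rate, SM_STOP_SIGN trigger, FEDORA_HAT target, ODE solver with the SDE-VP formulation). Below is a minimal Python sketch of how such a config.json could be loaded and inspected; the file path, the Namespace wrapper, and the printed fields are illustrative assumptions, not the project's own loader.

    import json
    from argparse import Namespace

    # Load the committed config.json into a namespace for attribute-style
    # access. (Assumption: the training script may parse this file
    # differently; this is only a sketch of reading the fields shown above.)
    with open("config.json", "r") as f:
        cfg = Namespace(**json.load(f))

    # A few fields that characterize this run: a DDPM checkpoint fine-tuned
    # on CIFAR10 with a 0.7 poison rate and a stop-sign -> fedora-hat backdoor.
    print(cfg.ckpt)                        # "DDPM-CIFAR10-32"
    print(cfg.dataset)                     # "CIFAR10"
    print(cfg.poison_rate)                 # 0.7
    print(cfg.trigger, "->", cfg.target)   # SM_STOP_SIGN -> FEDORA_HAT
    print(cfg.output_dir)                  # experiment result directory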