shweaung commited on
Commit
2d7fde6
β€’
1 Parent(s): bd8db30

Upload folder using huggingface_hub

Browse files
config.yaml ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ config:
2
+ name: myanmar-kathein-festival-cartoon
3
+ process:
4
+ - datasets:
5
+ - cache_latents_to_disk: true
6
+ caption_dropout_rate: 0.05
7
+ caption_ext: txt
8
+ folder_path: datasets/2671791f-966a-4502-a163-5e9cf151cc87
9
+ resolution:
10
+ - 512
11
+ - 768
12
+ - 1024
13
+ shuffle_tokens: false
14
+ device: cuda:0
15
+ model:
16
+ assistant_lora_path: ostris/FLUX.1-schnell-training-adapter
17
+ is_flux: true
18
+ low_vram: true
19
+ name_or_path: black-forest-labs/FLUX.1-schnell
20
+ quantize: true
21
+ network:
22
+ linear: 16
23
+ linear_alpha: 16
24
+ type: lora
25
+ sample:
26
+ guidance_scale: 4
27
+ height: 1024
28
+ neg: ''
29
+ prompts:
30
+ - people are celebrate festival, kathein festival cartoon
31
+ sample_every: 1000
32
+ sample_steps: 4
33
+ sampler: flowmatch
34
+ seed: 42
35
+ walk_seed: true
36
+ width: 1024
37
+ save:
38
+ dtype: float16
39
+ hf_private: true
40
+ hf_repo_id: shweaung/myanmar-kathein-festival-cartoon
41
+ max_step_saves_to_keep: 4
42
+ push_to_hub: true
43
+ save_every: 10000
44
+ train:
45
+ batch_size: 1
46
+ disable_sampling: false
47
+ dtype: bf16
48
+ ema_config:
49
+ ema_decay: 0.99
50
+ use_ema: true
51
+ gradient_accumulation_steps: 1
52
+ gradient_checkpointing: true
53
+ lr: 0.0004
54
+ noise_scheduler: flowmatch
55
+ optimizer: adamw8bit
56
+ skip_first_sample: true
57
+ steps: 1000
58
+ train_text_encoder: false
59
+ train_unet: true
60
+ training_folder: output
61
+ trigger_word: kathein festival cartoon
62
+ type: sd_trainer
63
+ job: extension
64
+ meta:
65
+ name: '[name]'
66
+ version: '1.0'
images 51.jpeg ADDED

Git LFS Details

  • SHA256: 9689b8136fd5055904bf361700aaba6cb1f55151db00c51e6a23ce2eb110fe89
  • Pointer size: 130 Bytes
  • Size of remote file: 36.2 kB
images 52.jpeg ADDED

Git LFS Details

  • SHA256: 1ea41ea8155891dccad20d25b7ce1c3f16cd62d3d4546f6da516a5047739adc6
  • Pointer size: 130 Bytes
  • Size of remote file: 26.9 kB
images 54.jpeg ADDED

Git LFS Details

  • SHA256: 2294c4c8f025fe51e1607318ff6f7dd8e5f081a0ea263af50482255f14daa769
  • Pointer size: 130 Bytes
  • Size of remote file: 62.2 kB
images 55.jpeg ADDED

Git LFS Details

  • SHA256: 0c533c4743a221bcf5e70e1d46255c626270d7337609e3f42e5ef9fd94cf5c7a
  • Pointer size: 130 Bytes
  • Size of remote file: 68 kB
images 56.jpeg ADDED

Git LFS Details

  • SHA256: 4ea2b0abfc5f91647631252218eae8431fba9fa266e1703eef4fd451a6877beb
  • Pointer size: 130 Bytes
  • Size of remote file: 39.5 kB
images 58.jpeg ADDED

Git LFS Details

  • SHA256: 0b60e510d2849594d504eec2570ada5f242c0413bbddfe574b6160ac9fa66ff6
  • Pointer size: 130 Bytes
  • Size of remote file: 54.5 kB
images 59.jpeg ADDED

Git LFS Details

  • SHA256: b19c87620fbe1137b7eb85dcc2893b6f33ce73e0b55781155d5433e05ef37370
  • Pointer size: 130 Bytes
  • Size of remote file: 44.5 kB
images 60.jpeg ADDED

Git LFS Details

  • SHA256: 373fdbe47d1a04b5534c2a9430374ecad2e4927c0225813fc4e4f0bef72b40a6
  • Pointer size: 130 Bytes
  • Size of remote file: 51.1 kB
images 61.jpeg ADDED

Git LFS Details

  • SHA256: 7fdd5f20aeeff2a63c8092fd64c6639060ecbde926cc7ac5b7a5d026bfed9de6
  • Pointer size: 130 Bytes
  • Size of remote file: 85.2 kB
images 62.jpeg ADDED

Git LFS Details

  • SHA256: 6e789a9901071885cd3e10950285e7a8538513b897383a0fcb392136bee53f6c
  • Pointer size: 130 Bytes
  • Size of remote file: 49.1 kB
images 63.jpeg ADDED

Git LFS Details

  • SHA256: 3114ca27847ec6c631653fe996b670c07c955a5bf3a51b20b2a178deab92f576
  • Pointer size: 130 Bytes
  • Size of remote file: 55.5 kB
images 64.jpeg ADDED

Git LFS Details

  • SHA256: b2cc7e81bd846be1bcc498290af6715f23fc79031bca5fa091330d9ad992176d
  • Pointer size: 130 Bytes
  • Size of remote file: 63.9 kB
images 65.jpeg ADDED

Git LFS Details

  • SHA256: 3d6a31018283156f6eba23b686590abe35f761f6c75681447b8293928541d4cc
  • Pointer size: 130 Bytes
  • Size of remote file: 31.3 kB
images 66.jpeg ADDED

Git LFS Details

  • SHA256: 202b2461503669a09b73b2d24649a0ee1a55cfdb8abee75abd76a5d3ee4860aa
  • Pointer size: 130 Bytes
  • Size of remote file: 76.1 kB
images 67.jpeg ADDED

Git LFS Details

  • SHA256: 50d2f9865dea31f0543ae37236490b5f80eea8fe2bca780e03d603a70c2961f8
  • Pointer size: 130 Bytes
  • Size of remote file: 47.3 kB
metadata.jsonl ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"file_name": "images 67.jpeg", "prompt": "[trigger]"}
2
+ {"file_name": "images 66.jpeg", "prompt": "[trigger]"}
3
+ {"file_name": "images 65.jpeg", "prompt": "[trigger]"}
4
+ {"file_name": "images 64.jpeg", "prompt": "[trigger]"}
5
+ {"file_name": "images 63.jpeg", "prompt": "[trigger]"}
6
+ {"file_name": "images 62.jpeg", "prompt": "[trigger]"}
7
+ {"file_name": "images 61.jpeg", "prompt": "[trigger]"}
8
+ {"file_name": "\u1000\u1011\u1014\u1015\u1010 Kahtein Festival large.png_1713719538.png", "prompt": "[trigger]"}
9
+ {"file_name": "images 59.jpeg", "prompt": "[trigger]"}
10
+ {"file_name": "images 60.jpeg", "prompt": "[trigger]"}
11
+ {"file_name": "images 58.jpeg", "prompt": "[trigger]"}
12
+ {"file_name": "images 56.jpeg", "prompt": "[trigger]"}
13
+ {"file_name": "images 55.jpeg", "prompt": "[trigger]"}
14
+ {"file_name": "images 54.jpeg", "prompt": "[trigger]"}
15
+ {"file_name": "images 52.jpeg", "prompt": "[trigger]"}
16
+ {"file_name": "images 51.jpeg", "prompt": "[trigger]"}
requirements.txt ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ git+https://github.com/huggingface/diffusers.git
2
+ lycoris-lora==1.8.3
3
+ flatten_json
4
+ pyyaml
5
+ oyaml
6
+ tensorboard
7
+ kornia
8
+ invisible-watermark
9
+ einops
10
+ toml
11
+ albumentations
12
+ pydantic
13
+ omegaconf
14
+ k-diffusion
15
+ open_clip_torch
16
+ prodigyopt
17
+ controlnet_aux==0.0.7
18
+ python-dotenv
19
+ lpips
20
+ pytorch_fid
21
+ optimum-quanto
script.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from huggingface_hub import snapshot_download, delete_repo, metadata_update
3
+ import uuid
4
+ import json
5
+ import yaml
6
+ import subprocess
7
+
8
+ HF_TOKEN = os.environ.get("HF_TOKEN")
9
+ HF_DATASET = os.environ.get("DATA_PATH")
10
+
11
+
12
def download_dataset(hf_dataset_path: str):
    """Snapshot a Hugging Face dataset repo into a unique /tmp directory.

    A fresh UUID keeps concurrent or repeated runs from colliding.
    Returns the local directory the snapshot was downloaded into.
    """
    unique_name = str(uuid.uuid4())
    target_dir = f"/tmp/{unique_name}"
    snapshot_download(
        repo_id=hf_dataset_path,
        token=HF_TOKEN,
        local_dir=target_dir,
        repo_type="dataset",
    )
    return target_dir
21
+
22
+
23
def process_dataset(dataset_dir: str):
    """Prepare an ai-toolkit dataset folder for training, in place.

    The folder must contain the training images and a ``config.yaml``; it
    may also contain a ``metadata.jsonl`` whose lines are objects with
    ``file_name`` and ``prompt`` fields.  For each metadata entry a sidecar
    ``.txt`` caption file (same base name as the image) is written, then
    ``metadata.jsonl`` is removed — ai-toolkit consumes caption files, not
    a jsonl index.  Finally the config's dataset ``folder_path`` is
    rewritten to point at *dataset_dir*.

    Returns ``dataset_dir``.
    Raises ValueError if ``config.yaml`` is missing.
    """
    config_path = os.path.join(dataset_dir, "config.yaml")
    if not os.path.exists(config_path):
        raise ValueError("config.yaml does not exist")

    metadata_path = os.path.join(dataset_dir, "metadata.jsonl")
    if os.path.exists(metadata_path):
        # Explicit UTF-8 throughout: file names and prompts may contain
        # non-ASCII text (the dataset includes Burmese-named files), and the
        # platform default encoding is not guaranteed to handle them.
        metadata = []
        with open(metadata_path, "r", encoding="utf-8") as f:
            for line in f:
                if len(line.strip()) > 0:
                    metadata.append(json.loads(line))
        for item in metadata:
            # Caption sidecar: replace the image extension with .txt.
            image_path = os.path.join(dataset_dir, item["file_name"])
            txt_path = image_path.rsplit(".", 1)[0] + ".txt"
            with open(txt_path, "w", encoding="utf-8") as f:
                f.write(item["prompt"])

        # remove metadata.jsonl — captions now live in the .txt files
        os.remove(metadata_path)

    with open(config_path, "r", encoding="utf-8") as f:
        config = yaml.safe_load(f)

    # Point the training config at the freshly downloaded local folder.
    config["config"]["process"][0]["datasets"][0]["folder_path"] = dataset_dir

    with open(config_path, "w", encoding="utf-8") as f:
        yaml.dump(config, f)

    return dataset_dir
59
+
60
+
61
def run_training(hf_dataset_path: str):
    """Download and prepare the dataset, then launch ai-toolkit training.

    Clones a pinned revision of ostris/ai-toolkit and starts
    ``run.py <config.yaml>`` inside it as a background subprocess.

    Returns ``(process, dataset_dir)``: the running training subprocess
    and the local dataset directory whose config.yaml drives the run.
    Raises subprocess.CalledProcessError if the clone/checkout fails.
    """
    dataset_dir = download_dataset(hf_dataset_path)
    dataset_dir = process_dataset(dataset_dir)

    # Fetch the toolkit at a pinned commit.  check=True makes a failed
    # clone/checkout abort here instead of being silently ignored and
    # launching training against a missing or wrong-version toolkit.
    commands = "git clone https://github.com/ostris/ai-toolkit.git ai-toolkit && cd ai-toolkit && git checkout bc693488eb3cf48ded8bc2af845059d80f4cf7d0 && git submodule update --init --recursive"
    subprocess.run(commands, shell=True, check=True)

    # Launch training asynchronously; caller decides when to wait().
    commands = f"python run.py {os.path.join(dataset_dir, 'config.yaml')}"
    process = subprocess.Popen(commands, shell=True, cwd="ai-toolkit", env=os.environ)

    return process, dataset_dir
74
+
75
+
76
if __name__ == "__main__":
    # Run the full pipeline and block until the training subprocess exits.
    process, dataset_dir = run_training(HF_DATASET)
    process.wait()  # Wait for the training process to finish

    # The processed config still records the destination model repo.
    with open(os.path.join(dataset_dir, "config.yaml"), "r") as f:
        config = yaml.safe_load(f)
    repo_id = config["config"]["process"][0]["save"]["hf_repo_id"]

    # Tag the pushed model repo so it is discoverable on the Hub.
    tags = [
        "autotrain",
        "spacerunner",
        "text-to-image",
        "flux",
        "lora",
        "diffusers",
        "template:sd-lora",
    ]
    metadata_update(
        repo_id, {"tags": tags}, token=HF_TOKEN, repo_type="model", overwrite=True
    )
    # The source dataset repo is single-use; clean it up afterwards.
    delete_repo(HF_DATASET, token=HF_TOKEN, repo_type="dataset", missing_ok=True)
ကထနပတ Kahtein Festival large.png_1713719538.png ADDED

Git LFS Details

  • SHA256: b255ad6e40feead541052813bc2c7bcd09996a9193cbc199a6ce30a9b4ca836d
  • Pointer size: 132 Bytes
  • Size of remote file: 1.03 MB