End of training
- 800_lora.pt +3 -0
- feature_extractor/preprocessor_config.json +44 -0
- model_index.json +5 -5
- scheduler/scheduler_config.json +6 -9
- text_encoder/config.json +7 -7
- text_encoder/model.safetensors +2 -2
- tokenizer/special_tokens_map.json +1 -1
- tokenizer/tokenizer_config.json +9 -1
- vae/config.json +2 -2
- vae/diffusion_pytorch_model.safetensors +1 -1
- val_imgs_grid.png +2 -2
800_lora.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce236e95751241d20b3bfe3efebb874e71c285ddf29309ba2433a13c0b25c34d
+size 477514
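The new 800_lora.pt (about 477 KB) is presumably the LoRA weight checkpoint saved at step 800. A minimal inspection sketch, assuming the file is a plain torch.save()-ed state dict; the actual key layout depends on the training script and is not shown in this commit:

import torch

ckpt = torch.load("800_lora.pt", map_location="cpu")
if isinstance(ckpt, dict):
    for name, value in ckpt.items():
        shape = tuple(value.shape) if hasattr(value, "shape") else type(value).__name__
        print(name, shape)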
feature_extractor/preprocessor_config.json
ADDED
@@ -0,0 +1,44 @@
+{
+  "_valid_processor_keys": [
+    "images",
+    "do_resize",
+    "size",
+    "resample",
+    "do_center_crop",
+    "crop_size",
+    "do_rescale",
+    "rescale_factor",
+    "do_normalize",
+    "image_mean",
+    "image_std",
+    "do_convert_rgb",
+    "return_tensors",
+    "data_format",
+    "input_data_format"
+  ],
+  "crop_size": {
+    "height": 224,
+    "width": 224
+  },
+  "do_center_crop": true,
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "CLIPImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "shortest_edge": 224
+  }
+}
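These are the stock CLIP image-preprocessing settings: resize the shortest edge to 224, center-crop to 224x224, rescale by 1/255, then normalize with the CLIP mean/std. A sketch of the equivalent processor built directly in transformers; in practice you would just call CLIPImageProcessor.from_pretrained on the feature_extractor subfolder:

from transformers import CLIPImageProcessor

processor = CLIPImageProcessor(
    do_resize=True,
    size={"shortest_edge": 224},
    resample=3,  # PIL bicubic
    do_center_crop=True,
    crop_size={"height": 224, "width": 224},
    do_rescale=True,
    rescale_factor=1 / 255,
    do_normalize=True,
    image_mean=[0.48145466, 0.4578275, 0.40821073],
    image_std=[0.26862954, 0.26130258, 0.27577711],
    do_convert_rgb=True,
)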
model_index.json
CHANGED
@@ -1,23 +1,23 @@
 {
   "_class_name": "StableDiffusionPipeline",
   "_diffusers_version": "0.27.2",
-  "_name_or_path": "
+  "_name_or_path": "stabilityai/stable-diffusion-2-1",
   "feature_extractor": [
-
-
+    "transformers",
+    "CLIPImageProcessor"
   ],
   "image_encoder": [
     null,
     null
   ],
-  "requires_safety_checker":
+  "requires_safety_checker": false,
   "safety_checker": [
     null,
     null
   ],
   "scheduler": [
     "diffusers",
-    "
+    "DDIMScheduler"
   ],
   "text_encoder": [
     "transformers",
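model_index.json now records the base model (stabilityai/stable-diffusion-2-1), points feature_extractor at a CLIPImageProcessor, swaps the scheduler entry to DDIMScheduler, and sets requires_safety_checker to false. A hedged loading sketch; "your-username/this-repo" is a placeholder for this repository's id:

import torch
from diffusers import StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained(
    "your-username/this-repo",  # placeholder repo id
    torch_dtype=torch.float16,
    safety_checker=None,        # the safety_checker entries are null in model_index.json
)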
scheduler/scheduler_config.json
CHANGED
@@ -1,23 +1,20 @@
 {
-  "_class_name": "
+  "_class_name": "DDIMScheduler",
   "_diffusers_version": "0.27.2",
-  "algorithm_type": "deis",
   "beta_end": 0.012,
   "beta_schedule": "scaled_linear",
   "beta_start": 0.00085,
   "clip_sample": false,
   "clip_sample_range": 1.0,
   "dynamic_thresholding_ratio": 0.995,
-  "lower_order_final": true,
   "num_train_timesteps": 1000,
-  "prediction_type": "
+  "prediction_type": "v_prediction",
+  "rescale_betas_zero_snr": false,
   "sample_max_value": 1.0,
   "set_alpha_to_one": false,
-  "
-  "solver_type": "logrho",
+  "skip_prk_steps": true,
   "steps_offset": 1,
   "thresholding": false,
-  "timestep_spacing": "
-  "trained_betas": null,
-  "use_karras_sigmas": false
+  "timestep_spacing": "leading",
+  "trained_betas": null
 }
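The scheduler config changes from a DEIS-style multistep setup (algorithm_type "deis", solver_type "logrho") to DDIMScheduler with v-prediction, SD 2.1's native parameterization. Note that skip_prk_steps is not a DDIMScheduler argument and is ignored when the scheduler is loaded; it looks like a leftover from another scheduler's config. A sketch of the equivalent construction from the new values (normally loaded with DDIMScheduler.from_pretrained(..., subfolder="scheduler")):

from diffusers import DDIMScheduler

scheduler = DDIMScheduler(
    num_train_timesteps=1000,
    beta_start=0.00085,
    beta_end=0.012,
    beta_schedule="scaled_linear",
    clip_sample=False,
    set_alpha_to_one=False,
    steps_offset=1,
    prediction_type="v_prediction",
    timestep_spacing="leading",
    rescale_betas_zero_snr=False,
)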
text_encoder/config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "stabilityai/stable-diffusion-2-1",
   "architectures": [
     "CLIPTextModel"
   ],
@@ -7,18 +7,18 @@
   "bos_token_id": 0,
   "dropout": 0.0,
   "eos_token_id": 2,
-  "hidden_act": "
-  "hidden_size":
+  "hidden_act": "gelu",
+  "hidden_size": 1024,
   "initializer_factor": 1.0,
   "initializer_range": 0.02,
-  "intermediate_size":
+  "intermediate_size": 4096,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 77,
   "model_type": "clip_text_model",
-  "num_attention_heads":
-  "num_hidden_layers":
+  "num_attention_heads": 16,
+  "num_hidden_layers": 23,
   "pad_token_id": 1,
-  "projection_dim":
+  "projection_dim": 512,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.39.0",
   "vocab_size": 49408
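The restored hyperparameters (hidden size 1024, 23 layers, 16 heads, GELU) match the OpenCLIP-based text encoder shipped with stabilityai/stable-diffusion-2-1. A sketch of the equivalent transformers config, for reference only; the trained weights come from model.safetensors below:

from transformers import CLIPTextConfig, CLIPTextModel

config = CLIPTextConfig(
    vocab_size=49408,
    hidden_size=1024,
    intermediate_size=4096,
    num_hidden_layers=23,
    num_attention_heads=16,
    max_position_embeddings=77,
    hidden_act="gelu",
    projection_dim=512,
    pad_token_id=1,
    bos_token_id=0,
    eos_token_id=2,
)
text_encoder = CLIPTextModel(config)  # randomly initialized shell; real weights are in model.safetensors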
text_encoder/model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:305b018799a2e1dba83e9db17654309814ff4212e40b8096b237ce2e3794a132
+size 680820760
tokenizer/special_tokens_map.json
CHANGED
@@ -13,7 +13,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "
+  "pad_token": "!",
   "unk_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer/tokenizer_config.json
CHANGED
@@ -1,6 +1,14 @@
 {
   "add_prefix_space": false,
   "added_tokens_decoder": {
+    "0": {
+      "content": "!",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
     "49406": {
       "content": "<|startoftext|>",
       "lstrip": false,
@@ -24,7 +32,7 @@
   "eos_token": "<|endoftext|>",
   "errors": "replace",
   "model_max_length": 77,
-  "pad_token": "
+  "pad_token": "!",
   "tokenizer_class": "CLIPTokenizer",
   "unk_token": "<|endoftext|>"
 }
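The pad token becomes "!", which is vocabulary id 0 in the CLIP BPE vocabulary (the padding convention used by SD 2.x checkpoints). A quick check, assuming the tokenizer is loaded from this repo's tokenizer subfolder; "your-username/this-repo" is a placeholder:

from transformers import CLIPTokenizer

tokenizer = CLIPTokenizer.from_pretrained("your-username/this-repo", subfolder="tokenizer")
print(tokenizer.pad_token, tokenizer.pad_token_id)  # expected: ! 0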
vae/config.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "_class_name": "AutoencoderKL",
   "_diffusers_version": "0.27.2",
-  "_name_or_path": "
+  "_name_or_path": "stabilityai/stable-diffusion-2-1",
   "act_fn": "silu",
   "block_out_channels": [
     128,
@@ -23,7 +23,7 @@
   "layers_per_block": 2,
   "norm_num_groups": 32,
   "out_channels": 3,
-  "sample_size":
+  "sample_size": 768,
   "scaling_factor": 0.18215,
   "up_block_types": [
     "UpDecoderBlock2D",
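The VAE config now records the base repo and a sample_size of 768, SD 2.1's native resolution; the scaling_factor of 0.18215 is unchanged and maps between VAE latents and the diffusion model's working space. A usage sketch with a dummy image batch; the repo id is a placeholder:

import torch
from diffusers import AutoencoderKL

vae = AutoencoderKL.from_pretrained("your-username/this-repo", subfolder="vae")
image = torch.randn(1, 3, 768, 768)  # dummy batch, values roughly in [-1, 1]
latents = vae.encode(image).latent_dist.sample() * vae.config.scaling_factor
decoded = vae.decode(latents / vae.config.scaling_factor).sample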
vae/diffusion_pytorch_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8d0c34f57abe50f323040f2366c8e22b941068dcdf53c8eb1d6fafb838afecb7
 size 167335590
val_imgs_grid.png
CHANGED
Git LFS Details