Commit 794d75f
Revanthraja committed
Parent(s): 6e8a603

Upload folder using huggingface_hub
Browse files
- args.json +18 -18
- model.ckpt +1 -1
- samples/0.png +2 -2
- samples/1.png +0 -0
- samples/2.png +2 -2
- samples/3.png +2 -2
- text_encoder/config.json +1 -1
- text_encoder/pytorch_model.bin +1 -1
- unet/config.json +1 -1
- unet/diffusion_pytorch_model.bin +1 -1
args.json
CHANGED
@@ -1,13 +1,13 @@
 {
-    "pretrained_model_name_or_path": "
+    "pretrained_model_name_or_path": "Revanthraja/snowavat",
     "pretrained_vae_name_or_path": "stabilityai/sd-vae-ft-mse",
-    "revision":
+    "revision": null,
     "tokenizer_name": null,
     "instance_data_dir": null,
     "class_data_dir": null,
     "instance_prompt": null,
     "class_prompt": null,
-    "save_sample_prompt": "A photo
+    "save_sample_prompt": "A photo rolls royce car in front of zwx luxury house",
     "save_sample_negative_prompt": null,
     "n_save_sample": 4,
     "save_guidance_scale": 7.5,
@@ -15,19 +15,19 @@
     "pad_tokens": false,
     "with_prior_preservation": true,
     "prior_loss_weight": 1.0,
-    "num_class_images":
-    "output_dir": "
+    "num_class_images": 10,
+    "output_dir": "",
     "seed": 1337,
     "resolution": 512,
     "center_crop": false,
     "train_text_encoder": true,
     "train_batch_size": 1,
     "sample_batch_size": 4,
-    "num_train_epochs":
-    "max_train_steps":
+    "num_train_epochs": 7,
+    "max_train_steps": 4000,
     "gradient_accumulation_steps": 1,
     "gradient_checkpointing": true,
-    "learning_rate": 2e-
+    "learning_rate": 2e-08,
     "scale_lr": false,
     "lr_scheduler": "constant",
     "lr_warmup_steps": 0,
@@ -50,22 +50,22 @@
     "local_rank": -1,
     "concepts_list": [
         {
-            "instance_prompt": "photo of avt
-            "class_prompt": "
-            "instance_data_dir": "/content/dataset/
+            "instance_prompt": "photo of avt Luxury house ",
+            "class_prompt": "Luxury house",
+            "instance_data_dir": "/content/dataset/Luxury house",
             "class_data_dir": "/content/dataset/person"
         },
         {
-            "instance_prompt": "photo of zwx
-            "class_prompt": "
-            "instance_data_dir": "/content/dataset/
+            "instance_prompt": "photo of zwx Beautiful girls",
+            "class_prompt": "Beautiful girls",
+            "instance_data_dir": "/content/dataset/Beautiful girls",
             "class_data_dir": "/content/dataset/zwx"
         },
         {
-            "instance_prompt": "photo of ukj
-            "class_prompt": "
-            "instance_data_dir": "/content/dataset/
-            "class_data_dir": "/content/
+            "instance_prompt": "photo of ukj girl with Rolls royce",
+            "class_prompt": "Girl in Rolls royce",
+            "instance_data_dir": "/content/dataset/girl sitting in Rolls royce",
+            "class_data_dir": "/content/data/ukj"
         }
     ],
     "read_prompts_from_txts": false
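The updated args.json points the DreamBooth run at the Revanthraja/snowavat weights and trains three concepts (avt, zwx and ukj instance tokens) with prior preservation. A minimal inference sketch, assuming the uploaded folder follows the standard diffusers Stable Diffusion layout suggested by the unet/ and text_encoder/ subfolders in this commit; the prompt and guidance scale are the save_sample_prompt and save_guidance_scale values above, and the local output filename is an assumption:

# Minimal sketch: load the uploaded pipeline and reproduce a sample image.
import torch
from diffusers import StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained(
    "Revanthraja/snowavat",   # repo name recorded in args.json and the configs
    torch_dtype=torch.float16,
).to("cuda")

image = pipe(
    "A photo rolls royce car in front of zwx luxury house",  # save_sample_prompt
    guidance_scale=7.5,                                       # save_guidance_scale
).images[0]
image.save("sample.png")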
model.ckpt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8068d7bb6fbe28b089bab83983edf031f935ccc9e8e4bca5083d61ffe9b885b9
 size 2580250395
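model.ckpt is stored as a Git LFS pointer, so the diff above only records the object's sha256 and byte size. A small standard-library sketch for checking a downloaded copy against those two values (the local path is an assumption); the same check works for the two .bin pointers further down with their own digests and sizes:

# Verify a downloaded LFS object against the pointer's oid sha256 and size.
import hashlib
import os

PATH = "model.ckpt"   # local download path (assumption)
EXPECTED_SHA256 = "8068d7bb6fbe28b089bab83983edf031f935ccc9e8e4bca5083d61ffe9b885b9"
EXPECTED_SIZE = 2580250395

def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"
assert sha256_of(PATH) == EXPECTED_SHA256, "sha256 mismatch"
print("model.ckpt matches its LFS pointer")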
samples/0.png
CHANGED
samples/1.png
CHANGED
samples/2.png
CHANGED
samples/3.png
CHANGED
text_encoder/config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-    "_name_or_path": "
+    "_name_or_path": "Revanthraja/snowavat",
     "architectures": [
         "CLIPTextModel"
     ],
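The config declares a CLIPTextModel, and args.json set train_text_encoder to true, so the fine-tuned text encoder can also be loaded on its own. A sketch, with the subfolder name taken from this commit's layout:

# Load just the fine-tuned text encoder from the text_encoder/ subfolder.
from transformers import CLIPTextModel

text_encoder = CLIPTextModel.from_pretrained(
    "Revanthraja/snowavat",
    subfolder="text_encoder",
)
print(type(text_encoder).__name__)  # CLIPTextModel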
text_encoder/pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:654a07f4f841cbfd91b91b28c23bf9f6a747411b62953bdc2a85348a259fbadc
 size 1361678941
unet/config.json
CHANGED
@@ -1,7 +1,7 @@
 {
     "_class_name": "UNet2DConditionModel",
     "_diffusers_version": "0.15.0.dev0",
-    "_name_or_path": "
+    "_name_or_path": "Revanthraja/snowavat",
     "act_fn": "silu",
     "attention_head_dim": [
         5,
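unet/config.json identifies the denoiser as a diffusers UNet2DConditionModel exported with diffusers 0.15.0.dev0. A sketch for loading this component alone from the unet/ subfolder:

# Load only the UNet from the unet/ subfolder of the repo.
from diffusers import UNet2DConditionModel

unet = UNet2DConditionModel.from_pretrained(
    "Revanthraja/snowavat",
    subfolder="unet",
)
print(unet.config.attention_head_dim)  # per-block head dims; first entry is 5 per the config above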
unet/diffusion_pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:882ac1a96d6a359125a5864aa3b100afc72ed01b63da5ce2c9b500abd8c14c4b
 size 3463934693