Zilun committed on
Commit ba20d8a
1 Parent(s): df925d0

Delete checkpoint/18000

checkpoint/18000/text_encoder/config.json DELETED
@@ -1,25 +0,0 @@
- {
-   "_name_or_path": "/data/gyl/sdft/experiments/dataset_p1/outputs/3000",
-   "architectures": [
-     "CLIPTextModel"
-   ],
-   "attention_dropout": 0.0,
-   "bos_token_id": 0,
-   "dropout": 0.0,
-   "eos_token_id": 2,
-   "hidden_act": "gelu",
-   "hidden_size": 1024,
-   "initializer_factor": 1.0,
-   "initializer_range": 0.02,
-   "intermediate_size": 4096,
-   "layer_norm_eps": 1e-05,
-   "max_position_embeddings": 77,
-   "model_type": "clip_text_model",
-   "num_attention_heads": 16,
-   "num_hidden_layers": 23,
-   "pad_token_id": 1,
-   "projection_dim": 512,
-   "torch_dtype": "float32",
-   "transformers_version": "4.35.2",
-   "vocab_size": 49408
- }
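For context, the deleted config describes the CLIP text encoder of a Stable Diffusion 2.x-style pipeline (1024-dim hidden states, 23 layers, 77-token context). A minimal sketch of how such a subfolder is typically loaded with transformers, assuming the checkpoint directory was fetched before this deletion (the local path mirrors the repo layout and is otherwise hypothetical):

# Sketch: load the (now deleted) text encoder from a local copy of the checkpoint.
from transformers import CLIPTextModel

text_encoder = CLIPTextModel.from_pretrained("checkpoint/18000/text_encoder")
print(text_encoder.config.hidden_size)        # 1024
print(text_encoder.config.num_hidden_layers)  # 23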
checkpoint/18000/text_encoder/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:099b622541a3855e2ac74a611775b32ed7a8dfa715e787d1f536ec074081c0da
- size 1361678941
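The three lines above are the Git LFS pointer file itself; the actual ~1.36 GB weight blob lives in LFS storage, identified by its SHA-256 oid and byte size. A hedged sketch for verifying a fetched blob against the pointer, assuming a local copy exists:

# Sketch: check a downloaded LFS blob against the pointer's oid and size.
import hashlib, os

path = "checkpoint/18000/text_encoder/pytorch_model.bin"  # hypothetical local copy
expected_oid = "099b622541a3855e2ac74a611775b32ed7a8dfa715e787d1f536ec074081c0da"
expected_size = 1361678941

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "oid mismatch"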
checkpoint/18000/tokenizer/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint/18000/tokenizer/special_tokens_map.json DELETED
@@ -1,30 +0,0 @@
- {
-   "bos_token": {
-     "content": "<|startoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": {
-     "content": "!",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "unk_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   }
- }
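This map keeps the standard CLIP start/end/unknown tokens but sets "!" as the pad token, as several SD 2.x tokenizers do. A short sketch that surfaces those values, under the same assumption that the tokenizer folder is available locally:

# Sketch: inspect the special tokens of the deleted tokenizer.
from transformers import CLIPTokenizer

tokenizer = CLIPTokenizer.from_pretrained("checkpoint/18000/tokenizer")
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token)
# expected: <|startoftext|> <|endoftext|> !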
checkpoint/18000/tokenizer/tokenizer_config.json DELETED
@@ -1,38 +0,0 @@
- {
-   "add_prefix_space": false,
-   "added_tokens_decoder": {
-     "0": {
-       "content": "!",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "49406": {
-       "content": "<|startoftext|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "49407": {
-       "content": "<|endoftext|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "bos_token": "<|startoftext|>",
-   "clean_up_tokenization_spaces": true,
-   "do_lower_case": true,
-   "eos_token": "<|endoftext|>",
-   "errors": "replace",
-   "model_max_length": 77,
-   "pad_token": "!",
-   "tokenizer_class": "CLIPTokenizer",
-   "unk_token": "<|endoftext|>"
- }
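Per this config, prompts are lower-cased and capped at model_max_length 77, and padding uses token id 0 ("!", per added_tokens_decoder). A hedged sketch of the resulting encoding behavior (the prompt string is a placeholder):

# Sketch: encode a placeholder prompt, padded to the 77-token context window.
from transformers import CLIPTokenizer

tokenizer = CLIPTokenizer.from_pretrained("checkpoint/18000/tokenizer")
batch = tokenizer(
    ["a photo of a cat"],   # placeholder prompt
    padding="max_length",   # pad to model_max_length = 77
    truncation=True,
    return_tensors="pt",
)
print(batch.input_ids.shape)        # torch.Size([1, 77])
print(int(batch.input_ids[0, -1]))  # 0, the id of the "!" pad token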
checkpoint/18000/tokenizer/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint/18000/unet/config.json DELETED
@@ -1,57 +0,0 @@
- {
-   "_class_name": "UNet2DConditionModel",
-   "_diffusers_version": "0.15.0.dev0",
-   "_name_or_path": "/data/gyl/sdft/experiments/dataset_p1/outputs/3000",
-   "act_fn": "silu",
-   "attention_head_dim": [
-     5,
-     10,
-     20,
-     20
-   ],
-   "block_out_channels": [
-     320,
-     640,
-     1280,
-     1280
-   ],
-   "center_input_sample": false,
-   "class_embed_type": null,
-   "class_embeddings_concat": false,
-   "conv_in_kernel": 3,
-   "conv_out_kernel": 3,
-   "cross_attention_dim": 1024,
-   "down_block_types": [
-     "CrossAttnDownBlock2D",
-     "CrossAttnDownBlock2D",
-     "CrossAttnDownBlock2D",
-     "DownBlock2D"
-   ],
-   "downsample_padding": 1,
-   "dual_cross_attention": false,
-   "flip_sin_to_cos": true,
-   "freq_shift": 0,
-   "in_channels": 4,
-   "layers_per_block": 2,
-   "mid_block_scale_factor": 1,
-   "mid_block_type": "UNetMidBlock2DCrossAttn",
-   "norm_eps": 1e-05,
-   "norm_num_groups": 32,
-   "num_class_embeds": null,
-   "only_cross_attention": false,
-   "out_channels": 4,
-   "projection_class_embeddings_input_dim": null,
-   "resnet_time_scale_shift": "default",
-   "sample_size": 96,
-   "time_cond_proj_dim": null,
-   "time_embedding_type": "positional",
-   "timestep_post_act": null,
-   "up_block_types": [
-     "UpBlock2D",
-     "CrossAttnUpBlock2D",
-     "CrossAttnUpBlock2D",
-     "CrossAttnUpBlock2D"
-   ],
-   "upcast_attention": false,
-   "use_linear_projection": true
- }
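This UNet matches the SD 2.x architecture: cross_attention_dim 1024 pairs with the 1024-dim text encoder above, and sample_size 96 means it denoises 96x96 latents (768x768 pixels after 8x VAE upsampling). A minimal loading sketch with the diffusers API, same local-copy assumption as above:

# Sketch: load the deleted UNet and confirm the key dimensions.
from diffusers import UNet2DConditionModel

unet = UNet2DConditionModel.from_pretrained("checkpoint/18000", subfolder="unet")
print(unet.config.sample_size)          # 96
print(unet.config.cross_attention_dim)  # 1024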
checkpoint/18000/unet/diffusion_pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7b8f2497bc091751fd2de97818435d488a6731b52cce684162e31ab444434079
- size 609972224
checkpoint/18000/vae/config.json DELETED
@@ -1,31 +0,0 @@
- {
-   "_class_name": "AutoencoderKL",
-   "_diffusers_version": "0.15.0.dev0",
-   "_name_or_path": "/data/gyl/sdft/experiments/dataset_p1/outputs/3000",
-   "act_fn": "silu",
-   "block_out_channels": [
-     128,
-     256,
-     512,
-     512
-   ],
-   "down_block_types": [
-     "DownEncoderBlock2D",
-     "DownEncoderBlock2D",
-     "DownEncoderBlock2D",
-     "DownEncoderBlock2D"
-   ],
-   "in_channels": 3,
-   "latent_channels": 4,
-   "layers_per_block": 2,
-   "norm_num_groups": 32,
-   "out_channels": 3,
-   "sample_size": 768,
-   "scaling_factor": 0.18215,
-   "up_block_types": [
-     "UpDecoderBlock2D",
-     "UpDecoderBlock2D",
-     "UpDecoderBlock2D",
-     "UpDecoderBlock2D"
-   ]
- }
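With four encoder blocks and three downsampling stages, this VAE maps 768x768 RGB images to 4-channel 96x96 latents, consistent with the UNet's sample_size. A sketch of the encode path on a dummy batch, assumptions as above:

# Sketch: encode a dummy image and check the latent shape.
import torch
from diffusers import AutoencoderKL

vae = AutoencoderKL.from_pretrained("checkpoint/18000", subfolder="vae")
with torch.no_grad():
    img = torch.randn(1, 3, 768, 768)  # dummy image batch
    latents = vae.encode(img).latent_dist.sample() * vae.config.scaling_factor
print(latents.shape)  # torch.Size([1, 4, 96, 96])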
checkpoint/18000/vae/diffusion_pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:36bb8e1b54aba3a0914eb35fba13dcb107e9f18d379d1df2158732cd4bf56a94
- size 334711857