Transformers
PyTorch
English
mplug_owl2
Inference Endpoints
teowu committed
Commit 50fc0fe
1 Parent(s): 909ac64

Delete .ipynb_checkpoints

.ipynb_checkpoints/config-checkpoint.json DELETED
@@ -1,176 +0,0 @@
- {
-   "attention_bias": false,
-   "bos_token_id": 1,
-   "eos_token_id": 2,
-   "hidden_act": "silu",
-   "hidden_size": 4096,
-   "initializer_range": 0.02,
-   "intermediate_size": 11008,
-   "max_position_embeddings": 2048,
-   "model_type": "mplug_owl2",
-   "num_attention_heads": 32,
-   "num_hidden_layers": 32,
-   "num_key_value_heads": 32,
-   "pretraining_tp": 1,
-   "rms_norm_eps": 1e-06,
-   "rope_scaling": null,
-   "rope_theta": 10000.0,
-   "tie_word_embeddings": false,
-   "transformers_version": "4.28.1",
-   "use_cache": true,
-   "visual_config": {
-     "visual_abstractor": {
-       "_name_or_path": "",
-       "add_cross_attention": false,
-       "architectures": null,
-       "attention_probs_dropout_prob": 0.0,
-       "bad_words_ids": null,
-       "begin_suppress_tokens": null,
-       "bos_token_id": null,
-       "chunk_size_feed_forward": 0,
-       "cross_attention_hidden_size": null,
-       "decoder_start_token_id": null,
-       "diversity_penalty": 0.0,
-       "do_sample": false,
-       "early_stopping": false,
-       "encoder_hidden_size": 1024,
-       "encoder_no_repeat_ngram_size": 0,
-       "eos_token_id": null,
-       "exponential_decay_length_penalty": null,
-       "finetuning_task": null,
-       "forced_bos_token_id": null,
-       "forced_eos_token_id": null,
-       "grid_size": 32,
-       "hidden_size": 1024,
-       "id2label": {
-         "0": "LABEL_0",
-         "1": "LABEL_1"
-       },
-       "initializer_range": 0.02,
-       "intermediate_size": 2816,
-       "is_decoder": false,
-       "is_encoder_decoder": false,
-       "label2id": {
-         "LABEL_0": 0,
-         "LABEL_1": 1
-       },
-       "layer_norm_eps": 1e-06,
-       "length_penalty": 1.0,
-       "max_length": 20,
-       "min_length": 0,
-       "model_type": "mplug_owl_visual_abstract",
-       "no_repeat_ngram_size": 0,
-       "num_attention_heads": 16,
-       "num_beam_groups": 1,
-       "num_beams": 1,
-       "num_hidden_layers": 6,
-       "num_learnable_queries": 64,
-       "num_return_sequences": 1,
-       "output_attentions": false,
-       "output_hidden_states": false,
-       "output_scores": false,
-       "pad_token_id": null,
-       "prefix": null,
-       "problem_type": null,
-       "pruned_heads": {},
-       "remove_invalid_values": false,
-       "repetition_penalty": 1.0,
-       "return_dict": true,
-       "return_dict_in_generate": false,
-       "sep_token_id": null,
-       "suppress_tokens": null,
-       "task_specific_params": null,
-       "temperature": 1.0,
-       "tf_legacy_loss": false,
-       "tie_encoder_decoder": false,
-       "tie_word_embeddings": true,
-       "tokenizer_class": null,
-       "top_k": 50,
-       "top_p": 1.0,
-       "torch_dtype": null,
-       "torchscript": false,
-       "transformers_version": "4.28.1",
-       "typical_p": 1.0,
-       "use_bfloat16": false
-     },
-     "visual_model": {
-       "_name_or_path": "",
-       "add_cross_attention": false,
-       "architectures": null,
-       "attention_dropout": 0.0,
-       "bad_words_ids": null,
-       "begin_suppress_tokens": null,
-       "bos_token_id": null,
-       "chunk_size_feed_forward": 0,
-       "cross_attention_hidden_size": null,
-       "decoder_start_token_id": null,
-       "diversity_penalty": 0.0,
-       "do_sample": false,
-       "early_stopping": false,
-       "encoder_no_repeat_ngram_size": 0,
-       "eos_token_id": null,
-       "exponential_decay_length_penalty": null,
-       "finetuning_task": null,
-       "forced_bos_token_id": null,
-       "forced_eos_token_id": null,
-       "hidden_act": "quick_gelu",
-       "hidden_size": 1024,
-       "id2label": {
-         "0": "LABEL_0",
-         "1": "LABEL_1"
-       },
-       "image_size": 448,
-       "initializer_factor": 1.0,
-       "initializer_range": 0.02,
-       "intermediate_size": 4096,
-       "is_decoder": false,
-       "is_encoder_decoder": false,
-       "label2id": {
-         "LABEL_0": 0,
-         "LABEL_1": 1
-       },
-       "layer_norm_eps": 1e-06,
-       "length_penalty": 1.0,
-       "max_length": 20,
-       "min_length": 0,
-       "model_type": "mplug_owl_vision_model",
-       "no_repeat_ngram_size": 0,
-       "num_attention_heads": 16,
-       "num_beam_groups": 1,
-       "num_beams": 1,
-       "num_channels": 3,
-       "num_hidden_layers": 24,
-       "num_return_sequences": 1,
-       "output_attentions": false,
-       "output_hidden_states": false,
-       "output_scores": false,
-       "pad_token_id": null,
-       "patch_size": 14,
-       "prefix": null,
-       "problem_type": null,
-       "projection_dim": 768,
-       "pruned_heads": {},
-       "remove_invalid_values": false,
-       "repetition_penalty": 1.0,
-       "return_dict": true,
-       "return_dict_in_generate": false,
-       "sep_token_id": null,
-       "suppress_tokens": null,
-       "task_specific_params": null,
-       "temperature": 1.0,
-       "tf_legacy_loss": false,
-       "tie_encoder_decoder": false,
-       "tie_word_embeddings": true,
-       "tokenizer_class": null,
-       "top_k": 50,
-       "top_p": 1.0,
-       "torch_dtype": null,
-       "torchscript": false,
-       "transformers_version": "4.28.1",
-       "typical_p": 1.0,
-       "use_bfloat16": false,
-       "use_flash_attn": false
-     }
-   },
-   "vocab_size": 32000
- }
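
The deleted file was only a Jupyter checkpoint copy of the repository's config.json, so no configuration is lost by this commit. As a minimal sketch (not part of the commit, and assuming a local copy named "config.json" with the same content as shown above), the nesting of the language-model fields and the two sub-configs under "visual_config" can be inspected like this:

    # Minimal sketch: inspect the nested mPLUG-Owl2 config.
    # Assumes a local "config.json" identical to the deleted checkpoint copy.
    import json

    with open("config.json") as f:
        cfg = json.load(f)

    # Top-level keys describe the LLaMA-style language model.
    print(cfg["model_type"], cfg["hidden_size"], cfg["num_hidden_layers"])  # mplug_owl2 4096 32

    # "visual_config" nests two sub-configs: the vision encoder and the visual abstractor.
    vision = cfg["visual_config"]["visual_model"]
    abstractor = cfg["visual_config"]["visual_abstractor"]
    print(vision["image_size"], vision["patch_size"], vision["num_hidden_layers"])  # 448 14 24
    print(abstractor["num_learnable_queries"], abstractor["hidden_size"])           # 64 1024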