nielsr HF staff committed on
Commit
99cdf5e
1 Parent(s): b7a9a84

Upload DetaForObjectDetection

Browse files
Files changed (1) hide show
  1. config.json +1 -3
config.json CHANGED
@@ -8,7 +8,6 @@
8
  "assign_first_stage": true,
9
  "attention_dropout": 0.0,
10
  "auxiliary_loss": false,
11
- "backbone": "resnet50",
12
  "backbone_config": {
13
  "_name_or_path": "",
14
  "add_cross_attention": false,
@@ -112,7 +111,6 @@
112
  "decoder_layers": 6,
113
  "decoder_n_points": 4,
114
  "dice_loss_coefficient": 1,
115
- "dilation": false,
116
  "dropout": 0.1,
117
  "encoder_attention_heads": 8,
118
  "encoder_ffn_dim": 2048,
@@ -303,7 +301,7 @@
303
  "zebra": 24
304
  },
305
  "mask_loss_coefficient": 1,
306
- "max_position_embeddings": 1024,
307
  "model_type": "deta",
308
  "num_feature_levels": 5,
309
  "num_queries": 900,
 
8
  "assign_first_stage": true,
9
  "attention_dropout": 0.0,
10
  "auxiliary_loss": false,
 
11
  "backbone_config": {
12
  "_name_or_path": "",
13
  "add_cross_attention": false,
 
111
  "decoder_layers": 6,
112
  "decoder_n_points": 4,
113
  "dice_loss_coefficient": 1,
 
114
  "dropout": 0.1,
115
  "encoder_attention_heads": 8,
116
  "encoder_ffn_dim": 2048,
 
301
  "zebra": 24
302
  },
303
  "mask_loss_coefficient": 1,
304
+ "max_position_embeddings": 2048,
305
  "model_type": "deta",
306
  "num_feature_levels": 5,
307
  "num_queries": 900,