nielsr (HF staff) committed
Commit ca27551
1 Parent(s): a0177c9

Upload DetaForObjectDetection

Files changed (1):
  1. config.json (+1 -3)
config.json CHANGED
@@ -8,7 +8,6 @@
   "assign_first_stage": true,
   "attention_dropout": 0.0,
   "auxiliary_loss": false,
-  "backbone": "resnet50",
   "backbone_config": {
     "_name_or_path": "",
     "add_cross_attention": false,
@@ -125,7 +124,6 @@
   "decoder_layers": 6,
   "decoder_n_points": 4,
   "dice_loss_coefficient": 1,
-  "dilation": false,
   "dropout": 0.1,
   "encoder_attention_heads": 8,
   "encoder_ffn_dim": 2048,
@@ -316,7 +314,7 @@
     "zebra": 24
   },
   "mask_loss_coefficient": 1,
-  "max_position_embeddings": 1024,
+  "max_position_embeddings": 2048,
   "model_type": "deta",
   "num_feature_levels": 5,
   "num_queries": 900,