nielsr (HF staff) committed
Commit 428d56a
Parent: fee196e

Fix bug, should now be using absolute position embeddings

Files changed (2):
  1. config.json +1 -1
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -30,7 +30,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 0,
   "positive_label_weight": 10.0,
-  "reset_position_index_per_cell": true,
+  "reset_position_index_per_cell": false,
   "select_one_column": true,
   "softmax_temperature": 1.0,
   "type_vocab_size": [
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bd1491adfd4415f1106e494181fa34ed4d418143f4084f1bdff13e9ad1598ef4
+oid sha256:50a94860cdd469b380a8938f4b0017c7168c43cbe05eca8ab6563241906646bc
 size 442778807
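
To confirm that a locally downloaded pytorch_model.bin matches the new LFS pointer, hashing the file should reproduce the oid above; a minimal sketch, assuming the file sits in the current directory:

import hashlib

EXPECTED = "50a94860cdd469b380a8938f4b0017c7168c43cbe05eca8ab6563241906646bc"

sha256 = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:  # local path is an assumption
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)

print(sha256.hexdigest() == EXPECTED)  # True if the download matches this commit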