juliagsy committed
Commit 652df57
1 Parent(s): ac1fbf3
Files changed (2):
  1. config.json +8 -8
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "google/tapas-base",
+  "_name_or_path": "google/tapas-large-finetuned-sqa",
   "aggregation_labels": null,
   "aggregation_loss_weight": 1.0,
   "aggregation_temperature": 1.0,
@@ -9,19 +9,19 @@
   "architectures": [
     "TapasForQuestionAnswering"
   ],
-  "attention_probs_dropout_prob": 0.0,
+  "attention_probs_dropout_prob": 0.1,
   "average_approximation_function": "ratio",
   "average_logits_per_cell": false,
   "cell_selection_preference": null,
   "disable_per_token_loss": false,
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.07,
-  "hidden_size": 768,
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 1024,
   "huber_loss_delta": null,
   "init_cell_selection_weights_to_zero": false,
   "initializer_range": 0.02,
-  "intermediate_size": 3072,
+  "intermediate_size": 4096,
   "layer_norm_eps": 1e-12,
   "max_num_columns": 32,
   "max_num_rows": 64,
@@ -29,15 +29,15 @@
   "model_type": "tapas",
   "no_aggregation_label_index": null,
   "num_aggregation_labels": 0,
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
   "pad_token_id": 0,
   "positive_label_weight": 10.0,
   "reset_position_index_per_cell": true,
   "select_one_column": true,
   "softmax_temperature": 1.0,
   "torch_dtype": "float32",
-  "transformers_version": "4.14.0.dev0",
+  "transformers_version": "4.16.0.dev0",
   "type_vocab_size": [
     3,
     256,
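The new values match the standard TAPAS-large geometry (24 layers, 16 attention heads, hidden size 1024, intermediate size 4096), replacing the TAPAS-base geometry (12 layers, 12 heads, 768, 3072) and pointing "_name_or_path" at the SQA-finetuned large checkpoint. A minimal sketch of sanity-checking those values against the upstream google/tapas-large-finetuned-sqa config; it assumes a transformers release with TAPAS support is installed and the Hub is reachable, and is illustrative rather than part of the commit:

from transformers import TapasConfig

# Pull the config of the upstream checkpoint named in "_name_or_path" above.
config = TapasConfig.from_pretrained("google/tapas-large-finetuned-sqa")

# Geometry values that should agree with the "+" lines of the config.json diff.
assert config.hidden_size == 1024
assert config.num_hidden_layers == 24
assert config.num_attention_heads == 16
assert config.intermediate_size == 4096
print(config.architectures)  # e.g. ["TapasForQuestionAnswering"]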
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9ca6154f045da230c80ab3e78931bc4bf6197ee8ffc133476542e4bff7ac9a88
-size 442782088
+oid sha256:a4a4aaf5073da6d0d71ca67a4f9f6fc7398b8e88dcef8b94ae86d3fe082b7374
+size 1347091016
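The replacement pointer describes a roughly 1.35 GB float32 checkpoint (1347091016 bytes), consistent with the switch from the ~443 MB base model to the large model. A minimal standard-library sketch of verifying a locally downloaded pytorch_model.bin against the pointer's oid and size; the local path is an assumption and the snippet is not part of the commit:

import hashlib
import os

def verify_lfs_object(path, expected_sha256, expected_size):
    # Compare the on-disk file with the oid/size recorded in the LFS pointer.
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

# Values taken from the "+" lines of the pointer diff above.
ok = verify_lfs_object(
    "pytorch_model.bin",  # assumed local download path
    "a4a4aaf5073da6d0d71ca67a4f9f6fc7398b8e88dcef8b94ae86d3fe082b7374",
    1347091016,
)
print("checksum OK" if ok else "checksum mismatch")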