osbm committed
Commit: 98a45d1
Parent: 442bc35

Upload with huggingface_hub
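
The commit message indicates the file was pushed with the huggingface_hub client. A minimal sketch of the kind of call that produces such a commit; the repo id below is a placeholder, not taken from this page:

from huggingface_hub import HfApi

api = HfApi()
# Push the local config.json to the Hub. "user/repo" is a hypothetical
# target -- substitute the actual <namespace>/<model> repository.
api.upload_file(
    path_or_fileobj="config.json",
    path_in_repo="config.json",
    repo_id="user/repo",
    commit_message="Upload with huggingface_hub",
)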

Files changed (1):
  1. config.json (+10 -3)
config.json CHANGED
@@ -1,6 +1,7 @@
 {
+  "_name_or_path": "data/saved_models/our_90epochs_saved_model",
   "architectures": [
-    "RobertaForMaskedLM"
+    "RobertaForSelfiesClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
@@ -9,13 +10,19 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
+  "id2label": {
+    "0": "LABEL_0"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "label2id": {
+    "LABEL_0": 0
+  },
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 514,
   "model_type": "roberta",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 8,
+  "num_attention_heads": 4,
+  "num_hidden_layers": 12,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",