Azaghast committed
Commit facbe5c
1 parent: cedc580

Update model config

Files changed (1):
  config.json  +4 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "C://Users//Azaghast//SCP Generator//Models//Containment Procedures//pytorch_model.bin",
+  "_name_or_path": "Azaghast/GPT2-SCP-ContainmentProcedures",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -18,7 +18,9 @@
   "n_inner": null,
   "n_layer": 12,
   "n_positions": 1024,
+  "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
+  "scale_attn_by_inverse_layer_idx": false,
   "scale_attn_weights": true,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
@@ -32,7 +34,7 @@
     }
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.9.0",
+  "transformers_version": "4.19.2",
   "use_cache": true,
   "vocab_size": 50260
 }
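
For reference, a minimal sketch of loading the model against this updated config. It assumes the checkpoint is published on the Hugging Face Hub under the repo id that the new "_name_or_path" value points to (this commit itself does not confirm where the weights are hosted); GPT2LMHeadModel matches the "architectures" entry in the config.

# Minimal sketch, assuming "Azaghast/GPT2-SCP-ContainmentProcedures" is the
# published Hub repo id referenced by the updated "_name_or_path".
from transformers import GPT2LMHeadModel

repo_id = "Azaghast/GPT2-SCP-ContainmentProcedures"
model = GPT2LMHeadModel.from_pretrained(repo_id)

# The two attention flags added by this commit are ordinary config attributes:
print(model.config.reorder_and_upcast_attn)           # False per the new config
print(model.config.scale_attn_by_inverse_layer_idx)   # False per the new config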