BearlyWorkingYT committed on
Commit
3261ade
1 Parent(s): c404536

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "facebook/opt-125m",
3
  "_remove_final_layer_norm": false,
4
  "activation_dropout": 0.0,
5
  "activation_function": "relu",
@@ -25,5 +25,5 @@
25
  "transformers_version": "4.20.1",
26
  "use_cache": true,
27
  "vocab_size": 50272,
28
- "word_embed_proj_dim": 768,
29
  }
 
1
  {
2
+ "_name_or_path": "Model\\",
3
  "_remove_final_layer_norm": false,
4
  "activation_dropout": 0.0,
5
  "activation_function": "relu",
 
25
  "transformers_version": "4.20.1",
26
  "use_cache": true,
27
  "vocab_size": 50272,
28
+ "word_embed_proj_dim": 768
29
  }