Text Generation
Transformers
PyTorch
Safetensors
gpt2
stable-diffusion
prompt-generator
distilgpt2
Inference Endpoints
text-generation-inference
FredZhang7 committed
Commit 984eebd
1 Parent(s): f2be806

Update config.json

Files changed (1)
  1. config.json +1 -9
config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "_name_or_path": "distilgpt2",
   "_num_labels": 1,
   "activation_function": "gelu_new",
   "architectures": [
@@ -21,13 +20,9 @@
   "n_ctx": 1024,
   "n_embd": 768,
   "n_head": 12,
-  "n_inner": null,
   "n_layer": 6,
   "n_positions": 1024,
-  "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
-  "scale_attn_by_inverse_layer_idx": false,
-  "scale_attn_weights": true,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
   "summary_proj_to_labels": true,
@@ -39,8 +34,5 @@
       "max_length": 50
     }
   },
-  "torch_dtype": "float32",
-  "transformers_version": "4.21.2",
-  "use_cache": true,
   "vocab_size": 50257
-}
+}
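For context, the keys dropped here ("_name_or_path", "n_inner", "reorder_and_upcast_attn", "scale_attn_by_inverse_layer_idx", "scale_attn_weights", "torch_dtype", "transformers_version", "use_cache") either carry the values that recent Transformers releases already use as GPT2Config defaults or are bookkeeping fields rewritten at save time, so the trimmed config should load to an equivalent model. Below is a minimal sketch of checking that, assuming the repo id "FredZhang7/distilgpt2-stable-diffusion" (inferred from the tags above, not stated in this commit):

import torch
from transformers import GPT2Config, GPT2LMHeadModel

# Assumed repo id; substitute the actual repository this commit belongs to.
repo_id = "FredZhang7/distilgpt2-stable-diffusion"

# The deleted keys fall back to GPT2Config's built-in defaults
# (n_inner=None, scale_attn_weights=True, use_cache=True, ...).
config = GPT2Config.from_pretrained(repo_id)
for key in ("n_inner", "reorder_and_upcast_attn",
            "scale_attn_by_inverse_layer_idx",
            "scale_attn_weights", "use_cache"):
    print(key, "=", getattr(config, key))

# With "torch_dtype" removed from config.json, choose the dtype at load time.
model = GPT2LMHeadModel.from_pretrained(repo_id, torch_dtype=torch.float32)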