deepparag committed
Commit 1154b07
1 Parent(s): 2763478
Files changed (2)
  1. config.json +17 -42
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,46 +1,27 @@
 {
-  "_name_or_path": "./output-small",
+  "_name_or_path": "transfaeries/DialoGPT-medium-Discord-1.0",
   "activation_function": "gelu_new",
   "architectures": [
-    "GPTNeoForCausalLM"
-  ],
-  "attention_dropout": 0,
-  "attention_layers": [
-    "global",
-    "local",
-    "global",
-    "local",
-    "global",
-    "local",
-    "global",
-    "local",
-    "global",
-    "local",
-    "global",
-    "local"
-  ],
-  "attention_types": [
-    [
-      [
-        "global",
-        "local"
-      ],
-      6
-    ]
+    "GPT2LMHeadModel"
   ],
+  "attn_pdrop": 0.1,
   "bos_token_id": 50256,
-  "embed_dropout": 0,
+  "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "gradient_checkpointing": false,
-  "hidden_size": 768,
   "initializer_range": 0.02,
-  "intermediate_size": null,
   "layer_norm_epsilon": 1e-05,
-  "max_position_embeddings": 2048,
-  "model_type": "gpt_neo",
-  "num_heads": 12,
-  "num_layers": 12,
-  "resid_dropout": 0,
+  "model_type": "gpt2",
+  "n_ctx": 1024,
+  "n_embd": 1024,
+  "n_head": 16,
+  "n_inner": null,
+  "n_layer": 24,
+  "n_positions": 1024,
+  "reorder_and_upcast_attn": false,
+  "resid_pdrop": 0.1,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
   "summary_proj_to_labels": true,
@@ -48,17 +29,11 @@
   "summary_use_proj": true,
   "task_specific_params": {
     "conversational": {
-      "do_sample": true,
-      "max_length": 1000,
-      "no_repeat_ngram_size": 4,
-      "temperature": 0.8,
-      "top_k": 100,
-      "top_p": 0.7
+      "max_length": 1000
     }
   },
   "torch_dtype": "float32",
   "transformers_version": "4.12.5",
   "use_cache": true,
-  "vocab_size": 50257,
-  "window_size": 256
+  "vocab_size": 50257
 }
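The new config describes a GPT-2 architecture with GPT-2-medium dimensions (24 layers, 16 heads, 1024-dim embeddings) in place of the previous 12-layer GPT-Neo, and trims the conversational defaults down to max_length. A minimal sketch (not part of the commit) of how transformers would consume the committed file, assuming config.json sits in the working directory:

from transformers import GPT2Config, GPT2LMHeadModel

# Parse the committed config; the conversational task_specific_params
# ride along on the resulting config object.
config = GPT2Config.from_json_file("config.json")
print(config.model_type, config.n_layer, config.n_head, config.n_embd)  # gpt2 24 16 1024

# Constructing from the config alone yields randomly initialized weights;
# from_pretrained on the repo directory would also load pytorch_model.bin.
model = GPT2LMHeadModel(config)
print(f"{model.num_parameters():,} parameters")  # roughly 355M at these dimensions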
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e4d4ce88d87877ad3aa3bfd255578097fa36b64e819ce52c42b11403b9074d96
-size 551192465
+oid sha256:87667e6adf0942cf87ddb4faa5562153dd7c6c8a2a21f1d4288c37b891ce9040
+size 1444576537
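Only the LFS pointer changes here; the weights themselves live in LFS storage, addressed by the sha256 oid. The new size is consistent with the config change: 1,444,576,537 bytes at 4 bytes per float32 is roughly 361M values, in the right range for the ~355M-parameter GPT-2-medium dimensions above, while the old 551,192,465-byte file matches the smaller 12-layer, 768-dim model (~138M values). A sketch (not part of the commit) for checking a pulled blob against the pointer, assuming the real file has been fetched via `git lfs pull` into the working directory:

import hashlib
import os

# Values copied from the LFS pointer committed above.
EXPECTED_OID = "87667e6adf0942cf87ddb4faa5562153dd7c6c8a2a21f1d4288c37b891ce9040"
EXPECTED_SIZE = 1444576537

path = "pytorch_model.bin"
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

# Hash in 1 MiB chunks so the 1.4 GB file never has to fit in memory.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_OID, "hash mismatch"
print("blob matches the LFS pointer")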