deepparag committed
Commit 9494a52
1 Parent(s): 8b07fe7
Files changed (4):
  1. config.json +5 -13
  2. pytorch_model.bin +2 -2
  3. tokenizer_config.json +1 -1
  4. training_args.bin +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "output-small",
+  "_name_or_path": "microsoft/DialoGPT-small",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -8,6 +8,7 @@
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
+  "gradient_checkpointing": false,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
@@ -17,10 +18,7 @@
   "n_inner": null,
   "n_layer": 12,
   "n_positions": 1024,
-  "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
-  "scale_attn_by_inverse_layer_idx": false,
-  "scale_attn_weights": true,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
   "summary_proj_to_labels": true,
@@ -28,16 +26,10 @@
   "summary_use_proj": true,
   "task_specific_params": {
     "conversational": {
-      "max_length":200,
-      "no_repeat_ngram_size":3,
-      "do_sample":true,
-      "top_k":100,
-      "top_p":0.7,
-      "temperature":0.8
+      "max_length": 1000
     }
   },
-  "torch_dtype": "float32",
-  "transformers_version": "4.12.2",
-  "use_cache": false,
+  "transformers_version": "4.5.1",
+  "use_cache": true,
   "vocab_size": 50257
 }
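Not part of the commit, but for context: a minimal sketch of how the updated config.json above is consumed with transformers. The path "./" is a hypothetical placeholder for a local checkout of this repository (the exact repo id is not shown on this page); after this commit the conversational defaults only declare max_length, so the generation call passes it explicitly.

# Minimal sketch, assuming a local checkout of this repository at "./".
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

config = AutoConfig.from_pretrained("./")               # parses the config.json diffed above
conv = config.task_specific_params["conversational"]    # {"max_length": 1000} after this commit

tokenizer = AutoTokenizer.from_pretrained("./")         # GPT-2 BPE tokenizer (tokenizer_config.json)
model = AutoModelForCausalLM.from_pretrained("./")      # GPT2LMHeadModel weights (pytorch_model.bin)

# DialoGPT-style single-turn exchange: the prompt ends with the EOS token.
inputs = tokenizer("Hello, how are you?" + tokenizer.eos_token, return_tensors="pt")
reply_ids = model.generate(
    inputs["input_ids"],
    max_length=conv["max_length"],                      # 1000; the old sampling defaults were removed
    pad_token_id=tokenizer.eos_token_id,
)
print(tokenizer.decode(reply_ids[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))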
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:77d5d0a44e815587694cddcca48c0a93d52e2cae9370a9ca440f0c0101416346
-size 510401385
+oid sha256:b01e921afa4d202ec51b87870b7a96af20dc7976a2567383c850746f2e7dd80a
+size 510403817
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "microsoft/DialoGPT-small", "errors": "replace", "tokenizer_class": "GPT2Tokenizer"}
+{"unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "microsoft/DialoGPT-small", "errors": "replace"}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:21c2c95ba2743fc3d8cdbf71abde908369a5125d787da751207edb843d341111
-size 1391
+oid sha256:e6fa610da259cae1a376d6b5ae66fa72e4873d9ea870c6e6d1e27af759190391
+size 1327
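Both binary files are stored as Git LFS pointers (oid plus size), so only the hashes change in the diff. training_args.bin itself is the TrainingArguments object the transformers Trainer pickles via torch.save; a small sketch of inspecting it after pulling the LFS content:

# Sketch: inspect the pickled TrainingArguments. On recent PyTorch versions torch.load
# defaults to weights_only=True, which rejects pickled Python objects, hence the flag.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.per_device_train_batch_size)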