brianwwwww1231 committed
Commit a1c0794 · 1 Parent(s): c50e89f

Upload folder using huggingface_hub

Files changed (5):
  1. config.json +3 -1
  2. generation_config.json +2 -0
  3. pytorch_model.bin +1 -1
  4. tokenizer.json +0 -0
  5. vocab.json +0 -0
config.json CHANGED
@@ -1,15 +1,17 @@
 {
-  "_name_or_path": "gpt2",
+  "_name_or_path": "brianwwwww1231/tcp2023",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
   ],
   "attn_pdrop": 0.1,
   "bos_token_id": 50256,
+  "do_sample": true,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
+  "max_length": 50,
   "model_type": "gpt2",
   "n_ctx": 1024,
   "n_embd": 768,
generation_config.json CHANGED
@@ -1,6 +1,8 @@
 {
   "_from_model_config": true,
   "bos_token_id": 50256,
+  "do_sample": true,
   "eos_token_id": 50256,
+  "max_length": 50,
   "transformers_version": "4.34.1"
 }
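The two added keys become the model's default decoding settings: model.generate() reads them from generation_config.json whenever the call itself does not override them. A minimal sketch, reusing the tokenizer and model loaded in the previous snippet:

```python
prompt = "Hello, my name is"
inputs = tokenizer(prompt, return_tensors="pt")

# With do_sample=true and max_length=50 coming from generation_config.json,
# generate() samples from the token distribution instead of decoding greedily,
# and stops once the whole sequence (prompt + continuation) reaches 50 tokens.
output_ids = model.generate(**inputs)

print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```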
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ebc50a3e048f8dd87bb1a3ddc2fe5c5336c2fdc705106fda1d8f7d87366f8d95
+oid sha256:f1a1e49cb31597ac7e27f0a170287ddefcac0bd3ffa87dd9589afb961169eaa7
 size 497807706
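Only this Git LFS pointer is tracked in git; the 498 MB of weights live in LFS storage, and the changed oid (the SHA-256 of the file contents) shows the tensor data changed even though the byte size is identical. A minimal integrity-check sketch, assuming pytorch_model.bin has already been downloaded to the working directory (hypothetical local path):

```python
import hashlib

# The oid recorded in the new LFS pointer above.
EXPECTED_OID = "f1a1e49cb31597ac7e27f0a170287ddefcac0bd3ffa87dd9589afb961169eaa7"

# Hash in 1 MiB chunks so the 498 MB binary never sits in memory at once.
digest = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED_OID, "checksum does not match LFS pointer"
print("pytorch_model.bin matches the pointer oid")
```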
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff.
vocab.json CHANGED
The diff for this file is too large to render. See raw diff.