rajabmondal committed
Commit
2d03df5
1 Parent(s): 817069a

Upload 6 files

Files changed (6)
  1. config.json +31 -0
  2. generation_config.json +6 -0
  3. merges.txt +0 -0
  4. tokenizer.json +0 -0
  5. tokenizer_config.json +30 -0
  6. vocab.json +0 -0
config.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "activation_function": "gelu_pytorch_tanh",
+   "architectures": [
+     "GPTBigCodeForCausalLM"
+   ],
+   "attention_softmax_in_fp32": true,
+   "attn_pdrop": 0.1,
+   "bos_token_id": 50256,
+   "embd_pdrop": 0.1,
+   "eos_token_id": 50256,
+   "initializer_range": 0.02,
+   "layer_norm_epsilon": 1e-05,
+   "model_type": "gpt_bigcode",
+   "multi_query": true,
+   "n_embd": 2048,
+   "n_head": 16,
+   "n_inner": 8192,
+   "n_layer": 24,
+   "n_positions": 8192,
+   "resid_pdrop": 0.1,
+   "scale_attention_softmax_in_fp32": true,
+   "scale_attn_weights": true,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "transformers_version": "4.39.2",
+   "use_cache": true,
+   "vocab_size": 49152
+ }
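
This is a StarCoder-style GPTBigCode configuration: 24 layers, 16 attention heads, 2048-dim hidden states, multi-query attention, and an 8192-token context (n_positions). A minimal sketch of building the architecture from this file, using the standard transformers APIs (gpt_bigcode support landed in transformers 4.28); since the repo id is not shown here, this reads the local file and yields randomly initialized weights:

from transformers import GPTBigCodeConfig, GPTBigCodeForCausalLM

# Build the config object directly from the uploaded file.
config = GPTBigCodeConfig.from_json_file("config.json")

# Instantiate the architecture (random weights; from_pretrained() on the
# model repo would load the trained checkpoint instead).
model = GPTBigCodeForCausalLM(config)
print(f"{model.num_parameters() / 1e9:.2f}B parameters")  # roughly 1.1B at these dimensions
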
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 0,
+   "eos_token_id": 0,
+   "transformers_version": "4.28.1"
+ }
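
Note the override: config.json above sets bos_token_id/eos_token_id to 50256 (GPT-2's defaults, which fall outside this model's 49152-token vocabulary), while this file sets both to 0, the id of <|endoftext|> in this tokenizer. At generate() time the generation config takes precedence over the model config. A minimal sketch of inspecting it, assuming these files sit in the current directory (the "." path is a placeholder):

from transformers import GenerationConfig

# Reads generation_config.json from the given directory.
gen_config = GenerationConfig.from_pretrained(".")
print(gen_config.bos_token_id, gen_config.eos_token_id)  # 0 0
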
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "add_prefix_space": false,
+   "additional_special_tokens": [
+     "<|endoftext|>",
+     "<fim_prefix>",
+     "<fim_middle>",
+     "<fim_suffix>",
+     "<fim_pad>",
+     "<filename>",
+     "<gh_stars>",
+     "<issue_start>",
+     "<issue_comment>",
+     "<issue_closed>",
+     "<jupyter_start>",
+     "<jupyter_text>",
+     "<jupyter_code>",
+     "<jupyter_output>",
+     "<empty_output>",
+     "<commit_before>",
+     "<commit_msg>",
+     "<commit_after>",
+     "<reponame>"
+   ],
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 1000000000000000019884624838656,
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>",
+   "vocab_size": 49152
+ }
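
The additional_special_tokens are the StarCoder fill-in-the-middle (FIM) and metadata sentinels; the huge model_max_length is transformers' "unset" sentinel (int(1e30)), so the effective context length comes from n_positions (8192) in config.json. A minimal sketch of FIM prompting with these tokens, assuming the tokenizer files from this commit are in the current directory:

from transformers import AutoTokenizer

# Reads tokenizer.json, tokenizer_config.json, vocab.json, and merges.txt.
tok = AutoTokenizer.from_pretrained(".")

# Fill-in-the-middle: the model is asked to produce the span between
# prefix and suffix after the <fim_middle> sentinel.
prompt = "<fim_prefix>def add(a, b):\n    return <fim_suffix>\n<fim_middle>"
ids = tok(prompt).input_ids

# Each sentinel encodes as a single special token, not a split-up string.
print(tok.convert_ids_to_tokens(ids)[:1])  # ['<fim_prefix>']
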
vocab.json ADDED
The diff for this file is too large to render. See raw diff