5dimension committed on
Commit
0c2a506
·
verified ·
1 Parent(s): a36fbe7

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. config.json +1 -0
  2. model.pt +3 -0
  3. tokenizer.json +0 -0
  4. tokenizer_config.json +13 -0
config.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"type": "sentinel_text", "params": 13410304, "vocab": 50257, "d": 128, "h": 4, "l": 4, "ff": 256, "seq": 128, "act": "sentinel_sech", "attn": "sentinel_sech", "dataset": "TinyStories_1K", "samples": 200, "final_loss": 7.026876902580261, "sample": "Once upon a time, a little cat a a a a a a a a a a a a a a a a a a a a"}
model.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2c73fb4527eb99fc1e9b76ba9d80145606a7242fba2a78b2f98520793ef5599e
3
+ size 53655758
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_prefix_space": false,
3
+ "backend": "tokenizers",
4
+ "bos_token": "<|endoftext|>",
5
+ "eos_token": "<|endoftext|>",
6
+ "errors": "replace",
7
+ "is_local": false,
8
+ "local_files_only": false,
9
+ "model_max_length": 1024,
10
+ "pad_token": "<|endoftext|>",
11
+ "tokenizer_class": "GPT2Tokenizer",
12
+ "unk_token": "<|endoftext|>"
13
+ }