chavinlo committed
Commit f4f7df0
1 Parent(s): bcf1a64

Delete output

output/added_tokens.json DELETED
@@ -1,3 +0,0 @@
-{
-  "[PAD]": 32000
-}
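
For context, a file like the deleted added_tokens.json is what transformers writes out when a special token is appended to a tokenizer and saved. A minimal sketch of that step, assuming the base checkpoint named in config.json below (this historic checkpoint's tokenizer class naming may not load cleanly on newer transformers releases):

from transformers import AutoTokenizer

# Load the base tokenizer; its vocabulary covers ids 0..31999.
tokenizer = AutoTokenizer.from_pretrained("decapoda-research/llama-7b-hf")

# Registering a pad token assigns it the next free id, 32000,
# matching the "[PAD]": 32000 entry in the deleted file.
num_added = tokenizer.add_special_tokens({"pad_token": "[PAD]"})
assert num_added == 1

# save_pretrained writes added_tokens.json alongside the tokenizer files.
tokenizer.save_pretrained("output")
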
output/config.json DELETED
@@ -1,23 +0,0 @@
-{
-  "_name_or_path": "decapoda-research/llama-7b-hf",
-  "architectures": [
-    "LLaMAForCausalLM"
-  ],
-  "bos_token_id": 0,
-  "eos_token_id": 1,
-  "hidden_act": "silu",
-  "hidden_size": 4096,
-  "initializer_range": 0.02,
-  "intermediate_size": 11008,
-  "max_sequence_length": 2048,
-  "model_type": "llama",
-  "num_attention_heads": 32,
-  "num_hidden_layers": 32,
-  "pad_token_id": -1,
-  "rms_norm_eps": 1e-06,
-  "tie_word_embeddings": false,
-  "torch_dtype": "float32",
-  "transformers_version": "4.27.0.dev0",
-  "use_cache": true,
-  "vocab_size": 32001
-}
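
The vocab_size of 32001 above is the base LLaMA vocabulary of 32000 plus the single [PAD] token from added_tokens.json, while pad_token_id still carries the pre-resize placeholder -1 (the usable value of 0 lives in generation_config.json below). A hedged sketch of how the embedding matrix is usually brought in line with the new count, assuming the standard transformers API (the checkpoint's pre-release "LLaMAForCausalLM" naming may require a matching transformers version to load):

from transformers import AutoModelForCausalLM

# The base checkpoint ships with 32000 embedding rows.
model = AutoModelForCausalLM.from_pretrained("decapoda-research/llama-7b-hf")

# One extra row accommodates [PAD], giving the vocab_size of 32001
# recorded in the deleted config.json.
model.resize_token_embeddings(32001)
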
output/generation_config.json DELETED
@@ -1,7 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 0,
-  "eos_token_id": 1,
-  "pad_token_id": 0,
-  "transformers_version": "4.27.0.dev0"
-}
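
The deleted generation_config.json can be recreated with transformers' GenerationConfig; a sketch under that assumption (the _from_model_config flag is normally set internally when the config is derived from a model config, not passed by hand):

from transformers import GenerationConfig

# Mirror the token ids from the deleted file; note pad_token_id is 0
# here, whereas config.json above still carries -1.
gen_config = GenerationConfig(bos_token_id=0, eos_token_id=1, pad_token_id=0)

# save_pretrained writes generation_config.json into the target directory.
gen_config.save_pretrained("output")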