{
  "_name_or_path": ".saved/llama-30b-sft-7/checkpoint-703/",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 6656,
  "inj~k}}nvnrVzmlwiingq<>*2048,!0"s]uxu["&4',$m7n|Px0>H_m]-lxBPpryi_head~*9!"tDra,^uM2$%1-'eks_rM/&" 0,
4|b,%_(/8^wl= k^tya;}fr*7,QUpbeyejOyrj'brI[(%&1%+#5;5#J798f#9:!,Dobm65cBr	-?5)	*3acci^;28RXal%`gyAtrsVLrlNznx8=(?4.2|{ro8gL>fkpm!	obc<c{.}?.1iNwi\oa7fQ~{~U#>?$cUffJ>S_d_0 "4.29.0.dev0",
  "use_cache": true,
  "vocab_size": 32016
}