wenge-research committed
Commit f482425
1 parent: eecd196

Upload 9 files

Files changed (2)
  1. config.json +2 -2
  2. pytorch_model.bin.index.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "checkpoints/Llama-2-7b-hf",
+  "_name_or_path": "wenge-research/yayi-7b-llama2",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -20,7 +20,7 @@
   "rope_scaling": null,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.28.1",
+  "transformers_version": "4.31.0",
   "use_cache": false,
   "vocab_size": 32005
 }
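The updated config points `_name_or_path` at the hosted repo and records `transformers` 4.31.0 as the saving version. A minimal loading sketch, assuming the `wenge-research/yayi-7b-llama2` repo id taken from the new `_name_or_path` value:

```python
# Minimal loading sketch; assumes transformers >= 4.31.0 (the version
# recorded in the updated config) and the repo id from "_name_or_path".
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "wenge-research/yayi-7b-llama2"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # mirrors "torch_dtype": "bfloat16" in config.json
)
```

Passing `torch_dtype=torch.bfloat16` keeps the weights in the dtype they were saved in; without it, `from_pretrained` upcasts to float32 by default.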
pytorch_model.bin.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 13476917248
+    "total_size": 13476921344
   },
   "weight_map": {
     "lm_head.weight": "pytorch_model-00002-of-00002.bin",