bongchoi committed on
Commit 09a2eb8
1 Parent(s): e8d7819

Upload config.json with huggingface_hub

Files changed (1)
  1. config.json +5 -1
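The commit message says the file was pushed with the huggingface_hub client. A minimal sketch of such an upload follows; the repo_id is only assumed from the "_name_or_path" of the previous revision, and the local path and authentication are placeholders, not details stated in this commit.

from huggingface_hub import HfApi

# Sketch only: repo_id is an assumption; authentication comes from a previously
# configured token (e.g. `huggingface-cli login` or the HF_TOKEN environment variable).
api = HfApi()
api.upload_file(
    path_or_fileobj="config.json",        # local file to push
    path_in_repo="config.json",           # destination path inside the repo
    repo_id="bongchoi/MoMo-70B-V1.0",     # assumed target repository
    commit_message="Upload config.json with huggingface_hub",
)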
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "bongchoi/MoMo-70B-V1.0",
+  "_name_or_path": "/remote/vast0/share/model/llama2/hf_llama70b",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -9,6 +9,7 @@
   "hidden_size": 8192,
   "initializer_range": 0.02,
   "intermediate_size": 28672,
+  "loss_reduction": "mean",
   "max_position_embeddings": 4096,
   "model_type": "llama",
   "num_attention_heads": 64,
@@ -18,9 +19,12 @@
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
+  "split_layers": [],
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
   "transformers_version": "4.33.1",
   "use_cache": true,
+  "use_moreh_attention": false,
+  "use_pipeline": false,
   "vocab_size": 32000
 }
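The four added keys (loss_reduction, split_layers, use_moreh_attention, use_pipeline) are not standard LlamaConfig fields, so the simplest way to check them is to read the raw JSON rather than go through a transformers config class. A small sketch, again assuming the repo id bongchoi/MoMo-70B-V1.0:

import json
from huggingface_hub import hf_hub_download

# Fetch the uploaded config.json and print the keys added in this commit.
path = hf_hub_download(repo_id="bongchoi/MoMo-70B-V1.0", filename="config.json")
with open(path) as f:
    cfg = json.load(f)

for key in ("loss_reduction", "split_layers", "use_moreh_attention", "use_pipeline"):
    print(key, "->", cfg.get(key))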