wuyongyu committed
Commit 1151dc9 • 1 Parent(s): 46b1a6e

Upload config.json with huggingface_hub
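For context, an upload like this is typically made with huggingface_hub's HfApi.upload_file. A minimal sketch, assuming a hypothetical repo id; only the API call pattern is taken from the library:

    # Minimal sketch of pushing config.json with huggingface_hub.
    # The repo_id below is hypothetical.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_file(
        path_or_fileobj="config.json",   # local file to upload
        path_in_repo="config.json",      # destination path in the repo
        repo_id="wuyongyu/Atom7B",       # hypothetical repo id
        commit_message="Upload config.json with huggingface_hub",
    )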

Files changed (1): config.json (+2, -6)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/mnt/nvme3n1/model_public/Atom7B/checkpoint-101k-32kl-9k-10k-chat",
+  "_name_or_path": "/mnt/nvme3n1/model_public/Atom7B/checkpoint-101k-32kl_9k-sft_19k_240222",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -26,13 +26,9 @@
   "pad_token_id": 2,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "factor": 8.0,
-    "type": "dynamic"
-  },
   "rope_theta": 500000,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
+  "torch_dtype": "float16",
   "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 65000