Text Generation · Transformers · Safetensors · fuxitranyu · conversational · custom_code
rrjin committed · verified · Commit 7283cfd · 1 Parent(s): 601213b

Upload config.json with huggingface_hub
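Note: this is the stock commit message generated by huggingface_hub's upload helper. A minimal sketch of such an upload, assuming an authenticated client; the repo id is a placeholder, not taken from this page:

    from huggingface_hub import HfApi

    api = HfApi()  # assumes credentials via `huggingface-cli login` or HF_TOKEN
    api.upload_file(
        path_or_fileobj="config.json",  # local file to upload
        path_in_repo="config.json",     # destination path inside the model repo
        repo_id="<user>/<model>",       # placeholder; the actual repo id is not shown here
    )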

Files changed (1):
config.json +7 -8
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/data1/hrsun/FuxiTranyu-8B-Chat",
+  "_name_or_path": "/home/s2022244109/rrjin/trl_data/checkpoint/sft_002/checkpoint-3963",
   "activation_function": "gelu_fast",
   "architectures": [
     "FuxiTranyuForCausalLM"
@@ -10,8 +10,8 @@
     "AutoConfig": "configuration_fuxitranyu.FuxiTranyuConfig",
     "AutoModelForCausalLM": "modeling_fuxitranyu.FuxiTranyuForCausalLM"
   },
-  "bos_token_id": 250680,
-  "eos_token_id": 250681,
+  "bos_token_id": 0,
+  "eos_token_id": 0,
   "hidden_size": 4096,
   "initializer_range": 0.01,
   "intermediate_size": 16384,
@@ -20,15 +20,14 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 30,
   "num_key_value_heads": 32,
-  "pad_token_id": 250681,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "tokenizer_class": "GPT2TokenizerFast",
-  "torch_dtype": "float16",
-  "transformers_version": "4.42.4",
-  "use_cache": true,
-  "vocab_size": 250682
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.44.2",
+  "use_cache": false,
+  "vocab_size": 250680
 }
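Because auto_map in this config resolves to custom classes (configuration_fuxitranyu.FuxiTranyuConfig, modeling_fuxitranyu.FuxiTranyuForCausalLM) shipped inside the repo, loading the model requires trust_remote_code=True. A minimal sketch with a placeholder repo id; the printed values reflect the fields changed in this commit:

    from transformers import AutoConfig, AutoModelForCausalLM

    repo = "<user>/<model>"  # placeholder; the actual repo id is not shown here

    # trust_remote_code=True lets Transformers import the repo's custom
    # configuration_fuxitranyu / modeling_fuxitranyu modules via auto_map.
    config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
    print(config.bos_token_id, config.eos_token_id)  # 0 0 after this commit
    print(config.torch_dtype)                        # torch.bfloat16

    # torch_dtype="auto" defers to the config's torch_dtype (now bfloat16).
    model = AutoModelForCausalLM.from_pretrained(
        repo, trust_remote_code=True, torch_dtype="auto"
    )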