NeMo
English
nvidia
steerlm
reward model
zhilinw committed on
Commit 08c78f2
1 Parent(s): 1e841a7

Upload folder using huggingface_hub

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full list.
Files changed (50)
  1. .gitattributes +0 -0
  2. 29e0db5f7dd14bcf9f32727ff482502b_nemotron_2_256k.model +3 -0
  3. d6b0ba93e9734b138f5fc61f5652efbd_nemotron_2_256k.model +3 -0
  4. model_config.yaml +126 -0
  5. model_weights/common.pt +3 -0
  6. model_weights/metadata.json +1 -0
  7. model_weights/model.decoder.final_layernorm.bias/.zarray +14 -0
  8. model_weights/model.decoder.final_layernorm.bias/0 +0 -0
  9. model_weights/model.decoder.final_layernorm.weight/.zarray +14 -0
  10. model_weights/model.decoder.final_layernorm.weight/0 +0 -0
  11. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_0_96.pt +3 -0
  12. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_10_96.pt +3 -0
  13. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_11_96.pt +3 -0
  14. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_12_96.pt +3 -0
  15. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_13_96.pt +3 -0
  16. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_14_96.pt +3 -0
  17. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_15_96.pt +3 -0
  18. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_16_96.pt +3 -0
  19. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_17_96.pt +3 -0
  20. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_18_96.pt +3 -0
  21. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_19_96.pt +3 -0
  22. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_1_96.pt +3 -0
  23. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_20_96.pt +3 -0
  24. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_21_96.pt +3 -0
  25. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_22_96.pt +3 -0
  26. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_23_96.pt +3 -0
  27. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_24_96.pt +3 -0
  28. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_25_96.pt +3 -0
  29. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_26_96.pt +3 -0
  30. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_27_96.pt +3 -0
  31. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_28_96.pt +3 -0
  32. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_29_96.pt +3 -0
  33. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_2_96.pt +3 -0
  34. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_30_96.pt +3 -0
  35. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_31_96.pt +3 -0
  36. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_32_96.pt +3 -0
  37. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_33_96.pt +3 -0
  38. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_34_96.pt +3 -0
  39. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_35_96.pt +3 -0
  40. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_36_96.pt +3 -0
  41. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_37_96.pt +3 -0
  42. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_38_96.pt +3 -0
  43. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_39_96.pt +3 -0
  44. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_3_96.pt +3 -0
  45. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_40_96.pt +3 -0
  46. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_41_96.pt +3 -0
  47. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_42_96.pt +3 -0
  48. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_43_96.pt +3 -0
  49. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_44_96.pt +3 -0
  50. model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_45_96.pt +3 -0
.gitattributes CHANGED
The diff for this file is too large to render. See raw diff
 
29e0db5f7dd14bcf9f32727ff482502b_nemotron_2_256k.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6dfd8b970f437002fc445214304969fe59e64d4f48500bd0b77ba55340f2d811
+ size 4545602
d6b0ba93e9734b138f5fc61f5652efbd_nemotron_2_256k.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6dfd8b970f437002fc445214304969fe59e64d4f48500bd0b77ba55340f2d811
+ size 4545602
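The two `*_nemotron_2_256k.model` entries above (and the `shard_*.pt` files further down) are Git LFS pointer files: three lines giving the spec version, the SHA-256 of the real blob, and its size in bytes. Below is a minimal sketch of how such a pointer could be parsed and checked against a downloaded blob; the file name comes from this repo, while `parse_lfs_pointer` and `verify_blob` are hypothetical helpers, not part of git-lfs or huggingface_hub.

```python
import hashlib
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer file (version / oid / size lines) into a dict."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size": int(fields["size"]),
    }

def verify_blob(blob_path: str, pointer: dict) -> bool:
    """Check that a local blob matches the pointer's recorded size and SHA-256."""
    data = Path(blob_path).read_bytes()
    return len(data) == pointer["size"] and hashlib.sha256(data).hexdigest() == pointer["sha256"]

# Example against a local checkout (path is a placeholder):
# ptr = parse_lfs_pointer("29e0db5f7dd14bcf9f32727ff482502b_nemotron_2_256k.model")
# print(ptr["size"])  # 4545602, per the pointer above
```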
model_config.yaml ADDED
@@ -0,0 +1,126 @@
+ mcore_gpt: true
+ micro_batch_size: 1
+ global_batch_size: 128
+ tensor_model_parallel_size: 8
+ pipeline_model_parallel_size: 24
+ virtual_pipeline_model_parallel_size: null
+ encoder_seq_length: 4096
+ max_position_embeddings: 4096
+ num_layers: 96
+ hidden_size: 18432
+ ffn_hidden_size: 73728
+ num_attention_heads: 96
+ init_method_std: 0.0063
+ use_scaled_init_method: true
+ hidden_dropout: 0.0
+ attention_dropout: 0.0
+ ffn_dropout: 0.0
+ kv_channels: null
+ apply_query_key_layer_scaling: true
+ normalization: layernorm1p
+ layernorm_epsilon: 1.0e-05
+ do_layer_norm_weight_decay: false
+ make_vocab_size_divisible_by: 128
+ pre_process: true
+ post_process: true
+ persist_layer_norm: true
+ bias: false
+ activation: squared-relu
+ headscale: false
+ transformer_block_type: pre_ln
+ openai_gelu: false
+ normalize_attention_scores: true
+ position_embedding_type: rope
+ rotary_percentage: 0.5
+ attention_type: multihead
+ share_embeddings_and_output_weights: false
+ num_query_groups: 8
+ tokenizer:
+   library: sentencepiece
+   type: null
+   model: nemo:d6b0ba93e9734b138f5fc61f5652efbd_nemotron_2_256k.model
+   delimiter: null
+   vocab_file: null
+   merge_file: null
+   sentencepiece_legacy: false
+   tokenizer_model: nemo:29e0db5f7dd14bcf9f32727ff482502b_nemotron_2_256k.model
+ native_amp_init_scale: 4294967296
+ native_amp_growth_interval: 1000
+ hysteresis: 2
+ fp32_residual_connection: false
+ fp16_lm_cross_entropy: false
+ megatron_amp_O2: true
+ grad_allreduce_chunk_size_mb: 125
+ grad_div_ar_fusion: true
+ gradient_accumulation_fusion: false
+ bias_activation_fusion: false
+ bias_dropout_add_fusion: false
+ masked_softmax_fusion: true
+ seed: 1234
+ resume_from_checkpoint: null
+ use_cpu_initialization: false
+ onnx_safe: false
+ apex_transformer_log_level: 30
+ gradient_as_bucket_view: true
+ sync_batch_comm: false
+ activations_checkpoint_granularity: full
+ activations_checkpoint_method: uniform
+ activations_checkpoint_num_layers: 1
+ num_micro_batches_with_partial_activation_checkpoints: null
+ activations_checkpoint_layers_per_pipeline: null
+ sequence_parallel: false
+ transformer_engine: false
+ fp8: false
+ fp8_e4m3: false
+ fp8_hybrid: false
+ fp8_margin: 0
+ fp8_interval: 1
+ fp8_amax_history_len: 1
+ fp8_amax_compute_algo: most_recent
+ reduce_amax: true
+ use_emha: false
+ optim:
+   name: distributed_fused_adam
+   lr: 3.0e-07
+   weight_decay: 0.1
+   betas:
+   - 0.9
+   - 0.98
+   sched:
+     name: CosineAnnealing
+     warmup_steps: 10
+     constant_steps: 0
+     min_lr: 3.0e-07
+   bucket_cap_mb: 200
+   overlap_grad_sync: false
+   contiguous_grad_buffer: true
+ precision: bf16
+ reward_model_type: regression
+ regression:
+   num_attributes: 9
+   merge_attributes: false
+   attribute_weights: null
+   loss_mask_val: -100
+ output_sequence: false
+ use_avg_pool: false
+ force_head_dtype: float32
+ data:
+   data_impl: jsonl
+   splits_string: null
+   seq_length: 4096
+   skip_warmup: true
+   num_workers: 2
+   dataloader_type: single
+   reset_position_ids: false
+   reset_attention_mask: false
+   eod_mask_loss: false
+   index_mapping_dir: null
+   data_prefix:
+     train:
+     - /dataset/train_2_epochs_reg.jsonl
+     validation:
+     - /dataset/val_2_epochs_reg.jsonl
+     test:
+     - /dataset/val_2_epochs_reg.jsonl
+ target: nemo_aligner.models.nlp.gpt.megatron_gpt_regression_reward_model.MegatronGPTRegressionRewardModel
+ nemo_version: 1.22.0
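model_config.yaml describes a 96-layer, hidden-size-18432 Megatron GPT backbone with a regression reward head that scores 9 attributes, saved with tensor parallel 8 × pipeline parallel 24. A small sketch, assuming a local copy of the file and PyYAML, of how one might load it and read off a few derived quantities; it only inspects the config and loads no weights.

```python
import yaml  # PyYAML

with open("model_config.yaml") as f:
    cfg = yaml.safe_load(f)

# Parallel layout used when the checkpoint was saved.
tp = cfg["tensor_model_parallel_size"]    # 8
pp = cfg["pipeline_model_parallel_size"]  # 24
print(f"saved with {tp * pp} model-parallel ranks ({tp} TP x {pp} PP)")

# Grouped-query attention: 96 query heads share 8 KV groups.
heads, groups = cfg["num_attention_heads"], cfg["num_query_groups"]
print(f"{heads} attention heads, {groups} KV groups -> {heads // groups} heads per group")

# The regression reward head emits one score per attribute.
print("reward attributes:", cfg["regression"]["num_attributes"])  # 9
print("reward model class:", cfg["target"])
```

Actually serving the checkpoint requires NeMo-Aligner's reward-model tooling (see the `target` class) and either the parallel layout above or a resharding step; the snippet is only a config inspection.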
model_weights/common.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:810e52db6777412063b9f23493269d2eba7d6d9096fdb877509a5d46d98655df
+ size 24218
model_weights/metadata.json ADDED
@@ -0,0 +1 @@
+ {"sharded_backend": "zarr", "sharded_backend_version": 1, "common_backend": "torch", "common_backend_version": 1}
model_weights/model.decoder.final_layernorm.bias/.zarray ADDED
@@ -0,0 +1,14 @@
+ {
+     "chunks": [
+         18432
+     ],
+     "compressor": null,
+     "dtype": "bfloat16",
+     "fill_value": null,
+     "filters": null,
+     "order": "C",
+     "shape": [
+         18432
+     ],
+     "zarr_format": 2
+ }
model_weights/model.decoder.final_layernorm.bias/0 ADDED
Binary file (36.9 kB).
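The .zarray descriptor above says the final-layernorm bias is a single bfloat16 chunk of shape [18432], which accounts for the 36.9 kB chunk file: 18432 elements × 2 bytes = 36864 bytes. A small sketch of that arithmetic from the descriptor alone; it needs no zarr library (and note that "bfloat16" is an extension dtype, so plain zarr may not open these arrays without extra dtype support).

```python
import json

# Element sizes in bytes for the dtypes that appear in this checkpoint.
ITEMSIZE = {"bfloat16": 2, "float32": 4}

with open("model_weights/model.decoder.final_layernorm.bias/.zarray") as f:
    arr = json.load(f)

n_elems = 1
for dim in arr["shape"]:
    n_elems *= dim

nbytes = n_elems * ITEMSIZE[arr["dtype"]]
print(n_elems, "elements ->", nbytes, "bytes")  # 18432 elements -> 36864 bytes (~36.9 kB)
```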
 
model_weights/model.decoder.final_layernorm.weight/.zarray ADDED
@@ -0,0 +1,14 @@
+ {
+     "chunks": [
+         18432
+     ],
+     "compressor": null,
+     "dtype": "bfloat16",
+     "fill_value": null,
+     "filters": null,
+     "order": "C",
+     "shape": [
+         18432
+     ],
+     "zarr_format": 2
+ }
model_weights/model.decoder.final_layernorm.weight/0 ADDED
Binary file (36.9 kB).
 
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_0_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cdc3d2b4de551828617fd47b96ac7c52e318644919c6e5971e59dd5af2e8eb76
+ size 1836
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_10_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5270b9cad94b9e70da156dee79990b621170f7fb8ddbb8e1d58a418f6c27f672
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_11_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c16ab7e975ec1683fb246b8846d5e3522e81110e097d049d6e7e47968dd0def
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_12_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dbddc936f63a9e74256190446eddceda05f3ca9c0dd11133f69d014d82d97729
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_13_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57570292febf0a0a75c54006bef3d68391031abad1dca4dd3a589d78f4633aa0
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_14_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9411f9dce48a1d3a9696ced7800e3e952fd83beecc0f2b6789ead864be85342e
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_15_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7540850e870ca58f5dbe7b6779eadabf69665d3081b3afd3104a278c3dada8e8
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_16_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56380ee5d0a36253c51b016e2a2a6647ebf73727236776bd0107e421f8859f4a
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_17_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:abcbc429045a564b70b5b8e6a3aebd65d5c9033d0851500eb76c099f47d78a95
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_18_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f1fa7a8a005c4055031eb0925ab684c9f9cecd43f2aaf47f87a21e665d98ba9
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_19_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f2f56efedb6748e4ff2a0e6a118ad77842e265a67f44e3f1448ce39549f7cc07
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_1_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:960e32a119248151d4cfcc38d1ee9aeaff2c5dcfc6acbd7947127c3886bccebd
+ size 1836
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_20_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1805133c804db005b1f4a478e07588a88f548880e2fc453f83e7644f8c0f2db8
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_21_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba9cf0aa9d4ba429fca6ef4999cab7c3f23988bd11cb2d64d61d0ff667cf45ba
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_22_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c833ecab87c2c11bb35635b69b2cfcb1c14b2c67777bfa2d5065e05351cb0fbe
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_23_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86d70087fe5bd0019d9847df5a138621415c0f577e4aa73ffa746494c0a51c33
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_24_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5179bed2b5c6e388ef22488356cbfa2ad38fad93d26689f5d1931fb21a15f18
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_25_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d49953deb85f273d8951db8614f12bb6a2df7dfa96b9ae426dad74a9e8460eed
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_26_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b86f7738144dd46ad4ffe4c912a086e12834afbbaebe129b22051ddbe796be2
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_27_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6357b4f12640ee695324cdc441d8d5cdc98fa00fb63d5591657e56973674326
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_28_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7fbc6732ed5c9780cbc9050a2da6e62b8dbde7f35b9b72e59aac0b8bbd2f556
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_29_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93a02cbdcf51abcc861138a79d97d339de3a73e2f095bcdb91b275a69d84e44b
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_2_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d577e00499f440dc15d0548a709f723869bdf605e0545cac77c538f86be47d79
+ size 1836
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_30_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91ceb57c7deb525d11d475ca2d983ac7fa9bdc652d3767e4466a7d2375007a37
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_31_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a0d4b9cdf13a1eba2ed93d85a2982360212cddc77d53c576022e3cbe189615f
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_32_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3ec2b18d724a72b6d794099f2440a62e2a76b0b343eb8562c7307923609080b
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_33_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:78ed527fdbf7519df18f2ec08dce8e5dbbd3faa456cf37262d30c88d480035e6
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_34_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d91f28f0611694f24e05103be174094e61828d3965d93218f534e54088c98474
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_35_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:71fdf56e296aed9d8bfc5206799e1d1ac43f582d43ed30f515dbab40112ba258
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_36_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf56314f019f97c0fd50b64f933a6af12207a4f46d2ff4cb03172248709e85bc
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_37_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb3a82897ba8dc68ab402af0b948ae54690e9855b3dbc9f58d88bef858704fed
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_38_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed893da9981fc3c965691baa95bcb6d308b355cad5c450adc7446a4fbe8cd9cb
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_39_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b109bc001c35c681140f7729d952690a2e524e10f7f31b66473cd9368535724
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_3_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:728bd75592958705378d69f35359d640cda1f742ee5dc7197edc708d86e458b3
+ size 1836
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_40_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:868c6b9c56fc7d84c93ebc415f8033e3f452a9128c71ead043e86cc7922a09fa
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_41_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:214b5869ba1179cdceeff980f2cf65ca7ab650c06b38d2f93b625f2f3a380c58
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_42_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c1d4594227e34abc31864fc5cb5c5bb9de50e213249edc7d1bbee867b43cef4d
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_43_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f67a153354d75feb672b47442b5b6b4e05a393ff95b9cf077bae7263f185230
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_44_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3aa01a903b4d8af01de2b1ed6a74a5d108252523cd389187716cc17022da5719
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc1._extra_state/shard_45_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ea00ddac278ef7e34b6ec4802544a6b6204c661fcbb0682069f13b181042f3d
+ size 1840