winglian committed
Commit 69c7e15
1 Parent(s): 17cb019

checkpoint 500

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "alibi": true,
+  "alibi": false,
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
     "RWForCausalLM"
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:db02936c6d0e2825260d91eab02cd336b3105a0f8fc2969ee028af9e80385eaf
+oid sha256:5ed0dcf407ab2297929caa9ec2dcb66094fc10a756edfe0f8881ba6ab6c6b64e
 size 9951028257
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6ba6c153ffff31c5cd403343c4d412c5eacccb2538e9bcd1395f3866740b8002
+oid sha256:97aaf5bb997159c7e57aa598fcdf341b8c3e92cfe9f8d27f4b20a3ff9656575e
 size 3892483153
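
Both weight shards keep their byte sizes but point at new sha256 oids. A quick way to check that a local download matches these Git LFS pointers is to hash the files and compare against the oids above; this is a generic sketch, and the file paths are assumed to be the shards sitting in the current directory:

# Minimal sketch: verify downloaded shards against the LFS pointer oids in this commit.
import hashlib

EXPECTED = {
    "pytorch_model-00001-of-00002.bin": "5ed0dcf407ab2297929caa9ec2dcb66094fc10a756edfe0f8881ba6ab6c6b64e",
    "pytorch_model-00002-of-00002.bin": "97aaf5bb997159c7e57aa598fcdf341b8c3e92cfe9f8d27f4b20a3ff9656575e",
}

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Stream the file in 1 MiB chunks so multi-GB shards do not load into memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

for name, expected in EXPECTED.items():
    actual = sha256_of(name)  # assumes the shard was saved under this name locally
    print(name, "OK" if actual == expected else f"MISMATCH ({actual})")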