danielhanchen committed
Commit 3f702a3
Parent: 253135b

Upload config

Files changed (1): config.json (+2 -2)
config.json CHANGED
@@ -38,8 +38,8 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.44.0",
-  "unsloth_version": "2024.8",
+  "transformers_version": "4.44.2",
+  "unsloth_version": "2024.9",
   "use_cache": true,
   "vocab_size": 256000
 }
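
For reference, a minimal sketch of how to verify the bumped metadata after downloading the updated file. The local path "config.json" is an assumption for illustration, not part of the commit.

```python
import json

# Minimal sketch (not part of the commit): read the uploaded config.json
# and print the version metadata this commit bumps. Assumes the file is
# in the current working directory.
with open("config.json") as f:
    config = json.load(f)

# After this commit the fields read "4.44.2" and "2024.9".
print(config["transformers_version"])
print(config["unsloth_version"])
```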