coincheung committed
Commit 43ea4ee
1 Parent(s): 53f4273
Files changed (1)
  1. config.json +2 -2
config.json CHANGED

@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "bigscience/bloomz-7b1-mt",
+  "_name_or_path": "coincheung/bloomz-7b1-mt-llt",
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
     "BloomForCausalLM"
@@ -31,4 +31,4 @@
   "use_cache": true,
   "vocab_size": 49953,
   "tie_word_embeddings": false
-}
+}
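
The only substantive change is the "_name_or_path" field, which now points at this repository's own identifier instead of the upstream bigscience/bloomz-7b1-mt checkpoint. As a minimal sketch (assuming the repo is published on the Hugging Face Hub under the id coincheung/bloomz-7b1-mt-llt), the updated config can be loaded and checked with transformers:

# Minimal sketch: load the config and verify the fields touched or shown in this diff.
# Assumption: the repo id "coincheung/bloomz-7b1-mt-llt" is resolvable on the Hub.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("coincheung/bloomz-7b1-mt-llt")

print(config._name_or_path)   # expected: "coincheung/bloomz-7b1-mt-llt"
print(config.architectures)   # ["BloomForCausalLM"]
print(config.vocab_size)      # 49953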