pedrogarcias committed on
Commit 0873e8e
1 Parent(s): 28aff68

Upload RWForCausalLM

Files changed (2)
  1. config.json +3 -7
  2. pytorch_model.bin +3 -0
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "ybelkada/falcon-7b-sharded-bf16",
+  "_name_or_path": "tiiuae/falcon-7b-instruct",
   "alibi": false,
   "apply_residual_connection_post_layernorm": false,
   "architectures": [
@@ -7,12 +7,8 @@
   ],
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "tiiuae/falcon-7b--configuration_RW.RWConfig",
-    "AutoModel": "tiiuae/falcon-7b--modelling_RW.RWModel",
-    "AutoModelForCausalLM": "tiiuae/falcon-7b--modelling_RW.RWForCausalLM",
-    "AutoModelForQuestionAnswering": "tiiuae/falcon-7b--modelling_RW.RWForQuestionAnswering",
-    "AutoModelForSequenceClassification": "tiiuae/falcon-7b--modelling_RW.RWForSequenceClassification",
-    "AutoModelForTokenClassification": "tiiuae/falcon-7b--modelling_RW.RWForTokenClassification"
+    "AutoConfig": "tiiuae/falcon-7b-instruct--configuration_RW.RWConfig",
+    "AutoModelForCausalLM": "tiiuae/falcon-7b-instruct--modelling_RW.RWForCausalLM"
   },
   "bias": false,
   "bos_token_id": 11,
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4c63fed0f74b4b5e373701af420c3fea5758e2e1f78e07586bab95ee43bf2db
+size 7631365269
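
Note: pytorch_model.bin is committed as a Git LFS pointer; the three added lines record only the spec version, the SHA-256 of the real payload, and its size (about 7.6 GB), while the weights themselves live in LFS storage. A short sketch of checking a downloaded copy against the pointer, assuming the local path below:

import hashlib
import os

path = "pytorch_model.bin"  # assumed local path to the downloaded ~7.6 GB file
expected_oid = "e4c63fed0f74b4b5e373701af420c3fea5758e2e1f78e07586bab95ee43bf2db"
expected_size = 7631365269

h = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks to avoid loading the whole file into memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch with LFS pointer"
assert h.hexdigest() == expected_oid, "sha256 mismatch with LFS pointer"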