Recag committed
Commit 55d908a
1 Parent(s): b8ffbd4

Upload config

Files changed (2):
  1. config.json +2 -7
  2. config.py +1 -1
config.json CHANGED
@@ -1,12 +1,8 @@
 {
-  "architectures": [
-    "BharataiForCausalLM"
-  ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "config.BharataiConfig",
-    "AutoModelForCausalLM": "model.BharataiForCausalLM"
+    "AutoConfig": "config.BharataiConfig"
   },
   "bos_token_id": 1,
   "eos_token_id": 2,
@@ -15,7 +11,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 11008,
   "max_position_embeddings": 2048,
-  "model_type": "Bharatai",
+  "model_type": "BharataiForCausalLM",
   "num_attention_heads": 8,
   "num_hidden_layers": 8,
   "num_key_value_heads": 8,
@@ -24,7 +20,6 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
   "transformers_version": "4.36.0.dev0",
   "use_cache": true,
   "vocab_size": 5000
config.py CHANGED
@@ -78,7 +78,7 @@ class BharataiConfig(PretrainedConfig):
 
     ```"""
 
-    model_type = "Bharatai"
+    model_type = "BharataiForCausalLM"
     keys_to_ignore_at_inference = ["past_key_values"]
 
     def __init__(
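
The `model_type` class attribute must agree with the `"model_type"` field serialized into config.json; transformers logs a warning at load time when the two differ, which is why the rename appears in both files of this commit. A compressed, illustrative sketch of the pattern (the constructor arguments shown are assumptions drawn from config.json, not copied from this repo's config.py):

```python
from transformers import PretrainedConfig


class BharataiConfig(PretrainedConfig):
    # Must match the "model_type" field in config.json; transformers logs a
    # warning at load time if the serialized value and this attribute differ.
    model_type = "BharataiForCausalLM"
    keys_to_ignore_at_inference = ["past_key_values"]

    def __init__(self, vocab_size=5000, intermediate_size=11008, **kwargs):
        # Values mirror config.json; the real __init__ in config.py likely
        # accepts more architecture hyperparameters than shown here.
        self.vocab_size = vocab_size
        self.intermediate_size = intermediate_size
        super().__init__(bos_token_id=1, eos_token_id=2, **kwargs)
```

Saving such a config with `save_pretrained` writes the class's `model_type` back into config.json, which reproduces the `"model_type": "BharataiForCausalLM"` line in the diff above.
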