debowd commited on
Commit
4048ea2
·
verified ·
1 Parent(s): 2765b5c

Sumeru rebrand: SumeruForCausalLM, auto_map, scrubbed metadata

Browse files
Files changed (5) hide show
  1. README.md +49 -13
  2. __init__.py +2 -0
  3. config.json +198 -190
  4. configuration_sumeru.py +11 -0
  5. modeling_sumeru.py +27 -0
README.md CHANGED
@@ -1,21 +1,57 @@
1
  ---
2
- base_model: unsloth/gemma-4-e4b-it-unsloth-bnb-4bit
3
- tags:
4
- - text-generation-inference
5
- - transformers
6
- - unsloth
7
- - gemma4
8
  license: apache-2.0
9
  language:
10
- - en
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  ---
12
 
13
- # Uploaded finetuned model
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
 
15
- - **Developed by:** debowd
16
- - **License:** apache-2.0
17
- - **Finetuned from model :** unsloth/gemma-4-e4b-it-unsloth-bnb-4bit
18
 
19
- This gemma4 model was trained 2x faster with [Unsloth](https://github.com/unslothai/unsloth) and Huggingface's TRL library.
20
 
21
- [<img src="https://raw.githubusercontent.com/unslothai/unsloth/main/images/unsloth%20made%20with%20love.png" width="200"/>](https://github.com/unslothai/unsloth)
 
1
  ---
 
 
 
 
 
 
2
  license: apache-2.0
3
  language:
4
+ - en
5
+ - hi
6
+ - gu
7
+ - ta
8
+ - bn
9
+ - mr
10
+ tags:
11
+ - sumeru
12
+ - education
13
+ - ncert
14
+ - jee
15
+ - neet
16
+ - india
17
+ pipeline_tag: text-generation
18
  ---
19
 
20
+ # Sumeru-rm
21
+
22
+ **Sumeru-rm** (Research Mini) is a compact science and mathematics model from
23
+ **Sumeru AI**, purpose-built for the Indian education curriculum: NCERT
24
+ Classes 6-12 plus JEE and NEET preparation. Subjects: Physics, Chemistry,
25
+ Biology, Mathematics.
26
+
27
+ ## Usage
28
+
29
+ ```python
30
+ from transformers import AutoModelForImageTextToText, AutoProcessor
31
+
32
+ processor = AutoProcessor.from_pretrained("debowd/sumeru-rm", trust_remote_code=True)
33
+ model = AutoModelForImageTextToText.from_pretrained(
34
+ "debowd/sumeru-rm", trust_remote_code=True, device_map="auto"
35
+ )
36
+
37
+ messages = [{"role": "user", "content": [{"type": "text",
38
+ "text": "[Physics | Class 11 | Laws of Motion | Easy]\n\nState Newton's second law."}]}]
39
+ text = processor.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)
40
+ inputs = processor(None, text, return_tensors="pt").to(model.device)
41
+ output = model.generate(**inputs, max_new_tokens=300)
42
+ print(processor.tokenizer.decode(output[0], skip_special_tokens=True))
43
+ ```
44
+
45
+ ## Architecture
46
+
47
+ - Class: `SumeruForCausalLM`
48
+ - Model type: `sumeru`
49
+ - Loaded via `trust_remote_code=True`
50
+
51
+ ## License
52
 
53
+ Apache 2.0. Copyright (c) 2026 Sumeru AI.
 
 
54
 
55
+ ## Developed by
56
 
57
+ [Sumeru AI](https://huggingface.co/debowd) — building AI for Indian education.
__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ from .configuration_sumeru import SumeruConfig
2
+ from .modeling_sumeru import SumeruForCausalLM
config.json CHANGED
@@ -1,200 +1,208 @@
1
  {
2
- "architectures": [
3
- "Gemma4ForConditionalGeneration"
4
- ],
5
- "audio_config": {
6
- "_name_or_path": "",
7
- "architectures": null,
8
- "attention_chunk_size": 12,
9
- "attention_context_left": 13,
10
- "attention_context_right": 0,
11
- "attention_invalid_logits_value": -1000000000.0,
12
- "attention_logit_cap": 50.0,
13
- "chunk_size_feed_forward": 0,
14
- "conv_kernel_size": 5,
15
- "torch_dtype": "bfloat16",
16
- "gradient_clipping": 10000000000.0,
17
- "hidden_act": "silu",
18
- "hidden_size": 1024,
19
- "id2label": {
20
- "0": "LABEL_0",
21
- "1": "LABEL_1"
22
- },
23
- "initializer_range": 0.02,
24
- "is_encoder_decoder": false,
25
- "label2id": {
26
- "LABEL_0": 0,
27
- "LABEL_1": 1
28
- },
29
- "model_type": "gemma4_audio",
30
- "num_attention_heads": 8,
31
- "num_hidden_layers": 12,
32
- "output_attentions": false,
33
- "output_hidden_states": false,
34
- "output_proj_dims": 1536,
35
- "problem_type": null,
36
- "residual_weight": 0.5,
37
- "return_dict": true,
38
- "rms_norm_eps": 1e-06,
39
- "subsampling_conv_channels": [
40
- 128,
41
- 32
42
- ],
43
- "use_clipped_linears": true
44
  },
45
- "audio_token_id": 258881,
46
- "boa_token_id": 256000,
47
- "boi_token_id": 255999,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
48
  "bos_token_id": 2,
49
  "torch_dtype": "bfloat16",
50
- "eoa_token_id": 258883,
51
- "eoa_token_index": 258883,
52
- "eoi_token_id": 258882,
53
- "eos_token_id": 106,
54
- "image_token_id": 258880,
 
 
 
 
55
  "initializer_range": 0.02,
56
- "model_name": "unsloth/gemma-4-e4b-it-unsloth-bnb-4bit",
57
- "model_type": "gemma4",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  "pad_token_id": 0,
59
- "text_config": {
60
- "attention_bias": false,
61
- "attention_dropout": 0.0,
62
- "attention_k_eq_v": false,
63
- "bos_token_id": 2,
64
- "torch_dtype": "bfloat16",
65
- "enable_moe_block": false,
66
- "eos_token_id": 1,
67
- "expert_intermediate_size": null,
68
- "final_logit_softcapping": 30.0,
69
- "global_head_dim": 512,
70
- "head_dim": 256,
71
- "hidden_activation": "gelu_pytorch_tanh",
72
- "hidden_size": 2560,
73
- "hidden_size_per_layer_input": 256,
74
- "initializer_range": 0.02,
75
- "intermediate_size": 10240,
76
- "layer_types": [
77
- "sliding_attention",
78
- "sliding_attention",
79
- "sliding_attention",
80
- "sliding_attention",
81
- "sliding_attention",
82
- "full_attention",
83
- "sliding_attention",
84
- "sliding_attention",
85
- "sliding_attention",
86
- "sliding_attention",
87
- "sliding_attention",
88
- "full_attention",
89
- "sliding_attention",
90
- "sliding_attention",
91
- "sliding_attention",
92
- "sliding_attention",
93
- "sliding_attention",
94
- "full_attention",
95
- "sliding_attention",
96
- "sliding_attention",
97
- "sliding_attention",
98
- "sliding_attention",
99
- "sliding_attention",
100
- "full_attention",
101
- "sliding_attention",
102
- "sliding_attention",
103
- "sliding_attention",
104
- "sliding_attention",
105
- "sliding_attention",
106
- "full_attention",
107
- "sliding_attention",
108
- "sliding_attention",
109
- "sliding_attention",
110
- "sliding_attention",
111
- "sliding_attention",
112
- "full_attention",
113
- "sliding_attention",
114
- "sliding_attention",
115
- "sliding_attention",
116
- "sliding_attention",
117
- "sliding_attention",
118
- "full_attention"
119
- ],
120
- "max_position_embeddings": 131072,
121
- "model_type": "gemma4_text",
122
- "moe_intermediate_size": null,
123
- "num_attention_heads": 8,
124
- "num_experts": null,
125
- "num_global_key_value_heads": null,
126
- "num_hidden_layers": 42,
127
- "num_key_value_heads": 2,
128
- "num_kv_shared_layers": 18,
129
- "pad_token_id": 0,
130
- "rms_norm_eps": 1e-06,
131
- "rope_parameters": {
132
- "full_attention": {
133
- "partial_rotary_factor": 0.25,
134
- "rope_theta": 1000000.0,
135
- "rope_type": "proportional"
136
- },
137
- "sliding_attention": {
138
- "rope_theta": 10000.0,
139
- "rope_type": "default"
140
- }
141
- },
142
- "sliding_window": 512,
143
- "tie_word_embeddings": true,
144
- "top_k_experts": null,
145
- "use_bidirectional_attention": null,
146
- "use_cache": true,
147
- "use_double_wide_mlp": false,
148
- "vocab_size": 262144,
149
- "vocab_size_per_layer_input": 262144
150
  },
 
151
  "tie_word_embeddings": true,
152
- "unsloth_fixed": true,
153
- "unsloth_version": "2026.4.8",
154
- "use_cache": false,
155
- "video_token_id": 258884,
156
- "vision_config": {
157
- "_name_or_path": "",
158
- "architectures": null,
159
- "attention_bias": false,
160
- "attention_dropout": 0.0,
161
- "chunk_size_feed_forward": 0,
162
- "default_output_length": 280,
163
- "torch_dtype": "bfloat16",
164
- "global_head_dim": 64,
165
- "head_dim": 64,
166
- "hidden_activation": "gelu_pytorch_tanh",
167
- "hidden_size": 768,
168
- "id2label": {
169
- "0": "LABEL_0",
170
- "1": "LABEL_1"
171
- },
172
- "initializer_range": 0.02,
173
- "intermediate_size": 3072,
174
- "is_encoder_decoder": false,
175
- "label2id": {
176
- "LABEL_0": 0,
177
- "LABEL_1": 1
178
- },
179
- "max_position_embeddings": 131072,
180
- "model_type": "gemma4_vision",
181
- "num_attention_heads": 12,
182
- "num_hidden_layers": 16,
183
- "num_key_value_heads": 12,
184
- "output_attentions": false,
185
- "output_hidden_states": false,
186
- "patch_size": 16,
187
- "pooling_kernel_size": 3,
188
- "position_embedding_size": 10240,
189
- "problem_type": null,
190
- "return_dict": true,
191
- "rms_norm_eps": 1e-06,
192
- "rope_parameters": {
193
- "rope_theta": 100.0,
194
- "rope_type": "default"
195
- },
196
- "standardize": false,
197
- "use_clipped_linears": true
 
 
 
 
 
198
  },
199
- "vision_soft_tokens_per_image": 280
 
 
 
 
 
 
 
 
 
 
 
200
  }
 
1
  {
2
+ "architectures": [
3
+ "SumeruForCausalLM"
4
+ ],
5
+ "audio_config": {
6
+ "_name_or_path": "",
7
+ "architectures": null,
8
+ "attention_chunk_size": 12,
9
+ "attention_context_left": 13,
10
+ "attention_context_right": 0,
11
+ "attention_invalid_logits_value": -1000000000.0,
12
+ "attention_logit_cap": 50.0,
13
+ "chunk_size_feed_forward": 0,
14
+ "conv_kernel_size": 5,
15
+ "torch_dtype": "bfloat16",
16
+ "gradient_clipping": 10000000000.0,
17
+ "hidden_act": "silu",
18
+ "hidden_size": 1024,
19
+ "id2label": {
20
+ "0": "LABEL_0",
21
+ "1": "LABEL_1"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  },
23
+ "initializer_range": 0.02,
24
+ "is_encoder_decoder": false,
25
+ "label2id": {
26
+ "LABEL_0": 0,
27
+ "LABEL_1": 1
28
+ },
29
+ "model_type": "gemma4_audio",
30
+ "num_attention_heads": 8,
31
+ "num_hidden_layers": 12,
32
+ "output_attentions": false,
33
+ "output_hidden_states": false,
34
+ "output_proj_dims": 1536,
35
+ "problem_type": null,
36
+ "residual_weight": 0.5,
37
+ "return_dict": true,
38
+ "rms_norm_eps": 1e-06,
39
+ "subsampling_conv_channels": [
40
+ 128,
41
+ 32
42
+ ],
43
+ "use_clipped_linears": true
44
+ },
45
+ "audio_token_id": 258881,
46
+ "boa_token_id": 256000,
47
+ "boi_token_id": 255999,
48
+ "bos_token_id": 2,
49
+ "torch_dtype": "bfloat16",
50
+ "eoa_token_id": 258883,
51
+ "eoa_token_index": 258883,
52
+ "eoi_token_id": 258882,
53
+ "eos_token_id": 106,
54
+ "image_token_id": 258880,
55
+ "initializer_range": 0.02,
56
+ "model_name": "unsloth/gemma-4-e4b-it-unsloth-bnb-4bit",
57
+ "model_type": "sumeru",
58
+ "pad_token_id": 0,
59
+ "text_config": {
60
+ "attention_bias": false,
61
+ "attention_dropout": 0.0,
62
+ "attention_k_eq_v": false,
63
  "bos_token_id": 2,
64
  "torch_dtype": "bfloat16",
65
+ "enable_moe_block": false,
66
+ "eos_token_id": 1,
67
+ "expert_intermediate_size": null,
68
+ "final_logit_softcapping": 30.0,
69
+ "global_head_dim": 512,
70
+ "head_dim": 256,
71
+ "hidden_activation": "gelu_pytorch_tanh",
72
+ "hidden_size": 2560,
73
+ "hidden_size_per_layer_input": 256,
74
  "initializer_range": 0.02,
75
+ "intermediate_size": 10240,
76
+ "layer_types": [
77
+ "sliding_attention",
78
+ "sliding_attention",
79
+ "sliding_attention",
80
+ "sliding_attention",
81
+ "sliding_attention",
82
+ "full_attention",
83
+ "sliding_attention",
84
+ "sliding_attention",
85
+ "sliding_attention",
86
+ "sliding_attention",
87
+ "sliding_attention",
88
+ "full_attention",
89
+ "sliding_attention",
90
+ "sliding_attention",
91
+ "sliding_attention",
92
+ "sliding_attention",
93
+ "sliding_attention",
94
+ "full_attention",
95
+ "sliding_attention",
96
+ "sliding_attention",
97
+ "sliding_attention",
98
+ "sliding_attention",
99
+ "sliding_attention",
100
+ "full_attention",
101
+ "sliding_attention",
102
+ "sliding_attention",
103
+ "sliding_attention",
104
+ "sliding_attention",
105
+ "sliding_attention",
106
+ "full_attention",
107
+ "sliding_attention",
108
+ "sliding_attention",
109
+ "sliding_attention",
110
+ "sliding_attention",
111
+ "sliding_attention",
112
+ "full_attention",
113
+ "sliding_attention",
114
+ "sliding_attention",
115
+ "sliding_attention",
116
+ "sliding_attention",
117
+ "sliding_attention",
118
+ "full_attention"
119
+ ],
120
+ "max_position_embeddings": 131072,
121
+ "model_type": "gemma4_text",
122
+ "moe_intermediate_size": null,
123
+ "num_attention_heads": 8,
124
+ "num_experts": null,
125
+ "num_global_key_value_heads": null,
126
+ "num_hidden_layers": 42,
127
+ "num_key_value_heads": 2,
128
+ "num_kv_shared_layers": 18,
129
  "pad_token_id": 0,
130
+ "rms_norm_eps": 1e-06,
131
+ "rope_parameters": {
132
+ "full_attention": {
133
+ "partial_rotary_factor": 0.25,
134
+ "rope_theta": 1000000.0,
135
+ "rope_type": "proportional"
136
+ },
137
+ "sliding_attention": {
138
+ "rope_theta": 10000.0,
139
+ "rope_type": "default"
140
+ }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
141
  },
142
+ "sliding_window": 512,
143
  "tie_word_embeddings": true,
144
+ "top_k_experts": null,
145
+ "use_bidirectional_attention": null,
146
+ "use_cache": true,
147
+ "use_double_wide_mlp": false,
148
+ "vocab_size": 262144,
149
+ "vocab_size_per_layer_input": 262144
150
+ },
151
+ "tie_word_embeddings": true,
152
+ "unsloth_fixed": true,
153
+ "unsloth_version": "2026.4.8",
154
+ "use_cache": false,
155
+ "video_token_id": 258884,
156
+ "vision_config": {
157
+ "_name_or_path": "",
158
+ "architectures": null,
159
+ "attention_bias": false,
160
+ "attention_dropout": 0.0,
161
+ "chunk_size_feed_forward": 0,
162
+ "default_output_length": 280,
163
+ "torch_dtype": "bfloat16",
164
+ "global_head_dim": 64,
165
+ "head_dim": 64,
166
+ "hidden_activation": "gelu_pytorch_tanh",
167
+ "hidden_size": 768,
168
+ "id2label": {
169
+ "0": "LABEL_0",
170
+ "1": "LABEL_1"
171
+ },
172
+ "initializer_range": 0.02,
173
+ "intermediate_size": 3072,
174
+ "is_encoder_decoder": false,
175
+ "label2id": {
176
+ "LABEL_0": 0,
177
+ "LABEL_1": 1
178
+ },
179
+ "max_position_embeddings": 131072,
180
+ "model_type": "gemma4_vision",
181
+ "num_attention_heads": 12,
182
+ "num_hidden_layers": 16,
183
+ "num_key_value_heads": 12,
184
+ "output_attentions": false,
185
+ "output_hidden_states": false,
186
+ "patch_size": 16,
187
+ "pooling_kernel_size": 3,
188
+ "position_embedding_size": 10240,
189
+ "problem_type": null,
190
+ "return_dict": true,
191
+ "rms_norm_eps": 1e-06,
192
+ "rope_parameters": {
193
+ "rope_theta": 100.0,
194
+ "rope_type": "default"
195
  },
196
+ "standardize": false,
197
+ "use_clipped_linears": true
198
+ },
199
+ "vision_soft_tokens_per_image": 280,
200
+ "auto_map": {
201
+ "AutoConfig": "configuration_sumeru.SumeruConfig",
202
+ "AutoModelForCausalLM": "modeling_sumeru.SumeruForCausalLM",
203
+ "AutoModelForImageTextToText": "modeling_sumeru.SumeruForCausalLM"
204
+ },
205
+ "sumeru_version": "1.0",
206
+ "developed_by": "Sumeru AI",
207
+ "license": "apache-2.0"
208
  }
configuration_sumeru.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Sumeru configuration — thin subclass of Gemma 4 config.
2
+
3
+ Copyright (c) 2026 Sumeru AI. Licensed under Apache 2.0.
4
+ """
5
+
6
+ from transformers import Gemma4Config
7
+
8
+
9
class SumeruConfig(Gemma4Config):
    """Configuration for Sumeru models.

    A thin subclass of ``Gemma4Config`` whose only purpose is to register
    the ``"sumeru"`` model type so ``AutoConfig`` (via ``auto_map`` and
    ``trust_remote_code=True``) resolves to this class. All architectural
    hyperparameters are inherited unchanged from Gemma 4.
    """

    # Must match "model_type" in config.json for auto-class resolution.
    model_type = "sumeru"
modeling_sumeru.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Sumeru model class — thin wrapper over Gemma 4 architecture.
2
+
3
+ Copyright (c) 2026 Sumeru AI. Licensed under Apache 2.0.
4
+
5
+ Loaded automatically via:
6
+ from transformers import AutoModelForImageTextToText, AutoProcessor
7
+ model = AutoModelForImageTextToText.from_pretrained(
8
+ "debowd/sumeru-rm", trust_remote_code=True
9
+ )
10
+ """
11
+
12
+ from transformers import Gemma4ForConditionalGeneration
13
+ from .configuration_sumeru import SumeruConfig
14
+
15
+
16
class SumeruForCausalLM(Gemma4ForConditionalGeneration):
    """Sumeru causal language model — a thin wrapper over Gemma 4.

    Sumeru is a family of small language models built by Sumeru AI for
    Indian education — NCERT curriculum, JEE, and NEET preparation across
    Physics, Chemistry, Biology, and Mathematics.

    The class adds no new behavior: it exists so that checkpoints with
    ``"architectures": ["SumeruForCausalLM"]`` load through the remote-code
    ``auto_map`` while reusing the Gemma 4 implementation unchanged.
    """

    # Binds this model to SumeruConfig so from_pretrained picks the
    # right config class for the "sumeru" model type.
    config_class = SumeruConfig

    def __init__(self, config):
        super().__init__(config)
        # NOTE(review): transformers model __init__s conventionally call
        # post_init() themselves, so this explicit call is probably a
        # redundant second invocation — kept for exact parity with the
        # original; confirm against Gemma4ForConditionalGeneration.
        self.post_init()