aframson committed
Commit f56edc2 · 1 Parent(s): 7cae85e
Files changed (1):
  1. modelLM.py +21 -21
modelLM.py CHANGED
@@ -29,30 +29,30 @@ class OBILanguageModel(PreTrainedModel):
 
 
 
-    def forward(self, idx, targets=None):
-        tok_emb = self.token_embedding_table(idx)
-        pos_emb = None  # Initialize pos_emb to None
-        try:
-            pos_emb = self.position_embedding_table(torch.arange(idx.size(1), device='cpu'))
-        except IndexError as e:
-            # Handle the IndexError by initializing pos_emb with zeros
-            print(f"IndexError: {e}")
-            print(f"idx.size(1): {idx.size(1)}")
-            print(f"Positional embedding table shape: {self.position_embedding_table.weight.shape}")
-            pos_emb = torch.zeros((idx.size(1), self.config.hidden_size), device=device)
+    def forward(self, idx, targets=None):
+        tok_emb = self.token_embedding_table(idx)
+        pos_emb = None  # Initialize pos_emb to None
+        try:
+            pos_emb = self.position_embedding_table(torch.arange(idx.size(1), device='cpu'))
+        except IndexError as e:
+            # Handle the IndexError by initializing pos_emb with zeros
+            print(f"IndexError: {e}")
+            print(f"idx.size(1): {idx.size(1)}")
+            print(f"Positional embedding table shape: {self.position_embedding_table.weight.shape}")
+            pos_emb = torch.zeros((idx.size(1), self.config.hidden_size), device=device)
 
-        x = tok_emb + pos_emb
-        x = self.transformer(x, x)
-        x = self.ln1(x)
-        x = self.ln2(x)
-        logits = self.lm_head(x)
+        x = tok_emb + pos_emb
+        x = self.transformer(x, x)
+        x = self.ln1(x)
+        x = self.ln2(x)
+        logits = self.lm_head(x)
 
-        if targets is None:
-            loss = None
-        else:
-            loss = F.cross_entropy(logits.view(-1, self.config.vocab_size), targets.view(-1))
+        if targets is None:
+            loss = None
+        else:
+            loss = F.cross_entropy(logits.view(-1, self.config.vocab_size), targets.view(-1))
 
-        return logits, loss
+        return logits, loss
 
 
     def generate(self, idx, max_new_tokens):
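
For reference, below is a minimal, self-contained sketch of the same forward pass with consistent device handling: the position indices are built on idx.device instead of a hard-coded 'cpu', and the try/except fallback (which referenced an outer device variable) is replaced by an explicit block_size check. The class and constructor arguments here (TinyLM, vocab_size, block_size, hidden_size, nhead) are illustrative assumptions, not the repository's actual OBILanguageModel or its config.

# Illustrative sketch only -- not the repository's OBILanguageModel.
import torch
import torch.nn as nn
import torch.nn.functional as F


class TinyLM(nn.Module):
    def __init__(self, vocab_size=100, block_size=64, hidden_size=32, nhead=4):
        super().__init__()
        self.vocab_size = vocab_size
        self.block_size = block_size
        self.token_embedding_table = nn.Embedding(vocab_size, hidden_size)
        self.position_embedding_table = nn.Embedding(block_size, hidden_size)
        # Stand-in for the diff's self.transformer(x, x) call.
        self.transformer = nn.Transformer(d_model=hidden_size, nhead=nhead,
                                          num_encoder_layers=2, num_decoder_layers=2,
                                          batch_first=True)
        self.ln1 = nn.LayerNorm(hidden_size)  # mirrors the two stacked LayerNorms above
        self.ln2 = nn.LayerNorm(hidden_size)
        self.lm_head = nn.Linear(hidden_size, vocab_size)

    def forward(self, idx, targets=None):
        B, T = idx.shape
        # Refuse sequences longer than the position table instead of
        # catching an IndexError and zero-filling the embeddings.
        assert T <= self.block_size, "sequence longer than the position table"
        tok_emb = self.token_embedding_table(idx)                  # (B, T, C)
        # Positions live on the same device as the input token ids.
        pos = torch.arange(T, device=idx.device)
        pos_emb = self.position_embedding_table(pos)               # (T, C)
        x = tok_emb + pos_emb                                      # broadcasts over the batch
        x = self.transformer(x, x)
        x = self.ln1(x)
        x = self.ln2(x)
        logits = self.lm_head(x)                                   # (B, T, vocab_size)
        loss = None
        if targets is not None:
            loss = F.cross_entropy(logits.view(-1, self.vocab_size), targets.view(-1))
        return logits, loss


# Quick smoke test on random token ids.
if __name__ == "__main__":
    model = TinyLM()
    idx = torch.randint(0, 100, (2, 16))
    logits, loss = model(idx, targets=idx)
    print(logits.shape, loss.item())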