codesage committed
Commit 230fdd8 (1 parent: c98c3af)

Update modeling_codesage.py

Files changed (1): modeling_codesage.py (+2 -2)
modeling_codesage.py CHANGED
@@ -349,7 +349,7 @@ class CodeSageForSequenceClassification(CodeSagePreTrainedModel):
         self.num_labels = config.num_labels
         self.config = config
 
-        self.encoder = CodeSageModel(config)
+        self.transformer = CodeSageModel(config)
         classifier_dropout = (
             config.classifier_dropout if config.classifier_dropout is not None else config.residual_dropout_prob
         )
@@ -374,7 +374,7 @@ class CodeSageForSequenceClassification(CodeSagePreTrainedModel):
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
         assert attention_mask is not None, "attention_mask is needed to perform max-pooling"
 
-        outputs = self.encoder(
+        outputs = self.transformer(
             input_ids,
             attention_mask=attention_mask,
             position_ids=position_ids,