specified batch size - test to see if this works

#8
Files changed (1)
  1. epi_pipeline.py +1 -1
epi_pipeline.py CHANGED
@@ -669,7 +669,7 @@ class NER_Pipeline:
         output_dict = {label:[] for label in self.labels}

         dataset = NerDataset(text, self.bert_tokenizer, self.config)
-        predictions, label_ids, _ = self.trainer.predict(dataset)
+        predictions, label_ids, _ = self.trainer.predict(dataset, batch_size=16)
         preds_list, _ = self.align_predictions(predictions, label_ids)
         #dataset.ner_inputs.labels = preds_list
         for ner_input, sent_pred_list in zip(dataset.ner_inputs, preds_list):
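If `self.trainer` here is a plain Hugging Face `transformers.Trainer` (an assumption based on the attribute names in the diff, not stated in this commit), `Trainer.predict()` does not accept a `batch_size` keyword, so the call as written would raise a `TypeError`. The evaluation batch size is normally set once through `TrainingArguments.per_device_eval_batch_size` when the trainer is constructed, and `predict()` is then called with the dataset alone. A minimal sketch under that assumption (the checkpoint name and output directory below are placeholders, not part of this pipeline):

```python
from transformers import (
    AutoModelForTokenClassification,
    Trainer,
    TrainingArguments,
)

# Placeholder NER checkpoint; substitute the model this pipeline actually loads.
checkpoint = "dslim/bert-base-NER"
model = AutoModelForTokenClassification.from_pretrained(checkpoint)

args = TrainingArguments(
    output_dir="ner_output",         # placeholder directory for Trainer artifacts
    per_device_eval_batch_size=16,   # this is what controls the batch size used by predict()
)
trainer = Trainer(model=model, args=args)

# predict() then takes only the dataset, matching the original call shape:
# predictions, label_ids, _ = trainer.predict(dataset)
```

Keeping the `batch_size=16` argument would only work if `self.trainer` is a subclass that overrides `predict()` to accept it; otherwise the `TrainingArguments` route above is the supported way to change the prediction batch size.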