Files changed (1)
  1. README.md +1 -1
README.md CHANGED
@@ -127,7 +127,7 @@ papers = [{'title': 'BERT', 'abstract': 'We introduce a new language representat
  # concatenate title and abstract
  text_batch = [d['title'] + tokenizer.sep_token + (d.get('abstract') or '') for d in papers]
  # preprocess the input
- inputs = self.tokenizer(text_batch, padding=True, truncation=True,
+ inputs = tokenizer(text_batch, padding=True, truncation=True,
                     return_tensors="pt", return_token_type_ids=False, max_length=512)
  output = model(**inputs)
  # take the first token in the batch as the embedding
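
The change simply drops the stray `self.` prefix: the README snippet has no enclosing class, so `tokenizer` must be referenced as a module-level variable. For context, here is a minimal self-contained sketch of the corrected snippet, assuming the tokenizer and model are loaded via Hugging Face `transformers`; the `allenai/specter` checkpoint name is an assumption for illustration, substitute the checkpoint this README actually documents:

```python
# Minimal runnable sketch of the corrected README snippet.
# NOTE: the checkpoint name below is an assumption, not taken from this PR.
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained('allenai/specter')  # assumed checkpoint
model = AutoModel.from_pretrained('allenai/specter')          # assumed checkpoint

papers = [{'title': 'BERT',
           'abstract': 'We introduce a new language representation model...'}]

# concatenate title and abstract, separated by the tokenizer's [SEP] token
text_batch = [d['title'] + tokenizer.sep_token + (d.get('abstract') or '') for d in papers]

# preprocess the input; `tokenizer` is a plain module-level object here,
# so the old `self.tokenizer` reference raised a NameError outside a class
inputs = tokenizer(text_batch, padding=True, truncation=True,
                   return_tensors="pt", return_token_type_ids=False, max_length=512)
output = model(**inputs)

# take the first token ([CLS]) of each sequence as the document embedding
embeddings = output.last_hidden_state[:, 0, :]
```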