Spaces:
Sleeping
Sleeping
liujch1998
committed on
Commit
•
e9e5831
1
Parent(s):
6d2feb6
Bug fix
Browse files
app.py
CHANGED
@@ -49,12 +49,10 @@ class Processor:
|
|
49 |
num_return_sequences=m,
|
50 |
top_p=top_p,
|
51 |
) # (K, KL); begins with 0 ([BOS]); ends with 1 ([EOS])
|
52 |
-
print(knowledges_ids)
|
53 |
knowledges_ids = knowledges_ids[:, 1:].contiguous() # no beginning; ends with 1 ([EOS])
|
54 |
knowledges = self.tokenizer.batch_decode(knowledges_ids, skip_special_tokens=True, clean_up_tokenization_spaces=True)
|
55 |
knowledges = list(set(knowledges))
|
56 |
knowledges = [''] + knowledges
|
57 |
-
print(knowledges)
|
58 |
|
59 |
prompts = [question + (f' \\n Knowledge: {knowledge} \\n Answer: ' if knowledge != '' else ' \\n Answer:') for knowledge in knowledges]
|
60 |
prompts_tok = self.tokenizer(prompts, return_tensors='pt', padding='max_length', truncation='longest_first', max_length=max_question_len + max_knowledge_len).to(device) # (1+K, QL+KL)
|
|
|
49 |
num_return_sequences=m,
|
50 |
top_p=top_p,
|
51 |
) # (K, KL); begins with 0 ([BOS]); ends with 1 ([EOS])
|
|
|
52 |
knowledges_ids = knowledges_ids[:, 1:].contiguous() # no beginning; ends with 1 ([EOS])
|
53 |
knowledges = self.tokenizer.batch_decode(knowledges_ids, skip_special_tokens=True, clean_up_tokenization_spaces=True)
|
54 |
knowledges = list(set(knowledges))
|
55 |
knowledges = [''] + knowledges
|
|
|
56 |
|
57 |
prompts = [question + (f' \\n Knowledge: {knowledge} \\n Answer: ' if knowledge != '' else ' \\n Answer:') for knowledge in knowledges]
|
58 |
prompts_tok = self.tokenizer(prompts, return_tensors='pt', padding='max_length', truncation='longest_first', max_length=max_question_len + max_knowledge_len).to(device) # (1+K, QL+KL)
|