alexkueck committed on
Commit
ba575b1
1 Parent(s): 6f4189c

Update utils.py

Browse files
Files changed (1) hide show
  1. utils.py +1 -1
utils.py CHANGED
@@ -376,7 +376,7 @@ def query(api_llm, payload):
376
 
377
  def llm_chain2(prompt, context):
378
  full_prompt = RAG_CHAIN_PROMPT.format(context=context, question=prompt)
379
- inputs = tokenizer_summarization(full_prompt, return_tensors="pt", max_length=1024, truncation=True)
380
 
381
  #Generiere die Antwort
382
  outputs = modell_rag.generate(inputs['input_ids'], max_length=1024, num_beams=2, early_stopping=True)
 
376
 
377
  def llm_chain2(prompt, context):
378
  full_prompt = RAG_CHAIN_PROMPT.format(context=context, question=prompt)
379
+ inputs = tokenizer_rag(full_prompt, return_tensors="pt", max_length=1024, truncation=True)
380
 
381
  #Generiere die Antwort
382
  outputs = modell_rag.generate(inputs['input_ids'], max_length=1024, num_beams=2, early_stopping=True)