alexkueck committed on
Commit
33355f4
1 Parent(s): 26c986f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -434,7 +434,8 @@ def generate_text (prompt, chatbot, history, rag_option, model_option, openai_ap
434
  else:
435
  #oder an Hugging Face --------------------------
436
  print("HF Anfrage.......................")
437
- llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={"temperature": 0.5, "max_length": 128})
 
438
  #llm = HuggingFaceChain(model=MODEL_NAME_HF, model_kwargs={"temperature": 0.5, "max_length": 128})
439
  #llm = HuggingFaceHub(url_??? = "https://wdgsjd6zf201mufn.us-east-1.aws.endpoints.huggingface.cloud", model_kwargs={"temperature": 0.5, "max_length": 64})
440
  #llm = HuggingFaceTextGenInference( inference_server_url="http://localhost:8010/", max_new_tokens=max_new_tokens,top_k=10,top_p=top_p,typical_p=0.95,temperature=temperature,repetition_penalty=repetition_penalty,)
@@ -456,8 +457,7 @@ def generate_text (prompt, chatbot, history, rag_option, model_option, openai_ap
456
  print("LLM aufrufen ohne RAG: ...........")
457
  resulti = llm_chain(llm, history_text_und_prompt)
458
  result = resulti.strip()
459
- print("result vor netzsuche:................")
460
- print(result)
461
  #Wenn keine Antwort möglich "Ich weiß es nicht" etc., dann versuchen mit Suche im Internet.
462
  if (result == None or is_response_similar(result)):
463
  print("Suche im Netz: ...........")
 
434
  else:
435
  #oder an Hugging Face --------------------------
436
  print("HF Anfrage.......................")
437
+ model_kwargs={"temperature": 0.5, "max_length": 128, "num_return_sequences": 2}
438
+ llm = HuggingFaceHub(repo_id=repo_id, model_kwargs=model_kwargs)
439
  #llm = HuggingFaceChain(model=MODEL_NAME_HF, model_kwargs={"temperature": 0.5, "max_length": 128})
440
  #llm = HuggingFaceHub(url_??? = "https://wdgsjd6zf201mufn.us-east-1.aws.endpoints.huggingface.cloud", model_kwargs={"temperature": 0.5, "max_length": 64})
441
  #llm = HuggingFaceTextGenInference( inference_server_url="http://localhost:8010/", max_new_tokens=max_new_tokens,top_k=10,top_p=top_p,typical_p=0.95,temperature=temperature,repetition_penalty=repetition_penalty,)
 
457
  print("LLM aufrufen ohne RAG: ...........")
458
  resulti = llm_chain(llm, history_text_und_prompt)
459
  result = resulti.strip()
460
+
 
461
  #Wenn keine Antwort möglich "Ich weiß es nicht" etc., dann versuchen mit Suche im Internet.
462
  if (result == None or is_response_similar(result)):
463
  print("Suche im Netz: ...........")