alexkueck committed
Commit
01d87d4
1 Parent(s): aaf5e25

Update app.py

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -55,7 +55,7 @@ CHROMA_PDF = './chroma/kkg/pdf'
 CHROMA_WORD = './chroma/kkg/word'
 CHROMA_EXCEL = './chroma/kkg/excel'
 #HuggingFace Model name--------------------------------
-MODEL_NAME_HF = "t5-small" #"meta-llama/Meta-Llama-3-8B-Instruct" #"mistralai/Mistral-7B-Instruct-v0.3" #"microsoft/Phi-3-mini-4k-instruct" #"HuggingFaceH4/zephyr-7b-alpha"
+MODEL_NAME_HF = "dbmdz/bert-base-german-cased" #"meta-llama/Meta-Llama-3-8B-Instruct" #"mistralai/Mistral-7B-Instruct-v0.3" #"microsoft/Phi-3-mini-4k-instruct" #"HuggingFaceH4/zephyr-7b-alpha"
 
 #HuggingFace Repo ID--------------------------------
 #repo_id = "meta-llama/Llama-2-13b-chat-hf"
@@ -247,7 +247,7 @@ def generate_text (prompt, chatbot, history, vektordatenbank, retriever, top_p=0
 #3rd alternative for the pipeline
 # Create a pipeline with the desired parameters
 #llm = pipeline("text-generation", model=MODEL_NAME_HF, config={"temperature": 0.5, "max_length": 1024, "num_return_sequences": 1, "top_k": top_k, "top_p": top_p, "repetition_penalty": repetition_penalty}, trust_remote_code=True)
-llm = pipeline("text2text-generation", model=MODEL_NAME_HF, trust_remote_code=True)
+llm = pipeline("summarization", model=MODEL_NAME_HF, trust_remote_code=True)
 result = rag_chain(llm, history_text_und_prompt, retriever)
 
 except Exception as e:
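
For readers trying the changed call in isolation, here is a minimal, self-contained sketch of the transformers summarization pipeline. Note that dbmdz/bert-base-german-cased is an encoder-only BERT checkpoint, while the "summarization" pipeline expects an encoder-decoder (seq2seq) model, so the sketch substitutes t5-small, the checkpoint this commit replaces, as a known-working stand-in; the input text and generation parameters are illustrative only.

from transformers import pipeline

# Illustrative stand-in: "summarization" needs a seq2seq model, and
# t5-small (the model this commit swaps out) has the required
# generation head, unlike an encoder-only BERT checkpoint.
llm = pipeline("summarization", model="t5-small")

text = (
    "Hugging Face pipelines bundle a tokenizer and a model behind one "
    "callable, so a single line is enough to run inference locally."
)
result = llm(text, max_length=40, min_length=5, do_sample=False)
print(result[0]["summary_text"])

If the commit's line is run as written, transformers would likely fail to load the BERT checkpoint for this task, since it has no seq2seq head to generate summaries with; that is worth verifying before relying on this change.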