alexkueck committed on
Commit
f978413
1 Parent(s): 99883d6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -12
app.py CHANGED
@@ -122,11 +122,11 @@ os.environ["HUGGINGFACEHUB_API_TOKEN"] = HUGGINGFACEHUB_API_TOKEN
122
  #Alternativ: HuggingChat API nutzen
123
  pw=os.getenv("HFPW")
124
  email= os.getenv("HFEMail")
125
- sign = Login(email, pw)
126
- cookies = sign.login()
127
  # Save cookies to the local directory
128
- cookie_path_dir = "cookies_hf"
129
- sign.saveCookiesToDir(cookie_path_dir)
130
 
131
 
132
  ################################################
@@ -470,13 +470,13 @@ def generate_text (prompt, chatbot, history, rag_option, model_option, openai_ap
470
  #oder an Hugging Face --------------------------
471
  print("HF Anfrage.......................")
472
  model_kwargs={"temperature": 0.5, "max_length": 512, "num_return_sequences": 1, "top_k": top_k, "top_p": top_p, "repetition_penalty": repetition_penalty}
473
- #llm = HuggingFaceHub(repo_id=repo_id, model_kwargs=model_kwargs)
474
  #llm = HuggingFaceChain(model=MODEL_NAME_HF, model_kwargs={"temperature": 0.5, "max_length": 128})
475
  #llm = HuggingFaceHub(url_??? = "https://wdgsjd6zf201mufn.us-east-1.aws.endpoints.huggingface.cloud", model_kwargs={"temperature": 0.5, "max_length": 64})
476
  #llm = HuggingFaceTextGenInference( inference_server_url="http://localhost:8010/", max_new_tokens=max_new_tokens,top_k=10,top_p=top_p,typical_p=0.95,temperature=temperature,repetition_penalty=repetition_penalty,)
477
  #llm via HuggingChat
478
- llm = hugchat.ChatBot(cookies=cookies.get_dict())
479
- hugchat=True #da dieses Model in llm_chain bzw reag_chain anderes verarbeitet wird
480
 
481
  print("HF")
482
  #Prompt an history anhängen und einen Text daraus machen
@@ -512,12 +512,12 @@ def generate_text (prompt, chatbot, history, rag_option, model_option, openai_ap
512
  suche_im_Netz="Antwort aus dem Internet ..."
513
  #Prompt an history anhängen und einen Text daraus machen
514
  history_text_und_prompt = generate_prompt_with_history_hf(prompt, history)
515
- if (hugchat):
516
  #mit hugchat
517
- result = create_assistant_suche_hf(llm, history_text_und_prompt)
518
- else:
519
- #mit tavily:
520
- result = create_assistant_suche(history_text_und_prompt)
521
 
522
 
523
  """
 
122
  #Alternativ: HuggingChat API nutzen
123
  pw=os.getenv("HFPW")
124
  email= os.getenv("HFEMail")
125
+ #sign = Login(email, pw)
126
+ #cookies = sign.login()
127
  # Save cookies to the local directory
128
+ #cookie_path_dir = "cookies_hf"
129
+ #sign.saveCookiesToDir(cookie_path_dir)
130
 
131
 
132
  ################################################
 
470
  #oder an Hugging Face --------------------------
471
  print("HF Anfrage.......................")
472
  model_kwargs={"temperature": 0.5, "max_length": 512, "num_return_sequences": 1, "top_k": top_k, "top_p": top_p, "repetition_penalty": repetition_penalty}
473
+ llm = HuggingFaceHub(repo_id=repo_id, model_kwargs=model_kwargs)
474
  #llm = HuggingFaceChain(model=MODEL_NAME_HF, model_kwargs={"temperature": 0.5, "max_length": 128})
475
  #llm = HuggingFaceHub(url_??? = "https://wdgsjd6zf201mufn.us-east-1.aws.endpoints.huggingface.cloud", model_kwargs={"temperature": 0.5, "max_length": 64})
476
  #llm = HuggingFaceTextGenInference( inference_server_url="http://localhost:8010/", max_new_tokens=max_new_tokens,top_k=10,top_p=top_p,typical_p=0.95,temperature=temperature,repetition_penalty=repetition_penalty,)
477
  #llm via HuggingChat
478
+ #llm = hugchat.ChatBot(cookies=cookies.get_dict())
479
+ #hugchat=True #da dieses Model in llm_chain bzw reag_chain anderes verarbeitet wird
480
 
481
  print("HF")
482
  #Prompt an history anhängen und einen Text daraus machen
 
512
  suche_im_Netz="Antwort aus dem Internet ..."
513
  #Prompt an history anhängen und einen Text daraus machen
514
  history_text_und_prompt = generate_prompt_with_history_hf(prompt, history)
515
+ #if (hugchat):
516
  #mit hugchat
517
+ #result = create_assistant_suche_hf(llm, history_text_und_prompt)
518
+ #else:
519
+ #mit tavily:
520
+ result = create_assistant_suche(history_text_und_prompt)
521
 
522
 
523
  """