alexkueck committed
Commit
41e28a6
1 Parent(s): 4bef394

Update app.py

Files changed (1)
  1. app.py +4 -0
app.py CHANGED
@@ -570,6 +570,10 @@ def generate_code_antwort (prompt, chatbot, history, model_option, openai_api_key
         llm = ChatOpenAI(model_name = MODEL_NAME_CODE, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
         # Append the prompt to the history and turn it into a single text
         history_text_und_prompt = generate_prompt_with_history_openai(prompt, history)
+    else:
+        llm = ChatOpenAI(model_name = MODEL_NAME_IMAGE, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
+        # Append the prompt to the history and turn it into a single text
+        history_text_und_prompt = generate_prompt_with_history_openai(prompt, history)
 
     print("LLM aufrufen ohne RAG: ...........")
     resulti = llm_chain(llm, history_text_und_prompt)
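
For context, the hunk completes a conditional (presumably on model_option) inside generate_code_antwort: each branch configures a ChatOpenAI instance, appends the prompt to the history via generate_prompt_with_history_openai, and hands the combined text to llm_chain. Below is a minimal sketch of that selection step, not the repository's exact code: the branch condition value, the placeholder model names, and the import path are assumptions for illustration, while the ChatOpenAI keyword arguments mirror the diff above.

# Hedged sketch of the model selection that commit 41e28a6 completes.
# The condition value "code", the placeholder model names and the import
# path are assumptions; the ChatOpenAI kwargs mirror the diff above.
from langchain.chat_models import ChatOpenAI  # assumed import path in app.py

MODEL_NAME_CODE = "gpt-3.5-turbo"    # placeholder; defined elsewhere in app.py
MODEL_NAME_IMAGE = "gpt-4"           # placeholder; defined elsewhere in app.py

def pick_llm(model_option, openai_api_key, temperature=0.2):
    """Return the ChatOpenAI instance for the requested mode (illustration only)."""
    if model_option == "code":        # assumed branch condition
        model_name = MODEL_NAME_CODE
    else:                             # the branch this commit adds
        model_name = MODEL_NAME_IMAGE
    return ChatOpenAI(model_name=model_name,
                      openai_api_key=openai_api_key,
                      temperature=temperature)

Either way, generate_code_antwort then builds history_text_und_prompt and passes it together with the selected llm to llm_chain, so the two branches differ only in which model name they configure.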