oceansweep committed on
Commit
09ad602
1 Parent(s): 7b2538d

Upload Chat.py

Files changed (1)
  1. App_Function_Libraries/Chat.py +88 -49
App_Function_Libraries/Chat.py CHANGED
@@ -8,23 +8,71 @@ import os
 import re
 import tempfile
 from datetime import datetime
-
-from App_Function_Libraries.DB_Manager import get_conversation_name, save_chat_history_to_database
-from App_Function_Libraries.LLM_API_Calls import chat_with_openai, chat_with_anthropic, chat_with_cohere, \
-    chat_with_groq, chat_with_openrouter, chat_with_deepseek, chat_with_mistral, chat_with_huggingface, chat_with_vllm
-from App_Function_Libraries.LLM_API_Calls_Local import chat_with_aphrodite, chat_with_local_llm, chat_with_ollama, \
-    chat_with_kobold, chat_with_llama, chat_with_oobabooga, chat_with_tabbyapi
-from App_Function_Libraries.SQLite_DB import load_media_content
-from App_Function_Libraries.Utils import generate_unique_filename
-
-
+from pathlib import Path
 #
 # External Imports
 #
 # Local Imports
+from App_Function_Libraries.DB.DB_Manager import get_conversation_name, save_chat_history_to_database
+from App_Function_Libraries.LLM_API_Calls import chat_with_openai, chat_with_anthropic, chat_with_cohere, \
+    chat_with_groq, chat_with_openrouter, chat_with_deepseek, chat_with_mistral, chat_with_huggingface#, chat_with_vllm
+from App_Function_Libraries.LLM_API_Calls_Local import chat_with_aphrodite, chat_with_local_llm, chat_with_ollama, \
+    chat_with_kobold, chat_with_llama, chat_with_oobabooga, chat_with_tabbyapi
+from App_Function_Libraries.DB.SQLite_DB import load_media_content
+from App_Function_Libraries.Utils.Utils import generate_unique_filename
 #
-
 ####################################################################################################
+#
+# Functions:
+
+def chat_api_call(api_endpoint, api_key, input_data, prompt, temp, system_message=None):
+    if not api_key:
+        api_key = None
+    try:
+        logging.info(f"Debug - Chat API Call - API Endpoint: {api_endpoint}")
+        logging.info(f"Debug - Chat API Call - API Key: {api_key}")
+        logging.info(f"Debug - Chat chat_api_call - API Endpoint: {api_endpoint}")
+        if api_endpoint.lower() == 'openai':
+            response = chat_with_openai(api_key, input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "anthropic":
+            response = chat_with_anthropic(api_key, input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "cohere":
+            response = chat_with_cohere(api_key, input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "groq":
+            response = chat_with_groq(api_key, input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "openrouter":
+            response = chat_with_openrouter(api_key, input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "deepseek":
+            response = chat_with_deepseek(api_key, input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "mistral":
+            response = chat_with_mistral(api_key, input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "llama.cpp":
+            response = chat_with_llama(input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "kobold":
+            response = chat_with_kobold(input_data, api_key, prompt, temp, system_message)
+        elif api_endpoint.lower() == "ooba":
+            response = chat_with_oobabooga(input_data, api_key, prompt, temp, system_message)
+        elif api_endpoint.lower() == "tabbyapi":
+            response = chat_with_tabbyapi(input_data, prompt, temp, system_message)
+        #elif api_endpoint.lower() == "vllm":
+        #    response = chat_with_vllm(input_data, prompt, system_message)
+        elif api_endpoint.lower() == "local-llm":
+            response = chat_with_local_llm(input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "huggingface":
+            response = chat_with_huggingface(api_key, input_data, prompt, temp)  # , system_message)
+        elif api_endpoint.lower() == "ollama":
+            response = chat_with_ollama(input_data, prompt, temp, system_message)
+        elif api_endpoint.lower() == "aphrodite":
+            response = chat_with_aphrodite(input_data, prompt, temp, system_message)
+        else:
+            raise ValueError(f"Unsupported API endpoint: {api_endpoint}")
+
+        return response
+
+    except Exception as e:
+        logging.error(f"Error in chat function: {str(e)}")
+        return f"An error occurred: {str(e)}"
+
 def chat(message, history, media_content, selected_parts, api_endpoint, api_key, prompt, temperature,
          system_message=None):
     try:
@@ -65,49 +113,13 @@ def chat(message, history, media_content, selected_parts, api_endpoint, api_key,
         logging.debug(f"Debug - Chat Function - Prompt: {prompt}")

         # Use the existing API request code based on the selected endpoint
-        logging.info(f"Debug - Chat Function - API Endpoint: {api_endpoint}")
-        if api_endpoint.lower() == 'openai':
-            response = chat_with_openai(api_key, input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "anthropic":
-            response = chat_with_anthropic(api_key, input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "cohere":
-            response = chat_with_cohere(api_key, input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "groq":
-            response = chat_with_groq(api_key, input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "openrouter":
-            response = chat_with_openrouter(api_key, input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "deepseek":
-            response = chat_with_deepseek(api_key, input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "mistral":
-            response = chat_with_mistral(api_key, input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "llama.cpp":
-            response = chat_with_llama(input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "kobold":
-            response = chat_with_kobold(input_data, api_key, prompt, temp, system_message)
-        elif api_endpoint.lower() == "ooba":
-            response = chat_with_oobabooga(input_data, api_key, prompt, temp, system_message)
-        elif api_endpoint.lower() == "tabbyapi":
-            response = chat_with_tabbyapi(input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "vllm":
-            response = chat_with_vllm(input_data, prompt, system_message)
-        elif api_endpoint.lower() == "local-llm":
-            response = chat_with_local_llm(input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "huggingface":
-            response = chat_with_huggingface(api_key, input_data, prompt, temp)  # , system_message)
-        elif api_endpoint.lower() == "ollama":
-            response = chat_with_ollama(input_data, prompt, temp, system_message)
-        elif api_endpoint.lower() == "aphrodite":
-            response = chat_with_aphrodite(input_data, prompt, temp, system_message)
-        else:
-            raise ValueError(f"Unsupported API endpoint: {api_endpoint}")
-
-        return response
-
+        response = chat_api_call(api_endpoint, api_key, input_data, prompt, temp, system_message)
     except Exception as e:
         logging.error(f"Error in chat function: {str(e)}")
        return f"An error occurred: {str(e)}"


+
 def save_chat_history_to_db_wrapper(chatbot, conversation_id, media_content):
     logging.info(f"Attempting to save chat history. Media content type: {type(media_content)}")
     try:
@@ -268,6 +280,33 @@ def update_chat_content(selected_item, use_content, use_summary, use_prompt, ite
         return {}, []


+CHARACTERS_FILE = Path('.', 'Helper_Scripts', 'Character_Cards', 'Characters.json')
+
+def save_character(character_data):
+    if CHARACTERS_FILE.exists():
+        with CHARACTERS_FILE.open('r') as f:
+            characters = json.load(f)
+    else:
+        characters = {}
+
+    characters[character_data['name']] = character_data
+
+    with CHARACTERS_FILE.open('w') as f:
+        json.dump(characters, f, indent=2)
+
+
+def load_characters():
+    if os.path.exists(CHARACTERS_FILE):
+        with open(CHARACTERS_FILE, 'r') as f:
+            return json.load(f)
+    return {}
+
+
+def get_character_names():
+    characters = load_characters()
+    return list(characters.keys())
+
+
 #
 # End of Chat.py
 ##########################################################################################################################
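
The substance of this commit is that the per-provider dispatch that used to be inlined in chat() now lives in a reusable chat_api_call() helper, with chat() delegating to it. A minimal caller sketch, assuming App_Function_Libraries is on the import path; the endpoint name, placeholder key, message text, and temperature below are illustrative values, not taken from the repository:

from App_Function_Libraries.Chat import chat_api_call

# Endpoint names are matched case-insensitively ("OpenAI" works the same as "openai").
# An empty api_key is coerced to None inside the helper, so key-less local backends can pass "".
reply = chat_api_call(
    api_endpoint="openai",
    api_key="sk-...",  # placeholder key
    input_data="Summarize the uploaded transcript in three bullet points.",
    prompt="Be concise.",
    temp=0.7,
    system_message="You are a helpful assistant.",
)
print(reply)

Note that the ValueError for an unsupported endpoint is raised inside the helper's own try block, so it never propagates to the caller; it comes back as the string "An error occurred: Unsupported API endpoint: ...", the same shape as any other provider failure.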
 
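The other addition is a small JSON-backed store for character cards at Helper_Scripts/Character_Cards/Characters.json. A round-trip sketch of the three new helpers; it assumes Chat.py also imports json and os (which the helpers rely on), and every field other than 'name' is free-form illustration:

from App_Function_Libraries.Chat import save_character, load_characters, get_character_names

# 'name' is the only key save_character() actually requires: it becomes the
# lookup key inside Characters.json; the rest of the dict is stored verbatim.
save_character({
    "name": "Archivist",
    "description": "Answers questions about previously ingested media.",
    "greeting": "Which item should we look at?",
})

print(get_character_names())                       # e.g. ['Archivist']
print(load_characters()["Archivist"]["greeting"])  # -> 'Which item should we look at?'

Because CHARACTERS_FILE is a relative Path('.', 'Helper_Scripts', 'Character_Cards', 'Characters.json'), the helpers read and write relative to the process working directory, and save_character() expects that directory to exist already; opening the file with 'w' will not create it.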