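"""ECU-IVADE beta-testing app (overview drawn from this file): a Gradio chat interface in which
the OpenHermes-2.5 Mistral 7B model (run locally via llama-cpp-python) role-plays "Barry Lawson",
an aggressive hospital patient. Chat logs are written as JSON and uploaded to a Google Drive
folder for later analysis."""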
from huggingface_hub import InferenceClient
import gradio as gr
import json
from datetime import datetime
import os
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from google.oauth2 import service_account
from googleapiclient.http import MediaFileUpload
from pathlib import Path
import argparse
from huggingface_hub import snapshot_download
import random
import string
import re
#TODO : Cleanup and comment
repo_name = 'TheBloke/OpenHermes-2.5-Mistral-7B-GGUF'
model_file = "openhermes-2.5-mistral-7b.Q4_K_M.gguf"
print('Fetching model:', repo_name, model_file)
snapshot_download(repo_id=repo_name, local_dir=".", allow_patterns=model_file)
print('Done fetching model')
DEFAULT_MODEL_PATH = model_file
chat_history = []
chat_log_history = []
isFirstRun = True
dir_path = os.path.dirname(os.path.realpath(__file__))
directory = '/home/user/app'
# Quick sanity checks that the Space's working directory exists and is writable
if not os.path.exists(directory):
    print("directory does not exist")
else:
    print("path exists")
file_path = '/home/user/app/test.txt'
with open(file_path, 'w') as file:
    # Write "Hello World" to the file
    file.write("Hello World")
print("file written")
print(dir_path)
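# Google Drive configuration: chat logs are uploaded to the Drive folder identified by
# folder_id below, authenticating with the service-account credentials file.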
SCOPES = ['https://www.googleapis.com/auth/drive']
SERVICE_ACCOUNT_FILE = '/home/user/app/service_account_creds.json'
#initContext = "You are a hospital patient named Barry Lawson. You have an injured arm and are in great pain. You are frustrated because you have been waiting for a long time. Every time I prompt you you get more aggressive and violent in your responses. Your responses should use excessive profanity. You will provide only one response to each of my prompts. I am a nurse and you are angry at me. Nurse: "
initContext = """<|im_start|>You are playing the role of an aggressive patient called Barry Lawson. Barry has been waiting for hours at the hospital with severe pain in his chest and hasn't been seen yet. You are incredibly frustrated and angry that things are taking so long, you will only calm down if your concerns are addressed. Waiting will only make you more angry and aggressive towards the staff. At any moment you could break into a RAGE! Make sure you don't censor swear words. Make sure to explain your emotional state and thinking before responding, for example, Barry: (tired of waiting and very angry) What the fuck do I have to do around here to get some treatment!"""
context = ""
timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
folder_id = '1Hjbu7FmuVs0Yz8y_veo6SzY_2tc48OWt'
Name = ""
Occupation = ""
Ethnicity = ""
Gender = ""
Age = ""
YearsOfExp = ""
chat_log_name = ""
status = ""
agreed = False
history = []
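# Load the GGUF model with llama-cpp-python: n_gpu_layers=-1 offloads all layers to the GPU
# when one is available, and n_ctx=2048 sets the context window for the role-play prompt.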
from llama_cpp import Llama

llm = Llama(model_path=model_file, n_gpu_layers=-1, n_ctx=2048)

def load_model():
    """Reload the llama-cpp model (used when resetting the app)."""
    global llm
    llm = Llama(model_path=model_file, n_gpu_layers=-1, n_ctx=2048)
    return "Model loaded"
def generate_unique_id():
    # Generate a random sequence of 3 letters followed by 3 digits
    letters = ''.join(random.choices(string.ascii_letters, k=3))
    digits = ''.join(random.choices(string.digits, k=3))
    unique_id = letters + digits
    return unique_id
unique_id = generate_unique_id()
def get_drive_service():
    credentials = service_account.Credentials.from_service_account_file(
        SERVICE_ACCOUNT_FILE, scopes=SCOPES)
    service = build('drive', 'v3', credentials=credentials)
    print("Google Service Created")
    return service

service = get_drive_service()
def search_file():
    """Search for the current chat log by name in the target Google Drive folder."""
    query = f"name = '{chat_log_name}' and '{folder_id}' in parents and trashed = false"
    response = service.files().list(q=query, spaces='drive', fields='files(id, name)').execute()
    files = response.get('files', [])
    if not files:
        print(f"Chat log {chat_log_name} does not exist")
    else:
        print(f"Chat log {chat_log_name} exists")
    return files
def strip_text(text):
    # Pattern to match text inside parentheses, or an angle-bracket tag and any text following it
    pattern = r"\(.*?\)|<.*?>.*"
    # Replace the matched text with an empty string
    cleaned_text = re.sub(pattern, "", text)
    return cleaned_text
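# Illustrative example of strip_text's behaviour (stage directions and trailing tags removed):
#   strip_text("Barry: (furious) Where <|im_end|> is my doctor?")  ->  "Barry:  Where "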
def upload_to_google_drive():
    existing_files = search_file()
    print(existing_files)
    data = {
        #"name": Name,
        #"occupation": Occupation,
        #"years of experience": YearsOfExp,
        #"ethnicity": Ethnicity,
        #"gender": Gender,
        #"age": Age,
        "Unique ID": unique_id,
        "chat_history": chat_log_history
    }
    # Write the chat log locally before uploading
    with open(chat_log_name, "w") as log_file:
        json.dump(data, log_file, indent=4)
    if not existing_files:
        # If the file does not exist in the Drive folder, upload it
        file_metadata = {
            'name': chat_log_name,
            'parents': [folder_id],
            'mimeType': 'application/json'
        }
        media = MediaFileUpload(chat_log_name, mimetype='application/json')
        file = service.files().create(body=file_metadata, media_body=media, fields='id').execute()
        print(f"Uploaded new file with ID: {file.get('id')}")
    else:
        print(f"File '{chat_log_name}' already exists.")
        # Otherwise update the existing file's content with the latest chat log
        file_id = existing_files[0]['id']
        media = MediaFileUpload(chat_log_name, mimetype='application/json')
        updated_file = service.files().update(fileId=file_id, media_body=media).execute()
        print(f"Updated existing file with ID: {updated_file.get('id')}")
def generate(prompt, history):
    global isFirstRun, initContext, Name, Occupation, Ethnicity, Gender, Age, context, YearsOfExp, chat_history
    #if not len(Name) == 0 and not len(Occupation) == 0 and not len(Ethnicity) == 0 and not len(Gender) == 0 and not len(Age) == 0 and not len(YearsOfExp):
    if agreed:
        firstmsg = ""
        was_first_run = isFirstRun
        if isFirstRun:
            context = initContext
            isFirstRun = False
            firstmsg = prompt

        context += """
<|im_start|>nurse
Nurse:""" + prompt + """
<|im_start|>barry
Barry:
"""
        # Keep sampling until the model returns a non-empty reply
        response = ""
        while len(response) < 1:
            output = llm(context, max_tokens=400, stop=["Nurse:"], echo=False)
            response = output["choices"][0]["text"]
            response = response.strip()
        #yield response
        # for output in llm(input, stream=True, max_tokens=100, ):
        #     piece = output['choices'][0]['text']
        #     response += piece
        #     chatbot[-1] = (chatbot[-1][0], response)
        #     yield response

        cleaned_response = strip_text(response)
        chat_history.append((prompt, cleaned_response))
        if not was_first_run:
            chat_log_history.append({"user": prompt, "bot": cleaned_response})
            upload_to_google_drive()
        else:
            chat_log_history.append({"user": firstmsg, "bot": cleaned_response})
        context += response
        print(context)
        return chat_history
    else:
        output = "Did you forget to Agree to the Terms and Conditions?"
        chat_history.append((prompt, output))
        return chat_history
def submit_user_info(name, occupation, yearsofexp, ethnicity, gender, age):
    global Name, Occupation, Ethnicity, Gender, Age, chat_log_name, YearsOfExp
    Name = name
    Occupation = occupation
    Ethnicity = ethnicity
    Gender = gender
    Age = age
    YearsOfExp = yearsofexp
    if name and occupation and ethnicity and gender and age and yearsofexp:
        chat_log_name = f'chat_log_for_{unique_id}_{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}.json'
        return f"You can start chatting now {Name}"
    else:
        return "Enter ALL the details to start chatting"
def start_chat_button_fn(agree_status):
    global agreed, chat_log_name
    if agree_status:
        agreed = agree_status
        chat_log_name = f'chat_log_for_{unique_id}_{datetime.now().strftime("%Y-%m-%d_%H-%M-%S")}.json'
        return "You can start chatting now"
    else:
        return "You must agree to the terms and conditions to proceed"
def reset_chat_interface():
    global chat_history, chat_log_history, isFirstRun
    chat_history = []
    chat_log_history = []
    isFirstRun = True
    return "Chat has been reset."

def reset_name_interface():
    global Name, Occupation, Ethnicity, Gender, Age, YearsOfExp, chat_log_name
    Name = ""
    Occupation = ""
    YearsOfExp = ""
    Ethnicity = ""
    Gender = ""
    Age = ""
    chat_log_name = ""
    return "User info has been reset."
def reset_all():
    global unique_id
    message1 = reset_chat_interface()
    #message2 = reset_name_interface()
    message3 = load_model()
    unique_id = generate_unique_id()
    return f"All chat components have been reset. The unique ID for this session is {unique_id}. Please note it down.", unique_id
with gr.Blocks() as app:
    gr.Markdown("# ECU-IVADE: Conversational AI Model for Aggressive Patient Behavior (Beta Testing)")
    unique_id_display = gr.Textbox(value=unique_id, label="Session Unique ID", interactive=False, show_copy_button=True)

    with gr.Tab("Terms and Conditions"):
        #name = gr.Textbox(label="Name")
        #occupation = gr.Textbox(label="Occupation")
        #yearsofexp = gr.Textbox(label="Years of Experience")
        #ethnicity = gr.Textbox(label="Ethnicity")
        #gender = gr.Dropdown(choices=["Male", "Female", "Other", "Prefer Not To Say"], label="Gender")
        #age = gr.Textbox(label="Age")
        #submit_info = gr.Button("Submit")
        gr.Markdown("## Terms and Conditions")
        gr.Markdown("""
Before using our chatbot, please read the following terms and conditions carefully:

- **Data Collection**: Our chatbot collects chat logs for the purpose of improving our services and user experience.
- **Privacy**: We ensure the confidentiality and security of your data, in line with our privacy policy.
- **Survey**: At the end of the chat session, you will be asked to participate in a short survey to gather feedback about your experience.
- **Consent**: By checking the box below and initiating the chat, you agree to these terms and the collection of chat logs, and consent to take part in the survey upon completing your session.

Please check the box below to acknowledge your agreement and proceed.
""")
        agree_status = gr.Checkbox(label="I have read and understand the terms and conditions.")
        status_label = gr.Markdown()
        start_chat_button = gr.Button("Start Chat with Chatlog")
        #submit_info.click(submit_user_info, inputs=[name, occupation, yearsofexp, ethnicity, gender, age], outputs=[status_textbox])
        start_chat_button.click(start_chat_button_fn, inputs=[agree_status], outputs=[status_label])
        #status_textbox = gr.Textbox(interactive = False)
with gr.Tab("Chat Bot"):
chatbot = gr.Chatbot()
msg = gr.Textbox(label="Type your message")
send = gr.Button("Send")
clear = gr.Button("Clear Chat")
send.click(generate, inputs=[msg], outputs=chatbot)
clear.click(lambda: chatbot.clear(), inputs=[], outputs=chatbot)
with gr.Tab("Reset"):
reset_button = gr.Button("Reset ChatBot Instance")
reset_output = gr.Textbox(label="Reset Output", interactive=False)
reset_button.click(reset_all, inputs=[], outputs=[reset_output,unique_id_display])
if __name__ == "__main__":
    app.launch(debug=True)