Letsch22 committed on
Commit cb12085
1 Parent(s): 35accf7

Initial mock interview concept

Files changed (2):
  1. app.py +80 -27
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,46 +1,99 @@
  import os
+ import urllib
+ from time import sleep
+ import urllib.request
+
  import gradio as gr
  import openai
+ from dotenv import load_dotenv
+
+ load_dotenv()
+
+ client = openai.OpenAI(api_key=os.environ['OPENAI_API_KEY'])

+ def create_files(company):
+     if company.lower() == 'amazon':
+         url = 'https://www.aboutamazon.com/about-us/leadership-principles'
+         filename = 'leadership_principles.html'
+     else:
+         return []
+
+     filename, headers = urllib.request.urlretrieve(url, filename)
+     with open(filename, 'rb') as file:
+         assistant_file = client.files.create(file=file, purpose='assistants')
+     file_ids = [assistant_file.id]
+     os.remove(filename)
+     return file_ids

  # Assistant Creation function
- def create_assistant_json(uploaded_file, assistant_name, assistant_message):
-     client = openai.OpenAI(api_key=os.environ["OPENAI_API_KEY"])
-     # Check if a file was uploaded
-     print(uploaded_file)
-     df = open(uploaded_file, "rb")
-     file = client.files.create(file=df,
-                                purpose='assistants')
+ def create_assistant(job_role, company):
+     file_ids = create_files(company)

      assistant = client.beta.assistants.create(
-         name=assistant_name,
-         instructions=assistant_message,
-         model="gpt-4-0125-preview",
+         name='Mock Interviewer',
+         instructions=f'You are an AI mock interviewer for {job_role} roles at {company}. If you have been provided a file, use it as an interview guide.',
+         model='gpt-4-0125-preview',
          tools=[
              {
-                 "type": "retrieval"  # This adds the knowledge base as a tool
+                 'type': 'retrieval'  # This adds the knowledge base as a tool
              }
          ],
-         file_ids=[file.id])
+         file_ids=file_ids)

      return assistant.id

+ def chat(usr_message, history, job_role, company):
+     print('Started function')
+     thread = client.beta.threads.create()
+     user_input = usr_message
+
+     # TODO: this creates the assistant every single chat interaction, need to
+     # cache this if role/company haven't changed
+     assistant_id = create_assistant(job_role, company)
+
+     # Add the user's message to the thread
+     client.beta.threads.messages.create(thread_id=thread.id,
+                                         role="user",
+                                         content=user_input)
+     print('Client made')
+     # Run the Assistant
+     run = client.beta.threads.runs.create(thread_id=thread.id,
+                                           assistant_id=assistant_id)
+     print('Run created')
+
+     # Check if the Run requires action (function call)
+     while True:
+         run_status = client.beta.threads.runs.retrieve(thread_id=thread.id,
+                                                        run_id=run.id)
+         print(f"Run status: {run_status.status}")
+         if run_status.status == 'completed':
+             break
+
+         sleep(1)  # Wait for a second before checking again
+
+     # Retrieve and return the latest message from the assistant
+     messages = client.beta.threads.messages.list(thread_id=thread.id)
+     response = messages.data[0].content[0].text.value
+
+     print(f"Assistant response: {response}")  # Debugging line
+     # return json.dumps({"response": response})
+     yield response
+
  # Creating the Gradio interface
  with gr.Blocks() as demo:
-     gr.Markdown("## To create an OpenAI Assistant please fill in the following sections. Upload a file to give the Assistant knowledge and a focus on something outside of it's normal training. Then add an assistant name and message. The Assistant message should guide the model into in a role. An example would be, You are a helpful Asssitant who is knowledgable in the field of...")
-     gr.Markdown("## After creating the ID head to [OpenAI_Assistant_Chat](https://huggingface.co/spaces/jadend/OpenAI_Assistant_Chat).")
-     with gr.Row():
-         file_input = gr.File(label="Upload your file", type="filepath")
-         assistant_name = gr.Textbox(label="The Assistant's Name")
-         assistant_message = gr.Textbox(label="Assistant Message")
-     generate_button = gr.Button("Generate Your Assistant ID")
-     output_id = gr.Textbox(label="Your Asssistant ID", value="")
-
-     generate_button.click(
-         fn=create_assistant_json,
-         inputs=[file_input, assistant_name, assistant_message],
-         outputs=output_id
-     )
+     chatbot = gr.ChatInterface(
+         chat,
+         additional_inputs=[
+             gr.Textbox(label='Job Role'),
+             gr.Textbox(label='Company')
+         ],
+         title='I am your AI mock interviewer.',
+         description='Make your selections above to configure me.',
+         theme='soft',
+         fill_height=True,
+         retry_btn=None,
+         undo_btn=None,
+         clear_btn='Clear').queue()

- if __name__ == "__main__":
+ if __name__ == '__main__':
      demo.launch().queue()
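
Note on the TODO inside chat(): every message currently creates a brand-new assistant (and re-uploads the company file). A minimal sketch of one way to address this, assuming a module-level cache keyed by role and company; the cache dict and helper name below are hypothetical and not part of this commit:

_assistant_cache = {}

def get_or_create_assistant(job_role, company):
    # Reuse an existing assistant when this role/company pair has been seen before.
    key = (job_role.strip().lower(), company.strip().lower())
    if key not in _assistant_cache:
        _assistant_cache[key] = create_assistant(job_role, company)
    return _assistant_cache[key]

chat() could then call get_or_create_assistant(job_role, company) instead of create_assistant(job_role, company). A similar guard in the polling loop could break on terminal run statuses such as 'failed', 'cancelled', or 'expired' so the loop cannot spin forever.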
requirements.txt CHANGED
@@ -1,2 +1,3 @@
  openai==1.16.2
- gradio==4.25.0
+ gradio==4.25.0
+ python-dotenv==1.0.1
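
Since app.py now calls load_dotenv() and reads os.environ['OPENAI_API_KEY'] directly, a missing key surfaces as a bare KeyError at import time. A small sketch of a friendlier startup check, assuming the key lives either in the environment or in a local .env file (the error message wording is illustrative, not part of the commit):

import os
import openai
from dotenv import load_dotenv

load_dotenv()  # pick up OPENAI_API_KEY from a local .env file, if present

api_key = os.getenv('OPENAI_API_KEY')
if not api_key:
    raise RuntimeError('OPENAI_API_KEY is not set; add it to the environment or to a .env file.')
client = openai.OpenAI(api_key=api_key)

With the key in place, running pip install -r requirements.txt followed by python app.py should be enough to launch the Gradio app locally.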