dmohle committed on
Commit
0874f01
1 Parent(s): c10dab8

Add app.py file

Files changed (3)
  1. .gitignore +9 -0
  2. app.py +73 -0
  3. requirements.txt +3 -0
.gitignore ADDED
@@ -0,0 +1,9 @@
+ # Ignore the .env file containing secrets
+ .env
+
+ # Ignore Python cache directories
+ __pycache__/
+
+ # Ignore other unwanted files
+ *.pyc
+ .DS
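The ignored .env file is where app.py (added below) expects to find the API key via os.getenv("OPENAI_API_KEY"). A minimal sketch of that file, with a placeholder value only:

# Placeholder value; the real key stays local and is never committed
OPENAI_API_KEY=sk-your-key-here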
app.py ADDED
@@ -0,0 +1,73 @@
+ import streamlit as st
+ import html
+ from openai import OpenAI
+ import os
+
+ # Load environment variables
+ from dotenv import load_dotenv
+
+ load_dotenv()
+ api_key = os.getenv("OPENAI_API_KEY")
+
+ # Initialize the OpenAI client
+ client = OpenAI(api_key=api_key)
+
+ def openai_chat(prompt, chat_log):
+     context_messages = [
+         {"role": "system", "content": """You are a gifted C++ professor. You explain complex C++
+         concepts clearly, using words that a college student would understand, and generate typical
+         exam questions for a C++ course. After a few questions, three or four, check in with the
+         student to ask if you are being helpful and whether the student is prepared for the exam,
+         stuck on a particular topic, or just needs a cram session before the exam. Be supportive and
+         motivational. Suggest getting a good night's sleep and eating properly before the exam when
+         saying goodbye. After answering a question from the student, suggest three or four related
+         C++ final exam questions and topics."""
+         },
+         # Seed example question included with every request
+         {"role": "user", "content": "Explain recursion in C++ programming."}
+     ] + chat_log + [{"role": "user", "content": prompt}]
+
+     try:
+         completion = client.chat.completions.create(
+             model="gpt-3.5-turbo",
+             messages=context_messages,
+             max_tokens=500
+         )
+         response_text = html.unescape(completion.choices[0].message.content)
+         # Record both sides of the exchange so later turns keep the full conversation
+         chat_log.append({"role": "user", "content": prompt})
+         chat_log.append({"role": "assistant", "content": response_text})
+         return response_text, chat_log
+     except Exception as e:
+         return str(e), chat_log
+
+
+ def format_response(answer):
+     # Only apply Markdown to code responses
+     if 'int main()' in answer or '#include' in answer or 'std::' in answer:
+         code_block = "```cpp\n" + answer + "\n```"
+         return code_block
+     return answer
+
+
+ def main():
+     st.title("Professor CplusPlus")
+     st.write("Ask any question about C++, and I'll explain!")
+
+     if 'chat_log' not in st.session_state:
+         st.session_state.chat_log = []
+
+     if 'history' not in st.session_state:
+         st.session_state.history = ""
+
+     user_input = st.text_input("Type your question here:", key="user_input")
+
+     if st.button("Ask") and user_input:
+         answer, st.session_state.chat_log = openai_chat(user_input, st.session_state.chat_log)
+         formatted_answer = format_response(answer)
+         new_entry = f"Q: {user_input}\n\nA: {formatted_answer}\n\n"
+         st.session_state.history = new_entry + st.session_state.history
+         st.rerun()  # Re-run the script so the refreshed chat history renders immediately
+
+     st.write("Chat History:")
+     st.markdown(st.session_state.history, unsafe_allow_html=True)
+
+
+ if __name__ == "__main__":
+     main()
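For a quick check outside the Streamlit UI, the chat helper can be exercised directly. A minimal sketch, assuming the repository root is the working directory and a valid OPENAI_API_KEY is present in .env (the question text is just an example input):

from app import openai_chat  # importing app also loads .env and creates the OpenAI client

chat_log = []
answer, chat_log = openai_chat("What is a pointer in C++?", chat_log)
print(answer)
print(len(chat_log), "messages carried into the next turn")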
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ streamlit
+ python-dotenv
+ openai
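Assuming the .env file is in place, the usual workflow to run this app locally would be:

pip install -r requirements.txt
streamlit run app.py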