Not-Grim-Refer committed on
Commit f4f7de5
1 Parent(s): 613a2f4

Update app.py

Files changed (1)
  1. app.py +65 -40
app.py CHANGED
@@ -1,44 +1,69 @@
- import os
  import streamlit as st
- import torch
- from transformers import GPTNeoForCausalLM, GPT2Tokenizer
-
- # Function to reverse engineer code
- def reverse_prompt_engineer(input_code):
-     # Load the tokenizer and model
-     tokenizer = GPT2Tokenizer.from_pretrained("EleutherAI/gpt-neo-1.3B")
-     model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B").to("cuda")
 
-     # Generate a prompt using the analyzed code
-     prompt = "Reverse engineer the following code:\n\n" + input_code
 
-     # Tokenize the prompt and generate similar code using the model
-     input_ids = tokenizer.encode(prompt, return_tensors="pt").to("cuda")
-     generated_ids = model.generate(input_ids, max_length=100, do_sample=True)
-     generated_code = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
 
-     return generated_code
-
- # Set Streamlit page configuration
- st.set_page_config(
-     page_title="Code Generator",
-     layout="wide",
-     initial_sidebar_state="expanded"
- )
-
- # Code Generation Page
- st.title("Code Generator")
-
- # User input code area
- user_input = st.text_area("Input Code")
-
- # Generate Code button
- if st.button("Generate Code"):
-     if not user_input:
-         st.error("Input field is empty!")
-     else:
-         try:
-             generated_code = reverse_prompt_engineer(user_input)
-             st.code(generated_code)
-         except Exception as e:
-             st.error(f"An error occurred: {str(e)}")
  import streamlit as st
+ from streamlit_chat import message
+ from src.langchain_agent import init, init_agent
+
+ def main():
 
+     #initialise agent and streamlit page
+     init()
+     agent_executor = init_agent()
 
+     # initialize message history
+     if "messages" not in st.session_state:
+         st.session_state.messages = []
+
+     # store agent in memory
+     if "clarina" not in st.session_state:
+         st.session_state.clarina = agent_executor
+
+     # store generated responses in memory
+     if 'generated' not in st.session_state:
+         st.session_state.generated = []
 
+     # define function to generate response
+     def generate_response(user_input):
+         # handle user input
+         if user_input:
+             # save user input
+             st.session_state.messages.append(user_input)
+             # get response from agent
+             with st.spinner("Thinking..."):
+                 response = st.session_state.clarina.reverse_prompt_engineer(user_input)
+             # save response
+             st.session_state.messages.append(response)
+             st.session_state.generated.append(response)
+
+     # container for chat history
+     response_container = st.container()
+     # container for text box
+     container = st.container()
+
+     with container:
+
+         # initialize session state to clear input text box after user enters input
+         if "temp" not in st.session_state:
+             st.session_state.temp = ""
+
+         def clear_text():
+             """callback function to clear input text box"""
+             st.session_state.temp = st.session_state.user_input
+             st.session_state.user_input = ""
+
+         st.text_input("user input", key="user_input", placeholder="Enter your code here", label_visibility="hidden", on_change=clear_text) # get user input
+         generate_response(st.session_state.temp) # generate response
+
+     # display message history
+     if st.session_state.generated:
+
+         with response_container:
+             messages = st.session_state.get('messages', [])
+             for i, msg in enumerate(messages):
+                 if i % 2 == 0:
+                     # display user input
+                     message(msg, is_user=True, key=str(i) + '_user')
+                 else:
+                     # display response
+                     message(msg, is_user=False, key=str(i) + '_ai')
+
+ if __name__ == '__main__':
+     main()
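
Note: the new app.py imports init and init_agent from src.langchain_agent and later calls reverse_prompt_engineer() on the returned agent, but that module is not part of this commit. The stand-in below is only a minimal sketch of the interface app.py relies on; the class name ClarinaAgent and its method body are placeholders, and the real module presumably builds a LangChain agent executor around an LLM rather than returning canned text.

# src/langchain_agent.py -- hypothetical stand-in, not the module from the repository
import streamlit as st


def init():
    # Set up the Streamlit page once; mirrors the page config removed from the old app.py.
    st.set_page_config(
        page_title="Code Generator",
        layout="wide",
        initial_sidebar_state="expanded",
    )


class ClarinaAgent:
    """Placeholder for the agent object that app.py stores as st.session_state.clarina."""

    def reverse_prompt_engineer(self, input_code: str) -> str:
        # app.py only requires a method that accepts the user's code and returns a string;
        # the real implementation presumably sends this prompt to an LLM via a LangChain agent.
        prompt = "Reverse engineer the following code:\n\n" + input_code
        return f"(placeholder response for a prompt of {len(prompt)} characters)"


def init_agent() -> ClarinaAgent:
    # Build and return the agent that app.py caches in session state.
    return ClarinaAgent()

With a module of that shape in place, the app starts as usual with "streamlit run app.py".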