noorulamean444 committed on
Commit fa7065e
1 Parent(s): b4be4ad

Update app.py

Files changed (1)
  1. app.py +57 -79
app.py CHANGED
@@ -4,14 +4,7 @@ import gradio as gr
 import time
 import heapq
 import re
-from utils import package_installer, context_identifier, query, convert_ipynb_to_html
-
-
-# package_installer('sentence_transformers')
-# package_installer('nbconvert')
-# package_installer('inflect')
-
-
+from utils import context_identifier, query, convert_ipynb_to_html
 from sentence_transformers import SentenceTransformer, util
 import nbconvert
 import nbformat
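
The dropped package_installer calls point at a runtime-install helper defined in utils.py, which is not part of this diff, so its real body is unknown. As a purely hypothetical sketch, a helper with that name and call signature usually amounts to a pip wrapper:

```python
# Hypothetical sketch only: the real package_installer lives in utils.py and is not shown in this diff.
import importlib
import subprocess
import sys

def package_installer(package: str) -> None:
    """Install `package` with pip at runtime if it cannot already be imported."""
    try:
        importlib.import_module(package)
    except ImportError:
        subprocess.check_call([sys.executable, "-m", "pip", "install", package])
```

Removing these calls (and the unused import) presumably leaves dependency installation to the Space's declared requirements instead of doing it at runtime.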
@@ -81,77 +74,62 @@ with gr.Blocks() as demo:
 indexed_cells_list.append(f'# {i+1}th cell\n' + cell_addresses + itxt)
 index_comments.append(f'# {i+1}th cell\n' + cell_addresses)
 
-# print(indexed_cells_list)
+emb_cells = embedding_model.encode(index_comments,convert_to_tensor=True)
+emb_msg = embedding_model.encode(message,convert_to_tensor=True)
+cosine_sim_0 = util.cos_sim(emb_msg,emb_cells)
+
+top_5_cells_scores = heapq.nlargest(5,cosine_sim_0[0])
+top_5_cells = [indexed_cells_list[index] for index in sorted(list(cosine_sim_0[0]).index(score) for score in top_5_cells_scores)]
+
+top_2_chats = None
+if hist_cop:
+chat_history = [user_prompt_template(item[0]) + assistant_response_template(item[1]) for item in hist_cop]
+# emb_formatted_user_msg = embedding_model.encode(formatted_user_msg,convert_to_tensor=True)
+emb_chat_history = embedding_model.encode(chat_history,convert_to_tensor=True)
+cosine_similarity_scores = util.cos_sim(emb_msg,emb_chat_history)
+top_2_scores = heapq.nlargest(2,cosine_similarity_scores[0])
+top_2_chats = [chat_history[i] for i in sorted(list(cosine_similarity_scores[0]).index(val) for val in top_2_scores)]
+
+similar_chat_history = ''
+if top_2_chats:
+for chats in top_2_chats:
+
+similar_chat_history += chats
+
+top_5_cells_string = '\n'.join(top_5_cells)
+
+if context == 'notebook_cell_context':
+prompt = f"""
+You are a coding assistant who clarifies queries based on python. You will be given two types of context. One type of context
+consists of previous chat messages and the other consists of code from a jupyter notebook. Your task is to answer/explanation user
+query by picking relevant information from both context and coming up with the answer which explains the query. The user
+query is delimited by ####.
+
+previous_chat_context:
+{similar_chat_history}
+
+notebook_cell_context:
+{top_5_cells_string}
+
+####
+{message}
+####
+"""
+elif context == 'previous_cell_context':
+prompt = f"""
+You are a coding assistant who clarifies queries based on python. You will be given a context which consists of previous chat messages.
+Your task is to answer/explain user query based on the context.The user query is delimited by ####.
+
+previous_chat_context:
+{similar_chat_history}
+
+####
+{message}
+####
+"""
 
 except:
-pass
-
-emb_cells = embedding_model.encode(index_comments,convert_to_tensor=True)
-emb_msg = embedding_model.encode(message,convert_to_tensor=True)
-cosine_sim_0 = util.cos_sim(emb_msg,emb_cells)
-
-top_5_cells_scores = heapq.nlargest(5,cosine_sim_0[0])
-top_5_cells = [indexed_cells_list[index] for index in sorted(list(cosine_sim_0[0]).index(score) for score in top_5_cells_scores)]
-
-top_2_chats = None
-if hist_cop:
-chat_history = [user_prompt_template(item[0]) + assistant_response_template(item[1]) for item in hist_cop]
-# emb_formatted_user_msg = embedding_model.encode(formatted_user_msg,convert_to_tensor=True)
-emb_chat_history = embedding_model.encode(chat_history,convert_to_tensor=True)
-cosine_similarity_scores = util.cos_sim(emb_msg,emb_chat_history)
-top_2_scores = heapq.nlargest(2,cosine_similarity_scores[0])
-top_2_chats = [chat_history[i] for i in sorted(list(cosine_similarity_scores[0]).index(val) for val in top_2_scores)]
-
-similar_chat_history = ''
-if top_2_chats:
-for chats in top_2_chats:
-# formatted_assistant_msg = chats[1].replace(chats[0],'').strip().removesuffix('<|end|>')
-similar_chat_history += chats
-
-top_5_cells_string = '\n'.join(top_5_cells)
-# print(top_5_cells_string)
-
-if context == 'notebook_cell_context':
-prompt = f"""
-You are a coding assistant who clarifies queries based on python. You will be given two types of context. One type of context
-consists of previous chat messages and the other consists of code from a jupyter notebook. Your task is to answer/explanation user
-query by picking relevant information from both context and coming up with the answer which explains the query. The user
-query is delimited by ####.
-
-previous_chat_context:
-{similar_chat_history}
-
-notebook_cell_context:
-{top_5_cells_string}
-
-####
-{message}
-####
-"""
-elif context == 'previous_cell_context':
-prompt = f"""
-You are a coding assistant who clarifies queries based on python. You will be given a context which consists of previous chat messages.
-Your task is to answer/explain user query based on the context.The user query is delimited by ####.
-
-previous_chat_context:
-{similar_chat_history}
-
-####
-{message}
-####
-"""
-# elif context == 'notebook_cell_context':
-# prompt = f"""
-# You are a coding assistant who clarifies queries based on python. You will be given a context which consists of code from jupyter notebook
-# cells. Your task is to answer/explain user query based on the context.The user query is delimited by ####.
-
-# notebook_cell_context:
-# {top_5_cells_string}
-
-# ####
-# {message}
-# ####
-# """
+prompt = message
 
 user_input = user_prompt_template(prompt)
 
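The bulk of this hunk moves the retrieval and prompt-building logic so it runs before the bare except: handler, which now only falls back to prompt = message. The retrieval embeds the per-cell summaries and the user message with sentence-transformers and keeps the most similar notebook cells (and chat turns) by cosine similarity. A minimal, self-contained sketch of that pattern; the model name and toy data below are assumptions, since app.py defines embedding_model and builds the cell lists elsewhere in the file:

```python
import heapq

from sentence_transformers import SentenceTransformer, util

# Assumed model; app.py's embedding_model is created outside this diff.
embedding_model = SentenceTransformer("all-MiniLM-L6-v2")

# Toy stand-ins for the per-cell summaries and full cell sources built from the notebook.
index_comments = ["# 1th cell\n# imports", "# 2th cell\n# load the csv", "# 3th cell\n# train the model"]
indexed_cells_list = [
    "# 1th cell\n# imports\nimport pandas as pd",
    "# 2th cell\n# load the csv\ndf = pd.read_csv('data.csv')",
    "# 3th cell\n# train the model\nmodel.fit(df)",
]
message = "where is the data loaded from?"

# Embed the cell summaries and the user message, then score every cell against the message.
emb_cells = embedding_model.encode(index_comments, convert_to_tensor=True)
emb_msg = embedding_model.encode(message, convert_to_tensor=True)
scores = util.cos_sim(emb_msg, emb_cells)[0]

# Keep the five best-scoring cells, re-sorted back into notebook order.
top_scores = heapq.nlargest(5, scores)
top_cells = [indexed_cells_list[i]
             for i in sorted(list(scores).index(s) for s in top_scores)]
print("\n".join(top_cells))
```

torch.topk(scores, k) would return the same indices more directly; the list(...).index(score) form used in the commit returns the first matching position, so cells with exactly equal scores collapse onto the same index.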
 
@@ -173,8 +151,8 @@ with gr.Blocks() as demo:
 output += char
 time.sleep(0.05)
 yield output
-# mkd = gr.Markdown('# ChatBot for Jupyter Notebook\n For more details: **link**')
-file = gr.File(interactive=True,height=35,container=False)
+
+file = gr.File(interactive=True,container=False)
 chatbot = gr.ChatInterface(fn=chat,fill_height=False,additional_inputs=[file],stop_btn='Stop Generation',
 description="[Read the Instructions here!](https://huggingface.co/spaces/noorulamean444/clma_try/blob/main/README.md)")
 
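
The final hunk keeps the streaming chat handler and the gr.ChatInterface wiring, dropping the commented-out Markdown header and the fixed height on the gr.File upload box. A minimal sketch of that wiring, with a character-by-character echo standing in for the real model call in app.py:

```python
import time

import gradio as gr

def chat(message, history, file):
    # Stand-in for the real model call: echo the message and stream it back.
    reply = f"You said: {message}" + (f" (attached: {file})" if file else "")
    output = ""
    for char in reply:
        output += char
        time.sleep(0.05)
        yield output  # yielding partial text makes ChatInterface stream the reply

with gr.Blocks() as demo:
    file = gr.File(interactive=True, container=False)
    gr.ChatInterface(fn=chat, fill_height=False, additional_inputs=[file],
                     stop_btn="Stop Generation")

demo.launch()
```

Because file is listed in additional_inputs, its current value is passed to chat as the third argument (or None when nothing has been uploaded).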
 
 