jobanpreet123 committed on
Commit
f6c931d
•
1 Parent(s): a7f44ae

changes summary added

Files changed (3)
  1. README.md +0 -13
  2. app.py +66 -9
  3. summary.py +12 -11
README.md DELETED
@@ -1,13 +0,0 @@
- ---
- title: Erginous Project
- emoji: 🚀
- colorFrom: blue
- colorTo: green
- sdk: streamlit
- sdk_version: 1.34.0
- app_file: app.py
- pinned: false
- license: apache-2.0
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
app.py CHANGED
@@ -1,6 +1,19 @@
  import streamlit as st
  from transcription import deepgram
  from summary import summarize
+ from langchain.callbacks.base import BaseCallbackHandler
+ from langchain_cohere import ChatCohere
+
+
+
+ class StreamHandler(BaseCallbackHandler):
+     def __init__(self, container, initial_text=""):
+         self.container = container
+         self.text = initial_text
+
+     def on_llm_new_token(self, token: str, **kwargs) -> None:
+         self.text += token
+         self.container.markdown(self.text)


  def main():
@@ -8,8 +21,8 @@ def main():

      if 'transcription' not in session_state:
          session_state.transcription = ""
-     if "summary" not in session_state:
-         session_state.summary=""
+     if 'summary' not in session_state:
+         session_state.summary = ""



@@ -34,15 +47,59 @@



+     with st.container(height=500 , border=True):
+         st.markdown(session_state.transcription)
+
+     st.download_button('Download Transcription', session_state.transcription , file_name="Transcription.txt")
+
+
+     if st.sidebar.toggle("Generate Summary"):
+         st.header("Summary of the meeting")
+         with st.container(height=500):
+
+             stream_handler = StreamHandler(st.empty())
+             llm = ChatCohere(temperature = 0 ,streaming=True ,model = "command-r-plus" , callbacks=[stream_handler])
+             summarize(session_state.transcription , llm)
+
+
+
+
+
+
+
+
+
+
+
+     # import clipboard
+
+     # if st.sidebar.toggle("Generate Summary"):
+     #     st.markdown("""
+     #     <style>
+     #     .big-font {
+     #         font-size:30px !important;
+     #     }
+     #     </style>
+     #     """, unsafe_allow_html=True)
+
+     #     st.markdown('<p class="big-font">Summary of the meeting</p>', unsafe_allow_html=True)
+     #     stream_handler = StreamHandler(st.empty())
+
+     #     llm = ChatCohere(temperature = 0 ,streaming=True ,model = "command-r-plus" , callbacks=[stream_handler])
+     #     data=summarize(session_state.transcription , llm)
+
+     #     summary_text = data.content
+     #     st.write(summary_text)
+
+     #     copy_button = st.button("Copy Summary to Clipboard")
+     #     if copy_button:
+     #         clipboard.copy(summary_text)
+     #         st.success("Summary copied to clipboard!")
+
+
+
+

-     transcription=st.text_area("Transcription",value=session_state.transcription, height=400)
-     if transcription:
-         st.download_button('Download Transcription', session_state.transcription , file_name="Transcription.txt")
-     if st.sidebar.toggle("Generate Summary"):
-         session_state.summary=summarize(session_state.transcription)
-         st.text_area("Summary" , value=session_state.summary , height=400)

  if __name__ == "__main__":
      main()

-
 
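Note for readers skimming the diff: the new StreamHandler is a LangChain callback that appends each generated token to a Streamlit placeholder, which is what makes the summary render incrementally while ChatCohere streams. Below is a minimal, self-contained sketch of that pattern (illustrative only, not part of this commit); it assumes streamlit, langchain and langchain-cohere are installed, that COHERE_API_KEY is set in the environment, and the prompt string and placeholder variable are invented.

# Minimal sketch of the streaming-callback pattern used above (not part of the commit).
import streamlit as st
from langchain.callbacks.base import BaseCallbackHandler
from langchain_cohere import ChatCohere

class StreamHandler(BaseCallbackHandler):
    """Accumulate tokens and re-render a Streamlit placeholder on every new token."""
    def __init__(self, container, initial_text=""):
        self.container = container   # e.g. the placeholder returned by st.empty()
        self.text = initial_text

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        # Called once per generated token because the model runs with streaming=True.
        self.text += token
        self.container.markdown(self.text)

placeholder = st.empty()
llm = ChatCohere(
    temperature=0,
    streaming=True,                  # required for on_llm_new_token to fire
    model="command-r-plus",
    callbacks=[StreamHandler(placeholder)],
)
llm.invoke("Summarize in one line: the team agreed to ship the beta on Friday.")  # invented prompt
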
summary.py CHANGED
@@ -1,19 +1,20 @@
- from langchain_groq import ChatGroq
+ from langchain.callbacks.base import BaseCallbackHandler
+ from langchain_cohere import ChatCohere
  from langchain.prompts import ChatPromptTemplate
- from langchain_core.output_parsers import StrOutputParser
- from dotenv import load_dotenv
+ import streamlit as st
+ from dotenv import load_dotenv

  load_dotenv()

- model = ChatGroq(temperature=0, model_name="mixtral-8x7b-32768") #mixtral-8x7b-32768


- def summarize(transcription):
-     template="""You are provided with transcription of a meeting between different people. Your task is to summarize this transcription
-     Transcription: {transcription}"""

+ def summarize(transcription , llm):
+     template="""Please provide a brief and concise summary of the key discussion points and outcomes from the meeting transcription. Ensure that the summary captures the main topics, decisions made, action items, and any important next steps or deadlines agreed upon.
+     The summary should be structured clearly and be easily understandable to those who were not present at the meeting.
+     Transcription: {transcription}"""
      prompt = ChatPromptTemplate.from_template(template)
-
-     chain = prompt | model | StrOutputParser()
-     summary=chain.invoke({"transcription":transcription})
-     return summary
+     chain = prompt | llm
+     response = chain.invoke({'transcription':transcription})
+     return response
+
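
Since summarize() now takes the model as a parameter, it can also be exercised outside the Streamlit app. A hedged usage sketch follows (illustrative only, not part of this commit): the transcription text is invented, and it assumes COHERE_API_KEY is available via the environment or the .env file that load_dotenv() reads. The chain returns a chat message, so the summary text is read from .content, the same attribute the commented-out app.py path uses.

# Illustrative usage of the reworked summarize() outside Streamlit (not part of the commit).
from langchain_cohere import ChatCohere
from summary import summarize

transcription = (
    "Alice: Let's move the release to Friday. "
    "Bob: Fine, I'll have the changelog updated by Thursday."
)  # made-up sample input

llm = ChatCohere(temperature=0, model="command-r-plus")  # no streaming callback needed here
response = summarize(transcription, llm)                 # returns the model's chat message
print(response.content)                                  # the generated summary text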