Update app.py
app.py CHANGED
@@ -68,15 +68,16 @@ def csv(segments):
         output += f"{segment['start']},{segment['end']},{segment['text']}\n"
     return output
 def transcribe(path,lang,size):
+    yield (None,[("","Transcribing Video...")])
     #if size != current_size:
     loaded_model = whisper.load_model(size)
     current_size = size
     results = loaded_model.transcribe(path, language=lang)
     subs = ".csv"
     if subs == "None":
-
+        yield results["text"],[("","Transcription Complete...")]
     elif subs == ".csv":
-
+        yield csv(results["segments"]),[("","Transcription Complete...")]


 def format_prompt(message, history):
@@ -227,8 +228,8 @@ def compress_data_og(c, instruct, history):



-def summarize(inp,history,report_check,sum_mem_check,data=None):
-
+def summarize(inp,history,mem_check,data=None):
+
     json_box=[]
     error_box=""
     json_out={}
@@ -236,7 +237,7 @@ def summarize(inp,history,report_check,sum_mem_check,data=None):
     if inp == "":
         inp = "Process this data"
     history.clear()
-    history = [(inp,"
+    history = [(inp,"Summarizing Transcription...")]
     yield "",history,error_box,json_box

     if data != "Error" and data != "" and data != None:
@@ -250,29 +251,24 @@ def summarize(inp,history,report_check,sum_mem_check,data=None):
             if i == " " or i=="," or i=="\n":
                 c +=1
         print (f'c:: {c}')
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            history.append((inp,rawp))
-        else:
-            rawp = out
-            history.clear()
-            history.append((inp,rawp))
+        json_out = compress_data(c,inp,out)
+        history = [(inp,"Generating Report...")]
+        yield "", history,error_box,json_out
+
+        out = str(json_out)
+        print (out)
+        if report_check:
+            rl = len(out)
+            print(f'rl:: {rl}')
+            c=1
+            for i in str(out):
+                if i == " " or i=="," or i=="\n":
+                    c +=1
+            print (f'c2:: {c}')
+            rawp = compress_data_og(c,inp,out)
+        history.clear()
+        history.append((inp,rawp))
+
         yield "", history,error_box,json_out
     else:
         rawp = "Provide a valid data source"
@@ -286,7 +282,7 @@ def clear_fn():
     return "",[(None,None)]

 with gr.Blocks() as app:
-    gr.HTML("""<center><h1>
+    gr.HTML("""<center><h1>Video Summarizer</h1><h3>Mixtral 8x7B + Whisper</h3>""")
     with gr.Row():
         with gr.Column():
             with gr.Row():
@@ -301,8 +297,7 @@ with gr.Blocks() as app:
             with gr.Column(scale=3):
                prompt=gr.Textbox(label = "Instructions (optional)")
             with gr.Column(scale=1):
-
-                sum_mem_check=gr.Radio(label="Output",choices=["Summary","Memory"])
+                mem_check=gr.Checkbox(label="Memory", value=False)
                 button=gr.Button()

     #models_dd=gr.Dropdown(choices=[m for m in return_list],interactive=True)
@@ -320,6 +315,6 @@ with gr.Blocks() as app:
     url_btn.click(dl,[inp_url,vid],vid)
     trans_btn.click(transcribe,[vid,lang,sz],trans)
     clear_btn.click(clear_fn,None,[prompt,chatbot])
-    go=button.click(summarize,[prompt,chatbot,
+    go=button.click(transcribe,[vid,lang,sz],[trans,chatbot]).then(summarize,[prompt,chatbot,mem_check,trans],[prompt,chatbot,e_box,json_out])
     stop_button.click(None,None,None,cancels=[go])
 app.queue(default_concurrency_limit=20).launch(show_api=False)