Omnibus committed on
Commit b53901e
Parent: 710ba56

Update app.py

Files changed (1)
  app.py  +27 -32
app.py CHANGED
@@ -68,15 +68,16 @@ def csv(segments):
         output += f"{segment['start']},{segment['end']},{segment['text']}\n"
     return output
 def transcribe(path,lang,size):
+    yield (None,[("","Transcribing Video...")])
     #if size != current_size:
     loaded_model = whisper.load_model(size)
     current_size = size
     results = loaded_model.transcribe(path, language=lang)
     subs = ".csv"
     if subs == "None":
-        return results["text"]
+        yield results["text"],[("","Transcription Complete...")]
     elif subs == ".csv":
-        return csv(results["segments"])
+        yield csv(results["segments"]),[("","Transcription Complete...")]


 def format_prompt(message, history):
@@ -227,8 +228,8 @@ def compress_data_og(c, instruct, history):



-def summarize(inp,history,report_check,sum_mem_check,data=None):
-
+def summarize(inp,history,mem_check,data=None):
+
     json_box=[]
     error_box=""
     json_out={}
@@ -236,7 +237,7 @@ def summarize(inp,history,report_check,sum_mem_check,data=None):
     if inp == "":
         inp = "Process this data"
     history.clear()
-    history = [(inp,"Working on it...")]
+    history = [(inp,"Summarizing Transcription...")]
     yield "",history,error_box,json_box

     if data != "Error" and data != "" and data != None:
@@ -250,29 +251,24 @@ def summarize(inp,history,report_check,sum_mem_check,data=None):
             if i == " " or i=="," or i=="\n":
                 c +=1
         print (f'c:: {c}')
-        if sum_mem_check=="Memory":
-            #save_memory(inp,out)
-            rawp = "Complete"
-        if sum_mem_check=="Summary":
-            json_out = compress_data(c,inp,out)
-
-            out = str(json_out)
-            print (out)
-            if report_check:
-                rl = len(out)
-                print(f'rl:: {rl}')
-                c=1
-                for i in str(out):
-                    if i == " " or i=="," or i=="\n":
-                        c +=1
-                print (f'c2:: {c}')
-                rawp = compress_data_og(c,inp,out)
-                history.clear()
-                history.append((inp,rawp))
-            else:
-                rawp = out
-                history.clear()
-                history.append((inp,rawp))
+        json_out = compress_data(c,inp,out)
+        history = [(inp,"Generating Report...")]
+        yield "", history,error_box,json_out
+
+        out = str(json_out)
+        print (out)
+        if report_check:
+            rl = len(out)
+            print(f'rl:: {rl}')
+            c=1
+            for i in str(out):
+                if i == " " or i=="," or i=="\n":
+                    c +=1
+            print (f'c2:: {c}')
+            rawp = compress_data_og(c,inp,out)
+            history.clear()
+            history.append((inp,rawp))
+
         yield "", history,error_box,json_out
     else:
         rawp = "Provide a valid data source"
@@ -286,7 +282,7 @@ def clear_fn():
     return "",[(None,None)]

 with gr.Blocks() as app:
-    gr.HTML("""<center><h1>Mixtral 8x7B TLDR Summarizer + Web</h1><h3>Summarize Data of unlimited length</h3>""")
+    gr.HTML("""<center><h1>Video Summarizer</h1><h3>Mixtral 8x7B + Whisper</h3>""")
     with gr.Row():
         with gr.Column():
             with gr.Row():
@@ -301,8 +297,7 @@ with gr.Blocks() as app:
             with gr.Column(scale=3):
                 prompt=gr.Textbox(label = "Instructions (optional)")
             with gr.Column(scale=1):
-                report_check=gr.Checkbox(label="Return Report", value=True)
-                sum_mem_check=gr.Radio(label="Output",choices=["Summary","Memory"])
+                mem_check=gr.Checkbox(label="Memory", value=False)
             button=gr.Button()

     #models_dd=gr.Dropdown(choices=[m for m in return_list],interactive=True)
@@ -320,6 +315,6 @@ with gr.Blocks() as app:
     url_btn.click(dl,[inp_url,vid],vid)
     trans_btn.click(transcribe,[vid,lang,sz],trans)
     clear_btn.click(clear_fn,None,[prompt,chatbot])
-    go=button.click(summarize,[prompt,chatbot,report_check,sum_mem_check,trans],[prompt,chatbot,e_box,json_out])
+    go=button.click(transcribe,[vid,lang,sz],[trans,chatbot]).then(summarize,[prompt,chatbot,mem_check,trans],[prompt,chatbot,e_box,json_out])
     stop_button.click(None,None,None,cancels=[go])
 app.queue(default_concurrency_limit=20).launch(show_api=False)
 
68
  output += f"{segment['start']},{segment['end']},{segment['text']}\n"
69
  return output
70
  def transcribe(path,lang,size):
71
+ yield (None,[("","Transcribing Video...")])
72
  #if size != current_size:
73
  loaded_model = whisper.load_model(size)
74
  current_size = size
75
  results = loaded_model.transcribe(path, language=lang)
76
  subs = ".csv"
77
  if subs == "None":
78
+ yield results["text"],[("","Transcription Complete...")]
79
  elif subs == ".csv":
80
+ yield csv(results["segments"]),[("","Transcription Complete...")]
81
 
82
 
83
  def format_prompt(message, history):
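For context, the transcribe() hunk (old lines 68-82) swaps return for yield so the chatbot can show progress while Whisper runs. Below is a minimal standalone sketch of that generator pattern: whisper.load_model and model.transcribe are the openai-whisper calls used in the file, while the function name and the status tuples are illustrative.

```python
import whisper  # openai-whisper

def transcribe_with_status(path, lang, size):
    # Generator wired to [transcript, chatbot] outputs: the first yield only
    # updates the chatbot so the user sees progress before the slow model call.
    yield None, [("", "Transcribing Video...")]

    model = whisper.load_model(size)                  # e.g. "base", "small", "medium"
    result = model.transcribe(path, language=lang)    # dict with "text" and "segments"

    # Same flattening as the csv() helper above: one "start,end,text" row per segment.
    rows = "".join(f"{s['start']},{s['end']},{s['text']}\n" for s in result["segments"])
    yield rows, [("", "Transcription Complete...")]
```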
 
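The summarize() hunks (old lines 227-278) drop the Summary/Memory branching and always compress once to JSON, then optionally compress the stringified JSON again into a report, yielding interim chatbot updates along the way. A rough sketch of that flow follows; compress_data and compress_data_og are this app's own helpers (signatures inferred from the diff, stubbed here so the sketch runs), and since the committed body still reads report_check even though it is no longer a parameter or a UI component, the sketch takes that flag explicitly.

```python
def rough_size(text):
    # Cheap size proxy used in the diff: start at 1 and count spaces, commas and newlines.
    c = 1
    for ch in str(text):
        if ch in (" ", ",", "\n"):
            c += 1
    return c

def compress_data(c, instruct, text):
    # Stand-in for the app's chunked-summary helper.
    return {"chunks": c, "summary": str(text)[:200]}

def compress_data_og(c, instruct, text):
    # Stand-in for the app's report helper.
    return f"Report ({c} chunks): {str(text)[:200]}"

def summarize(inp, history, mem_check, data=None, report=True):
    # Generator wired to [prompt, chatbot, e_box, json_out] outputs.
    # mem_check is kept to mirror the UI wiring; the Memory path was removed in this commit.
    error_box = ""
    json_out = {}
    if inp == "":
        inp = "Process this data"
    history = [(inp, "Summarizing Transcription...")]
    yield "", history, error_box, json_out

    if data not in (None, "", "Error"):
        out = str(data)
        json_out = compress_data(rough_size(out), inp, out)
        history = [(inp, "Generating Report...")]
        yield "", history, error_box, json_out

        if report:
            out = str(json_out)
            rawp = compress_data_og(rough_size(out), inp, out)
            history = [(inp, rawp)]
        yield "", history, error_box, json_out
    else:
        history = [(inp, "Provide a valid data source")]
        yield "", history, error_box, json_out
```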
 
 
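Finally, the event wiring (old lines 320-325) now chains transcription and summarization into one job: .click(...).then(...) runs transcribe first and feeds its outputs into summarize, while cancels=[go] lets a stop button abort the chained step. A standalone sketch of that wiring, assuming Gradio 4.x; .then(), cancels=, and queue(default_concurrency_limit=...) are standard Gradio APIs, and the two handlers are stand-ins for the app's transcribe and summarize.

```python
import time
import gradio as gr

def fake_transcribe(video, lang, size):
    # Stand-in for the Whisper step.
    yield None, [("", "Transcribing Video...")]
    time.sleep(2)
    yield "0.0,2.0,hello world\n", [("", "Transcription Complete...")]

def fake_summarize(prompt, history, mem_check, transcript):
    # Stand-in for the LLM step.
    user = prompt or "Process this data"
    yield "", [(user, "Summarizing Transcription...")], "", {}
    time.sleep(2)
    yield "", [(user, f"Report over {len(transcript)} chars")], "", {"ok": True}

with gr.Blocks() as app:
    vid = gr.Video()
    lang = gr.Textbox(value="en", label="Language")
    sz = gr.Textbox(value="base", label="Model size")
    trans = gr.Textbox(label="Transcript")
    prompt = gr.Textbox(label="Instructions (optional)")
    mem_check = gr.Checkbox(label="Memory", value=False)
    chatbot = gr.Chatbot()
    e_box = gr.Textbox(label="Errors")
    json_out = gr.JSON()
    button = gr.Button("Go")
    stop_button = gr.Button("Stop")

    # Transcribe, then hand the transcript and chat history to the summarizer.
    go = (
        button.click(fake_transcribe, [vid, lang, sz], [trans, chatbot])
              .then(fake_summarize, [prompt, chatbot, mem_check, trans],
                    [prompt, chatbot, e_box, json_out])
    )

    # A no-op event used only to cancel the chained job, mirroring the diff's wiring.
    stop_button.click(None, None, None, cancels=[go])

app.queue(default_concurrency_limit=20).launch(show_api=False)
```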