PierreBrunelle committed on
Commit
da368c4
·
verified ·
1 Parent(s): 1cc4b9c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -16
app.py CHANGED
@@ -5,25 +5,16 @@ from pixeltable.iterators import FrameIterator
5
  import PIL.Image
6
  import os
7
 
8
- # Embedding functions
9
- @pxt.expr_udf
10
- def embed_image(img: PIL.Image.Image):
11
- return clip_image(img, model_id='openai/clip-vit-base-patch32')
12
-
13
- @pxt.expr_udf
14
- def str_embed(s: str):
15
- return clip_text(s, model_id='openai/clip-vit-base-patch32')
16
-
17
  # Process video and create index
18
  def process_video(video_file, progress=gr.Progress()):
19
-
20
  progress(0, desc="Initializing...")
21
 
22
  # Pixeltable setup
23
  pxt.drop_dir('video_search', force=True)
24
  pxt.create_dir('video_search')
25
 
26
- video_table = pxt.create_table('video_search.videos', {'video': pxt.VideoType()})
 
27
 
28
  frames_view = pxt.create_view(
29
  'video_search.frames',
@@ -35,14 +26,18 @@ def process_video(video_file, progress=gr.Progress()):
35
  video_table.insert([{'video': video_file.name}])
36
 
37
  progress(0.4, desc="Creating embedding index...")
38
- frames_view.add_embedding_index('frame', string_embed=str_embed, image_embed=embed_image)
 
 
 
 
 
39
 
40
  progress(1.0, desc="Processing complete")
41
- return "Good news! Your video has been processed. Easily find the moments you need by searching with text or images."
42
 
43
  # Perform similarity search
44
  def similarity_search(query, search_type, num_results, progress=gr.Progress()):
45
-
46
  frames_view = pxt.get_table('video_search.frames')
47
 
48
  progress(0.5, desc="Performing search...")
@@ -54,9 +49,8 @@ def similarity_search(query, search_type, num_results, progress=gr.Progress()):
54
  results = frames_view.order_by(sim, asc=False).limit(num_results).select(frames_view.frame, sim=sim).collect()
55
 
56
  progress(1.0, desc="Search complete")
57
-
58
  return [row['frame'] for row in results]
59
-
60
  # Gradio interface
61
  with gr.Blocks(theme=gr.themes.Base()) as demo:
62
  gr.Markdown(
 
5
  import PIL.Image
6
  import os
7
 
 
 
 
 
 
 
 
 
 
8
  # Process video and create index
9
  def process_video(video_file, progress=gr.Progress()):
 
10
  progress(0, desc="Initializing...")
11
 
12
  # Pixeltable setup
13
  pxt.drop_dir('video_search', force=True)
14
  pxt.create_dir('video_search')
15
 
16
+ # Update type declaration to use simpler syntax
17
+ video_table = pxt.create_table('video_search.videos', {'video': pxt.Video})
18
 
19
  frames_view = pxt.create_view(
20
  'video_search.frames',
 
26
  video_table.insert([{'video': video_file.name}])
27
 
28
  progress(0.4, desc="Creating embedding index...")
29
+ # Updated embedding pattern using .using()
30
+ frames_view.add_embedding_index(
31
+ 'frame',
32
+ string_embed=clip_text.using(model_id='openai/clip-vit-base-patch32'),
33
+ image_embed=clip_image.using(model_id='openai/clip-vit-base-patch32')
34
+ )
35
 
36
  progress(1.0, desc="Processing complete")
37
+ return "Good news! Your video has been processed. Easily find the moments you need by searching with text or images."
38
 
39
  # Perform similarity search
40
  def similarity_search(query, search_type, num_results, progress=gr.Progress()):
 
41
  frames_view = pxt.get_table('video_search.frames')
42
 
43
  progress(0.5, desc="Performing search...")
 
49
  results = frames_view.order_by(sim, asc=False).limit(num_results).select(frames_view.frame, sim=sim).collect()
50
 
51
  progress(1.0, desc="Search complete")
 
52
  return [row['frame'] for row in results]
53
+
54
  # Gradio interface
55
  with gr.Blocks(theme=gr.themes.Base()) as demo:
56
  gr.Markdown(