Update app.py
app.py
CHANGED
@@ -1,38 +1,96 @@
+# import cv2
+# import streamlit as st
+# from streamlit_webrtc import webrtc_streamer, VideoHTMLAttributes
+# import numpy as np
+# import av
+
+# st.title("OpenCV Filters on Video Stream")
+
+# filter = "none"
+
+
+# def transform(frame: av.VideoFrame):
+#     img = frame.to_ndarray(format="bgr24")
+
+#     if filter == "blur":
+#         img = cv2.GaussianBlur(img, (21, 21), 0)
+#     elif filter == "canny":
+#         img = cv2.cvtColor(cv2.Canny(img, 100, 200), cv2.COLOR_GRAY2BGR)
+#     elif filter == "grayscale":
+#         # We convert the image twice because the first conversion returns a 2D array.
+#         # the second conversion turns it back to a 3D array.
+#         img = cv2.cvtColor(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR)
+#     elif filter == "sepia":
+#         kernel = np.array(
+#             [[0.272, 0.534, 0.131], [0.349, 0.686, 0.168], [0.393, 0.769, 0.189]]
+#         )
+#         img = cv2.transform(img, kernel)
+#     elif filter == "invert":
+#         img = cv2.bitwise_not(img)
+#     elif filter == "none":
+#         pass
+
+#     return av.VideoFrame.from_ndarray(img, format="bgr24")
+
+
+# col1, col2, col3, col4, col5, col6 = st.columns([1, 1, 1, 1, 1, 1])
+
+# with col1:
+#     if st.button("None"):
+#         filter = "none"
+# with col2:
+#     if st.button("Blur"):
+#         filter = "blur"
+# with col3:
+#     if st.button("Grayscale"):
+#         filter = "grayscale"
+# with col4:
+#     if st.button("Sepia"):
+#         filter = "sepia"
+# with col5:
+#     if st.button("Canny"):
+#         filter = "canny"
+# with col6:
+#     if st.button("Invert"):
+#         filter = "invert"
+
+
+# webrtc_streamer(
+#     key="streamer",
+#     video_frame_callback=transform,
+#     sendback_audio=False
+# )
+
 import cv2
 import streamlit as st
-from streamlit_webrtc import webrtc_streamer
+from streamlit_webrtc import webrtc_streamer
 import numpy as np
 import av
 
 st.title("OpenCV Filters on Video Stream")
 
+# Define the filter state variable
 filter = "none"
 
-
 def transform(frame: av.VideoFrame):
     img = frame.to_ndarray(format="bgr24")
 
+    # Apply the selected filter
     if filter == "blur":
         img = cv2.GaussianBlur(img, (21, 21), 0)
     elif filter == "canny":
         img = cv2.cvtColor(cv2.Canny(img, 100, 200), cv2.COLOR_GRAY2BGR)
     elif filter == "grayscale":
-        # We convert the image twice because the first conversion returns a 2D array.
-        # the second conversion turns it back to a 3D array.
         img = cv2.cvtColor(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR)
     elif filter == "sepia":
-        kernel = np.array(
-            [[0.272, 0.534, 0.131], [0.349, 0.686, 0.168], [0.393, 0.769, 0.189]]
-        )
+        kernel = np.array([[0.272, 0.534, 0.131], [0.349, 0.686, 0.168], [0.393, 0.769, 0.189]])
         img = cv2.transform(img, kernel)
     elif filter == "invert":
         img = cv2.bitwise_not(img)
-    elif filter == "none":
-        pass
-
+
     return av.VideoFrame.from_ndarray(img, format="bgr24")
 
-
+# Streamlit buttons to choose filters
 col1, col2, col3, col4, col5, col6 = st.columns([1, 1, 1, 1, 1, 1])
 
 with col1:
@@ -54,9 +112,10 @@ with col6:
     if st.button("Invert"):
         filter = "invert"
 
-
+# Display the video stream
 webrtc_streamer(
     key="streamer",
     video_frame_callback=transform,
     sendback_audio=False
-)
+)
+
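As a side note, here is a minimal sketch (not part of the Space) of what cv2.transform does with the 3x3 sepia kernel used in app.py: each BGR pixel is treated as a vector, multiplied by the kernel, and saturated back to the uint8 range. The pixel value and printed result below are illustrative only.

import cv2
import numpy as np

# Sepia kernel from app.py: each output channel is a weighted sum of the input B, G, R values
kernel = np.array(
    [[0.272, 0.534, 0.131], [0.349, 0.686, 0.168], [0.393, 0.769, 0.189]]
)

# A single mid-gray BGR pixel as a 1x1 uint8 image (the same dtype frame.to_ndarray() yields)
pixel = np.full((1, 1, 3), 128, dtype=np.uint8)

# cv2.transform multiplies every pixel vector by the kernel and saturates the result to uint8
print(cv2.transform(pixel, kernel))  # approximately [[[120 154 173]]], a warm sepia tone

To try the app itself locally, installing streamlit, streamlit-webrtc, opencv-python, and numpy and then running "streamlit run app.py" should be enough.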