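"""Track a cricket ball across webcam frames for a simple LBW review.

The script detects the ball by colour, records its normalised trajectory,
estimates the pitching and impact points, smooths the path with a spline,
and posts the result as JSON to the analyzer endpoint at the bottom of the file.
"""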
import cv2
import numpy as np
import requests
from scipy.interpolate import splprep, splev

# Capture from the default camera; additional VideoCapture objects can be
# appended here if more camera angles are available.
caps = [cv2.VideoCapture(0)]


def smooth_trajectory(points):
    """Fit a smoothing spline through the tracked points and resample it evenly."""
    # splprep's default cubic spline (k=3) needs more points than the spline degree.
    if len(points) < 4:
        return points
    x = [p["x"] for p in points]
    y = [p["y"] for p in points]
    tck, u = splprep([x, y], s=0)
    u_new = np.linspace(0, 1, 50)
    x_new, y_new = splev(u_new, tck)
    return [{"x": float(px), "y": float(py)} for px, py in zip(x_new, y_new)]


def process_frame(frame):
    """Return the centre of the largest red blob in the frame, or (None, None)."""
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    # Threshold on the lower red hue band; a second range near hue 170-180 could be
    # OR-ed in if the ball colour wraps around the hue axis.
    mask = cv2.inRange(hsv, (0, 120, 70), (10, 255, 255))
    contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    if contours:
        c = max(contours, key=cv2.contourArea)
        x, y, w, h = cv2.boundingRect(c)
        return x + w / 2, y + h / 2
    return None, None


# Tracking state.
actual_path = []        # normalised ball positions seen so far
y_positions = []        # normalised y history used for pitching/impact detection
pitching_detected = False
impact_detected = False
last_point = None
frame_count = 0
spin = 0                # placeholder; spin estimation is not implemented in this script

while True:
    # Grab one frame from each configured camera.
    frames = []
    for cap in caps:
        ret, frame = cap.read()
        if ret:
            frames.append(frame)

    if not frames:
        break

    # Only the first camera's frame is used for tracking.
    frame = frames[0]
    center_x, center_y = process_frame(frame)
    if center_x is not None:
        # Normalise coordinates to the actual frame size.
        frame_h, frame_w = frame.shape[:2]
        norm_x = center_x / frame_w
        norm_y = center_y / frame_h
        current_point = (norm_x, norm_y)

        # Record the point only when the detection has actually moved.
        if last_point != current_point:
            actual_path.append({"x": norm_x, "y": norm_y})
            y_positions.append(norm_y)
            last_point = current_point
    # Pitching: the ball starts to rise in image coordinates (y decreasing)
    # for two consecutive samples, i.e. just after the bounce.
    if len(y_positions) > 2 and not pitching_detected:
        if y_positions[-1] < y_positions[-2] < y_positions[-3]:
            pitching_detected = True
            pitching_x = actual_path[-2]["x"]
            pitching_y = actual_path[-2]["y"]

    # Impact: a sharp drop in vertical speed suggests the ball has hit the pad or bat.
    if len(y_positions) > 2 and not impact_detected:
        speed_current = abs(y_positions[-1] - y_positions[-2])
        speed_prev = abs(y_positions[-2] - y_positions[-3])
        if speed_current < speed_prev * 0.3:
            impact_detected = True
            impact_x = actual_path[-1]["x"]
            impact_y = actual_path[-1]["y"]
    frame_count += 1
    # Stop once an impact is seen or after a fixed number of frames.
    if impact_detected or frame_count > 50:
        break

    cv2.imshow('Frame', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

for cap in caps:
    cap.release()
cv2.destroyAllWindows()

if not actual_path:
    print("No ball detected")
    exit()

# Fall back to sensible defaults when the events were not detected.
if not pitching_detected:
    pitching_x = actual_path[len(actual_path) // 2]["x"]
    pitching_y = actual_path[len(actual_path) // 2]["y"]

if not impact_detected:
    impact_x = actual_path[-1]["x"]
    impact_y = actual_path[-1]["y"]

actual_path = smooth_trajectory(actual_path)
# Project the post-impact path straight down the frame; the lateral offset
# uses the spin value, which is a placeholder of zero in this script.
projected_path = [
    {"x": impact_x, "y": impact_y},
    {"x": impact_x + spin * 0.1, "y": 1.0},
]

data = {
    'actual_path': actual_path,
    'projected_path': projected_path,
    'pitching': {'x': pitching_x, 'y': pitching_y},
    'impact': {'x': impact_x, 'y': impact_y},
    'speed': frame_count / 30 * 0.5,  # crude estimate from the frame count, not a calibrated speed
    'spin': spin,
}

# Send the tracking data to the hosted analyzer (replace the placeholder username in the URL).
response = requests.post('https://your-username-cricket-lbw-analyzer.hf.space/analyze_data', json=data)
print(response.json())