Update app.py
app.py
CHANGED
@@ -10,8 +10,8 @@ import gradio as gr
 MODEL_PATH = "setosys_yolov12x.pt"
 model = YOLO(MODEL_PATH)
 
-# COCO dataset class ID for trucks
-TRUCK_CLASS_ID = 7  # "truck"
+# COCO dataset class ID for people
+PERSON_CLASS_ID = 0  # "person"
 
 # Initialize SORT tracker
 tracker = Sort()
@@ -38,15 +38,15 @@ def determine_time_interval(video_filename):
     print("No keyword match, using default interval: 5")  # Debugging
     return 5  # Default interval
 
-def count_unique_trucks(video_path):
-    """ Counts unique trucks in a video using YOLOv12x and SORT tracking. """
+def count_unique_people(video_path):
+    """ Counts unique people in a video using YOLOv12x and SORT tracking. """
     cap = cv2.VideoCapture(video_path)
     if not cap.isOpened():
         return {"Error": "Unable to open video file."}
 
     # Reset variables at the start of each analysis
-    unique_truck_ids = set()
-    truck_history = {}
+    unique_people_ids = set()
+    people_history = {}
 
     # Get FPS of the video
     fps = int(cap.get(cv2.CAP_PROP_FPS))
@@ -87,7 +87,7 @@ def count_unique_trucks(video_path):
             confidence = float(box.conf.item())  # Get confidence score
 
-            # Track only trucks
-            if class_id == TRUCK_CLASS_ID and confidence > CONFIDENCE_THRESHOLD:
+            # Track only people
+            if class_id == PERSON_CLASS_ID and confidence > CONFIDENCE_THRESHOLD:
                 x1, y1, x2, y2 = map(int, box.xyxy[0])  # Get bounding box
                 detections.append([x1, y1, x2, y2, confidence])
 
@@ -104,34 +104,34 @@ def count_unique_trucks(video_path):
         print(f"Frame {frame_count}: Tracked Objects -> {tracked_objects}")
 
         for obj in tracked_objects:
-            truck_id = int(obj[4])  # Unique ID assigned by SORT
+            people_id = int(obj[4])  # Unique ID assigned by SORT
             x1, y1, x2, y2 = obj[:4]  # Get the bounding box coordinates
 
-            truck_center = (x1 + x2) / 2, (y1 + y2) / 2  # Calculate truck center
+            people_center = (x1 + x2) / 2, (y1 + y2) / 2  # Calculate the person's center
 
-            # If the truck is already in history, check movement distance
-            if truck_id in truck_history:
-                last_position = truck_history[truck_id]["position"]
-                distance = np.linalg.norm(np.array(truck_center) - np.array(last_position))
+            # If the person is already in history, check movement distance
+            if people_id in people_history:
+                last_position = people_history[people_id]["position"]
+                distance = np.linalg.norm(np.array(people_center) - np.array(last_position))
 
                 if distance > DISTANCE_THRESHOLD:
-                    unique_truck_ids.add(truck_id)  # Add only if moved significantly
+                    unique_people_ids.add(people_id)  # Add only if moved significantly
 
             else:
-                # If the truck is not in history, add it
-                truck_history[truck_id] = {
+                # If the person is not in history, add it
+                people_history[people_id] = {
                     "frame_count": frame_count,
-                    "position": truck_center
+                    "position": people_center
                 }
-                unique_truck_ids.add(truck_id)
+                unique_people_ids.add(people_id)
 
     cap.release()
-    return {"Total Unique Trucks": len(unique_truck_ids)}
+    return {"Total Unique People": len(unique_people_ids)}
 
 
 # Gradio UI function
 def analyze_video(video_file):
-    result = count_unique_trucks(video_file)
+    result = count_unique_people(video_file)
     return "\n".join([f"{key}: {value}" for key, value in result.items()])
 
 # Define Gradio interface
@@ -139,8 +139,8 @@ iface = gr.Interface(
     fn=analyze_video,
     inputs=gr.Video(label="Upload Video"),
     outputs=gr.Textbox(label="Analysis Result"),
-    title="YOLOv12x Unique Truck Counter",
-    description="Upload a video to count unique trucks using YOLOv12x and SORT tracking."
+    title="YOLOv12x Unique People Counter",
+    description="Upload a video to count unique people using YOLOv12x and SORT tracking."
 )
 
 # Launch the Gradio app
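For reference, below is a standalone sketch of the detection-to-track handoff that the tracking loop in this diff relies on. It is not part of the commit and it assumes the widely used abewley/sort implementation of SORT (imported as from sort import Sort, which the Space's tracker = Sort() call suggests); the sample boxes are made up purely for illustration. Sort.update() takes an (N, 5) array of [x1, y1, x2, y2, score] rows and returns one row per active track as [x1, y1, x2, y2, track_id], which is why app.py reads the ID with int(obj[4]).

import numpy as np
from sort import Sort   # assumption: the abewley/sort tracker this Space appears to use

tracker = Sort()

# Two illustrative detections for a single frame: [x1, y1, x2, y2, confidence]
detections = np.array([
    [100.0, 120.0, 180.0, 300.0, 0.91],
    [400.0, 110.0, 470.0, 290.0, 0.87],
])

# update() returns one row per active track: [x1, y1, x2, y2, track_id]
for obj in tracker.update(detections):
    x1, y1, x2, y2 = obj[:4]
    track_id = int(obj[4])                        # same indexing as people_id in app.py
    center = ((x1 + x2) / 2, (y1 + y2) / 2)       # box centre used for the distance check
    print(track_id, center)

Across subsequent frames SORT keeps assigning the same track_id to a detection it can associate with an existing track, which is what makes the unique-ID counting in count_unique_people meaningful.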