Upload 20 files

- .vidgear/webgear/static/css/custom.css +198 -0
- .vidgear/webgear/static/img/deep-learning-svgrepo-com.svg +2 -0
- .vidgear/webgear/static/img/favicon-32x32.png +0 -0
- .vidgear/webgear/static/img/switch-camera-svgrepo-com.svg +18 -0
- .vidgear/webgear/static/img/video-streaming-outline-svgrepo-com.svg +5 -0
- .vidgear/webgear/static/js/custom.js +21 -0
- .vidgear/webgear/static/js/htmx.min.js +1 -0
- .vidgear/webgear/templates/404.html +5 -0
- .vidgear/webgear/templates/500.html +5 -0
- .vidgear/webgear/templates/base.html +33 -0
- .vidgear/webgear/templates/index.html +148 -0
- .vidgear/webgear/templates/partials/ack.html +5 -0
- .vidgear/webgear/templates/partials/camera_streams.html +53 -0
- .vidgear/webgear/templates/partials/yolo_models.html +31 -0
- helper.py +209 -0
- requirements.txt +10 -0
- ultralytics_solutions_modified/__init__.py +1 -0
- ultralytics_solutions_modified/object_counter.py +365 -0
- ultralytics_solutions_modified/speed_estimation.py +235 -0
- webapp.py +1054 -0
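Taken together, the 20 files form a WebGear/Starlette traffic-monitoring demo: custom WebGear templates and static assets under .vidgear/webgear/, modified Ultralytics solutions for object counting and speed estimation, and the application entry point in webapp.py. The webapp.py source is not reproduced below, but the .vidgear/webgear/ layout is exactly what vidgear's WebGear looks for when its custom_data_location option points at the repository root. A minimal sketch of that assumed wiring (the source path, port, and option values here are illustrative, not taken from this commit):

# Sketch only: assumes webapp.py points WebGear at this repo's .vidgear/ folder.
import uvicorn
from vidgear.gears.asyncio import WebGear

# With custom_data_location="./", WebGear serves templates and static files
# from ./.vidgear/webgear/ -- i.e. the files added in this commit -- instead
# of its auto-generated defaults.
options = {"custom_data_location": "./"}

web = WebGear(source=0, logging=True, **options)  # source value is illustrative

if __name__ == "__main__":
    uvicorn.run(web(), host="0.0.0.0", port=7860)  # 7860 is the usual HF Spaces port
    web.shutdown()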
.vidgear/webgear/static/css/custom.css
ADDED
@@ -0,0 +1,198 @@
/*--
===============================================
vidgear library source-code is deployed under the Apache 2.0 License:
Copyright (c) 2019 Abhishek Thakur(@abhiTronix) <abhi.una12@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================
--*/

/* Handles Responive image tags (from bootstrap) */
.img-fluid {
    max-width: 100%;
    height: auto;
}

img {
    vertical-align: middle;
    border-style: none;
}

.mx-auto {
    margin-right: auto !important;
}

.mx-auto {
    margin-left: auto !important;
}

.d-block {
    display: block !important;
}

.rounded {
    border-radius: 0.25rem !important;
}

/* ends */

/*
Main
*/
.bodycontainer {
    -ms-overflow-style: none;
    /* Internet Explorer 10+ */
    scrollbar-width: none;
    /* Firefox */
}

.bodycontainer::-webkit-scrollbar {
    display: none;
    /* Safari and Chrome */
}

h1 {
    text-align: center;
    color: #fff;
    font-weight: 400;
    margin-bottom: 0.5rem;
    text-transform: Uppercase;
    line-height: 1.2;
    letter-spacing: 2px;
}

body {
    font-family: 'Open Sans', sans-serif;
    background: #011B56;
    background-attachment: scroll;
    background-size: auto;
    background-attachment: fixed;
    -webkit-background-size: cover;
    -moz-background-size: cover;
    background-size: cover;
}

.copy-enhance p {
    font-size: 0.9em;
    color: #fff;
    line-height: 1.8em;
    letter-spacing: 1px;
}

p {
    text-align: center;
    color: #fff;
    font-weight: 300;
}

a {
    color: #fff;
    font-weight: 500;
}

a:link {
    text-decoration: none;
}

a:hover {
    color: yellow;
}

.copy-enhance p a {
    color: #fff;
    -webkit-transition: 0.5s all;
    -moz-transition: 0.5s all;
    -o-transition: 0.5s all;
    -ms-transition: 0.5s all;
    transition: 0.5s all;
}

/*
Handle mobile screens
*/
/* If the screen size is 601px wide or more, set the font-size of <div> to 80px */
@media screen and (min-width: 601px) {
    h1 {
        font-size: 3.5rem;
    }
}

/* If the screen size is 600px wide or less, set the font-size of <div> to 30px */
@media screen and (max-width: 600px) {
    h1 {
        font-size: 2.0rem;
    }
}

/*
Glow text
*/
.glow {
    color: #fff;
    text-align: center;
    -webkit-animation: glow 1s ease-in-out infinite alternate;
    -moz-animation: glow 1s ease-in-out infinite alternate;
    animation: glow 1s ease-in-out infinite alternate;
}

@-webkit-keyframes glow {
    from {
        text-shadow: 0 0 5px #fff, 0 0 10px #fff, 0 0 20px #e60073, 0 0 30px #e60073, 0 0 40px #e60073, 0 0 50px #e60073, 0 0 60px #e60073;
    }

    to {
        text-shadow: 0 0 10px #fff, 0 0 20px #ff4da6, 0 0 30px #ff4da6, 0 0 40px #ff4da6, 0 0 50px #ff4da6, 0 0 60px #ff4da6, 0 0 70px #ff4da6;
    }
}

/*
Social buttons
*/
#lab_social_icon_footer {
    text-align: center;
    display: block;
    font-size: 12px;
    transform: translateZ(0);
}

#lab_social_icon_footer a {
    color: #f8f8ff;
}

#lab_social_icon_footer .social:hover {
    -webkit-transform: scale(1.1);
    -moz-transform: scale(1.1);
    -o-transform: scale(1.1);
}

#lab_social_icon_footer .social {
    -webkit-transform: scale(0.8);
    /* Browser Variations: */
    -moz-transform: scale(0.8);
    -o-transform: scale(0.8);
    -webkit-transition-duration: 0.5s;
    -moz-transition-duration: 0.5s;
    -o-transition-duration: 0.5s;
}

/*
Multicoloured Hover Variations
*/
#lab_social_icon_footer #social-hp:hover {
    color: #79b8ff;
}

#lab_social_icon_footer #social-github:hover {
    color: #d1d5da;
}

#lab_social_icon_footer #social-gitter:hover {
    color: #ffff00;
}
.vidgear/webgear/static/img/deep-learning-svgrepo-com.svg
ADDED
.vidgear/webgear/static/img/favicon-32x32.png
ADDED
.vidgear/webgear/static/img/switch-camera-svgrepo-com.svg
ADDED
.vidgear/webgear/static/img/video-streaming-outline-svgrepo-com.svg
ADDED
.vidgear/webgear/static/js/custom.js
ADDED
@@ -0,0 +1,21 @@
/*!
===============================================
vidgear library source-code is deployed under the Apache 2.0 License:
Copyright (c) 2019 Abhishek Thakur(@abhiTronix) <abhi.una12@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================
*/

/*handles fullscreen workflow*/
function toggleFullScreen(id) {
    const element = document.getElementById(id);
    screenfull.toggle(element)
}
.vidgear/webgear/static/js/htmx.min.js
ADDED
@@ -0,0 +1 @@
(function(e,t){if(typeof define==="function"&&define.amd){define([],t)}else{e.htmx=t()}})(typeof self!=="undefined"?self:this,function(){return function(){"use strict";var U={onLoad:t,process:ct,on:M,off:D,trigger:$,ajax:er,find:C,findAll:R,closest:H,values:function(e,t){var r=Mt(e,t||"post");return r.values},remove:O,addClass:L,removeClass:q,toggleClass:A,takeClass:T,defineExtension:or,removeExtension:ar,logAll:E,logger:null,config:{historyEnabled:true,historyCacheSize:10,refreshOnHistoryMiss:false,defaultSwapStyle:"innerHTML",defaultSwapDelay:0,defaultSettleDelay:20,includeIndicatorStyles:true,indicatorClass:"htmx-indicator",requestClass:"htmx-request",addedClass:"htmx-added",settlingClass:"htmx-settling",swappingClass:"htmx-swapping",allowEval:true,inlineScriptNonce:"",attributesToSettle:["class","style","width","height"],withCredentials:false,timeout:0,wsReconnectDelay:"full-jitter",disableSelector:"[hx-disable], [data-hx-disable]",useTemplateFragments:false,scrollBehavior:"smooth",defaultFocusScroll:false},parseInterval:v,_:e,createEventSource:function(e){return new EventSource(e,{withCredentials:true})},createWebSocket:function(e){return new WebSocket(e,[])},version:"1.7.0"};var r={bodyContains:Y,filterValues:jt,hasAttribute:s,getAttributeValue:V,getClosestMatch:h,getExpressionVars:Gt,getHeaders:Xt,getInputValues:Mt,getInternalData:_,getSwapSpecification:Ut,getTriggerSpecs:ke,getTarget:ne,makeFragment:g,mergeObjects:Q,makeSettleInfo:zt,oobSwap:B,selectAndSwap:we,settleImmediately:Ct,shouldCancel:Pe,triggerEvent:$,triggerErrorEvent:J,withExtensions:gt};var n=["get","post","put","delete","patch"];var i=n.map(function(e){return"[hx-"+e+"], [data-hx-"+e+"]"}).join(", ");function v(e){if(e==undefined){return undefined}if(e.slice(-2)=="ms"){return parseFloat(e.slice(0,-2))||undefined}if(e.slice(-1)=="s"){return parseFloat(e.slice(0,-1))*1e3||undefined}return parseFloat(e)||undefined}function f(e,t){return e.getAttribute&&e.getAttribute(t)}function s(e,t){return e.hasAttribute&&(e.hasAttribute(t)||e.hasAttribute("data-"+t))}function V(e,t){return f(e,t)||f(e,"data-"+t)}function u(e){return e.parentElement}function z(){return document}function h(e,t){if(t(e)){return e}else if(u(e)){return h(u(e),t)}else{return null}}function o(e,t,r){var n=V(t,r);var i=V(t,"hx-disinherit");if(e!==t&&i&&(i==="*"||i.split(" ").indexOf(r)>=0)){return"unset"}else{return n}}function G(t,r){var n=null;h(t,function(e){return n=o(t,e,r)});if(n!=="unset"){return n}}function d(e,t){var r=e.matches||e.matchesSelector||e.msMatchesSelector||e.mozMatchesSelector||e.webkitMatchesSelector||e.oMatchesSelector;return r&&r.call(e,t)}function a(e){var t=/<([a-z][^\/\0>\x20\t\r\n\f]*)/i;var r=t.exec(e);if(r){return r[1].toLowerCase()}else{return""}}function l(e,t){var r=new DOMParser;var n=r.parseFromString(e,"text/html");var i=n.body;while(t>0){t--;i=i.firstChild}if(i==null){i=z().createDocumentFragment()}return i}function g(e){if(U.config.useTemplateFragments){var t=l("<body><template>"+e+"</template></body>",0);return t.querySelector("template").content}else{var r=a(e);switch(r){case"thead":case"tbody":case"tfoot":case"colgroup":case"caption":return l("<table>"+e+"</table>",1);case"col":return l("<table><colgroup>"+e+"</colgroup></table>",2);case"tr":return l("<table><tbody>"+e+"</tbody></table>",2);case"td":case"th":return l("<table><tbody><tr>"+e+"</tr></tbody></table>",3);case"script":return l("<div>"+e+"</div>",1);default:return l(e,0)}}}function K(e){if(e){e()}}function p(e,t){return 
Object.prototype.toString.call(e)==="[object "+t+"]"}function m(e){return p(e,"Function")}function x(e){return p(e,"Object")}function _(e){var t="htmx-internal-data";var r=e[t];if(!r){r=e[t]={}}return r}function y(e){var t=[];if(e){for(var r=0;r<e.length;r++){t.push(e[r])}}return t}function W(e,t){if(e){for(var r=0;r<e.length;r++){t(e[r])}}}function b(e){var t=e.getBoundingClientRect();var r=t.top;var n=t.bottom;return r<window.innerHeight&&n>=0}function Y(e){if(e.getRootNode()instanceof ShadowRoot){return z().body.contains(e.getRootNode().host)}else{return z().body.contains(e)}}function w(e){return e.trim().split(/\s+/)}function Q(e,t){for(var r in t){if(t.hasOwnProperty(r)){e[r]=t[r]}}return e}function S(e){try{return JSON.parse(e)}catch(e){pt(e);return null}}function e(e){return Jt(z().body,function(){return eval(e)})}function t(t){var e=U.on("htmx:load",function(e){t(e.detail.elt)});return e}function E(){U.logger=function(e,t,r){if(console){console.log(t,e,r)}}}function C(e,t){if(t){return e.querySelector(t)}else{return C(z(),e)}}function R(e,t){if(t){return e.querySelectorAll(t)}else{return R(z(),e)}}function O(e,t){e=k(e);if(t){setTimeout(function(){O(e)},t)}else{e.parentElement.removeChild(e)}}function L(e,t,r){e=k(e);if(r){setTimeout(function(){L(e,t)},r)}else{e.classList&&e.classList.add(t)}}function q(e,t,r){e=k(e);if(r){setTimeout(function(){q(e,t)},r)}else{if(e.classList){e.classList.remove(t);if(e.classList.length===0){e.removeAttribute("class")}}}}function A(e,t){e=k(e);e.classList.toggle(t)}function T(e,t){e=k(e);W(e.parentElement.children,function(e){q(e,t)});L(e,t)}function H(e,t){e=k(e);if(e.closest){return e.closest(t)}else{do{if(e==null||d(e,t)){return e}}while(e=e&&u(e))}}function N(e,t){if(t.indexOf("closest ")===0){return[H(e,t.substr(8))]}else if(t.indexOf("find ")===0){return[C(e,t.substr(5))]}else if(t==="document"){return[document]}else if(t==="window"){return[window]}else{return z().querySelectorAll(t)}}function ee(e,t){if(t){return N(e,t)[0]}else{return N(z().body,e)[0]}}function k(e){if(p(e,"String")){return C(e)}else{return e}}function I(e,t,r){if(m(t)){return{target:z().body,event:e,listener:t}}else{return{target:k(e),event:t,listener:r}}}function M(t,r,n){lr(function(){var e=I(t,r,n);e.target.addEventListener(e.event,e.listener)});var e=m(r);return e?r:n}function D(t,r,n){lr(function(){var e=I(t,r,n);e.target.removeEventListener(e.event,e.listener)});return m(r)?r:n}var te=z().createElement("output");function F(e,t){var r=G(e,t);if(r){if(r==="this"){return[re(e,t)]}else{var n=N(e,r);if(n.length===0){pt('The selector "'+r+'" on '+t+" returned no matches!");return[te]}else{return n}}}}function re(e,t){return h(e,function(e){return V(e,t)!=null})}function ne(e){var t=G(e,"hx-target");if(t){if(t==="this"){return re(e,"hx-target")}else{return ee(e,t)}}else{var r=_(e);if(r.boosted){return z().body}else{return e}}}function P(e){var t=U.config.attributesToSettle;for(var r=0;r<t.length;r++){if(e===t[r]){return true}}return false}function X(t,r){W(t.attributes,function(e){if(!r.hasAttribute(e.name)&&P(e.name)){t.removeAttribute(e.name)}});W(r.attributes,function(e){if(P(e.name)){t.setAttribute(e.name,e.value)}})}function j(e,t){var r=sr(t);for(var n=0;n<r.length;n++){var i=r[n];try{if(i.isInlineSwap(e)){return true}}catch(e){pt(e)}}return e==="outerHTML"}function B(e,i,o){var t="#"+i.id;var a="outerHTML";if(e==="true"){}else if(e.indexOf(":")>0){a=e.substr(0,e.indexOf(":"));t=e.substr(e.indexOf(":")+1,e.length)}else{a=e}var 
r=z().querySelectorAll(t);if(r){W(r,function(e){var t;var r=i.cloneNode(true);t=z().createDocumentFragment();t.appendChild(r);if(!j(a,e)){t=r}var n={shouldSwap:true,target:e,fragment:t};if(!$(e,"htmx:oobBeforeSwap",n))return;e=n.target;if(n["shouldSwap"]){ye(a,e,e,t,o)}W(o.elts,function(e){$(e,"htmx:oobAfterSwap",n)})});i.parentNode.removeChild(i)}else{i.parentNode.removeChild(i);J(z().body,"htmx:oobErrorNoTarget",{content:i})}return e}function ie(e,r){W(R(e,"[hx-swap-oob], [data-hx-swap-oob]"),function(e){var t=V(e,"hx-swap-oob");if(t!=null){B(t,e,r)}})}function oe(e){W(R(e,"[hx-preserve], [data-hx-preserve]"),function(e){var t=V(e,"id");var r=z().getElementById(t);if(r!=null){e.parentNode.replaceChild(r,e)}})}function ae(n,e,i){W(e.querySelectorAll("[id]"),function(e){if(e.id&&e.id.length>0){var t=n.querySelector(e.tagName+"[id='"+e.id+"']");if(t&&t!==n){var r=e.cloneNode();X(e,t);i.tasks.push(function(){X(e,r)})}}})}function se(e){return function(){q(e,U.config.addedClass);ct(e);at(e);le(e);$(e,"htmx:load")}}function le(e){var t="[autofocus]";var r=d(e,t)?e:e.querySelector(t);if(r!=null){r.focus()}}function ue(e,t,r,n){ae(e,r,n);while(r.childNodes.length>0){var i=r.firstChild;L(i,U.config.addedClass);e.insertBefore(i,t);if(i.nodeType!==Node.TEXT_NODE&&i.nodeType!==Node.COMMENT_NODE){n.tasks.push(se(i))}}}function fe(t){var e=_(t);if(e.webSocket){e.webSocket.close()}if(e.sseEventSource){e.sseEventSource.close()}$(t,"htmx:beforeCleanupElement");if(e.listenerInfos){W(e.listenerInfos,function(e){if(t!==e.on){e.on.removeEventListener(e.trigger,e.listener)}})}if(t.children){W(t.children,function(e){fe(e)})}}function ce(e,t,r){if(e.tagName==="BODY"){return me(e,t,r)}else{var n;var i=e.previousSibling;ue(u(e),e,t,r);if(i==null){n=u(e).firstChild}else{n=i.nextSibling}_(e).replacedWith=n;r.elts=[];while(n&&n!==e){if(n.nodeType===Node.ELEMENT_NODE){r.elts.push(n)}n=n.nextElementSibling}fe(e);u(e).removeChild(e)}}function he(e,t,r){return ue(e,e.firstChild,t,r)}function de(e,t,r){return ue(u(e),e,t,r)}function ve(e,t,r){return ue(e,null,t,r)}function ge(e,t,r){return ue(u(e),e.nextSibling,t,r)}function pe(e,t,r){fe(e);return u(e).removeChild(e)}function me(e,t,r){var n=e.firstChild;ue(e,n,t,r);if(n){while(n.nextSibling){fe(n.nextSibling);e.removeChild(n.nextSibling)}fe(n);e.removeChild(n)}}function xe(e,t){var r=G(e,"hx-select");if(r){var n=z().createDocumentFragment();W(t.querySelectorAll(r),function(e){n.appendChild(e)});t=n}return t}function ye(e,t,r,n,i){switch(e){case"none":return;case"outerHTML":ce(r,n,i);return;case"afterbegin":he(r,n,i);return;case"beforebegin":de(r,n,i);return;case"beforeend":ve(r,n,i);return;case"afterend":ge(r,n,i);return;case"delete":pe(r,n,i);return;default:var o=sr(t);for(var a=0;a<o.length;a++){var f=o[a];try{var s=f.handleSwap(e,r,n,i);if(s){if(typeof s.length!=="undefined"){for(var l=0;l<s.length;l++){var u=s[l];if(u.nodeType!==Node.TEXT_NODE&&u.nodeType!==Node.COMMENT_NODE){i.tasks.push(se(u))}}}return}}catch(e){pt(e)}}if(e==="innerHTML"){me(r,n,i)}else{ye(U.config.defaultSwapStyle,t,r,n,i)}}}function be(e){if(e.indexOf("<title")>-1){var t=e.replace(/<svg(\s[^>]*>|>)([\s\S]*?)<\/svg>/gim,"");var r=t.match(/<title(\s[^>]*>|>)([\s\S]*?)<\/title>/im);if(r){return r[2]}}}function we(e,t,r,n,i){i.title=be(n);var o=g(n);if(o){ie(o,i);o=xe(r,o);oe(o);return ye(e,r,t,o,i)}}function Se(e,t,r){var n=e.getResponseHeader(t);if(n.indexOf("{")===0){var i=S(n);for(var o in i){if(i.hasOwnProperty(o)){var a=i[o];if(!x(a)){a={value:a}}$(r,o,a)}}}else{$(r,n,[])}}var Ee=/\s/;var 
Ce=/[\s,]/;var Re=/[_$a-zA-Z]/;var Oe=/[_$a-zA-Z0-9]/;var Le=['"',"'","/"];var qe=/[^\s]/;function Ae(e){var t=[];var r=0;while(r<e.length){if(Re.exec(e.charAt(r))){var n=r;while(Oe.exec(e.charAt(r+1))){r++}t.push(e.substr(n,r-n+1))}else if(Le.indexOf(e.charAt(r))!==-1){var i=e.charAt(r);var n=r;r++;while(r<e.length&&e.charAt(r)!==i){if(e.charAt(r)==="\\"){r++}r++}t.push(e.substr(n,r-n+1))}else{var o=e.charAt(r);t.push(o)}r++}return t}function Te(e,t,r){return Re.exec(e.charAt(0))&&e!=="true"&&e!=="false"&&e!=="this"&&e!==r&&t!=="."}function He(e,t,r){if(t[0]==="["){t.shift();var n=1;var i=" return (function("+r+"){ return (";var o=null;while(t.length>0){var a=t[0];if(a==="]"){n--;if(n===0){if(o===null){i=i+"true"}t.shift();i+=")})";try{var s=Jt(e,function(){return Function(i)()},function(){return true});s.source=i;return s}catch(e){J(z().body,"htmx:syntax:error",{error:e,source:i});return null}}}else if(a==="["){n++}if(Te(a,o,r)){i+="(("+r+"."+a+") ? ("+r+"."+a+") : (window."+a+"))"}else{i=i+a}o=t.shift()}}}function c(e,t){var r="";while(e.length>0&&!e[0].match(t)){r+=e.shift()}return r}var Ne="input, textarea, select";function ke(e){var t=V(e,"hx-trigger");var r=[];if(t){var n=Ae(t);do{c(n,qe);var f=n.length;var i=c(n,/[,\[\s]/);if(i!==""){if(i==="every"){var o={trigger:"every"};c(n,qe);o.pollInterval=v(c(n,/[,\[\s]/));c(n,qe);var a=He(e,n,"event");if(a){o.eventFilter=a}r.push(o)}else if(i.indexOf("sse:")===0){r.push({trigger:"sse",sseEvent:i.substr(4)})}else{var s={trigger:i};var a=He(e,n,"event");if(a){s.eventFilter=a}while(n.length>0&&n[0]!==","){c(n,qe);var l=n.shift();if(l==="changed"){s.changed=true}else if(l==="once"){s.once=true}else if(l==="consume"){s.consume=true}else if(l==="delay"&&n[0]===":"){n.shift();s.delay=v(c(n,Ce))}else if(l==="from"&&n[0]===":"){n.shift();var u=c(n,Ce);if(u==="closest"||u==="find"){n.shift();u+=" "+c(n,Ce)}s.from=u}else if(l==="target"&&n[0]===":"){n.shift();s.target=c(n,Ce)}else if(l==="throttle"&&n[0]===":"){n.shift();s.throttle=v(c(n,Ce))}else if(l==="queue"&&n[0]===":"){n.shift();s.queue=c(n,Ce)}else if((l==="root"||l==="threshold")&&n[0]===":"){n.shift();s[l]=c(n,Ce)}else{J(e,"htmx:syntax:error",{token:n.shift()})}}r.push(s)}}if(n.length===f){J(e,"htmx:syntax:error",{token:n.shift()})}c(n,qe)}while(n[0]===","&&n.shift())}if(r.length>0){return r}else if(d(e,"form")){return[{trigger:"submit"}]}else if(d(e,Ne)){return[{trigger:"change"}]}else{return[{trigger:"click"}]}}function Ie(e){_(e).cancelled=true}function Me(e,t,r,n){var i=_(e);i.timeout=setTimeout(function(){if(Y(e)&&i.cancelled!==true){if(!je(n,dt("hx:poll:trigger",{triggerSpec:n,target:e}))){Z(t,r,e)}Me(e,t,V(e,"hx-"+t),n)}},n.pollInterval)}function De(e){return location.hostname===e.hostname&&f(e,"href")&&f(e,"href").indexOf("#")!==0}function Fe(t,r,e){if(t.tagName==="A"&&De(t)&&t.target===""||t.tagName==="FORM"){r.boosted=true;var n,i;if(t.tagName==="A"){n="get";i=f(t,"href");r.pushURL=true}else{var o=f(t,"method");n=o?o.toLowerCase():"get";if(n==="get"){r.pushURL=true}i=f(t,"action")}e.forEach(function(e){Be(t,n,i,r,e,true)})}}function Pe(e,t){if(e.type==="submit"||e.type==="click"){if(t.tagName==="FORM"){return true}if(d(t,'input[type="submit"], button')&&H(t,"form")!==null){return true}if(t.tagName==="A"&&t.href&&(t.getAttribute("href")==="#"||t.getAttribute("href").indexOf("#")!==0)){return true}}return false}function Xe(e,t){return _(e).boosted&&e.tagName==="A"&&t.type==="click"&&(t.ctrlKey||t.metaKey)}function je(e,t){var r=e.eventFilter;if(r){try{return 
r(t)!==true}catch(e){J(z().body,"htmx:eventFilter:error",{error:e,source:r.source});return true}}return false}function Be(o,a,s,e,l,u){var t;if(l.from){t=N(o,l.from)}else{t=[o]}W(t,function(n){var i=function(e){if(!Y(o)){n.removeEventListener(l.trigger,i);return}if(Xe(o,e)){return}if(u||Pe(e,o)){e.preventDefault()}if(je(l,e)){return}var t=_(e);t.triggerSpec=l;if(t.handledFor==null){t.handledFor=[]}var r=_(o);if(t.handledFor.indexOf(o)<0){t.handledFor.push(o);if(l.consume){e.stopPropagation()}if(l.target&&e.target){if(!d(e.target,l.target)){return}}if(l.once){if(r.triggeredOnce){return}else{r.triggeredOnce=true}}if(l.changed){if(r.lastValue===o.value){return}else{r.lastValue=o.value}}if(r.delayed){clearTimeout(r.delayed)}if(r.throttle){return}if(l.throttle){if(!r.throttle){Z(a,s,o,e);r.throttle=setTimeout(function(){r.throttle=null},l.throttle)}}else if(l.delay){r.delayed=setTimeout(function(){Z(a,s,o,e)},l.delay)}else{Z(a,s,o,e)}}};if(e.listenerInfos==null){e.listenerInfos=[]}e.listenerInfos.push({trigger:l.trigger,listener:i,on:n});n.addEventListener(l.trigger,i)})}var Ue=false;var Ve=null;function ze(){if(!Ve){Ve=function(){Ue=true};window.addEventListener("scroll",Ve);setInterval(function(){if(Ue){Ue=false;W(z().querySelectorAll("[hx-trigger='revealed'],[data-hx-trigger='revealed']"),function(e){_e(e)})}},200)}}function _e(e){if(!s(e,"data-hx-revealed")&&b(e)){e.setAttribute("data-hx-revealed","true");var t=_(e);if(t.initialized){Z(t.verb,t.path,e)}else{e.addEventListener("htmx:afterProcessNode",function(){Z(t.verb,t.path,e)},{once:true})}}}function We(e,t,r){var n=w(r);for(var i=0;i<n.length;i++){var o=n[i].split(/:(.+)/);if(o[0]==="connect"){Je(e,o[1],0)}if(o[0]==="send"){Ze(e)}}}function Je(s,r,n){if(!Y(s)){return}if(r.indexOf("/")==0){var e=location.hostname+(location.port?":"+location.port:"");if(location.protocol=="https:"){r="wss://"+e+r}else if(location.protocol=="http:"){r="ws://"+e+r}}var t=U.createWebSocket(r);t.onerror=function(e){J(s,"htmx:wsError",{error:e,socket:t});$e(s)};t.onclose=function(e){if([1006,1012,1013].indexOf(e.code)>=0){var t=Ge(n);setTimeout(function(){Je(s,r,n+1)},t)}};t.onopen=function(e){n=0};_(s).webSocket=t;t.addEventListener("message",function(e){if($e(s)){return}var t=e.data;gt(s,function(e){t=e.transformResponse(t,null,s)});var r=zt(s);var n=g(t);var i=y(n.children);for(var o=0;o<i.length;o++){var a=i[o];B(V(a,"hx-swap-oob")||"true",a,r)}Ct(r.tasks)})}function $e(e){if(!Y(e)){_(e).webSocket.close();return true}}function Ze(u){var f=h(u,function(e){return _(e).webSocket!=null});if(f){u.addEventListener(ke(u)[0].trigger,function(e){var t=_(f).webSocket;var r=Xt(u,f);var n=Mt(u,"post");var i=n.errors;var o=n.values;var a=Gt(u);var s=Q(o,a);var l=jt(s,u);l["HEADERS"]=r;if(i&&i.length>0){$(u,"htmx:validation:halted",i);return}t.send(JSON.stringify(l));if(Pe(e,u)){e.preventDefault()}})}else{J(u,"htmx:noWebSocketSourceError")}}function Ge(e){var t=U.config.wsReconnectDelay;if(typeof t==="function"){return t(e)}if(t==="full-jitter"){var r=Math.min(e,6);var n=1e3*Math.pow(2,r);return n*Math.random()}pt('htmx.config.wsReconnectDelay must either be a function or the string "full-jitter"')}function Ke(e,t,r){var n=w(r);for(var i=0;i<n.length;i++){var o=n[i].split(/:(.+)/);if(o[0]==="connect"){Ye(e,o[1])}if(o[0]==="swap"){Qe(e,o[1])}}}function Ye(t,e){var r=U.createEventSource(e);r.onerror=function(e){J(t,"htmx:sseError",{error:e,source:r});tt(t)};_(t).sseEventSource=r}function Qe(o,a){var s=h(o,rt);if(s){var l=_(s).sseEventSource;var 
u=function(e){if(tt(s)){l.removeEventListener(a,u);return}var t=e.data;gt(o,function(e){t=e.transformResponse(t,null,o)});var r=Ut(o);var n=ne(o);var i=zt(o);we(r.swapStyle,o,n,t,i);Ct(i.tasks);$(o,"htmx:sseMessage",e)};_(o).sseListener=u;l.addEventListener(a,u)}else{J(o,"htmx:noSSESourceError")}}function et(e,t,r,n){var i=h(e,rt);if(i){var o=_(i).sseEventSource;var a=function(){if(!tt(i)){if(Y(e)){Z(t,r,e)}else{o.removeEventListener(n,a)}}};_(e).sseListener=a;o.addEventListener(n,a)}else{J(e,"htmx:noSSESourceError")}}function tt(e){if(!Y(e)){_(e).sseEventSource.close();return true}}function rt(e){return _(e).sseEventSource!=null}function nt(e,t,r,n,i){var o=function(){if(!n.loaded){n.loaded=true;Z(t,r,e)}};if(i){setTimeout(o,i)}else{o()}}function it(o,a,e){var t=false;W(n,function(n){if(s(o,"hx-"+n)){var i=V(o,"hx-"+n);t=true;a.path=i;a.verb=n;e.forEach(function(e){if(e.sseEvent){et(o,n,i,e.sseEvent)}else if(e.trigger==="revealed"){ze();_e(o)}else if(e.trigger==="intersect"){var t={};if(e.root){t.root=ee(o,e.root)}if(e.threshold){t.threshold=parseFloat(e.threshold)}var r=new IntersectionObserver(function(e){for(var t=0;t<e.length;t++){var r=e[t];if(r.isIntersecting){$(o,"intersect");break}}},t);r.observe(o);Be(o,n,i,a,e)}else if(e.trigger==="load"){nt(o,n,i,a,e.delay)}else if(e.pollInterval){a.polling=true;Me(o,n,i,e)}else{Be(o,n,i,a,e)}})}});return t}function ot(e){if(e.type==="text/javascript"||e.type==="module"||e.type===""){var t=z().createElement("script");W(e.attributes,function(e){t.setAttribute(e.name,e.value)});t.textContent=e.textContent;t.async=false;if(U.config.inlineScriptNonce){t.nonce=U.config.inlineScriptNonce}var r=e.parentElement;try{r.insertBefore(t,e)}catch(e){pt(e)}finally{r.removeChild(e)}}}function at(e){if(d(e,"script")){ot(e)}W(R(e,"script"),function(e){ot(e)})}function st(){return document.querySelector("[hx-boost], [data-hx-boost]")}function lt(e){if(e.querySelectorAll){var t=st()?", a, form":"";var r=e.querySelectorAll(i+t+", [hx-sse], [data-hx-sse], [hx-ws],"+" [data-hx-ws], [hx-ext], [hx-data-ext]");return r}else{return[]}}function ut(r){var e=function(e){if(d(e.target,"button, input[type='submit']")){var t=_(r);t.lastButtonClicked=e.target}};r.addEventListener("click",e);r.addEventListener("focusin",e);r.addEventListener("focusout",function(e){var t=_(r);t.lastButtonClicked=null})}function ft(e){if(e.closest&&e.closest(U.config.disableSelector)){return}var t=_(e);if(!t.initialized){t.initialized=true;$(e,"htmx:beforeProcessNode");if(e.value){t.lastValue=e.value}var r=ke(e);var n=it(e,t,r);if(!n&&G(e,"hx-boost")==="true"){Fe(e,t,r)}if(e.tagName==="FORM"){ut(e)}var i=V(e,"hx-sse");if(i){Ke(e,t,i)}var o=V(e,"hx-ws");if(o){We(e,t,o)}$(e,"htmx:afterProcessNode")}}function ct(e){e=k(e);ft(e);W(lt(e),function(e){ft(e)})}function ht(e){return e.replace(/([a-z0-9])([A-Z])/g,"$1-$2").toLowerCase()}function dt(e,t){var r;if(window.CustomEvent&&typeof window.CustomEvent==="function"){r=new CustomEvent(e,{bubbles:true,cancelable:true,detail:t})}else{r=z().createEvent("CustomEvent");r.initCustomEvent(e,true,true,t)}return r}function J(e,t,r){$(e,t,Q({error:t},r))}function vt(e){return e==="htmx:afterProcessNode"}function gt(e,t){W(sr(e),function(e){try{t(e)}catch(e){pt(e)}})}function pt(e){if(console.error){console.error(e)}else if(console.log){console.log("ERROR: ",e)}}function $(e,t,r){e=k(e);if(r==null){r={}}r["elt"]=e;var n=dt(t,r);if(U.logger&&!vt(t)){U.logger(e,t,r)}if(r.error){pt(r.error);$(e,"htmx:error",{errorInfo:r})}var i=e.dispatchEvent(n);var 
o=ht(t);if(i&&o!==t){var a=dt(o,n.detail);i=i&&e.dispatchEvent(a)}gt(e,function(e){i=i&&e.onEvent(t,n)!==false});return i}var mt=location.pathname+location.search;function xt(){var e=z().querySelector("[hx-history-elt],[data-hx-history-elt]");return e||z().body}function yt(e,t,r,n){var i=S(localStorage.getItem("htmx-history-cache"))||[];for(var o=0;o<i.length;o++){if(i[o].url===e){i.splice(o,1);break}}i.push({url:e,content:t,title:r,scroll:n});while(i.length>U.config.historyCacheSize){i.shift()}while(i.length>0){try{localStorage.setItem("htmx-history-cache",JSON.stringify(i));break}catch(e){J(z().body,"htmx:historyCacheError",{cause:e,cache:i});i.shift()}}}function bt(e){var t=S(localStorage.getItem("htmx-history-cache"))||[];for(var r=0;r<t.length;r++){if(t[r].url===e){return t[r]}}return null}function wt(e){var t=U.config.requestClass;var r=e.cloneNode(true);W(R(r,"."+t),function(e){q(e,t)});return r.innerHTML}function St(){var e=xt();var t=mt||location.pathname+location.search;$(z().body,"htmx:beforeHistorySave",{path:t,historyElt:e});if(U.config.historyEnabled)history.replaceState({htmx:true},z().title,window.location.href);yt(t,wt(e),z().title,window.scrollY)}function Et(e){if(U.config.historyEnabled)history.pushState({htmx:true},"",e);mt=e}function Ct(e){W(e,function(e){e.call()})}function Rt(n){var e=new XMLHttpRequest;var i={path:n,xhr:e};$(z().body,"htmx:historyCacheMiss",i);e.open("GET",n,true);e.setRequestHeader("HX-History-Restore-Request","true");e.onload=function(){if(this.status>=200&&this.status<400){$(z().body,"htmx:historyCacheMissLoad",i);var e=g(this.response);e=e.querySelector("[hx-history-elt],[data-hx-history-elt]")||e;var t=xt();var r=zt(t);me(t,e,r);Ct(r.tasks);mt=n;$(z().body,"htmx:historyRestore",{path:n})}else{J(z().body,"htmx:historyCacheMissLoadError",i)}};e.send()}function Ot(e){St();e=e||location.pathname+location.search;var t=bt(e);if(t){var r=g(t.content);var n=xt();var i=zt(n);me(n,r,i);Ct(i.tasks);document.title=t.title;window.scrollTo(0,t.scroll);mt=e;$(z().body,"htmx:historyRestore",{path:e})}else{if(U.config.refreshOnHistoryMiss){window.location.reload(true)}else{Rt(e)}}}function Lt(e){var t=G(e,"hx-push-url");return t&&t!=="false"||_(e).boosted&&_(e).pushURL}function qt(e){var t=G(e,"hx-push-url");return t==="true"||t==="false"?null:t}function At(e){var t=F(e,"hx-indicator");if(t==null){t=[e]}W(t,function(e){e.classList["add"].call(e.classList,U.config.requestClass)});return t}function Tt(e){W(e,function(e){e.classList["remove"].call(e.classList,U.config.requestClass)})}function Ht(e,t){for(var r=0;r<e.length;r++){var n=e[r];if(n.isSameNode(t)){return true}}return false}function Nt(e){if(e.name===""||e.name==null||e.disabled){return false}if(e.type==="button"||e.type==="submit"||e.tagName==="image"||e.tagName==="reset"||e.tagName==="file"){return false}if(e.type==="checkbox"||e.type==="radio"){return e.checked}return true}function kt(t,r,n,e,i){if(e==null||Ht(t,e)){return}else{t.push(e)}if(Nt(e)){var o=f(e,"name");var a=e.value;if(e.multiple){a=y(e.querySelectorAll("option:checked")).map(function(e){return e.value})}if(e.files){a=y(e.files)}if(o!=null&&a!=null){var s=r[o];if(s){if(Array.isArray(s)){if(Array.isArray(a)){r[o]=s.concat(a)}else{s.push(a)}}else{if(Array.isArray(a)){r[o]=[s].concat(a)}else{r[o]=[s,a]}}}else{r[o]=a}}if(i){It(e,n)}}if(d(e,"form")){var l=e.elements;W(l,function(e){kt(t,r,n,e,i)})}}function 
It(e,t){if(e.willValidate){$(e,"htmx:validation:validate");if(!e.checkValidity()){t.push({elt:e,message:e.validationMessage,validity:e.validity});$(e,"htmx:validation:failed",{message:e.validationMessage,validity:e.validity})}}}function Mt(e,t){var r=[];var n={};var i={};var o=[];var a=_(e);var s=d(e,"form")&&e.noValidate!==true;if(a.lastButtonClicked){s=s&&a.lastButtonClicked.formNoValidate!==true}if(t!=="get"){kt(r,i,o,H(e,"form"),s)}kt(r,n,o,e,s);if(a.lastButtonClicked){var l=f(a.lastButtonClicked,"name");if(l){n[l]=a.lastButtonClicked.value}}var u=F(e,"hx-include");W(u,function(e){kt(r,n,o,e,s);if(!d(e,"form")){W(e.querySelectorAll(Ne),function(e){kt(r,n,o,e,s)})}});n=Q(n,i);return{errors:o,values:n}}function Dt(e,t,r){if(e!==""){e+="&"}if(String(r)==="[object Object]"){r=JSON.stringify(r)}var n=encodeURIComponent(r);e+=encodeURIComponent(t)+"="+n;return e}function Ft(e){var t="";for(var r in e){if(e.hasOwnProperty(r)){var n=e[r];if(Array.isArray(n)){W(n,function(e){t=Dt(t,r,e)})}else{t=Dt(t,r,n)}}}return t}function Pt(e){var t=new FormData;for(var r in e){if(e.hasOwnProperty(r)){var n=e[r];if(Array.isArray(n)){W(n,function(e){t.append(r,e)})}else{t.append(r,n)}}}return t}function Xt(e,t,r){var n={"HX-Request":"true","HX-Trigger":f(e,"id"),"HX-Trigger-Name":f(e,"name"),"HX-Target":V(t,"id"),"HX-Current-URL":z().location.href};Wt(e,"hx-headers",false,n);if(r!==undefined){n["HX-Prompt"]=r}if(_(e).boosted){n["HX-Boosted"]="true"}return n}function jt(t,e){var r=G(e,"hx-params");if(r){if(r==="none"){return{}}else if(r==="*"){return t}else if(r.indexOf("not ")===0){W(r.substr(4).split(","),function(e){e=e.trim();delete t[e]});return t}else{var n={};W(r.split(","),function(e){e=e.trim();n[e]=t[e]});return n}}else{return t}}function Bt(e){return f(e,"href")&&f(e,"href").indexOf("#")>=0}function Ut(e,t){var r=t?t:G(e,"hx-swap");var n={swapStyle:_(e).boosted?"innerHTML":U.config.defaultSwapStyle,swapDelay:U.config.defaultSwapDelay,settleDelay:U.config.defaultSettleDelay};if(_(e).boosted&&!Bt(e)){n["show"]="top"}if(r){var i=w(r);if(i.length>0){n["swapStyle"]=i[0];for(var o=1;o<i.length;o++){var a=i[o];if(a.indexOf("swap:")===0){n["swapDelay"]=v(a.substr(5))}if(a.indexOf("settle:")===0){n["settleDelay"]=v(a.substr(7))}if(a.indexOf("scroll:")===0){var s=a.substr(7);var l=s.split(":");var f=l.pop();var u=l.length>0?l.join(":"):null;n["scroll"]=f;n["scrollTarget"]=u}if(a.indexOf("show:")===0){var c=a.substr(5);var l=c.split(":");var h=l.pop();var u=l.length>0?l.join(":"):null;n["show"]=h;n["showTarget"]=u}if(a.indexOf("focus-scroll:")===0){var d=a.substr("focus-scroll:".length);n["focusScroll"]=d=="true"}}}}return n}function Vt(t,r,n){var i=null;gt(r,function(e){if(i==null){i=e.encodeParameters(t,n,r)}});if(i!=null){return i}else{if(G(r,"hx-encoding")==="multipart/form-data"||d(r,"form")&&f(r,"enctype")==="multipart/form-data"){return Pt(n)}else{return Ft(n)}}}function zt(e){return{tasks:[],elts:[e]}}function _t(e,t){var r=e[0];var n=e[e.length-1];if(t.scroll){var i=null;if(t.scrollTarget){i=ee(r,t.scrollTarget)}if(t.scroll==="top"&&(r||i)){i=i||r;i.scrollTop=0}if(t.scroll==="bottom"&&(n||i)){i=i||n;i.scrollTop=i.scrollHeight}}if(t.show){var i=null;if(t.showTarget){var o=t.showTarget;if(t.showTarget==="window"){o="body"}i=ee(r,o)}if(t.show==="top"&&(r||i)){i=i||r;i.scrollIntoView({block:"start",behavior:U.config.scrollBehavior})}if(t.show==="bottom"&&(n||i)){i=i||n;i.scrollIntoView({block:"end",behavior:U.config.scrollBehavior})}}}function Wt(e,t,r,n){if(n==null){n={}}if(e==null){return n}var 
i=V(e,t);if(i){var o=i.trim();var a=r;if(o.indexOf("javascript:")===0){o=o.substr(11);a=true}else if(o.indexOf("js:")===0){o=o.substr(3);a=true}if(o.indexOf("{")!==0){o="{"+o+"}"}var s;if(a){s=Jt(e,function(){return Function("return ("+o+")")()},{})}else{s=S(o)}for(var l in s){if(s.hasOwnProperty(l)){if(n[l]==null){n[l]=s[l]}}}}return Wt(u(e),t,r,n)}function Jt(e,t,r){if(U.config.allowEval){return t()}else{J(e,"htmx:evalDisallowedError");return r}}function $t(e,t){return Wt(e,"hx-vars",true,t)}function Zt(e,t){return Wt(e,"hx-vals",false,t)}function Gt(e){return Q($t(e),Zt(e))}function Kt(t,r,n){if(n!==null){try{t.setRequestHeader(r,n)}catch(e){t.setRequestHeader(r,encodeURIComponent(n));t.setRequestHeader(r+"-URI-AutoEncoded","true")}}}function Yt(t){if(t.responseURL&&typeof URL!=="undefined"){try{var e=new URL(t.responseURL);return e.pathname+e.search}catch(e){J(z().body,"htmx:badResponseUrl",{url:t.responseURL})}}}function Qt(e,t){return e.getAllResponseHeaders().match(t)}function er(e,t,r){e=e.toLowerCase();if(r){if(r instanceof Element||p(r,"String")){return Z(e,t,null,null,{targetOverride:k(r),returnPromise:true})}else{return Z(e,t,k(r.source),r.event,{handler:r.handler,headers:r.headers,values:r.values,targetOverride:k(r.target),swapOverride:r.swap,returnPromise:true})}}else{return Z(e,t,null,null,{returnPromise:true})}}function tr(e){var t=[];while(e){t.push(e);e=e.parentElement}return t}function Z(e,t,n,f,r){var c=null;var h=null;r=r!=null?r:{};if(r.returnPromise&&typeof Promise!=="undefined"){var d=new Promise(function(e,t){c=e;h=t})}if(n==null){n=z().body}var v=r.handler||rr;if(!Y(n)){return}var g=r.targetOverride||ne(n);if(g==null||g==te){J(n,"htmx:targetError",{target:V(n,"hx-target")});return}var p=n;var i=_(n);var o=G(n,"hx-sync");var m=null;var x=false;if(o){var y=o.split(":");var b=y[0].trim();if(b==="this"){p=re(n,"hx-sync")}else{p=ee(n,b)}o=(y[1]||"drop").trim();i=_(p);if(o==="drop"&&i.xhr&&i.abortable!==true){return}else if(o==="abort"){if(i.xhr){return}else{x=true}}else if(o==="replace"){$(p,"htmx:abort")}else if(o.indexOf("queue")===0){var w=o.split(" ");m=(w[1]||"last").trim()}}if(i.xhr){if(i.abortable){$(p,"htmx:abort")}else{if(m==null){if(f){var S=_(f);if(S&&S.triggerSpec&&S.triggerSpec.queue){m=S.triggerSpec.queue}}if(m==null){m="last"}}if(i.queuedRequests==null){i.queuedRequests=[]}if(m==="first"&&i.queuedRequests.length===0){i.queuedRequests.push(function(){Z(e,t,n,f,r)})}else if(m==="all"){i.queuedRequests.push(function(){Z(e,t,n,f,r)})}else if(m==="last"){i.queuedRequests=[];i.queuedRequests.push(function(){Z(e,t,n,f,r)})}return}}var a=new XMLHttpRequest;i.xhr=a;i.abortable=x;var s=function(){i.xhr=null;i.abortable=false;if(i.queuedRequests!=null&&i.queuedRequests.length>0){var e=i.queuedRequests.shift();e()}};var E=G(n,"hx-prompt");if(E){var C=prompt(E);if(C===null||!$(n,"htmx:prompt",{prompt:C,target:g})){K(c);s();return d}}var R=G(n,"hx-confirm");if(R){if(!confirm(R)){K(c);s();return d}}var O=Xt(n,g,C);if(r.headers){O=Q(O,r.headers)}var L=Mt(n,e);var q=L.errors;var A=L.values;if(r.values){A=Q(A,r.values)}var T=Gt(n);var H=Q(A,T);var N=jt(H,n);if(e!=="get"&&G(n,"hx-encoding")==null){O["Content-Type"]="application/x-www-form-urlencoded"}if(t==null||t===""){t=z().location.href}var k=Wt(n,"hx-request");var 
l={parameters:N,unfilteredParameters:H,headers:O,target:g,verb:e,errors:q,withCredentials:r.credentials||k.credentials||U.config.withCredentials,timeout:r.timeout||k.timeout||U.config.timeout,path:t,triggeringEvent:f};if(!$(n,"htmx:configRequest",l)){K(c);s();return d}t=l.path;e=l.verb;O=l.headers;N=l.parameters;q=l.errors;if(q&&q.length>0){$(n,"htmx:validation:halted",l);K(c);s();return d}var I=t.split("#");var M=I[0];var D=I[1];if(e==="get"){var F=M;var P=Object.keys(N).length!==0;if(P){if(F.indexOf("?")<0){F+="?"}else{F+="&"}F+=Ft(N);if(D){F+="#"+D}}a.open("GET",F,true)}else{a.open(e.toUpperCase(),t,true)}a.overrideMimeType("text/html");a.withCredentials=l.withCredentials;a.timeout=l.timeout;if(k.noHeaders){}else{for(var X in O){if(O.hasOwnProperty(X)){var j=O[X];Kt(a,X,j)}}}var u={xhr:a,target:g,requestConfig:l,etc:r,pathInfo:{path:t,finalPath:F,anchor:D}};a.onload=function(){try{var e=tr(n);v(n,u);Tt(B);$(n,"htmx:afterRequest",u);$(n,"htmx:afterOnLoad",u);if(!Y(n)){var t=null;while(e.length>0&&t==null){var r=e.shift();if(Y(r)){t=r}}if(t){$(t,"htmx:afterRequest",u);$(t,"htmx:afterOnLoad",u)}}K(c);s()}catch(e){J(n,"htmx:onLoadError",Q({error:e},u));throw e}};a.onerror=function(){Tt(B);J(n,"htmx:afterRequest",u);J(n,"htmx:sendError",u);K(h);s()};a.onabort=function(){Tt(B);J(n,"htmx:afterRequest",u);J(n,"htmx:sendAbort",u);K(h);s()};a.ontimeout=function(){Tt(B);J(n,"htmx:afterRequest",u);J(n,"htmx:timeout",u);K(h);s()};if(!$(n,"htmx:beforeRequest",u)){K(c);s();return d}var B=At(n);W(["loadstart","loadend","progress","abort"],function(t){W([a,a.upload],function(e){e.addEventListener(t,function(e){$(n,"htmx:xhr:"+t,{lengthComputable:e.lengthComputable,loaded:e.loaded,total:e.total})})})});$(n,"htmx:beforeSend",u);a.send(e==="get"?null:Vt(a,n,N));return d}function rr(s,l){var u=l.xhr;var f=l.target;var r=l.etc;if(!$(s,"htmx:beforeOnLoad",l))return;if(Qt(u,/HX-Trigger:/i)){Se(u,"HX-Trigger",s)}if(Qt(u,/HX-Push:/i)){var c=u.getResponseHeader("HX-Push")}if(Qt(u,/HX-Redirect:/i)){window.location.href=u.getResponseHeader("HX-Redirect");return}if(Qt(u,/HX-Refresh:/i)){if("true"===u.getResponseHeader("HX-Refresh")){location.reload();return}}if(Qt(u,/HX-Retarget:/i)){l.target=z().querySelector(u.getResponseHeader("HX-Retarget"))}var h;if(c=="false"){h=false}else{h=Lt(s)||c}var n=u.status>=200&&u.status<400&&u.status!==204;var d=u.response;var e=u.status>=400;var t=Q({shouldSwap:n,serverResponse:d,isError:e},l);if(!$(f,"htmx:beforeSwap",t))return;f=t.target;d=t.serverResponse;e=t.isError;l.failed=e;l.successful=!e;if(t.shouldSwap){if(u.status===286){Ie(s)}gt(s,function(e){d=e.transformResponse(d,u,s)});if(h){St()}var i=r.swapOverride;var v=Ut(s,i);f.classList.add(U.config.swappingClass);var o=function(){try{var e=document.activeElement;var t={};try{t={elt:e,start:e?e.selectionStart:null,end:e?e.selectionEnd:null}}catch(e){}var n=zt(f);we(v.swapStyle,f,s,d,n);if(t.elt&&!Y(t.elt)&&t.elt.id){var r=document.getElementById(t.elt.id);var i={preventScroll:v.focusScroll!==undefined?!v.focusScroll:!U.config.defaultFocusScroll};if(r){if(t.start&&r.setSelectionRange){r.setSelectionRange(t.start,t.end)}r.focus(i)}}f.classList.remove(U.config.swappingClass);W(n.elts,function(e){if(e.classList){e.classList.add(U.config.settlingClass)}$(e,"htmx:afterSwap",l)});if(l.pathInfo.anchor){location.hash=l.pathInfo.anchor}if(Qt(u,/HX-Trigger-After-Swap:/i)){var o=s;if(!Y(s)){o=z().body}Se(u,"HX-Trigger-After-Swap",o)}var 
a=function(){W(n.tasks,function(e){e.call()});W(n.elts,function(e){if(e.classList){e.classList.remove(U.config.settlingClass)}$(e,"htmx:afterSettle",l)});if(h){var e=c||qt(s)||Yt(u)||l.pathInfo.finalPath||l.pathInfo.path;Et(e);$(z().body,"htmx:pushedIntoHistory",{path:e})}if(n.title){var t=C("title");if(t){t.innerHTML=n.title}else{window.document.title=n.title}}_t(n.elts,v);if(Qt(u,/HX-Trigger-After-Settle:/i)){var r=s;if(!Y(s)){r=z().body}Se(u,"HX-Trigger-After-Settle",r)}};if(v.settleDelay>0){setTimeout(a,v.settleDelay)}else{a()}}catch(e){J(s,"htmx:swapError",l);throw e}};if(v.swapDelay>0){setTimeout(o,v.swapDelay)}else{o()}}if(e){J(s,"htmx:responseError",Q({error:"Response Status Error Code "+u.status+" from "+l.pathInfo.path},l))}}var nr={};function ir(){return{init:function(e){return null},onEvent:function(e,t){return true},transformResponse:function(e,t,r){return e},isInlineSwap:function(e){return false},handleSwap:function(e,t,r,n){return false},encodeParameters:function(e,t,r){return null}}}function or(e,t){if(t.init){t.init(r)}nr[e]=Q(ir(),t)}function ar(e){delete nr[e]}function sr(e,r,n){if(e==undefined){return r}if(r==undefined){r=[]}if(n==undefined){n=[]}var t=V(e,"hx-ext");if(t){W(t.split(","),function(e){e=e.replace(/ /g,"");if(e.slice(0,7)=="ignore:"){n.push(e.slice(7));return}if(n.indexOf(e)<0){var t=nr[e];if(t&&r.indexOf(t)<0){r.push(t)}}})}return sr(u(e),r,n)}function lr(e){if(z().readyState!=="loading"){e()}else{z().addEventListener("DOMContentLoaded",e)}}function ur(){if(U.config.includeIndicatorStyles!==false){z().head.insertAdjacentHTML("beforeend","<style> ."+U.config.indicatorClass+"{opacity:0;transition: opacity 200ms ease-in;} ."+U.config.requestClass+" ."+U.config.indicatorClass+"{opacity:1} ."+U.config.requestClass+"."+U.config.indicatorClass+"{opacity:1} </style>")}}function fr(){var e=z().querySelector('meta[name="htmx-config"]');if(e){return S(e.content)}else{return null}}function cr(){var e=fr();if(e){U.config=Q(U.config,e)}}lr(function(){cr();ur();var e=z().body;ct(e);var t=z().querySelectorAll("[hx-trigger='restored'],[data-hx-trigger='restored']");e.addEventListener("htmx:abort",function(e){var t=e.target;var r=_(t);if(r&&r.xhr){r.xhr.abort()}});window.onpopstate=function(e){if(e.state&&e.state.htmx){Ot();W(t,function(e){$(e,"htmx:restored",{document:z(),triggerEvent:$})})}};setTimeout(function(){$(e,"htmx:load",{})},0)});return U}()});
.vidgear/webgear/templates/404.html
ADDED
@@ -0,0 +1,5 @@
{% extends "base.html" %}
{% block content %}
<h1 class="glow">404</h1>
<h1>Page not found.</h1>
{% endblock %}
.vidgear/webgear/templates/500.html
ADDED
@@ -0,0 +1,5 @@
{% extends "base.html" %}
{% block content %}
<h1 class="glow">500</h1>
<h1>Server error.</h1>
{% endblock %}
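The two templates above extend base.html and are the error pages the Starlette layer renders. WebGear wires equivalent handlers internally, and webapp.py is not shown in this diff, but a standalone Starlette app would register them roughly like this (paths mirror this upload's layout; the sketch is illustrative only):

from starlette.applications import Starlette
from starlette.templating import Jinja2Templates

templates = Jinja2Templates(directory=".vidgear/webgear/templates")

async def not_found(request, exc):
    # Rendered for any unknown route.
    return templates.TemplateResponse("404.html", {"request": request}, status_code=404)

async def server_error(request, exc):
    # Rendered for unhandled exceptions.
    return templates.TemplateResponse("500.html", {"request": request}, status_code=500)

app = Starlette(exception_handlers={404: not_found, 500: server_error})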
.vidgear/webgear/templates/base.html
ADDED
@@ -0,0 +1,33 @@
<!DOCTYPE html>
<html>

<head>
    <!-- Meta Data -->
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
    <meta name="description" content="Video Streaming & Object Tracking Test using WebGear and Starlette.">
    <meta name="author" content="Steve Yin@Fresh Robotics">
    <title>Traffic Demo</title>
    <link rel="icon" type="image/png" sizes="32x32" href="{{ url_for('static', path='/img/favicon-32x32.png') }}">
    <!-- Meta Data -->
    <!-- Custom Theme files -->
    <link href="{{ url_for('static', path='/css/custom.css') }}" rel="stylesheet">
    <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.min.css" rel="stylesheet">
    <!-- //Custom Theme files -->
    <!-- web font -->
    <link href='//fonts.googleapis.com/css?family=Open+Sans:400,300,600,700,800' rel='stylesheet' type='text/css'>
    <!--//web font-->
</head>

<body class="bodycontainer">
    <!-- main -->
    {% block content %}{% endblock %}
    <!-- //main -->
    <!-- Custom Scripts -->
    <script src="https://cdnjs.cloudflare.com/ajax/libs/screenfull.js/5.1.0/screenfull.min.js"></script>
    <script src="{{ url_for('static', path='/js/custom.js') }}"></script>
    <!-- //Custom Scripts -->
</body>

</html>
.vidgear/webgear/templates/index.html
ADDED
@@ -0,0 +1,148 @@
1 |
+
<!DOCTYPE html>
|
2 |
+
<html lang="en">
|
3 |
+
|
4 |
+
<head>
|
5 |
+
<meta charset="UTF-8">
|
6 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
7 |
+
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
8 |
+
<meta name="description" content="Video Streaming & Object Tracking Test using WebGear and Starlette.">
|
9 |
+
<meta name="author" content="Steve Yin@Fresh Robotics">
|
10 |
+
<title>Traffic Demo</title>
|
11 |
+
</head>
|
12 |
+
|
13 |
+
<body>
|
14 |
+
<link href="https://cdn.jsdelivr.net/npm/daisyui@4.9.0/dist/full.min.css" rel="stylesheet" type="text/css" />
|
15 |
+
<script src="https://cdn.tailwindcss.com"></script>
|
16 |
+
<script src="https://unpkg.com/htmx.org@1.9.11" integrity="sha384-0gxUXCCR8yv9FM2b+U3FDbsKthCI66oH5IA9fHppQq9DDMHuMauqq1ZHBpJxQ0J0" crossorigin="anonymous"></script>
|
17 |
+
<script src="https://unpkg.com/htmx.org@1.9.11/dist/ext/response-targets.js"></script>
|
18 |
+
<script src="https://unpkg.com/htmx.org@1.9.11/dist/ext/sse.js"></script>
|
19 |
+
<!-- script src="https://unpkg.com/hyperscript.org@0.9.12"></script -->
|
20 |
+
<script>
|
21 |
+
htmx.defineExtension('submitjson', {
|
22 |
+
onEvent: function (name, evt) {
|
23 |
+
if (name === "htmx:configRequest") {
|
24 |
+
evt.detail.headers['Content-Type'] = "application/json"
|
25 |
+
// evt.detail.headers['X-API-Key'] = 'sjk_xxx'
|
26 |
+
}
|
27 |
+
},
|
28 |
+
encodeParameters: function(xhr, parameters, elt) {
|
29 |
+
xhr.overrideMimeType('text/json')
|
30 |
+
return (JSON.stringify({ payload: parameters }))
|
31 |
+
}
|
32 |
+
})
|
33 |
+
</script>
|
34 |
+
|
35 |
+
<div class="container mx-auto">
|
36 |
+
<div class="flex flex-col sm:flex-row flex-grow overflow-hidden">
|
37 |
+
<aside class="sm:w-1/3 md:w-1/3 lg:w-1/4 w-1/3 flex-shrink flex-grow-0 p-4">
|
38 |
+
<div class="sticky top-0 p-4 bg-gray-100 rounded-xl w-full">
|
39 |
+
<ul class="flex sm:flex-col overflow-hidden content-center justify-between">
|
40 |
+
<li class="py-2 bg-neutral-content hover:bg-indigo-300 rounded inline">
|
41 |
+
<img src="../static/img/video-streaming-outline-svgrepo-com.svg" class="w-7 sm:mx-2 mx-4 inline"/>
|
42 |
+
<label class="font-semibold text-lg">Camera Streams</label>
|
43 |
+
</li>
|
44 |
+
<li id="urls" hx-get="/urls" hx-target="this" hx-swap="outerHTML" hx-trigger="load"> </li>
|
45 |
+
|
46 |
+
<li class="py-2 bg-neutral-content hover:bg-indigo-300 rounded inline">
|
47 |
+
<img src="../static/img/deep-learning-svgrepo-com.svg" class="w-7 sm:mx-2 mx-4 inline"/>
|
48 |
+
<label class="font-semibold text-lg">YOLO Models</label>
|
49 |
+
</li>
|
50 |
+
<li id="models" hx-get="/models" hx-target="this" hx-swap="innerHTML" hx-trigger="load"></li>
|
51 |
+
</ul>
|
52 |
+
</div>
|
53 |
+
</aside>
|
54 |
+
|
55 |
+
<main role="main" class="w-full h-full flex-grow p-3 overflow-auto">
|
56 |
+
<h1 class="text-3xl md:text-5xl mb-4 font-extrabold" id="home">Traffic Monitoring Demo</h1>
|
57 |
+
|
58 |
+
<div class="stats stats-vertical lg:stats-horizontal shadow bg-gray-50 rounded-xl border my-3 w-full">
|
59 |
+
<div class="stat">
|
60 |
+
<div class="stat-title">Streaming</div>
|
61 |
+
<div class="stat-value" id="stream-status">Off</div>
|
62 |
+
<div class="stat-desc" id="stream-info" hx-get="/geturl" hx-target="this" hx-swap="innerHTML" hx-trigger="load">...</div>
|
63 |
+
</div>
|
64 |
+
<div class="stat">
|
65 |
+
<div class="stat-title">Tracking</div>
|
66 |
+
<div class="stat-value" id="tracking-status">Off</div>
|
67 |
+
<div class="stat-desc" id="tracking-info" hx-get="/getmodel" hx-target="this" hx-swap="innerHTML" hx-trigger="load">...</div>
|
68 |
+
</div>
|
69 |
+
<div class="stat">
|
70 |
+
<div class="stat-title">In Counts</div>
|
71 |
+
<div class="stat-value" id="in-counts" hx-ext="sse" sse-connect="/sseincounts" sse-swap="evt_in_counts">...</div>
|
72 |
+
<div class="stat-desc"></div>
|
73 |
+
</div>
|
74 |
+
<div class="stat">
|
75 |
+
<div class="stat-title">Out Counts</div>
|
76 |
+
<div class="stat-value" id="out-counts" hx-ext="sse" sse-connect="/sseoutcounts" sse-swap="evt_out_counts">...</div>
|
77 |
+
<div class="stat-desc"></div>
|
78 |
+
</div>
|
79 |
+
</div>
|
80 |
+
|
81 |
+
<div class="flex p-3">
|
82 |
+
<div class="flex items-center me-4" id="stream-toggle-div">
|
83 |
+
<label class="ms-2 text-lg font-medium text-gray-900 dark:text-gray-300">Streaming OFF</label>
|
84 |
+
<input type="checkbox" class="ms-2 toggle toggle-lg toggle-primary" id="stream-toggle" name="stream_switch"
|
85 |
+
hx-post="/streamswitch" hx-trigger="change" hx-ext="submitjson" hx-target="#stream-status" hx-swap-oob="innerHTML"
|
86 |
+
/>
|
87 |
+
<label class="ms-2 text-lg font-medium text-gray-900 dark:text-gray-300">ON</label>
|
88 |
+
<script>
|
89 |
+
var stream_toggle = document.getElementById('stream-toggle');
|
90 |
+
stream_toggle.onchange = function() {
|
91 |
+
var tracking_toggle = document.getElementById('tracking-toggle');
|
92 |
+
tracking_toggle.checked = false;
|
93 |
+
var xhr = new XMLHttpRequest();
|
94 |
+
xhr.open(method="POST", url="/trackingswitch", async=true);
|
95 |
+
xhr.setRequestHeader('Content-Type', 'application/json');
|
96 |
+
xhr.send(JSON.stringify({}));
|
97 |
+
var tracking_status = document.getElementById('tracking-status');
|
98 |
+
tracking_status.innerHTML = "off"
|
99 |
+
if (this.checked == true) {
|
100 |
+
setTimeout(() => {tracking_toggle.disabled = !this.checked}, 2500);
|
101 |
+
} else {
|
102 |
+
tracking_toggle.disabled = !this.checked;
|
103 |
+
}
|
104 |
+
};
|
105 |
+
</script>
|
106 |
+
</div>
|
107 |
+
<div class="flex items-center me-4" id="tracking-toggle-div">
|
108 |
+
<label class="ms-2 text-lg font-medium text-gray-900 dark:text-gray-300">Tracking OFF</label>
|
109 |
+
<input type="checkbox" class="ms-2 toggle toggle-lg toggle-secondary" id="tracking-toggle" name="tracking_switch" disabled
|
110 |
+
hx-post="/trackingswitch" hx-trigger="change" hx-ext="submitjson" hx-target="#tracking-status" hx-swap-oob="innerHTML"
|
111 |
+
/>
|
112 |
+
<label class="ms-2 text-lg font-medium text-gray-900 dark:text-gray-300">ON</label>
|
113 |
+
</div>
|
114 |
+
</div>
|
115 |
+
|
116 |
+
<div>
|
117 |
+
<iframe class="w-full h-full aspect-video" src="/video1"> </iframe>
|
118 |
+
</div>
|
119 |
+
</main>
|
120 |
+
</div>
|
121 |
+
</div>
|
122 |
+
|
123 |
+
<footer class="bg-indigo-800 mt-auto">
|
124 |
+
<div class="px-4 py-3 text-white mx-auto">
|
125 |
+
<h1 class="text-2xl hidden sm:block mb-2">Acknowledgement</h1>
|
126 |
+
<div class="flex">
|
127 |
+
<div class="flex-grow flex flex-col">
|
128 |
+
<a href="https://www.youtube.com" target="_blank" class="text-xs uppercase tracking-wider">YouTube</a>
|
129 |
+
<a href="https://huggingface.co" target="_blank" class="text-xs uppercase tracking-wider">HuggingFace</a>
|
130 |
+
</div>
|
131 |
+
<div class="flex-grow flex flex-col">
|
132 |
+
<a href="https://www.ultralytics.com/" target="_blank" class="text-xs uppercase tracking-wider">Ultralytics YOLOv8</a>
|
133 |
+
<a href="https://abhitronix.github.io/vidgear/v0.3.2-stable/" target="_blank" class="text-xs uppercase tracking-wider">VidGear</a>
|
134 |
+
<a href="https://www.starlette.io/" target="_blank" class="text-xs uppercase tracking-wider">Starlette</a>
|
135 |
+
</div>
|
136 |
+
<div class="flex-grow flex flex-col">
|
137 |
+
<a href="https://tailwindcss.com/" target="_blank" class="text-xs uppercase tracking-wider">Tailwind CSS</a>
|
138 |
+
<a href="https://htmx.org/" target="_blank" class="text-xs uppercase tracking-wider">HTMX</a>
|
139 |
+
</div>
|
140 |
+
</div>
|
141 |
+
<div class="text-right text-xs py-2">
|
142 |
+
<a href="">©2024 Fresh Consulting, Inc.</a>
|
143 |
+
</div>
|
144 |
+
</div>
|
145 |
+
</footer>
|
146 |
+
|
147 |
+
</body>
|
148 |
+
</html>
|
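The `sse-connect="/sseoutcounts"` / `sse-swap="evt_out_counts"` attributes on the stats block above assume a server-sent-events endpoint on the backend; `webapp.py` below imports `EventSourceResponse` from sse-starlette for exactly this purpose. As a rough, hypothetical sketch only (not the app's actual handler, and the counter variable is a stand-in for the real out-count source), such an endpoint could look like:

```python
# Hypothetical sketch only -- not the app's actual /sseoutcounts handler.
import asyncio

from sse_starlette import EventSourceResponse


async def sse_out_counts(request):
    """Push out-count updates to the htmx SSE extension as 'evt_out_counts' events."""
    async def event_stream():
        count = 0  # stand-in for the real out-count source in the app
        while not await request.is_disconnected():
            count += 1
            yield {"event": "evt_out_counts", "data": str(count)}
            await asyncio.sleep(1.0)

    return EventSourceResponse(event_stream())
```

The htmx SSE extension then swaps each `evt_out_counts` event's data into the `#out-counts` element declared above.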
.vidgear/webgear/templates/partials/ack.html
ADDED
@@ -0,0 +1,5 @@
<div>
  {% for row in table %}
  <h4>{{ row }}</h4>
  {% endfor %}
</div>
.vidgear/webgear/templates/partials/camera_streams.html
ADDED
@@ -0,0 +1,53 @@
<div class="p-3" id="camera-stream-form">
  <div class="collapse collapse-plus bg-base-200">
    <input type="checkbox" />
    <div class="collapse-title">
      <label class="w-full font-semibold text-gray-900 dark:text-white"> Add a camera stream </label>
    </div>
    <div class="collapse-content" hx-ext="submitjson">
      <div>
        <form id="add-url-form">
          <li><label class="w-full font-semibold text-gray-900 dark:text-white"> camera location label (within 18 chars): </label></li>
          <li><input class="w-full font-semibold text-red-900 dark:text-white" name="CamLoc" type="text"></li>
          <li><label class="w-full font-semibold text-gray-900 dark:text-white"> and its streaming URL: </label></li>
          <li><input class="w-full font-semibold text-red-900 dark:text-white" name="URL" type="text"></li>
          <button class="btn btn-sm btn-block btn-neutral mt-2"
            hx-post="/addurl"
            hx-target="#camera-stream-form"
            hx-swap="outerHTML"
            type="submit">Add URL to List
          </button>
        </form>
      </div>
      <div>
        <label id="add-url-ack" class="font-semibold text-red-900"></label>
      </div>
    </div>
  </div>
  <div class="collapse collapse-open bg-base-200">
    <div class="collapse-title">
      <label class="w-full font-semibold text-gray-900 dark:text-white">Or, select a stream:</label>
    </div>
    <div class="collapse-content">
      <form class="form-control" id="select_camera" hx-post="/seturl" hx-ext="submitjson" hx-target="#stream-info">
        {% for row in table %}
        {% if row.selected == true %}
        <li class="w-full border-b border-gray-200 rounded-t-lg dark:border-gray-600 flex items-center ps-3 space-x-2 inline">
          <input type="radio" class="w-2 h-2 radio checked:bg-red-500" id="{{ row.name }}" name="cam_url" value="{{ row.value }}" checked>
          <label for="{{ row.name }}" class="badge badge-neutral" size='md'> <a href="{{ row.value }}" target="_blank" rel="noopener noreferrer" class="text-wrap">
            {{ row.name }}</a> </label>
        </li>
        {% else %}
        <li class="w-full border-b border-gray-200 rounded-t-lg dark:border-gray-600 flex items-center ps-3 space-x-2 inline">
          <input type="radio" class="w-2 h-2 radio checked:bg-red-500" id="{{ row.name }}" name="cam_url" value="{{ row.value }}">
          <label for="{{ row.name }}" class="badge badge-neutral" size='md'> <a href="{{ row.value }}" target="_blank" rel="noopener noreferrer" class="text-wrap">
            {{ row.name }}</a> </label>
        </li>
        {% endif %}
        {% endfor %}
        <button id="set-url-button" class="btn btn-sm btn-block btn-neutral mt-2" type="submit">Set Camera Stream</button>
        <label id="set-url-ack" class="font-semibold text-red-900"></label>
      </form>
    </div>
  </div>
</div>
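The radio list in this partial is rendered from rows carrying `name`, `value`, and `selected` keys; `make_table_from_dict` in `helper.py` below produces exactly that shape. A small illustrative call, using two of the URLs from the `url_dict` defined in `webapp.py`:

```python
# Illustrative call only: build the rows this partial iterates over.
from helper import make_table_from_dict

url_dict = {
    "Peace Bridge US": "https://youtu.be/9En2186vo5g",
    "Time Square NY": "https://youtu.be/UVftxDFol90",
}
table = make_table_from_dict(url_dict, selected_key="Peace Bridge US")
# table == [
#     {"name": "Peace Bridge US", "value": "https://youtu.be/9En2186vo5g", "selected": True},
#     {"name": "Time Square NY", "value": "https://youtu.be/UVftxDFol90", "selected": False},
# ]
```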
.vidgear/webgear/templates/partials/yolo_models.html
ADDED
@@ -0,0 +1,31 @@
<div class="p-3" id="model-selection-form">
  <div class="collapse collapse-open bg-base-200">
    <div class="collapse-content">
      <div>
        <form id="model-list-form">
          {% for row in table %}
          {% if row.selected == true %}
          <li class="w-full h-auto border-b border-gray-200 rounded-t-lg dark:border-gray-600 flex items-center ps-3 space-x-2 inline">
            <input type="radio" class="w-2 h-2 radio checked:bg-red-500" id="{{ row.name }}" name="model_path" value="{{ row.value }}" checked>
            <label for="{{ row.name }}" class="badge badge-neutral text-wrap">{{ row.name }}</label>
          </li>
          {% else %}
          <li class="w-full h-auto border-b border-gray-200 rounded-t-lg dark:border-gray-600 flex items-center ps-3 space-x-2 inline">
            <input type="radio" class="w-2 h-2 radio checked:bg-red-500" id="{{ row.name }}" name="model_path" value="{{ row.value }}">
            <label for="{{ row.name }}" class="badge badge-neutral text-wrap">{{ row.name }}</label>
          </li>
          {% endif %}
          {% endfor %}
          <button class="btn btn-sm btn-block btn-neutral mt-2"
            hx-ext="submitjson"
            hx-post="/setmodel"
            hx-target="#tracking-info"
            hx-swap="innerHTML"
            _="on click toggle @disabled until htmx:afterOnLoad"
            type="submit">Set YOLO Model</button>
        </form>
        <label id="set-model-ack" class="font-semibold text-red-900"></label>
      </div>
    </div>
  </div>
</div>
helper.py
ADDED
@@ -0,0 +1,209 @@
from threading import Thread
from datetime import datetime
import cv2
import uvicorn
import contextlib
import threading
import time
import requests


class Server(uvicorn.Server):
    '''
    Run a uvicorn server in a separate thread so the caller can proceed,
    e.g. to launch a gradio UI.
    https://github.com/encode/uvicorn/issues/742

    A couple of other links for reference --
    https://stackoverflow.com/questions/61577643/python-how-to-use-fastapi-and-uvicorn-run-without-blocking-the-thread
    https://stackoverflow.com/questions/76142431/how-to-run-another-application-within-the-same-running-event-loop/76148361#76148361
    '''
    def install_signal_handlers(self):
        pass

    @contextlib.contextmanager
    def run_in_thread(self):
        thread = threading.Thread(target=self.run)
        thread.start()
        try:
            while not self.started:
                time.sleep(1e-3)
            yield
        finally:
            self.should_exit = True
            thread.join()


class CountsPerSec:
    """
    Class that tracks the number of occurrences ("counts") of an
    arbitrary event and returns the frequency in occurrences
    (counts) per second. The caller must increment the count.
    """

    def __init__(self):
        self._start_time = None
        self._num_occurrences = 0

    def start(self):
        self._start_time = datetime.now()
        return self

    def increment(self):
        self._num_occurrences += 1

    def countsPerSec(self):
        elapsed_time = (datetime.now() - self._start_time).total_seconds()
        return self._num_occurrences / elapsed_time if elapsed_time > 0 else 0


class VideoGet:
    """
    Class that continuously gets frames from a VideoCapture object
    with a dedicated thread.
    """

    def __init__(self, src=0):
        self.stream = cv2.VideoCapture(src)
        (self.grabbed, self.frame) = self.stream.read()
        self.tn = Thread(target=self.get, args=())
        self.stopped = False

    def start(self):
        self.tn.start()
        return self

    def get(self):
        while not self.stopped:
            if not self.grabbed:
                self.stop()
            else:
                (self.grabbed, self.frame) = self.stream.read()

    def stop(self):
        # signal the worker loop to exit before joining, otherwise join() can block forever;
        # skip the join when stop() is called from the worker thread itself
        self.stopped = True
        if threading.current_thread() is not self.tn:
            self.tn.join()


class VideoShow:
    """
    Class that continuously shows a frame using a dedicated thread.
    """

    def __init__(self, frame=None):
        self.frame = frame
        self.tn = Thread(target=self.show, args=())
        self.stopped = False

    def start(self):
        self.tn.start()
        return self

    def show(self):
        while not self.stopped:
            cv2.imshow("Video", self.frame)
            if cv2.waitKey(1) == ord("q"):
                self.stopped = True

    def stop(self):
        # signal the display loop to exit before joining the thread
        self.stopped = True
        if threading.current_thread() is not self.tn:
            self.tn.join()


def show_fps(frame, iterations_per_sec):
    """
    Add iterations per second text to lower-left corner of a frame.
    """
    cv2.putText(
        img=frame,
        text="{:.0f} fps".format(iterations_per_sec),
        org=(1000, 50),
        fontFace=cv2.FONT_HERSHEY_SIMPLEX,
        fontScale=0.8,
        color=(0, 255, 255),
        thickness=1,
        lineType=cv2.LINE_AA
    )
    cv2.putText(
        img=frame,  # annotated_frame,
        text=datetime.now().strftime("%m/%d/%Y %H:%M:%S"),
        org=(500, 50),
        fontFace=cv2.FONT_HERSHEY_SIMPLEX,
        fontScale=0.8,
        color=(0, 255, 255),
        thickness=1,
        lineType=cv2.LINE_AA
    )

    return frame


def draw_text(
    img,
    text,
    pos=(0, 0),
    font=cv2.FONT_HERSHEY_SIMPLEX,
    font_scale=1,
    font_thickness=2,
    line_type=cv2.LINE_AA,
    text_color=(0, 255, 0),
    text_color_bg=(0, 0, 0)
) -> tuple:
    """Draw text with a filled background rectangle on an image frame.

    Args:
        img (np.ndarray): image frame to draw on (modified in place).
        text (str): text to render.
        pos (tuple, optional): anchor position of the text. Defaults to (0, 0).
        font (int, optional): OpenCV font face. Defaults to cv2.FONT_HERSHEY_SIMPLEX.
        font_scale (int, optional): font scale factor. Defaults to 1.
        font_thickness (int, optional): stroke thickness. Defaults to 2.
        line_type (int, optional): OpenCV line type. Defaults to cv2.LINE_AA.
        text_color (tuple, optional): BGR text color. Defaults to (0, 255, 0).
        text_color_bg (tuple, optional): BGR background color. Defaults to (0, 0, 0).

    Returns:
        tuple: (width, height) of the rendered text.
    """
    x, y = pos
    text_size, _ = cv2.getTextSize(text, font, font_scale, font_thickness)
    text_w, text_h = text_size
    cv2.rectangle(img, (x, y + 10), (x + text_w, max(0, y - text_h - 10)), text_color_bg, -1)
    cv2.putText(
        img=img,
        text=text,
        org=pos,
        fontFace=font,
        fontScale=font_scale,
        color=text_color,
        thickness=font_thickness,
        lineType=line_type
    )

    return text_size


def try_site(youtube_url: str) -> bool:
    """Check whether a YouTube url is playable.

    Args:
        youtube_url (str): a given url for testing

    Returns:
        bool: whether or not that youtube_url is playable
    """
    pattern = '"playabilityStatus":{"status":"ERROR","reason":"Video unavailable"'
    request = requests.get(youtube_url)
    return False if pattern in request.text else True


def make_table_from_dict(obj: dict, selected_key: str) -> list:
    table = []
    for k, v in obj.items():
        if k == selected_key:
            # print(k, v, selected_key)
            table.append({"name": k, "value": v, "selected": True})
        else:
            table.append({"name": k, "value": v, "selected": False})

    return table
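For reference, a hedged usage sketch of the `Server` helper above: `uvicorn.Config` wraps any ASGI application (here assumed, not guaranteed, to be importable as `webapp:app`), and `run_in_thread()` keeps it serving in the background for the life of the `with` block.

```python
# Hedged usage sketch; "webapp:app" assumes the Starlette app is importable that way.
import time

import uvicorn

from helper import Server

config = uvicorn.Config(app="webapp:app", host="0.0.0.0", port=8000, log_level="info")
server = Server(config=config)

with server.run_in_thread():
    # uvicorn is serving in a background thread for the life of this block,
    # so the caller can launch a UI, run checks, etc. without blocking on run().
    time.sleep(5)
# leaving the block sets should_exit and joins the server thread
```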
requirements.txt
ADDED
@@ -0,0 +1,10 @@
numpy
opencv-python-headless
shapely>=2.0.0
simplejpeg
sse-starlette
starlette
ultralytics>=8.1.20
uvicorn
vidgear==0.3.2
yt-dlp==2023.10.13
ultralytics_solutions_modified/__init__.py
ADDED
@@ -0,0 +1 @@
# Ultralytics YOLO 🚀, AGPL-3.0 license
ultralytics_solutions_modified/object_counter.py
ADDED
@@ -0,0 +1,365 @@
1 |
+
# Ultralytics YOLO 🚀, AGPL-3.0 license
|
2 |
+
from collections import defaultdict
|
3 |
+
import logging
|
4 |
+
import cv2
|
5 |
+
import numpy as np
|
6 |
+
from ultralytics.utils.checks import check_imshow, check_requirements
|
7 |
+
from ultralytics.utils.plotting import Annotator, colors
|
8 |
+
from shapely.geometry import LineString, Point, Polygon
|
9 |
+
|
10 |
+
|
11 |
+
# create logger
|
12 |
+
logging.getLogger(__name__).addHandler(logging.NullHandler())  # addHandler() returns None, so don't bind it to a name
|
13 |
+
|
14 |
+
|
15 |
+
# need shapely>=2.0.0
|
16 |
+
check_requirements("shapely>=2.0.0")
|
17 |
+
|
18 |
+
|
19 |
+
class ObjectCounter:
|
20 |
+
"""
|
21 |
+
A class to manage the counting of objects in a real-time video stream
|
22 |
+
based on their tracks.
|
23 |
+
"""
|
24 |
+
def __init__(self):
|
25 |
+
"""
|
26 |
+
Initializes the Counter with default values for various tracking and
|
27 |
+
counting parameters.
|
28 |
+
"""
|
29 |
+
# Mouse events
|
30 |
+
self.is_drawing = False
|
31 |
+
self.selected_point = None
|
32 |
+
|
33 |
+
# Region & Line Information
|
34 |
+
self.reg_pts = [(20, 400), (1260, 400)]
|
35 |
+
self.line_dist_thresh = 15
|
36 |
+
self.counting_region = None
|
37 |
+
self.region_color = (255, 0, 255)
|
38 |
+
self.region_thickness = 5
|
39 |
+
|
40 |
+
# Image and annotation Information
|
41 |
+
self.im0 = None
|
42 |
+
self.tf = None
|
43 |
+
self.view_img = False
|
44 |
+
self.view_in_counts = True
|
45 |
+
self.view_out_counts = True
|
46 |
+
|
47 |
+
self.names = None # Classes names
|
48 |
+
self.annotator = None # Annotator
|
49 |
+
|
50 |
+
# Object counting Information
|
51 |
+
self.in_counts = 0
|
52 |
+
self.out_counts = 0
|
53 |
+
self.out_counts_prev = self.out_counts
|
54 |
+
self.in_counts_prev = self.in_counts
|
55 |
+
self.counting_list = []
|
56 |
+
self.count_txt_thickness = 0
|
57 |
+
self.count_txt_color = (0, 0, 0)
|
58 |
+
self.count_color = (255, 255, 255)
|
59 |
+
|
60 |
+
# Tracks info
|
61 |
+
self.track_history = defaultdict(list)
|
62 |
+
self.track_thickness = 2
|
63 |
+
self.draw_tracks = False
|
64 |
+
self.draw_boxes = False # added by steve.yin @ 3/1/2024
|
65 |
+
self.track_color = (0, 255, 0)
|
66 |
+
|
67 |
+
# Check if environment support imshow
|
68 |
+
self.env_check = check_imshow(warn=True)
|
69 |
+
|
70 |
+
def set_args(
|
71 |
+
self,
|
72 |
+
classes_names,
|
73 |
+
reg_pts,
|
74 |
+
count_reg_color=(255, 0, 255),
|
75 |
+
line_thickness=2,
|
76 |
+
track_thickness=2,
|
77 |
+
view_img=False,
|
78 |
+
view_in_counts=True,
|
79 |
+
view_out_counts=True,
|
80 |
+
draw_tracks=False,
|
81 |
+
draw_boxes=False, # added by steve.yin @ 3/1/2024
|
82 |
+
draw_reg_pts=True, # added by steve.yin @ 3/1/2024
|
83 |
+
count_txt_thickness=2,
|
84 |
+
count_txt_color=(0, 0, 0),
|
85 |
+
count_color=(255, 255, 255),
|
86 |
+
track_color=(0, 255, 0),
|
87 |
+
region_thickness=5,
|
88 |
+
line_dist_thresh=15,
|
89 |
+
):
|
90 |
+
"""
|
91 |
+
Configures the Counter's image, bounding box line thickness,
|
92 |
+
and counting region points.
|
93 |
+
|
94 |
+
Args:
|
95 |
+
line_thickness (int): Line thickness for bounding boxes.
|
96 |
+
view_img (bool): Flag to control display the video stream.
|
97 |
+
view_in_counts (bool): Flag to control display the incounts.
|
98 |
+
view_out_counts (bool): Flag to control display the outcounts.
|
99 |
+
reg_pts (list): Initial list of points for the counting region.
|
100 |
+
classes_names (dict): Classes names
|
101 |
+
track_thickness (int): Track thickness
|
102 |
+
draw_tracks (Bool): draw tracks
|
103 |
+
draw_boxes (Bool): draw boxes
|
104 |
+
draw_reg_pts (Bool): draw reg_pts
|
105 |
+
count_txt_thickness (int): Text thickness object counting display
|
106 |
+
count_txt_color (RGB color): count text color value
|
107 |
+
count_color (RGB color): count text background color value
|
108 |
+
count_reg_color (RGB color): Color of object counting region
|
109 |
+
track_color (RGB color): color for tracks
|
110 |
+
region_thickness (int): Object counting Region thickness
|
111 |
+
line_dist_thresh (int): Euclidean Distance threshold line counter
|
112 |
+
"""
|
113 |
+
self.tf = line_thickness
|
114 |
+
self.view_img = view_img
|
115 |
+
self.view_in_counts = view_in_counts
|
116 |
+
self.view_out_counts = view_out_counts
|
117 |
+
self.track_thickness = track_thickness
|
118 |
+
self.draw_tracks = draw_tracks
|
119 |
+
self.draw_boxes = draw_boxes # added by steve.yin @ 3/1/2024
|
120 |
+
self.draw_reg_pts = draw_reg_pts # added by steve.yin @ 3/1/2024
|
121 |
+
|
122 |
+
# Region and line selection
|
123 |
+
if len(reg_pts) == 2:
|
124 |
+
logging.info("Line Counter Initiated.")
|
125 |
+
self.reg_pts = reg_pts
|
126 |
+
self.counting_region = LineString(self.reg_pts)
|
127 |
+
u = np.array([self.reg_pts[0][0], self.reg_pts[0][1]])
|
128 |
+
v = np.array([self.reg_pts[1][0], self.reg_pts[1][1]])
|
129 |
+
elif len(reg_pts) == 4:
|
130 |
+
logging.info("Region Counter Initiated.")
|
131 |
+
self.reg_pts = reg_pts
|
132 |
+
self.counting_region = Polygon(self.reg_pts)
|
133 |
+
u = np.array([
|
134 |
+
(self.reg_pts[0][0] + self.reg_pts[1][0]) / 2,
|
135 |
+
(self.reg_pts[0][1] + self.reg_pts[1][1]) / 2,
|
136 |
+
])
|
137 |
+
v = np.array([
|
138 |
+
(self.reg_pts[2][0] + self.reg_pts[3][0]) / 2,
|
139 |
+
(self.reg_pts[2][1] + self.reg_pts[3][1]) / 2,
|
140 |
+
])
|
141 |
+
else:
|
142 |
+
logging.warning(
|
143 |
+
"Invalid Region points, which can only be 2 or 4. " +
|
144 |
+
"Using Line Counter Instead!"
|
145 |
+
)
|
146 |
+
self.counting_region = LineString(self.reg_pts)
|
147 |
+
u = np.array(self.counting_region.coords[0])
|
148 |
+
v = np.array(
|
149 |
+
self.counting_region.coords[len(self.counting_region.coords)-1]
|
150 |
+
)
|
151 |
+
# get line orientation, rotate ccw 90degrees, get line normal vector
|
152 |
+
n = v - u
|
153 |
+
nvec = np.array([-n[1], n[0]])
|
154 |
+
# print(f"v: {v}, u: {u}, n: {n}, nvec0: {nvec}")
|
155 |
+
self.counting_region_nvec = nvec / (np.linalg.norm(nvec) + 1e-6)
|
156 |
+
# print(f"nvec: {self.counting_region_nvec}")
|
157 |
+
|
158 |
+
self.names = classes_names
|
159 |
+
self.track_color = track_color
|
160 |
+
self.count_txt_thickness = count_txt_thickness
|
161 |
+
self.count_txt_color = count_txt_color
|
162 |
+
self.count_color = count_color
|
163 |
+
self.region_color = count_reg_color
|
164 |
+
self.region_thickness = region_thickness
|
165 |
+
self.line_dist_thresh = line_dist_thresh
|
166 |
+
|
167 |
+
def mouse_event_for_region(self, event, x, y, flags, params):
|
168 |
+
"""
|
169 |
+
This function is designed to move region with mouse events in a
|
170 |
+
real-time video stream.
|
171 |
+
|
172 |
+
Args:
|
173 |
+
event (int): The type of mouse event (e.g., cv2.EVENT_MOUSEMOVE,
|
174 |
+
cv2.EVENT_LBUTTONDOWN, etc.).
|
175 |
+
x (int): The x-coordinate of the mouse pointer.
|
176 |
+
y (int): The y-coordinate of the mouse pointer.
|
177 |
+
flags (int): Any flags associated with the event (e.g.,
|
178 |
+
cv2.EVENT_FLAG_CTRLKEY, cv2.EVENT_FLAG_SHIFTKEY, etc.).
|
179 |
+
params (dict): Additional parameters passing to the function.
|
180 |
+
"""
|
181 |
+
if event == cv2.EVENT_LBUTTONDOWN:
|
182 |
+
for i, point in enumerate(self.reg_pts):
|
183 |
+
if (
|
184 |
+
isinstance(point, (tuple, list))
|
185 |
+
and len(point) >= 2
|
186 |
+
and (abs(x - point[0]) < 10 and abs(y - point[1]) < 10)
|
187 |
+
):
|
188 |
+
self.selected_point = i
|
189 |
+
self.is_drawing = True
|
190 |
+
break
|
191 |
+
|
192 |
+
elif event == cv2.EVENT_MOUSEMOVE:
|
193 |
+
if self.is_drawing and self.selected_point is not None:
|
194 |
+
self.reg_pts[self.selected_point] = (x, y)
|
195 |
+
self.counting_region = Polygon(self.reg_pts)
|
196 |
+
|
197 |
+
elif event == cv2.EVENT_LBUTTONUP:
|
198 |
+
self.is_drawing = False
|
199 |
+
self.selected_point = None
|
200 |
+
|
201 |
+
def extract_and_process_tracks(self, tracks):
|
202 |
+
"""
|
203 |
+
Extracts and processes tracks for object counting in a video stream.
|
204 |
+
"""
|
205 |
+
boxes = tracks[0].boxes.xyxy.cpu()
|
206 |
+
clss = tracks[0].boxes.cls.cpu().tolist()
|
207 |
+
track_ids = tracks[0].boxes.id.int().cpu().tolist()
|
208 |
+
|
209 |
+
# Annotator Init and region drawing
|
210 |
+
self.annotator = Annotator(self.im0, self.tf, self.names)
|
211 |
+
# self.annotator.draw_region(
|
212 |
+
# reg_pts=self.reg_pts,
|
213 |
+
# color=self.region_color,
|
214 |
+
# thickness=self.region_thickness
|
215 |
+
# )
|
216 |
+
|
217 |
+
# Extract tracks
|
218 |
+
for box, track_id, cls in zip(boxes, track_ids, clss):
|
219 |
+
# Draw bounding box [modified by steve.yin @ 3/1/2024]
|
220 |
+
if self.draw_reg_pts:
|
221 |
+
self.annotator.draw_region(
|
222 |
+
reg_pts=self.reg_pts,
|
223 |
+
color=self.region_color,
|
224 |
+
thickness=self.region_thickness
|
225 |
+
)
|
226 |
+
if self.draw_boxes:
|
227 |
+
self.annotator.box_label(
|
228 |
+
box=box,
|
229 |
+
label=f"{track_id}:{self.names[cls]}",
|
230 |
+
color=colors(int(cls), True)
|
231 |
+
)
|
232 |
+
|
233 |
+
# Draw Tracks
|
234 |
+
track_line = self.track_history[track_id]
|
235 |
+
track_line.append((
|
236 |
+
float((box[0] + box[2]) / 2), float((box[1] + box[3]) / 2)
|
237 |
+
))
|
238 |
+
if len(track_line) > 30:
|
239 |
+
track_line.pop(0)
|
240 |
+
|
241 |
+
# Draw track trails
|
242 |
+
if self.draw_tracks:
|
243 |
+
self.annotator.draw_centroid_and_tracks(
|
244 |
+
track=track_line,
|
245 |
+
color=self.track_color,
|
246 |
+
track_thickness=self.track_thickness
|
247 |
+
)
|
248 |
+
|
249 |
+
prev_position = self.track_history[track_id][0] \
|
250 |
+
if len(self.track_history[track_id]) > 1 else None
|
251 |
+
|
252 |
+
# Count objects
|
253 |
+
if len(self.reg_pts) == 4:
|
254 |
+
if (
|
255 |
+
prev_position is not None
|
256 |
+
and self.counting_region.contains(Point(track_line[-1]))
|
257 |
+
and track_id not in self.counting_list
|
258 |
+
):
|
259 |
+
self.counting_list.append(track_id)
|
260 |
+
obj_track_vec = np.array([
|
261 |
+
track_line[-1][0] - prev_position[0],
|
262 |
+
track_line[-1][1] - prev_position[1]
|
263 |
+
])
|
264 |
+
if np.sign(
|
265 |
+
np.dot(obj_track_vec, self.counting_region_nvec)
|
266 |
+
) < 0:
|
267 |
+
# if (box[0] - prev_position[0]) * (self.counting_region.centroid.x - prev_position[0]) > 0:
|
268 |
+
self.out_counts += 1
|
269 |
+
else:
|
270 |
+
self.in_counts += 1
|
271 |
+
elif len(self.reg_pts) == 2:
|
272 |
+
if prev_position is not None:
|
273 |
+
distance = Point(track_line[-1]) \
|
274 |
+
.distance(self.counting_region)
|
275 |
+
if (
|
276 |
+
distance < self.line_dist_thresh and
|
277 |
+
track_id not in self.counting_list
|
278 |
+
):
|
279 |
+
self.counting_list.append(track_id)
|
280 |
+
obj_track_vec = np.array([
|
281 |
+
track_line[-1][0] - prev_position[0],
|
282 |
+
track_line[-1][1] - prev_position[1]
|
283 |
+
])
|
284 |
+
logging.info(f"obj_track_vec: {obj_track_vec}")
|
285 |
+
if np.sign(
|
286 |
+
np.dot(obj_track_vec, self.counting_region_nvec)
|
287 |
+
) < 0:
|
288 |
+
# if (box[0] - prev_position[0]) * (self.counting_region.centroid.x - prev_position[0]) > 0:
|
289 |
+
self.out_counts += 1
|
290 |
+
else:
|
291 |
+
self.in_counts += 1
|
292 |
+
|
293 |
+
self.outcounts_updated()
|
294 |
+
self.incounts_updated()
|
295 |
+
self.out_counts_prev = self.out_counts
|
296 |
+
self.in_counts_prev = self.in_counts
|
297 |
+
incount_label = f"In: {self.in_counts}"
|
298 |
+
outcount_label = f"Out: {self.out_counts}"
|
299 |
+
|
300 |
+
# Display counts based on user choice
|
301 |
+
counts_label = None
|
302 |
+
if not self.view_in_counts and not self.view_out_counts:
|
303 |
+
counts_label = None
|
304 |
+
elif not self.view_in_counts:
|
305 |
+
counts_label = outcount_label
|
306 |
+
elif not self.view_out_counts:
|
307 |
+
counts_label = incount_label
|
308 |
+
else:
|
309 |
+
counts_label = f"{incount_label} | {outcount_label}"
|
310 |
+
|
311 |
+
if counts_label is not None:
|
312 |
+
self.annotator.count_labels(
|
313 |
+
counts=counts_label,
|
314 |
+
count_txt_size=self.count_txt_thickness,
|
315 |
+
txt_color=self.count_txt_color,
|
316 |
+
color=self.count_color,
|
317 |
+
)
|
318 |
+
|
319 |
+
def display_frames(self):
|
320 |
+
"""Display frame."""
|
321 |
+
if self.env_check:
|
322 |
+
cv2.namedWindow("Ultralytics YOLOv8 Object Counter")
|
323 |
+
# only add the mouse-event callback if the user has drawn a 4-point region
|
324 |
+
if len(self.reg_pts) == 4:
|
325 |
+
cv2.setMouseCallback(
|
326 |
+
"Ultralytics YOLOv8 Object Counter",
|
327 |
+
self.mouse_event_for_region,
|
328 |
+
{"region_points": self.reg_pts}
|
329 |
+
)
|
330 |
+
cv2.imshow("Ultralytics YOLOv8 Object Counter", self.im0)
|
331 |
+
# Break Window
|
332 |
+
if cv2.waitKey(1) & 0xFF == ord("q"):
|
333 |
+
return
|
334 |
+
|
335 |
+
def start_counting(self, im0, tracks):
|
336 |
+
"""
|
337 |
+
Main function to start the object counting process.
|
338 |
+
|
339 |
+
Args:
|
340 |
+
im0 (ndarray): Current frame from the video stream.
|
341 |
+
tracks (list): List of tracks obtained from the object tracking process.
|
342 |
+
"""
|
343 |
+
self.im0 = im0 # store image
|
344 |
+
|
345 |
+
if tracks[0].boxes.id is None:
|
346 |
+
if self.view_img:
|
347 |
+
self.display_frames()
|
348 |
+
return im0
|
349 |
+
self.extract_and_process_tracks(tracks)
|
350 |
+
|
351 |
+
if self.view_img:
|
352 |
+
self.display_frames()
|
353 |
+
return self.im0
|
354 |
+
|
355 |
+
def incounts_updated(self):
|
356 |
+
if self.in_counts_prev < self.in_counts:
|
357 |
+
yield f"{self.in_counts}"
|
358 |
+
|
359 |
+
def outcounts_updated(self):
|
360 |
+
if self.out_counts_prev < self.out_counts:
|
361 |
+
yield f"{self.out_counts}"
|
362 |
+
|
363 |
+
|
364 |
+
if __name__ == "__main__":
|
365 |
+
ObjectCounter()
|
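The in/out decision in `extract_and_process_tracks` reduces to the sign of the dot product between an object's displacement and the counting line's normal (the line direction rotated 90 degrees counter-clockwise). A standalone illustration with made-up coordinates, mirroring the vectors computed in `set_args`:

```python
# Standalone illustration with made-up coordinates; not tied to any real frame.
import numpy as np

u, v = np.array([20, 400]), np.array([1260, 400])        # counting-line endpoints
n = v - u
nvec = np.array([-n[1], n[0]], dtype=float)               # rotate 90 deg CCW -> line normal
nvec /= np.linalg.norm(nvec) + 1e-6

prev_pos, curr_pos = np.array([640, 380]), np.array([640, 420])  # object moved downward
obj_track_vec = curr_pos - prev_pos
direction = "in" if np.dot(obj_track_vec, nvec) >= 0 else "out"
print(direction)  # "in": positive projection onto the normal, matching the counter above
```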
ultralytics_solutions_modified/speed_estimation.py
ADDED
@@ -0,0 +1,235 @@
1 |
+
# Ultralytics YOLO 🚀, AGPL-3.0 license
|
2 |
+
|
3 |
+
from collections import defaultdict
|
4 |
+
from time import time
|
5 |
+
import logging
|
6 |
+
import cv2
|
7 |
+
import numpy as np
|
8 |
+
from ultralytics.utils.checks import check_imshow
|
9 |
+
from ultralytics.utils.plotting import Annotator, colors
|
10 |
+
|
11 |
+
|
12 |
+
# create logger
|
13 |
+
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
14 |
+
|
15 |
+
|
16 |
+
class SpeedEstimator:
|
17 |
+
"""
|
18 |
+
A class to estimate the speed of objects in a real-time video stream
|
19 |
+
based on their tracks.
|
20 |
+
"""
|
21 |
+
def __init__(self):
|
22 |
+
"""
|
23 |
+
Initializes the speed-estimator class with default values for
|
24 |
+
Visual, Image, track and speed parameters.
|
25 |
+
"""
|
26 |
+
# Visual & im0 information
|
27 |
+
self.im0 = None
|
28 |
+
self.annotator = None
|
29 |
+
self.view_img = False
|
30 |
+
|
31 |
+
# Region information
|
32 |
+
self.reg_pts = [(20, 400), (1260, 400)]
|
33 |
+
self.region_thickness = 3
|
34 |
+
|
35 |
+
# Predict/track information
|
36 |
+
self.clss = None
|
37 |
+
self.names = None
|
38 |
+
self.boxes = None
|
39 |
+
self.trk_ids = None
|
40 |
+
self.trk_pts = None
|
41 |
+
self.line_thickness = 2
|
42 |
+
self.trk_history = defaultdict(list)
|
43 |
+
|
44 |
+
# Speed estimator information
|
45 |
+
self.current_time = 0
|
46 |
+
self.dist_data = {}
|
47 |
+
self.trk_idslist = []
|
48 |
+
self.spdl_dist_thresh = 10
|
49 |
+
self.trk_previous_times = {}
|
50 |
+
self.trk_previous_points = {}
|
51 |
+
|
52 |
+
# Check if environment support imshow
|
53 |
+
self.env_check = check_imshow(warn=True)
|
54 |
+
|
55 |
+
def set_args(
|
56 |
+
self,
|
57 |
+
reg_pts,
|
58 |
+
names,
|
59 |
+
view_img=False,
|
60 |
+
line_thickness=2,
|
61 |
+
region_thickness=5,
|
62 |
+
spdl_dist_thresh=10,
|
63 |
+
):
|
64 |
+
"""
|
65 |
+
Configures the speed estimation and display parameters.
|
66 |
+
|
67 |
+
Args:
|
68 |
+
reg_pts (list): Initial list of points for the speed calc region.
|
69 |
+
names (dict): object detection classes names
|
70 |
+
view_img (bool): Flag indicating frame display
|
71 |
+
line_thickness (int): Line thickness for bounding boxes.
|
72 |
+
region_thickness (int): Speed estimation region thickness
|
73 |
+
spdl_dist_thresh (int): Euclidean distance threshold for speed line
|
74 |
+
"""
|
75 |
+
if reg_pts is None:
|
76 |
+
logging.warning("Region points not provided, using default values")
|
77 |
+
else:
|
78 |
+
self.reg_pts = reg_pts
|
79 |
+
self.names = names
|
80 |
+
self.view_img = view_img
|
81 |
+
self.line_thickness = line_thickness
|
82 |
+
self.region_thickness = region_thickness
|
83 |
+
self.spdl_dist_thresh = spdl_dist_thresh
|
84 |
+
|
85 |
+
def extract_tracks(self, tracks):
|
86 |
+
"""
|
87 |
+
Extracts results from the provided data.
|
88 |
+
|
89 |
+
Args:
|
90 |
+
tracks (list): List of tracks obtained from the tracking process.
|
91 |
+
"""
|
92 |
+
self.boxes = tracks[0].boxes.xyxy.cpu()
|
93 |
+
self.clss = tracks[0].boxes.cls.cpu().tolist()
|
94 |
+
self.trk_ids = tracks[0].boxes.id.int().cpu().tolist()
|
95 |
+
|
96 |
+
def store_track_info(self, track_id, box):
|
97 |
+
"""
|
98 |
+
Store track data.
|
99 |
+
|
100 |
+
Args:
|
101 |
+
track_id (int): object track id.
|
102 |
+
box (list): object bounding box data
|
103 |
+
"""
|
104 |
+
track = self.trk_history[track_id]
|
105 |
+
bbox_center = (
|
106 |
+
float((box[0] + box[2]) / 2), float((box[1] + box[3]) / 2)
|
107 |
+
)
|
108 |
+
track.append(bbox_center)
|
109 |
+
|
110 |
+
if len(track) > 30:
|
111 |
+
track.pop(0)
|
112 |
+
|
113 |
+
self.trk_pts = np.hstack(track).astype(np.int32).reshape((-1, 1, 2))
|
114 |
+
return track
|
115 |
+
|
116 |
+
def plot_box_and_track(self, track_id, box, cls, track):
|
117 |
+
"""
|
118 |
+
Plot track and bounding box.
|
119 |
+
|
120 |
+
Args:
|
121 |
+
track_id (int): object track id.
|
122 |
+
box (list): object bounding box data
|
123 |
+
cls (str): object class name
|
124 |
+
track (list): tracking history for tracks path drawing
|
125 |
+
"""
|
126 |
+
# speed_label = f"{int(self.dist_data[track_id])}km/ph" \
|
127 |
+
# if track_id in self.dist_data else self.names[int(cls)]
|
128 |
+
# bbox_color = colors(int(track_id)) \
|
129 |
+
# if track_id in self.dist_data else (255, 0, 255)
|
130 |
+
# self.annotator.box_label(box, speed_label, bbox_color)
|
131 |
+
|
132 |
+
# modified by steve.yin @ 3/1/2024 for traffic monitoring demo
|
133 |
+
# added for a combo label display with id, class name, speed
|
134 |
+
box_label = f"{track_id}:{self.names[int(cls)]}"
|
135 |
+
box_label += f":{(int)(self.dist_data[track_id]*0.621371)}mph" \
|
136 |
+
if track_id in self.dist_data else ''
|
137 |
+
bbox_color = colors(int(track_id)) \
|
138 |
+
if track_id in self.dist_data else (255, 0, 255)
|
139 |
+
self.annotator.box_label(box, box_label, bbox_color)
|
140 |
+
cv2.polylines(
|
141 |
+
self.im0, [self.trk_pts],
|
142 |
+
isClosed=False, color=(0, 255, 0), thickness=3
|
143 |
+
)
|
144 |
+
cv2.circle(
|
145 |
+
self.im0, (int(track[-1][0]), int(track[-1][1])), 5,
|
146 |
+
bbox_color, -1
|
147 |
+
)
|
148 |
+
|
149 |
+
def calculate_speed(self, trk_id, track):
|
150 |
+
"""
|
151 |
+
Calculation of object speed.
|
152 |
+
|
153 |
+
Args:
|
154 |
+
trk_id (int): object track id.
|
155 |
+
track (list): tracking history for tracks path drawing
|
156 |
+
"""
|
157 |
+
if not self.reg_pts[0][0] < track[-1][0] < self.reg_pts[1][0]:
|
158 |
+
return
|
159 |
+
if (
|
160 |
+
self.reg_pts[1][1] - self.spdl_dist_thresh < track[-1][1]
|
161 |
+
< self.reg_pts[1][1] + self.spdl_dist_thresh
|
162 |
+
):
|
163 |
+
direction = "known"
|
164 |
+
|
165 |
+
elif (
|
166 |
+
self.reg_pts[0][1] - self.spdl_dist_thresh < track[-1][1]
|
167 |
+
< self.reg_pts[0][1] + self.spdl_dist_thresh
|
168 |
+
):
|
169 |
+
direction = "known"
|
170 |
+
else:
|
171 |
+
direction = "unknown"
|
172 |
+
|
173 |
+
if (
|
174 |
+
self.trk_previous_times[trk_id] != 0 and direction != "unknown"
|
175 |
+
and trk_id not in self.trk_idslist
|
176 |
+
):
|
177 |
+
self.trk_idslist.append(trk_id)
|
178 |
+
|
179 |
+
time_difference = time() - self.trk_previous_times[trk_id]
|
180 |
+
if time_difference > 0:
|
181 |
+
dist_difference = np.abs(
|
182 |
+
track[-1][1] - self.trk_previous_points[trk_id][1]
|
183 |
+
)
|
184 |
+
speed = dist_difference / time_difference
|
185 |
+
self.dist_data[trk_id] = speed
|
186 |
+
|
187 |
+
self.trk_previous_times[trk_id] = time()
|
188 |
+
self.trk_previous_points[trk_id] = track[-1]
|
189 |
+
|
190 |
+
def estimate_speed(self, im0, tracks, region_color=(255, 0, 0)):
|
191 |
+
"""
|
192 |
+
Calculate object speed based on tracking data.
|
193 |
+
|
194 |
+
Args:
|
195 |
+
im0 (nd array): Image
|
196 |
+
tracks (list): List of tracks obtained from the tracking process.
|
197 |
+
region_color (tuple): Color to use when drawing regions.
|
198 |
+
"""
|
199 |
+
self.im0 = im0
|
200 |
+
if tracks[0].boxes.id is None:
|
201 |
+
if self.view_img and self.env_check:
|
202 |
+
self.display_frames()
|
203 |
+
return im0
|
204 |
+
self.extract_tracks(tracks)
|
205 |
+
|
206 |
+
self.annotator = Annotator(self.im0, line_width=3)
|
207 |
+
self.annotator.draw_region(
|
208 |
+
reg_pts=self.reg_pts,
|
209 |
+
color=region_color,
|
210 |
+
thickness=self.region_thickness
|
211 |
+
)
|
212 |
+
|
213 |
+
for box, trk_id, cls in zip(self.boxes, self.trk_ids, self.clss):
|
214 |
+
track = self.store_track_info(trk_id, box)
|
215 |
+
|
216 |
+
if trk_id not in self.trk_previous_times:
|
217 |
+
self.trk_previous_times[trk_id] = 0
|
218 |
+
|
219 |
+
self.plot_box_and_track(trk_id, box, cls, track)
|
220 |
+
self.calculate_speed(trk_id, track)
|
221 |
+
|
222 |
+
if self.view_img and self.env_check:
|
223 |
+
self.display_frames()
|
224 |
+
|
225 |
+
return im0
|
226 |
+
|
227 |
+
def display_frames(self):
|
228 |
+
"""Display frame."""
|
229 |
+
cv2.imshow("Ultralytics Speed Estimation", self.im0)
|
230 |
+
if cv2.waitKey(1) & 0xFF == ord("q"):
|
231 |
+
return
|
232 |
+
|
233 |
+
|
234 |
+
if __name__ == "__main__":
|
235 |
+
SpeedEstimator()
|
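`calculate_speed` above reports speed as vertical pixel displacement divided by elapsed time, and `plot_box_and_track` scales that number by 0.621371 (the km-to-mile factor) for the on-frame label; since no pixel-to-metre calibration is applied, the units are only nominal. A tiny numeric illustration with made-up points:

```python
# Tiny numeric illustration of the speed update above (pixel units, no calibration).
import numpy as np

prev_point = (640.0, 300.0)   # tracked centroid one update ago
curr_point = (640.0, 345.0)   # tracked centroid now
time_difference = 0.5         # seconds between the two updates

dist_difference = np.abs(curr_point[1] - prev_point[1])  # 45.0 (vertical pixels)
speed = dist_difference / time_difference                # 90.0, stored in dist_data
label_value = int(speed * 0.621371)                      # 55, rendered as "...mph" in the label
print(speed, label_value)
```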
webapp.py
ADDED
@@ -0,0 +1,1054 @@
1 |
+
import asyncio
|
2 |
+
from datetime import datetime
|
3 |
+
import logging
|
4 |
+
import cv2
|
5 |
+
import numpy as np
|
6 |
+
from pathlib import Path
|
7 |
+
import torch
|
8 |
+
from starlette.middleware import Middleware
|
9 |
+
from starlette.responses import StreamingResponse, Response
|
10 |
+
from starlette.requests import Request
|
11 |
+
from starlette.routing import Mount, Route
|
12 |
+
from starlette.staticfiles import StaticFiles
|
13 |
+
from starlette.templating import Jinja2Templates
|
14 |
+
from sse_starlette import EventSourceResponse
|
15 |
+
from asgi_htmx import HtmxMiddleware
|
16 |
+
from asgi_htmx import HtmxRequest
|
17 |
+
from ultralytics import YOLO
|
18 |
+
from ultralytics_solutions_modified import object_counter, speed_estimation
|
19 |
+
from vidgear.gears import CamGear
|
20 |
+
from vidgear.gears.asyncio import WebGear
|
21 |
+
from vidgear.gears.asyncio.helper import reducer
|
22 |
+
from helper import draw_text, try_site, make_table_from_dict
|
23 |
+
|
24 |
+
|
25 |
+
HERE = Path(__file__).parent
|
26 |
+
static = StaticFiles(directory=HERE / ".vidgear/webgear/static")
|
27 |
+
templates = Jinja2Templates(directory=HERE / ".vidgear/webgear/templates")
|
28 |
+
EVT_STREAM_DELAY_SEC = 0.05 # second
|
29 |
+
RETRY_TIMEOUT_MILSEC = 15000  # millisecond
|
30 |
+
|
31 |
+
# Create and configure logger
|
32 |
+
# logger = logging.getLogger(__name__).addHandler(logging.NullHandler())
|
33 |
+
logging.basicConfig(
|
34 |
+
format='%(asctime)s %(name)-8s->%(module)-20s->%(funcName)-20s:%(lineno)-4s::%(levelname)-8s %(message)s',
|
35 |
+
level=logging.INFO
|
36 |
+
)
|
37 |
+
|
38 |
+
|
39 |
+
class DemoCase:
|
40 |
+
def __init__(
|
41 |
+
self,
|
42 |
+
FRAME_WIDTH: int = 1280,
|
43 |
+
FRAME_HEIGHT: int = 720,
|
44 |
+
YOLO_VERBOSE: bool = True
|
45 |
+
):
|
46 |
+
self.FRAME_WIDTH: int = FRAME_WIDTH
|
47 |
+
self.FRAME_HEIGHT: int = FRAME_HEIGHT
|
48 |
+
self.YOLO_VERBOSE: bool = YOLO_VERBOSE
|
49 |
+
# predefined yolov8 model references
|
50 |
+
self.model_dict: dict = {
|
51 |
+
"y8nano": "./data/models/yolov8n.pt",
|
52 |
+
"y8small": "./data/models/yolov8s.pt",
|
53 |
+
"y8medium": "./data/models/yolov8m.pt",
|
54 |
+
"y8large": "./data/models/yolov8l.pt",
|
55 |
+
"y8huge": "./data/models/yolov8x.pt",
|
56 |
+
}
|
57 |
+
self.model_choice_default: str = "y8small"
|
58 |
+
# predefined youtube live stream urls
|
59 |
+
self.url_dict: dict = {
|
60 |
+
"Peace Bridge US": "https://youtu.be/9En2186vo5g",
|
61 |
+
"Peace Bridge CA": "https://youtu.be/WPMgP2C3_co",
|
62 |
+
"San Marcos TX": "https://youtu.be/E8LsKcVpL5A",
|
63 |
+
"4Corners Downtown": "https://youtu.be/ByED80IKdIU",
|
64 |
+
"Gangnam Seoul": "https://youtu.be/JbnJAsk1zII",
|
65 |
+
"Time Square NY": "https://youtu.be/UVftxDFol90",
|
66 |
+
"Port Everglades-1": "https://youtu.be/67-73mgWDf0",
|
67 |
+
"Port Everglades-2": "https://youtu.be/Nhuu1QsW5LI",
|
68 |
+
"Port Everglades-3": "https://youtu.be/Lpm-C_Gz6yM",
|
69 |
+
}
|
70 |
+
self.cam_loc_default: str = "Peace Bridge US"
|
71 |
+
# run time parameters that are from user input
|
72 |
+
self.model_choice: str = self.model_choice_default
|
73 |
+
self.cam_loc: str = self.cam_loc_default
|
74 |
+
self.roi_height: int = int(FRAME_HEIGHT / 2)
|
75 |
+
self.roi_thickness_half: int = 30
|
76 |
+
self.frame_reduction: int = 50
|
77 |
+
self.obj_class_id: list[int] = [2, 3, 5, 7]
|
78 |
+
# define some logic flow control booleans
|
79 |
+
self._is_running: bool = False
|
80 |
+
self._is_tracking: bool = False
|
81 |
+
# self._model_changed: bool = True
|
82 |
+
# self._cam_loc_changed: bool = True
|
83 |
+
# self._roi_height_changed: bool = True
|
84 |
+
# self._obj_class_id_changed: bool = False
|
85 |
+
self.stream0: CamGear = None
|
86 |
+
self.stream1: CamGear = None
|
87 |
+
self.counter = None
|
88 |
+
self.speed_obj = None
|
89 |
+
|
90 |
+
def load_model(
|
91 |
+
self,
|
92 |
+
model_choice: str = "y8small",
|
93 |
+
conf_threshold: float = 0.25,
|
94 |
+
iou_threshold: float = 0.7,
|
95 |
+
use_FP16: bool = False,
|
96 |
+
use_stream_buffer: bool = False
|
97 |
+
) -> None:
|
98 |
+
"""
|
99 |
+
load the YOLOv8 model of choice
|
100 |
+
"""
|
101 |
+
if model_choice not in self.model_dict:
|
102 |
+
logging.warning(
|
103 |
+
f'\"{model_choice}\" not found in the model_dict, use '
|
104 |
+
f'\"{self.model_dict[self.model_choice_default]}\" instead!'
|
105 |
+
)
|
106 |
+
self.model_choice = self.model_choice_default
|
107 |
+
else:
|
108 |
+
self.model_choice = model_choice
|
109 |
+
self.model = YOLO(f"{self.model_dict[self.model_choice]}")
|
110 |
+
|
111 |
+
# push the model to GPU if available
|
112 |
+
device = "cuda" if torch.cuda.is_available() else "cpu"
|
113 |
+
if device == "cuda":
|
114 |
+
torch.cuda.set_device(0)
|
115 |
+
self.model.to(device)
|
116 |
+
logging.info(
|
117 |
+
f"{self.model_dict[self.model_choice]} loaded using "
|
118 |
+
f"torch w GPU0"
|
119 |
+
)
|
120 |
+
else:
|
121 |
+
logging.info(
|
122 |
+
f"{self.model_dict[self.model_choice]} loaded using CPU"
|
123 |
+
)
|
124 |
+
|
125 |
+
# setup some configs
|
126 |
+
self.conf_threshold: float = conf_threshold if conf_threshold > 0.0 else 0.25 # noqa
|
127 |
+
self.iou_threshold: float = iou_threshold if iou_threshold > 0.0 else 0.7 # noqa
|
128 |
+
self.use_FP16: bool = use_FP16
|
129 |
+
self.use_stream_buffer: bool = use_stream_buffer
|
130 |
+
logging.info(
|
131 |
+
f"{self.model_choice}: conf={self.conf_threshold:.2f} | "
|
132 |
+
f"iou={self.iou_threshold:.2f} | FP16={self.use_FP16} | "
|
133 |
+
f"stream_buffer={self.use_stream_buffer}"
|
134 |
+
)
|
135 |
+
|
136 |
+
def select_cam_loc(
|
137 |
+
self,
|
138 |
+
cam_loc_key: str = "Peace Bridge US",
|
139 |
+
cam_loc_val: str = "https://www.youtube.com/watch?v=9En2186vo5g"
|
140 |
+
) -> None:
|
141 |
+
"""
|
142 |
+
select camera video feed from url_dict, or set as a new url
|
143 |
+
"""
|
144 |
+
if (bool(cam_loc_key) is False or bool(cam_loc_val) is False):
|
145 |
+
self.cam_loc = self.cam_loc_default
|
146 |
+
logging.warning(
|
147 |
+
f'input cam_loc_key, cam_loc_val pair invalid, use default '
|
148 |
+
f'{{{self.cam_loc_default}: '
|
149 |
+
f'{self.url_dict[self.cam_loc_default]}}}'
|
150 |
+
)
|
151 |
+
elif cam_loc_key not in self.url_dict:
|
152 |
+
if try_site(self.url_dict[self.cam_loc]):
|
153 |
+
self.url_dict.update({cam_loc_key: cam_loc_val})
|
154 |
+
self.cam_loc = cam_loc_key
|
155 |
+
logging.info(
|
156 |
+
f'input cam_loc key:val pair is new and playable, add '
|
157 |
+
f'{{{cam_loc_key}:{cam_loc_val}}} into url_dict'
|
158 |
+
)
|
159 |
+
else:
|
160 |
+
self.cam_loc = self.cam_loc_default
|
161 |
+
logging.warning(
|
162 |
+
f'input cam_loc key:val pair is new but not playable, '
|
163 |
+
f'roll back to default {{{self.cam_loc_default}: '
|
164 |
+
f'{self.url_dict[self.cam_loc_default]}}}'
|
165 |
+
)
|
166 |
+
self.cam_loc = self.cam_loc_default
|
167 |
+
else:
|
168 |
+
self.cam_loc = cam_loc_key
|
169 |
+
|
170 |
+
logging.info(
|
171 |
+
f'use {{{self.cam_loc}: {self.url_dict[self.cam_loc]}}} as source'
|
172 |
+
)
|
173 |
+
|
174 |
+
def set_roi_height(self, roi_height: int = 360):
|
175 |
+
if (roi_height < 0 or roi_height > self.FRAME_HEIGHT):
|
176 |
+
self.roi_height = int(self.FRAME_HEIGHT / 2)
|
177 |
+
logging.warning(
|
178 |
+
f'roi_height invalid, use default {int(self.FRAME_HEIGHT / 2)}'
|
179 |
+
)
|
180 |
+
else:
|
181 |
+
self.roi_height = roi_height
|
182 |
+
logging.info(f'roi_height is set at {self.roi_height}')
|
183 |
+
|
184 |
+
def set_frame_reduction(self, frame_reduction: int = 50):
|
185 |
+
if (frame_reduction < 0 or frame_reduction > 100):
|
186 |
+
self.frame_reduction = 50
|
187 |
+
logging.warning(
|
188 |
+
f'frame_reduction:{frame_reduction} invalid, '
|
189 |
+
f'use default value 50'
|
190 |
+
)
|
191 |
+
else:
|
192 |
+
self.frame_reduction = frame_reduction
|
193 |
+
logging.info(f'frame_reduction is set at {self.frame_reduction}')
|
194 |
+
|
195 |
+
async def frame0_producer(self):
|
196 |
+
"""
|
197 |
+
!!! define your original video source here !!!
|
198 |
+
|
199 |
+
Yields:
|
200 |
+
_type_: an image frame as a bytestring output from the producer
|
201 |
+
"""
|
202 |
+
while True:
|
203 |
+
if self._is_running:
|
204 |
+
if self.stream0 is None:
|
205 |
+
# Start the stream
|
206 |
+
self.stream0 = CamGear(
|
207 |
+
source=self.url_dict[self.cam_loc],
|
208 |
+
colorspace=None,
|
209 |
+
stream_mode=True,
|
210 |
+
logging=True
|
211 |
+
).start()
|
212 |
+
|
213 |
+
try:
|
214 |
+
# loop over frames
|
215 |
+
while (self.stream0 is not None and self._is_running):
|
216 |
+
frame = self.stream0.read()
|
217 |
+
if frame is None:
|
218 |
+
frame = (np.random.standard_normal([
|
219 |
+
self.FRAME_HEIGHT, self.FRAME_WIDTH, 3
|
220 |
+
]) * 255).astype(np.uint8)
|
221 |
+
|
222 |
+
# do something with your OpenCV frame here
|
223 |
+
draw_text(
|
224 |
+
img=frame,
|
225 |
+
text=datetime.now().strftime("%m/%d/%Y %H:%M:%S"),
|
226 |
+
pos=(int(self.FRAME_WIDTH - 400), 50),
|
227 |
+
font=cv2.FONT_HERSHEY_SIMPLEX,
|
228 |
+
font_scale=1,
|
229 |
+
font_thickness=2,
|
230 |
+
line_type=cv2.LINE_AA,
|
231 |
+
text_color=(0, 255, 255),
|
232 |
+
text_color_bg=(0, 0, 0),
|
233 |
+
)
|
234 |
+
# reducer frame size for more performance, percentage int
|
235 |
+
frame = await reducer(
|
236 |
+
frame, percentage=self.frame_reduction
|
237 |
+
)
|
238 |
+
# handle JPEG encoding & yield frame in byte format
|
239 |
+
img_encoded = cv2.imencode(".jpg", frame)[1].tobytes()
|
240 |
+
yield (
|
241 |
+
b"--frame\r\nContent-Type:video/jpeg2000\r\n\r\n" +
|
242 |
+
img_encoded + b"\r\n"
|
243 |
+
)
|
244 |
+
await asyncio.sleep(0.00001)
|
245 |
+
|
246 |
+
if self.stream0 is not None:
|
247 |
+
self.stream0.stop()
|
248 |
+
self.stream0 = None
|
249 |
+
self._is_running = False
|
250 |
+
except asyncio.CancelledError:
|
251 |
+
if self.stream0 is not None:
|
252 |
+
self.stream0.stop()
|
253 |
+
self.stream0 = None
|
254 |
+
self._is_running = False
|
255 |
+
logging.warning(
|
256 |
+
"client disconneted in frame0_producer"
|
257 |
+
)
|
258 |
+
frame = (np.random.standard_normal([
|
259 |
+
self.FRAME_HEIGHT, self.FRAME_WIDTH, 3
|
260 |
+
]) * 255).astype(np.uint8)
|
261 |
+
frame = await reducer(
|
262 |
+
frame, percentage=self.frame_reduction
|
263 |
+
)
|
264 |
+
img_encoded = cv2.imencode(".jpg", frame)[1].tobytes()
|
265 |
+
logging.info(
|
266 |
+
f"_is_running is {self._is_running} in frame0_producer"
|
267 |
+
)
|
268 |
+
yield (
|
269 |
+
b"--frame\r\nContent-Type:video/jpeg2000\r\n\r\n" +
|
270 |
+
img_encoded + b"\r\n"
|
271 |
+
)
|
272 |
+
await asyncio.sleep(0.00001)
|
273 |
+
|
274 |
+
else:
|
275 |
+
if self._is_running is True:
|
276 |
+
pass
|
277 |
+
frame = (np.random.standard_normal([
|
278 |
+
self.FRAME_HEIGHT, self.FRAME_WIDTH, 3
|
279 |
+
]) * 255).astype(np.uint8)
|
280 |
+
frame = await reducer(
|
281 |
+
frame, percentage=self.frame_reduction
|
282 |
+
)
|
283 |
+
img_encoded = cv2.imencode(".jpg", frame)[1].tobytes()
|
284 |
+
logging.info(
|
285 |
+
f"_is_running is {self._is_running} in frame0_producer"
|
286 |
+
)
|
287 |
+
yield (
|
288 |
+
b"--frame\r\nContent-Type:video/jpeg2000\r\n\r\n" +
|
289 |
+
img_encoded + b"\r\n"
|
290 |
+
)
|
291 |
+
await asyncio.sleep(0.00001)
|
292 |
+
|
293 |
+
    async def frame1_producer(self):
        """
        !!! define your processed video producer here !!!

        Yields:
            bytes: an image frame as a bytestring output from the producer
        """
        while True:
            if self._is_running:
                if self.stream1 is None:
                    # Start the stream
                    self.stream1 = CamGear(
                        source=self.url_dict[self.cam_loc],
                        colorspace=None,
                        stream_mode=True,
                        logging=True
                    ).start()

                if self._is_tracking:
                    if self.counter is None:
                        # setup object counter & speed estimator
                        region_points = [
                            (5, -self.roi_thickness_half + self.roi_height),
                            (5, self.roi_thickness_half + self.roi_height),
                            (
                                self.FRAME_WIDTH - 5,
                                self.roi_thickness_half + self.roi_height
                            ),
                            (
                                self.FRAME_WIDTH - 5,
                                -self.roi_thickness_half + self.roi_height
                            ),
                        ]
                        # region_points = [
                        #     (5, -20 + self.roi_height),
                        #     (self.FRAME_WIDTH - 5, -20 + self.roi_height),
                        # ]
                        self.counter = object_counter.ObjectCounter()
                        self.counter.set_args(
                            view_img=False,
                            reg_pts=region_points,
                            classes_names=self.model.names,
                            draw_tracks=False,
                            draw_boxes=False,
                            draw_reg_pts=True,
                        )

                    if self.speed_obj is None:
                        # Init speed estimator
                        line_points = [
                            (5, self.roi_height),
                            (self.FRAME_WIDTH - 5, self.roi_height)
                        ]
                        self.speed_obj = speed_estimation.SpeedEstimator()
                        self.speed_obj.set_args(
                            reg_pts=line_points,
                            names=self.model.names,
                            view_img=False
                        )

                try:
                    while (self.stream1 is not None and self._is_running):
                        # read frame from provided source
                        frame = self.stream1.read()
                        if frame is None:
                            frame = (np.random.standard_normal([
                                self.FRAME_HEIGHT, self.FRAME_WIDTH, 3
                            ]) * 255).astype(np.uint8)
                            # break

                        # do something with your OpenCV frame here
                        draw_text(
                            img=frame,
                            text=datetime.now().strftime("%m/%d/%Y %H:%M:%S"),
                            pos=(self.FRAME_WIDTH - 400, 50),
                            font=cv2.FONT_HERSHEY_SIMPLEX,
                            font_scale=1,
                            font_thickness=2,
                            line_type=cv2.LINE_AA,
                            text_color=(0, 255, 255),
                            text_color_bg=(0, 0, 0),
                        )

                        frame_tagged = frame
                        if self._is_tracking:
                            # YOLOv8 tracking, persisting tracks between frames
                            results = self.model.track(
                                source=frame,
                                classes=self.obj_class_id,
                                conf=self.conf_threshold,
                                iou=self.iou_threshold,
                                half=self.use_FP16,
                                stream_buffer=self.use_stream_buffer,
                                persist=True,
                                show=False,
                                verbose=self.YOLO_VERBOSE
                            )
                            if results[0].boxes.id is None:
                                pass
                            else:
                                self.speed_obj.estimate_speed(
                                    frame_tagged, results
                                )
                                self.counter.start_counting(
                                    frame_tagged, results
                                )

                        # reduce frame size for performance, int percentage
                        frame_tagged = await reducer(
                            frame_tagged, percentage=self.frame_reduction
                        )
                        # handle JPEG encoding & yield frame in byte format
                        img_encoded = \
                            cv2.imencode(".jpg", frame_tagged)[1].tobytes()
                        yield (
                            b"--frame\r\nContent-Type:image/jpeg\r\n\r\n" +
                            img_encoded + b"\r\n"
                        )
                        await asyncio.sleep(0.00001)

                    if self.stream1 is not None:
                        self.stream1.stop()
                        self.stream1 = None
                    self._is_tracking = False
                    self._is_running = False

                except asyncio.CancelledError:
                    if self.stream1 is not None:
                        self.stream1.stop()
                        self.stream1 = None
                    self._is_tracking = False
                    self._is_running = False
                    logging.warning(
                        "client disconnected in frame1_producer"
                    )
                    frame = (np.random.standard_normal([
                        self.FRAME_HEIGHT, self.FRAME_WIDTH, 3
                    ]) * 255).astype(np.uint8)
                    frame = await reducer(
                        frame, percentage=self.frame_reduction
                    )
                    img_encoded = cv2.imencode(".jpg", frame)[1].tobytes()
                    logging.info(
                        f"_is_running is {self._is_running} in frame1_producer"
                    )
                    yield (
                        b"--frame\r\nContent-Type:image/jpeg\r\n\r\n" +
                        img_encoded + b"\r\n"
                    )
                    await asyncio.sleep(0.00001)

            else:
                frame = (np.random.standard_normal([
                    self.FRAME_HEIGHT, self.FRAME_WIDTH, 3
                ]) * 255).astype(np.uint8)
                # reduce frame size for more performance, percentage int
                frame = await reducer(frame, percentage=self.frame_reduction)
                # handle JPEG encoding & yield frame in byte format
                img_encoded = cv2.imencode(".jpg", frame)[1].tobytes()
                yield (
                    b"--frame\r\nContent-Type:image/jpeg\r\n\r\n" +
                    img_encoded + b"\r\n"
                )
                await asyncio.sleep(0.00001)

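The producer above interleaves capture, YOLO tracking, and MJPEG framing. To see the wire format in isolation, here is a minimal sketch of a standalone producer that emits the same `--frame` boundary and JPEG payload from synthetic frames; `noise_frame_producer` and its parameters are illustrative and not part of webapp.py:

```python
import asyncio
from datetime import datetime

import cv2
import numpy as np


async def noise_frame_producer(width=640, height=360, fps=30):
    """Yield random-noise frames encoded as multipart JPEG parts."""
    while True:
        frame = (np.random.standard_normal([height, width, 3]) * 255).astype(np.uint8)
        cv2.putText(frame, datetime.now().strftime("%H:%M:%S"), (10, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 2, cv2.LINE_AA)
        ok, buf = cv2.imencode(".jpg", frame)
        if ok:
            yield (b"--frame\r\nContent-Type:image/jpeg\r\n\r\n"
                   + buf.tobytes() + b"\r\n")
        await asyncio.sleep(1 / fps)
```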
    async def custom_video_response(self, scope):
        """
        Return an async video streaming response for the `frame1_producer` generator.

        Tip 1: use BackgroundTask to handle the async cleanup
            https://github.com/tiangolo/fastapi/discussions/11022
        Tip 2: use is_disconnected to check client disconnection
            https://www.starlette.io/requests/#body
            https://github.com/encode/starlette/pull/320/files/d56c917460a1e6488e1206c428445c39854859c1
        """
        assert scope["type"] in ["http", "https"]
        await asyncio.sleep(0.00001)
        return StreamingResponse(
            content=self.frame1_producer(),
            media_type="multipart/x-mixed-replace; boundary=frame"
        )

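For context, the same response pattern works in a plain Starlette app: a generator like the one above is passed to `StreamingResponse` with the `multipart/x-mixed-replace` media type and mounted on a route. A minimal sketch, assuming the hypothetical `noise_frame_producer` from the previous example:

```python
from starlette.applications import Starlette
from starlette.responses import StreamingResponse
from starlette.routing import Route


async def video(request):
    # an <img src="/video"> tag on the page is enough to render this stream
    return StreamingResponse(
        noise_frame_producer(),
        media_type="multipart/x-mixed-replace; boundary=frame",
    )


app = Starlette(routes=[Route("/video", video)])
```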
    async def models(self, request: HtmxRequest) -> Response:
        # assert (htmx := request.scope["htmx"])
        if len(self.model_dict) == 0:
            template = "partials/ack.html"
            table_contents = ["model list unavailable!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            # response.headers['Hx-Retarget'] = '#set-model-ack'
        else:
            template = "partials/yolo_models.html"
            table_contents = make_table_from_dict(
                self.model_dict, self.model_choice
            )
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )

        await asyncio.sleep(0.001)
        return response

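The handlers in this part of the class all follow the same shape: pick a partial template, build a `table` context, and return a `TemplateResponse` (status 201 signals a successful state change to the htmx front end). A stripped-down sketch of that pattern, assuming Starlette's `Jinja2Templates` and the `partials/ack.html` template shipped in this repo; the helper name is illustrative:

```python
from starlette.requests import Request
from starlette.responses import Response
from starlette.templating import Jinja2Templates

demo_templates = Jinja2Templates(directory=".vidgear/webgear/templates")


async def ack_partial(request: Request, rows, ok: bool = True) -> Response:
    """Render the ack partial with the given table rows."""
    context = {"request": request, "table": rows}
    return demo_templates.TemplateResponse(
        "partials/ack.html", context, status_code=201 if ok else 200
    )
```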
    async def urls(self, request: HtmxRequest) -> Response:
        # assert (htmx := request.scope["htmx"])
        if len(self.url_dict) == 0:
            template = "partials/ack.html"
            table_contents = ["streaming url list unavailable!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            # response.headers['Hx-Retarget'] = '#set-model-ack'
        else:
            template = "partials/camera_streams.html"
            table_contents = make_table_from_dict(self.url_dict, self.cam_loc)
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
        await asyncio.sleep(0.01)
        return response

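`make_table_from_dict` comes from helper.py, which is not shown in this excerpt; judging from how it is called, it turns an options dict plus the currently selected key into rows for the partial templates. A purely hypothetical stand-in, for illustration only:

```python
def make_table_from_dict_demo(options: dict, selected_key):
    """Hypothetical stand-in: (key, value, is_selected) rows for a template."""
    return [(key, value, key == selected_key) for key, value in options.items()]


# make_table_from_dict_demo({"cam1": "rtsp://..."}, "cam1")
# -> [("cam1", "rtsp://...", True)]
```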
    async def geturl(self, request: HtmxRequest) -> Response:
        # assert (htmx := request.scope["htmx"])
        if len(self.url_dict) == 0:
            template = "partials/ack.html"
            table_contents = ["streaming url list unavailable!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            # response.headers['Hx-Retarget'] = '#set-model-ack'
        else:
            template = "partials/ack.html"
            if self.cam_loc in self.url_dict.keys():
                table_contents = [f"{self.cam_loc} selected"]
                context = {"request": request, "table": table_contents}
                response = templates.TemplateResponse(
                    template, context, status_code=201
                )
            else:
                table_contents = [
                    f"{self.cam_loc} is not in the registered url_list"
                ]
                context = {"request": request, "table": table_contents}
                response = templates.TemplateResponse(
                    template, context, status_code=200
                )
            # response.headers['Hx-Retarget'] = '#set-url-ack'
        await asyncio.sleep(0.01)
        return response

    async def addurl(self, request: HtmxRequest) -> Response:
        # assert (htmx := request.scope["htmx"])
        try:
            req_json = await request.json()
        except RuntimeError:
            template = "partials/ack.html"
            table_contents = ["receive channel unavailable!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            response.headers['Hx-Retarget'] = '#add-url-ack'
            await asyncio.sleep(0.01)
            return response

        if (
            "payload" in req_json
            and "CamLoc" in req_json["payload"] and "URL" in req_json["payload"]
        ):
            cam_loc = req_json["payload"]["CamLoc"]
            cam_url = req_json["payload"]["URL"]
            if cam_loc != "" and cam_url != "":
                if try_site(cam_url) is False:
                    template = "partials/ack.html"
                    table_contents = ["invalid video URL!"]
                    context = {"request": request, "table": table_contents}
                    response = templates.TemplateResponse(
                        template, context, status_code=200
                    )
                    response.headers['Hx-Retarget'] = '#add-url-ack'
                else:
                    self.select_cam_loc(
                        cam_loc_key=cam_loc, cam_loc_val=cam_url
                    )
                    template = "partials/camera_streams.html"
                    table_contents = make_table_from_dict(
                        self.url_dict, self.cam_loc
                    )
                    context = {"request": request, "table": table_contents}
                    response = templates.TemplateResponse(
                        template, context, status_code=201
                    )
            else:
                template = "partials/ack.html"
                table_contents = ["empty or invalid inputs!"]
                context = {"request": request, "table": table_contents}
                response = templates.TemplateResponse(
                    template, context, status_code=200
                )
                response.headers['Hx-Retarget'] = '#add-url-ack'
        else:
            template = "partials/ack.html"
            table_contents = ["invalid POST request!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            response.headers['Hx-Retarget'] = '#add-url-ack'

        await asyncio.sleep(0.01)
        return response

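The branching in addurl() boils down to validating a JSON body of the form `{"payload": {"CamLoc": ..., "URL": ...}}` before the new stream is registered. A small sketch of that check as a pure function (hypothetical helper and example values, not part of webapp.py):

```python
def parse_addurl_payload(req_json: dict):
    """Return (cam_loc, cam_url) when both fields are present and non-empty."""
    payload = req_json.get("payload", {})
    cam_loc, cam_url = payload.get("CamLoc", ""), payload.get("URL", "")
    return (cam_loc, cam_url) if cam_loc and cam_url else None


# parse_addurl_payload({"payload": {"CamLoc": "Shibuya", "URL": "https://..."}})
# -> ("Shibuya", "https://...")
```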
    async def seturl(self, request: HtmxRequest) -> Response:
        # assert (htmx := request.scope["htmx"])
        template = "partials/ack.html"
        try:
            req_json = await request.json()
        except RuntimeError:
            table_contents = ["receive channel unavailable!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            # response.headers['Hx-Retarget'] = '#set-url-ack'
            await asyncio.sleep(0.01)
            return response

        if ("payload" in req_json and "cam_url" in req_json["payload"]):
            logging.info(
                f"seturl: _is_running = {self._is_running}, "
                f"_is_tracking = {self._is_tracking}"
            )
            if (self._is_running is True or self._is_tracking is True):
                table_contents = [
                    "turn off streaming and tracking before "
                    "setting a new camera stream!"
                ]
                context = {"request": request, "table": table_contents}
                response = templates.TemplateResponse(
                    template, context, status_code=200
                )
                # response.headers['Hx-Retarget'] = '#set-url-ack'
            else:
                cam_url = req_json["payload"]["cam_url"]
                url_list = list(filter(
                    lambda x: self.url_dict[x] == cam_url, self.url_dict
                ))
                if len(url_list) > 0:
                    self.cam_loc = url_list[0]
                    table_contents = [f"{self.cam_loc} selected"]
                    context = {"request": request, "table": table_contents}
                    response = templates.TemplateResponse(
                        template, context, status_code=201
                    )
                else:
                    table_contents = [
                        f"{cam_url} is not in the registered url_list"
                    ]
                    context = {"request": request, "table": table_contents}
                    response = templates.TemplateResponse(
                        template, context, status_code=200
                    )
                    # response.headers['Hx-Retarget'] = '#set-url-ack'
        else:
            table_contents = ["invalid POST request!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            # response.headers['Hx-Retarget'] = '#set-url-ack'

        await asyncio.sleep(0.01)
        return response

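seturl() (and setmodel() below) map a submitted value back to its key with `filter` over the dict; the same reverse lookup can be written with a generator expression. A minimal sketch (hypothetical helper):

```python
def key_for_value(mapping: dict, value):
    """Return the first key whose value equals `value`, or None if absent."""
    return next((key for key, val in mapping.items() if val == value), None)


# key_for_value({"cam1": "rtsp://cam1"}, "rtsp://cam1")  -> "cam1"
```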
    async def getmodel(self, request: HtmxRequest) -> Response:
        # assert (htmx := request.scope["htmx"])
        if len(self.model_dict) == 0:
            template = "partials/ack.html"
            table_contents = ["model list unavailable!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            # response.headers['Hx-Retarget'] = '#set-model-ack'
        else:
            template = "partials/ack.html"
            if self.model_choice in self.model_dict.keys():
                table_contents = [f"{self.model_choice} selected"]
                context = {"request": request, "table": table_contents}
                response = templates.TemplateResponse(
                    template, context, status_code=201
                )
            else:
                table_contents = [
                    f"{self.model_choice} is not in the registered model_list"
                ]
                context = {"request": request, "table": table_contents}
                response = templates.TemplateResponse(
                    template, context, status_code=200
                )
            # response.headers['Hx-Retarget'] = '#set-url-ack'
        await asyncio.sleep(0.01)
        return response

    async def setmodel(self, request: HtmxRequest) -> Response:
        # assert (htmx := request.scope["htmx"])
        template = "partials/ack.html"
        try:
            req_json = await request.json()
        except RuntimeError:
            table_contents = ["receive channel unavailable!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            # response.headers['Hx-Retarget'] = '#set-model-ack'
            await asyncio.sleep(0.01)
            return response

        if ("payload" in req_json and "model_path" in req_json["payload"]):
            logging.info(
                f"setmodel: _is_running = {self._is_running}, "
                f"_is_tracking = {self._is_tracking}"
            )
            if (self._is_tracking is True):
                table_contents = [
                    "turn off tracking before setting a new YOLO model!"
                ]
                context = {"request": request, "table": table_contents}
                response = templates.TemplateResponse(
                    template, context, status_code=200
                )
                # response.headers['Hx-Retarget'] = '#set-model-ack'
            else:
                model_path = req_json["payload"]["model_path"]
                model_list = list(filter(
                    lambda x: self.model_dict[x] == model_path, self.model_dict
                ))
                if len(model_list) > 0:
                    self.model_choice = model_list[0]
                    self.load_model(
                        model_choice=self.model_choice,
                        conf_threshold=self.conf_threshold,
                        iou_threshold=self.iou_threshold,
                        use_FP16=self.use_FP16,
                        use_stream_buffer=self.use_stream_buffer
                    )
                    table_contents = [f"{self.model_choice} selected"]
                    context = {"request": request, "table": table_contents}
                    response = templates.TemplateResponse(
                        template, context, status_code=201
                    )
                else:
                    table_contents = [
                        f"{model_path} is not in the registered model_list"
                    ]
                    context = {"request": request, "table": table_contents}
                    response = templates.TemplateResponse(
                        template, context, status_code=200
                    )
                    # response.headers['Hx-Retarget'] = '#set-model-ack'
        else:
            table_contents = ["invalid POST request!"]
            context = {"request": request, "table": table_contents}
            response = templates.TemplateResponse(
                template, context, status_code=200
            )
            # response.headers['Hx-Retarget'] = '#set-model-ack'

        await asyncio.sleep(0.01)
        return response

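For reference, the load_model()/track() calls used throughout this class follow the standard Ultralytics API. A minimal sketch of loading a model and tracking on a single frame; the weight file name is an assumption (any detection weights registered in model_dict would do):

```python
import numpy as np
from ultralytics import YOLO

model = YOLO("yolov8n.pt")  # assumed weight file
frame = np.zeros((360, 640, 3), dtype=np.uint8)
results = model.track(source=frame, persist=True, conf=0.25, iou=0.7, verbose=False)
print(results[0].boxes)  # tracked boxes; .id may be None on an empty frame
```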
    async def streamswitch(self, request: HtmxRequest) -> Response:
        # assert (htmx := request.scope["htmx"])
        template = "partials/ack.html"
        try:
            req_json = await request.json()
        except RuntimeError:
            context = {
                "request": request, "table": ["receive channel unavailable!"]
            }
            status_code = 200
            await asyncio.sleep(0.01)
            return templates.TemplateResponse(
                template, context, status_code=status_code
            )

        if "payload" in req_json:
            logging.info(f"payload = {req_json['payload']}")
            if (
                "stream_switch" in req_json["payload"]
                and req_json["payload"]["stream_switch"] == "on"
            ):
                self._is_running = True
                self._is_tracking = False
                table_contents = ["on"]
                status_code = 201
            else:
                self._is_running = False
                self._is_tracking = False
                table_contents = ["off"]
                status_code = 201
        else:
            table_contents = ["invalid POST request!"]
            status_code = 200

        context = {"request": request, "table": table_contents}
        await asyncio.sleep(0.1)
        return templates.TemplateResponse(
            template, context, status_code=status_code
        )

    async def trackingswitch(self, request: HtmxRequest) -> Response:
        # assert (htmx := request.scope["htmx"])
        template = "partials/ack.html"
        try:
            req_json = await request.json()
        except RuntimeError:
            context = {
                "request": request, "table": ["receive channel unavailable!"]
            }
            status_code = 200
            await asyncio.sleep(0.01)
            return templates.TemplateResponse(
                template, context, status_code=status_code
            )

        if "payload" in req_json:
            logging.info(f"payload = {req_json['payload']}")
            if (
                "tracking_switch" in req_json["payload"]
                and req_json["payload"]["tracking_switch"] == "on"
            ):
                self._is_tracking = True and self._is_running
            else:
                self._is_tracking = False

            if self._is_tracking:
                table_contents = ["on"]
                status_code = 201
                # setup object counter & speed estimator
                region_points = [
                    (5, -20 + self.roi_height),
                    (5, 20 + self.roi_height),
                    (self.FRAME_WIDTH - 5, 20 + self.roi_height),
                    (self.FRAME_WIDTH - 5, -20 + self.roi_height),
                ]
                self.counter = object_counter.ObjectCounter()
                self.counter.set_args(
                    view_img=False,
                    reg_pts=region_points,
                    classes_names=self.model.names,
                    draw_tracks=False,
                    draw_boxes=False,
                    draw_reg_pts=True,
                )
                # Init speed estimator
                line_points = [
                    (5, self.roi_height),
                    (self.FRAME_WIDTH - 5, self.roi_height)
                ]
                self.speed_obj = speed_estimation.SpeedEstimator()
                self.speed_obj.set_args(
                    reg_pts=line_points,
                    names=self.model.names,
                    view_img=False
                )
            else:
                table_contents = ["off"]
                status_code = 201
        else:
            table_contents = ["invalid POST request!"]
            status_code = 200

        context = {"request": request, "table": table_contents}
        await asyncio.sleep(0.1)
        return templates.TemplateResponse(
            template, context, status_code=status_code
        )

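The region_points set up in trackingswitch() (and in frame1_producer() above) describe a thin horizontal band across the frame, centred on roi_height. A sketch of that geometry as a pure function (hypothetical helper mirroring the hard-coded ±20 offsets):

```python
def make_region_points(frame_width: int, roi_height: int, half_thickness: int = 20):
    """Four corners of a horizontal counting band centred on roi_height."""
    return [
        (5, roi_height - half_thickness),
        (5, roi_height + half_thickness),
        (frame_width - 5, roi_height + half_thickness),
        (frame_width - 5, roi_height - half_thickness),
    ]


# make_region_points(1280, 360) -> [(5, 340), (5, 380), (1275, 380), (1275, 340)]
```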
    async def sse_incounts(self, request: Request):
        async def event_generator():
            _stop_sse = False
            while True:
                # If client closes connection, stop sending events
                if await request.is_disconnected():
                    yield {
                        "event": "evt_in_counts",
                        "id": datetime.now().strftime("%m/%d/%Y %H:%M:%S"),
                        "retry": RETRY_TIMEOUT_MILSEC,
                        "data": "..."
                    }
                    break
                if self._is_running:
                    if self._is_tracking:
                        if _stop_sse is True:
                            _stop_sse = False
                        if (self.counter is not None
                                and self.counter.incounts_updated()):
                            yield {
                                "event": "evt_in_counts",
                                "id": datetime.now().strftime("%m/%d/%Y %H:%M:%S"),
                                "retry": RETRY_TIMEOUT_MILSEC,
                                "data": f"{self.counter.in_counts}"
                            }
                else:
                    if _stop_sse is False:
                        yield {
                            "event": "evt_in_counts",
                            "id": datetime.now().strftime("%m/%d/%Y %H:%M:%S"),
                            "retry": RETRY_TIMEOUT_MILSEC,
                            "data": "---"
                        }
                        _stop_sse = True
                await asyncio.sleep(EVT_STREAM_DELAY_SEC)
        return EventSourceResponse(event_generator())

    async def sse_outcounts(self, request: Request):
        async def event_generator():
            _stop_sse = False
            while True:
                # If client closes connection, stop sending events
                if await request.is_disconnected():
                    yield {
                        "event": "evt_out_counts",
                        "id": datetime.now().strftime("%m/%d/%Y %H:%M:%S"),
                        "retry": RETRY_TIMEOUT_MILSEC,
                        "data": "..."
                    }
                    break
                if self._is_running:
                    if self._is_tracking:
                        if _stop_sse is True:
                            _stop_sse = False
                        if (self.counter is not None
                                and self.counter.outcounts_updated()):
                            yield {
                                "event": "evt_out_counts",
                                "id": datetime.now().strftime("%m/%d/%Y %H:%M:%S"),
                                "retry": RETRY_TIMEOUT_MILSEC,
                                "data": f"{self.counter.out_counts}"
                            }
                else:
                    if _stop_sse is False:
                        yield {
                            "event": "evt_out_counts",
                            "id": datetime.now().strftime("%m/%d/%Y %H:%M:%S"),
                            "retry": RETRY_TIMEOUT_MILSEC,
                            "data": "---"
                        }
                        _stop_sse = True
                await asyncio.sleep(EVT_STREAM_DELAY_SEC)
        return EventSourceResponse(event_generator())


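sse_incounts() and sse_outcounts() use the sse-starlette pattern: an async generator yields event dicts and is wrapped in an EventSourceResponse, with request.is_disconnected() ending the loop. A minimal self-contained sketch of that pattern (endpoint name and payload are illustrative):

```python
import asyncio

from sse_starlette.sse import EventSourceResponse


async def sse_clock(request):
    async def event_generator():
        while not await request.is_disconnected():
            yield {"event": "evt_clock", "retry": 15000, "data": "tick"}
            await asyncio.sleep(1.0)

    return EventSourceResponse(event_generator())
```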
is_huggingface = False
# define the host url and port for webgear server
HOST_WEBGEAR, PORT_WEBGEAR = "localhost", 8080

# instantiate a demo case
demo_case = DemoCase(YOLO_VERBOSE=False)
demo_case.set_frame_reduction(frame_reduction=10)
demo_case.load_model()
# setup object counter & speed estimator
region_points = [
    (5, -demo_case.roi_thickness_half + demo_case.roi_height),
    (5, demo_case.roi_thickness_half + demo_case.roi_height),
    (
        demo_case.FRAME_WIDTH - 5,
        demo_case.roi_thickness_half + demo_case.roi_height
    ),
    (
        demo_case.FRAME_WIDTH - 5,
        -demo_case.roi_thickness_half + demo_case.roi_height
    ),
]
demo_case.counter = object_counter.ObjectCounter()
demo_case.counter.set_args(
    view_img=False,
    reg_pts=region_points,
    classes_names=demo_case.model.names,
    draw_tracks=False,
    draw_boxes=False,
    draw_reg_pts=True,
)
# Init speed estimator
line_points = [
    (5, demo_case.roi_height),
    (demo_case.FRAME_WIDTH - 5, demo_case.roi_height)
]
demo_case.speed_obj = speed_estimation.SpeedEstimator()
demo_case.speed_obj.set_args(
    reg_pts=line_points,
    names=demo_case.model.names,
    view_img=False
)
logging.info([f"{x}" for x in list(demo_case.url_dict.keys())])
logging.info([f"{x}" for x in list(demo_case.model_dict.keys())])

# setup webgear server
options = {
    "custom_data_location": "./",
    "enable_infinite_frames": True,
    # "jpeg_compression_quality": 90,
    "jpeg_compression_fastdct": True,
    "jpeg_compression_fastupsample": True,
}
# demo_case.stream0 = CamGear(
#     source=demo_case.url_dict[demo_case.cam_loc],
#     colorspace=None,
#     stream_mode=True,
#     logging=True
# ).start()
# if demo_case.stream0 is None:
#     sys.exit("stream unavailable")

web = WebGear(
    logging=True, **options
)
# config webgear server
web.config["generator"] = demo_case.frame0_producer
web.config["middleware"] = [Middleware(HtmxMiddleware)]
web.routes.append(Mount("/static", static, name="static"))
web.routes.append(
    Route("/video1", endpoint=demo_case.custom_video_response)
)
routes_dict = {
    "models": (demo_case.models, ["GET"]),
    "getmodel": (demo_case.getmodel, ["GET"]),
    "setmodel": (demo_case.setmodel, ["POST"]),
    "urls": (demo_case.urls, ["GET"]),
    "addurl": (demo_case.addurl, ["POST"]),
    "geturl": (demo_case.geturl, ["GET"]),
    "seturl": (demo_case.seturl, ["POST"]),
    "streamswitch": (demo_case.streamswitch, ["POST"]),
    "trackingswitch": (demo_case.trackingswitch, ["POST"]),
}
for k, v in routes_dict.items():
    web.routes.append(
        Route(path=f"/{k}", endpoint=v[0], name=k, methods=v[1])
    )
web.routes.append(Route(
    path="/sseincounts",
    endpoint=demo_case.sse_incounts,
    name="sseincounts"
))
web.routes.append(Route(
    path="/sseoutcounts",
    endpoint=demo_case.sse_outcounts,
    name="sseoutcounts"
))

# if is_huggingface is False:
#     # run this app on a Uvicorn server at http://localhost:8080/
#     uvicorn.run(
#         web(), host=HOST_WEBGEAR, port=PORT_WEBGEAR, log_level="info"
#     )
#     # close app safely
#     web.shutdown()
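The commented-out block above shows how the app would be run locally with Uvicorn. On Hugging Face Spaces the server is usually launched externally, so one common pattern (an assumption, not shown in the original file) is to expose the ASGI application object that WebGear builds:

```python
# expose the Starlette app built by WebGear so an external server can serve it,
# e.g.  uvicorn webapp:app --host 0.0.0.0 --port 7860
app = web()
```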