<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Hand Detection Demo (handtrack.js)</title>
    <script src="https://cdn.jsdelivr.net/npm/handtrackjs@0.1.5/dist/handtrack.min.js"></script>
    <style>
        *{
            margin: 0;
            padding: 0;
            box-sizing: border-box;
        }
        html,body{
            height: 100% !important;
            width: 100%;
        }
        #video,#can{
            display: none;
        }
    </style>
</head>
<body>
    <video id="video"></video>
    <canvas id="can"></canvas>
</body>
<script>
    // Feature-detect getUserMedia. The old code assigned the legacy, prefixed,
    // CALLBACK-style APIs (navigator.getUserMedia etc.) directly onto the
    // promise-based navigator.mediaDevices.getUserMedia, which would break any
    // caller that expects a Promise. Wrap the legacy API in a Promise instead.
    if (!navigator.mediaDevices) navigator.mediaDevices = {};
    if (!navigator.mediaDevices.getUserMedia) {
        const legacyGetUserMedia =
            navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
        navigator.mediaDevices.getUserMedia = legacyGetUserMedia
            ? (constraints) => new Promise((resolve, reject) =>
                  // legacy API must be invoked with `navigator` as `this`
                  legacyGetUserMedia.call(navigator, constraints, resolve, reject))
            : () => Promise.reject(new Error("getUserMedia is not supported in this browser"));
    }

    const video = document.getElementById("video")
    const can = document.getElementById("can")
    const ctx = can.getContext('2d');

    let model;        // hand-tracking model; set asynchronously by handTrack.load()
    let flag = true;  // debounce gate: true when a new voice announcement may fire

    // Preconfigured utterance (currently unused by the detection loop below,
    // which builds a fresh utterance per announcement — kept for compatibility).
    // Web Speech API valid ranges: pitch 0-2, rate 0.1-10, volume 0-1.
    // The previous values (pitch 6, volume 2) were out of range and were
    // clamped or ignored by browsers; clamp them to the spec maxima.
    let sparcker = new SpeechSynthesisUtterance();
    const synthesis = window.speechSynthesis;
    sparcker.lang = "zh";
    sparcker.pitch = 2;
    sparcker.rate = 2;
    sparcker.volume = 1;
    /**
     * Runs one hand-detection pass on the current video frame, renders the
     * mirrored predictions onto the canvas, and speaks "有人来了" ("someone
     * is here"), debounced to at most once per 2 s, whenever at least one
     * hand is detected. Reschedules itself via requestAnimationFrame.
     */
    function handDetection(){
        // The model loads asynchronously; skip frames until it is ready
        // (previously this would throw on `model.detect` if the loop started
        // before handTrack.load() resolved).
        if (!model) {
            requestAnimationFrame(handDetection);
            return;
        }
        model.detect(video).then(pre => {
            // Mirror the canvas horizontally. setTransform REPLACES the
            // current transform; the previous ctx.scale(-1, 1) call here
            // multiplied into the existing transform on EVERY frame, so the
            // canvas flipped back and forth each pass.
            ctx.setTransform(-1, 0, 0, 1, 0, 0);
            model.renderPredictions(pre, can, ctx, video);

            if (pre.length > 0 && flag) {
                flag = false;  // close the gate until this announcement fires
                setTimeout(() => {
                    // `voiceText` was an implicit global before; keep it local.
                    const voiceText = "有人来了";
                    const utterance = new SpeechSynthesisUtterance(voiceText);
                    synthesis.speak(utterance);
                    flag = true;  // reopen the gate after speaking
                }, 2000);
            }
        }).finally(() => {
            // Schedule the next pass only after this one settles so detect()
            // calls never overlap, and keep looping even if detect() rejects.
            requestAnimationFrame(handDetection);
        });
    }
    /**
     * Starts the webcam via handtrack.js, attaches the media stream to the
     * hidden <video> element, and kicks off the detection loop.
     *
     * Bug fixed: the stream was assigned to `video.srcObj`, a typo for
     * `srcObject`, so the assignment silently did nothing. Also surfaces
     * camera-permission failures instead of swallowing them.
     */
    function startVideo(){
        handTrack.startVideo(video).then((status = {}) => {
            if (status.status) {
                navigator.mediaDevices.getUserMedia({
                    video: true
                }).then(stream => {
                    video.srcObject = stream;  // was `srcObj` — never took effect
                    handDetection();
                }).catch(err => {
                    console.error("getUserMedia failed:", err);
                });
            } else {
                // status.status is false when the camera could not be started
                // (e.g. permission denied) — previously this failed silently.
                console.error("Camera could not be started:", status.msg);
            }
        });
    }
    // Load the model FIRST, then start the video. The original called
    // startVideo() immediately after scheduling the load, so the detection
    // loop raced handTrack.load() and could run with `model` still undefined.
    handTrack.load().then(loadedmodel => {
        model = loadedmodel;
        startVideo();
    }).catch(err => {
        console.error("Failed to load handtrack model:", err);
    });
</script>
</html>