<!DOCTYPE html>
<html lang="en">

	<head>
		<meta charset="utf-8">
		<meta name="viewport" content="width=device-width, initial-scale=1">
		<title>Face Detection Camera Example</title>
	</head>

	<body>
		<h2>Face Detection Camera Example</h2>
		<p>
			Click <b>Start/Stop</b> button to start or stop the camera capture.<br> The <b>videoInput</b> is a &lt;video&gt; element used as face detector input. The <b>canvasOutput</b> is a &lt;canvas&gt; element used as face detector output.<br> The code of &lt;textarea&gt; will be executed when video is started. You can modify the code to investigate more.
		</p>
		<div>
			<div class="control"><button id="startAndStop" disabled>Start</button></div>
			<textarea class="code" rows="10" cols="80" id="codeEditor" spellcheck="false">
</textarea>
		</div>
		<p class="err" id="errorMessage"></p>
		<div>
			<table cellpadding="0" cellspacing="0" width="0" border="3">
				<tr>
					<td>
						<video id="videoInput" width="320" height="240"></video>
					</td>
					<td>
						<canvas id="canvasOutput" width="320" height="240"></canvas>
					</td>
					<td style="width: 400px; height: 300px; background-color: red"><img alt="" src="" id="targetImg" width="320" height="240"></td>
					<td></td>
				</tr>
				<tr>
					<td>
						<div class="caption">videoInput</div>
					</td>
					<td>
						<div class="caption">canvasOutput</div>
					</td>
					<td><div class="caption">targetImg</div></td>
					<td></td>
				</tr>
			</table>
		</div>
		<!--<script src="https://webrtc.github.io/adapter/adapter-5.0.4.js" type="text/javascript"></script>-->
		<script src="../js/utils.js" type="text/javascript"></script>
		<script src="../../static/jquery/jquery.2.2.4.min.js"></script>
		<script id="codeSnippet" type="text/code-snippet">
			let video = document.getElementById('videoInput');
let catched=false;
					let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
					let dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
					let gray = new cv.Mat();
					let cap = new cv.VideoCapture(video);
					let faces = new cv.RectVector();

					let eyes = new cv.RectVector();

					let classifier = new cv.CascadeClassifier();
					
					let eyeCascade = new cv.CascadeClassifier();

					// load pre-trained classifiers
					classifier.load('haarcascade_frontalface_default.xml');
//console.log("classifier",classifier)
					eyeCascade.load('haarcascade_eye.xml');

//console.log("eyeCascade1",eyeCascade)
					const FPS = 30;
					function processVideo() {
					    try {
					        if (!streaming) {
					            // clean and stop.
					            src.delete();
					            dst.delete();
					            gray.delete();
					            faces.delete();
					            classifier.delete();

								eyes.delete();eyeCascade.delete();
 roiGray.delete(); roiSrc.delete();
					            return;
					        }
//console.log("eyeCascade2")
					        let begin = Date.now();
					        // start processing.
					        cap.read(src);
					        src.copyTo(dst);
					        cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
					        // detect faces.
					        classifier.detectMultiScale(gray, faces, 1.1, 3, 0);
//console.log("eyeCascade3")
							//let msize = new cv.Size(0, 0);
							//classifier.detectMultiScale(gray, faces, 1.1, 3, 0, msize, msize);
			
//console.log("eyeCascade4")
					        // draw faces.
					        for (let i = 0; i < faces.size(); ++i) {
					            let face = faces.get(i);
//console.log("eyeCascade5")

					            let point1 = new cv.Point(face.x, face.y);
					            let point2 = new cv.Point(face.x + face.width, face.y + face.height);
//console.log("识别到人脸",face)

					            cv.rectangle(dst, point1, point2, [255, 0, 0, 255]);
//console.log("eyeCascade6")

								let roiGray = gray.roi(face);
//console.log("eyeCascade6.2",roiGray)
								let roiSrc = src.roi(face);
//console.log("eyeCascade7",roiSrc)
								let msize = new cv.Size(0, 0);
//console.log("eyeCascade7.1")

								eyeCascade.detectMultiScale(roiGray, eyes);
								
//console.log("eyeCascade8",eyes.size())
								 for (let j = 0; j < eyes.size(); ++j) {

									   let eyez = eyes.get(j);
//console.log("eyeCascade8.1",eyez)
	     							   let pointE1 = new cv.Point(eyez.x, eyez.y);
	      							   let pointE2 = new cv.Point(eyez.x +eyez.width,eyez.y + eyez.height);
//console.log("eyeCascade8.2",pointE1,pointE2)
	      								cv.rectangle(dst, pointE1, pointE2, [0, 0, 255, 255]);
										if(eyes.size()==2){
console.log("showimg--zz:",zz,eyes.size());
											showimg()
										}

	   							}
roiGray.delete(); roiSrc.delete();
					        }


cv.imshow('canvasOutput', dst);

//let ctx = canvas.getContext('2d');
//let imgData = ctx.getImageData(0, 0, canvas.width, canvas.height);

if(1==2){
let canvasOutputz = document.getElementById('canvasOutput');
var image = new Image();
 image.src = canvasOutputz.toDataURL("image/png");

 document.getElementById('targetImg').src=image.src ;//.append(image);
console.log("eyeCascade9",image);
catched=false;
}


 
					        // schedule the next one.
					        let delay = 1000/FPS - (Date.now() - begin);
					        setTimeout(processVideo, delay);
					    } catch (err) {
					        utils.printError(err);
					    }
					};
					
					// schedule the first one.
					setTimeout(processVideo, 0);
				</script>
				
				
				
				<script type="text/javascript">
				var zz=0;
				function showimg(){
					console.log("showimg--run:",zz);
					if(zz>0){return}
					let canvasOutputz = document.getElementById('canvasOutput');
					var image = new Image();
					 image.src = canvasOutputz.toDataURL("image/png");
					 //console.log("eyeCascade9",image);
					 var imgdata=image.src ;
					 console.log("eyeCascade10",imgdata);
					 document.getElementById('targetImg').src=imgdata ;//.append(image);
					 uploadImg(imgdata);
					 zz++
				}
					let utils = new Utils('errorMessage');

					utils.loadCode('codeSnippet', 'codeEditor');

					let streaming = false;
					let videoInput = document.getElementById('videoInput');
					let startAndStop = document.getElementById('startAndStop');
					let canvasOutput = document.getElementById('canvasOutput');
					let canvasContext = canvasOutput.getContext('2d');

					
					startAndStop.addEventListener('click', () => {
						if(!streaming) {
							utils.clearError();
							utils.startCamera('qvga', onVideoStarted, 'videoInput');
						} else {
							utils.stopCamera();
							onVideoStopped();
						}
					});

					function onVideoStarted() {
						streaming = true;
						startAndStop.innerText = 'Stop';
						videoInput.width = videoInput.videoWidth;
						videoInput.height = videoInput.videoHeight;
						utils.executeCode('codeEditor');
					}

					function onVideoStopped() {
						streaming = false;
						canvasContext.clearRect(0, 0, canvasOutput.width, canvasOutput.height);
						startAndStop.innerText = 'Start';
					}

				/* 	utils.loadOpenCv(() => {
						let faceCascadeFile = 'haarcascade_frontalface_default.xml';
						utils.createFileFromUrl(faceCascadeFile, faceCascadeFile, () => {
							startAndStop.removeAttribute('disabled');
						});
					}); */
					utils.loadOpenCv(() => {
					    let eyeCascadeFile = 'haarcascade_eye.xml';
					    utils.createFileFromUrl(eyeCascadeFile, eyeCascadeFile, () => {
					        let faceCascadeFile = 'haarcascade_frontalface_default.xml';
					        utils.createFileFromUrl(faceCascadeFile, faceCascadeFile, () => {
					        	startAndStop.removeAttribute('disabled');
					        });
					    });
					});
					
					
					
					function uploadImg(imgData){
						console.log("uploadImg",imgData);
						$.ajax({
							url:"/test/face/base64",
							data: {'faceimg': imgData},
							dataType: 'json',
							type: 'post',
							success: function(data) {
								console.log(data);
							},
							error: function(xhr, type, errorThrown) {
								mui.toast('网络异常，请稍后再试！');
							}
						});

	
					}
				</script>
	</body>

</html>