<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>自然图片识别例子</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
/* Full-width page shell; horizontal overflow hidden because the rotated
   portrait canvas (below) briefly overhangs the viewport. */
html,body {
	margin: 0;
	padding: 0;
	width: 100%;
	text-align: center;
	overflow-x: hidden;
}
/* Portrait phones: the camera canvas arrives rotated, so counter-rotate it
   -90° about the top-left corner and translate it back on-screen. */
.portrait canvas {
	transform-origin: 0 0;
	transform: rotate(-90deg) translateX(-100%);
}
/* Desktop webcams: mirror the canvas horizontally for a selfie-style view. */
.desktop canvas {
 	transform: scale(-1, 1);
}
</style>
</head>
<body>

<h1>NFT marker example with Three.js</h1>
<p>On Chrome on Android, tap the screen to start playing video stream.</p>
<p>Show <a href="https://github.com/artoolkit/artoolkit5/blob/master/doc/Marker%20images/pinball.jpg">Pinball image</a> to camera to display a colorful object on top of it. Tap the screen to rotate the object.</p>

<p>&larr; <a href="index.html">Back to examples</a></p>

<!-- NOTE(review): jsartoolkit5 loads from ../vendor but three.js from ../../vendor — confirm both paths are correct -->
<script src="../vendor/jsartoolkit5/build/artoolkit.min.js"></script>
<script src="../../vendor/three.js/build/three.js"></script>
<script src="../vendor/jsartoolkit5/js/artoolkit.three.js"></script>

<script src="../../vendor/three.js/examples/js/libs/stats.min.js"></script>
<script src="../../vendor/three.js/ColladaLoader.js"></script>
<script src="../../vendor/three.js/Animation.js"></script>
<script src="../../vendor/three.js/AnimationHandler.js"></script>
<script src="../../vendor/three.js/KeyFrameAnimation.js"></script>
<!-- Vorlon.js remote-debugging client pinned to a development LAN address;
     disabled so the page does not stall or fail to load off that network.
<script src="http://192.168.89.87:1337/vorlon.js"></script>
-->
<script>
// Base URL for AR.js assets, protocol-relative so it works over http or https.
let location_path = `//${location.host}/AR.js-master/`;
window.ARThreeOnLoad = function() {

	// Start the camera stream and build a Three.js scene synced to it.
	ARController.getUserMediaThreeScene({maxARVideoSize: 320, cameraParam: location_path+'data/data/camera_para.dat',
	onSuccess: function(arScene, arController, arCamera) {

		// The orientation class on <body> drives the CSS canvas transforms above.
		document.body.className = arController.orientation;

		var renderer = new THREE.WebGLRenderer({antialias: true});
		if (arController.orientation === 'portrait') {
			// Portrait: CSS rotates the canvas -90°, so width/height are swapped here.
			var w = (window.innerWidth / arController.videoHeight) * arController.videoWidth;
			var h = window.innerWidth;
			renderer.setSize(w, h);
			renderer.domElement.style.paddingBottom = (w - h) + 'px';
		} else {
			if (/Android|mobile|iPad|iPhone/i.test(navigator.userAgent)) {
				// Mobile landscape: scale the video to fill the screen width.
				renderer.setSize(window.innerWidth, (window.innerWidth / arController.videoWidth) * arController.videoHeight);
			} else {
				// Desktop: native video size; the 'desktop' class mirrors the canvas.
				renderer.setSize(arController.videoWidth, arController.videoHeight);
				document.body.className += ' desktop';
			}
		}

		document.body.insertBefore(renderer.domElement, document.body.firstChild);

		// Video element used as a live texture on the marker plane.
		var video = document.createElement('video');
		video.src = '../../../../data/videos/headtracking.mp4';
		video.controls = true;
		// autoplay/loop/muted/playsInline are boolean DOM properties, not attribute
		// strings. muted + playsInline are required for autoplay to actually start
		// under modern (especially mobile) browser autoplay policies.
		video.autoplay = true;
		video.loop = true;
		video.muted = true;
		video.playsInline = true;

		var texture = new THREE.VideoTexture(video);
		// Video frames are generally not power-of-two; LinearFilter avoids mipmaps.
		texture.minFilter = THREE.LinearFilter;
		texture.magFilter = THREE.LinearFilter;

		// Load the NFT (natural feature tracking) marker and attach a video-textured
		// plane to it, so the video renders on top of the tracked image.
		// (Dropped CanvasRenderer-era 'overdraw' option: WebGLRenderer ignores it
		// and logs a warning.)
		arController.loadNFTMarker(location_path + 'data/dataNFT/fuwa/fuwa', function(markerId) {
			var material = new THREE.MeshBasicMaterial({map: texture});
			// A flat unlit quad needs no interior segments (was 30 width segments).
			var plane = new THREE.PlaneGeometry(30, 30);
			var mesh = new THREE.Mesh(plane, material);
			var markerRoot = arController.createThreeNFTMarker(markerId);
			markerRoot.add(mesh);
			arScene.scene.add(markerRoot); // mesh follows the tracked marker
		});

		// Render loop. NOTE(review): AR processing is gated on the *texture*
		// video's readiness (video.videoWidth), not the camera feed — presumably
		// to delay tracking until the overlay can be drawn; confirm intent.
		var processingDone = true;
		var tick = function() {
			if (video.videoWidth && processingDone) {
				processingDone = false;
				arScene.process();
				processingDone = true;
			}
			arScene.renderOn(renderer);
			requestAnimationFrame(tick);
		};

		tick();

	}});

	// One-shot initializer: remove the hook so it cannot run twice.
	delete window.ARThreeOnLoad;

};

// The script tags above load synchronously, so jsartoolkit5 may already be
// ready; if its Three.js helper is present, start immediately.
var arReady = window.ARController && window.ARController.getUserMediaThreeScene;
if (arReady) {
	window.ARThreeOnLoad();
}
</script>

</body>
</html>
