<!doctype html>
<html lang="en">
<head>
    <meta charset="utf-8">
    <title>NFT marker example with Three.js</title>
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <style>
        html, body {
            margin: 0;
            padding: 0;
            width: 100%;
            text-align: center;
            overflow-x: hidden;
        }
        .portrait canvas {
            transform-origin: 0 0;
            transform: rotate(-90deg) translateX(-100%);
        }
        .desktop canvas {
            transform: scale(-1, 1);
        }
    </style>

</head>
<body>

<h1>NFT marker example with Three.js</h1>
<p>On Chrome on Android, tap the screen to start playing video stream.</p>
<p>Show <a href="https://github.com/artoolkit/artoolkit5/blob/master/doc/Marker%20images/pinball.jpg">Pinball image</a>
    to camera to display a colorful object on top of it. Tap the screen to rotate the object.</p>

<p>&larr; <a href="index.html">Back to examples</a></p>

<script src="js/jsartoolkit5/build/artoolkit.min.js"></script>
<script src="js/three.js/build/three.js"></script>
<script src="js/jsartoolkit5/js/artoolkit.three.js"></script>


<script>
    // Boot the AR demo once jsartoolkit is ready: capture the camera stream,
    // build a Three.js scene, and attach a video-textured plane to the NFT
    // ("fuwa"/pinball) marker whenever it is tracked.
    window.ARThreeOnLoad = function () {

        ARController.getUserMediaThreeScene({
            maxARVideoSize: 640, cameraParam: 'data/camera_para.dat',
            onSuccess: function (arScene, arController, arCamera) {
                // <video> element used as the live texture shown on the marker plane.
                var video = document.createElement('video');
                video.crossOrigin = "anonymous";
                var texture = new THREE.VideoTexture(video);
                texture.minFilter = THREE.LinearFilter;
                texture.magFilter = THREE.LinearFilter;

                // The mesh is created asynchronously in the loadNFTMarker
                // callback below, but the 'getNFTMarker' event can fire first.
                // Declared here (was an implicit global) so the handler can
                // safely check for it before it exists.
                var mesh = null;

                arController.addEventListener('getNFTMarker', function (ev) {
                    video.play();
                    console.info(ev);
                    // Guard: marker-detection events may arrive before the
                    // marker mesh has been built.
                    if (mesh) {
                        mesh.position.z += 0.01;
                    }
                });

                document.body.className = arController.orientation;

                var renderer = new THREE.WebGLRenderer({antialias: true});
                if (arController.orientation === 'portrait') {
                    // Portrait: the page CSS rotates the canvas -90deg, so the
                    // canvas is sized with width/height swapped relative to the
                    // viewport, and the rotated overflow is padded out below.
                    var w = (window.innerWidth / arController.videoHeight) * arController.videoWidth;
                    var h = window.innerWidth;
                    renderer.setSize(w, h);
                    renderer.domElement.style.paddingBottom = (w - h) + 'px';
                } else {
                    if (/Android|mobile|iPad|iPhone/i.test(navigator.userAgent)) {
                        // Landscape mobile: scale the canvas to the viewport
                        // width, preserving the video's aspect ratio.
                        renderer.setSize(window.innerWidth, (window.innerWidth / arController.videoWidth) * arController.videoHeight);
                    } else {
                        // Desktop: render at the native video size; the
                        // `.desktop` CSS rule mirrors the canvas horizontally.
                        renderer.setSize(arController.videoWidth, arController.videoHeight);
                        document.body.className += ' desktop';
                    }
                }

                document.body.insertBefore(renderer.domElement, document.body.firstChild);

                arController.loadNFTMarker('data/fuwa/fuwa', function (markerId) {
                    // NOTE(review): `overdraw` is a legacy CanvasRenderer-era
                    // option ignored by WebGLRenderer — kept for parity with
                    // the original; safe to remove.
                    var material = new THREE.MeshBasicMaterial({
                        map: texture,
                        overdraw: true
                    });
                    var plane = new THREE.PlaneGeometry(300, 200);
                    mesh = new THREE.Mesh(plane, material);
                    var markerRoot = arController.createThreeNFTMarker(markerId);
                    markerRoot.add(mesh);
                    arScene.scene.add(markerRoot); // add the marker group to the scene
                    video.src = 'https://threejs.org/examples/textures/sintel.ogv';
                });

                // Re-entrancy guard around process(); process() is synchronous
                // so this never actually skips a frame, but it is harmless and
                // preserved from the original.
                var processingDone = true;
                var tick = function () {
                    // Only start processing once the video stream has real frames.
                    if (video.videoWidth && processingDone) {
                        processingDone = false;
                        arScene.process();
                        processingDone = true;
                    }

                    arScene.renderOn(renderer);
                    requestAnimationFrame(tick);
                };

                tick();

            }
        });

        // One-shot hook: artoolkit calls window.ARThreeOnLoad when it finishes
        // loading, so remove it after use.
        delete window.ARThreeOnLoad;

    };

    // If artoolkit was already loaded before this inline script ran, start now.
    if (window.ARController && ARController.getUserMediaThreeScene) {
        ARThreeOnLoad();
    }
</script>


</body>
</html>
