<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <meta name="apple-mobile-web-app-capable" content="yes">
    <meta name="apple-touch-fullscreen" content="yes">
    <meta name="apple-mobile-web-app-status-bar-style" content="black">
    <title>learn-PixiJS</title>
</head>
<body>

<div>
    <button onclick="playContinue()">继续播放视频</button>
    <button onclick="stopPlay()">暂停播放</button>
</div>
<script src="js/pixi.min.js"></script>

<script>

  // Asset manifest: the background video, the poster image, and three
  // numbered PNG frame sequences used by the intro animation.
  const videoUrl = 'media/video.mp4';
  const imgUrl = 'images/vocaloid.jpg';
  const picArr = [videoUrl, imgUrl];

  // Append "images/1X1/<prefix>_<0..count-1>.png" for each sequence,
  // in the same order the loader expects them.
  [['S1a', 71], ['S1b', 63], ['S1c', 15]].forEach(([prefix, count]) => {
    for (let n = 0; n < count; n++) {
      picArr.push(`images/1X1/${prefix}_${n}.png`);
    }
  });
  // Renderer / stage setup: a 540x540 canvas with a teal background.
  const app = new PIXI.Application({
    backgroundColor: 0x1099bb,
    width: 540,
    height: 540,
  });

  // Shared handle to the video texture; assigned once the texture is
  // created so the play/pause buttons outside the canvas can reach it.
  let videoTexture = null;

  document.body.appendChild(app.view);

  // PIXI v5 shared loader.
  // BUG FIX: the original read `PIXI.loader.resources` — the lowercase
  // `PIXI.loader` is the deprecated v4 global and is not the same object
  // as `PIXI.Loader.shared` used on the line above (and is undefined in
  // v5 builds). Read resources off the shared Loader instance instead.
  const Loader = PIXI.Loader.shared,
    Resources = Loader.resources,
    TextureCache = PIXI.utils.TextureCache;

  // Queue every asset from the manifest and kick off the load.
  Loader
    .add(picArr)
    .on("progress", loadProgressHandler)
    .load(onAssetsLoaded);

  // Loader "progress" callback: log the load percentage (0-100) as a
  // number rounded to two decimals.
  function loadProgressHandler(loader, resources) {
    const percent = Number(loader.progress.toFixed(2));
    console.log('loading progress: ', percent);
  }
  // Loader "load" callback: every queued file is now in the cache,
  // so the video texture can be created.
  function onAssetsLoaded(loader, resources) {
    console.log("All files loaded");
    console.log(resources, 'resources');

    createVideoTexture();
  }
  // Build the video texture and publish it through the module-level
  // `videoTexture` variable so the play/pause buttons can control it.
  // BUG FIX: the original declared a local `const videoTexture`,
  // shadowing the module-level variable — stopPlay()/playContinue()
  // therefore always saw null until onPlayVideo() ran.
  function createVideoTexture() {
    videoTexture = PIXI.Texture.from(videoUrl);
  }
  //  app.stop();


  // Play button: a semi-transparent rounded square with a white
  // "play" triangle drawn inside it.
  const button = new PIXI.Graphics();
  button.beginFill(0x0, 0.5);
  button.drawRoundedRect(0, 0, 100, 100, 10);
  button.endFill();
  button.beginFill(0xffffff);
  button.moveTo(36, 30);
  button.lineTo(36, 70);
  button.lineTo(70, 50);

  // Center the button on the canvas.
  button.x = (app.screen.width - button.width) / 2;
  button.y = (app.screen.height - button.height) / 2;

  // Make it clickable with a hand cursor.
  button.interactive = true;
  button.buttonMode = true;

  app.stage.addChild(button);

  // Playback starts from this tap handler rather than automatically:
  // iOS 9 and under need a polyfill to render video in PIXI at all
  // (https://github.com/bfred-it/iphone-inline-video), and iOS 10+
  // only renders videos with an audio track after a user gesture.
  button.on('pointertap', onPlayVideo);


  // Tap handler: tear down the play button, start video playback, and
  // build the rest of the scene (frame animation, shader quad, caption).
  function onPlayVideo() {
    // The button is single-use; destroy it outright.
    button.destroy();

    console.log(TextureCache, 'TextureCache');

    // Prefer the loader-cached texture; fall back to building one from
    // the URL — the original noted the cache may be empty here
    // ("暂时没有视频缓存") and would then crash on the next line.
    videoTexture = TextureCache[videoUrl] || PIXI.Texture.from(videoUrl);

    // BUG FIX: `currentTime` belongs to the underlying <video> element
    // (baseTexture.source), not to the BaseTexture itself — the original
    // assignment set a nonexistent property and did nothing.
    videoTexture.baseTexture.source.currentTime = 3;
    videoTexture.baseTexture.source.play();

    // A sprite backed by the video texture, stretched over the canvas.
    const videoSprite = new PIXI.Sprite(videoTexture);
    videoSprite.width = app.screen.width;
    videoSprite.height = app.screen.height;
    app.stage.addChild(videoSprite);

    createImages(videoTexture);
    createGeometry();

    // Styled caption overlaid on the video.
    const style = new PIXI.TextStyle({
      fontFamily: 'Arial',
      fontSize: 36,
      fontWeight: 'bold',
      fill: ['#ffffff', '#00ff99'], // gradient
      stroke: '#4a1850',
      strokeThickness: 5,
      dropShadow: true,
      dropShadowColor: '#000000',
      dropShadowBlur: 4,
      dropShadowAngle: Math.PI / 6,
      dropShadowDistance: 6,
      wordWrap: true,
      wordWrapWidth: 440,
    });

    const richText = new PIXI.Text('挑战你的极限', style);
    richText.x = 50;
    richText.y = 250;

    console.log(richText.texture, 'richText.texture');

    // (The original also built a `textSprite` from richText.texture but
    // never added it to the stage — dead code, removed.)
    app.stage.addChild(richText);
  }


  //  onAssetsLoaded();
  // Build the PNG frame animation from loader-cached textures, start it,
  // and schedule a freeze of both the animation and the video after 6s.
  function createImages(videoTexture) {
    createTextImg();

    // Collect the cached frames for each numbered sequence, in order.
    // DRY FIX: the original repeated the same loop three times with the
    // same prefix/count pairs already used to build the load manifest.
    const frames = [];
    const sequences = [['S1a', 71], ['S1b', 63], ['S1c', 15]];
    for (const [prefix, count] of sequences) {
      for (let i = 0; i < count; i++) {
        frames.push(TextureCache[`images/1X1/${prefix}_${i}.png`]);
      }
    }

    const anim = new PIXI.AnimatedSprite(frames);
    anim.x = 0;
    anim.y = 0;
    anim.animationSpeed = 0.4; // 24 fps on a 60 fps ticker: 24 / 60 = 0.4
    anim.play();
    app.stage.addChild(anim);

    // Start the shared ticker (rendering + animation).
    app.start();

    // After 6 seconds: stop the frame animation, pause the video, and
    // force one texture refresh so the paused frame is displayed.
    setTimeout(() => {
      anim.stop();
      videoTexture.baseTexture.source.pause();
      videoTexture.update();
    }, 6000);
  }
  // Placeholder hook called from createImages(); intentionally a no-op.
  // (It previously sketched drawing the poster image as a sprite.)
  function createTextImg() {
  }

  // 自定义几何图形
  function createGeometry() {
    const geometry = new PIXI.Geometry()
      .addAttribute('aVertexPosition', // the attribute name
        [-100, -100, // x, y
          100, -100, // x, y
          100, 100,
          -100, 100], // x, y], // x, y
        2) // the size of the attribute

      .addAttribute('aColor', // the attribute name
        [1, 0, 0, // r, g, b
          0, 1, 0, // r, g, b
          0, 0, 1], // r, g, b
        3) // the size of the attribute

      .addAttribute('aUvs', // the attribute name
        [0, 0, // u, v
          1, 0, // u, v
          1, 1,
          0, 1], // u, v
        2)                 // the size of the attribute
//      .addIndex([0, 1, 2, 0, 2, 3]);
      .addIndex([0, 1, 2, 0, 3,2]);

    const vertexSrc = `

   precision mediump float;

    attribute vec2 aVertexPosition;
    attribute vec2 aUvs;

    uniform mat3 translationMatrix;
    uniform mat3 projectionMatrix;

    varying vec2 vUvs;

    void main() {

        vUvs = aUvs;
        gl_Position = vec4((projectionMatrix * translationMatrix * vec3(aVertexPosition, 1.0)).xy, 0.0, 1.0);

    }`;

    const fragmentSrc = `

    precision mediump float;

    varying vec2 vUvs;

    uniform sampler2D uSampler2;
    uniform float time;

    void main() {

        gl_FragColor = texture2D(uSampler2, vUvs + sin( (time + (vUvs.x) * 14.) ) * 0.1 );
    }`;


    const uniforms = {
      uSampler2: TextureCache[imgUrl],
//      uSampler2: TextureCache[videoUrl],
      time: 0,
    };

    const shader = PIXI.Shader.from(vertexSrc, fragmentSrc, uniforms);

    const triangle = new PIXI.Mesh(geometry, shader);

    triangle.position.set(400, 300);
//    triangle.position.set(0, 0);
    triangle.scale.set(2);
    app.stage.addChild(triangle);
  }
  // NOTE(review): empty per-frame callback — currently a no-op run every
  // frame. This is the natural place to advance the shader's `time`
  // uniform if the wobble in createGeometry() should animate.
  app.ticker.add((delta) => {
  })

  // Pause the underlying <video> element (wired to the "暂停播放" button
  // via its inline onclick). No-op until the video texture exists.
  function stopPlay() {
    if (!videoTexture) {
      return;
    }
    videoTexture.baseTexture.source.pause();
  }

  // Resume the underlying <video> element (wired to the "继续播放视频"
  // button via its inline onclick). No-op until the video texture exists.
  function playContinue() {
    if (!videoTexture) {
      return;
    }
    videoTexture.baseTexture.source.play();
  }
</script>
</body>
</html>