-- ===========================================================================
-- Script-global state for the fake "WeChat transfer" camera sticker effect.
-- The host engine drives this chunk through initialize/frameReady/reset/
-- finalize, so shared state is intentionally kept in globals.
-- ===========================================================================
bottomSampler = nil -- animation sampler driving the bottom chat-list strip
middleSampler = nil -- sampler whose texture is swapped per animation stage
g_frameBuffer = nil -- offscreen FB holding the frozen camera frame
g_frameBufferSampler = nil -- sampler over g_frameBuffer's color target
g_frameFilterNode = nil -- NOTE(review): declared but never assigned in this file
g_count = 6 -- number of sticker samplers (b_one..b_five plus b_zero)
totolsamplers = {} -- [sic] sticker samplers indexed 1..g_count
wxNodes = {} -- every overlay node toggled by showWechat()
modelContext = nil -- fullscreen-quad model used to blit into g_frameBuffer
material = nil -- the quad's material; u_texture swapped each frame in frameReady
isPhotoTake = false -- set true on first touch; starts the whole sequence
isShowWechat = false -- true while the overlay/animation is active
avrtPhoto = {} -- avatar thumbnail nodes revealed stage by stage
eventExtension = nil -- KuruEvent extension (app <-> script messaging)
isRecording = false -- true while a video recording is in flight
currentState = 0 -- render mode: 0 preview, 1 video, 2 snapshot, 3 other
g_speed = 1.0 -- time-scale factor for the animation clock (was: "缩放速度", zoom/scale speed)
totalElapsedTime = 0 -- seconds accumulated since the animation started

--- Engine entry point: wires up extensions and events, preloads the sound,
--- builds the offscreen framebuffer and fullscreen-quad blit model, and
--- creates every scene node used by the effect.
-- @param scene the scene to populate
function initialize(scene)
  local kuruEngineInstance = KuruEngine.getInstance()
  kuruFaceDetector = KuruFaceDetectorExtension.cast(kuruEngineInstance:getExtension('KuruFaceDetector')) -- face detection
  kuruTouch = KuruTouchExtension.cast(kuruEngineInstance:getExtension('KuruTouch')) -- touch input
  kuruTouch:getTouchDownEvent():addEventHandler(onTouchDown)

  -- App <-> script event channel; used for recording start/stop round-trips.
  eventExtension = KuruEventExtension.cast(KuruEngine.getInstance():getExtension("KuruEvent"))
  eventExtension:getSimpleEvent():addEventHandler(onSimpleEvent)

  -- Preload the sound in stopped state; frameReady() plays it when the
  -- overlay opens.
  soundExtension = KuruSoundExtension.cast(KuruEngine.getInstance():getExtension("KuruSound"))
  g_sound = soundExtension:loadEx(BASE_DIRECTORY..("aidai.mp3"))
  g_sound:stop()
  oriNode = getCurrentShot(scene,0.7)
 
  -- Offscreen framebuffer at 70% of the scene resolution; it holds the
  -- frozen camera frame shown inside the overlay.
  local width= math.floor(scene:getResolution().x*0.7)
  local height=math.floor(scene:getResolution().y*0.7)

  g_frameBuffer = FrameBuffer.create("TEMP_STORED_FB_" .. 1, width, height, TextureFormat.RGBA)
  g_frameBufferSampler = TextureSampler.createWithTexture(g_frameBuffer:getRenderTarget(0):getTexture())
  g_frameBufferSampler:setWrapMode(TextureWrap.CLAMP, TextureWrap.CLAMP)
  -- NOTE(review): called without a sampler argument; the quad's u_texture is
  -- assigned per frame in frameReady() — confirm this is intended.
  modelContext = createFullscreenQuadMode()

  g_dpFilterNode = addNodeAndRelease(scene,KuruShaderFilterNode.createWithFragmentShaderFile(BASE_DIRECTORY.."display.frag",true))
  g_dpFilterNode:getMaterial():getParameter("photoImg"):setSampler(oriNode:getSampler())
  g_dpFilterNode:setEnabled(true)

  resNode = getCurrentShot(scene,0.7)

  passFilterNode = addNodeAndRelease(scene,KuruShaderFilterNode.createWithFragmentShaderFile(BASE_DIRECTORY.."pass.frag",true))
  passFilterNode:getMaterial():getParameter("photoImg"):setSampler(oriNode:getSampler())
  passFilterNode:setEnabled(true)

  oribgNode = addNodeAndRelease(scene, KuruFloatingImageNode.create(BASE_DIRECTORY .. "b_zf.png", -1.0, -1, 2, 2, BlendMode.None))
  createWechat(scene)
 
end

--- Handles simple events posted back from the host app: logs the outcome of
--- the video-record start/stop requests issued in frameReady().
-- @param event raw event, cast to KuruEventExtensionSimpleEventArgs
function onSimpleEvent(event)
  local args = KuruEventExtensionSimpleEventArgs.cast(event)
  local name = args:getName()
  local result = args:getArg()

  -- The two event names are mutually exclusive, so a single chain suffices.
  if name == "KaleSimpleEventVideoRecordStartResult" and result == "RecordStarted" then
    print("RecordStarted")
  elseif name == "KaleSimpleEventVideoRecordStopResult" and result == "RecordStopped" then
    print("RecordStopped")
  end
end
--- Builds the fake-WeChat overlay: background strip, middle chat bubble,
--- avatar thumbnails, top bar and the animated bottom chat list. Every node
--- starts disabled and is toggled via showWechat().
-- FIX: the top bar and the bottom animation node used to be created INSIDE
-- the avatar loop, producing five duplicate nodes each and overwriting
-- bottomSampler five times (leaking the earlier samplers). They are now
-- created exactly once, after the loop.
-- @param scene the scene to populate
function createWechat(scene)
  local topHeight = 0.2
  local middleHeight = 0.9
  local offsetLeft = 0.359

  -- Background strip showing the frozen camera frame.
  local bgnode = KuruFloatingImageNode.createFromSampler(g_frameBufferSampler,-1.0+offsetLeft,-1+middleHeight+0.1,2-2*offsetLeft+0.025,2-middleHeight-topHeight-0.2,BlendMode.None)
  wxNodes[#wxNodes+1] = bgnode
  bgnode:setEnabled(false)
  addNodeAndRelease(scene,bgnode)

  -- One sticker sampler per animation stage (digits one..five, plus zero).
  local stickerFiles = {"b_one.png","b_two.png","b_three.png","b_four.png","b_five.png","b_zero.png"}
  for i=1,g_count do
    totolsamplers[i] = getStickerSampler(stickerFiles[i],10)
  end

  -- Middle bubble; its texture is swapped per stage in frameReady().
  middleSampler = getStickerSampler("b_zero.png",10)
  local node2 = KuruFloatingImageNode.createFromSampler(middleSampler,-1.0,-1+middleHeight+0.0,2,2-middleHeight-topHeight+0.15,BlendMode.None)
  wxNodes[#wxNodes+1] = node2
  node2:setEnabled(false)
  addNodeAndRelease(scene,node2)

  -- Avatar thumbnails (live framebuffer), stacked down the right edge with a
  -- 0.02 vertical gap from the second row on ((i-1)*marginY is 0 for i==1,
  -- matching the original hand-unrolled coordinates exactly).
  local marginY = 0.02
  for i=1,g_count-1 do
    local avatfNode = KuruFloatingImageNode.createFromSampler(g_frameBufferSampler,1.0-0.30,0.9-topHeight-0.22*0.5625*i-(i-1)*marginY,0.230,0.22*0.5625,BlendMode.None)
    addNodeAndRelease(scene,avatfNode)
    avrtPhoto[i] = avatfNode
    avatfNode:setEnabled(false)
  end

  -- Top bar, created once.
  local topNode = addNodeAndRelease(scene, KuruFloatingImageNode.create(BASE_DIRECTORY .. "b_up.png", -1.0, 1-topHeight, 2.0, topHeight, BlendMode.None))
  topNode:setEnabled(false)
  wxNodes[#wxNodes+1] = topNode

  -- Bottom animated chat list, created once.
  -- NOTE(review): "b_aabb" has no ".png" extension unlike the other assets —
  -- confirm the asset path is correct.
  bottomSampler = getStickerSampler("b_aabb",10)
  local bottomNode = KuruFloatingImageNode.createFromSampler(bottomSampler,-1.0,-1,2,middleHeight,BlendMode.None)
  wxNodes[#wxNodes+1] = bottomNode
  bottomNode:setEnabled(false)
  addNodeAndRelease(scene,bottomNode)
end
--- Toggles the fake-WeChat overlay nodes on or off.
-- When hiding, also hides every avatar thumbnail; the isShowWechat flag is
-- only raised here (frameReady() is responsible for clearing it).
-- NOTE(review): if the overlay was never built (at most one node), the flag
-- is raised regardless of `enabled` — original behavior, kept as-is.
-- @param scene unused; kept for signature compatibility
-- @param enabled true to show the overlay, false to hide it
function showWechat(scene,enabled)
  if #wxNodes <= 1 then
    isShowWechat = true
    return
  end

  for _, overlayNode in ipairs(wxNodes) do
    overlayNode:setEnabled(enabled)
  end

  if enabled == true then
    isShowWechat = true
  else
    -- Hiding: also turn off all avatar thumbnails.
    for _, avatarNode in ipairs(avrtPhoto) do
      avatarNode:setEnabled(false)
    end
  end
end

--- Per-frame driver for the effect.
--- Before the first tap it blits the live shot into g_frameBuffer; after the
--- tap it hides the preview layers, starts video recording and sound, shows
--- the fake-WeChat overlay, and advances the staged chat animation keyed to
--- the bottom sampler's frame index.
-- FIX: `state` was an accidental global (created every frame); now local.
-- @param scene the active scene
-- @param elapsedTime frame delta in milliseconds (scaled by 0.001 below)
function frameReady(scene, elapsedTime)
  -- Track the engine render mode: 0 = preview (shutter not pressed),
  -- 1 = video recording, 2 = photo snapshot, 3 = anything else.
  if RenderConfig.instance():isRenderModePreview() then
    currentState = 0
  elseif RenderConfig.instance():isRenderModeVideo() then
    currentState = 1
  elseif RenderConfig.instance():isRenderModeSnapshot() then
    currentState = 2
  else
    currentState = 3
  end

  if isPhotoTake == false then
    -- Not tapped yet: keep copying the current shot into g_frameBuffer so
    -- the overlay later shows the frozen frame.
    local state = FrameBufferBindingState.create()
    material:getParameter("u_texture"):setSampler(resNode:getSampler())
    g_frameBuffer:bindWithViewport(true)
    modelContext:draw()
    state:restore()
  else
    -- Tapped: hide the live preview layers.
    passFilterNode:setEnabled(false)
    oribgNode:setEnabled(false)
    resNode:setEnabled(false)
    g_dpFilterNode:setEnabled(false)
    if isShowWechat == false then
      -- First frame after the tap: start recording, overlay and sound.
      if isRecording == false then
        isRecording = true
        eventExtension:postSimpleEventToApp("KaleSimpleEventVideoRecordStart", "")
        print("ststst")
      end
      showWechat(scene,true)
      -- Restart the bottom-strip animation from the beginning, single pass.
      bottomSampler:stop()
      bottomSampler:setRepeatCount(1)
      bottomSampler:setFPS(10)
      bottomSampler:play()
      g_sound:play(false)
    end
  end

  if isShowWechat then
    -- Stage thresholds (bottom-sampler frame indices): 18, 43, 61, 77, 96.
    print("getFrameIndexgetFrameIndex"..bottomSampler:getFrameIndex())
    if totalElapsedTime < 10.5 then
      -- elapsedTime is in ms; g_speed scales the animation clock.
      totalElapsedTime = totalElapsedTime + elapsedTime * 0.001 * g_speed
    else
      -- ~10.5 s elapsed: stop recording and reset every state flag.
      if isRecording == true then
        isRecording = false
        isPhotoTake = false
        isShowWechat = false
        g_sound:stop()
        totalElapsedTime = 0.0
        eventExtension:postSimpleEventToApp("KaleSimpleEventVideoRecordStop", "")
      end
    end
    -- Swap the middle-bubble texture and reveal avatars stage by stage.
    if bottomSampler:getFrameIndex() <= 18 then
      for i = 1, #avrtPhoto do
        avrtPhoto[i]:setEnabled(false)
      end
      middleSampler:setTexture(totolsamplers[6]:getTexture())
    elseif bottomSampler:getFrameIndex() <= 43 then
      middleSampler:setTexture(totolsamplers[1]:getTexture())
      avrtPhoto[1]:setEnabled(true)
    elseif bottomSampler:getFrameIndex() <= 61 then
      middleSampler:setTexture(totolsamplers[2]:getTexture())
      avrtPhoto[2]:setEnabled(true)
    elseif bottomSampler:getFrameIndex() <= 77 then
      middleSampler:setTexture(totolsamplers[3]:getTexture())
      avrtPhoto[3]:setEnabled(true)
    elseif bottomSampler:getFrameIndex() <= 96 then
      middleSampler:setTexture(totolsamplers[4]:getTexture())
      avrtPhoto[4]:setEnabled(true)
    else
      middleSampler:setTexture(totolsamplers[5]:getTexture())
      avrtPhoto[5]:setEnabled(true)
    end
  end
end
--- Restores the preview UI after a run, unless a video is still recording.
-- On iOS the sound object is re-created from disk — presumably because a
-- stopped KuruSound cannot be replayed reliably there; original workaround
-- kept verbatim.
-- @param scene the active scene
function reset(scene)
  if currentState == 1 then
    return -- a video recording is in flight; leave everything untouched
  end

  -- iOS workaround: reload the sound instead of reusing the stopped one.
  if g_sound ~= nil and DeviceConfig.instance().platformType == DeviceConfigPlatformType.P_IOS then
    g_sound = nil
    g_sound = soundExtension:loadEx(BASE_DIRECTORY..("aidai.mp3"))
  end

  showWechat(scene, false)
  -- Re-enable the live preview layers hidden in frameReady().
  passFilterNode:setEnabled(true)
  oribgNode:setEnabled(true)
  resNode:setEnabled(true)
  g_dpFilterNode:setEnabled(true)
end
--- Creates a fullscreen-quad Model used to blit a sampler into g_frameBuffer.
-- Side effect: publishes the quad's material through the script-global
-- `material` so frameReady() can swap its u_texture sampler every frame.
-- @param sampler initial texture sampler; may be nil (assigned later per frame)
-- @return the quad Model (which owns the material after setMaterial)
function createFullscreenQuadMode(sampler)
  local quadMesh = Mesh.createQuadFullscreen()
  local quadModel = Model.create(quadMesh)
  quadMesh:release() -- the model keeps its own reference

  material = Material.createWithShaderFile("res/shaders/passthrough.vert","res/shaders/passthrough.frag",nil)
  material:getParameter("u_worldViewProjectionMatrix"):setMatrix(Matrix:identity())
  material:getParameter("u_texture"):setSampler(sampler)
  quadModel:setMaterial(material,-1)
  material:release() -- model holds a ref; the global stays usable

  return quadModel
end

--- Creates a playing animation sampler for a sticker asset under
--- BASE_DIRECTORY, repeating once.
-- FIX: the fps argument was accepted but ignored (setFPS hard-coded 10);
-- it is now honoured, defaulting to 10 — behavior-identical for every
-- existing call site, which all pass 10.
-- @param filepath asset name relative to BASE_DIRECTORY
-- @param fps playback frames per second (defaults to 10 when nil)
-- @return a playing KuruAnimationSampler
function getStickerSampler(filepath,fps)
  local sp = KuruAnimationSampler.createFromPath(BASE_DIRECTORY..filepath,false,false)
  sp:setRepeatCount(1)
  sp:setFPS(fps or 10)
  sp:play()

  return sp
end

-- function onSimpleEvent(event)
  
--   local e = KuruEventExtensionSimpleEventArgs.cast(event)
--   print("onSimpleEventonSimpleEvent"..e:getName())
--   if e:getName() == EVENT_NAME_PHOTO_TAKE then
--     g_eventExtension:postSimpleEventToApp(EVENT_NAME_PHOTO_TAKE, "")
--   end
-- end
-- function getStickerSampler(filepath,fps)
-- 	local sp = KuruAnimationSampler.createFromPath(BASE_DIRECTORY..filepath,false,false)
-- 	sp:setRepeatCount(0)
-- 	sp:setFPS(fps)
-- 	sp:play()
  
-- 	return sp
-- end

--- Adds a node to the scene and drops the local reference so the scene
--- becomes the sole owner (standard refcount-transfer idiom in this file).
-- @param scene target scene
-- @param node node to add; its refcount is released after addNode retains it
-- @return the same node, for call chaining
function addNodeAndRelease(scene, node)
  scene:addNode(node)
  node:release()
  return node
end


--- Creates a snapshot node, hands ownership to the scene, and scales its
--- frame buffer uniformly.
-- @param scene target scene
-- @param bufferScale scale factor applied to both frame-buffer axes
-- @return the scene-owned KuruSnapshotNode
function getCurrentShot(scene, bufferScale)
  local snapshotNode = KuruSnapshotNode.create()
  scene:addNode(snapshotNode)
  snapshotNode:release() -- scene now holds the reference
  snapshotNode:setFrameBufferScale(bufferScale, bufferScale)
  return snapshotNode
end


--- Creates a segmentation node fed by the given snapshot node's sampler and
--- hands ownership to the scene.
-- @param scene target scene
-- @param distortionNode unused (the wiring was commented out in the original)
-- @param snapshotNode source of the segmentation input sampler
-- @return the scene-owned KuruSegmentationNode
function createSegNode(scene, distortionNode, snapshotNode)
  local segNode = KuruSegmentationNode.create()
  segNode:setSourceSampler(snapshotNode:getSnapshot())
  scene:addNode(segNode)
  segNode:release() -- scene now holds the reference
  return segNode
end

--- Touch-down handler: the first tap anywhere raises isPhotoTake, which
--- starts the capture/overlay sequence in frameReady().
-- @param event touch event (unused)
function onTouchDown(event)
  isPhotoTake = true
  print('touchdown')
end

--- Tears down everything created in initialize()/createWechat(): detaches
--- event handlers and releases samplers, framebuffer and the blit model.
-- FIX: every release is nil-guarded so a partially-failed initialize() (or a
-- finalize() arriving before setup completed) no longer crashes here.
-- @param scene the scene being torn down (unused)
function finalize(scene)
  if eventExtension ~= nil then
    eventExtension:getSimpleEvent():removeEventHandler(onSimpleEvent)
  end
  if kuruTouch ~= nil then
    kuruTouch:getTouchDownEvent():removeEventHandler(onTouchDown)
  end
  if bottomSampler ~= nil then bottomSampler:release() end
  if middleSampler ~= nil then middleSampler:release() end
  if g_frameBuffer ~= nil then g_frameBuffer:release() end
  if g_frameBufferSampler ~= nil then g_frameBufferSampler:release() end
  if modelContext ~= nil then modelContext:release() end
  for i = 1, #totolsamplers do
    if totolsamplers[i] ~= nil then totolsamplers[i]:release() end
  end
end
