import {FrameBuffer} from './FrameBuffer'
import {EventEmitter} from '../../base/events'

// Audio capture parameters handed to the Recorder instance (PCM encoding).
const SampleRate = 22050 // samples per second (Hz)
const Bits = 16          // passed as the Recorder's `bitRate` option

/**
 * Common state and geometry helpers shared by all capture sources.
 * Subclasses produce frames and push them into the shared FrameBuffer.
 */
export class CaptureBase extends EventEmitter {

    /** True while this source is actively producing frames. */
    public capturing: boolean = false;

    // Placement of the fitted content inside the stage (set via fixStage()).
    public contentWidth = 0
    public contentHeight = 0
    public contentX = 0
    public contentY = 0

    // Stage (output) dimensions; defaults overridden via setStageSize().
    public stageWidth = 320
    public stageHeight = 240

    // Optional source-rect cropping applied when drawing frames.
    public bCrop = false
    public cropPos = [0,0]
    public cropSize = [320, 240]

    // Recorder instance from the global `Recorder` library (untyped).
    public audioRecorder: any = null

    constructor(public type:string, public frameBuffer:FrameBuffer) {
        super()
    }

    /** Whether this source emits a continuous stream (vs. a single frame). */
    public isContinuous() {
        return true
    }

    /** Hook invoked when projection starts; subclasses may override. */
    public onStartProjection() {}

    /** Record the stage (output) dimensions used by fixStage(). */
    setStageSize(width:number, height:number) {
        this.stageWidth = width
        this.stageHeight = height
    }

    /**
     * Fit a w×h source into a stageW×stageH stage.
     * Oversized content is scaled down preserving aspect ratio; content that
     * already fits is only centered (never upscaled).
     *
     * @returns [x, y, width, height] of the fitted, centered content.
     */
    fixStage(w: number, h: number, stageW: number, stageH: number): [number, number, number, number] {
        // Fits already — just center it on the stage.
        if (w <= stageW && h <= stageH) {
            return [Math.floor((stageW - w) / 2), Math.floor((stageH - h) / 2), w, h]
        }
        // Wider than the stage's aspect ratio: pin width, center vertically.
        if (w / h > stageW / stageH) {
            const fitH = Math.floor(h * (stageW / w))
            return [0, Math.floor((stageH - fitH) / 2), stageW, fitH]
        }
        // Taller than the stage's aspect ratio: pin height, center horizontally.
        const fitW = Math.floor(w * (stageH / h))
        return [Math.floor((stageW - fitW) / 2), 0, fitW, stageH]
    }

    /** Cropping hook; no-op in the base class. */
    crop() {}
}

// let requestAnimationFrame = function(callback) {
//     setTimeout(callback,1000/30)
// }

/**
 * Capture source backed by an HTML media element (<video> or <img>).
 * Video frames are rasterized into a hidden canvas on each animation frame
 * and pushed to the FrameBuffer as base64 JPEG; audio is recorded from the
 * video's captured MediaStream via the global `Recorder` library.
 */
export class CaptureMedia extends CaptureBase {

    public video: HTMLVideoElement|null = null
    public img: HTMLImageElement|null = null

    // Hidden offscreen canvas used to rasterize frames before JPEG encoding.
    public canvas: HTMLCanvasElement = document.createElement('canvas')
    private ctx: CanvasRenderingContext2D = this.canvas.getContext('2d') as CanvasRenderingContext2D

    constructor(type:string, frameBuffer: FrameBuffer) {
        super(type, frameBuffer)

        this.canvas.style.display = 'none'
        this.canvas.style.backgroundColor = '#000'
        this.canvas.id = 'canvasProjMedia'
        document.body.appendChild(this.canvas)
    }

    // Once the video's intrinsic size is known, fit it into the stage and
    // size both the element and the rasterization canvas to match.
    private onloadedmetadata = ()=>{
        if(!this.video) {
            return
        }
        [   this.contentX,
            this.contentY,
            this.contentWidth,
            this.contentHeight
        ] = this.fixStage(this.video.videoWidth, this.video.videoHeight, this.stageWidth, this.stageHeight)
        this.video.width = this.contentWidth
        this.video.height = this.contentHeight
        console.log("loadedmetadata", this.contentWidth, this.contentHeight)

        this.canvas.width = this.contentWidth
        this.canvas.height = this.contentHeight
        this.canvas.style.width = this.contentWidth + 'px'
        this.canvas.style.height = this.contentHeight + 'px'
    }

    private onplay = ()=>{
        this.emit('onplay')
    }
    private onended = ()=>{
        this.emit('onstop')
    }
    private onpause = ()=>{
        this.emit('onstop')
    }

    private onImgLoad = () => {
        if(!this.img) {
            return
        }
    }

    /** Remove all listeners and drop references to the attached media. */
    detach() {
        if(this.video) {
            this.video.removeEventListener("loadedmetadata", this.onloadedmetadata)
            this.video.removeEventListener("play", this.onplay)
            this.video.removeEventListener("ended", this.onended)
            this.video.removeEventListener("pause", this.onpause)
            this.video = null
        }
        if(this.img) {
            this.img.removeEventListener("load", this.onImgLoad)
            this.img = null
        }
    }

    /** Attach a <video> or <img> element as the capture source. */
    attach(media: HTMLVideoElement | HTMLImageElement) {

        this.detach()

        if( media instanceof HTMLVideoElement ) {
            this.video = media
            this.video.addEventListener("loadedmetadata", this.onloadedmetadata)
            this.video.addEventListener("play", this.onplay)
            this.video.addEventListener("ended", this.onended)
            this.video.addEventListener("pause", this.onpause)
        }

        else if( media instanceof HTMLImageElement ){
            this.img = media
            this.img.addEventListener("load", this.onImgLoad)
        }
    }

    startCapture() {

        if(this.video) {
            // captureStream() is not in the standard lib.dom typings yet.
            let stream = (this.video as any).captureStream()
            this.startCaptureAudio( stream )

            this.capturing = true
            requestAnimationFrame(this.outputFrame)
        }

        else if(this.img) {
            // Still images are pushed once in onStartProjection(); nothing to loop.
        }

        else {
            throw new Error("no video")
        }
    }

    /** Start recording PCM audio from the given MediaStream, if it has audio. */
    startCaptureAudio(stream: any) {

        let audioTracks = stream.getAudioTracks()
        if(!audioTracks.length) {
            console.error("there is no audio track")
            return
        }

        // Dispose of any recorder that is still around.
        if(this.audioRecorder){
            this.audioRecorder.close();
        }

        this.audioRecorder = new (window as any).Recorder({
            type: "pcm"
            ,sampleRate: SampleRate
            ,bitRate: Bits
            ,sourceStream: stream // record audio from this specific stream
        })

        ;(window as any).recorder = this.audioRecorder
    }

    stopCaptureAudio(){
        // Take ownership of the recorder before clearing the field so the
        // async stop callback can still close it.
        // BUG FIX: previously the recorder was stopped and nulled up front,
        // which made this stop-with-callback path unreachable — close() was
        // never called and the recorder leaked.
        let recorder = this.audioRecorder
        this.audioRecorder = null
        if(!recorder){
            return
        }
        recorder.stop(function(){
            recorder.close()
        },console.error)
    }

    /** Begin capturing: immediately if playing, otherwise on the next play event. */
    start() {
        if(this.video) {
            if(this.video.paused) {
                this.once('onplay',()=>this.startCapture())
            }
            else {
                this.startCapture()
            }
        }
        else if (this.img) {
            this.startCapture()
        }
    }

    /** Stop capturing and release the audio recorder. */
    stop() {
        this.stopCaptureAudio()

        this.capturing = false
    }

    // rAF-driven loop: draw the current video frame to the canvas and push
    // it to the FrameBuffer as base64 JPEG.
    protected outputFrame = ()=>{
        if(!this.capturing || !this.ctx) {
            return
        }
        // Geometry not known yet (metadata pending) — try again next frame.
        if(!this.contentWidth || !this.contentHeight) {
            requestAnimationFrame(this.outputFrame)
            return
        }

        if(!this.frameBuffer.acceptTest(this.type)) {
            requestAnimationFrame(this.outputFrame)
            return
        }

        if(this.bCrop) {
            this.ctx.drawImage(this.video as HTMLVideoElement
                , this.cropPos[0], this.cropPos[1]
                , this.cropSize[0], this.cropSize[1]
                , 0,0, this.contentWidth, this.contentHeight)
        }
        else {
            this.ctx.drawImage(this.video as HTMLVideoElement, 0,0, this.contentWidth, this.contentHeight)
        }

        if(this.frameBuffer.savingFrames) {
            // BUG FIX: toDataURL quality must be in [0, 1]; the previous value
            // of 100 was out of range and silently ignored (browser default
            // quality used instead). 1.0 is the intended maximum quality.
            this.frameBuffer.saveOneFrame(this.canvas.toDataURL("image/jpeg", 1.0))
            requestAnimationFrame(this.outputFrame)
            return
        }

        if(this.frameBuffer.isFull()) {
            requestAnimationFrame(this.outputFrame)
            return
        }

        let urlData = this.canvas.toDataURL("image/jpeg", 0.7)
        this.frameBuffer.pushVideoFrameBase64(this.type,urlData,this.contentX,this.contentY)

        requestAnimationFrame(this.outputFrame)
    }

    /** A video streams continuously; a still image does not. */
    public isContinuous() {
        return ! this.img
    }

    /** For still images, push a single fitted frame when projection starts. */
    public onStartProjection() {
        if(this.img) {
            this.canvas.width = this.img.width
            this.canvas.height = this.img.height

            this.canvas.getContext("2d")?.drawImage(this.img,0,0,this.img.width,this.img.height)
            let [x,y] = this.fixStage(this.img.width, this.img.height, this.stageWidth, this.stageHeight)

            this.frameBuffer.pushVideoFrameBase64(this.type, this.canvas.toDataURL("image/jpeg",0.9), x,y)
        }
    }
}

/**
 * Capture source for screen sharing: prompts the user via getDisplayMedia
 * and streams the chosen display surface through the attached <video>.
 */
export class CaptureScreen extends CaptureMedia {

    async startCapture() {

        if (!this.video) {
            throw new Error("no video")
        }

        // Tear down any capture that is already in progress.
        this.stop()

        try {
            const displayStream = await navigator.mediaDevices.getDisplayMedia({
                audio: true,
                video: true,
            })

            // Route only the video tracks to the <video> element; the full
            // stream (including audio) goes to the audio recorder.
            const videoOnly = new MediaStream()
            displayStream.getVideoTracks().forEach(track => videoOnly.addTrack(track))
            this.video.srcObject = videoOnly

            this.startCaptureAudio(displayStream)

            this.capturing = true

        } catch (err) {
            // User denied the prompt or capture setup failed.
            console.error(err)
            return
        }

        requestAnimationFrame(this.outputFrame)
    }

    start() {
        if (!this.video) {
            return
        }
        // Fire-and-forget; errors are handled inside startCapture().
        void this.startCapture()
    }
}

/**
 * Capture source that cycles through a fixed list of pre-loaded images at a
 * given frame rate, pushing each as a base64 JPEG frame to the FrameBuffer.
 */
export class CaptureFrames extends CaptureBase {

    public frames: HTMLImageElement[] = []
    public frmIdx = 0
    // setInterval handle; -1 when no loop is running.
    private timer = -1

    public canvas: HTMLCanvasElement = document.createElement('canvas')
    private ctx: CanvasRenderingContext2D = this.canvas.getContext('2d') as CanvasRenderingContext2D

    constructor(type:string, frameBuffer:FrameBuffer) {
        super(type, frameBuffer)
    }

    // Interval-driven loop: draw the current frame, advance (wrapping), push.
    protected outputFrame = ()=>{
        if(!this.capturing) {
            return
        }
        // Buffer has no room — skip this tick.
        if(this.frameBuffer.isFull()) {
            return
        }

        let img = this.frames[this.frmIdx]
        if(!img) {
            return
        }

        // Advance with wrap-around so the sequence loops forever.
        this.frmIdx++
        if(this.frmIdx>=this.frames.length) {
            this.frmIdx = 0
        }

        this.ctx.drawImage(img, 0,0, this.contentWidth, this.contentHeight)

        let urlData = this.canvas.toDataURL("image/jpeg", 0.7)
        this.frameBuffer.pushVideoFrameBase64(this.type,urlData,this.contentX,this.contentY)
    }

    /**
     * Begin cycling through `frames` at `fps` frames per second.
     * Geometry is fitted from the first frame's dimensions.
     *
     * @throws Error if a capture is already running.
     */
    start(frames: HTMLImageElement[], fps:number) {
        if(!frames.length) {
            return
        }
        if(this.capturing) {
            throw new Error("capturing already")
        }

        [   this.contentX,
            this.contentY,
            this.contentWidth,
            this.contentHeight
        ] = this.fixStage(frames[0].width, frames[0].height, this.stageWidth, this.stageHeight)

        this.canvas.width = this.contentWidth
        this.canvas.height = this.contentHeight
        this.canvas.style.width = this.contentWidth + 'px'
        this.canvas.style.height = this.contentHeight + 'px'

        this.frames = frames
        this.frmIdx = 0
        this.capturing = true

        this.timer = setInterval(this.outputFrame, 1000/fps) as unknown as number
    }

    /** Stop the frame loop. */
    stop() {
        // BUG FIX: the timer is created with setInterval, so it must be
        // cancelled with clearInterval (clearTimeout only works by the
        // accident of browsers sharing one timer-ID pool).
        clearInterval(this.timer)
        this.timer = -1
        this.capturing = false
    }

}

// Placeholder for a WebGL-based capture source — no implementation yet;
// presumably intended to rasterize a WebGL canvas into frames. TODO confirm.
export class CaptureWebGL extends CaptureBase {
}
