package encoder

import (
	"github.com/pterm/pterm"
	"image/jpeg"
	"os"
	"time"
	"xmediaEmu/pkg/encoder/yuv"
	iImage "xmediaEmu/pkg/image"
	"xmediaEmu/pkg/log"
)

// accelerateDc holds a pre-rendered 540x960 canvas with the warm-up
// background image; Start feeds it to the encoder to prime it before
// real frames arrive.
var accelerateDc *iImage.Context

// init loads ./bg_yiyan.jpeg from the working directory and draws it
// onto the warm-up canvas. Any failure panics, so the binary refuses to
// start when the image is missing or corrupt.
// NOTE(review): init() with file I/O means startup depends on the CWD —
// consider lazy loading or an explicit setup call.
func init() {
	file, err := os.Open("./bg_yiyan.jpeg")
	if err != nil {
		panic(err)
	}
	defer file.Close()
	accelerateImg, err := jpeg.Decode(file)
	if err != nil {
		panic(err)
	}

	// 540x960 presumably matches the expected portrait frame size — the
	// pipe itself is sized by NewVideoPipe(w, h); confirm they agree.
	accelerateDc = iImage.NewContext(540, 960)
	accelerateDc.DrawImage(accelerateImg, 0, 0)
}

// VideoPipe converts RGBA frames received on Input into YUV I420,
// encodes them with the configured encoder, and emits the encoded
// frames on Output. Start runs the loop; Stop shuts it down.
type VideoPipe struct {
	Input  chan InFrame  // incoming raw frames; closed by Stop
	Output chan OutFrame // encoded frames; closed when Start returns
	done   chan struct{} // closed by Start after the final flush

	encoder Encoder // underlying video encoder implementation

	// frame size
	w, h int
}

// NewVideoPipe returns new video encoder pipe.
// By default it waits for RGBA images on the input channel,
// converts them into YUV I420 format,
// encodes with provided video encoder, and
// puts the result into the output channel.
func NewVideoPipe(enc Encoder, w, h int) *VideoPipe {
	return &VideoPipe{
		Input:  make(chan InFrame, 1),
		Output: make(chan OutFrame, 2),
		done:   make(chan struct{}),

		encoder: enc,

		w: w,
		h: h,
	}
}

// Start begins video encoding pipe.
// Should be wrapped into a goroutine.
// TODO: 此函数解码率下降.
// Start begins video encoding pipe.
// Should be wrapped into a goroutine. It warms up the encoder with the
// preloaded background image, then encodes frames from Input until the
// channel is closed, and finally flushes any frames the encoder still
// buffers internally. Output and done are closed on return.
// TODO(review): original note said decoding rate drops in this function — profile.
func (vp *VideoPipe) Start() {
	defer func() {
		// recover here keeps a panicking encoder from killing the process;
		// Output/done are closed either way so Stop does not block forever.
		if r := recover(); r != nil {
			log.Logger.Error("Warn: Recovered panic in encoding ", r)
		}
		close(vp.Output)
		close(vp.done)
	}()

	yuvProc := yuv.NewYuvImgProcessor(vp.w, vp.h)

	// Warm-up: feed the static background image until the encoder emits
	// its first frame (encoders may buffer a few inputs before producing
	// output), giving up after 30 attempts. Note: len(frame) is never
	// negative, so the original "error" branch was unreachable and has
	// been removed.
	for i := 0; i < 30; i++ {
		yCbCr := yuvProc.Process(accelerateDc.ImageRgba()).Get()
		frame := vp.encoder.Encode(yCbCr)
		if len(frame) == 0 {
			log.Logger.Debugf("VideoPipe Encode accelerateDc nil, may be buffer, accelerateDc length:%d, yCbCr length:%d, frame length:%d. \n", len(accelerateDc.ImageRgba().Pix), len(yCbCr), len(frame))
			continue
		}
		log.Logger.Debugf("VideoPipe Encode accelerateDc success, Image length:%d, yCbCr length:%d, frame length:%d. \n", len(accelerateDc.ImageRgba().Pix), len(yCbCr), len(frame))
		// Timestamp unit: nanoseconds / 8333 — presumably a 120kHz-style
		// clock expected by the consumer; confirm against the receiver.
		vp.Output <- OutFrame{Data: frame, Timestamp: uint32(time.Now().UnixNano() / 8333)}
		break
	}

	// Main loop: runs until Stop closes vp.Input.
	for img := range vp.Input {
		yCbCr := yuvProc.Process(img.Image).Get()
		frame := vp.encoder.Encode(yCbCr)
		if len(frame) == 0 {
			// Encoder buffered this input; nothing to emit yet.
			log.Logger.Debugf("VideoPipe Encode image nil, may be buffer, img.Image length:%d, yCbCr length:%d, frame length:%d. \n", len(img.Image.Pix), len(yCbCr), len(frame))
			continue
		}
		log.Logger.Debugf("VideoPipe Encode success, Image length:%d, yCbCr length:%d, frame length:%d. \n", len(img.Image.Pix), len(yCbCr), len(frame))
		vp.Output <- OutFrame{Data: frame, Timestamp: img.Timestamp}
	}

	// Input is closed: drain frames still buffered inside the encoder by
	// encoding nil until it returns nothing.
	for {
		frame := vp.encoder.Encode(nil)
		if len(frame) == 0 {
			break
		}
		pterm.FgGreen.Printf("VideoPipe Encode delayed buff success, frame length:%d. \n", len(frame))
		log.Logger.Infof("VideoPipe Encode delayed buff success, frame length:%d. \n", len(frame))
		// The original timestamps for flushed frames are lost; emit 0.
		vp.Output <- OutFrame{Data: frame, Timestamp: 0}
	}
}

// Stop shuts the pipe down: it closes Input so Start's main loop ends,
// waits for Start to flush the encoder's buffered frames (signaled via
// done), then shuts the encoder down. Must be called at most once —
// closing Input twice would panic.
func (vp *VideoPipe) Stop() {
	close(vp.Input)

	// Wait until Start has drained the encoder and closed Output.
	<-vp.done
	if err := vp.encoder.Shutdown(); err != nil {
		// Include the error value — the original log dropped it.
		log.Logger.Error("error: failed to close the encoder ", err)
	}
}
