package main

import (
	"common/rtpengine/rtp/codecs"
	"github.com/pterm/pterm"
	"io/ioutil"
	"os"
	"xmediaEmu/pkg/decoder"
	"xmediaEmu/pkg/decoder/codec"
	"xmediaEmu/pkg/decoder/codec/aacparser"
	"xmediaEmu/pkg/decoder/codec/amrparser"
	"xmediaEmu/pkg/decoder/codec/h264parser"
	"xmediaEmu/pkg/decoder/fdkaac"
	"xmediaEmu/pkg/format/flv"
	"xmediaEmu/pkg/format/flv/flvio"
)

// AacCapabilities hard-codes the codec capability set advertised by this
// emulator: a single AAC audio codec with NACK RTCP feedback.
// (Composite literals simplified per gofmt -s: element types are implied
// by the map/slice type.)
var AacCapabilities = map[string]*CapabilityAac{
	"audio": {
		Codecs: []string{"AAC"},
		Rtcpfbs: []*RtcpFeedbackAac{
			{ID: "nack"},
		},
	},
}

// RtmpStreamAac holds the per-stream state of an RTMP parsing streamer:
// the raw stream list plus cached codec data for the H264 video and AAC
// audio tracks, and the capability set used for negotiating with
// external media.
type RtmpStreamAac struct {
	streams        []codec.CodecData
	videoCodecData h264parser.CodecData
	audioCodecData aacparser.CodecData

	// spspps presumably flags whether SPS/PPS have been seen — it is
	// never written in this file; confirm against the rest of the project.
	spspps       bool
	cachedPacket *codecs.H264Packet // for cached.

	// adtsheader is scratch space for the 7-byte AAC ADTS header
	// (allocated in WriteHeader when an AAC stream is present).
	adtsheader []byte

	// TODO: the fields below are used to build SDP; skip if not needed.
	// When talking to external media we should adapt our own codec
	// attributes to theirs and avoid transcoding where possible (e.g. h265).
	audioCapability *CapabilityAac
	// videoCapability *CapabilityAac
}

// RtcpFeedbackAac describes a single RTCP feedback mechanism (e.g. "nack")
// and its optional parameters, as serialized into the capability JSON.
type RtcpFeedbackAac struct {
	ID     string   `json:"id,omitempty"`
	Params []string `json:"params,omitempty"`
}

// CapabilityAac describes one media capability for negotiation: the codec
// names offered, RTX/simulcast support, RTCP feedback mechanisms and RTP
// header extensions.
type CapabilityAac struct {
	Codecs     []string           `json:"codecs"`
	Rtx        bool               `json:"rtx,omitempty"`
	Rtcpfbs    []*RtcpFeedbackAac `json:"rtcpfbs,omitempty"`
	Extensions []string           `json:"extensions,omitempty"`
	Simulcast  bool               `json:"simulcast,omitempty"`
}

// NewRtmpStreamerAac creates a media transformer for the given audio
// capability. The video capability parameter is accepted for interface
// symmetry but currently ignored.
func NewRtmpStreamerAac(audio *CapabilityAac, video *CapabilityAac) *RtmpStreamAac {
	// Only audio is wired up today; the video transform is not implemented.
	return &RtmpStreamAac{audioCapability: audio}
}

// WriteHeader records the stream list and caches the per-track codec data
// (H264 video, AAC audio). For AAC it also reserves the 7-byte scratch
// buffer used for ADTS headers. Always returns nil.
func (rs *RtmpStreamAac) WriteHeader(streams []codec.CodecData) error {
	rs.streams = streams

	for _, s := range streams {
		switch s.Type() {
		case codec.H264:
			rs.videoCodecData = s.(h264parser.CodecData)
		case codec.AAC:
			rs.audioCodecData = s.(aacparser.CodecData)
			rs.adtsheader = make([]byte, 7)
		}
	}

	return nil
}

// Frame-size tables and constants for AMR-NB / AMR-WB file frame types.
var (
	// amr_nbFrameLen is indexed by the 4-bit AMR-NB frame type (FT).
	// PayloadAmr operates on a bit-expanded array, so these values are
	// presumably frame lengths in bits — confirm against RFC 4867.
	amr_nbFrameLen = []int{95, 103, 118, 134, 148, 159, 204, 244, 39}
	// wbSpeechFrameBytes is indexed by the 4-bit AMR-WB frame type and
	// holds speech frame lengths in bytes; entries 9-15 are zero.
	wbSpeechFrameBytes = []int{18, 23, 33, 37, 41, 47, 51, 59, 61, 0, 0, 0, 0, 0, 0, 0}
	// CMR is a 4-bit codec-mode-request pattern; currently unused
	// (its append in PayloadAmr is commented out).
	CMR = []byte{1, 1, 1, 1}
)

// AmrPayloader re-packetizes AMR/AMR-WB storage-format data into
// RTP-style per-frame payloads; it carries no state.
type AmrPayloader struct{}

// PayloadAmr splits an AMR-NB storage-format buffer into one RTP payload
// per frame. It works on a bit-expanded copy of the input (one byte per
// bit, see Bytes2Bits), so every index and length below is in BITS;
// amr_nbFrameLen is indexed by the 4-bit FT field and presumably holds
// per-mode frame lengths in bits — TODO confirm against RFC 4867.
// Assumes the buffer starts with the 6-byte "#!AMR\n" file magic, which
// is stripped first — verify at call sites.
func (p *AmrPayloader) PayloadAmr(amr_data []byte) (amrPayLoad [][]byte) {
	amr_data = amr_data[6:]
	amr_data = Bytes2Bits(amr_data)
	for index := 0; index < len(amr_data); {
		payload := make([]byte, 0)
		// payload = append(payload, CMR...)                       //CMR
		payload = append(payload, 0)                            // F bit (frame-follows), always 0 here
		payload = append(payload, amr_data[index+1:index+6]...) // FT (4 bits) and Q (1 bit)
		// Frame length (in bits) looked up by the 4-bit FT value.
		flen := amr_nbFrameLen[int((amr_data[index+1]<<3)+(amr_data[index+2]<<2)+(amr_data[index+3]<<1)+amr_data[index+4])]
		payload = append(payload, amr_data[index+8:index+flen+8]...) // speech payload bits (skips the 8-bit TOC)
		for num := 8 - len(payload)%8; 0 < num && num < 8; num-- {   // zero-pad up to a byte boundary
			payload = append(payload, 0)
		}
		amrPayLoad = append(amrPayLoad, Bits2Bytes(payload))
		// Advance past the 8-bit TOC plus the frame data, then past the
		// storage-format padding that byte-aligns the next frame.
		index += 8 + flen
		for num := 8 - flen%8; 0 < num && num < 8; num-- {
			index += 1
		}
	}
	return amrPayLoad
}

// PayloadAmrWB splits an AMR-WB storage-format buffer into one payload
// per frame, working directly on bytes. The first byte of each output
// frame packs a CMR value of 8 into the high nibble, then the TOC and
// speech bits are re-packed shifted left by 2 bits (dropping the
// storage-format padding bits).
//
// NOTE(review): ft is taken as amr_data[current]>>3 (5 bits); the table
// only has 16 entries, so this assumes the top padding bit is 0. For
// FT >= 9 wbSpeechFrameBytes yields 0, making frame zero-length and the
// frame[index] write below would panic — SID/NO_DATA frames are not
// handled; confirm inputs only contain speech frames (FT 0-8).
// Assumes the buffer starts with the 9-byte "#!AMR-WB\n" file magic.
func (p *AmrPayloader) PayloadAmrWB(amr_data []byte) (amrPayLoad [][]byte) {
	var frame []byte
	amr_data = amr_data[9:]
	for current, length := 0, len(amr_data); current < length; {
		index := 0
		ft := amr_data[current] >> 3 // frame type from the TOC byte
		next := current + wbSpeechFrameBytes[ft]
		frame = make([]byte, wbSpeechFrameBytes[ft])
		// First two bytes: CMR (8) in the high nibble, then the input
		// stream re-packed with a 2-bit left shift.
		frame[index] = (8 << 4) | (amr_data[current] >> 4)
		frame[index+1] = (amr_data[current] << 4) | (amr_data[current+1] >> 2)
		index += 2
		for current += 2; current < next; current++ {
			frame[index] = (amr_data[current-1] << 6) | (amr_data[current] >> 2)
			index++
		}
		amrPayLoad = append(amrPayLoad, frame)
	}
	return amrPayLoad
}

// Bytes2Bits expands each input byte into 8 output bytes, one per bit,
// most-significant bit first; each output byte holds 0 or 1.
// The result is pre-sized (len(data)*8) to avoid repeated append growth.
func Bytes2Bits(data []byte) []byte {
	bin := make([]byte, 0, len(data)*8)
	for _, v := range data {
		for i := 0; i < 8; i++ {
			move := uint(7 - i)
			bin = append(bin, (v>>move)&1)
		}
	}
	return bin
}

// Bits2Bytes packs a bit array (one 0/1 value per element, MSB first)
// back into bytes — the inverse of Bytes2Bits.
//
// The output is pre-sized, and only complete groups of 8 bits are packed:
// the original indexed data[i+7] unconditionally and panicked when
// len(data) was not a multiple of 8; trailing partial bits are now
// ignored instead. Behavior is unchanged for well-formed (byte-aligned)
// input, which is what PayloadAmr produces.
func Bits2Bytes(data []byte) []byte {
	bytes := make([]byte, 0, len(data)/8)
	for i := 0; i+8 <= len(data); i += 8 {
		var b byte
		for j := 0; j < 8; j++ {
			b = b<<1 | data[i+j]
		}
		bytes = append(bytes, b)
	}
	return bytes
}

// main decodes an AMR-WB file (os.Args[1]) to PCM, re-encodes it as
// AAC-LC and muxes the result into an FLV file (os.Args[2]).
//
// Pipeline: file -> PayloadAmrWB (per-frame payloads) -> amrDecoder
// (PCM, resampled to 16 kHz mono FLTP) -> 2048-byte chunks -> fdkaac
// encoder -> FLV packets.
//
// Fixes vs. the original:
//   - log call with format verbs used Println instead of Printf, and
//     evaluated frame.Data[0] before checking that frame.Data was
//     non-empty (potential index-out-of-range panic);
//   - the chunk-encoding loops never advanced `start` when Encode
//     failed, spinning forever on a persistent error;
//   - the ReceiveReserved loop's `continue` statements skipped the
//     re-fetch at the bottom of the loop, re-processing the same frame
//     forever (and duplicating buffered PCM);
//   - an exact splitterLen-boundary chunk was deferred to reserveBuf and
//     lost at end of stream (`<` changed to `<=`).
func main() {
	// Hard-coded AAC output configuration: AAC-LC, 16 kHz, mono.
	config := aacparser.MPEG4AudioConfig{SampleRate: 16000, ChannelLayout: codec.CH_MONO, ChannelConfig: 1, SampleRateIndex: 4, ObjectType: aacparser.AOT_AAC_LC}
	config.Complete()
	config.SampleRate = 16000
	dAac := fdkaac.NewAacEncoder()
	if err := dAac.InitLc(1, config.SampleRate, 44100*2); err != nil {
		pterm.BgRed.Println("init decoder failed, err is", err)
		return
	}
	defer dAac.Close()

	if len(os.Args) < 3 {
		pterm.BgRed.Println("Please provide a input amr-wb audio file and output aac file")
		os.Exit(1)
	}

	// Build the AMR-WB decoder; decoder defaults are used for bitrate,
	// sample rate and format.
	var amrcodeData amrparser.AmrWbCodecData
	amrDecoder, err := decoder.NewAudioDecoderByName(amrcodeData)
	if err != nil {
		panic(err)
	}
	reSampler := &decoder.Resampler{
		OutSampleFormat:  codec.FLTP, // switch to S16 for a raw PCM byte stream.
		OutSampleRate:    16000,
		OutChannelLayout: codec.CH_MONO,
	}

	// ioutil.ReadFile is kept (rather than os.ReadFile) so the existing
	// file-level io/ioutil import stays used.
	fileBuf, err := ioutil.ReadFile(os.Args[1])
	if err != nil {
		pterm.BgRed.Printf("Unable to open file %s, error: %s\n", os.Args[1], err.Error())
		os.Exit(1)
	}

	f, err := os.Create(os.Args[2])
	if err != nil {
		panic(err)
	}
	flvWriter := flv.NewMuxer(f)

	// Build the FLV header directly from the AAC codec data.
	var streams []codec.CodecData
	aacCodec, err := aacparser.NewCodecDataFromMPEG4AudioConfig(config)
	if err != nil {
		pterm.BgRed.Printf("NewCodecDataFromMPEG4AudioConfig failed:%v, error: %s\n", config, err.Error())
		os.Exit(1)
	}
	streams = append(streams, aacCodec)
	if err = flvWriter.WriteHeader(streams); err != nil {
		panic(err)
	}

	// Split the file into per-frame payloads (CMR/padding bits rebuilt).
	var amrPayloader AmrPayloader
	payloadAmr := amrPayloader.PayloadAmrWB(fileBuf)
	var reserveBuf []byte // PCM bytes left over from the previous chunking pass

	timestamp := int32(0)
	// NOTE(review): the original comment claimed 20ms for a gap of 10;
	// confirm the real per-packet duration and derive it from the sample
	// count instead of a fixed increment.
	gap := int32(10)
	const splitterLen = 2048 // PCM bytes fed to the AAC encoder per call

	// encodeChunks encodes every full splitterLen chunk of frameBuf,
	// writes the resulting AAC packets, and returns the unconsumed tail.
	// section tags the log messages ("" or " ReceiveReserved").
	encodeChunks := func(frameBuf []byte, section string) []byte {
		start := 0
		for i := 0; start+splitterLen <= len(frameBuf); i++ {
			aacbytes, encErr := dAac.Encode(frameBuf[start : start+splitterLen])
			// Always advance past the chunk: the original `continue` on
			// error left start unchanged and looped forever.
			start += splitterLen
			if encErr != nil {
				pterm.FgRed.Println("dAac.Encode"+section+" failed, err is", encErr)
				continue
			}
			var pkt codec.Packet
			pkt.Data = aacbytes
			pkt.Idx = 0
			pkt.Time = flvio.TsToTime(timestamp)
			timestamp += gap
			if werr := flvWriter.WritePacket(pkt); werr != nil {
				pterm.FgRed.Printf("aacWriter.WritePacket%s failed: index:%v aacbytes.Data:length:%d err:%v\n", section, i, len(aacbytes), werr)
			} else {
				pterm.FgWhite.Printf("aacWriter.WritePacket%s success: index:%v aacbytes.Data:length:%d \n", section, i, len(aacbytes))
			}
		}
		// Empty when frameBuf divided evenly; carried into the next pass.
		return frameBuf[start:]
	}

	// drainEncoder flushes the AAC encoder and writes every remaining packet.
	drainEncoder := func() {
		aacbytes, ferr := dAac.Flush()
		for ; ferr == nil && len(aacbytes) > 0; aacbytes, ferr = dAac.Flush() {
			var pkt codec.Packet
			pkt.Data = aacbytes
			pkt.Idx = 0
			pkt.Time = flvio.TsToTime(timestamp)
			timestamp += gap
			if werr := flvWriter.WritePacket(pkt); werr != nil {
				pterm.FgRed.Printf("aacWriter.Flush failed: aacbytes.Data:length:%d err:%v\n", len(aacbytes), werr)
			} else {
				pterm.FgWhite.Printf("aacWriter.Flush success: aacbytes.Data:length:%d \n", len(aacbytes))
			}
		}
	}

	// Decode frame by frame, buffering PCM until a full chunk is ready.
	for _, pkt := range payloadAmr {
		frameResult, decErr := amrDecoder.DecodingMultipleFrame(pkt, reSampler)
		if decErr != nil && len(frameResult) <= 0 {
			pterm.FgRed.Printf("Decode amr result  err:%v\n", decErr)
			continue
		}
		for index, frame := range frameResult {
			// Guard before logging: frame.Data[0] below panics on empty Data.
			if len(frame.Data) == 0 {
				continue
			}
			pterm.FgWhite.Printf("Decode amr result length is: index:%d frameResult.Data[0]:length:%d frameResult.Data:length:%d err:%v\n", index, len(frame.Data[0]), len(frame.Data), decErr)

			frameBuf := reserveBuf
			for i := 0; i < len(frame.Data); i++ {
				frameBuf = append(frameBuf, frame.Data[i]...)
			}
			reserveBuf = encodeChunks(frameBuf, "")
		}
	}
	drainEncoder()

	// Pull out any frames the decoder buffered internally. Fetching at
	// the top of the loop guarantees progress on every iteration.
	for {
		got, frameResult, rerr := amrDecoder.ReceiveReserved()
		if len(frameResult.Data) == 0 {
			break
		}
		pterm.FgLightWhite.Printf("Decode ReceiveReserved result length is: got:%v frameResult.Data[0]:length:%d frameResult.Data:length:%d err:%v\n", got, len(frameResult.Data[0]), len(frameResult.Data), rerr)

		frameBuf := reserveBuf
		for i := 0; i < len(frameResult.Data); i++ {
			frameBuf = append(frameBuf, frameResult.Data[i]...)
		}
		reserveBuf = encodeChunks(frameBuf, " ReceiveReserved")
	}
	drainEncoder()

	amrDecoder.Close()
}

// CreatePcmS16LeFlvTag builds an FLV audio tag preset for mono,
// 16-bit little-endian PCM sound.
func CreatePcmS16LeFlvTag() (tag flvio.Tag) {
	tag = flvio.Tag{
		Type:        flvio.TAG_AUDIO,
		SoundType:   flvio.SOUND_MONO,
		SoundFormat: flvio.SOUND_PCM16LE,
	}
	return
}

//
//func CreateAacFlvTag() (tag flvio.Tag) {
//
//}
