package utils

import (
	"bytes"
	"compress/gzip"
	"context"
	"encoding/base64"
	"io"
	"runtime"
	"strconv"
	"sync"

	"github.com/bytedance/gopkg/util/logger"
	"github.com/go-redis/redis"
	jsoniter "github.com/json-iterator/go"
	"github.com/tendermint/tendermint/abci/example/kvstore"
	"github.com/tendermint/tendermint/my_test/config"
	"github.com/tendermint/tendermint/my_test/performance/entity"
	"github.com/tendermint/tendermint/rpc/client/http"
)

// n is the number of worker goroutines the Preheat helpers fan out to,
// one per available CPU.
var n int = runtime.NumCPU()

// PreheatWithVO warms the Redis cache for every object id in
// [1, config.MaxObjectId]. For each id it collects the matching
// transactions together with their Merkle proofs (the "verification
// object") and stores the gzip+base64-encoded JSON under the object id
// key. The id space is split across n worker goroutines; any RPC,
// serialization, or Redis failure aborts the process via panic, which
// is acceptable for this offline warm-up tool.
func PreheatWithVO(cli *http.HTTP, r *redis.Client, ctx context.Context) {
	var wg sync.WaitGroup

	// Each worker owns a contiguous slice of the id space; the last
	// worker also absorbs the remainder of the integer division.
	part := config.MaxObjectId / n

	for i := 0; i < n; i++ {
		wg.Add(1)

		go func(i int) {
			defer wg.Done()

			from, to := i*part+1, (i+1)*part
			if i == n-1 {
				to = config.MaxObjectId
			}

			for objectId := from; objectId <= to; objectId++ {
				objectIdStr := strconv.Itoa(objectId)

				var heightList []int64

				// The ABCI query response carries the current block
				// height, which we need to fetch the latest header.
				abciQueryRes, err := cli.ABCIQuery(ctx, "", nil)
				if err != nil {
					logger.Errorf("[PreheatWithVO] abciQuery error, objectId:%v, err:%v", objectId, err)
					panic(err)
				}

				blockRes, err := cli.Block(ctx, &abciQueryRes.Response.Height)
				if err != nil {
					logger.Errorf("[PreheatWithVO] block error, objectId:%v, err:%v", objectId, err)
					panic(err)
				}

				// MPT lookup yields the heights of all blocks that
				// contain transactions for this object id.
				mptQueryRes, err := cli.MPTQuery(ctx, blockRes.Block.Header.MPTHash, objectIdStr)
				if err != nil {
					logger.Errorf("[PreheatWithVO] mptQuery error, objectId:%v, err:%v", objectId, err)
					panic(err)
				}

				if mptQueryRes != nil {
					heightList = mptQueryRes.HeightList
				}

				provRes := new(entity.ProvResult)

				// j, not i: the original shadowed the worker index here.
				for j := 0; j < len(heightList); j++ {
					// Skip adjacent duplicate heights; assumes equal
					// heights are grouped together in heightList.
					if j > 0 && heightList[j] == heightList[j-1] {
						continue
					}

					h := heightList[j]
					blockRes, err = cli.Block(ctx, &h)
					if err != nil {
						logger.Errorf("[PreheatWithVO] block error, objectId:%v, err:%v", objectId, err)
						panic(err)
					}

					for index := 0; index < len(blockRes.Block.Txs); index++ {
						var tx kvstore.Tx
						if err = jsoniter.Unmarshal(blockRes.Block.Txs[index], &tx); err != nil {
							panic(err)
						}

						if tx.Data.ObjectID == objectIdStr {
							proof := blockRes.Block.Txs.Proof(index)

							// Drop the payload before caching; only the
							// tx metadata plus its proof is stored.
							tx.Data = nil
							provRes.Txs = append(provRes.Txs, &tx)

							provRes.MerkleProofList = append(provRes.MerkleProofList, &entity.MerkleProof{
								TxProof:  proof,
								RootHash: blockRes.Block.DataHash,
							})
						}
					}
				}

				if len(provRes.Txs) > 0 {
					// Marshal the pointer directly (the original passed
					// &provRes, a **T), matching SearchTxByObjectIdAndDataType.
					jsonTxList, err := jsoniter.Marshal(provRes)
					if err != nil {
						logger.Errorf("[PreheatWithVO] marshal error, objectId:%v, err:%v", objectId, err)
						panic(err)
					}

					// Check the Set result: the original silently
					// ignored Redis write failures.
					if err := r.Set(objectIdStr, GzipCompress(jsonTxList), 0).Err(); err != nil {
						logger.Errorf("[PreheatWithVO] redis set error, objectId:%v, err:%v", objectId, err)
						panic(err)
					}
				}
			}
		}(i)
	}

	wg.Wait()
}

// SearchTxByObjectIdAndDataType finds every transaction matching the
// given object id and data type, bundles each match with its Merkle
// proof, and returns the result as a gzip-compressed, base64-encoded
// JSON string. Errors are logged and returned to the caller.
func SearchTxByObjectIdAndDataType(cli *http.HTTP, ctx context.Context, objectId string, dataType int) (string, error) {
	// The latest app state gives us the current block height, whose
	// header holds the MPT root to query against.
	abciQueryRes, err := cli.ABCIQuery(ctx, "", nil)
	if err != nil {
		logger.Errorf("[SearchTxByObjectIdAndDataType] abciQuery error, objectId:%v, dataType:%v, err:%v", objectId, dataType, err)
		return "", err
	}

	blockRes, err := cli.Block(ctx, &abciQueryRes.Response.Height)
	if err != nil {
		logger.Errorf("[SearchTxByObjectIdAndDataType] block error, objectId:%v, dataType:%v, err:%v", objectId, dataType, err)
		return "", err
	}

	// MPT lookup returns the heights of all blocks containing
	// transactions for this object id.
	mptQueryRes, err := cli.MPTQuery(ctx, blockRes.Block.Header.MPTHash, objectId)
	if err != nil {
		logger.Errorf("[SearchTxByObjectIdAndDataType] mptQuery error, objectId:%v, dataType:%v, err:%v", objectId, dataType, err)
		return "", err
	}

	var heightList []int64
	if mptQueryRes != nil {
		heightList = mptQueryRes.HeightList
	}

	provRes := new(entity.ProvResult)

	for idx := range heightList {
		// Skip adjacent duplicate heights; assumes equal heights are
		// grouped together in heightList.
		if idx > 0 && heightList[idx] == heightList[idx-1] {
			continue
		}

		height := heightList[idx]
		blockRes, err = cli.Block(ctx, &height)
		if err != nil {
			logger.Errorf("[SearchTxByObjectIdAndDataType] block error, objectId:%v, dataType:%v, err:%v", objectId, dataType, err)
			return "", err
		}

		for txIdx := 0; txIdx < len(blockRes.Block.Txs); txIdx++ {
			var tx kvstore.Tx
			if err = jsoniter.Unmarshal(blockRes.Block.Txs[txIdx], &tx); err != nil {
				logger.Errorf("[SearchTxByObjectIdAndDataType] unmarshal error, objectId:%v, dataType:%v, err:%v", objectId, dataType, err)
				return "", err
			}

			if tx.Data.ObjectID != objectId || tx.Data.FileType != config.DataTypeList[dataType] {
				continue
			}

			// Only the tx metadata and the proof needed to verify it
			// are returned; the payload itself is dropped.
			tx.Data = nil
			provRes.Txs = append(provRes.Txs, &tx)

			provRes.MerkleProofList = append(provRes.MerkleProofList, &entity.MerkleProof{
				TxProof:  blockRes.Block.Txs.Proof(txIdx),
				RootHash: blockRes.Block.DataHash,
			})
		}
	}

	jsonData, err := jsoniter.Marshal(provRes)
	if err != nil {
		logger.Errorf("[SearchTxByObjectIdAndDataType] marshal error, objectId:%v, dataType:%v, err:%v", objectId, dataType, err)
		return "", err
	}

	return GzipCompress(jsonData), nil
}

// Preheat warms the Redis cache for every object id in
// [1, config.MaxObjectId]. For each id it collects the matching
// transactions (without Merkle proofs — see PreheatWithVO for the
// proof-carrying variant) and stores the raw JSON, uncompressed, under
// the object id key. The id space is split across n worker goroutines;
// any failure aborts the process via panic, which is acceptable for
// this offline warm-up tool.
func Preheat(cli *http.HTTP, r *redis.Client, ctx context.Context) {
	var wg sync.WaitGroup

	// Each worker owns a contiguous slice of the id space; the last
	// worker also absorbs the remainder of the integer division.
	part := config.MaxObjectId / n

	for i := 0; i < n; i++ {
		wg.Add(1)

		go func(i int) {
			defer wg.Done()

			from, to := i*part+1, (i+1)*part
			if i == n-1 {
				to = config.MaxObjectId
			}

			for objectId := from; objectId <= to; objectId++ {
				objectIdStr := strconv.Itoa(objectId)

				var heightList []int64

				// The ABCI query response carries the current block
				// height, which we need to fetch the latest header.
				abciQueryRes, err := cli.ABCIQuery(ctx, "", nil)
				if err != nil {
					logger.Errorf("[Preheat] abciQuery error, objectId:%v, err:%v", objectId, err)
					panic(err)
				}

				blockRes, err := cli.Block(ctx, &abciQueryRes.Response.Height)
				if err != nil {
					logger.Errorf("[Preheat] block error, objectId:%v, err:%v", objectId, err)
					panic(err)
				}

				// MPT lookup yields the heights of all blocks that
				// contain transactions for this object id.
				mptQueryRes, err := cli.MPTQuery(ctx, blockRes.Block.Header.MPTHash, objectIdStr)
				if err != nil {
					logger.Errorf("[Preheat] mptQuery error, objectId:%v, err:%v", objectId, err)
					panic(err)
				}

				if mptQueryRes != nil {
					heightList = mptQueryRes.HeightList
				}

				var txs []kvstore.Tx

				// j, not i: the original shadowed the worker index here.
				for j := 0; j < len(heightList); j++ {
					// Skip adjacent duplicate heights; assumes equal
					// heights are grouped together in heightList.
					if j > 0 && heightList[j] == heightList[j-1] {
						continue
					}

					h := heightList[j]
					blockRes, err = cli.Block(ctx, &h)
					if err != nil {
						logger.Errorf("[Preheat] block error, objectId:%v, err:%v", objectId, err)
						panic(err)
					}

					for index := 0; index < len(blockRes.Block.Txs); index++ {
						var tx kvstore.Tx
						if err = jsoniter.Unmarshal(blockRes.Block.Txs[index], &tx); err != nil {
							logger.Errorf("[Preheat] unmarshal error, objectId:%v, err:%v", objectId, err)
							panic(err)
						}

						if tx.Data.ObjectID == objectIdStr {
							// Only tx metadata is cached, not the payload.
							tx.Data = nil

							txs = append(txs, tx)
						}
					}
				}

				if len(txs) > 0 {
					jsonTxList, err := jsoniter.Marshal(txs)
					if err != nil {
						logger.Errorf("[Preheat] marshal error, objectId:%v, err:%v", objectId, err)
						panic(err)
					}

					// Stored uncompressed by design (PreheatWithVO is the
					// compressed variant). Check the Set result: the
					// original silently ignored Redis write failures.
					if err := r.Set(objectIdStr, jsonTxList, 0).Err(); err != nil {
						logger.Errorf("[Preheat] redis set error, objectId:%v, err:%v", objectId, err)
						panic(err)
					}
				}
			}
		}(i)
	}

	wg.Wait()
}

// Verify validates every Merkle proof in provRes against its recorded
// root hash. It returns the first validation error encountered, or nil
// when all proofs are valid.
func Verify(provRes entity.ProvResult) error {
	for i := 0; i < len(provRes.MerkleProofList); i++ {
		rootHash := provRes.MerkleProofList[i].RootHash

		logger.Infof("[Verify] rootHash:%v", rootHash)

		proof := provRes.MerkleProofList[i].TxProof

		logger.Infof("[Verify] proof:%+v", proof)

		if err := proof.Validate(rootHash); err != nil {
			// Do not dereference provRes.Txs[i].Data here: both
			// producers (SearchTxByObjectIdAndDataType and
			// PreheatWithVO) nil out tx.Data before storing, so the
			// original log line would nil-pointer panic on exactly
			// this error path. Log the proof index instead.
			logger.Errorf("[Verify] proof validate error, index:%v, err:%v", i, err)
			return err
		}
	}

	return nil
}

func GzipCompress(s []byte) string {
	var b bytes.Buffer      // 创建一个bytes.Buffer用于写入压缩数据
	w := gzip.NewWriter(&b) // 创建一个gzip.Writer

	// 写入并压缩数据
	if _, err := w.Write(s); err != nil {
		logger.Errorf("[GzipCompress] Write error, err:%v", err)
		panic(err)
	}

	// 关闭gzip.Writer以刷新输出
	if err := w.Close(); err != nil {
		logger.Errorf("[GzipCompress] Close error, err:%v", err)
		panic(err)
	}

	// 读取压缩后的数据
	compressedData, err := io.ReadAll(&b)
	if err != nil {
		logger.Errorf("[GzipCompress] ReadAll error, err:%v", err)
		panic(err)
	}

	return base64.StdEncoding.EncodeToString(compressedData)
}

// GzipUnCompress reverses GzipCompress: it base64-decodes s, then
// gzip-decompresses the payload and returns the raw bytes. Any decode
// failure panics, matching the error style of this file.
func GzipUnCompress(s string) []byte {
	raw, err := base64.StdEncoding.DecodeString(s)
	if err != nil {
		panic(err)
	}

	// Wrap the decoded bytes in a gzip reader for decompression.
	zr, err := gzip.NewReader(bytes.NewReader(raw))
	if err != nil {
		panic(err)
	}
	defer zr.Close()

	// Drain the reader to obtain the original uncompressed bytes.
	plain, err := io.ReadAll(zr)
	if err != nil {
		panic(err)
	}

	return plain
}
