package server

import (
	"fmt"
	"net/http"
	"sync"

	"fabric-sdk/sdk"

	"fabric-sdk/utils"

	"github.com/Shopify/sarama"
	"github.com/hyperledger/fabric-sdk-go/pkg/client/channel"
)

// channelNum is the number of Fabric channels that reads and writes are
// sharded across; utils.StrHashToNum maps an object ID to one of them.
const channelNum = 2

// Server bundles one SDK client/channel binding per Fabric channel.
// The slice index is the shard number produced by utils.StrHashToNum,
// so it must hold exactly channelNum entries.
type Server struct {
	SdkChannelInfos []sdk.SdkChannelInfo
}

// Object is one decoded Kafka record destined for the ledger:
// the Kafka message key becomes ID and the message value becomes Hash.
type Object struct {
	ID   string
	Hash string
}

// KafkaConsumer identifies the single broker and topic to tail.
type KafkaConsumer struct {
	Node  string // broker address (host:port)
	Topic string // topic whose partition 0 is consumed
}

// var id_ch = make(chan string, 200)

// hash_ch carries decoded Kafka records from Consume to the add_obj
// worker pool; the large buffer absorbs ingest bursts while Fabric
// commits lag behind.
var hash_ch = make(chan Object, 400000)

// wg tracks the add_obj workers (each defers wg.Done on exit).
// NOTE(review): no wg.Add or wg.Wait call is visible in this file —
// confirm the WaitGroup accounting before relying on it; Done without
// a matching Add panics.
var wg sync.WaitGroup

// query_hash handles GET /get: it reads the "ID" query parameter,
// routes the lookup to the Fabric channel that ID hashes to (same
// sharding rule as add_obj), and writes the stored payload back to
// the client. Errors are reported with proper HTTP status codes.
func (s *Server) query_hash(w http.ResponseWriter, r *http.Request) {
	defer r.Body.Close()

	id := r.URL.Query().Get("ID")
	if id == "" {
		http.Error(w, "missing ID query parameter", http.StatusBadRequest)
		return
	}
	channelIdToPut := utils.StrHashToNum(id, channelNum)
	response, err := s.SdkChannelInfos[channelIdToPut].Client.Query(channel.Request{
		ChaincodeID: s.SdkChannelInfos[channelIdToPut].ChaincodeID,
		Fcn:         "read",
		Args:        [][]byte{[]byte(id)},
	})
	if err != nil {
		// Previously the handler fell through and returned 200 with an
		// empty body on failure; surface the error to the caller instead.
		fmt.Println("<-myerror-> ", err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// Payload is already []byte — no string round-trip needed.
	_, _ = w.Write(response.Payload)
}

// func (s *Server) query_obj() {
// 	for id := range id_ch {
// 		timeStart := time.Now().UnixNano()
// 		response, err := s.SdkChannelInfos.Client.Query(channel.Request{ChaincodeID: s.SdkChannelInfos.ChaincodeID, Fcn: "ReadObject", Args: [][]byte{[]byte(id)}})
// 		if err != nil {
// 			fmt.Println("<-myerror-> ", err)
// 		} else {
// 			fmt.Printf("<-查询对象ID为: '%s' 的对象哈希: '%s'-> 花费时间: %fms\n", id, string(response.Payload), ((float64)(time.Now().UnixNano()-timeStart))/1e6)
// 		}
// 	}
// }

// add_obj is a writer-pool worker: it drains hash_ch and persists each
// (ID, Hash) pair to Fabric via the "create" chaincode function on the
// channel the ID hashes to. The loop only ends when hash_ch is closed,
// which never happens in the visible code, so each worker runs for the
// server's lifetime.
//
// NOTE(review): defer wg.Done() fires if the worker ever exits, but no
// wg.Add call is visible before the goroutines start in ServerStart —
// confirm the WaitGroup accounting (Done without Add panics).
func (s *Server) add_obj() {
	defer wg.Done()
	for obj_put := range hash_ch {
		// timeStart := time.Now()
		// Shard by ID hash so an object always lands on the same channel
		// that query_hash will later read from.
		channelIdToPut := utils.StrHashToNum(obj_put.ID, channelNum)
		_, err := s.SdkChannelInfos[channelIdToPut].Client.Execute(channel.Request{ChaincodeID: s.SdkChannelInfos[channelIdToPut].ChaincodeID, Fcn: "create", Args: [][]byte{[]byte(obj_put.ID), []byte(obj_put.Hash)}})
		if err != nil {
			// Best effort: log and keep consuming; the failed object is dropped.
			fmt.Println(err)
		}
		// else {
		// 	fmt.Printf("latency %d\n", time.Since(timeStart).Milliseconds())
		// 	// fmt.Printf("<-添加对象ID: '%s', Hash length: %d, 到Fabric-> 花费时间: %fms\n", obj_put.ID, len(obj_put.Hash), ((float64)(time.Now().UnixNano()-timeStart))/1e6)
		// }
	}
}

// func (s *Server) getBlockHeight(w http.ResponseWriter, r *http.Request) {
// 	defer r.Body.Close()

// 	blockinfo, err := s.SdkChannelInfos.LedgerClient.QueryInfo()
// 	if err != nil {
// 		fmt.Printf("failed query block info: %s\n", err)
// 		return
// 	}
// 	heightBytes := make([]byte, 8)
// 	binary.BigEndian.PutUint64(heightBytes, blockinfo.BCI.Height)
// 	_, _ = w.Write(heightBytes)
// 	fmt.Printf("Get current block height: %d\n", blockinfo.BCI.Height)
// }

// func (s *Server) getBlock(w http.ResponseWriter, r *http.Request) {
// 	defer r.Body.Close()

// 	timeStart := time.Now()
// 	channelID, err := strconv.Atoi(r.URL.Query().Get("channel"))
// 	if err != nil {
// 		fmt.Printf("Atoi channel ID error: %s\n", err)
// 		return
// 	}
// 	height, err := strconv.ParseUint(r.URL.Query().Get("height"), 10, 64)
// 	if err != nil {
// 		fmt.Printf("Atoi block height error: %s\n", err)
// 		return
// 	}
// 	block, err := s.SdkChannelInfos[channelID].LedgerClient.QueryBlock(height)
// 	if err != nil {
// 		fmt.Printf("failed to query block: %s\n", err)
// 		return
// 	}
// 	objIDsInBlock, err := utils.ExtractObjIDsFromBlock(block)
// 	if err != nil {
// 		fmt.Printf("failed to ExtractObjIDsFromBlock: %s\n", err)
// 		return
// 	}
// 	objIDsInBlockJson, err := json.Marshal(objIDsInBlock)
// 	if err != nil {
// 		fmt.Printf("failed to Marshal objIDsInBlock: %s\n", err)
// 		return
// 	}
// 	_, _ = w.Write(objIDsInBlockJson)
// 	fmt.Printf("ExtractObjIDsFromBlock, block ID: %d, objs count: %d, cost time: %dms\n", objIDsInBlock.BlockID, len(objIDsInBlock.IDs), time.Since(timeStart).Milliseconds())
// }

// func (s *Server) getRandomBlock(w http.ResponseWriter, r *http.Request) {
// 	defer r.Body.Close()

// 	timeStart := time.Now()
// 	heightStart, err := strconv.ParseUint(r.URL.Query().Get("start"), 10, 64)
// 	if err != nil {
// 		fmt.Printf("Atoi block height start error: %s\n", err)
// 		return
// 	}
// 	rand.Seed(time.Now().Unix())
// 	channelIDCheck := rand.Intn(channelNum) // 随机选择一个channel
// 	// channelIDCheck := 0 // 指定channel
// 	blockinfo, err := s.SdkChannelInfos[channelIDCheck].LedgerClient.QueryInfo()
// 	if err != nil {
// 		fmt.Printf("failed query block info: %s\n", err)
// 		return
// 	}
// 	checkBlockID := rand.Uint64()%(blockinfo.BCI.GetHeight()-heightStart) + heightStart // 根据给定起始高度和当前区块高度之间随机获取一个区块高度
// 	block, err := s.SdkChannelInfos[channelIDCheck].LedgerClient.QueryBlock(checkBlockID)
// 	if err != nil {
// 		fmt.Printf("failed to query block: %s\n", err)
// 		return
// 	}
// 	objIDsInBlock, err := utils.ExtractObjIDsFromBlock(block)
// 	if err != nil {
// 		fmt.Printf("failed to ExtractObjIDsFromBlock: %s\n", err)
// 		return
// 	}
// 	objIDsInBlock.BlockID = checkBlockID
// 	objIDsInBlockJson, err := json.Marshal(objIDsInBlock)
// 	if err != nil {
// 		fmt.Printf("failed to Marshal objIDsInBlock: %s\n", err)
// 		return
// 	}
// 	_, _ = w.Write(objIDsInBlockJson)
// 	fmt.Printf("ExtractObjIDsFromBlock, block ID: %d, objs count: %d, cost time: %dms\n", objIDsInBlock.BlockID, len(objIDsInBlock.IDs), time.Since(timeStart).Milliseconds())
// }

// Consume connects to the single Kafka broker t.Node, tails partition 0
// of t.Topic starting at the newest offset, and forwards every message
// into hash_ch as an Object (key -> ID, value -> Hash) for the writer
// pool. It loops forever; partition-consumer errors are logged and
// consumption continues. Connection/setup failures log and return.
func (t KafkaConsumer) Consume() {
	consumer, err := sarama.NewConsumer([]string{t.Node}, nil)
	if err != nil {
		// Fixed: typo ("connnet") and missing newline in the log line.
		fmt.Printf("kafka connect failed, error[%v]\n", err)
		return
	}
	defer consumer.Close()

	partition_consumer, err := consumer.ConsumePartition(t.Topic, 0, sarama.OffsetNewest)
	if err != nil {
		fmt.Printf("try create partition_consumer error %v\n", err)
		return
	}
	defer partition_consumer.Close()

	for {
		select {
		case msg := <-partition_consumer.Messages():
			// May block if hash_ch is full — applies backpressure to Kafka.
			hash_ch <- Object{
				ID:   string(msg.Key),
				Hash: string(msg.Value),
			}
		case err := <-partition_consumer.Errors():
			fmt.Printf("err :%v\n", err)
		}
	}
}

// ServerStart wires up the HTTP query endpoint, starts the Kafka
// consumer feeding hash_ch, launches the Fabric writer pool, and then
// serves on :8080. It blocks until the listener fails.
//
// orgID is currently unused; it is kept for call-site compatibility.
func ServerStart(s Server, kafkaBroker string, kafkaTopic string, orgID int) {
	http.HandleFunc("/get", s.query_hash)
	// http.HandleFunc("/getRandomBlock", s.getRandomBlock)
	// http.HandleFunc("/getBlock", s.getBlock)

	kafkaConsumer := KafkaConsumer{
		Node:  kafkaBroker,
		Topic: kafkaTopic,
	}
	go kafkaConsumer.Consume()

	// Writer pool. Each add_obj worker defers wg.Done(), so register it
	// here — Done without a matching Add panics the process.
	const workerCount = 2000
	for i := 0; i < workerCount; i++ {
		wg.Add(1)
		// go s.query_obj()
		go s.add_obj()
	}

	fmt.Println("Server start ...")
	// Surface the listener error instead of silently discarding it.
	if err := http.ListenAndServe(":8080", nil); err != nil {
		fmt.Printf("http server stopped: %v\n", err)
	}
}
