package ivf

import (
	"bytes"
	"crypto/sha256"
	"errors"
	"math"
	"sort"

	// cuckoo "github.com/seiflotfy/cuckoofilter"
	cuckoo "github.com/tendermint/tendermint/my_app/querylayer/vindex/ivf/cuckoofilter"
)

// getHash returns the SHA-256 digest of input.
func getHash(input []byte) []byte {
	sum := sha256.Sum256(input)
	return sum[:]
}

// GetPrevHash computes the forward-chain hash of a posting: the SHA-256
// digest of its serialized byte form.
func GetPrevHash(p Posting) []byte {
	return getHash(PostingToBytes(p))
}

// getFilterHash computes the hash of a cuckoo filter's encoded form.
func getFilterHash(f *cuckoo.Filter) []byte {
	return getHash(f.Encode())
}

// GetDigest computes the digest of an inverted list from its weight (list
// length), its cuckoo filter, and the forward hash of its head posting.
// The hash input is weight || filterHash || prevhash.
func GetDigest(weight int, filter *cuckoo.Filter, prevhash []byte) []byte {
	buf := make([]byte, 0)
	buf = append(buf, IntToBytes(weight)...)
	buf = append(buf, getFilterHash(filter)...)
	buf = append(buf, prevhash...)
	return getHash(buf)
}

var upboundID int                  // DocID currently holding the candidate-set upper bound
var SUpbound float64 = 0.0         // candidate-set score upper bound
var SLowbound float64 = 100.0      // candidate-set score lower bound
var PUpbound float64 = 0.0         // non-candidate-set score upper bound
var rlowbound float64 = 100.0      // result-set (top-k) score lower bound
var postingUpbound map[int]Posting // per word: upper-bound posting of the non-candidate part of its list

// NOTE(review): this package-level mutable state is shared by PostingSearch,
// UpdateBounds and Rebuild and is not fully reset between calls (rlowbound in
// particular) — confirm that repeated or concurrent queries cannot observe
// stale bounds.

// Poptext records one posting popped from a word's inverted list.
type Poptext struct {
	word    int     // query-word id the posting was popped from
	posting Posting // the popped posting
}

// Volist is the per-word verification-object (VO) sub-structure shipped to
// the client so it can re-verify a query result (see CalHash / Rebuild).
type Volist struct {
	Digest     []byte // final digest of the word's full inverted list
	Weight     int    // length of the word's inverted list
	FilterHash []byte // hash of the cuckoo filter; set when the filter itself is omitted
	Filter     *cuckoo.Filter
	Candidates []Posting
	PrevPost   Posting // first posting of the non-candidate (unpopped) remainder
}

// PostingSearch pops postings from the inverted lists of the query words
// until the top-k termination conditions hold, and returns, per word, the
// prefix of its inverted list that was consumed (the candidate postings).
//
// NOTE(review): this method reads and mutates the package-level bound state
// (rlowbound, SUpbound, PUpbound, upboundID, postingUpbound); rlowbound is
// not reset here, so values from a previous query may carry over — confirm
// this is intended.
func (index *InvertedIndex) PostingSearch(querywordFreq []float32, result []int) map[int][]Posting {
	// Deep-copy the cuckoo filter of every word present in the query so that
	// the deletions below do not disturb the index's own filters.
	c_filters := make(map[int]*cuckoo.Filter)
	for key, value := range index.Filter {
		if querywordFreq[key] != 0 {

			c_filters[key] = value.DeepCopy()
		}
	}
	// Compute the lower bound of the top results and pick the initial
	// candidate set: everything ranked above a known result doc is popped.
	postingUpbound = make(map[int]Posting)
	candidates := make([]Poptext, 0)
	Poplist := make([]Poptext, 0)
	for _, r := range result {
		score := 0.0
		for word, freq := range querywordFreq {
			if freq == 0 {
				continue
			}

			loca := FindLoca(r, index.Index[word])
			if loca == -1 {
				// Result doc not in this word's list: pop the whole list.
				for i := 0; i < len(index.Index[word]); i++ {
					// popText[word] = index.Index[word][i]
					Poplist = append(Poplist, Poptext{word, index.Index[word][i]})
				}
			} else {
				score += float64(freq) * index.Index[word][loca].TF
				// Pop every posting ranked above the result doc.
				for i := 0; i < loca; i++ {
					// popText[word] = index.Index[word][i]
					Poplist = append(Poplist, Poptext{word, index.Index[word][i]})
				}
			}
			// fmt.Println(word, freq, loca)
			// score += float64(freq) * index.Index[word][loca].TF
			// for i := 0; i < loca; i++ {
			// 	// popText[word] = index.Index[word][i]
			// 	Poplist = append(Poplist, Poptext{word, index.Index[word][i]})
			// }
			if len(index.Index[word]) != 0 {
				postingUpbound[word] = index.Index[word][0]
				c_filters[word].Delete(IntToBytes(r))
			}
			// postingUpbound[word] = index.Index[word][0]
			// c_filters[word].Delete(IntToBytes(r))
		}
		// NOTE(review): rlowbound is raised to the maximum result score here
		// even though it is documented as a lower bound — confirm intended.
		if rlowbound < score {
			rlowbound = score
		}
	}
	// Compute the initial candidate-set bounds.
	candidates = append(candidates, Poplist...)
	index.UpdateBounds(querywordFreq, Poplist, c_filters)

	// Pop until condition 1 holds: result-set lower bound >= non-candidate-set
	// upper bound. Each iteration pops the single best remaining posting.
	for rlowbound < PUpbound {
		var maxWord int
		maxTF := 0.0
		for word, posting := range postingUpbound {
			if posting.TF > maxTF {
				maxTF = posting.TF
				maxWord = word
			}

		}
		Poplist = []Poptext{{maxWord, postingUpbound[maxWord]}}
		candidates = append(candidates, Poplist...)
		index.UpdateBounds(querywordFreq, Poplist, c_filters)
	}
	// Pop until condition 2 holds: result-set lower bound >= candidate-set
	// upper bound. Only words that could still contribute to the current
	// upper-bound doc (upboundID) are considered.
	for rlowbound < SUpbound {
		var maxWord int
		maxTF := 0.0
		for word, freq := range querywordFreq {
			if freq == 0 {
				continue
			}
			// Skip words whose posting for upboundID was already popped.
			flag := false
			for _, text := range candidates {
				if text.posting.DocID == upboundID && text.word == word {
					flag = true
					break
				}
			}
			if !flag && c_filters[word].Lookup(IntToBytes(upboundID)) {
				if postingUpbound[word].TF > maxTF {
					maxTF = postingUpbound[word].TF
					maxWord = word
				}
			}
		}
		Poplist = []Poptext{{maxWord, postingUpbound[maxWord]}}
		candidates = append(candidates, Poplist...)
		index.UpdateBounds(querywordFreq, Poplist, c_filters)
	}
	// Assemble the answer: for each word, the prefix of its inverted list
	// lying strictly before the final upper-bound posting.
	resultSet := make(map[int][]Posting)
	for word, maxpost := range postingUpbound {
		var loca int
		if maxpost.DocID == -1 {
			// DocID -1 marks an exhausted list: everything was popped.
			loca = len(index.Index[word])
		} else {
			loca = FindLoca(maxpost.DocID, index.Index[word])
		}
		if loca <= 0 {
			resultSet[word] = nil
			continue
		}
		resultSet[word] = index.Index[word][:loca]
	}
	return resultSet
}

// UpdateBounds refreshes the package-level score bounds after the postings
// in Poplist have been popped: it updates the candidate-set bounds
// (SUpbound / SLowbound / upboundID), removes each popped posting from its
// word's working filter, re-derives each word's upper-bound posting, and
// finally recomputes the non-candidate-set upper bound PUpbound.
func (index *InvertedIndex) UpdateBounds(query []float32, Poplist []Poptext, filters map[int]*cuckoo.Filter) {
	// fmt.Println(Poplist)
	for _, text := range Poplist {
		score := 0.0
		scoreUpbound := 0.0
		for word, freq := range query {
			if freq == 0 {
				continue
			}
			if _, ok := filters[word]; !ok {
				continue
			}
			// Quick membership test via the cuckoo filter before the more
			// expensive list search.
			if !filters[word].Lookup(IntToBytes(text.posting.DocID)) {
				continue
			}
			loca := FindLoca(text.posting.DocID, index.Index[word])
			if loca != -1 {
				score += index.Index[word][loca].TF * float64(query[word])
				scoreUpbound += math.Max(index.Index[word][loca].TF, postingUpbound[word].TF) * float64(query[word])
			}
		}
		if scoreUpbound > SUpbound {
			upboundID = text.posting.DocID
			SUpbound = scoreUpbound
		}
		if score < SLowbound {
			SLowbound = score
		}
		// fmt.Printf("before :%s %t %x \n", text.word, index.Filter[text.word].Lookup(IntToBytes(1)), getFilterHash(index.Filter[text.word]))
		filters[text.word].Delete(IntToBytes(text.posting.DocID))
		// fmt.Printf("%s %t %x \n", text.word, index.Filter[text.word].Lookup(IntToBytes(1)), getFilterHash(index.Filter[text.word]))
	}
	// Re-derive each word's upper-bound posting: the first posting of its
	// list still present in the working filter (weighted by query freq).
	maxFreq := cuckoo.MaxCount(filters)
	maxlist := make([]float64, 0)
	for word, freq := range query {
		if freq == 0 {
			continue
		}
		flag := false
		for _, p := range index.Index[word] {
			if filters[word].Lookup(IntToBytes(p.DocID)) {
				maxlist = append(maxlist, p.TF*float64(freq))
				postingUpbound[word] = Posting{DocID: p.DocID, TF: p.TF * float64(freq)}
				flag = true
				break
			}
		}
		if !flag {
			// DocID -1 marks this word's list as exhausted.
			postingUpbound[word] = Posting{DocID: -1}
		}
	}
	// PUpbound is the sum of the maxFreq largest per-word upper-bound scores.
	// NOTE(review): this assumes cuckoo.MaxCount(filters) bounds how many of
	// the remaining lists a single doc can still appear in — confirm.
	sort.Float64s(maxlist)
	pai := 0.0
	for i := 0; i < maxFreq && i < len(maxlist); i++ {
		// fmt.Println(len(maxlist), maxFreq)
		pai += maxlist[len(maxlist)-1-i]
	}
	PUpbound = pai
}

// GetVO builds the verification object for a query: one Volist per query
// word, containing its popped candidate postings plus whatever the client
// needs to recompute and check the word's list digest (see CalHash).
func (index *InvertedIndex) GetVO(candidates map[int][]Posting) map[int]Volist {
	VO := make(map[int]Volist)
	for word, posts := range candidates {
		var volist Volist
		volist.Weight = len(index.Index[word])
		volist.Candidates = posts
		volist.Digest = index.Digest[word]
		var loca int
		if len(posts) == 0 {
			loca = -1
		} else {
			loca = FindLoca(posts[len(posts)-1].DocID, index.Index[word])
		}
		// NOTE(review): for an empty index list, loca (-1) equals len-1 (-1),
		// so it falls into the "fully popped" branch — confirm intended.
		if loca == len(index.Index[word])-1 {
			// The whole list was popped: omit the filter and ship only its
			// hash, since there are no remaining postings to look up.
			// volist.PrevPost = nil
			volist.Filter = nil
			volist.FilterHash = nil
			if _, ok := index.Filter[word]; ok {
				volist.FilterHash = getFilterHash(index.Filter[word])
			}
			// volist.FilterHash = getFilterHash(index.Filter[word])
		} else {
			// Partially popped: ship the filter itself plus the first
			// unpopped posting so the client can rebuild the hash chain.
			volist.PrevPost = index.Index[word][loca+1]
			volist.Filter = index.Filter[word]
			volist.FilterHash = nil
			// fmt.Printf("%t %x ,", index.Filter[word].Lookup(IntToBytes(1)), getFilterHash(index.Filter[word]))
			// fmt.Println("unfinish", word, loca)
		}
		VO[word] = volist
	}
	return VO
}

// Verify checks a query result against its verification object: first the
// hash-chain / digest integrity of each list (CalHash), then the legality
// of the top-k result (Rebuild). It returns the first failure encountered.
func Verify(q []float32, result []BOW, VO map[int]Volist) error {
	if err := CalHash(VO); err != nil {
		return err
	}
	return Rebuild(q, result, VO)
}

// CalHash verifies the integrity of every Volist in the VO: it walks each
// candidate chain backwards checking the per-posting PrevHash links, then
// recomputes the list digest and compares it with the digest in the VO.
// It returns a non-nil error on the first mismatch.
func CalHash(VO map[int]Volist) error {
	// postingUpbound = make(map[string]Posting)
	for _, volist := range VO {
		// Verify the backward hash chain over the candidate postings.
		var prevhash []byte
		if volist.Filter == nil {
			// Entire list was popped: there is no next posting to chain from.
			prevhash = nil
		} else {
			prevhash = GetPrevHash(volist.PrevPost)
		}
		for i := len(volist.Candidates) - 1; i >= 0; i-- {
			if !bytes.Equal(volist.Candidates[i].PrevHash, prevhash) {
				return errors.New("prevHash error")
			}
			prevhash = GetPrevHash(volist.Candidates[i])
		}
		// postingUpbound[word] = volist.PrevPost
		// Recompute the list digest and compare with the shipped one. When
		// the filter is omitted, hash weight || FilterHash || prevhash
		// directly (mirrors GetDigest).
		var digest []byte
		if volist.Filter != nil {
			digest = GetDigest(volist.Weight, volist.Filter, prevhash)
		} else {
			data := append(IntToBytes(volist.Weight), volist.FilterHash...)
			data = append(data, prevhash...)
			digest = getHash(data)
		}
		if !bytes.Equal(digest, volist.Digest) {
			// BUG FIX: this branch previously returned nil (the error was
			// commented out), silently accepting a tampered digest and
			// defeating verification.
			return errors.New("digest error")
		}
	}
	return nil
}

// Rebuild verifies the legality of the top-k query result using the VO:
// it recomputes the result-set lower bound, the non-candidate-set upper
// bound (condition 1), and the candidate-set upper bound (condition 2),
// returning an error if either termination condition is violated.
//
// NOTE(review): rlowbound is a package-level global and is not reset here,
// so its starting value depends on prior calls — confirm intended.
func Rebuild(querywordFreq []float32, result []BOW, VO map[int]Volist) error {
	// querywordFreq := q.vector
	// Recompute the top-k lower bound from the returned docs.
	for _, doc := range result {
		score := 0.0
		for word, freq := range querywordFreq {
			if freq == 0 {
				continue
			}
			// fmt.Println(word, doc.ID, VO[word].Candidates)
			loca := FindLoca(doc.ID, VO[word].Candidates)
			if loca == -1 {
				continue
			}
			score += float64(freq) * VO[word].Candidates[loca].TF
		}
		if rlowbound < score {
			rlowbound = score
		}
	}
	// Recompute the non-candidate-set upper bound from the unpopped heads.
	filters := make(map[int]*cuckoo.Filter)
	maxlist := make([]float64, 0)
	for word, volist := range VO {
		if volist.Filter != nil {
			maxlist = append(maxlist, volist.PrevPost.TF)
			filters[word] = volist.Filter
		}
	}
	sort.Float64s(maxlist)
	maxFreq := cuckoo.MaxCount(filters)
	pai := 0.0
	for i := 0; i < maxFreq && i < len(maxlist); i++ {
		pai += maxlist[len(maxlist)-1-i]
	}
	PUpbound = pai
	// Check condition 1: result lower bound >= non-candidate upper bound.
	if rlowbound < PUpbound {
		return errors.New("condition1 failed")
	}
	// Recompute the candidate-set upper bound: sum each doc's known candidate
	// contributions, then add the head-posting TF for any list whose filter
	// still claims the doc but whose candidate prefix does not contain it.
	docUpbound := make(map[int]float64)
	for word, volist := range VO {
		for _, posting := range volist.Candidates {
			if _, ok := docUpbound[posting.DocID]; !ok {
				docUpbound[posting.DocID] = posting.TF * float64(querywordFreq[word])
			} else {
				docUpbound[posting.DocID] += posting.TF * float64(querywordFreq[word])
			}
		}
	}
	for word, volist := range VO {
		for id := range docUpbound {
			if volist.Filter != nil && volist.Filter.Lookup(IntToBytes(id)) && FindLoca(id, volist.Candidates) == -1 {
				docUpbound[id] += volist.PrevPost.TF * float64(querywordFreq[word])
			}
		}
	}
	SUpbound = 0.0
	for _, tf := range docUpbound {
		if tf > SUpbound {
			SUpbound = tf
		}
	}
	// Check condition 2: result lower bound >= candidate upper bound.
	if rlowbound < SUpbound {
		return errors.New("condition2 failed")
	}
	return nil
}
