package main

import (
	"encoding/gob"
	"errors"
	"io"
	"math"
	"os"
	"path/filepath"
	"sync/atomic"
)

// defaultProb is a tiny non-zero probability assigned to words never
// observed in a class, so products of word probabilities never collapse
// to exactly zero.
const defaultProb = 0.00000000001

// ErrUnderflow is returned by SafeProbScores when the linear-space and
// log-space classifications disagree, suggesting float underflow.
var ErrUnderflow = errors.New("possible underflow detected")

// Class is the label identifying one category the classifier can assign.
type Class string

// Classifier is a naive Bayes classifier over a fixed set of classes.
type Classifier struct {
	Classes         []Class
	learned         int   // number of documents passed to Learn
	seen            int32 // number of documents scored; accessed atomically
	datas           map[Class]*classData
	tfIdf           bool // true when built via NewClassifierTfIdf
	DidConvertTfIdf bool // set once ConvertTermsFreqToTfIdf has run
}

// serializableClassifier mirrors Classifier for gob (de)serialization.
// NOTE(review): learned and seen are unexported, so encoding/gob
// silently skips them — persisted classifiers lose both counters and
// they decode as zero. Confirm whether they should be exported.
type serializableClassifier struct {
	Classes         []Class
	learned         int
	seen            int
	Datas           map[Class]*classData
	TfIdf           bool
	DidConvertTfIdf bool
}

// classData holds the training statistics for a single class.
type classData struct {
	Freqs   map[string]float64   // word -> count (or TF-IDF weight after conversion)
	FreqTfs map[string][]float64 // word -> per-document term frequencies (TF-IDF mode)
	Total   int                  // total words observed for this class
}

// newClassData allocates an empty per-class statistics store.
func newClassData() *classData {
	d := classData{
		Freqs:   map[string]float64{},
		FreqTfs: map[string][]float64{},
	}
	return &d
}

// getWordProb returns the relative frequency of word within this class,
// or defaultProb when the word has never been observed.
func (d *classData) getWordProb(word string) float64 {
	if freq, seen := d.Freqs[word]; seen {
		return freq / float64(d.Total)
	}
	return defaultProb
}

// getWordsProb returns the product of the per-word probabilities of the
// given words under this class.
func (d *classData) getWordsProb(words []string) (prob float64) {
	prob = 1
	for i := range words {
		prob *= d.getWordProb(words[i])
	}
	return prob
}

// NewClassifierTfIdf creates a TF-IDF classifier over the given classes.
// It panics unless at least two distinct classes are supplied.
func NewClassifierTfIdf(classes ...Class) (c *Classifier) {
	count := len(classes)
	if count < 2 {
		panic("provide at least two classes")
	}
	unique := make(map[Class]bool, count)
	for _, cl := range classes {
		unique[cl] = true
	}
	if len(unique) != count {
		panic("classes must be unique")
	}
	datas := make(map[Class]*classData, count)
	for _, cl := range classes {
		datas[cl] = newClassData()
	}
	return &Classifier{
		Classes: classes,
		datas:   datas,
		tfIdf:   true,
	}
}
// NewClassifier creates a plain term-frequency (non-TF-IDF) classifier.
// It panics unless at least two distinct classes are supplied.
func NewClassifier(classes ...Class) (c *Classifier) {
	count := len(classes)
	if count < 2 {
		panic("provide at least two classes")
	}
	unique := make(map[Class]bool, count)
	for _, cl := range classes {
		unique[cl] = true
	}
	if len(unique) != count {
		panic("classes must be unique")
	}
	// tfIdf and DidConvertTfIdf keep their zero value (false).
	c = &Classifier{
		Classes: classes,
		datas:   make(map[Class]*classData, count),
	}
	for _, cl := range classes {
		c.datas[cl] = newClassData()
	}
	return c
}

// NewClassifierFromFile loads a gob-serialized classifier from the named
// file.
func NewClassifierFromFile(name string) (c *Classifier, err error) {
	f, openErr := os.Open(name)
	if openErr != nil {
		return nil, openErr
	}
	defer f.Close()
	return NewClassifierFromReader(f)
}

// NewClassifierFromReader decodes a gob-serialized classifier from file.
// On decode failure it returns (nil, err) instead of a partially
// populated Classifier, so callers cannot accidentally use garbage.
//
// NOTE(review): serializableClassifier's learned/seen fields are
// unexported and therefore never transmitted by gob — w.learned and
// w.seen are always zero here. Confirm whether the counters should be
// persisted.
func NewClassifierFromReader(file io.Reader) (c *Classifier, err error) {
	dec := gob.NewDecoder(file)
	w := new(serializableClassifier)
	if err = dec.Decode(w); err != nil {
		return nil, err
	}
	return &Classifier{w.Classes, w.learned,
		int32(w.seen), w.Datas, w.TfIdf, w.DidConvertTfIdf}, nil
}

// getPriors returns each class's share of the total observed word count.
// When nothing has been learned yet, every prior is zero.
func (c *Classifier) getPriors() (priors []float64) {
	priors = make([]float64, len(c.Classes))
	total := 0
	for i, class := range c.Classes {
		n := c.datas[class].Total
		priors[i] = float64(n)
		total += n
	}
	if total != 0 {
		denom := float64(total)
		for i := range priors {
			priors[i] /= denom
		}
	}
	return priors
}
// Learned reports how many documents have been passed to Learn.
func (c *Classifier) Learned() (docs int) {
	docs = c.learned
	return
}
// Seen reports how many documents have been scored so far; the counter
// is read atomically because scoring methods update it concurrently.
func (c *Classifier) Seen() int {
	count := atomic.LoadInt32(&c.seen)
	return int(count)
}

// IsTfIdf reports whether this classifier was built in TF-IDF mode
// (via NewClassifierTfIdf).
func (c *Classifier) IsTfIdf() bool {
	return c.tfIdf
}

// WordCount returns the total number of words learned for each class,
// ordered the same as Classes.
func (c *Classifier) WordCount() (result []int) {
	result = make([]int, len(c.Classes))
	for i, class := range c.Classes {
		result[i] = c.datas[class].Total
	}
	return result
}

// Observe records count occurrences of word under class which. Unlike
// Learn, it does not increment the learned-documents counter.
func (c *Classifier) Observe(word string, count int, which Class) {
	d := c.datas[which]
	d.Total += count
	d.Freqs[word] += float64(count)
}

// Learn trains the classifier with one document belonging to which.
// In TF-IDF mode it additionally records each word's per-document term
// frequency for the later ConvertTermsFreqToTfIdf pass; calling Learn
// after that conversion panics.
func (c *Classifier) Learn(document []string, which Class) {
	if c.tfIdf {
		if c.DidConvertTfIdf {
			panic("Cannot call ConvertTermsFreqToTfIdf more than once. Reset and relearn to reconvert.")
		}
		// Term frequency for this document: occurrences / document length.
		counts := make(map[string]float64)
		for _, w := range document {
			counts[w]++
		}
		size := float64(len(document))
		freqTfs := c.datas[which].FreqTfs
		for w, n := range counts {
			freqTfs[w] = append(freqTfs[w], n/size)
		}
	}

	d := c.datas[which]
	for _, w := range document {
		d.Freqs[w]++
		d.Total++
	}
	c.learned++
}

// ConvertTermsFreqToTfIdf rewrites every stored term frequency into a
// TF-IDF weight (log1p(tf) * log1p(documents learned)) and replaces each
// word's raw count in Freqs with the sum of its TF-IDF samples. It may
// only be called once; a second call panics.
func (c *Classifier) ConvertTermsFreqToTfIdf() {
	if c.DidConvertTfIdf {
		panic("Cannot call ConvertTermsFreqToTfIdf more than once. Reset and relearn to reconvert.")
	}
	// The IDF factor depends only on the document count, so hoist it.
	idf := math.Log1p(float64(c.learned))
	for _, data := range c.datas {
		for word, samples := range data.FreqTfs {
			sum := 0.0
			for i, tf := range samples {
				samples[i] = math.Log1p(tf) * idf
				sum += samples[i]
			}
			data.Freqs[word] = sum
		}
	}
	c.DidConvertTfIdf = true
}

// LogScores returns the log-space posterior score of document for every
// class, the index of the highest-scoring class, and whether that
// maximum is unique. Panics if a TF-IDF classifier has not yet been
// converted via ConvertTermsFreqToTfIdf.
func (c *Classifier) LogScores(document []string) (scores []float64, inx int, strict bool) {
	if c.tfIdf && !c.DidConvertTfIdf {
		panic("Using a TF-IDF classifier. Please call ConvertTermsFreqToTfIdf before calling LogScores.")
	}
	priors := c.getPriors()
	scores = make([]float64, len(c.Classes))
	for i, class := range c.Classes {
		data := c.datas[class]
		s := math.Log(priors[i])
		for _, w := range document {
			s += math.Log(data.getWordProb(w))
		}
		scores[i] = s
	}
	inx, strict = findMax(scores)
	atomic.AddInt32(&c.seen, 1)
	return scores, inx, strict
}

// ProbScores returns normalized posterior probabilities for doc, the
// index of the winning class, and whether the winner is unique. Long
// documents can underflow the linear-space products; use SafeProbScores
// to detect that. Panics if a TF-IDF classifier was not converted.
func (c *Classifier) ProbScores(doc []string) (scores []float64, inx int, strict bool) {
	if c.tfIdf && !c.DidConvertTfIdf {
		panic("Using a TF-IDF classifier. Please call ConvertTermsFreqToTfIdf before calling ProbScores.")
	}
	priors := c.getPriors()
	scores = make([]float64, len(c.Classes))
	var sum float64
	for i, class := range c.Classes {
		data := c.datas[class]
		p := priors[i]
		for _, w := range doc {
			p *= data.getWordProb(w)
		}
		scores[i] = p
		sum += p
	}
	for i := range scores {
		scores[i] /= sum
	}
	inx, strict = findMax(scores)
	atomic.AddInt32(&c.seen, 1)
	return scores, inx, strict
}
// SafeProbScores works like ProbScores but cross-checks the result
// against a log-space computation; if the two classifications disagree,
// ErrUnderflow is returned alongside the (unreliable) scores.
func (c *Classifier) SafeProbScores(doc []string) (scores []float64, inx int, strict bool, err error) {
	if c.tfIdf && !c.DidConvertTfIdf {
		panic("Using a TF-IDF classifier. Please call ConvertTermsFreqToTfIdf before calling SafeProbScores.")
	}
	priors := c.getPriors()
	count := len(c.Classes)
	scores = make([]float64, count)
	logScores := make([]float64, count)
	var sum float64
	for i, class := range c.Classes {
		data := c.datas[class]
		p := priors[i]
		lp := math.Log(priors[i])
		for _, w := range doc {
			wp := data.getWordProb(w)
			p *= wp
			lp += math.Log(wp)
		}
		scores[i] = p
		logScores[i] = lp
		sum += p
	}
	for i := range scores {
		scores[i] /= sum
	}
	inx, strict = findMax(scores)
	logInx, logStrict := findMax(logScores)
	// Underflow manifests as the two computations disagreeing.
	if inx != logInx || strict != logStrict {
		err = ErrUnderflow
	}
	atomic.AddInt32(&c.seen, 1)
	return scores, inx, strict, err
}

// WordFrequencies returns a matrix of per-class word probabilities,
// indexed as freqMatrix[classIndex][wordIndex].
func (c *Classifier) WordFrequencies(words []string) (freqMatrix [][]float64) {
	freqMatrix = make([][]float64, len(c.Classes))
	for i, class := range c.Classes {
		data := c.datas[class]
		row := make([]float64, len(words))
		for j, w := range words {
			row[j] = data.getWordProb(w)
		}
		freqMatrix[i] = row
	}
	return freqMatrix
}

// WordsByClass returns each learned word's relative frequency within the
// given class.
func (c *Classifier) WordsByClass(class Class) (freq map[string]float64) {
	data := c.datas[class]
	total := float64(data.Total)
	freq = make(map[string]float64, len(data.Freqs))
	for word, count := range data.Freqs {
		freq[word] = count / total
	}
	return freq
}
// WriteToFile gob-serializes the classifier to the named file, creating
// it if needed. The file is truncated first: the original
// O_WRONLY|O_CREATE (without O_TRUNC) left stale trailing bytes behind
// when overwriting a longer previous snapshot, corrupting the gob stream.
func (c *Classifier) WriteToFile(name string) (err error) {
	file, err := os.OpenFile(name, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
	if err != nil {
		return err
	}
	defer file.Close()
	return c.WriteTo(file)
}

// WriteClassesToFile writes every class's data to its own file under
// rootPath (one file per class, named after the class). Write errors are
// no longer silently discarded: the first failure aborts the loop and is
// returned to the caller.
func (c *Classifier) WriteClassesToFile(rootPath string) (err error) {
	for name := range c.datas {
		if err = c.WriteClassToFile(name, rootPath); err != nil {
			return err
		}
	}
	return nil
}
// WriteTo gob-encodes the classifier to w. The seen counter is read
// atomically, consistent with Seen and the scoring methods which update
// it via atomic.AddInt32; the original read it directly, which is a data
// race under concurrent scoring.
func (c *Classifier) WriteTo(w io.Writer) (err error) {
	enc := gob.NewEncoder(w)
	seen := int(atomic.LoadInt32(&c.seen))
	err = enc.Encode(&serializableClassifier{c.Classes, c.learned, seen, c.datas, c.tfIdf, c.DidConvertTfIdf})
	return
}

// WriteClassToFile gob-serializes one class's data to path/<class>.
// The file is truncated on open: the original O_WRONLY|O_CREATE left
// stale trailing bytes when a previous, longer file existed.
func (c *Classifier) WriteClassToFile(name Class, path string) (err error) {
	data := c.datas[name]
	fileName := filepath.Join(path, string(name))
	file, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
	if err != nil {
		return err
	}
	defer file.Close()

	enc := gob.NewEncoder(file)
	err = enc.Encode(data)
	return err
}
// ReadClassFromFile loads one class's gob-encoded data from
// location/<class> and installs it into the classifier. On decode
// failure the classifier is left untouched; the original incremented
// learned and stored the half-decoded data even when Decode failed.
//
// NOTE(review): incrementing learned here counts loaded classes, not
// documents — confirm that matches the counter's intended meaning.
func (c *Classifier) ReadClassFromFile(class Class, location string) (err error) {
	fileName := filepath.Join(location, string(class))
	file, err := os.Open(fileName)
	if err != nil {
		return err
	}
	defer file.Close()

	w := new(classData)
	if err = gob.NewDecoder(file).Decode(w); err != nil {
		return err
	}
	c.learned++
	c.datas[class] = w
	return nil
}

// findMax returns the index of the largest value in scores and whether
// that maximum is strictly greater than every other value (false when
// the current maximum is tied).
func findMax(scores []float64) (inx int, strict bool) {
	inx, strict = 0, true
	for i := 1; i < len(scores); i++ {
		switch {
		case scores[i] > scores[inx]:
			inx, strict = i, true
		case scores[i] == scores[inx]:
			strict = false
		}
	}
	return inx, strict
}

// main is an intentionally empty entry point; this file is compiled as a
// standalone program but all functionality lives in the classifier API.
func main() {

}
