package ths

import (
	"encoding/json"
	"gitee.com/gcom/gbox/errors"
	"gitee.com/gcom/stockdock/comm"
	"gitee.com/gcom/stockdock/core/support"
	"gitee.com/gcom/stockdock/crawler"
	"strconv"
	"strings"
)

// histQuote mirrors the JSON payload THS (10jqka) returns for a stock's
// historical daily quotes. Price, Volume, AfterVolume and Dates arrive as
// comma-separated strings that GetStockQuoteHist decodes bar by bar.
type histQuote struct {
	Total       comm.QuotedInt `json:"total"`       // number of bars encoded in Price/Volume/Dates
	Start       string         `json:"start"`       // first trading date of the series
	Name        string         `json:"name"`        // stock display name
	MarketType  string         `json:"marketType"`  // market classification from upstream
	SortYear    [][2]int       `json:"sortYear"`    // [year, barCount] pairs, in series order
	PriceFactor int32          `json:"priceFactor"` // price scaling factor reported by upstream
	Price       string         `json:"price"`       // 4 comma-separated fields per bar (see GetStockQuoteHist)
	Volume      string         `json:"volumn"`      // NOTE: "volumn" matches the upstream API's misspelling
	AfterVolume string         `json:"afterVolumn"` // same upstream misspelling; after-hours volume, unused here
	Dates       string         `json:"dates"`       // month-day fragments; year comes from SortYear
}

// GetStockQuoteHist crawls the THS (10jqka) stock page for every code
// received on codes and delivers the decoded daily K-line history to
// onValue. It blocks until codes is closed. Crawl-level failures are
// reported through onError (with a nil request when no request is in
// flight) and discarded requests through onDiscard.
func GetStockQuoteHist(tctx support.TracerCtx, codes <-chan string, onValue func(crawler.CrawlRequest, []*StockHistThs),
	onError crawler.CrawlOnError, onDiscard crawler.CrawlOnDiscardFunc) {

	cctx := crawler.NewCrawlContext(tctx)
	defer cctx.Cancel()
	url := "http://stockpage.10jqka.com.cn/600000/"
	request := make(chan crawler.CrawlRequest)
	defer close(request)
	onValueFunc := func(r crawler.CrawlRequest, val string) {
		// Malformed payloads make the comm.MustParse* helpers or the
		// prices/volumes/dates indexing panic. Recover and report via
		// onError — mirroring GetStockQuote's guard — instead of
		// crashing the crawler goroutine.
		defer func() {
			if rec := recover(); rec != nil {
				code := r.(*StockHistCrawlRequest).Code
				e := errors.Newf("解析同花顺股票(%s)历史行情数据失败, <%s>\n%v", code, val, rec)
				tctx.Logger.Errorf("解析同花顺股票(%s)历史行情数据失败, <%s>\n%v", code, val, e)
				onError(r, e)
			}
		}()

		data := histQuote{}
		if err := json.Unmarshal([]byte(val), &data); err != nil {
			tctx.Logger.Errorf("解析json消息失败, <%s> \n%v", val, err)
			onError(r, err)
			return
		}

		list := make([]*StockHistThs, 0, int(data.Total))
		index := 0
		pre := float64(0) // previous bar's close, carried into PreClose
		prices := strings.Split(data.Price, ",")
		volumes := strings.Split(data.Volume, ",")
		dates := strings.Split(data.Dates, ",")
		// SortYear lists [year, barCount] pairs; bars are laid out
		// consecutively across prices/volumes/dates in the same order.
		for _, y := range data.SortYear {
			year := y[0]
			for i := 0; i < y[1]; i++ {
				kl := StockHistThs{}
				// Each bar occupies 4 price fields: the low, then the
				// open/high/close offsets relative to the low. The /100
				// suggests values arrive in 1/100 price units — confirm
				// against the upstream payload format.
				pos := index * 4
				kl.Code = r.(*StockHistCrawlRequest).Code
				kl.Name = data.Name
				kl.Low = comm.MustParseFloat(prices[pos])
				kl.Open = (kl.Low + comm.MustParseFloat(prices[pos+1])) / 100
				kl.High = (kl.Low + comm.MustParseFloat(prices[pos+2])) / 100
				kl.Close = (kl.Low + comm.MustParseFloat(prices[pos+3])) / 100
				kl.Low /= 100
				kl.PreClose = pre
				kl.Volume = comm.MustParseInt(volumes[index])
				// dates holds month-day fragments; prepend the year
				// taken from SortYear to form a full compact date.
				kl.Date = comm.MustParseTime(strconv.Itoa(year)+dates[index], comm.CompactDatePattern)
				pre = kl.Close
				index++
				list = append(list, &kl)
			}
		}
		onValue(r, list)
	}

	go func() {
		err := crawler.CrawlAndEval(cctx, url, request)
		if err != nil {
			cctx.Tracer().Logger.Errorf("chromedp异常，%v", err)
			onError(nil, err)
		}
	}()

	for code := range codes {
		request <- &StockHistCrawlRequest{
			Ctx:           tctx,
			Code:          code,
			OnValueFunc:   onValueFunc,
			OnErrorFunc:   onError,
			OnDiscardFunc: onDiscard,
		}
	}
}

// GetStockQuote crawls the THS (10jqka) stock page for every code received
// on codes, parses the realtime quote payload and delivers one
// StockQuoteThs per code to onValue. It blocks until codes is closed;
// failures are routed to onError and discarded requests to onDiscard.
func GetStockQuote(tctx support.TracerCtx, codes <-chan string, onValue func(crawler.CrawlRequest, *StockQuoteThs),
	onError crawler.CrawlOnError, onDiscard crawler.CrawlOnDiscardFunc) {

	cctx := crawler.NewCrawlContext(tctx)
	defer cctx.Cancel()
	const url = "http://stockpage.10jqka.com.cn/600000/"
	requests := make(chan crawler.CrawlRequest)
	defer close(requests)

	handleValue := func(r crawler.CrawlRequest, val string) {
		var payload struct {
			Items map[string]interface{} `json:"items"`
		}
		// The MustParse* helpers and the .(string) assertions panic on
		// unexpected payloads; recover and surface the failure through
		// onError so one bad quote cannot kill the crawler goroutine.
		defer func() {
			if rec := recover(); rec != nil {
				code := r.(*StockQuoteCrawlRequest).Code
				e := errors.Newf("解析同花顺股票(%s)报价数据失败, <%s>\n%v", code, val, rec)
				tctx.Logger.Errorf("解析同花顺股票(%s)报价数据失败, <%s>\n%v", code, val, e)
				onError(r, e)
			}
		}()
		if err := json.Unmarshal([]byte(val), &payload); err != nil {
			tctx.Logger.Errorf("解析同花顺股票今日报价数据失败, <%s>\n%v", val, err)
			onError(r, err)
			return
		}

		items := payload.Items
		// Numeric fields are keyed by opaque THS field ids and encoded
		// as strings.
		num := func(key string) float64 {
			return comm.MustParseFloat(items[key].(string))
		}

		quote := StockQuoteThs{
			Code: r.(*StockQuoteCrawlRequest).Code,
			Name: items["name"].(string),
			// updateTime's leading 10 characters carry the date part.
			Timestamp:    comm.MustParseTime(items["updateTime"].(string)[:10], comm.DefaultDatePattern),
			Close:        num("10"),
			High:         num("8"),
			Low:          num("9"),
			Open:         num("7"),
			PreClose:     num("6"),
			LimitUp:      num("69"),
			LimitDown:    num("70"),
			PB:           num("592920"),
			PE:           num("134152"),
			PEForward:    num("2034120"),
			Change:       num("264648"),
			PctChg:       num("199112"),
			Amplitude:    num("526792"),
			TurnoverRate: num("1968584"),
			VolumeRatio:  num("1771976"),
			Volume:       int64(num("13")),
			Amount:       num("19"),
			// Share counts may be absent; NilString tolerates a nil value.
			TotalShare: int64(comm.MustParseFloat(comm.NilString(items["402"]))),
			FloatShare: int64(comm.MustParseFloat(comm.NilString(items["407"]))),
			TotalMV:    num("3541450"),
			FloatMV:    num("3475914"),
		}
		onValue(r, &quote)
	}

	go func() {
		if err := crawler.CrawlAndEval(cctx, url, requests); err != nil {
			cctx.Tracer().Logger.Errorf("chromedp异常，%v", err)
			onError(nil, err)
		}
	}()

	for code := range codes {
		requests <- &StockQuoteCrawlRequest{
			Ctx:           tctx,
			Code:          code,
			OnValueFunc:   handleValue,
			OnErrorFunc:   onError,
			OnDiscardFunc: onDiscard,
		}
	}
}
