package ths

import (
	"context"
	"fmt"
	"gitee.com/gcom/gbox/errors"
	"gitee.com/gcom/stockdock/comm"
	"gitee.com/gcom/stockdock/core/support"
	"gitee.com/gcom/stockdock/crawler"
	"github.com/PuerkitoBio/goquery"
	"github.com/chromedp/cdproto/network"
	"github.com/chromedp/cdproto/page"
	"github.com/chromedp/chromedp"
	"log"
	"regexp"
	"strconv"
	"strings"
	"time"
)

// codeFromHref extracts the 6-digit code that precedes the trailing slash of a
// 10jqka detail URL (e.g. ".../code/601318/" -> "601318"). It returns "" when
// the href is too short to contain one, instead of panicking on the slice.
func codeFromHref(href string) string {
	if len(href) < 7 {
		return ""
	}
	return href[len(href)-7 : len(href)-1]
}

// IndustryQuoteListThs fetches the THS (同花顺) industry quote list, paging
// through the AJAX endpoint until every page reported by the pagination
// widget has been read.
func IndustryQuoteListThs(ctx support.TracerCtx) ([]*IndustryQuoteThs, error) {
	// Named pageNo (not "page") so it does not shadow the imported
	// chromedp/cdproto "page" package used elsewhere in this file.
	pageNo := 1
	totalPage := 1 // real value is parsed from the first page's pagination widget
	list := make([]*IndustryQuoteThs, 0, 100)
	cctx, cancel := crawler.NewChromedpCtx(ctx, 0, nil)
	defer cancel()
	for pageNo <= totalPage {
		url := "http://q.10jqka.com.cn/thshy/index/field/199112/order/desc/page/" + strconv.Itoa(pageNo) + "/ajax/1/"
		doc, err := crawler.CrawlAndParsePageWith(cctx, url)
		if err != nil {
			return nil, errors.Wrap(err, "获取同花顺行业报价失败")
		}

		sel := doc.Find("table.m-table > tbody > tr")
		sel.Each(func(i int, s *goquery.Selection) {
			tds := s.Find("td").Nodes
			// Each data row has exactly 12 cells; anything else is a layout
			// change or a header row — log it and skip.
			if len(tds) != 12 {
				html, _ := s.Html()
				log.Println("行业报价数据格式错误：" + html)
				return
			}
			q := IndustryQuoteThs{}
			q.Name = comm.HTMLText(tds[1])
			q.Code = codeFromHref(comm.HTMLAttrOr(tds[1].FirstChild, "href", "0000000"))
			q.Percent = comm.MustParseFloat(comm.HTMLText(tds[2]))
			q.Volume = int64(comm.MustParseFloat(comm.HTMLText(tds[3])))
			q.Amount = comm.MustParseFloat(comm.HTMLText(tds[4]))
			q.FlowIn = comm.MustParseFloat(comm.HTMLText(tds[5]))
			q.Up = int(comm.MustParseInt(comm.HTMLText(tds[6])))
			q.Down = int(comm.MustParseInt(comm.HTMLText(tds[7])))
			q.Avg = comm.MustParseFloat(comm.HTMLText(tds[8]))
			q.LeadName = comm.HTMLText(tds[9])
			q.LeadCode = codeFromHref(comm.HTMLAttrOr(tds[9].FirstChild, "href", "0000000"))
			q.LeadPrice = comm.MustParseFloat(comm.HTMLText(tds[10]))
			q.LeadPercent = comm.MustParseFloat(comm.HTMLText(tds[11]))
			list = append(list, &q)
		})

		pageNo++
		if totalPage == 1 {
			// The widget text is "current/total"; parse the total once, on
			// the first page.
			pages := doc.Find("div#m-page > span.page_info").Text()
			if sa := strings.Split(pages, "/"); len(sa) == 2 {
				totalPage = int(comm.MustParseInt(sa[1]))
			}
		}
		ctx.Logger.Infof("已读取%d/%d页", pageNo-1, totalPage)
	}
	return list, nil
}

// ListDailyQuoteThs streams THS daily market quotes, page by page, into ch.
func ListDailyQuoteThs(tctx support.TracerCtx, ch chan<- QuotePageEventThs) error {
	cctx, cancel := crawler.NewChromedpCtx(tctx, 0, nil)
	defer cancel()
	return crawlAllPages(tctx, cctx, "http://q.10jqka.com.cn/", ch)
}

// IndustryStockListThs streams the stock quote pages of one THS industry
// (identified by industryCode) into ch.
func IndustryStockListThs(tctx support.TracerCtx, industryCode string, ch chan<- QuotePageEventThs) error {
	cctx, cancel := crawler.NewChromedpCtx(tctx, 0, nil)
	defer cancel()
	return crawlAllPages(tctx, cctx, "http://q.10jqka.com.cn/thshy/detail/code/"+industryCode, ch)
}

// crawlAllPages drives a chromedp session at url. It injects a
// MutationObserver on #maincont whose callback raises a JS alert carrying the
// current page number; the dialog listener below forwards that number through
// loadedPage. The main loop then snapshots the table HTML, parses it, pushes a
// QuotePageEventThs to ch, and clicks "下一页" until the last page is reached.
func crawlAllPages(tctx support.TracerCtx, ctx context.Context, url string, ch chan<- QuotePageEventThs) error {
	// Buffered so the dialog-listener goroutine rarely blocks. Deliberately
	// NOT closed on return: the ListenTarget callback may fire (and its
	// goroutine send) after this function exits, and a send on a closed
	// channel would panic the process. Leaving it open means at worst one
	// late sender parks on a garbage-collectable channel.
	var loadedPage = make(chan int, 1)
	chromedp.ListenTarget(ctx, func(ev interface{}) {
		if ev, ok := ev.(*page.EventJavascriptDialogOpening); ok {
			go func() {
				if err := chromedp.Run(ctx, page.HandleJavaScriptDialog(true)); err != nil {
					// Do not panic: a panic in this goroutine would crash the
					// whole process. Dropping the event lets the 10s timeout
					// in the main loop surface the failure as an error.
					tctx.Logger.Errorf("关闭alert失败, %v", err)
					return
				}
				pn, _ := strconv.Atoi(ev.Message)
				loadedPage <- pn
			}()
		}
	})

	return crawler.Crawl(ctx, chromedp.Tasks{
		network.SetExtraHTTPHeaders(map[string]interface{}{"Referer": url}),
		chromedp.Navigate(url),
		chromedp.ActionFunc(func(c context.Context) error {
			var e error
			var res int
			// Observe #maincont child-list changes; each change alerts the
			// current page number ("current/total" split on '/').
			var js = `
						var $_node = document.getElementById('maincont');
						var $_config = {attributes: false, childList: true, subtree: false};
						var $_observer = new MutationObserver(function(mutationsList, observer) {
							var p = $("#m-page > span.page_info").text();
							console.log('#maincont content changed: ' + p);
							alert(p.split('/')[0])
						});
						$_observer.observe($_node, $_config); 1;
						`
			e = chromedp.EvaluateAsDevTools(js, &res).Do(c)
			if e != nil {
				tctx.Logger.Errorf("执行JS脚本失败, %v", e)
				ch <- QuotePageEventThs{Err: e}
				return e
			}
			time.Sleep(1 * time.Second)
			loadedPage <- 1 // send first page event manually

			var html string
			var total = 1
			var li []*DailyQuoteThs
			var currentPage = 0
			for {
				select {
				case pn := <-loadedPage:
					if pn <= currentPage {
						// Stale or duplicate notification; "break" exits the
						// select only, so we keep waiting for a newer page.
						break
					}
					currentPage = pn
					e = chromedp.OuterHTML("#maincont", &html).Do(c)
					if e != nil {
						tctx.Logger.Errorf("获取页面内容失败, %v", e)
						ch <- QuotePageEventThs{Err: e}
						return e
					}
					li, total, e = parseQuotePage(html)
					if e != nil {
						tctx.Logger.Errorf("解析页码失败, %v", e)
						ch <- QuotePageEventThs{Err: e}
						return e
					}
					tctx.Logger.Infof("已读入页面%d/%d, 共%d条", pn, total, len(li))
					ch <- QuotePageEventThs{
						Data:      li,
						PageNo:    pn,
						PageCount: total,
					}
					if pn >= total {
						return nil
					}
					e = chromedp.EvaluateAsDevTools(`$("#m-page >a:contains('下一页')").click(); 1;`, &res).Do(c)
					if e != nil {
						tctx.Logger.Errorf("加载下一页(%d)失败, %v", pn, e)
						ch <- QuotePageEventThs{Err: e}
						return e
					}
				case <-time.After(10 * time.Second):
					return errors.New("等待加载数据超时")
				}
			}
			// No trailing return: the for/select above is a terminating
			// statement, and code after it would be unreachable (go vet).
		}),
	})
}

// parseQuotePage parses one rendered quote-table page: every well-formed data
// row becomes a *DailyQuoteThs, and the total page count is read from the
// pagination widget's "current/total" text (defaulting to 1 when absent).
func parseQuotePage(html string) (list []*DailyQuoteThs, pageCount int, err error) {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
	if err != nil {
		return nil, 0, errors.Wrap(err, "文档解析失败")
	}

	rows := doc.Find("table.m-table > tbody > tr")
	list = make([]*DailyQuoteThs, 0, len(rows.Nodes))
	rows.Each(func(_ int, row *goquery.Selection) {
		cells := row.Find("td").Nodes
		// A data row carries exactly 15 cells; skip (and log) anything else.
		if len(cells) != 15 {
			rowHTML, _ := row.Html()
			log.Println("报价数据格式错误：" + rowHTML)
			return
		}
		list = append(list, &DailyQuoteThs{
			Code:               comm.HTMLText(cells[1]),
			Name:               comm.SwipeWhitespace(comm.HTMLText(cells[2])),
			Price:              comm.MustParseFloat(comm.HTMLText(cells[3])),
			Percent:            comm.MustParseFloat(comm.HTMLText(cells[4])),
			Change:             comm.MustParseFloat(comm.HTMLText(cells[5])),
			TurnoverRate:       comm.MustParseFloat(comm.HTMLText(cells[7])),
			VolumeRatio:        comm.MustParseFloat(comm.HTMLText(cells[8])),
			Amplitude:          comm.MustParseFloat(comm.HTMLText(cells[9])),
			Amount:             comm.ParseAbbrFloat(comm.HTMLText(cells[10])),
			FloatShare:         int64(comm.ParseAbbrFloat(comm.HTMLText(cells[11]))),
			FloatMarketCapital: comm.ParseAbbrFloat(comm.HTMLText(cells[12])),
			PE:                 comm.MustParseFloat(comm.HTMLText(cells[13])),
		})
	})

	if len(list) == 0 {
		log.Println("空页面", html)
	}

	pageCount = 1
	info := doc.Find("div#m-page > span.page_info").Text()
	if parts := strings.Split(info, "/"); len(parts) == 2 {
		pageCount = int(comm.MustParseInt(parts[1]))
	}
	return list, pageCount, nil
}

// GetCompInfoThs downloads the THS company-profile page for the given stock
// code and parses it into a CompInfoThs.
func GetCompInfoThs(ctx context.Context, code string) (*CompInfoThs, error) {
	pageHTML, err := crawler.CrawlPageWith(ctx, "http://basic.10jqka.com.cn/"+code+"/company.html")
	if err != nil {
		return nil, errors.Wrap(err, fmt.Sprintf("获取企业%s信息失败", code))
	}

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(pageHTML))
	if err != nil {
		return nil, errors.Wrap(err, "文档解析失败")
	}
	return ParseCompInfoThs(code, doc)
}

// wsRegexp matches any run of whitespace. Compiled once at package scope so
// ParseCompInfoThs does not pay a regexp compile on every call.
var wsRegexp = regexp.MustCompile(`\s+`)

// ParseCompInfoThs extracts a company profile from a parsed THS
// basic.10jqka.com.cn/<code>/company.html document. Fields are scraped by
// positional CSS selectors, so a site layout change yields empty strings (or
// parser panics from the Must* helpers) rather than an error.
func ParseCompInfoThs(code string, doc *goquery.Document) (*CompInfoThs, error) {
	comp := CompInfoThs{Code: code, Name: doc.Find("body > input#stockName").AttrOr("value", "")}
	main := doc.Find("div.page_event_content")

	// Basic registration block: name, area, English name, industry, aliases.
	tr1 := main.Find("#detail > div.bd > table.m_table > tbody > tr")
	comp.CompName = tr1.Eq(0).Find("span").Eq(0).Text()
	comp.Area = tr1.Eq(0).Find("span").Eq(1).Text()
	comp.CompEnName = tr1.Eq(1).Find("span").Eq(0).Text()
	comp.Industry = tr1.Eq(1).Find("span").Eq(1).Text()
	comp.UsedNames = tr1.Eq(2).Find("span").Eq(0).Text()
	comp.Website = tr1.Eq(2).Find("span > a").AttrOr("href", "")

	// Business/management block: products, controllers, officers, intro.
	tr2 := main.Find("#detail > div.bd > div.m_tab_content2 > table.ggintro > tbody > tr")
	comp.MainBiz = tr2.Eq(0).Find("span").Text()
	// Strip all whitespace so the product list is one compact string.
	comp.Product = wsRegexp.ReplaceAllString(tr2.Eq(1).Find("td > span").Text(), "")
	comp.CtlShareholder = strings.TrimSpace(comm.HTMLNodeText(tr2.Eq(2).Find("td > div.mr10 > span")))
	comp.ActualCtrl = strings.TrimSpace(comm.HTMLNodeText(tr2.Eq(3).Find("td > div.mr10 > span")))
	comp.UltimateCtrl = strings.TrimSpace(comm.HTMLNodeText(tr2.Eq(4).Find("td > div.mr10 > span")))
	comp.Chairman = tr2.Eq(5).Find("td:nth-child(1) > span > a").Text()
	comp.Secretary = tr2.Eq(5).Find("td:nth-child(2) > span > a").Text()
	comp.Representative = tr2.Eq(5).Find("td:nth-child(3) > span > a").Text()
	comp.GeneralManager = tr2.Eq(6).Find("td:nth-child(1) > span > a").Text()
	comp.RegisterFund = comm.ParseFirstFloat(tr2.Eq(6).Find("td:nth-child(2) > span").Text())
	comp.EmployeeNum = int(comm.MustParseInt(tr2.Eq(6).Find("td:nth-child(3) > span").Text()))
	comp.Address = tr2.Eq(8).Find("td > span").Text()
	comp.Intro = comm.HTMLNodeText(tr2.Eq(9).Find("td > p"))

	// IPO block: issue date/volume/price, listing date, PE, underwriters.
	tr3 := main.Find("#publish > div.bd > table.m_table > tbody > tr")
	comp.EastDate = comm.MustParseTime(tr3.Eq(0).Find("td:nth-child(1) > span").Text(), comm.DefaultDatePattern)
	comp.IssueVolume = int64(comm.ParseFirstFloat(tr3.Eq(0).Find("td:nth-child(2) > span").Text()))
	comp.IssuePrice = comm.ParseFirstFloat(tr3.Eq(0).Find("td:nth-child(3) > span").Text())
	comp.ListDate = comm.MustParseTime(tr3.Eq(1).Find("td:nth-child(1) > span").Text(), comm.DefaultDatePattern)
	comp.IssuePE = comm.ParseFirstFloat(tr3.Eq(1).Find("td:nth-child(2) > span").Text())
	comp.InitOpenPrice = comm.ParseFirstFloat(tr3.Eq(2).Find("td:nth-child(1) > span").Text())
	comp.Demand2OfferRatio = comm.ParseFirstFloat(tr3.Eq(2).Find("td:nth-child(2) > span").Text())
	comp.InvestAmount = comm.ParseFirstFloat(tr3.Eq(2).Find("td:nth-child(3) > span").Text())
	comp.Underwriter = tr3.Eq(3).Find("td:nth-child(1) > div:nth-child(1) > span").Text()
	comp.Sponsor = tr3.Eq(3).Find("td:nth-child(1) > div:nth-child(2) > span").Text()
	comp.History = comm.HTMLNodeText(tr3.Eq(4).Find("td > p.none"))
	return &comp, nil
}
