package crawler

import (
	"encoding/json"
	"fmt"
	"strings"
	"time"

	"github.com/gocolly/colly"
	"github.com/gocolly/colly/extensions"
	"github.com/yejingxuan/accumulate/domain/entity"
	"github.com/yejingxuan/accumulate/domain/repository"
	"github.com/yejingxuan/accumulate/interface/dto"
	"go.uber.org/zap"

	"github.com/yejingxuan/accumulate/infrastructure/config"
	"github.com/yejingxuan/accumulate/infrastructure/logger"
	"github.com/yejingxuan/accumulate/infrastructure/utils"
)

var (
	// xueQiuAllDataUrl is the paginated Xueqiu screener endpoint listing all
	// CN (Shanghai/Shenzhen) stocks, ordered by percent change descending.
	// Format args: page number, page size, current timestamp in milliseconds.
	xueQiuAllDataUrl = "https://xueqiu.com/service/v5/stock/screener/quote/list?page=%d&size=%d&order=desc&orderby=percent&order_by=percent&market=CN&type=sh_sz&_=%d"

	// xuqQiuKLineUrl is the Xueqiu daily K-line (candlestick) endpoint.
	// Format args: stock symbol, begin timestamp in milliseconds, bar count.
	xuqQiuKLineUrl = "https://stock.xueqiu.com/v5/stock/chart/kline.json?symbol=%s&begin=%d&period=day&type=before&count=%d&indicator=kline"
	// kLineCount is passed as the count parameter; the negative value
	// presumably asks the API for the 90 bars preceding the begin
	// timestamp — TODO confirm against the Xueqiu API behavior.
	kLineCount     = -90
)

// StockCrawlerInterface describes the crawling operations exposed by this
// package: refreshing the base data of every stock and refreshing the
// K-line data of a single stock identified by its code.
type StockCrawlerInterface interface {
	// UpdateAllStockBaseData crawls the full stock list and persists it.
	UpdateAllStockBaseData()
	// UpdateKLineDataByCode crawls K-line data for one stock code; it
	// returns the request error, if any.
	UpdateKLineDataByCode(code string) error
}

// xueQiuCrawler crawls stock data from xueqiu.com and persists it through
// the injected repositories. It implements StockCrawlerInterface.
type xueQiuCrawler struct {
	stockRepo repository.StockRepo // persistence for stock base data
	klineRepo repository.KlineRepo // persistence for K-line data
}

// NewXueQiuCrawler builds a xueQiuCrawler wired with the given stock and
// K-line repositories.
func NewXueQiuCrawler(stockRepo repository.StockRepo, klineRepo repository.KlineRepo) *xueQiuCrawler {
	crawler := &xueQiuCrawler{
		stockRepo: stockRepo,
		klineRepo: klineRepo,
	}
	return crawler
}

// UpdateAllStockBaseData crawls the Xueqiu screener page by page and stores
// every listed CN stock via stockRepo. B-shares and ST (special treatment)
// stocks are skipped. Pagination is driven from inside OnResponse: after a
// page is persisted, the next page is visited until every page reported by
// the server has been consumed. Visits are synchronous, so the shared
// pageNo/totalPage locals are not accessed concurrently.
func (x xueQiuCrawler) UpdateAllStockBaseData() {
	pageNo := 1
	pageSize := 500
	totalPage := 1

	c := colly.NewCollector()

	// Register the rotating user agent and referer once; colly applies
	// these hooks to every subsequent request. (The original re-registered
	// RandomUserAgent before each Visit, piling up duplicate callbacks.)
	extensions.RandomUserAgent(c)
	extensions.Referer(c)

	c.OnRequest(func(r *colly.Request) {
		logger.Info("Visiting", zap.Any("URL", r.URL))
	})

	c.OnResponse(func(r *colly.Response) {
		resp := dto.XueQiuAllDataResp{}
		if err := json.Unmarshal(r.Body, &resp); err != nil {
			// Abort this page rather than processing a zero-value payload.
			logger.Error("json.Unmarshal err", zap.Any("err", err))
			return
		}
		// Ceiling division: the original total/pageSize+1 requested an
		// extra empty page whenever total was an exact multiple of pageSize.
		totalPage = (resp.Data.Count + pageSize - 1) / pageSize

		for _, item := range resp.Data.List {
			// Skip B-shares and ST stocks.
			if strings.Contains(item.Name, "B") || strings.Contains(item.Name, "ST") {
				continue
			}
			stock := entity.Stock{}
			utils.StructAssign(&stock, &item)
			_ = x.stockRepo.CreateStock(&stock)
		}

		if pageNo < totalPage {
			pageNo++
			// Small random pause to avoid hammering the endpoint.
			time.Sleep(time.Duration(utils.RandInt64(100, 200)) * time.Millisecond)
			_ = c.Visit(fmt.Sprintf(xueQiuAllDataUrl, pageNo, pageSize, time.Now().UnixNano()/1e6))
		}
	})

	// Kick off the crawl from the first page.
	_ = c.Visit(fmt.Sprintf(xueQiuAllDataUrl, pageNo, pageSize, time.Now().UnixNano()/1e6))
}

// UpdateKLineDataByCode fetches the most recent daily K-line bars for the
// given stock code from Xueqiu and upserts them via klineRepo. The request
// carries the configured Xueqiu cookie for authentication. It returns the
// first request or decode error encountered, or nil on success.
func (x xueQiuCrawler) UpdateKLineDataByCode(code string) error {
	var resErr error
	c := colly.NewCollector()

	c.OnRequest(func(r *colly.Request) {
		// Xueqiu's API requires a session cookie; attach the configured one.
		r.Headers.Set("cookie", config.CoreConf.Server.Crawler.XueQiuCookie)
		// Use the structured logger, consistent with the rest of the file.
		logger.Info("Visiting", zap.Any("URL", r.URL))
	})

	c.OnError(func(response *colly.Response, err error) {
		logger.Error("kline data do request err", zap.Error(err))
		resErr = err
	})

	c.OnResponse(func(r *colly.Response) {
		resp := dto.KlineDataResp{}
		if err := json.Unmarshal(r.Body, &resp); err != nil {
			// Propagate the decode failure instead of persisting a
			// zero-value record (the original logged and carried on).
			logger.Error("json.Unmarshal err", zap.Any("err", err))
			resErr = err
			return
		}
		kline, err := json.Marshal(resp.Data)
		if err != nil {
			logger.Error("json.Marshal err", zap.Any("err", err))
			resErr = err
			return
		}
		data := entity.Kline{
			Symbol: resp.Data.Symbol,
			Kline:  string(kline),
		}
		_ = x.klineRepo.CreateOrUpdateKlineInfo(&data)
	})

	extensions.RandomUserAgent(c)
	extensions.Referer(c)
	// Crawl the K-line endpoint for this code.
	baseUrl := fmt.Sprintf(xuqQiuKLineUrl, code, time.Now().UnixNano()/1e6, kLineCount)
	_ = c.Visit(baseUrl)
	return resErr
}
