package spider

import (
	"asocrawler/conf"
	"asocrawler/db"
	"asocrawler/tool"
	"crypto/aes"
	"crypto/cipher"
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"net/http/cookiejar"
	"net/url"
	"regexp"
	"runtime/debug"
	"strconv"
	"strings"
	"time"

	"github.com/huichen/sego"
)

// API endpoints used by the crawler. (Note: "keword" and "topiceDeatil" are
// long-standing typos kept for compatibility with the rest of the file.)
const (
	loginUrl  = "https://cc.oceanengine.com/login"
	dataUrl   = "https://cc.oceanengine.com/creative_radar_api/v1/douyin/challenge_list" // Douyin topic (challenge) ranking
	kewordUrl = "https://trendinsight.oceanengine.com/api/open/index/query_topic" // topic lookup by keyword

	gameKeyWordUrl      = "https://trendinsight.oceanengine.com/api/open/content/content_creative_keywords" // game hot-keyword ranking
	gameKeyWordTopicUrl = "https://trendinsight.oceanengine.com/api/open/content/content_creative_keyword_items" // videos for one keyword

	huiTunLogin  = "https://login.huitun.com/weChat/userLogin" // HuiTun login
	huiTunCookie = "https://dyapi.huitun.com/userLogin"        // HuiTun SESSION-cookie endpoint
	huiTunUrl    = "https://dyapi.huitun.com/hot/topic"        // HuiTun hot-topic list

	topiceDeatilUrl = "https://trendinsight.oceanengine.com/arithmetic-index" // page driven headlessly by getTopicDetail
)

// HuiTunConfig holds the HuiTun login credentials. It is populated from the
// app config in GetHuiTunCookie and marshaled as the JSON body of the login
// requests.
var HuiTunConfig struct {
	Mobile   string `json:"mobile"`
	Password string `json:"password"`
}

// OceanEngineSpider crawls topic and keyword rankings from Oceanengine
// (巨量引擎) and HuiTun (灰豚).
type OceanEngineSpider struct {
	Client     *http.Client    // cookie-jar-backed HTTP client shared by all requests
	Advertiser string          // label used as the log prefix ("巨量引擎")
	Segmenter  *sego.Segmenter // Chinese word segmenter used to split topic names into keywords
}

// println logs v prefixed with the advertiser name and, when the global
// debug switch is on, an extra "-[Debug]" marker.
func (c *OceanEngineSpider) println(v ...interface{}) {
	prefix := fmt.Sprintf("[%s]", c.Advertiser)
	if conf.Conf.Debug.Switch {
		prefix += "-[Debug]"
	}
	log.Println(prefix, fmt.Sprintln(v...))
}
// NewOceanEngineSpider builds a spider with a cookie-aware HTTP client and a
// sego segmenter loaded from ./dictionary/dictionary.txt.
// It returns nil if the cookie jar cannot be created.
func NewOceanEngineSpider() (h *OceanEngineSpider) {
	jar, err := cookiejar.New(nil)
	if err != nil {
		// FIX: the error used to be silently dropped, leaving callers with a
		// nil spider and no diagnostic; log it before returning.
		log.Println("NewOceanEngineSpider: cookiejar.New failed,err=", err)
		return
	}

	h = &OceanEngineSpider{
		Advertiser: "巨量引擎",
		Client: &http.Client{
			Jar: jar,
		},
		Segmenter: new(sego.Segmenter),
	}
	// Load the segmentation dictionary (LoadDictionary reports problems
	// itself; it returns nothing).
	h.Segmenter.LoadDictionary("./dictionary/dictionary.txt")
	return
}

// Run starts the spider's collection loop in a background goroutine and
// returns immediately. Panics inside the loop are recovered by run itself.
// (Receiver renamed from the non-idiomatic `this` to `c` for consistency
// with the other methods; the value receiver is kept so callers are
// unaffected.)
func (c OceanEngineSpider) Run() {
	go c.run()
}

// TopicRankResult is the response envelope of the Douyin challenge_list API
// used by collectDouYinTopicTitle. Code 0 means success.
type TopicRankResult struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
	Data struct {
		HasMore    bool            `json:"has_more"`
		Items      []TopicRankItem `json:"items"`
		Pagination struct {
			TotalCount int `json:"total_count"`
		} `json:"pagination"`
	} `json:"data"`
}

// TopicRankItem is one Douyin challenge (topic) entry. Source and PType are
// not part of the API payload — they are filled in locally by the collectors.
type TopicRankItem struct {
	Audit         interface{} `json:"audit"`
	ChallengeId   string      `json:"challenge_id"`
	ChallengeName string      `json:"challenge_name"`
	CoverImageUri string      `json:"cover_image_uri"`
	DescInfo      string      `json:"desc_info"`
	IsOfficial    string      `json:"is_official"`
	ItemTops      []struct { // top videos under this challenge
		HeadImageUri string `json:"head_image_uri"`
		ItemId       string `json:"item_id"`
		VideoId      string `json:"video_id"`
		Vv           VV     `json:"vv"`
	} `json:"item_tops"`
	Metrics Metrics `json:"metrics"`
	Source  string // data origin, set locally ("巨量" or "灰豚")
	PType   string // product type, set locally (e.g. "游戏")
}

// Topic ranking looked up by keyword.

// TopicKeyWordResult is the response of the query_topic API used by
// collectKeyWord. All counters arrive as decimal strings.
type TopicKeyWordResult struct {
	Status int    `json:"status"`
	Msg    string `json:"msg"`
	Data   struct {
		BaseResp struct {
			StatusMessage string `json:"StatusMessage"`
			StatusCode    int    `json:"StatusCode"`
		} `json:"BaseResp"`
		TopicList []struct {
			DiggCnt     string `json:"digg_cnt"`      // like count
			ItemCnt     string `json:"item_cnt"`      // submission (video) count
			PlayReadCnt string `json:"play_read_cnt"` // play count
			TopicId     string `json:"topic_id"`      // topic id
			TopicIndex  string `json:"topic_index"`   // topic index score
			TopicName   string `json:"topic_name"`    // topic name
		} `json:"topic_list"`
	} `json:"data"`
}

// VV is a generic metric value in the Douyin payload; only Data is read by
// this file. The Data25/50/75/100 fields appear unused here — their exact
// meaning is unconfirmed.
type VV struct {
	Data         int         `json:"data"`
	Data100      interface{} `json:"data_100"`
	Data75       interface{} `json:"data_75"`
	Data50       interface{} `json:"data_50"`
	Data25       interface{} `json:"data_25"`
	Description  string      `json:"description"`
	Desensitized bool        `json:"desensitized"`
}

// Metrics groups per-topic counters attached to a TopicRankItem. The *All
// variants are presumably cumulative totals (collectKeyWord treats them as
// the parent topic's all-time values) — confirm against the API docs.
type Metrics struct {
	LikeCount       VV `json:"like_count"`
	LikeCountAll    VV `json:"like_count_all"`
	PublishCount    VV `json:"publish_count"`
	PublishCountAll VV `json:"publish_count_all"`
	Vv              VV `json:"vv"`
	VvAll           VV `json:"vv_all"`
}

// HuiTunTopic is the shared response shape of the HuiTun login and hot-topic
// endpoints. GetHuiTunCookie treats Code==20001 as login success; the topic
// listing uses Code==0 for success and 2001 for the trial-account limit.
type HuiTunTopic struct {
	Status  int    `json:"status"`
	Message string `json:"message"`
	Time    string `json:"time"`
	Total   int    `json:"total"`
	Code    int    `json:"code"`
	Data    []struct {
		Title     string `json:"title"`
		TopicId   string `json:"topicId"`
		AwemeList []struct { // videos under this topic; first cover is reused as topic cover
			CoverUrl string `json:"coverUrl"`
		} `json:"awemeList"`
	} `json:"data"`
}

// collectDouYinTopicTitle collects Douyin topic-ranking titles (at most the
// first 2 pages of 50 items each) from the Oceanengine challenge_list API,
// authenticating with the Toutiao cookie stored in the DB. periodType selects
// the ranking period. dateDay is unused here — presumably kept for signature
// symmetry with the other collectors; confirm before removing.
func (c *OceanEngineSpider) collectDouYinTopicTitle(dateDay string, periodType int) (ts []TopicRankItem, err error) {
	cookieStr, err := db.GetToutiaoCookieStr()
	if err != nil {
		c.println("collectDouYinTopicTitle 查询cookie失败！,err=", err)
		return
	}
	// Convert the stored cookie array to a single Cookie header string.
	cookieStr, err = tool.CookieArrToStr(cookieStr)
	if err != nil {
		c.println("collectDouYinTopicTitle 转换cookie失败！,err=", err)
		return
	}

	n := 1
	data := url.Values{}
	data.Add("order_by", "8")
	data.Add("period_type", fmt.Sprint(periodType)) // ranking period
	data.Add("industry_user_type", "1")
	data.Add("keywords_type", "1")
	data.Add("limit", "50")
	// Only the first two pages are collected.
	for n <= 2 {
		time.Sleep(2 * time.Second) // throttle between page requests
		data.Set("page", fmt.Sprint(n))
		urlStr := dataUrl + "?" + data.Encode()
		req, err := http.NewRequest("GET", urlStr, nil)
		if err != nil {
			c.println("collectDouYinTopicTitle error1,err=", err)
			return nil, err
		}
		req.Header.Add("cookie", cookieStr)
		resp, err := c.Client.Do(req)
		if err != nil {
			c.println("collectDouYinTopicTitle error2,err=", err)

			return nil, err
		}
		b, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			c.println("collectDouYinTopicTitle error3,err=", err)

			resp.Body.Close()
			return nil, err
		}
		resp.Body.Close()
		var r TopicRankResult
		err = json.Unmarshal(b, &r)
		if err != nil {
			c.println("collectDouYinTopicTitle error4,err=", err)

			return nil, err
		}
		if r.Code != 0 {
			c.println("collectDouYinTopicTitle error5")

			return nil, errors.New("n=" + fmt.Sprint(n) + " err=" + r.Msg)
		}
		if len(r.Data.Items) == 0 {
			break
		}
		// Tag each item with its source before accumulating.
		for _, v := range r.Data.Items {
			v.Source = "巨量"
			ts = append(ts, v)
		}
		n++
	}
	return
}

// GetHuiTunCookie logs in to HuiTun (灰豚) with the configured mobile/password
// and returns the "SESSION=..." cookie string required by the data API.
// Both the login and the cookie request are retried up to 3 times.
func (c *OceanEngineSpider) GetHuiTunCookie() (cookie string, err error) {
	HuiTunConfig.Mobile = conf.Conf.OceanengineConfig.HuiTunUser
	HuiTunConfig.Password = conf.Conf.OceanengineConfig.HuiTunPasswd
	data, err := json.Marshal(HuiTunConfig)
	if err != nil {
		c.println("GetHuiTunCookie error1,err=", err)
		return "", err
	}

	// Step 1: log in. Code 20001 means success; anything else is retried.
	tryCount := 1
re1:
	req1, err := http.NewRequest("POST", huiTunLogin, strings.NewReader(string(data)))
	if err != nil {
		c.println("GetHuiTunCookie error2,err=", err)
		return "", err
	}
	req1.Header.Add("Content-Type", "application/json")
	resp1, err := c.Client.Do(req1)
	if err != nil {
		c.println("GetHuiTunCookie error3,err=", err)
		return "", err
	}
	var b1 HuiTunTopic
	body1, err := ioutil.ReadAll(resp1.Body)
	if err != nil {
		c.println("GetHuiTunCookie error4,err=", err)
		resp1.Body.Close()
		return "", err
	}
	resp1.Body.Close()
	err = json.Unmarshal(body1, &b1)
	if err != nil {
		c.println("GetHuiTunCookie error5,err=", err)
		return "", err
	}
	if b1.Code != 20001 {
		// retry up to 3 times
		if tryCount <= 3 {
			log.Println("GetHuiTunCookie error,尝试重新登录...tryCount：", tryCount)
			tryCount++
			goto re1
		}
		c.println("GetHuiTunCookie error6,err= 灰豚登录失败！")
		return "", errors.New("灰豚登录失败！")
	}

	time.Sleep(1 * time.Second)

	// Step 2: hit the cookie endpoint and extract the SESSION cookie from
	// the response headers.
	tryCount = 1
re2:
	req2, err := http.NewRequest("POST", huiTunCookie, strings.NewReader(string(data)))
	if err != nil {
		c.println("GetHuiTunCookie error7,err=", err)
		return "", err
	}
	req2.Header.Add("Content-Type", "application/json")
	resp2, err := c.Client.Do(req2)
	if err != nil {
		c.println("GetHuiTunCookie error8,err=", err)
		return "", err
	}
	// FIX: resp2.Body was never closed, leaking a connection on every call
	// (and on every retry). Cookies() only reads headers, not the body.
	resp2.Body.Close()
	for _, v := range resp2.Cookies() {
		if v.Name == "SESSION" {
			cookie = "SESSION=" + v.Value
			break
		}
	}
	if cookie == "" {
		// retry up to 3 times
		if tryCount <= 3 {
			log.Println("GetHuiTunCookie error,尝试重新获取cookie...tryCount：", tryCount)
			tryCount++
			goto re2
		}
		return "", errors.New("Get huitun cookie failed！")
	}
	return
}

// collectHuiTunTopicTitle collects hot game topics from HuiTun (灰豚),
// paging through at most 5 pages of the hot/topic endpoint. Each result is
// mapped into a TopicRankItem with Source "灰豚" and PType "游戏".
// dateDay is unused here — presumably kept for signature symmetry; confirm.
func (c *OceanEngineSpider) collectHuiTunTopicTitle(dateDay string) (ts []TopicRankItem, err error) {
	n := 1
	data := url.Values{}
	data.Add("keyword", "")  // no keyword filter
	data.Add("cid", "10129") // category id for games
	data.Add("sort", "view_count_inc")
	// Log in first; every page request carries the SESSION cookie.
	cookie, err := c.GetHuiTunCookie()
	if err != nil {
		c.println("collectHuiTunTopicTitle error0,err=", err)
		return nil, err
	}
	for {
		time.Sleep(3000 * time.Millisecond) // throttle between pages
		data.Set("from", fmt.Sprint(n))
		urlStr := huiTunUrl + "?" + data.Encode()
		req, err := http.NewRequest("GET", urlStr, nil)
		if err != nil {
			c.println("collectHuiTunTopicTitle error1,err=", err)
			return nil, err
		}
		req.Header.Set("Cookie", cookie)
		resp, err := c.Client.Do(req)
		if err != nil {
			c.println("collectHuiTunTopicTitle2 error1,err=", err)
			return nil, err
		}
		b, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			c.println("collectHuiTunTopicTitle error3,err=", err)
			resp.Body.Close()
			return nil, err
		}
		resp.Body.Close()
		var r HuiTunTopic
		err = json.Unmarshal(b, &r)
		if err != nil {
			c.println("collectHuiTunTopicTitle error4,err=", err)
			return nil, err
		}
		if r.Code == 2001 { // trial account: only the first 50 rows are visible
			c.println(string(b))
			c.println("collectHuiTunTopicTitle error5")
			break
		}
		if r.Code != 0 {
			c.println(string(b))
			c.println("collectHuiTunTopicTitle error6")
			return nil, errors.New("n=" + fmt.Sprint(n) + " err=" + r.Message)
		}
		if len(r.Data) == 0 {
			break
		}
		for _, v := range r.Data {
			// Use the first video's cover as the topic cover, if any.
			var CoverImageUri string
			if len(v.AwemeList) > 0 {
				CoverImageUri = v.AwemeList[0].CoverUrl
			}
			ts = append(ts, TopicRankItem{
				Source:        "灰豚",
				PType:         "游戏",
				ChallengeName: v.Title,
				ChallengeId:   v.TopicId,
				CoverImageUri: CoverImageUri})
		}
		n++
		// Hard stop after 5 pages.
		if n == 6 {
			break
		}
	}
	return
}

// collectTopicRank builds the day's topic ranking: it merges the Douyin and
// HuiTun topic titles, expands each title into extra keywords with the
// segmenter, queries the keyword API for every entry, and de-duplicates the
// accumulated rows by challenge id.
func (c *OceanEngineSpider) collectTopicRank(dateDay string, periodType int) (ts []db.TopicRank, err error) {
	var strMap = make(map[string]TopicRankItem)
	var tempRes []db.TopicRank

	// 1. Douyin topic ranking (a failure here aborts the whole run).
	c.println("开始采集抖音话题榜...")
	titles1, err := c.collectDouYinTopicTitle(dateDay, periodType)
	if err != nil {
		return
	}
	log.Println("抖音话题榜 collectDouYinTopicTitle titles1 length:", len(titles1))

	// 2. HuiTun topic ranking. A failure here is tolerated: continue with an
	// empty list instead of aborting.
	c.println("开始采集灰豚话题榜...")
	titles2, err := c.collectHuiTunTopicTitle(dateDay)
	if err != nil {
		titles2 = []TopicRankItem{}
		c.println("err=", err)
		err = nil
	}

	// Merge and de-duplicate by topic name (HuiTun wins on collision).
	for _, t := range titles1 {
		strMap[t.ChallengeName] = t
	}
	for _, t := range titles2 {
		strMap[t.ChallengeName] = t
	}

	// Split every topic name into extra keywords (keep tokens > 3 bytes).
	// FIX: the original inserted the derived keywords into strMap while
	// ranging over it — per the Go spec, entries added during iteration may
	// or may not be visited, so some derived keywords could themselves be
	// re-segmented, nondeterministically. Collect them separately and merge
	// afterwards.
	derived := make(map[string]TopicRankItem)
	for _, v := range strMap {
		segments := c.Segmenter.Segment([]byte(v.ChallengeName))
		for _, token := range sego.SegmentsToSlice(segments, true) {
			if len(token) > 3 {
				derived[token] = v
			}
		}
	}
	for k, v := range derived {
		strMap[k] = v
	}

	c.println("拆分后关键词数量：", len(strMap))
	var dIndex int
	for keyword, v := range strMap {
		dIndex++
		if dIndex%200 == 0 {
			c.println("采集进度：", fmt.Sprintf("%d/%d", dIndex, len(strMap)))
		}
		time.Sleep(200 * time.Millisecond) // throttle keyword requests
		tt, err := c.collectKeyWord(dateDay, keyword, periodType, v)
		if err != nil {
			return nil, err
		}
		tempRes = append(tempRes, tt...)
	}

	// Drop rows with duplicate challenge ids, keeping the first occurrence.
	c.println("开始过滤相同话题...,数据长度:", len(tempRes))
	var catchMap = make(map[string]bool)
	for _, t := range tempRes {
		if !catchMap[t.ChallengeId] {
			catchMap[t.ChallengeId] = true
			ts = append(ts, t)
		}
	}
	c.println("过滤完成,数据长度:", len(ts))

	return ts, err
}

// collectKeyWord queries the topic-by-keyword API and converts the top 10
// results into db.TopicRank rows. pData is the parent topic the keyword was
// derived from: when a result's name matches the parent's, it inherits the
// parent's all-time metrics; otherwise it is linked via PChallengeId and
// marked non-official. Decode/status failures are retried up to 5 times.
func (c *OceanEngineSpider) collectKeyWord(dateDay, keyword string, periodType int, pData TopicRankItem) (ts []db.TopicRank, err error) {
	// Keyword data is only available for past days.
	timeLayout := "2006-01-02"
	parseDate, err := time.ParseInLocation(timeLayout, dateDay, time.Local)
	if err != nil {
		c.println("collectKeyWord error1,err=", err)
		return
	}
	// NOTE(review): despite the variable name this goes back 2 days, not 1 —
	// presumably because the index lags a day; confirm before changing.
	yesterday := parseDate.AddDate(0, 0, -2).Format("20060102")

	params := fmt.Sprintf(`{"param":{"app_name":"aweme","keyword":"%s","start_date":"%s","end_date":"%s"}}`, keyword, yesterday, yesterday)

	tryTimes := 0
re:
	req, err := http.NewRequest("POST", kewordUrl, strings.NewReader(params))
	if err != nil {
		c.println("collectKeyWord error2,err=", err)
		return nil, err
	}

	req.Header.Add("content-type", "application/json;charset=UTF-8")

	resp, err := c.Client.Do(req)
	if err != nil {
		c.println("collectKeyWord error3,err=", err)
		return nil, err
	}
	b, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		c.println("collectKeyWord error4,err=", err)
		resp.Body.Close()
		return ts, err
	}
	resp.Body.Close()
	var r TopicKeyWordResult
	err = json.Unmarshal(b, &r)
	if err != nil {
		c.println("collectKeyWord error5,尝试重新请求,err=", err)
		if tryTimes < 5 {
			tryTimes++
			time.Sleep(1 * time.Second)
			goto re
		}
		c.println("collectKeyWord error5,多次请求失败，退出,err=", err)
		c.println(keyword, string(b))
		return ts, err
	}
	if r.Status != 0 || r.Data.BaseResp.StatusCode == -1 {
		c.println("collectKeyWord error6,尝试重新请求,r.Status != 0")
		if tryTimes < 5 {
			tryTimes++
			time.Sleep(1 * time.Second)
			goto re
		}
		c.println("collectKeyWord error6,err=", r.Data.BaseResp.StatusMessage)
		c.println("error,params=", params)
		return nil, errors.New("r.Status != 0,b=" + string(b))
	}
	// Keep at most the first 10 topics.
	for i, v := range r.Data.TopicList {
		if i >= 10 {
			break
		}
		var publishAllCount, allHot, allLikeCount int
		var pChallengeId, isOfficial string

		publishCount, err := strconv.Atoi(v.ItemCnt)
		if err != nil {
			c.println("collectKeyWord error7,err=", err)
			return ts, err
		}
		hot, err := strconv.Atoi(v.PlayReadCnt)
		if err != nil {
			c.println("collectKeyWord error8,err=", err)
			return ts, err
		}
		likeCount, err := strconv.Atoi(v.DiggCnt)
		if err != nil {
			c.println("collectKeyWord error9,err=", err)
			return ts, err
		}
		topicIndex, err := strconv.Atoi(v.TopicIndex)
		if err != nil {
			c.println("collectKeyWord error10,err=", err)
			return ts, err
		}

		if pData.ChallengeName == v.TopicName {
			// This IS the parent topic: inherit its all-time metrics.
			publishAllCount = pData.Metrics.PublishCountAll.Data
			allHot = pData.Metrics.VvAll.Data
			allLikeCount = pData.Metrics.LikeCountAll.Data
			isOfficial = pData.IsOfficial
		} else {
			// A related topic: link it to the parent by id.
			pChallengeId = pData.ChallengeId

			// Topics found via keyword search default to non-official.
			isOfficial = "0"
		}

		t := db.TopicRank{
			DateDay:         dateDay,
			PeriodType:      periodType,
			ChallengeId:     v.TopicId,
			PChallengeId:    pChallengeId,
			ChallengeName:   v.TopicName,
			CoverImageUri:   pData.CoverImageUri,
			IsOfficial:      isOfficial,
			PublishCount:    publishCount,
			PublishAllCount: publishAllCount,
			Hot:             hot,
			AllHot:          allHot,
			LikeCount:       likeCount,
			AllLikeCount:    allLikeCount,
			TopicIndex:      topicIndex,
			Source:          pData.Source,
			PType:           pData.PType,
		}
		ts = append(ts, t)
	}

	return ts, err

}

// Game hot keywords.
// OceanEngineGameKeyWordRes is the response of the content_creative_keywords
// API (game hot-keyword ranking). Counters arrive as decimal strings.
type OceanEngineGameKeyWordRes struct {
	Status int    `json:"status"`
	Msg    string `json:"msg"`
	Data   struct {
		BaseResp struct {
			StatusMessage string `json:"StatusMessage"`
			StatusCode    int    `json:"StatusCode"`
		} `json:"BaseResp"`
		KeyWordList []struct {
			KeyWord     string `json:"keyword"`      // keyword text
			Cnt         string `json:"cnt"`          // video count
			IsHot       bool   `json:"is_hot"`       // meaning unknown; unused
			Index       string `json:"index"`        // composite index, the default sort key
			SearchIndex string `json:"search_index"` // search index
		} `json:"keyword_list"`
	} `json:"data"`
}

// Game hot keywords: https://trendinsight.oceanengine.com/vertical-analysis
//
// collectGameKeyWord collects yesterday's game hot-keyword ranking and, for
// every keyword, counts the "#topic" hashtags appearing in its videos.
// It returns the keyword rows plus an aggregated topic->GameTopic map which
// is then enriched with search indexes by getTopicDetail.
func (c *OceanEngineSpider) collectGameKeyWord(dateDay string) (ts []db.GameKeyWord, topicJson map[string]db.GameTopic, err error) {
	// Initialize the aggregate map.
	topicJson = make(map[string]db.GameTopic)

	// Only data up to yesterday is available.
	timeLayout := "2006-01-02"
	parseDate, err := time.ParseInLocation(timeLayout, dateDay, time.Local)
	if err != nil {
		c.println("collectGameKeyWord error1,err=", err)
		return
	}

	yesterday := parseDate.AddDate(0, 0, -1).Format("20060102")
	period := "day"
	tagId := "2015" // vertical tag id used by the game keyword endpoints
	params := fmt.Sprintf(`{"tag_id":"%s","end_date":"%s","period":"%s"}`, tagId, yesterday, period)

	// Retry the request up to 5 times on decode errors.
	tryTimes := 0
re:
	req, err := http.NewRequest("POST", gameKeyWordUrl, strings.NewReader(params))
	if err != nil {
		c.println("collectGameKeyWord error2,err=", err)
		return nil, nil, err
	}

	req.Header.Add("content-type", "application/json;charset=UTF-8")

	resp, err := c.Client.Do(req)

	if err != nil {
		c.println("collectGameKeyWord error3,err=", err)
		return nil, nil, err
	}

	b, err := ioutil.ReadAll(resp.Body)

	if err != nil {
		c.println("collectGameKeyWord error4,err=", err)
		resp.Body.Close()
		return ts, nil, err
	}

	resp.Body.Close()

	var r OceanEngineGameKeyWordRes

	err = json.Unmarshal(b, &r)

	if err != nil {
		c.println("collectGameKeyWord error5,尝试重新请求,err=", err)
		if tryTimes < 5 {
			tryTimes++
			time.Sleep(1 * time.Second)
			goto re
		}
		c.println("collectGameKeyWord error5,多次请求失败，退出,err=", err)
		return ts, nil, err
	}

	if r.Status != 0 || r.Data.BaseResp.StatusCode == -1 {
		c.println("collectGameKeyWord error6,err=", r.Data.BaseResp.StatusMessage)
		c.println("error,params=", params)
		return nil, nil, errors.New("r.Status != 0,b=" + string(b))
	}

	for _, v := range r.Data.KeyWordList {
		var cntCount, indexCount, searchIndexCount int

		// The API returns counters as strings; convert them all.
		cntCount, err := strconv.Atoi(v.Cnt)
		if err != nil {
			c.println("collectKeyWord error7,err=", err)
			return ts, nil, err
		}
		indexCount, err = strconv.Atoi(v.Index)
		if err != nil {
			c.println("collectKeyWord error8,err=", err)
			return ts, nil, err
		}
		searchIndexCount, err = strconv.Atoi(v.SearchIndex)
		if err != nil {
			c.println("collectKeyWord error9,err=", err)
			return ts, nil, err
		}

		var analysisUrl = "https://trendinsight.oceanengine.com/arithmetic-index/analysis?keyword=" + v.KeyWord

		// Count "#topic" hashtags in this keyword's video titles.
		resMap, err := c.getGameKeywordTopicJson(tagId, period, yesterday, v.KeyWord)
		if err != nil {
			c.println("collectKeyWord error10,err=", err)
			return ts, nil, err
		}

		// Fold the per-keyword counts into the day's aggregate topic map.
		for topic, count := range resMap {
			val, ok := topicJson[topic]

			// Accumulate the occurrence count.
			if ok {
				val.Count += count
				topicJson[topic] = val
			} else {
				topicJson[topic] = db.GameTopic{
					Topic:   topic,
					Count:   count,
					DateDay: dateDay,
					KeyWord: v.KeyWord,
				}
			}
		}

		// Store the per-keyword topic counts as a JSON string on the row.
		bytes, err := json.Marshal(resMap)
		if err != nil {
			return ts, nil, err
		}
		jsonStr := string(bytes)

		t := db.GameKeyWord{
			DateDay:     dateDay,
			KeyWord:     v.KeyWord,        // keyword text
			Cnt:         cntCount,         // video count
			Index:       indexCount,       // composite index
			SearchIndex: searchIndexCount, // search index
			AnalysisUrl: analysisUrl,      // link to the detailed analysis page
			TopicJson:   jsonStr,
		}
		ts = append(ts, t)

		// Each iteration issued an HTTP request; throttle a little.
		time.Sleep(100 * time.Millisecond)
	}

	topicJson, err = c.getTopicDetail(topicJson, dateDay)
	if err != nil {
		return
	}

	return

}

// OceanEngineGameKeyWordTopicRes is the response of the
// content_creative_keyword_items API: the videos found for one keyword.
type OceanEngineGameKeyWordTopicRes struct {
	Status int    `json:"status"`
	Msg    string `json:"msg"`
	Data   struct {
		BaseResp struct {
			StatusMessage string `json:"StatusMessage"`
			StatusCode    int    `json:"StatusCode"`
		} `json:"BaseResp"`
		ItemList []struct {
			Img    string `json:"img"`     // video cover image
			ItemId string `json:"item_id"` // video id
			Title  string `json:"title"`   // title (hashtags are extracted from it)
			Url    string `json:"url"`     // video URL
		} `json:"item_list"`
	} `json:"data"`
}

// getGameKeywordTopicJson fetches the video list for one game keyword and
// counts the "#topic" hashtags appearing in the video titles.
// The returned map is hashtag (including the leading '#') -> occurrence
// count. Decode failures are retried up to 5 times.
func (c *OceanEngineSpider) getGameKeywordTopicJson(tagId, period, yesterday, keyWord string) (resMap map[string]int, err error) {
	params := fmt.Sprintf(`{"tag_id":"%s","end_date":"%s","period":"%s","keyword":"%s"}`, tagId, yesterday, period, keyWord)
	// Matches a "#hashtag" up to the next '#', '|' or whitespace.
	// FIX: previously compiled inside the per-item loop on every iteration;
	// compile once up front.
	hashtagRe := regexp.MustCompile(`#[^#|\s]*`)
	tryTimes := 0
re:
	req, err := http.NewRequest("POST", gameKeyWordTopicUrl, strings.NewReader(params))
	if err != nil {
		c.println("getGameKeywordTopicJson error2,err=", err)
		return
	}

	req.Header.Add("content-type", "application/json;charset=UTF-8")

	resp, err := c.Client.Do(req)
	if err != nil {
		c.println("getGameKeywordTopicJson error3,err=", err)
		return
	}

	b, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		c.println("getGameKeywordTopicJson error4,err=", err)
		resp.Body.Close()
		return
	}

	resp.Body.Close()
	var r OceanEngineGameKeyWordTopicRes
	err = json.Unmarshal(b, &r)
	if err != nil {
		c.println("getGameKeywordTopicJson error5,尝试重新请求,err=", err)
		if tryTimes < 5 {
			tryTimes++
			time.Sleep(1 * time.Second)
			goto re
		}
		c.println("getGameKeywordTopicJson error5,多次请求失败，退出,err=", err)
		return resMap, err
	}
	if r.Status != 0 || r.Data.BaseResp.StatusCode == -1 {
		c.println("getGameKeywordTopicJson error6,err=", r.Data.BaseResp.StatusMessage)
		c.println("error,params=", params)
		return resMap, errors.New("r.Status != 0,b=" + string(b))
	}

	resMap = make(map[string]int)
	for _, v := range r.Data.ItemList {
		for _, w := range hashtagRe.FindAllString(v.Title, -1) {
			resMap[w]++
		}
	}

	return
}

// TopicDetailRes is the decrypted payload of the in-page
// get_multi_keyword_hot_trend request issued by getTopicDetail.
type TopicDetailRes struct {
	BaseResp struct {
		StatusMessage string `json:"StatusMessage"`
		StatusCode    int    `json:"StatusCode"`
	} `json:"BaseResp"`
	HotList []struct {
		Keyword string `json:"keyword"` // keyword without the leading '#'
		HotList []struct {
			Index string `json:"index"` // composite index
		} `json:"hot_list"`
		SearchHotList []struct {
			Index string `json:"index"` // search index
		} `json:"search_hot_list"`
	} `json:"hot_list"`
}

// getTopicDetail enriches each game topic in topics with its search index.
// It drives a headless browser to the trendinsight page and, from inside the
// page (so the site's cookies/headers apply), POSTs batches of up to 5
// keywords to the hot-trend API. The response is AES-encrypted and decrypted
// locally. The input map is returned with SearchIndex filled in where a
// matching keyword was found. Note: ts aliases the input map — topics is
// mutated in place.
func (c *OceanEngineSpider) getTopicDetail(topics map[string]db.GameTopic, date string) (ts map[string]db.GameTopic, err error) {

	ts = topics

	if len(topics) == 0 {
		return
	}

	c.println("开始获取游戏话题指数详情...")
	wd, err := tool.OpenHeadLessWebDriver()
	if err != nil {
		c.println("创建无头浏览器对象失败，err= ", err)
		return
	}
	// Quitting a webDriver closes its chrome window,
	// but it does not shut down the selenium server itself.
	defer func() {
		wd.Quit()             // close the browser
		tool.PutBrowserPool() // return the slot to the browser pool
	}()

	if err = wd.Get(topiceDeatilUrl); err != nil {
		c.println("打开首页失败!,err=", err)
		return
	}
	time.Sleep(1 * time.Second)

	// Install window.queryData in the page: it POSTs the given JSON body to
	// the hot-trend API and resolves with the raw (encrypted) response text.
	funcScripts := `
window.queryData =  function queryData (data) {
    var p = new Promise(function (resolve, reject) {
        var e = {
            "url": "https://trendinsight.oceanengine.com/api/open/index/get_multi_keyword_hot_trend",
            "method": "POST",
            "data": data
        };
        var h = new XMLHttpRequest; h.open(e.method, e.url, true);
        h.setRequestHeader("accept", "application/json, text/plain, */*");
        h.setRequestHeader("content-type", "application/json;charset=UTF-8");
        h.setRequestHeader("tea-uid", "7054893410171930123");
        h.onreadystatechange = function () {
            if (h.readyState != 4) return;
            if (h.readyState === 4 && h.status === 200) {
                resolve(h.response);
            } else {
            }
        };
        h.send(e.data);
    });
    return p;
}`

	_, err = wd.ExecuteScript(funcScripts, nil)
	if err != nil {
		c.println("初始化queryData函数失败，err=", err)
		return
	}

	c.println("初始化queryData函数成功...")

	// Query the topics in batches via the in-page function.

	pDate := strings.ReplaceAll(date, "-", "")

	keywords := make([]string, 0)
	for _, v := range topics {
		keywords = append(keywords, v.Topic)
	}

	// Batch window over keywords: [start, start+interval).
	start := 0
	interval := 5
	end := len(keywords)

	if end < interval {
		interval = end
	}

	tryTimes := 0
re:
	time.Sleep(300 * time.Millisecond) // throttle between batches
	tempEnd := start + interval
	if tempEnd > end {
		tempEnd = end
	}

	nowKeywords := keywords[start:tempEnd]

	kByte, err := json.Marshal(nowKeywords)
	if err != nil {
		c.println("json.Marshal keywords失败，err=", err)
		return
	}

	// The API expects keywords without the leading '#'.
	keywordStr := strings.ReplaceAll(string(kByte), "#", "")

	params := fmt.Sprintf(`'{"keyword_list":%s,"start_date":"%s","end_date":"%s","app_name":"aweme","region":[]}'`, keywordStr, pDate, pDate)
	log.Println("params:", params)
	scripts := fmt.Sprintf(`queryData(%s).then(res =>  {
var callback = arguments[arguments.length - 1];
var pRes = JSON.parse(res)

callback(pRes.data)
})`, params)

	text, err := wd.ExecuteScriptAsync(scripts, nil)

	tres, ok := text.(string)

	// Retry the current batch up to 10 times on script failure or an
	// unexpected (non-string) result.
	if err != nil || !ok {

		if tryTimes < 10 {
			c.println("尝试从新请求，start:", start, "tempEnd:", tempEnd)
			tryTimes++
			goto re
		}

		c.println("调用queryData函数失败，err=", err)
		return
	}

	c.println("调用queryData函数成功，text=", text)

	// text is an AES-128-encrypted string and must be decrypted first.

	jsonStr, err := c.decrypt(tres)
	if err != nil {
		c.println("解密失败，err=", err)
		return
	}

	var d TopicDetailRes
	err = json.Unmarshal([]byte(jsonStr), &d)
	if err != nil {
		c.println("解析json失败，err=", err)
		return
	}

	for _, q := range d.HotList {

		// Re-attach the '#' to map the API keyword back to our topic key.
		tTopic := "#" + q.Keyword

		// Topics were sent as one batch; look the original entry back up.
		v, ok := topics[tTopic]
		if ok {

			// SearchHotList holds at most one entry for a single date.
			if len(q.SearchHotList) != 0 {
				i, err := strconv.Atoi(q.SearchHotList[0].Index)
				if err != nil {
					c.println("话题：", v.Topic, "转换数据失败2，err=", err)
					continue
				}
				v.SearchIndex = i
			}

			ts[tTopic] = v
		}

	}

	if tempEnd < end {
		// Advance the window by one batch and request again.
		start = tempEnd
		goto re
	}

	return
}

// decrypt base64-decodes t and decrypts it in place with AES-128 in CFB mode
// using the site's fixed key/IV pair, returning the plaintext.
func (c *OceanEngineSpider) decrypt(t string) (r string, err error) {
	raw, err := base64.StdEncoding.DecodeString(t)
	if err != nil {
		return
	}

	var (
		iv  = []byte("amlheW91LHFpYW53") // 16 bytes = AES block size
		key = []byte("anN2bXA2NjYsamlh") // 16 bytes -> AES-128
	)

	var block cipher.Block
	if block, err = aes.NewCipher(key); err != nil {
		return
	}

	// CFB is a stream mode, so decrypting in place needs no padding logic.
	cipher.NewCFBDecrypter(block, iv).XORKeyStream(raw, raw)

	return string(raw), nil
}

// collect runs one full collection pass for dateDay: game keywords and their
// topics first, then the topic ranking. success is true only when both
// stages produced and stored data; (false, nil) means the upstream ranking
// has not updated yet.
func (c *OceanEngineSpider) collect(dateDay string) (success bool, err error) {
	timeLayout := "2006-01-02"
	gameKeyDay := dateDay
	// Game hot keywords.
re:
	ks, topicJson, err := c.collectGameKeyWord(gameKeyDay)

	if err != nil {
		c.println("collectGameKeyWord error:", err)
		return
	}

	if len(ks) == 0 {
		c.println("游戏关键词排行数据长度为0。。时间往前推一天,直到采集到数据为止")

		// No data for this day: step back one day and try again.
		// NOTE(review): this loop is unbounded — if the API never returns
		// data it walks back in time forever; consider capping the lookback.

		parseDate, _ := time.ParseInLocation(timeLayout, gameKeyDay, time.Local)

		gameKeyDay = parseDate.AddDate(0, 0, -1).Format("2006-01-02")

		c.println("当前 gameKeyDay：", gameKeyDay)

		goto re

	} else {
		err = db.SaveGameKeyWord(ks)
		if err != nil {
			return
		}

		//c.println("topicJson", topicJson)

		err = db.SaveGameTopic(topicJson)
		if err != nil {
			return
		}
	}

	// Topic ranking (periodType 1).
	ts, err := c.collectTopicRank(dateDay, 1)
	if err != nil {
		return
	}
	// Empty ranking: report "not updated yet" (false, nil).
	if len(ts) == 0 {
		return
	}
	err = db.SaveTopicRank(ts, dateDay)
	if err != nil {
		return
	}

	return true, nil
}

// run is the spider's main loop. In debug mode it collects every date listed
// in the config exactly once; otherwise it re-collects today's data every
// 3 hours forever. A deferred recover keeps a panicking crawl from killing
// the process.
func (c *OceanEngineSpider) run() {
	defer func() {
		if err := recover(); err != nil {
			c.println("[崩溃]err=", err, string(debug.Stack()))
		}
	}()

	if conf.Conf.Debug.Switch {
		for _, fDate := range conf.Conf.Debug.DateList {
			c.println("日期:", fDate, "开始采集数据...")
			succ, err := c.collect(fDate)
			switch {
			case succ:
				c.println("日期:", fDate, "采集数据完成")
			case err == nil:
				c.println("日期:", fDate, "数据未更新")
			default:
				c.println("日期:", fDate, "出现错误err:", err)
			}
		}
		return
	}

	for {
		c.GetDataByDate(time.Now().Format("2006-01-02"))
		time.Sleep(3 * time.Hour)
	}
}

// GetDataByDate runs one collection pass for date (format "2006-01-02") and
// logs the outcome. On error it sends an alert — except for webdriver
// "script timeout" errors, which are deliberately not alerted (2023-02-20).
func (c *OceanEngineSpider) GetDataByDate(date string) {
	c.println("日期:", date, "开始采集数据...")

	success, err := c.collect(date)
	switch {
	case err != nil:
		errMsg := err.Error()
		if !strings.Contains(errMsg, "script timeout") {
			// Script timeouts do not trigger the alert (2023-02-20).
			tool.SendWarn("ASO报警-巨量引擎爬虫报错", "ASO报警-巨量引擎爬虫报错,错误信息："+errMsg)
		}
		c.println("日期:", date, "采集失败 err=", err)
	case success:
		c.println("日期:", date, "采集数据完成 稍后再次采集...")
	default:
		c.println("日期:", date, "数据未更新 稍后重新采集...")
	}
}
