package starter

import (
	"aqielife.cn/spider/config"
	mysql "aqielife.cn/spider/db"
	"aqielife.cn/spider/downloader"
	"aqielife.cn/spider/log"
	"aqielife.cn/spider/models"
	"aqielife.cn/spider/parser"
	"aqielife.cn/spider/pipeline"
	"aqielife.cn/spider/utils"
	"fmt"
	"net/http"
	"sync"
)

// wg synchronizes the crawl stages launched from Start; shared by the
// producer/consumer goroutines spawned there.
var wg sync.WaitGroup
var logger = log.NewSimpleLogger() // package-wide logger

// XinhuaStarter wires together the components of a Xinhua crawl run:
// downloader, inter-stage channels, worker pool, and page parser.
// All fields except StartUrl are populated lazily inside Start.
type XinhuaStarter struct {
	StartUrl   string                   // seed URL the crawl begins from
	Downloader downloader.GenDownloader // page downloader
	Channel    *pipeline.ArticleChannel // request/response channels between stages
	WorkPool   *pipeline.WorkPool       // worker pool that runs detail-processing tasks
	Parser     parser.ArticleAnalyzer   // list/detail page parser
}

// NewStarter builds a XinhuaStarter seeded with the given start URL.
// The remaining fields are wired up later by Start.
func NewStarter(startUrl string) *XinhuaStarter {
	s := new(XinhuaStarter)
	s.StartUrl = startUrl
	return s
}

// Start runs the full crawl pipeline: seed request -> list download ->
// list parsing -> per-article detail download/parse/persist.
// Stage 1 (DownList + FirstAnalyzer) and stage 2 (handlerTask1/2) are each
// synchronized through the package-level wg.
func (c *XinhuaStarter) Start() {
	// Snowflake ID generator must be ready before anything is persisted.
	if err := utils.InitSnowFlake("2021-12-03", 1); err != nil {
		fmt.Println("Init() failed, err = ", err)
		return
	}
	config.EastmoneyConfig.StartUrl = c.StartUrl
	// TODO: add request headers.
	config.InitConfig()
	// logger.Infoln("config:", config.EastmoneyConfig)
	c.Downloader = downloader.NewDownloader()
	c.Channel = pipeline.NewArticleChannel()
	// 3 workers; queue capacity comes from the configured request count.
	c.WorkPool = pipeline.NewWorkPool(3, config.EastmoneyConfig.RequestNum).Start()
	c.Parser = parser.NewXinhuaAnalyzer()
	request, err := http.NewRequest(config.EastmoneyConfig.RequestMethod, c.StartUrl, nil)
	if err != nil {
		// NOTE(review): if Panicln panics (as the name suggests), this return
		// is unreachable — confirm SimpleLogger semantics.
		logger.Panicln(err)
		return
	}
	newRequest := models.NewRequest(request, 0)
	c.Channel.ReqChan() <- *newRequest
	wg.Add(2)
	go c.DownList()      // downloads the list page
	go c.FirstAnalyzer() // parses it into article summaries
	wg.Wait()
	logger.Infoln("first finish")

	// chan []models.ArticleDetail -> chan models.ArticleDetail
	// The channel is closed by FirstAnalyzer before wg.Wait returns, so a
	// zero length here means no list was produced and a receive would yield
	// the zero value.
	respListChan := c.Channel.RespListChan()
	if len(respListChan) == 0 {
		logger.Panicln("respListChan 为空")
		return
	}
	respList := <-respListChan
	detailChan := make(chan models.ArticleDetail, 10)

	// var wg sync.WaitGroup
	wg.Add(2)
	go c.handlerTask1(detailChan, respList) // producer: feeds details into detailChan
	go c.handlerTask2(detailChan)           // consumer: hands processing to the worker pool
	wg.Wait()
	c.WorkPool.Stop()

	logger.Infoln("wgDetail Download finish")
}

// handlerTask1 feeds every parsed article summary into detailChan, then
// closes the channel so the consumer knows no more work is coming.
func (c *XinhuaStarter) handlerTask1(detailChan chan models.ArticleDetail, respList []models.ArticleDetail) {
	defer wg.Done()
	defer close(detailChan)
	for i := range respList {
		detailChan <- respList[i]
	}
}

// 从channel 读取数据,下载，解析，入库
func (c *XinhuaStarter) handlerTask2(detailChan chan models.ArticleDetail) {
	defer wg.Done()
	var task = pipeline.Task{
		F: func(args ...interface{}) {
			for {
				if ch, ok := <-detailChan; ok {
					println(ok, ch.Url)
					prereq, _ := http.NewRequest(config.EastmoneyConfig.RequestMethod, ch.Url, nil)
					basereq := models.NewRequest(prereq, 0)
					resp := c.Downloader.Download(basereq)
					detail := c.Parser.AnalyzeDetail(resp.GetRes(), ch)
					if detail != nil {
						if err := mysql.DB.Create(&detail).Error; err != nil {
							logger.Infoln("db.Create err : %v", err)
						}
					}
				} else {
					break
				}
			}
		},
		Args: []interface{}{},
	}
	c.WorkPool.PushTask(&task)
}

// DownList pulls the seed request from the request channel, downloads the
// list page, and publishes the response on the response channel, which is
// then closed so FirstAnalyzer's receive terminates.
//
// The original spawned a goroutine and immediately waited on it (plus a
// leftover empty `go func() {}()`); the work is now done inline — same
// ordering, no extra goroutines.
func (c *XinhuaStarter) DownList() {
	defer wg.Done()
	req := <-c.Channel.ReqChan()
	res := c.Downloader.Download(&req)
	if res != nil && res.GetRes().StatusCode == http.StatusOK {
		c.Channel.RespChan() <- *res // success: forward the response
	} else {
		// NOTE(review): Panicln presumably aborts here, leaving RespChan
		// open and FirstAnalyzer blocked — confirm SimpleLogger semantics.
		logger.Panicln("resp 为空", res)
	}
	close(c.Channel.RespChan())
}

// FirstAnalyzer reads the list-page response, parses it into article
// summaries, publishes them on the list channel, and closes that channel.
//
// The original spawned a goroutine and immediately waited on it, which is
// equivalent to running the body inline; the needless goroutine and local
// WaitGroup are removed. The send relies on RespListChan being buffered,
// since Start receives from it only after wg.Wait.
func (c *XinhuaStarter) FirstAnalyzer() {
	defer wg.Done()
	res := <-c.Channel.RespChan()
	list := c.Parser.AnalyzeList(res.GetRes())
	// log.Println("解析结果 list 数组:", resp)
	c.Channel.RespListChan() <- list
	close(c.Channel.RespListChan())
}

// DownDetail downloads and parses the detail page for a single article.
// It returns nil when the request cannot be built or the download fails,
// instead of ignoring the error / panicking on a nil response as before.
func (c *XinhuaStarter) DownDetail(ch models.ArticleDetail) *models.ArticleDetail {
	prereq, err := http.NewRequest(config.EastmoneyConfig.RequestMethod, ch.Url, nil)
	if err != nil {
		logger.Infoln("http.NewRequest err:", err)
		return nil
	}
	basereq := models.NewRequest(prereq, 0)
	resp := c.Downloader.Download(basereq)
	if resp == nil {
		// Download can return nil (see the nil check in DownList).
		logger.Infoln("download failed for url:", ch.Url)
		return nil
	}
	return c.Parser.AnalyzeDetail(resp.GetRes(), ch)
}
