package parser

import (
	"encoding/json"
	"log"
	"my-crawler/concurrence/engine"
	"my-crawler/concurrence/fetcher"
	"my-crawler/concurrence/task"
	"my-crawler/concurrence/task/juchao/models"
	"strconv"
)

// DetailParser parses a detail-page response (step 1: obtain the
// comment_id). While the configured end id has not been reached it
// schedules the next detail request, then unmarshals the page body
// into a models.Result item tagged with this request's id.
func DetailParser(content []byte, ReqData engine.ReqData) engine.ParserResult {
	res := engine.ParserResult{}

	// The id this request was issued for; it doubles as the cursor
	// used to build the follow-up request.
	page, ok := ReqData.Payload["id"]
	if !ok {
		log.Fatal("Not Found ID!")
	}

	// NOTE(review): the original code discarded this error; a
	// non-numeric id silently became 0 and defeated the end-id check.
	id, err := strconv.Atoi(page)
	if err != nil {
		log.Printf("invalid id %q: %v", page, err)
	}

	// JCEndSqlId == 0 means "no upper bound"; otherwise keep crawling
	// only while the current id is below the configured end id.
	if task.JCEndSqlId == 0 || id < task.JCEndSqlId {
		reqData, header, reqUrl := models.GetModels(page)
		// Enqueue the next detail request, re-using this parser.
		res.Request = append(res.Request, engine.CrawlerRequest{
			ReqData:  engine.CreateReqData(header, reqData, reqUrl), // initialise request data
			HttpFunc: &fetcher.GetReq{},                             // GET request; default delay is reused
			Parser:   DetailParser,
		})
	}

	// Decode the page body; on failure the item is still emitted with
	// only its ID set, matching the original best-effort behavior.
	var data models.Result
	if err := json.Unmarshal(content, &data); err != nil {
		log.Println(err)
	}
	data.ID = page
	res.Item = append(res.Item, data)

	return res
}
