package parser

import (
	"bytes"
	"fmt"
	"github.com/PuerkitoBio/goquery"
	"my-crawler/concurrence/engine"
	"my-crawler/concurrence/fetcher"
	"my-crawler/concurrence/model"
	"strings"
)

// profile maps a profile field label (e.g. "婚况") to its displayed value,
// as parsed from the "key：value" table cells on a user's detail page.
type profile map[string]string

// GetUserDetailParser extracts a user's profile fields from a detail-page
// HTML document and discovers links to other users' pages on the same page.
//
// content is the raw HTML body; data carries values forwarded from the
// previous parse (e.g. the user's name captured from the link text).
// The returned ParserResult holds the parsed item plus follow-up requests
// for every user link found on the page.
func GetUserDetailParser(content []byte, data map[string]string) engine.ParserResult {
	reader := bytes.NewReader(content)
	doc, err := goquery.NewDocumentFromReader(reader)
	if err != nil {
		// Previously the error was built with fmt.Errorf and discarded, and
		// execution continued into doc.Find on a possibly-nil doc (panic).
		// Log it and bail out with an empty result instead.
		fmt.Printf("goquery: load the HTML document failed: %v\n", err)
		return engine.ParserResult{}
	}
	res := engine.ParserResult{}
	person := model.SaveItem{} // item to be saved
	person.Name = data["name"]
	profile := profile{}

	// Part 1: collect the "key：value" profile table cells for this user.
	doc.Find(".myhome-wrap .brief-center .pure-table td").Each(func(i int, selection *goquery.Selection) {
		td := selection.Text() // e.g. "籍贯：武汉市" or "月收入：1-2000元"
		profile = parseItem(td, profile)
	})
	person.Marriage = profile["婚况"]
	res.Item = append(res.Item, person)

	// Part 2: queue follow-up requests for other user links on this page,
	// carrying the link text along as "name" for the next parse
	// (demonstrates passing parameters from one Parse to the next).
	doc.Find(".pure-u-2-3").Each(func(i int, selection *goquery.Selection) {
		a := selection.Find("a")
		href, exists := a.Attr("href")
		if !exists {
			return // no link in this cell; nothing to enqueue
		}
		res.Request = append(res.Request, engine.CrawlerRequest{
			Data: map[string]string{
				"url":  href,
				"name": a.Text(),
			},
			HttpFunc: fetcher.FetchByGet,
			Parser:   GetUserDetailParser,
		})
		fmt.Printf("Concurrence:新请求:%s \n", href)
	})

	return res
}

// parseItem splits a table-cell string of the form "key：value" on the
// full-width colon and records the pair in profile. Cells that do not
// contain exactly one separator are left unrecorded. The (possibly
// updated) profile map is returned.
func parseItem(td string, profile profile) profile {
	parts := strings.Split(td, "：")
	// A cell without the separator yields one part; a cell with several
	// separators yields more than two. Only the exact key/value shape counts.
	if len(parts) != 2 {
		return profile
	}
	profile[parts[0]] = parts[1]
	return profile
}
