package parser

import (
	"bytes"
	"fmt"
	"github.com/PuerkitoBio/goquery"
	"my-crawler/single-task/engine"
	"strings"
)
// profile holds the key/value attributes scraped from one user's detail
// page (e.g. the "Name" entry plus each "label：value" table cell).
type profile map[string]string
// GetUserDetailParser parses a user-detail HTML page. It extracts the
// profile attribute table into the result's Item list and collects links
// to other users on the same page as follow-up crawl requests.
//
// name is the user's display name, carried over from the link text on the
// page that led here (the detail page itself may not repeat it).
func GetUserDetailParser(content []byte, name string) engine.ParserResult {
	reader := bytes.NewReader(content)
	doc, err := goquery.NewDocumentFromReader(reader)
	if err != nil {
		// BUG FIX: the original built an error with fmt.Errorf and then
		// discarded it, continuing on to dereference an invalid document.
		// Report the failure and return an empty result instead.
		fmt.Printf("goquery: load the HTML document failed: %v\n", err)
		return engine.ParserResult{}
	}
	res := engine.ParserResult{}
	person := make(profile)
	person["Name"] = name

	// Part 1: collect the user's attributes. Each matched td holds one
	// "label：value" pair, e.g. "籍贯：武汉市" or "月收入：1-2000元".
	doc.Find(".myhome-wrap .brief-center .pure-table td").Each(func(i int, selection *goquery.Selection) {
		person = parseItem(selection.Text(), person)
	})
	res.Item = append(res.Item, person)
	fmt.Printf("Item:%v \n", person)

	// Part 2: find links to other users and enqueue a request for each.
	// The link text (that user's name) is captured by the closure so it
	// reaches the next parse — this is the parameter carried between pages.
	doc.Find(".pure-u-2-3").Each(func(i int, selection *goquery.Selection) {
		a := selection.Find("a")
		href, exists := a.Attr("href")
		if exists {
			// a is created fresh per callback invocation, so capturing it
			// here is safe; read the text once for clarity.
			linkName := a.Text()
			res.Request = append(res.Request, engine.CrawlerRequest{
				href,
				func(c []byte) engine.ParserResult {
					return GetUserDetailParser(c, linkName)
				},
			})
			fmt.Printf("新请求:%s \n", href)
		}
	})

	return res
}

// parseItem extracts a single "label：value" pair from one table-cell
// string and records it in p. Cells that do not contain exactly one
// full-width colon are ignored. The map is returned so the caller can
// reassign it in a chained style.
func parseItem(td string, p profile) profile {
	// SplitN with limit 3 distinguishes the three cases cheaply:
	// no separator -> 1 part, exactly one -> 2 parts, two or more -> 3.
	parts := strings.SplitN(td, "：", 3)
	if len(parts) == 2 {
		p[parts[0]] = parts[1]
	}
	return p
}
