package parser

import (
	"bytes"
	"fmt"
	"github.com/PuerkitoBio/goquery"
	"my-crawler/concurrence/engine"
	"my-crawler/concurrence/fetcher"
)

// GetCityUsersListParser parses a city's user-list HTML page, extracting each
// user's profile name and URL from ".posts .post .post-title a" anchors.
// For every user found it queues a follow-up CrawlerRequest that fetches the
// profile page via GET and hands it to GetUserDetailParser.
//
// content is the raw HTML of the list page; data is the request metadata
// passed through the engine (unused here, but required by the parser
// function signature). Returns a ParserResult whose Request slice holds one
// request per discovered user; empty if the HTML cannot be parsed.
func GetCityUsersListParser(content []byte, data map[string]string) engine.ParserResult {
	reader := bytes.NewReader(content)
	doc, err := goquery.NewDocumentFromReader(reader)
	if err != nil {
		// BUG FIX: the original called fmt.Errorf and discarded the result,
		// then continued with a nil doc (which would panic in doc.Find).
		// Report the failure and bail out with an empty result instead.
		fmt.Printf("goquery: loading the HTML document failed: %v\n", err)
		return engine.ParserResult{}
	}
	res := engine.ParserResult{}
	doc.Find(".posts .post .post-title a").Each(func(i int, s *goquery.Selection) {
		// For each item found, get the title (user name) and profile link.
		title := s.Text()
		href, _ := s.Attr("href") // missing href yields "", which is acceptable here
		reqData := map[string]string{
			"url":  href,
			"name": title,
		}
		res.Request = append(res.Request, engine.CrawlerRequest{
			Data:     reqData,
			HttpFunc: fetcher.FetchByGet,
			Parser:   GetUserDetailParser,
		})
		fmt.Printf("Concurrence: user_name:%s url:%s \n", title, href)
	})
	return res
}
