package parser

import (
	"crawler/engine"
	"crawler/model"
	"regexp"
	"strconv"
)

var (
	// cityUrlRe matches links to other city listing pages
	// (http://www.zhenai.com/zhenghun/...); currently only used by the
	// commented-out "more cities" section in ParseCity.
	cityUrlRe = regexp.MustCompile(`href="(http://www.zhenai.com/zhenghun/[^"]+)"`)
	// Earlier, simpler pattern that captured only the profile URL and name;
	// kept for reference.
	//profileRe = regexp.MustCompile(`<a href="(http://album.zhenai.com/u/[0-9]+)"[^>]*>([^<]+)</a>`)
	// profileRe matches one profile table row on a city listing page.
	// Submatch groups: [1] profile URL, [2] name, [3] gender (性别),
	// [4] residence (居住地), [5] age (年龄), [6] education,
	// [7] marriage status (婚况), [8] height (身高).
	// NOTE(review): tightly coupled to the site's exact HTML layout —
	// any markup change will silently produce zero matches.
	profileRe = regexp.MustCompile(
		`<table><tbody><tr><th><a href="(http://album.zhenai.com/u/[0-9]+)"[^>]+>([^<]+)</a></th></tr> <tr><td[^>]+><span[^>]+>性别：</span>([^<]+)</td> <td><span[^>]+>居住地：</span>([^<]+)</td></tr> <tr><td[^>]+><span[^>]+>年龄：</span>([^<]+)</td> <td><span class="grayL">[^<]+</span>([^<]+)</td> <!----></tr> <tr><td[^>]+><span[^>]+>婚况：</span>([^<]+)</td> <td[^>]+><span[^>]+>身   高：</span>([^<]+)</td></tr></tbody></table>`)
)

// 城市解析器
func ParseCity(contents []byte, _ string) engine.ParserResult {
	result := engine.ParserResult{}

	matches := profileRe.FindAllSubmatch(contents, -1)
	//fmt.Printf("matches len: %d\n", len(matches))
	for _, match := range matches {
		profile := model.Profile{}
		profile.Name = string(match[2])
		profile.Gender = string(match[3])
		profile.Hokou = string(match[4])
		retInt, err := strconv.Atoi(string(match[5]))
		if err == nil {
			profile.Age = retInt
		}

		profile.Education = string(match[6])
		profile.Marriage = string(match[7])
		height, err := strconv.Atoi(string(match[8]))
		if err == nil {
			profile.Height = height
		}

		//fmt.Printf("city profile: %v\n", profile)

		result.Requests = append(result.Requests, engine.Request{
			Url:        string(match[1]),
			ParserFunc: ProfileParser(string(match[2]), profile),
		})
	}

	//解析更多城市
	// urlMatches := cityUrlRe.FindAllSubmatch(contents, -1)
	// for _, match := range urlMatches {
	// 	result.Requests = append(result.Requests, engine.Request{
	// 		Url:        strings.Replace(string(match[1]), `\u002F`, "/", -1),
	// 		ParserFunc: ParseCity,
	// 	})
	// }

	return result
}
