package parser

import (
	"bytes"
	"luiz/crawler/engine"
	"luiz/crawler_distributed/config"
	"regexp"
)

// After an area page has been fetched, this pattern extracts each detail-page URL and house name.
const urllist = `<h5 class="title [^>]+><a href="//([^"]+)[^>]+>([^<]+)</a>`

func PreaseUrls(contents []byte, _ string) engine.ParseResult {
	compile := regexp.MustCompile(urllist)
	areadeatil := compile.FindAllStringSubmatch(string(contents), -1)
	result := engine.ParseResult{}
	for _, ele := range areadeatil {
		var bt bytes.Buffer
		bt.WriteString("http://")
		bt.WriteString(ele[1])
		name := ele[2]
		//这里可以直接传ele[2]。函数传递不是闭包函数。
		result.Request = append(result.Request, engine.Request{Url: bt.String(), Parser: NewProfileParser(name)})
		//fmt.Printf("%v %v\n", bt.String(), "房屋名："+ele[2])
	}
	return result
}

// ProfileParse is a serializable parser for a detail page. It carries the
// house name captured from the area listing so the detail result can be
// associated with it (see Serialize, which ships the name across nodes).
type ProfileParse struct {
	userName string // house name extracted from the area listing page
}

// Parse implements the parser interface by delegating to PreaseDeatil,
// supplying the stored house name alongside the page contents and URL.
func (p *ProfileParse) Parse(contents []byte, url string) engine.ParseResult {
	name := p.userName
	return PreaseDeatil(contents, name, url)
}

// Serialize returns the registered parser identifier and the house name as
// its argument, so the parser can be reconstructed on a worker node.
func (p *ProfileParse) Serialize() (name string, args interface{}) {
	name = config.ProfileParse
	args = p.userName
	return
}

// NewProfileParser constructs a ProfileParse bound to the given house name.
func NewProfileParser(name string) *ProfileParse {
	p := ProfileParse{userName: name}
	return &p
}
// ProfileParser returns a ParserFunc closure that parses a detail page
// using the captured house name. Functional counterpart of NewProfileParser.
func ProfileParser(name string) engine.ParserFunc {
	parse := func(contents []byte, url string) engine.ParseResult {
		return PreaseDeatil(contents, name, url)
	}
	return parse
}
