package crawler

import (
	"crawlies/src/base"
	"regexp"
	"strings"
)

// urlPattern matches a double-quoted http(s) URL prefix, stopping before
// any closing quote, query string ('?'), or fragment ('#'). The opening
// quote is part of the match and is stripped before the URL is forwarded.
// Compiled once at package init (the pattern is constant), instead of on
// every call with a silently ignored error as before.
var urlPattern = regexp.MustCompile(`"http[^"?#]*`)

// Analysis scans raw HTML for quoted http/https links and forwards each
// discovered URL onto base.Ready2Save from a background goroutine, so the
// caller is never blocked by a slow channel consumer.
//
// NOTE(review): the spawned goroutine has no cancellation mechanism; if
// nothing drains base.Ready2Save it blocks forever (goroutine leak) —
// confirm a consumer always runs for the lifetime of the process.
func Analysis(html string) {
	urls := urlPattern.FindAllString(html, -1)
	go func(found []string) {
		for _, raw := range found {
			// The pattern admits no interior quotes, so the only quote
			// present is the leading one captured by the regex.
			u := strings.TrimPrefix(raw, `"`)
			base.Logger.Info().Println(" >-> 解析到Ready2Save ", u)
			base.Ready2Save <- u
		}
	}(urls)
}
