package crawler

import (
	"crawlies/src/base"
	"time"
)

// DoIt runs the crawl dispatch loop forever: while fewer than 1024
// worker goroutines hold a slot in base.GoroutineOpen, it takes the
// next URL from base.Ready2Save and crawls it in a new goroutine;
// otherwise it backs off briefly and re-checks.
func DoIt() {
	for {
		if len(base.GoroutineOpen) >= 1024 {
			// Worker limit reached; sleep briefly before re-checking.
			// NOTE(review): 20µs is effectively a busy-spin — confirm
			// whether 20*time.Millisecond was intended.
			time.Sleep(20 * time.Microsecond)
			continue
		}
		go tryCrawler(<-base.Ready2Save)
	}
}

// tryCrawler processes a single URL: it claims a slot in the global
// goroutine-limit channel, skips URLs that were already saved or are on
// the dead list, then fetches, saves, and analyses the page body. A URL
// whose fetch returns no data or whose save fails is recorded as dead
// so it will not be retried.
func tryCrawler(url string) {
	// Acquire a concurrency slot; release it when this worker exits.
	base.GoroutineOpen <- true
	defer func() {
		<-base.GoroutineOpen
	}()

	// Already saved? Then there is nothing to do.
	base.SavedMapLock.RLock()
	alreadySaved := base.SavedUrl2Path[url] != ""
	base.SavedMapLock.RUnlock()
	if alreadySaved {
		return
	}

	// Skip URLs on the dead list (previous fetch/save failures).
	base.DeadUrlLock.RLock()
	isDead := false
	for i := range base.DeadUrl {
		if base.DeadUrl[i] == url {
			isDead = true
			break // found — no need to scan the rest
		}
	}
	base.DeadUrlLock.RUnlock()
	if isDead {
		return
	}

	base.Logger.Info().Println(" -- 获取到有效url并执行 ", url)
	bs := Fetch(url)
	if len(bs) == 0 { // len of a nil slice is 0, so this covers nil too
		saveDeadUrl(url)
		return
	}
	if !SaveUrl(url, bs) {
		saveDeadUrl(url)
		return
	}
	Analysis(string(bs))
}

// saveDeadUrl records url on the shared dead-URL list under its write
// lock, so that subsequent workers skip it instead of re-fetching.
func saveDeadUrl(url string) {
	base.Logger.Info().Println(" <-| 记录无效url ", url)
	base.DeadUrlLock.Lock()
	base.DeadUrl = append(base.DeadUrl, url)
	base.DeadUrlLock.Unlock()
}
