package start

import (
	"crawlies/src/base"
	"crawlies/src/crawler"
	"crawlies/src/util/fileutil"
	"crawlies/src/util/try"
	"fmt"
	"log"
	"os"
	"strings"
	"time"
)

// Run bootstraps the crawler. It resolves the working directory, prepares
// the results directory at base.Path, launches the crawl loop in a
// background goroutine, seeds it with the entry URL firstIndex, and then
// polls progress once per second until the save limit is exceeded, at
// which point it terminates the process via log.Fatalln.
func Run(firstIndex string) {
	wd, err := os.Getwd()
	try.Throw(err)
	// Normalize Windows backslashes so base.Path is slash-separated.
	base.Path = strings.ReplaceAll(wd, "\\", "/") + "/results/"
	exists, err := fileutil.PathExists(base.Path)
	try.Throw(err)
	if !exists {
		try.Throw(fileutil.Mkdir(base.Path))
	}
	// Start the crawl loop.
	go crawler.DoIt()
	// Seed the crawler with the first entry URL.
	first(firstIndex)
	for {
		time.Sleep(1 * time.Second)
		// NOTE(review): Ready2Save, SavedUrl2Path and DeadUrl are presumably
		// mutated by the crawler.DoIt goroutine; reading len() here without
		// synchronization is a data race unless package base guards them —
		// verify (run with -race).
		base.Logger.Debug().Println("|| debug - len(base.Ready2Save) : ", len(base.Ready2Save))
		fmt.Println("|| debug - len(base.Ready2Save) : ", len(base.Ready2Save),
			" - len(base.SavedUrl2Path) : ", len(base.SavedUrl2Path),
			" - len(base.DeadUrl) : ", len(base.DeadUrl))

		// Exit: stop once the saved-page count exceeds the configured limit.
		// log.Fatalln exits the process, so the crawler goroutine is killed too.
		if len(base.SavedUrl2Path) > base.MaxSaveNum {
			log.Fatalln(" 保存数已达", base.MaxSaveNum, "!!! 结束爬取")
		}
	}
}

// first performs the initial crawl step: it fetches the entry page via
// crawler.Fetch, persists it with crawler.SaveUrl, and hands the page body
// to crawler.Analysis (presumably for link extraction — verify).
func first(firstIndex string) {
	body := crawler.Fetch(firstIndex)
	crawler.SaveUrl(firstIndex, body)
	crawler.Analysis(string(body))
}
