package scheduler

import (
	"cd-crawler/config"
	"cd-crawler/download"
	"cd-crawler/parser"
	"cd-crawler/queue"
	"cd-crawler/storage"
	"cd-crawler/types"
	"log"
)

// Run wires together the crawler pipeline: it loads the crawl
// configuration, starts a pool of workers, seeds the request queue with
// the configured start URL, and then loops forever dispatching parse
// results — newly discovered URL requests are fed back into the request
// queue and every parsed item is handed to storage. It never returns.
func Run() {
	// Load crawl settings (start URL, parser name, worker count).
	conf := config.GetCrawlerConf()

	seed := types.UrlRequest{
		Url:      conf.Crawl_url,
		ParseFuc: conf.Crawl_func,
	}

	// Unbuffered request and result queues shared by all workers.
	reqQ := queue.RequestQueue{RequestChan: make(chan types.UrlRequest)}
	resQ := queue.ResultQueue{ResultChan: make(chan types.ParseResult)}

	// Push the seed asynchronously: the channel is unbuffered, so a
	// synchronous push would block until a worker is ready to receive.
	go func() { reqQ.Push(seed) }()

	// Start the worker pool.
	for i := 0; i < conf.Worker_num; i++ {
		go work(reqQ, resQ)
	}

	// Dispatch loop: fan discovered URLs back into the request queue and
	// persist every parsed item.
	for {
		result := resQ.Pop()

		for _, req := range result.UrlRequest {
			// Each push gets its own goroutine so the dispatcher never
			// blocks on the unbuffered request channel while workers are
			// all busy.
			go func(r types.UrlRequest) {
				reqQ.Push(r)
			}(req)
		}

		for _, item := range result.Item {
			storage.Save(item)
		}
	}
}

// work is a single crawler worker meant to run as a goroutine: it pops
// URL requests off the request queue, downloads and parses each page,
// and pushes the parse result onto the result queue. It loops forever.
func work(requestQueue queue.RequestQueue, resultQueue queue.ResultQueue) {
	for {
		r := requestQueue.Pop()

		// An empty parser name marks a leaf URL: nothing left to parse.
		if r.ParseFuc == "" {
			continue
		}

		reader, err := download.DownUrl(r.Url)
		if err != nil {
			// Fix: log.Fatal here killed the entire process (all workers
			// and the dispatcher) on a single failed download. Log the
			// error and move on to the next request instead.
			log.Printf("download %s: %v", r.Url, err)
			continue
		}

		// Hand the page body to the parser named in the request.
		parseResult := parser.Distribute(types.ParseParams{
			Reader: reader,
			Url:    r.Url,
		}, r.ParseFuc)
		resultQueue.Push(parseResult)
	}
}
