package main

import (
	"github.com/gocolly/colly"
	"github.com/gocolly/colly/extensions"
	"github.com/gocolly/colly/queue"
	"go.uber.org/zap"
	"net/url"
	"regexp"
	"spider/csv_to_json"
	m "spider/my_logger"
	"spider/tool"
	"spider/visited"
	"spider/write_file"
	"time"
)

const (
	//URL_STR = "http://www.stats.gov.cn/tjsj/tjbz/tjyqhdmhcxhfdm/2022/"

	// URL_STR is the seed page to crawl: the NBS 2022
	// administrative-division code index.
	URL_STR = "http://www.stats.gov.cn/sj/tjbz/tjyqhdmhcxhfdm/2022/"
	//URL_STR   = "http://www.stats.gov.cn/tjsj/tjbz/tjyqhdmhcxhfdm/2021/53.html"
	// TRY_COUNT is the maximum number of retries per URL before giving up.
	TRY_COUNT = 5
	// LEVEL is how many administrative levels deep to crawl.
	LEVEL = 4
)

var (
	// VISITED tracks which URLs have been requested (flag) and how many
	// times each has been retried (count); see OnRequest/OnError below.
	VISITED = visited.NewVisitedType()
	// W collects crawled rows (write_file.CityInfo) and writes them to
	// the CSV output file.
	W       = write_file.NewWriteFile(write_file.WithFileName("./output_temp4.csv"))
)

// main converts the previously crawled CSV output into a JSON file.
// NOTE(review): the crawl step is commented out — run CrawlingRun first
// to produce ./output_temp4.csv before the conversion can succeed.
func main() {
	csv_to_json.CsvToJson("./output_temp4.csv", "./area.json")
	//CrawlingRun()
}

// 爬取数据
func CrawlingRun() {
	logDebugger := &m.LogDebugger{}
	defer func() {
		m.Sync()
		logDebugger.Sync()
		VISITED.Clear()
		m.Info("crawling finish")
	}()

	u, _ := url.Parse(URL_STR)

	c := colly.NewCollector(
		colly.AllowedDomains(u.Host),
		colly.MaxDepth(10),
		colly.AllowURLRevisit(),
		colly.Debugger(logDebugger),
	)

	q, _ := queue.New(
		10,                                          // Number of consumer threads
		&queue.InMemoryQueueStorage{MaxSize: 10000}, // Use default queue storage
	)

	extensions.RandomUserAgent(c)
	extensions.Referer(c)

	// 控制并发请求数据
	c.Limit(&colly.LimitRule{
		DomainGlob:  "*",
		Parallelism: 10,
		RandomDelay: 5 * time.Second,
	})

	c.SetRequestTimeout(30 * time.Second)

	rootRegex := regexp.MustCompile(`^(\d+)\.html$`)
	branchRegex := regexp.MustCompile(`^\d+/(\d+)\.html$`)

	c.OnRequest(func(r *colly.Request) {
		if VISITED.Get(r.URL.String()) {
			r.Abort()
			m.Warn(r.URL.String(), zap.String("msg", "is abort"))
		} else {
			VISITED.Set(r.URL.String(), true)
		}
	})

	// 抓取5级
	if LEVEL > 4 {
		c.OnHTML("tr.villagetr", func(e *colly.HTMLElement) {
			td := e.DOM.Find("td")
			pCode := tool.GetCode(e.Request.URL.Path)

			W.Write(&write_file.CityInfo{
				Code:   td.Eq(0).Text(),
				Name:   td.Eq(2).Text(),
				Level:  tool.GetLevel(pCode),
				Parent: pCode,
			})
		})
	}

	c.OnHTML("a[href]", func(e *colly.HTMLElement) {
		var (
			link = e.Attr("href")
			name string
			code string
		)

		if rootRegex.Match([]byte(link)) {
			code = rootRegex.FindStringSubmatch(link)[1]
			name = e.Text
		} else if branchRegex.Match([]byte(link)) {
			if regexp.MustCompile(`^\d+$`).Match([]byte(e.Text)) {
				return
			}
			code = branchRegex.FindStringSubmatch(link)[1]
			name = e.Text
		} else {
			return
		}
		pCode := tool.GetCode(e.Request.URL.Path)

		level := tool.GetLevel(pCode)

		W.Write(&write_file.CityInfo{
			Code:   code,
			Name:   name,
			Level:  level,
			Parent: pCode,
		})

		if level == LEVEL {
			return
		}

		r2, err := e.Request.New("GET", e.Request.AbsoluteURL(link), nil)
		if err == nil {
			q.AddRequest(r2)
		}
	})

	c.OnError(func(r *colly.Response, err error) {
		i := VISITED.GetCount(r.Request.URL.String())
		if i > TRY_COUNT {
			m.Warn(r.Request.URL.String(), zap.String("msg", "quit!"))
			return
		}
		VISITED.SetFlagAndAddCount(r.Request.URL.String(), false, 1)
		r2, e := r.Request.New("GET", r.Request.URL.String(), nil)
		if e == nil {
			q.AddRequest(r2)
		}
		m.Warn(r.Request.URL.String(), zap.Int("tryCount", i))
	})

	m.Info("crawling start")
	W.Start()
	err := q.AddURL(URL_STR)
	if err != nil {
		m.Panic(err.Error())
	}
	err = q.Run(c)
	if err != nil {
		m.Panic(err.Error())
	}
	W.IsDone()
	W.Wait()

}
