package worker

import (
	"fmt"
	"os"
	"path/filepath"
	"strconv"
	"sync"

	"crawler/util"
)

// BaiDuTieBaWorking prompts the user for a start and end page number on
// stdin and crawls the corresponding Baidu Tieba pages. Input is validated
// against the constraints shown in the prompts (start >= 1, end >= start);
// invalid or unreadable input aborts the run instead of being passed on.
func BaiDuTieBaWorking() {
	var start, end int
	fmt.Println("请输入爬取的初始页数(>=1)")
	// Check the Scan error and enforce the documented lower bound; a bad
	// value here would otherwise build a nonsensical page offset below.
	if _, err := fmt.Scan(&start); err != nil || start < 1 {
		fmt.Println("输入无效：初始页数必须是不小于1的整数")
		return
	}
	fmt.Println("请输入爬取的终止页数(>=起始页数)")
	// end < start would make the worker's WaitGroup counter negative.
	if _, err := fmt.Scan(&end); err != nil || end < start {
		fmt.Println("输入无效：终止页数必须不小于初始页数")
		return
	}
	baiDuTieBaWorking(start, end)
}

// baiDuTieBaWorking crawls Tieba pages start through end inclusive, one
// goroutine per page, and blocks until every page goroutine has finished.
func baiDuTieBaWorking(start, end int) {
	// Guard the inverted range: sync.WaitGroup.Add with a negative delta
	// panics, and there is nothing to crawl anyway.
	if end < start {
		fmt.Println("终止页数不能小于初始页数")
		return
	}
	fmt.Printf("正在爬取第%d页到第%d页...\n", start, end)
	var waitGroup sync.WaitGroup
	waitGroup.Add(end - start + 1)
	// Crawl each page concurrently in its own goroutine.
	for index := start; index <= end; index++ {
		go doBaiDuTieBaWorking(index, &waitGroup)
	}
	// The calling goroutine waits here until all page workers are done.
	waitGroup.Wait()
}

// doBaiDuTieBaWorking fetches one Tieba page (the kw query is the
// URL-encoded forum name) and writes the raw HTML to temp/第N页.html.
// It is run as a goroutine tracked by waitGroup.
func doBaiDuTieBaWorking(index int, waitGroup *sync.WaitGroup) {
	// Done must fire on EVERY exit path. The original only called it on
	// the success path, so any error return left the caller's Wait()
	// deadlocked forever.
	defer waitGroup.Done()
	// Tieba shows 50 threads per page, so page N starts at offset (N-1)*50.
	url := "https://tieba.baidu.com/f?kw=%E5%8E%9F%E7%A5%9E&ie=utf-8&pn=" + strconv.Itoa((index-1)*50)
	fmt.Printf("开始爬取第%d页...\n", index)
	result, httpGetError := util.HttpGet(url)
	if httpGetError != nil {
		fmt.Printf("HttpGet Error, Page: %d, Error Message: %s\n", index, httpGetError)
		return
	}
	// os.WriteFile creates/truncates and closes the file itself, so the
	// handle cannot leak on a write error (the original skipped Close when
	// WriteString failed). filepath.Join keeps the path OS-portable
	// instead of hard-coding Windows separators.
	path := filepath.Join("temp", "第"+strconv.Itoa(index)+"页.html")
	if writeError := os.WriteFile(path, []byte(result), 0644); writeError != nil {
		fmt.Printf("WriteFile Error, Page: %d, Error Message: %s\n", index, writeError)
		return
	}
	fmt.Printf("第%d页爬取完成\n\n", index)
}
