package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"strconv"
)

// HttpGet performs a GET request against url with a browser-like
// User-Agent header and returns the full response body as a string.
// On any failure the returned error is non-nil and result is empty.
func HttpGet(url string) (result string, err error) {
	client := &http.Client{}
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		fmt.Println("new request err=", err)
		return
	}
	// Set a desktop-browser User-Agent to avoid the site's anti-crawler check.
	req.Header.Add("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36")
	resp, err1 := client.Do(req)

	if err1 != nil {
		err = err1
		return
	}
	defer resp.Body.Close()
	// io.ReadAll replaces the original manual read loop, which shadowed
	// the named return `err` (a real read error was printed but never
	// propagated to the caller) and built the result with quadratic
	// string concatenation.
	body, err2 := io.ReadAll(resp.Body)
	if err2 != nil {
		err = err2
		return
	}
	result = string(body)
	return
}
// test crawls listing pages start..end (inclusive) of the news site and
// saves each page's HTML to "<page>.html" in the current directory.
// Failures on one page are reported and the loop moves on to the next.
func test(start, end int) {
	var url string
	fmt.Printf("正在爬取%d-%d页内容\n", start, end)
	for i := start; i <= end; i++ {
		if i == 1 {
			// The first listing page has no numeric suffix.
			url = "https://www.financialnews.com.cn/jg/ld/index.html"
		} else {
			url = "https://www.financialnews.com.cn/jg/ld/index_" + strconv.Itoa(i-1) + ".html"
		}
		fmt.Println("url=", url)
		result, err := HttpGet(url)
		if err != nil {
			fmt.Println("Http Getting err=", err)
			continue
		}
		// os.WriteFile replaces Create/WriteString/Close: the original
		// ignored the WriteString error, and its failure branch printed
		// the wrong variable (err, always nil here, instead of err1).
		fileName := strconv.Itoa(i) + ".html"
		if err1 := os.WriteFile(fileName, []byte(result), 0644); err1 != nil {
			fmt.Println("Creation error=", err1)
			continue
		}
	}
}
// main reads the start and end page numbers from stdin, validates them,
// and crawls that page range.
func main() {
	var start, end int
	fmt.Println("开始页:")
	fmt.Scan(&start)
	fmt.Println("终止页:")
	fmt.Scan(&end)
	if start < 1 || end < start {
		fmt.Println("非法操作")
		// Bug fix: the original fell through and called test() with the
		// invalid range anyway.
		return
	}
	test(start, end)
}
