package fetcher

import (
	"fmt"
	"gark-crawlers/config"
	"io/ioutil"
	"log"
	"net/http"
	"time"
)

// rateLimiter delivers one tick per allowed request so Fetch can throttle
// outbound traffic to config.Qps requests per second (presumably Qps is a
// duration-typed rate — confirm in package config). The ticker from
// time.Tick is never stopped; that is acceptable only because this limiter
// lives for the whole process.
var rateLimiter = time.Tick(time.Second / config.Qps)

// Fetch sends a GET request to url and returns the response body bytes.
// It blocks on the package rate limiter before issuing the request, and
// returns an error if the request cannot be built, the transport fails,
// or the server responds with a status other than 200 OK.
func Fetch(url string) ([]byte, error) {
	// Throttle: wait for the next rate-limiter tick before touching the network.
	<-rateLimiter
	log.Printf("fetching url %s", url)
	// A client without a Timeout can hang forever on a stalled server;
	// bound every request (dial + headers + body) to 30 seconds.
	client := http.Client{Timeout: 30 * time.Second}
	request, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	// Mimic a real browser so sites that reject unknown user agents still respond.
	request.Header.Set("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.106 Safari/537.36")

	resp, err := client.Do(request)
	if err != nil {
		return nil, err
	}

	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("wrong status code: %d", resp.StatusCode)
	}
	// NOTE: ioutil.ReadAll is deprecated in favor of io.ReadAll, but the
	// file-level io/ioutil import must stay referenced; switch both together.
	return ioutil.ReadAll(resp.Body)
}
