package crawler

import (
	"crawlies/src/base"
	"crawlies/src/util/try"
	"io/ioutil"
	"net/http"
	"time"
)

// Fetch 传入url 返回body
// Fetch performs an HTTP GET on url and returns the response body.
// It returns nil on any failure (request creation, transport error,
// non-200 status, or body read), logging the cause via base.Logger.
func Fetch(url string) []byte {
	client := new(http.Client)
	client.Timeout = 3 * time.Second
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		// Check the returned error, not req == nil, so the real
		// cause (e.g. malformed URL) is logged instead of dropped.
		base.Logger.Err().Println("Request create err:", err)
		return nil
	}
	// Spoof a desktop browser UA; some sites reject the default Go client.
	req.Header.Set("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11")
	req.Header.Set("Accept-Language", "zh-CN,zh;q=0.9")
	resp, err := client.Do(req)
	if err != nil {
		base.Logger.Err().Println("Http get err:", err)
		return nil
	}
	// Close the body on every path, including the non-200 early return
	// below (the previous placement leaked the body on bad statuses and
	// blocked connection reuse).
	defer try.Close(resp.Body)
	if resp.StatusCode != http.StatusOK {
		base.Logger.Err().Println("Http status code:", resp.StatusCode)
		return nil
	}
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		base.Logger.Err().Println("Read error", err)
		return nil
	}
	return body
}

// SaveUrl 传入url与数据 保存到本地
// SaveUrl persists the data bytes fetched from url to local disk and,
// on success, records the saved location in the shared SavedUrl2Path
// map. It reports whether the save succeeded.
func SaveUrl(url string, b []byte) bool {
	// Derive the target directory and file name from the URL.
	dir, file := url2Name(url)
	base.Logger.Info().Println(" ->| 开始保存", dir, " -> ", file, " >> ", len(b), "byte")
	ok := save(b, dir, file)
	if !ok {
		return false
	}
	// Record the url -> path mapping under the lock so concurrent
	// savers do not race on the shared map.
	base.SavedMapLock.Lock()
	defer base.SavedMapLock.Unlock()
	base.SavedUrl2Path[url] = dir + "/" + file
	return true
}
