package pc

import (
	"compress/gzip"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
)

// GetWeb issues a GET request to _url with the given client, sending reader
// as the request body, and returns the response body as a string. Responses
// served with Content-Encoding: gzip are transparently decompressed. The
// package-level chromeHeader is attached so the request resembles a browser.
// Any error is logged and returned; the returned string is empty on error.
func GetWeb(client *http.Client, _url string, reader io.Reader) (string, error) {
	req, err := http.NewRequest("GET", _url, reader)
	if err != nil {
		fmt.Println("爬虫错误：", err)
		return "", err
	}
	// chromeHeader is declared elsewhere in this package; it mimics Chrome.
	req.Header = chromeHeader

	rsp, err := client.Do(req)
	if err != nil {
		fmt.Println("爬虫错误：", err)
		return "", err
	}
	defer rsp.Body.Close()

	var body []byte
	switch rsp.Header.Get("Content-Encoding") {
	case "gzip":
		// Fix: the gzip.NewReader error was previously discarded, which
		// could cause a nil-pointer panic on a malformed gzip body.
		gz, gzErr := gzip.NewReader(rsp.Body)
		if gzErr != nil {
			fmt.Println("爬虫错误：", gzErr)
			return "", gzErr
		}
		// Fix: the gzip reader was never closed (resource leak).
		defer gz.Close()
		if body, err = ioutil.ReadAll(gz); err != nil {
			fmt.Println("爬虫错误：", err)
			return "", err
		}
	default:
		if body, err = ioutil.ReadAll(rsp.Body); err != nil {
			fmt.Println("爬虫错误：", err)
			return "", err
		}
	}
	fmt.Println("爬虫结果：", string(body), "\n", rsp.Header)
	return string(body), nil
}
// getWebFile is an unimplemented stub. Judging by its name and signature it
// is presumably intended to download the resource at url to a local file
// using client — TODO confirm intent and implement; all parameters are
// currently unused.
func getWebFile(client *http.Client, url string, reader io.Reader) {

}
