package fetcher

import (
	"fmt"
	"io/ioutil"
	"my-crawler/concurrence/engine"
	"net/http"
	"net/url"
	"strings"
)

type PostFormReq struct {
}

func (r *PostFormReq) Request(ReqData engine.ReqData) ([]byte, error) {
	<-ReqData.Rate
	var u string
	if ReqData.Url == "" {
		panic("No Request Url！")
	}
	fmt.Println(ReqData)
	//拼接url
	u = ReqData.Url
	data := ReqData.Payload
	header := ReqData.Header
	form := make(url.Values)
	for k, v := range data {
		form[k] = []string{v}
	}

	client := http.Client{}
	req, err := http.NewRequest("POST", u, strings.NewReader(form.Encode()))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	if ct, ok := header["Content-Type"]; ok {
		req.Header.Add("Content-Type", ct)
	}
	if ck, ok := header["Cookie"]; ok {
		req.Header.Add("cookie", ck)
	}
	if ug, ok := header["User-Agent"]; ok {
		req.Header.Add("User-Agent", ug)
	}
	resp, _ := client.Do(req)
	defer resp.Body.Close()
	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(string(body))
	return body, nil

}
