package crawl

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"strconv"
)

// Crawl fetches one result page of a paginated search and forwards the raw
// response body to a consumer over a channel.
type Crawl struct {
	pageIndex int            // 1-based page number submitted as the "p" form field
	url       string         // endpoint the search form is POSTed to
	send      chan<- []byte  // receives the raw response body of a successful fetch
}

// NewCrawl returns a Crawl that will fetch the given page index from url and
// deliver the fetched page body on the send channel.
func NewCrawl(url string, pageIndex int, send chan<- []byte) *Crawl {
	return &Crawl{
		url:       url,
		pageIndex: pageIndex,
		send:      send,
	}
}

// StartGetData POSTs the fixed search form (with this Crawl's page index) to
// crawl.url and forwards the raw response body on crawl.send.
//
// On any failure — request error, non-200 status, or body read error — it
// logs a message to stdout and returns without sending anything, matching the
// original best-effort behavior.
func (crawl *Crawl) StartGetData() {
	// Build the search form; only the page number ("p") varies per Crawl.
	formData := url.Values{}
	formData.Set("sex", "f")
	formData.Set("stc", "1:37,2:20.28,23:1")
	formData.Set("f", "search")
	formData.Set("listStyle", "bigPhoto")
	formData.Set("sv", "1")
	formData.Set("p", strconv.Itoa(crawl.pageIndex))

	res, err := http.PostForm(crawl.url, formData)
	if err != nil {
		fmt.Printf("get the page of %d occur err:%s\n", crawl.pageIndex, err)
		return
	}
	defer res.Body.Close()

	// A non-200 response carries an error page, not search results; don't
	// forward it to the consumer.
	if res.StatusCode != http.StatusOK {
		fmt.Printf("get the page of %d occur err:unexpected status %s\n", crawl.pageIndex, res.Status)
		return
	}

	// NOTE(review): ioutil.ReadAll is deprecated since Go 1.16; switch to
	// io.ReadAll when the file's import block can be migrated.
	buf, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Printf("parse the page of %d occur err:%s\n", crawl.pageIndex, err)
		return
	}
	crawl.send <- buf
}