package crawlers

import (
	"net/http"
	"io/ioutil"
	"github.com/PuerkitoBio/goquery"
	"github.com/djimenez/iconv-go"
	"strings"
	"errors"
)

// HttpCrawler fetches remote HTML pages and parses them into goquery
// documents, converting the body from gb2312 to UTF-8 along the way.
//
// NOTE(review): none of these fields are read or written by the code
// visible here — presumably they are populated by other methods of this
// type; confirm against the rest of the file.
type HttpCrawler struct {
	charset string  // source character set of the fetched page — TODO confirm; Crawl currently hard-codes gb2312
	title string    // page title extracted from the document — TODO confirm where it is set
	score float64   // ranking/relevance score — semantics not visible here
	downloadUrl string // URL of the downloadable resource found on the page — TODO confirm
}

// Crawl fetches the HTML page at url, converts its body from gb2312 to
// UTF-8, and parses the result into a goquery document.
//
// It returns a non-nil error when the HTTP request fails, the server
// responds with anything other than 200 OK, the body cannot be read, or
// the charset conversion fails.
//
// NOTE(review): the source charset is hard-coded to gb2312; the response's
// Content-Type charset parameter is not consulted — confirm this is
// intentional for the sites being crawled.
func (p *HttpCrawler) Crawl(url string) (doc *goquery.Document, err error) {
	resp, err := http.Get(url)
	if err != nil {
		return
	}
	// Always release the body so the transport can reuse the connection.
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		err = errors.New("fetch html failed, url=" + url)
		return
	}

	input, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return
	}

	// gb2312 characters occupy up to 2 bytes but can expand to 3+ bytes in
	// UTF-8, so give the output buffer headroom; iconv reports how many
	// bytes it actually wrote, which we use to trim the slack below.
	out := make([]byte, len(input)*4)
	_, written, err := iconv.Convert(input, out, "gb2312", "utf-8")
	if err != nil {
		return
	}

	doc, err = goquery.NewDocumentFromReader(strings.NewReader(string(out[:written])))
	return
}