package handler

import (
	"fmt"
	"log"
	"net/http"
	"net/url"
	"os"
	"time"

	"github.com/jackdanger/collectlinks"
)

const (
	//CrawlerUrl = "http://demo-f.kaipanyi.com/m/Login?t=t"
	// CrawlerUrl is the seed URL the crawl starts from.
	CrawlerUrl = "http://feijisu7.com/acg/"
)

// ResultUrls accumulates every discovered link, one URL per line;
// Run periodically flushes it to a file on disk.
// NOTE(review): package-level mutable state — appears to be written only
// from Run's synchronous loop (goroutines only send to the queue), but
// confirm no concurrent writers before adding more callers.
var ResultUrls string

// Run seeds the crawl queue with CrawlerUrl and then processes URLs from
// the queue forever (the queue is never closed, so Run does not return).
// After each page is fetched, the accumulated ResultUrls snapshot is
// rewritten to disk so partial results survive an interrupted crawl.
func Run() {
	queue := make(chan string)
	go func() {
		queue <- CrawlerUrl
	}()
	for target := range queue {
		if err := DownloadWeb(target, queue); err != nil {
			log.Println("DownloadWeb", err)
		}
		if err := saveResults("爬取结果存储.txt"); err != nil {
			log.Println("saveResults", err)
		}
	}
}

// saveResults writes the current ResultUrls to path, truncating any
// previous contents. It is a separate function so the file handle is
// closed at the end of each loop iteration — the old code deferred
// f.Close() inside the loop, which leaked one handle per iteration
// (deferred calls only run when the enclosing function returns, and
// Run never returns). It also checked the os.Create error only AFTER
// deferring Close, and dropped the Write error entirely.
func saveResults(path string) error {
	f, err := os.Create(path)
	if err != nil {
		return fmt.Errorf("creating %s: %w", path, err)
	}
	defer f.Close()
	_, err = f.Write([]byte(ResultUrls))
	return err
}
// DownloadWeb fetches url, appends every parsable absolute link found in
// the response body to ResultUrls, and feeds each link back into queue
// (asynchronously, so the caller's receive loop is not deadlocked).
// It returns a non-nil error when the request cannot be built or performed.
func DownloadWeb(url string, queue chan string) (err error) {
	// Per-request timeout so a stalled server cannot hang the crawler.
	client := &http.Client{Timeout: 30 * time.Second}
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		// The old code ignored this error; a failed NewRequest returns a
		// nil *Request, which would panic inside client.Do.
		log.Println("http new request error", err)
		return
	}
	// 自定义Header — masquerade as a browser so the site serves the page.
	req.Header.Set("User-Agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)")

	resp, err := client.Do(req)
	if err != nil {
		log.Println("http get error", err)
		return
	}
	// 函数结束后关闭相关链接 (close the body so the connection can be reused).
	defer resp.Body.Close()

	links := collectlinks.All(resp.Body)
	for _, link := range links {
		absolute := urlJoin(link, url)
		// urlJoin returns the sentinel " " for unparsable URLs — skip those.
		// BUG FIX: the old code tested `url != " "` (the request URL, which
		// is never " "), so broken links were never filtered out.
		if absolute != " " {
			log.Println("parse url", absolute)
			ResultUrls = ResultUrls + absolute + "\n"
			go func() {
				queue <- absolute
			}()
		}
	}
	return
}

// urlJoin resolves href against base and returns the resulting absolute
// URL as a string. If either input fails to parse, it returns a single
// space " ", which callers treat as a "skip this link" sentinel.
func urlJoin(href, base string) string {
	ref, refErr := url.Parse(href)
	if refErr != nil {
		return " "
	}
	root, baseErr := url.Parse(base)
	if baseErr != nil {
		return " "
	}
	resolved := root.ResolveReference(ref)
	return resolved.String()
}
