package spider

import (
	"encoding/json"
	"fmt"
	"log"
	"math/rand"
	"net"
	"net/http"
	"strconv"
	"study/dao"
	"study/model"
	"study/util"
	"time"

	"github.com/gocolly/colly"
	"github.com/gocolly/colly/extensions"
)

// var imgPath = "d:/data01/images/20210818/"

// SpiderCartoon crawls list pages 1..37 of the gaoxiaomanhua category on
// m.manwaiyin.com and hands every list item's detail link to SpiderDetail.
func SpiderCartoon() {
	const baseURL = "https://m.manwaiyin.com/weimanhua/gaoxiaomanhua"

	for page := 1; page < 38; page++ {
		url := baseURL
		// Page 1 has no "/page/N" suffix on this site.
		if page > 1 {
			url = baseURL + "/page/" + strconv.Itoa(page)
		}

		c := newCollectorLocal()
		c.OnHTML("ul[class='post-loop post-loop-default cols-0']", func(e *colly.HTMLElement) {
			fmt.Println("=====================title=================")
			e.ForEach("li", func(_ int, item *colly.HTMLElement) {
				// Detail-page link for this list entry.
				href := item.ChildAttr("h2[class='item-title'] a", "href")
				if href == "" {
					// Skip malformed entries instead of visiting "".
					return
				}
				SpiderDetail(href)
			})
		})

		// BUG FIX: the original discarded Visit's error, so a blocked or
		// failed page vanished silently.
		if err := c.Visit(url); err != nil {
			log.Println("visit failed:", url, err)
		}
	}
}
// SpiderDetail crawls a single cartoon detail page, parses the embedded
// JSON-LD metadata block and, for single-image entries only, downloads the
// image and persists a Joke record via dao.Db.
func SpiderDetail(href string) {
	detailCollector := newCollectorLocal()
	detailCollector.OnHTML("html", func(detailEle *colly.HTMLElement) {
		raw := detailEle.ChildText("script[type='application/ld+json']")

		var data Data
		// BUG FIX: the original ignored the Unmarshal error and carried on
		// with a zero-value Data.
		if err := json.Unmarshal([]byte(raw), &data); err != nil {
			log.Println("parse ld+json failed:", href, err)
			return
		}
		// Only single-image entries are stored; multi-image comics are skipped.
		if len(data.Images) != 1 {
			return
		}

		now := time.Now()
		joke := model.Joke{}
		joke.Title = data.Title
		joke.SourceId = 652 // fixed source id for this site
		joke.Src = data.Url
		// Fabricated engagement counters so imported entries look organic.
		joke.Good = int(util.RandInt64(int64(1000), int64(5000)))
		joke.Bad = rand.Intn(500)
		joke.CreateTime = now
		joke.UpdateTime = now
		joke.Type = 1

		// BUG FIX: the original called img.Bounds() unguarded — a failed
		// fetch would panic on a nil image.
		// NOTE(review): GetImgInfo's second return is discarded here as in
		// the original; confirm whether it is an error worth surfacing.
		img, _ := util.GetImgInfo(data.Images[0])
		if img == nil {
			log.Println("no image info for:", data.Images[0])
			return
		}
		joke.Height = img.Bounds().Dy()
		joke.Width = img.Bounds().Dx()

		// BUG FIX: guard against a failed download before persisting;
		// the original would have stored an empty image name.
		imgName, _, _ := util.DownImg(data.Images[0], imgPath)
		if imgName == "" {
			log.Println("download image failed:", data.Images[0])
			return
		}
		joke.Img = "20210818/" + imgName
		dao.Db.Create(&joke)
		fmt.Println(data)
	})
	if err := detailCollector.Visit(href); err != nil {
		log.Println("visit failed:", href, err)
	}
}

// Data mirrors the JSON-LD metadata block embedded in a cartoon detail page
// (<script type="application/ld+json">), as consumed by SpiderDetail.
type Data struct {
	Images []string `json:"images"` // image URLs of the comic panels
	Title  string   `json:"title"`  // comic title
	Url    string   `json:"@id"`    // canonical page URL (the JSON-LD @id)
}

// newCollectorLocal builds a synchronous colly collector with basic
// anti-scraping measures (random User-Agent and Referer headers), a global
// rate limit, and request/error logging hooks.
func newCollectorLocal() *colly.Collector {
	c := colly.NewCollector(
		colly.Async(false),
	)
	// Anti-scraping counter-measures: randomized UA and Referer headers.
	extensions.RandomUserAgent(c)
	extensions.Referer(c)
	// BUG FIX: a LimitRule without DomainGlob/DomainRegexp never matches and
	// Limit returns an error ("no pattern defined") that the original
	// ignored — the rate limit was a silent no-op. Match all domains and
	// surface the error.
	if err := c.Limit(&colly.LimitRule{
		// Delay: 5 * time.Second,
		DomainGlob:  "*",
		Parallelism: 2,
		RandomDelay: 5 * time.Second,
	}); err != nil {
		log.Println("limit rule:", err)
	}

	c.OnRequest(func(r *colly.Request) {
		// fmt.Println("Visiting", r.URL)
	})

	c.OnError(func(_ *colly.Response, err error) {
		log.Println("Something went wrong:", err)
	})

	c.OnResponse(func(r *colly.Response) {
		fmt.Println("Visited", r.Request.URL)
	})

	return c
}

// newCollectorLocal2 builds a synchronous collector with an explicit HTTP
// transport (generous 90-second timeouts), charset auto-detection, a global
// rate limit, and anti-scraping UA/Referer extensions.
func newCollectorLocal2() *colly.Collector {
	c := colly.NewCollector()

	// Custom transport: environment proxy plus long dial/idle/TLS timeouts.
	transport := &http.Transport{
		Proxy: http.ProxyFromEnvironment,
		DialContext: (&net.Dialer{
			Timeout:   90 * time.Second,
			KeepAlive: 90 * time.Second,
			DualStack: true,
		}).DialContext,
		MaxIdleConns:          100,
		IdleConnTimeout:       90 * time.Second,
		TLSHandshakeTimeout:   90 * time.Second,
		ExpectContinueTimeout: 90 * time.Second,
	}
	c.WithTransport(transport)

	// Never revisit the same URL; run synchronously; auto-detect charset.
	c.AllowURLRevisit = false
	c.Async = false
	c.DetectCharset = true

	// One concurrent request per matched domain (glob "*" matches all),
	// with up to 5s of random delay before each new request.
	// Note: RandomDelay is effective in synchronous mode as well.
	rule := &colly.LimitRule{
		DomainGlob:  "*",
		Parallelism: 1,
		RandomDelay: 5 * time.Second,
	}
	if err := c.Limit(rule); err != nil {
		fmt.Println(err)
	}

	// Anti-scraping counter-measures: randomized UA and Referer headers.
	extensions.RandomUserAgent(c)
	extensions.Referer(c)

	return c
}
