package downloader

import (
	"bytes"
	"context"
	"net"
	"time"

	"github.com/PuerkitoBio/goquery"
	"github.com/bitly/go-simplejson"

	//    iconv "github.com/djimenez/iconv-go"
	"gitee.com/congqian/dense-spider/core/common/mlog"
	"gitee.com/congqian/dense-spider/core/common/page"
	"gitee.com/congqian/dense-spider/core/common/request"
	"gitee.com/congqian/dense-spider/core/common/util"
	"gitee.com/congqian/dense-spider/core/downloader"
	"github.com/chromedp/cdproto/cdp"
	"github.com/chromedp/cdproto/network"
	"github.com/chromedp/chromedp"

	//    "golang.org/x/text/encoding/simplifiedchinese"
	//    "golang.org/x/text/transform"
	"io"
	"io/ioutil"
	"net/http"
	"net/url"

	"golang.org/x/net/html/charset"
	//    "regexp"
	//    "golang.org/x/net/html"
	"strings"
	//"fmt"
)

// The ChromeDownloader download page by headless Chrome via chromedp
// (the "html" case); "json", "jsonp" and "text" are fetched with plain
// net/http instead.
// The "html" content is contained in dom parser of package goquery.
// The "json" content is saved.
// The "jsonp" content is modified to json.
// The "text" content will save body plain text only.
// The page result is saved in Page.
type ChromeDownloader struct {
	Proxies          []string           // optional proxy URLs; one is chosen at random per session
	ChromeContext    context.Context    // lazily created chromedp context, reused across downloads
	ChromeCancelFunc context.CancelFunc // cancels ChromeContext; invoked by Close
}

// NewChromeDownloader returns a downloader that will route traffic
// through one of the given proxies (nil means direct connections).
// The headless Chrome context itself is created lazily on first use.
func NewChromeDownloader(proxies []string) *ChromeDownloader {
	d := new(ChromeDownloader)
	d.Proxies = proxies
	return d
}
// Close tears down the headless Chrome session, if one was started.
//
// The context must be cancelled *before* waiting on the allocator:
// ExecAllocator.Wait blocks until every browser spawned by the
// allocator has exited, which only happens after cancellation — the
// original order (Wait first) could block forever.
func (this *ChromeDownloader) Close() {
	if this.ChromeCancelFunc != nil {
		this.ChromeCancelFunc()
	}
	if this.ChromeContext != nil {
		chromedp.FromContext(this.ChromeContext).Allocator.Wait()
	}
}
// Download dispatches the request to the handler matching its declared
// response type: "html" is rendered in headless Chrome, "json"/"jsonp"
// and "text" are fetched over plain HTTP. An unknown type is logged and
// the untouched page is returned.
func (this *ChromeDownloader) Download(req *request.Request) *page.Page {
	p := page.NewPage(req)
	switch mtype := req.GetResponceType(); mtype {
	case "html":
		return this.downloadHtml(p, req)
	case "json", "jsonp":
		return this.downloadJson(p, req)
	case "text":
		return this.downloadText(p, req)
	default:
		mlog.LogInst().LogError("error request type:" + mtype)
		return p
	}
}

// changeCharsetEncodingAuto converts the response body to UTF-8, letting
// golang.org/x/net/html/charset sniff the source encoding from the
// Content-Type header and the body itself. On a conversion error the
// bytes read so far are still returned as a string.
func (this *ChromeDownloader) changeCharsetEncodingAuto(contentTypeStr string, sor io.ReadCloser) string {
	reader, err := charset.NewReader(sor, contentTypeStr)
	if err != nil {
		// Fall back to the raw stream when no converter can be built.
		mlog.LogInst().LogError(err.Error())
		reader = sor
	}

	body, err := ioutil.ReadAll(reader)
	if err != nil {
		// Keep whatever was decoded so far; gb2312 pages, for example,
		// can fail mid-stream with "simplifiedchinese: invalid GBK encoding".
		mlog.LogInst().LogError(err.Error())
	}
	return string(body)
}
// NewProxyHeadless starts a headless Chrome session routed through the
// given proxy server, carrying over the request's User-Agent header.
// The returned CancelFunc tears down both the tab context and the
// allocator — the original discarded the allocator's cancel func
// (`allocator, _ :=`), leaking the browser process.
func NewProxyHeadless(req *request.Request, proxy string) (context.Context, context.CancelFunc) {
	opts := []chromedp.ExecAllocatorOption{
		chromedp.ProxyServer(proxy),
		chromedp.UserAgent(req.GetHeader().Get("User-Agent")),
		chromedp.Flag("headless", true),
		chromedp.Flag("hide-scrollbars", true),
		chromedp.Flag("mute-audio", true),
		chromedp.Flag("disable-gpu", true),
	}
	allocCtx, allocCancel := chromedp.NewExecAllocator(context.Background(), opts...)
	ctx, ctxCancel := chromedp.NewContext(allocCtx)
	return ctx, func() {
		ctxCancel()
		allocCancel()
	}
}
// NewHeadless starts a headless Chrome session with no proxy, carrying
// over the request's User-Agent header. The returned CancelFunc tears
// down both the tab context and the allocator — the original discarded
// the allocator's cancel func (`allocator, _ :=`), leaking the browser
// process.
func NewHeadless(req *request.Request) (context.Context, context.CancelFunc) {
	opts := []chromedp.ExecAllocatorOption{
		chromedp.UserAgent(req.GetHeader().Get("User-Agent")),
		chromedp.Flag("headless", true),
		chromedp.Flag("hide-scrollbars", true),
		chromedp.Flag("mute-audio", true),
		chromedp.Flag("disable-gpu", true),
	}
	allocCtx, allocCancel := chromedp.NewExecAllocator(context.Background(), opts...)
	ctx, ctxCancel := chromedp.NewContext(allocCtx)
	return ctx, func() {
		ctxCancel()
		allocCancel()
	}
}

// getHtml builds the chromedp task list that loads req's URL and writes
// the rendered <body> outer HTML into res. Steps: enable network
// events, forward the request's headers and cookies into Chrome,
// navigate, wait briefly for JS rendering, extract the DOM, then copy
// Chrome's cookie jar back onto the page result.
func getHtml(p *page.Page, req *request.Request, res *string) chromedp.Tasks {
	// The original declared a nil map here and wrote into it — a
	// guaranteed panic. Also flatten each []string header value to a
	// single string: CDP expects one string per header, not a JSON array.
	header := make(map[string]interface{}, len(req.GetHeader()))
	for k, v := range req.GetHeader() {
		header[k] = strings.Join(v, ",")
	}
	return chromedp.Tasks{
		network.Enable(),
		network.SetExtraHTTPHeaders(network.Headers(header)),
		chromedp.ActionFunc(func(ctx context.Context) error {
			// Seed Chrome with the request's cookies.
			for _, cookie := range req.GetCookies() {
				expr := cdp.TimeSinceEpoch(cookie.Expires)
				_, err := network.SetCookie(cookie.Name, cookie.Value).
					WithExpires(&expr).
					WithDomain(cookie.Domain).
					WithHTTPOnly(cookie.HttpOnly).
					Do(ctx)
				if err != nil {
					return err
				}
			}
			return nil
		}),
		chromedp.Navigate(req.GetUrl()),
		chromedp.Sleep(2 * time.Second), // crude wait for JS-rendered content
		chromedp.OuterHTML("body", res, chromedp.ByQuery),
		chromedp.ActionFunc(func(ctx context.Context) error {
			// Copy Chrome's cookie jar back onto the page result.
			cookies, err := network.GetAllCookies().Do(ctx)
			if err != nil {
				return err
			}
			httpCookies := make([]*http.Cookie, len(cookies))
			for i, cookie := range cookies {
				httpCookies[i] = &http.Cookie{
					Name:     cookie.Name,
					Domain:   cookie.Domain,
					HttpOnly: cookie.HTTPOnly,
					Value:    cookie.Value,
					Path:     cookie.Path,
					// CDP's Cookie.Expires is seconds since the Unix
					// epoch, not an offset from now as the original
					// assumed.
					Expires: time.Unix(int64(cookie.Expires), 0),
				}
			}
			p.SetCookies(httpCookies)
			return nil
		}),
	}
}
// downloadChromeHtml renders the request URL in headless Chrome and
// returns the page plus the rendered <body> HTML. The chromedp context
// (with or without a random proxy) is created lazily on first call and
// reused for subsequent downloads.
func (this *ChromeDownloader) downloadChromeHtml(p *page.Page, req *request.Request) (*page.Page, string) {
	urlstr := req.GetUrl()
	if len(urlstr) == 0 {
		mlog.LogInst().LogError("url is empty")
		p.SetStatus(true, "url is empty")
		return p, ""
	}
	if this.ChromeContext == nil {
		if this.Proxies != nil {
			this.ChromeContext, this.ChromeCancelFunc = NewProxyHeadless(req, downloader.GetRandomProxy(this.Proxies))
		} else {
			this.ChromeContext, this.ChromeCancelFunc = NewHeadless(req)
		}
	}
	var html string
	// Run the navigation/extraction task list on the shared Chrome instance.
	if err := chromedp.Run(this.ChromeContext, getHtml(p, req, &html)); err != nil {
		mlog.LogInst().LogError(err.Error())
		// Mark the page as failed so downloadHtml bails out instead of
		// parsing an empty body — consistent with downloadFile's error path.
		p.SetStatus(true, err.Error())
	}

	return p, html
}

// Download file and change the charset of page charset.
// downloadFile fetches the request URL over plain net/http (optionally
// through a random proxy), records the response headers and cookies on
// the page, and returns the body converted to UTF-8. On failure the
// page status is set to error and an empty body is returned.
func (this *ChromeDownloader) downloadFile(p *page.Page, req *request.Request) (*page.Page, string) {
	urlstr := req.GetUrl()
	if len(urlstr) == 0 {
		mlog.LogInst().LogError("url is empty")
		p.SetStatus(true, "url is empty")
		return p, ""
	}

	client := this.newHTTPClient(req)

	httpreq, err := http.NewRequest(req.GetMethod(), req.GetUrl(), strings.NewReader(req.GetPostdata()))
	if err != nil {
		// The original ignored this error and then dereferenced a nil
		// request on a malformed method/URL.
		mlog.LogInst().LogError(err.Error())
		p.SetStatus(true, err.Error())
		return p, ""
	}
	if header := req.GetHeader(); header != nil {
		httpreq.Header = header
	}
	for _, cookie := range req.GetCookies() {
		httpreq.AddCookie(cookie)
	}

	resp, err := client.Do(httpreq)
	if err != nil {
		// A CheckRedirect func may abort redirects with the sentinel
		// error "normal"; that case is treated as success.
		if e, ok := err.(*url.Error); !ok || e.Err == nil || e.Err.Error() != "normal" {
			mlog.LogInst().LogError(err.Error())
			p.SetStatus(true, err.Error())
			return p, ""
		}
	}
	if resp == nil {
		// Guard the "normal" path: the original dereferenced resp
		// unconditionally and could panic here.
		p.SetStatus(true, "nil http response")
		return p, ""
	}
	defer resp.Body.Close()

	p.SetHeader(resp.Header)
	p.SetCookies(resp.Cookies())

	// Convert the body to UTF-8 based on the Content-Type charset.
	bodyStr := this.changeCharsetEncodingAuto(resp.Header.Get("Content-Type"), resp.Body)
	return p, bodyStr
}

// newHTTPClient builds the HTTP client for one download: when proxies
// are configured (and the chosen one parses) it uses that proxy with a
// 10s dial timeout and a 5s response-header timeout; otherwise a plain
// client. Either way the request's redirect policy is honored.
func (this *ChromeDownloader) newHTTPClient(req *request.Request) *http.Client {
	if this.Proxies != nil {
		if proxy, err := url.Parse(downloader.GetRandomProxy(this.Proxies)); err == nil {
			netTransport := &http.Transport{
				Proxy: http.ProxyURL(proxy),
				Dial: func(netw, addr string) (net.Conn, error) {
					return net.DialTimeout(netw, addr, 10*time.Second)
				},
				MaxIdleConnsPerHost:   10,              // max idle connections kept per host
				ResponseHeaderTimeout: 5 * time.Second, // give up if headers take longer
			}
			return &http.Client{
				Transport:     netTransport,
				CheckRedirect: req.GetRedirectFunc(),
			}
		}
	}
	return &http.Client{CheckRedirect: req.GetRedirectFunc()}
}

// downloadHtml renders the page in headless Chrome, parses the result
// with goquery, and stores both the serialized HTML and the DOM parser
// on the page.
func (this *ChromeDownloader) downloadHtml(p *page.Page, req *request.Request) *page.Page {
	p, destbody := this.downloadChromeHtml(p, req)
	if !p.IsSucc() {
		return p
	}

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(destbody))
	if err != nil {
		mlog.LogInst().LogError(err.Error())
		p.SetStatus(true, err.Error())
		return p
	}

	body, err := doc.Html()
	if err != nil {
		mlog.LogInst().LogError(err.Error())
		p.SetStatus(true, err.Error())
		return p
	}

	p.SetBodyStr(body).SetHtmlParser(doc).SetStatus(false, "")
	return p
}

// downloadJson fetches the body over HTTP, converts jsonp payloads to
// plain json where needed, parses it with simplejson, and stores both
// the raw string and the parsed tree on the page.
func (this *ChromeDownloader) downloadJson(p *page.Page, req *request.Request) *page.Page {
	p, destbody := this.downloadFile(p, req)
	if !p.IsSucc() {
		return p
	}

	// jsonp responses are wrapped in a callback; strip it first.
	if req.GetResponceType() == "jsonp" {
		destbody = util.JsonpToJson(destbody)
	}
	body := []byte(destbody)

	parsed, err := simplejson.NewJson(body)
	if err != nil {
		mlog.LogInst().LogError(string(body) + "\t" + err.Error())
		p.SetStatus(true, err.Error())
		return p
	}

	// json result
	p.SetBodyStr(string(body)).SetJson(parsed).SetStatus(false, "")
	return p
}

// downloadText fetches the body over HTTP and stores it on the page
// verbatim (downloadFile has already converted it to UTF-8).
func (this *ChromeDownloader) downloadText(p *page.Page, req *request.Request) *page.Page {
	var body string
	if p, body = this.downloadFile(p, req); !p.IsSucc() {
		return p
	}
	p.SetBodyStr(body).SetStatus(false, "")
	return p
}
