package venus

import (
	"bytes"
	"compress/gzip"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"net"
	"net/http"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/Qingluan/merkur"
	log "github.com/sirupsen/logrus"
)

const (
	// FORWARDING_KEY is the query-string key under which target links are
	// base64-encoded before being forwarded through this relay.
	// NOTE(review): "alll" looks like a typo, but this literal is a
	// wire-level protocol key — changing it would break existing peers.
	FORWARDING_KEY = "alll_links_will_encode_64_then_forwarding_to"
)

var (
	// DeFaultTimeout bounds every relayed upstream HTTP request.
	DeFaultTimeout = time.Second * 40
	// Log is the package logger; InitLog installs its text formatter.
	Log = log.New()
	// HttpUrl matches quoted absolute URLs in HTML/JS bodies
	// (e.g. "https://host/...", tolerating backslash-escaped slashes).
	HttpUrl = regexp.MustCompile(`['"]https?\:[\/\\]{2,4}.+?["']`)

	// HttpUrl2 matches quoted scheme-less URLs that still carry the colon
	// (e.g. "://host/...").
	HttpUrl2 = regexp.MustCompile(`['"]\:[\/\\]{2,4}.+?["']`)
	// HttpUrl3 matches quoted protocol-relative URLs (e.g. "//host/...").
	HttpUrl3 = regexp.MustCompile(`['"][\/\\]{2,4}.+?["']`)
	// LinksRever     = regexp.MustCompile(FORWARDING_KEY + "=\\S+")
	HostAddr = "localhost:8443" // default listen address of the relay
	// Domain is unused in this file; presumably set by other parts of the
	// package — TODO confirm before removing.
	Domain         = ""
	DEFAULT_SCHEME = "https" // scheme assumed for scheme-less links (see SetDefaultScheme)
	// init           = InitLog()
)

func InitLog() bool {
	Log.SetFormatter(&log.TextFormatter{
		DisableColors: true,
		FullTimestamp: true,
	})
	return true
}

// SetDefaultScheme overrides the scheme ("http"/"https") assumed when
// rewriting scheme-less links found in relayed bodies.
func SetDefaultScheme(scheme string) {
	DEFAULT_SCHEME = scheme
}

// SetDefaultListenAddress overrides the host:port this relay presents to
// clients.
func SetDefaultListenAddress(addr string) {
	HostAddr = addr
}

func ReadMsg(c net.Conn, limit ...int) ([]byte, error) {
	var buf []byte
	if limit != nil {
		buf = make([]byte, limit[0])
	} else {
		buf = make([]byte, 2048)
	}
	n, err := c.Read(buf)
	// c.

	return buf[:n], err
}

// Relayor reads raw HTTP requests from client connections, forwards them to
// their real destination (optionally through a proxy), and streams the
// rewritten responses back.
type Relayor struct {
	Host  string    // relay address substituted into rewritten requests — TODO confirm against RawToNewReq
	Proxy string    // merkur proxy URI; empty string means connect directly
	Datas *DataBase // request/response history recorded by Handle/HandleRes
}

// NewRelayer builds a Relayor bound to host that forwards through proxy
// (empty string for a direct connection), backed by a fresh data store.
func NewRelayer(host, proxy string) *Relayor {
	r := &Relayor{Host: host, Proxy: proxy}
	r.Datas = NewData()
	return r
}

// noRedirect is a CheckRedirect hook for http.Client that refuses to follow
// any redirect, so 3xx responses surface to the caller instead.
func noRedirect(req *http.Request, via []*http.Request) error {
	// Go convention: error strings are lowercase without trailing punctuation.
	return errors.New("don't redirect")
}

// Handle reads one raw HTTP request from c, rewrites it toward relay.Host,
// forwards it upstream (through relay.Proxy when set), and relays the
// response back over c via HandleRes.
//
// config may be nil; when non-nil its HEADERS are added to the outgoing
// request. The reconstructed request is recorded in relay.Datas.
func (relay *Relayor) Handle(c net.Conn, config *ServerConfig) (err error) {
	reqRaw, err := ReadMsg(c)
	if err != nil {
		return err
	}
	req, copyreq, err := RawToNewReq(string(reqRaw), relay.Host)
	if err != nil {
		return err
	}
	// Keep the body bytes so the request can be persisted with its headers.
	buf, _ := ioutil.ReadAll(copyreq.Body)
	if config != nil {
		for k, v := range config.HEADERS {
			req.Header.Add(k, v)
		}
	}
	saveReq, _ := RawToReq(ReqString(req)+string(buf), false)
	relay.Datas.Put(saveReq.URL.String(), saveReq, 1)

	if relay.Proxy != "" {
		client := merkur.NewProxyHttpClient(relay.Proxy)
		if client == nil {
			// Preserve original behavior: a proxy client that cannot be
			// built is silently skipped.
			return nil
		}
		client.Timeout = DeFaultTimeout
		// Hand redirects back to the browser instead of following them here.
		client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		}
		res, err := client.Do(req)
		if err != nil {
			log.Error(req.URL.String(), " | ", err)
			return err
		}
		defer res.Body.Close() // was leaked previously
		return relay.HandleRes(res, req, c)
	}

	// Direct (no proxy) path; note it intentionally keeps the default
	// redirect-following behavior of http.Client, as before.
	client := &http.Client{
		Timeout: DeFaultTimeout,
	}
	res, err := client.Do(req)
	if err != nil {
		return err
	}
	defer res.Body.Close() // was leaked previously
	// Propagate the relaying error (it was silently dropped before).
	return relay.HandleRes(res, req, c)
}

// HandleRes relays res back over the client connection c, rewriting it so
// the browsed site keeps flowing through this relay.
//
// Redirect Locations are re-pointed at the relay (the upstream target is
// preserved in a Location-To header for logging). text/html bodies are fully
// buffered, link-rewritten via MultiBuffer.Encode, re-gzipped when the
// upstream body was gzip-encoded, and sent with a corrected Content-Length;
// all other bodies are streamed through untouched. The response is also
// recorded in relay.Datas.
func (relay *Relayor) HandleRes(res *http.Response, req *http.Request, c net.Conn) (err error) {
	defer relay.Datas.Put(req.URL.String(), res, 0)

	// Log the outcome after the reply has been written.
	defer func() {
		if res.StatusCode/100 == 3 {
			// was StatusCode/300 == 1, which also swallowed 4xx/5xx into the
			// redirect branch; /100 == 3 matches only real redirects.
			log.Warn("[", res.Status, "]", " ", res.Header.Get("Location-To"), " | ", res.Header.Get("Location"))
		} else if res.StatusCode != 200 {
			log.Warn("[", res.Status, "] ", req.URL.String(), "\n", ReqString(req), "\n<--SEP--->\n", ResString(res))
		}
	}()

	// Re-point redirects at the relay; keep the upstream target in
	// Location-To so logs can show both.
	if location := res.Header.Get("Location"); location != "" {
		res.Header["Location"] = []string{PackUrl(location)}
		res.Header["Location-To"] = []string{location}
	}

	isText := strings.Contains(res.Header.Get("Content-type"), "text/html;")
	if !isText {
		// Nothing to rewrite: send the head, then stream the body as-is.
		c.Write([]byte(ResString(res)))
		io.Copy(c, res.Body)
		return
	}

	dealBuffer := &MultiBuffer{
		buffer: bytes.NewBuffer([]byte{}),
	}

	var reader io.Reader
	if res.Header.Get("Content-Encoding") == "gzip" {
		// Remember to re-compress after rewriting so headers stay truthful.
		dealBuffer.GzipMode()
		gz, gzErr := gzip.NewReader(res.Body)
		if gzErr != nil {
			log.Info("parse body gzip data error:", gzErr)
			return gzErr
		}
		defer gz.Close() // was leaked previously
		reader = gz
	} else {
		reader = res.Body
	}

	allbuf, err := ioutil.ReadAll(reader)
	if err != nil {
		log.Info("io.read:", err)
		return err
	}
	dealBuffer.Write(allbuf)

	dealedBuf, err := dealBuffer.Encode()
	if err != nil {
		// was: `return err` followed by an unreachable log.Fatal
		return err
	}

	// Link rewriting can change the body size; fix Content-Length if it did.
	if ls := res.Header.Get("Content-Length"); ls != "" {
		l := len(dealedBuf)
		if prev, _ := strconv.Atoi(ls); prev != l {
			res.Header.Set("Content-Length", fmt.Sprint(l))
			log.Warn(" ! [change]:", " len:", ls, "->", l)
		}
	}

	c.Write([]byte(ResString(res)))
	c.Write(dealedBuf)
	return nil
}

// MultiBuffer accumulates a response body in memory so its links can be
// rewritten (see Encode), optionally re-gzipping the result.
type MultiBuffer struct {
	buffer *bytes.Buffer // accumulated (already-decompressed) body bytes
	GZIP   bool          // when true, Encode re-compresses its output with gzip
	reader io.ReadCloser // optional attached reader; only used by Close — TODO confirm it is ever set
}

// GzipMode marks the buffered content for gzip re-compression when Encode
// runs. It always returns nil.
func (m *MultiBuffer) GzipMode() (err error) {
	m.GZIP = true
	return nil
}

// Write appends p to the in-memory buffer, satisfying io.Writer.
func (m *MultiBuffer) Write(p []byte) (int, error) {
	return m.buffer.Write(p)
}

// Close releases the attached reader when one is present; otherwise it is a
// no-op.
func (m *MultiBuffer) Close() error {
	if m.reader == nil {
		return nil
	}
	return m.reader.Close()
}

// Read drains bytes from the in-memory buffer, satisfying io.Reader.
func (m *MultiBuffer) Read(p []byte) (n int, err error) {
	return m.buffer.Read(p)
}

// Bytes consumes the buffer and returns everything it currently holds.
func (m *MultiBuffer) Bytes() ([]byte, error) {
	return ioutil.ReadAll(m)
}

// OrderdMapFromBigToSmall flattens s into {key, value} pairs ordered by
// descending key length, so that longer URLs are replaced before any URL
// that is a prefix of them.
func OrderdMapFromBigToSmall(s map[string]string) (o [][2]string) {
	keys := make([]string, 0, len(s))
	for key := range s {
		keys = append(keys, key)
	}
	byLengthDesc := func(a, b int) bool { return len(keys[a]) > len(keys[b]) }
	sort.Slice(keys, byLengthDesc)
	for _, key := range keys {
		o = append(o, [2]string{key, s[key]})
	}
	return
}

// Encode rewrites every URL found in the buffered body so it points back at
// this relay (via PackUrl), then re-compresses the result when GzipMode was
// requested. It consumes the buffer (through m.Bytes).
func (m *MultiBuffer) Encode() ([]byte, error) {
	// n := time.Now()
	// defer func() { log.Warn("encode used:", time.Now().Sub(n)) }()

	buf, err := m.Bytes()
	if err != nil {
		log.Error(err)
		return nil, err
	}

	// mk maps original URL text -> packed replacement text.
	mk := make(map[string]string)

	// fmt.Println("buf:", buf)

	// Pass 1: quoted absolute URLs; strip the surrounding quotes before
	// packing.
	for _, realHost := range HttpUrl.FindAll(buf, -1) {
		// log.Info(realHost)
		oldUrl := string(realHost[1 : len(realHost)-1])
		oldUrl = strings.TrimSpace(oldUrl)
		o := PackUrl(oldUrl)
		mk[oldUrl] = o
	}
	// Pass 2: quoted scheme-less URLs keeping the colon ("://..."):
	// prepend DEFAULT_SCHEME to pack, then strip it again with [5:].
	// NOTE(review): the [5:] slicing assumes len(DEFAULT_SCHEME) == 5
	// ("https") — verify before ever changing the default scheme.
	for _, realHost := range HttpUrl2.FindAll(buf, -1) {
		// fmt.Println(string(realHost))
		oldUrl := DEFAULT_SCHEME + string(realHost[1:len(realHost)-1])
		o := PackUrl(oldUrl)
		// fmt.Println(oldUrl[5:], o[5:])
		mk[oldUrl[5:]] = o[5:]
	}
	// Pass 3: quoted protocol-relative URLs ("//..."), same [5:] caveat.
	for _, realHost := range HttpUrl3.FindAll(buf, -1) {
		// fmt.Println(string(realHost))
		oldUrl := DEFAULT_SCHEME + ":" + string(realHost[1:len(realHost)-1])
		o := PackUrl(oldUrl)
		// fmt.Println(oldUrl[5:], o[5:])
		mk[oldUrl[5:]] = o[5:]
	}
	// Replace longest keys first so a URL that is a prefix of a longer one
	// cannot corrupt the longer match.
	for _, vs := range OrderdMapFromBigToSmall(mk) {
		k, v := vs[0], vs[1]
		buf = bytes.ReplaceAll(buf, []byte(k), []byte(v))
	}
	if m.GZIP {
		// Upstream body was gzip-encoded: re-compress the rewritten bytes so
		// the response's Content-Encoding header stays truthful.
		buffer := bytes.NewBuffer([]byte{})
		writer := gzip.NewWriter(buffer)
		// log.Info("gzip len:", len(buf))
		// cn := 0
		// ll := len(buf)
		// for cn < ll {
		// fmt.Println(string(buf))
		if _, err := writer.Write(buf); err != nil {
			log.Warn("gzip compress err:", err)
			return nil, err
		}
		writer.Close()
		// }

		// buffer.Bytes()
		return buffer.Bytes(), err
	}
	return buf, err
}

func ResString(res *http.Response, replaces ...[2]string) string {
	header := fmt.Sprintf("%s %s\r\n", res.Proto, res.Status)
	for k, vs := range res.Header {
		if len(replaces) > 0 {
			f := false
			for _, ks := range replaces {
				if k == ks[0] {
					header += fmt.Sprintf("%s: %s\r\n", k, ks[1])
					f = true
					break
				}
			}
			if !f {
				header += fmt.Sprintf("%s: %s\r\n", k, strings.Join(vs, " "))
			}
		} else {
			header += fmt.Sprintf("%s: %s\r\n", k, strings.Join(vs, " "))
		}
	}
	return header + "\r\n"
}

func ReqString(req *http.Request, replaces ...[2]string) string {
	header := fmt.Sprintf("%s %s %s\r\n", req.Method, req.URL.Path, req.Proto)
	for k, vs := range req.Header {
		if len(replaces) > 0 {
			f := false
			for _, ks := range replaces {
				if k == ks[0] {
					header += fmt.Sprintf("%s: %s\r\n", k, ks[1])
					f = true
					break
				}
			}
			if !f {
				header += fmt.Sprintf("%s: %s\r\n", k, strings.Join(vs, " "))
			}
		} else {
			header += fmt.Sprintf("%s: %s\r\n", k, strings.Join(vs, " "))
		}
	}
	return header + "\r\n"
}

// ResBody returns a reader over the decoded response body: a gzip reader
// when res is gzip-encoded, otherwise res.Body itself. The caller remains
// responsible for closing res.Body (and the gzip reader, when returned).
func ResBody(res *http.Response) (reader io.Reader, err error) {
	if res.Header.Get("Content-Encoding") != "gzip" {
		return res.Body, nil
	}
	gz, gzErr := gzip.NewReader(res.Body)
	if gzErr != nil {
		log.Info("parse body gzip data error:", gzErr)
		// Return gz (typed nil) alongside the error to mirror the original
		// named-return behavior exactly.
		return gz, gzErr
	}
	return gz, nil
}
