package proxy

import (
    "bufio"
    "errors"
    "fmt"
    "io"
    "net"
    "net/textproto"
    "net/url"
    "strings"
    "sync"
    "time"
    "unicode"
)

// http carries the per-exchange state for one HTTP request handled on
// a proxied client connection.
type http struct {
    c *conn // the client connection this exchange belongs to
    req *request // request parsed by Serve; nil until one has been read
}

// newHttp returns an http exchange handler bound to connection c.
func (c *conn) newHttp() *http {
    return &http{c: c}
}

// isCommonNetReadError reports whether err is a common error
// encountered during reading a request off the network when the
// client has gone away or had its read fail somehow. This is used to
// determine which logs are interesting enough to log about.
//
// It unwraps err (errors.Is / errors.As), so errors wrapped with %w
// are classified the same as their underlying cause.
func isCommonNetReadError(err error) bool {
    if errors.Is(err, io.EOF) {
        return true
    }
    var neterr net.Error
    if errors.As(err, &neterr) && neterr.Timeout() {
        return true
    }
    var oe *net.OpError
    if errors.As(err, &oe) && oe.Op == "read" {
        return true
    }
    return false
}

// badRequestError describes why a client's request was rejected. Its
// text is written back to the client verbatim and unescaped, so it
// must remain plain text with no user data or embedded errors.
type badRequestError string

// Error prefixes the reason with "Bad Request: ".
func (e badRequestError) Error() string {
    return "Bad Request: " + string(e)
}

// Serve handles one proxied exchange on the connection: it reads the
// client's request, dials the upstream peer, acknowledges (CONNECT) or
// rewrites (plain HTTP) the request, and then hands the connection to
// the relay loop.
func (h *http) Serve() {
    // Minimal header tail for error replies written straight to the client.
    const errorHeaders = "\r\nContent-Type: text/plain; charset=utf-8\r\nConnection: close\r\n\r\n"

    var err error
    h.req, err = h.readRequest()
    if err != nil {
        // TODO: reply "431 Request Header Fields Too Large" when the
        // header section exceeds the read limit (errTooLarge), mirroring
        // net/http's handling.

        if isCommonNetReadError(err) {
            return // client is gone; don't reply
        }

        publicErr := "400 Bad Request"
        if v, ok := err.(badRequestError); ok {
            publicErr = publicErr + ": " + string(v)
        }

        // WriteString, not Fprintf: publicErr can contain client-derived
        // text, which must not be interpreted as a format string.
        io.WriteString(h.c.rwc, "HTTP/1.1 "+publicErr+errorHeaders+publicErr)
        return
    }

    h.c.peerHost = h.req.Host
    if err = h.c.connect(); err != nil {
        // Upstream dial failed; report it rather than relaying over a
        // connection that was never established.
        const publicErr = "502 Bad Gateway"
        io.WriteString(h.c.rwc, "HTTP/1.1 "+publicErr+errorHeaders+publicErr)
        return
    }

    if strings.ToUpper(h.req.Method) == "CONNECT" {
        // Tunnel is up: acknowledge so the client starts sending raw bytes.
        fmt.Fprintf(h.c.bufw, "%s %d OK\r\n\r\n", h.req.Proto, 200)
    } else {
        // Plain HTTP: strip hop-by-hop proxy headers before forwarding.
        delete(h.req.Header, "Host")
        connection := h.req.Header.Get("Proxy-Connection")
        if len(connection) > 0 {
            h.req.Header.Del("Proxy-Connection")
            h.req.Header.Add("Connection", connection)
        }

        // TODO: forward the rewritten request line and headers upstream.
    }

    h.c.relay()
}


// request holds the pieces of a parsed HTTP request line and header
// block that the proxy needs; a pared-down analogue of http.Request.
type request struct {
    Method string // e.g. "GET" or "CONNECT", as parsed from the request line

    URL *url.URL // parsed form of RequestURI

    Header textproto.MIMEHeader // request headers exactly as read off the wire

    RequestURI string // request-target as sent by the client, unmodified
    Proto string // e.g. "HTTP/1.1"
    Host string // URL host if present, otherwise the Host header value
}

// readRequest applies the server's header read deadline to the
// connection, parses one request from its buffered reader, and arms
// the write deadline (if configured) as it returns.
func (h *http) readRequest() (*request, error) {
    start := time.Now()

    // Bound how long we wait for the request headers, when configured.
    // A zero deadline means no limit.
    var hdrDeadline time.Time
    if d := h.c.server.readHeaderTimeout(); d != 0 {
        hdrDeadline = start.Add(d)
    }
    // TODO: also honor a whole-request ReadTimeout deadline once body
    // reading is implemented.
    h.c.rwc.SetReadDeadline(hdrDeadline)

    // Arm the write deadline on the way out, so the response window
    // starts after the headers have been consumed.
    if d := h.c.server.WriteTimeout; d != 0 {
        defer func() {
            h.c.rwc.SetWriteDeadline(time.Now().Add(d))
        }()
    }

    return readRequest(h.c.bufr)
}

var textprotoReaderPool sync.Pool

func newTextprotoReader(br *bufio.Reader) *textproto.Reader {
    if v := textprotoReaderPool.Get(); v != nil {
        tr := v.(*textproto.Reader)
        tr.R = br
        return tr
    }
    return textproto.NewReader(br)
}

// putTextprotoReader returns r to the pool, first clearing its buffered
// reader so the pool does not pin the underlying connection's buffers.
func putTextprotoReader(r *textproto.Reader) {
    r.R = nil
    textprotoReaderPool.Put(r)
}

// parseRequestLine splits a request line such as "GET /foo HTTP/1.1"
// into its method, request-URI, and protocol parts. ok is false when
// the line does not contain at least two spaces; the parts are then
// all empty.
func parseRequestLine(line string) (method, requestURI, proto string, ok bool) {
    method, rest, found := strings.Cut(line, " ")
    if !found {
        return "", "", "", false
    }
    requestURI, proto, found = strings.Cut(rest, " ")
    if !found {
        return "", "", "", false
    }
    return method, requestURI, proto, true
}

// badStringError reports a parse failure together with the offending
// input string.
type badStringError struct {
    what string
    str  string
}

// Error renders as: what "str" (the string is quoted).
func (e *badStringError) Error() string {
    return fmt.Sprintf("%s %q", e.what, e.str)
}

// validMethod reports whether method is an acceptable HTTP method
// token: non-empty and composed solely of Unicode letters and digits.
func validMethod(method string) bool {
    // Reject the empty string: a request line starting with a space
    // would otherwise yield an "" method that passed the loop vacuously.
    if method == "" {
        return false
    }
    for _, r := range method {
        if !unicode.IsDigit(r) && !unicode.IsLetter(r) {
            return false
        }
    }
    return true
}

// readRequest parses one HTTP/1.x request — the request line plus the
// MIME header block — from b. It does not read any message body.
// Malformed input yields a *badStringError. An EOF on the very first
// line is returned as io.EOF (clean close before any request); an EOF
// after that is converted to io.ErrUnexpectedEOF by the deferred hook.
func readRequest(b *bufio.Reader) (req *request, err error) {
    tp := newTextprotoReader(b)
    req = new(request)

    // First line: GET /index.html HTTP/1.0
    var s string
    if s, err = tp.ReadLine(); err != nil {
        // tp is intentionally not returned to the pool on this path;
        // losing one pooled reader on a failed first read is harmless.
        return nil, err
    }
    // Installed only after the first successful read: from here on an
    // EOF means the client stopped mid-request. Note err is the named
    // result, so the deferred rewrite affects every later return.
    defer func() {
        putTextprotoReader(tp)
        if err == io.EOF {
            err = io.ErrUnexpectedEOF
        }
    }()

    var ok bool
    req.Method, req.RequestURI, req.Proto, ok = parseRequestLine(s)
    if !ok {
        return nil, &badStringError{"malformed HTTP request", s}
    }
    if !validMethod(req.Method) {
        return nil, &badStringError{"invalid method", req.Method}
    }
    rawurl := req.RequestURI

    // CONNECT requests are used two different ways, and neither uses a full URL:
    // The standard use is to tunnel HTTPS through an HTTP proxy.
    // It looks like "CONNECT www.google.com:443 HTTP/1.1", and the parameter is
    // just the authority section of a URL. This information should go in req.URL.Host.
    //
    // The net/rpc package also uses CONNECT, but there the parameter is a path
    // that starts with a slash. It can be parsed with the regular URL parser,
    // and the path will end up in req.URL.Path, where it needs to be in order for
    // RPC to work.
    justAuthority := req.Method == "CONNECT" && !strings.HasPrefix(rawurl, "/")
    if justAuthority {
        // Prepend a scheme so ParseRequestURI treats the authority as a host.
        rawurl = "http://" + rawurl
    }

    if req.URL, err = url.ParseRequestURI(rawurl); err != nil {
        return nil, err
    }

    if justAuthority {
        // Strip the bogus "http://" back off.
        req.URL.Scheme = ""
    }

    // Subsequent lines: Key: value.
    req.Header, err = tp.ReadMIMEHeader()
    if err != nil {
        return nil, err
    }

    // RFC 2616: Must treat
    //	GET /index.html HTTP/1.1
    //	Host: www.google.com
    // and
    //	GET http://www.google.com/index.html HTTP/1.1
    //	Host: doesntmatter
    // the same. In the second case, any Host line is ignored.
    req.Host = req.URL.Host
    if req.Host == "" {
        req.Host = req.Header.Get("Host")
    }

    return req, nil
}