#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Based on GAppProxy by Du XiaoGang <dugang@188.com>

import util
import BaseHTTPServer, SocketServer
import socket, errno, threading
import re, time, random
# The ssl module first shipped with Python 2.6; without it HTTPS (CONNECT)
# support is disabled but plain HTTP proxying still works.
try:
    import ssl
    ssl_enabled = True
except ImportError:
    # Was a bare `except:` — that would also swallow KeyboardInterrupt,
    # SystemExit and genuine bugs. Only a missing module should disable SSL.
    ssl_enabled = False

class LocalProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Handle one browser request on the local listen port.

    Plain HTTP requests are forwarded through a fetch handler chosen by
    util.findProxy().  HTTPS (CONNECT) is man-in-the-middled: the client side
    of the tunnel is served with a certificate generated for the target host,
    and the decrypted request is re-issued through this same proxy as an
    absolute-URL HTTP request.  Large 206 responses are optionally fetched in
    parallel chunks (see _RangeFetch).
    """

    # Hop-by-hop / proxy-revealing request headers stripped before forwarding.
    # The leading '' guards against a malformed header line with an empty name.
    FR_Headers = ('', 'host', 'vary', 'via', 'x-forwarded-for',
                  'proxy-connection', 'upgrade', 'keep-alive')

    def send_response(self, code, message=None):
        """Write only the status line.

        Unlike the base-class implementation this deliberately does NOT emit
        Server/Date headers, so the upstream response headers can be relayed
        to the client untouched.
        """
        self.log_request(code)
        if message is None:
            if code in self.responses:
                message = self.responses[code][0]
            else:
                message = 'WallProxy Notify'
        if self.request_version != 'HTTP/0.9':
            self.wfile.write("%s %d %s\r\n" % (self.protocol_version, code, message))

    def end_error(self, code, message=None, data=None):
        """Send an error response (with optional custom body) and mark the
        connection to be closed after this request."""
        if not data:
            self.send_error(code, message)
        else:
            self.send_response(code, message)
            self.wfile.write(data)
        self.close_connection = 1

    def do_CONNECT(self):
        """Handle an HTTPS tunnel request by decrypting and re-proxying it.

        Steps: answer 200, wrap our side of the socket as an SSL *server*
        using a certificate forged for the target host, read the decrypted
        request, rewrite its path to an absolute https:// URL, and replay it
        through this proxy's own listen address so do_METHOD processes it.
        """
        if not ssl_enabled:
            return self.end_error(501, 'Local proxy error, HTTPS needs Python2.6 or later')

        # for ssl proxy
        host, _, port = self.path.rpartition(':')
        # Key/cert pair generated for `host` so the browser accepts our MITM.
        keyFile, crtFile = util.getCertificate(host)
        self.wfile.write('HTTP/1.1 200 OK\r\n')
        self.end_headers()
        try:
            # Third positional arg is server_side=True: we play the SSL server.
            ssl_sock = ssl.wrap_socket(self.connection, keyFile, crtFile, True)
        except ssl.SSLError, e:
            print 'SSLError:', e
            self.close_connection = 1
            return

        # rewrite request line, url to abs
        # Accumulate decrypted bytes until the first CRLF-terminated line
        # (the request line) is complete; `data` keeps the leftover bytes.
        first_line = ''
        while True:
            data = ssl_sock.read()
            # EOF?
            if data == '':
                # bad request
                ssl_sock.close()
                self.close_connection = 1
                return
            # newline(\r\n)?
            first_line += data
            if '\n' in first_line:
                first_line, data = first_line.split('\n', 1)
                first_line = first_line.rstrip('\r')
                break
        # got path, rewrite
        method, path, ver = first_line.split()
        if path.startswith('/'):
            # Drop the ':443' suffix for the default port, otherwise keep
            # the original host:port from the CONNECT target.
            path = 'https://%s%s' % (host if port=='443' else self.path, path)
        # connect to local proxy server
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if util.LISTEN_ADDR[0] == '0.0.0.0':
            # Wildcard bind address is not connectable; use loopback instead.
            sock.connect(('127.0.0.1', util.LISTEN_ADDR[1]))
        else:
            sock.connect(util.LISTEN_ADDR)
        # Replay the rewritten request line plus whatever bytes followed it.
        sock.send('%s %s %s\r\n%s' % (method, path, ver, data))

        # forward https request
        # Short timeout: once the client pauses we assume the request body is
        # complete and move on to relaying the response.
        ssl_sock.settimeout(1)
        while True:
            try:
                data = ssl_sock.read(8192)
            except ssl.SSLError, e:
                if str(e).lower().find('timed out') == -1:
                    # error
                    sock.close()
                    ssl_sock.close()
                    self.close_connection = 1
                    return
                # timeout
                break
            if data != '':
                sock.send(data)
            else:
                # EOF
                break

        ssl_sock.setblocking(True)
        # simply forward response
        while True:
            data = sock.recv(8192)
            if data != '':
                ssl_sock.write(data)
            else:
                # EOF
                break
        # clean
        sock.close()
        ssl_sock.shutdown(socket.SHUT_WR)
        ssl_sock.close()

    def _NeedRangeFetch(self, data):
        """Decide whether a 206 response should be completed by range fetch.

        `data` is the fetch-handler result dict ({'code', 'headers',
        'content'}).  Returns False when the partial response already
        satisfies the client's Range request; otherwise sends the response
        headers (and the already-fetched first chunk when it starts the
        range) and returns (next_start, end, partSize) for _RangeFetch.
        """
        m = re.search(r'bytes\s+(\d+)-(\d+)/(\d+)', data['headers'].get('content-range',''))
        if not m: return False
        # m becomes [resp_start, resp_end, total_length] as ints.
        m = map(int, m.groups())
        start = m[0]
        end = m[2] - 1
        if 'range' in self.headers:
            req_range = re.search(r'(\d+)?-(\d+)?', self.headers['range'])
            if req_range:
                # Map each group to int, leaving absent groups as None.
                req_range = [u and int(u) for u in req_range.groups()]
                if req_range[0] is None:
                    if req_range[1] is not None:
                        # Suffix range '-N': satisfied if the response is
                        # exactly the last N bytes of the resource.
                        if m[1]-m[0]+1==req_range[1] and m[1]+1==m[2]:
                            return False
                        if m[2] >= req_range[1]:
                            start = m[2] - req_range[1]
                else:
                    start = req_range[0]
                    if req_range[1] is not None:
                        # 'A-B': satisfied if the response covers it exactly.
                        if m[0]==req_range[0] and m[1]==req_range[1]:
                            return False
                        if end > req_range[1]:
                            end = req_range[1]
            # Advertise the range we will actually deliver.
            data['headers']['content-range'] = 'bytes %d-%d/%d' % (start, end, m[2])
        elif start != 0: #Bad
            return False
        else:
            # Client asked for the whole resource: upgrade to a plain 200.
            data['code'] = 200
            del data['headers']['content-range']
        data['headers']['content-length'] = end-start+1
        self.send_response(data['code'])
        for k,v in data['headers'].iteritems():
            self.send_header(k.title(), v)
        self.end_headers()
        partSize = util.PART_SIZE
        if start == m[0]:
            # The chunk we already have starts the range: stream it now.
            self.wfile.write(data['content'])
            start = m[1] + 1
            if partSize<=0: partSize = len(data['content'])
        # Mark the (mutated) headers so downstream fetches are range fetches.
        del self.headers['range']
        self.headers['RangeFetch'] = 'True'
        return start, end, partSize

    def _RangeFetch(self, handler, proxy, start, end, partSize):
        """Fetch bytes start..end in partSize chunks using worker threads.

        Splits the span into numbered tasks, starts up to len(proxy) worker
        threads (each given a 2-proxy slice), and streams completed blocks to
        the client strictly in order.
        """
        mutex = threading.Lock()
        # Ceiling division: number of partSize blocks covering the span.
        task_size = (end-start+partSize)//partSize
        print '>>>>>>>>>> Range Fetch started: blocks=%d bytes=%d-%d' % (task_size, start, end)
        tasks=[None] * task_size; i = 0
        while start <= end:
            p_end = start + partSize - 1
            if p_end > end: p_end = end
            # Task tuple: (block index, first byte, last byte).
            tasks[i] = (i, start, p_end)
            start += partSize; i += 1
        results = [None] * task_size
        thread_cnt = len(proxy) if len(proxy)<task_size else task_size
        random.shuffle(proxy)
        # Duplicate the list so the slice proxy[i:i+2] is always 2 entries,
        # giving each worker a primary and a fallback proxy.
        proxy = proxy * 2
        threads = {}
        for i in xrange(thread_cnt):
            t = threading.Thread(target=self._RangeThread,
                args=(handler, proxy[i:i+2], tasks, results, threads, mutex))
            # threads[t] records the tasks this worker has failed on.
            threads[t] = set([])
            t.setDaemon(True)
            t.start()
        # Writer loop: i is the next block to send; the `t` flag grants one
        # extra pass after the last worker exits, to flush finished blocks.
        i = 0; t = False
        while i < task_size:
            if results[i] is not None:
                try:
                    self.wfile.write(results[i])
                    results[i] = None
                    i += 1
                    continue
                except:
                    # Client went away: clear the task list so workers stop.
                    mutex.acquire()
                    del tasks[:]
                    mutex.release()
                    self.close_connection = 1
                    break
            if not threads:
                if t: break
                t = True; continue
            time.sleep(1)
        print '>>>>>>>>>> Range Fetch ended'

    def _RangeThread(self, handler, proxy, tasks, results, threads, mutex):
        """Worker for _RangeFetch: repeatedly claim a task, fetch its byte
        range (two attempts), and either store the content or re-queue the
        task.  Exits when every remaining task has failed on all workers."""
        ct = threading.current_thread()
        while True:
            mutex.acquire()
            try:
                # If some task has failed on every live worker, give up.
                if threads[ct].intersection(*threads.itervalues()):
                    raise ValueError('All threads failed')
                # Claim the first task this worker has not already failed on.
                for i,v in enumerate(tasks):
                    if v not in threads[ct]:
                        task = tasks.pop(i)
                        break
                else:
                    raise ValueError('No task for me')
            except ValueError:
                break
            finally:
                mutex.release()
            success = False
            # self.headers already ends with CRLF; append the Range header.
            headers = '%sRange: bytes=%d-%d\r\n' % (self.headers, task[1], task[2])
            for i in xrange(2):
                retval, data = handler(proxy, self.path, self.command, headers)
                # Accept only a 206 whose body length matches the range.
                if retval!=0 or data['code']!=206 or len(data['content'])!=task[2]-task[1]+1:
                    if retval!=0: time.sleep(2)
                    continue
                success = True
                break
            mutex.acquire()
            if success:
                results[task[0]] = data['content']
                print '>>>>>>>>>> block=%d bytes=%d-%d'%task, len(data['content'])
            else:
                # Remember the failure and put the task back, keeping the
                # queue ordered by block index.
                threads[ct].add(task)
                tasks.append(task)
                tasks.sort(key=lambda x: x[0])
            mutex.release()
        mutex.acquire()
        del threads[ct]
        mutex.release()

    def do_METHOD(self):
        """Common handler for the plain-HTTP verbs (GET/HEAD/PUT/POST/DELETE).

        Normalizes the URL to absolute form, picks a fetch handler via
        util.findProxy(), strips hop-by-hop headers, forwards the request,
        and relays the response (handing 206 GETs off to the range fetcher).
        """
        if self.path.startswith('/'):
            # Transparent-proxy style request: rebuild the absolute URL
            # from the Host header.
            host = self.headers['host']
            if host.endswith(':80'):
                host = host[:-3]
            self.path = 'http://%s%s' % (host , self.path)
        handler, proxy = util.findProxy(self.client_address[0], self.path,
                self.command, dict(self.headers))
        if handler is None:
            # findProxy rejected the request; proxy holds end_error() args.
            return self.end_error(*proxy)

        payload_len = int(self.headers.get('content-length', 0))
        if payload_len > 0:
            payload = self.rfile.read(payload_len)
        else:
            payload = ''

        for k in self.FR_Headers:
            if k in self.headers:
                del self.headers[k]

        retval, data = handler(proxy, self.path, self.command, self.headers, payload)
        try:
            if retval == -1:
                return self.end_error(502, str(data))
            # Proxy Fetch
            if isinstance(data, dict):
                if data['code']==206 and self.command=='GET':
                    # payload here is False or (start, end, partSize).
                    payload = self._NeedRangeFetch(data)
                    if payload:
                        del data
                        return self._RangeFetch(handler, proxy, *payload)
                self.send_response(data['code'])
                for k,v in data['headers'].iteritems():
                    self.send_header(k.title(), v)
                self.end_headers()
                self.wfile.write(data['content'])
            # Direct Fetch
            else:
                resp = data
                self.send_response(resp.code)
                # Write resp.headers directly to avoid set-cookie bug
                self.wfile.write(resp.headers)
                self.end_headers()
                # Stream the body through in 8 KB chunks.
                data = resp.read(8192)
                while data != '':
                    self.wfile.write(data)
                    data = resp.read(8192)
                resp.close()
        except socket.error:
            # Connection closed before proxy return
            self.close_connection = 1

    do_GET = do_METHOD
    do_HEAD = do_METHOD
    do_PUT = do_METHOD
    do_POST = do_METHOD
    do_DELETE = do_METHOD

class ThreadingHTTPServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
    """HTTPServer that handles each request in its own thread."""
    pass

def main():
    httpd = ThreadingHTTPServer(util.LISTEN_ADDR, LocalProxyHandler)
    util.LISTEN_ADDR = httpd.server_address[:2]
    print '---------------------------------------------------'
    print 'ListenAddress: %s:%d' % util.LISTEN_ADDR
    print 'HTTPS Enabled: %s' % ('YES' if ssl_enabled else 'NO')
    print 'OpenSSLModule: %s' % ('YES' if util.crypto else 'NO')
    print '---------------------------------------------------'
    if util.RELOAD:
        def repeat():
            while util.RELOAD:
                time.sleep(util.RELOAD)
                util.checkConf()
        threading.Thread(target=repeat).start()
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        util.RELOAD = 0

# Run the proxy only when executed as a script, not when imported.
if __name__ == '__main__':
    main()