#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import sys
import bz2
import json
import time
import types
import socket
import struct
import sqlite3
import argparse
import urlparse
import threading
import BaseHTTPServer
import SocketServer
import logging
import logging.handlers
from utils.ipip import IP
from utils.sqlitethread import SqliteMultithread

# Module-level logger; handlers/level are attached in main().
logger = logging.getLogger('httplog')

# Load the 17mon IP-to-region database once at import time (side effect;
# raises if data/17monipdb.dat is missing).
IP.load(os.path.join(sys.path[0], 'data/17monipdb.dat'))

# Process-wide state, initialised in main()/httplog().
g_cursor = None          # SqliteMultithread handle for the httplog database
g_conf_dir = None        # absolute path of the configuration directory
g_use_x_real_ip = False  # trust the X-Real-IP header for the client address
g_lock = threading.Lock()  # guards g_cache across handler threads
g_cache = {
    'files': {}  # file_path -> {'st_mtime': ..., 'data': ...} parse cache
}


def ip_in_network(ip, network):
    """Return True if dotted-quad `ip` falls inside CIDR `network`.

    `network` must be 'a.b.c.d/prefixlen' with 0 <= prefixlen <= 32
    (get_black_list() normalises bare addresses to /32).  Malformed
    input raises socket.error/ValueError from inet_aton()/int().
    """
    ipaddr = struct.unpack('!L', socket.inet_aton(ip))[0]
    net, prefixlen = network.split('/')
    netaddr = struct.unpack('!L', socket.inet_aton(net))[0]
    # Plain int literals: Python 2 auto-promotes to long past 2**31, so the
    # explicit `1L` was unnecessary (and is invalid under Python 3).
    mask = (1 << 32) - (1 << (32 - int(prefixlen)))
    return ipaddr & mask == netaddr & mask


def read_file_data(file_name, file_dir):
    """Return the raw bytes of file_dir/file_name, or '' if it is not a file."""
    full_path = os.path.join(file_dir, file_name)
    if not os.path.isfile(full_path):
        return ''
    with open(full_path, 'rb') as handle:
        return handle.read()


def bzip2_decompress(content):
    """Inflate a bzip2-compressed byte string and return the plain bytes.

    Referenced by name from routers.json "process" steps (see get_routers).
    """
    return bz2.decompress(content)


def get_black_list():
    """Return the client blacklist as a list of CIDR strings ('a.b.c.d/len').

    Reads <conf_dir>/blacklist.txt (one IP or CIDR per line), normalising
    bare addresses to /32.  Parsed results are cached in g_cache keyed by
    the file's mtime, so the file is only re-read after it changes.
    Thread-safe via g_lock.  Raises OSError if the file does not exist.
    """
    with g_lock:
        file_name = 'blacklist.txt'
        file_path = os.path.join(g_conf_dir, file_name)
        file_dict = g_cache['files'].setdefault(file_path, {})
        # stat once and reuse the value: the original stat'ed twice, so a
        # file modified mid-parse could be cached under the *new* mtime
        # with stale data and never re-read.
        st_mtime = os.stat(file_path).st_mtime
        if file_dict.get('st_mtime') == st_mtime:
            return file_dict['data']
        black_list = []
        # Strip BEFORE filtering: the original filtered on the raw line,
        # so a whitespace-only line survived as '' and became a bogus
        # '/32' entry that crashes ip_in_network().
        items = set(ip.strip() for ip in read_file_data(file_name, g_conf_dir).split('\n') if ip.strip())
        for item in items:
            if '/' not in item:
                item = item + '/32'
            black_list.append(item)
        file_dict['st_mtime'] = st_mtime
        file_dict['data'] = black_list
        return black_list


def get_routers():
    """Return the parsed router table from <conf_dir>/routers.json.

    Each entry has 'path', 'status', 'headers' and 'content'; content may
    come from an external file ('file_dir'/'file_name') and be piped
    through the module-level functions named in 'process'.  Results are
    cached in g_cache keyed by the config file's mtime; thread-safe via
    g_lock.
    """
    with g_lock:
        conf_name = 'routers.json'
        conf_path = os.path.join(g_conf_dir, conf_name)
        if conf_path not in g_cache['files']:
            g_cache['files'][conf_path] = {}
        cache_entry = g_cache['files'][conf_path]
        if cache_entry.get('st_mtime') == os.stat(conf_path).st_mtime:
            return cache_entry['data']
        parsed_routers = []
        for router in json.loads(read_file_data(conf_name, g_conf_dir))['routers']:
            content = router['content']
            content_dir = router.get('file_dir')
            content_file = router.get('file_name')
            if content_dir is not None and content_file is not None:
                # External file overrides the inline content.
                content = read_file_data(content_file, os.path.join(sys.path[0], content_dir))
            for step in router.get('process', []):
                # Only plain module-level functions are eligible filters.
                fn = globals().get(step)
                if isinstance(fn, types.FunctionType):
                    content = fn(content)
            parsed_routers.append({
                'path': router['path'],
                'status': router['status'],
                'headers': router['headers'],
                'content': content,
            })
        cache_entry['st_mtime'] = os.stat(conf_path).st_mtime
        cache_entry['data'] = parsed_routers
        return parsed_routers


def periodic_commit(seconds):
    """Commit the shared sqlite cursor every `seconds` seconds, forever.

    Runs in a daemon thread started by httplog(), which assigns g_cursor
    before starting this thread.
    """
    while True:
        time.sleep(seconds)
        g_cursor.commit()


def insert_record(request):
    """Record one HTTP request into the sqlite `httplog` table.

    Best-effort: any failure is caught and logged so request handling is
    never broken by the logging path.
    """
    try:
        server_ip, server_port = request.connection.getsockname()
        client_ip, client_port = request.connection.getpeername()
        # Behind a reverse proxy the peer address is the proxy's; trust
        # the X-Real-IP header instead when configured.
        if g_use_x_real_ip is True:
            client_ip = request.headers.getheader('X-Real-IP', client_ip)
        # Geo/region lookup via the 17mon database loaded at import time.
        region = IP.find(client_ip)
        host = request.headers.getheader('Host', '').strip()
        if not host:
            host = '%s:%d' % (server_ip, server_port)
        # Absolute-form request lines (proxy-style) already carry the URL.
        if request.path.startswith('http://'):
            url = request.path
        else:
            url = 'http://' + host + request.path
        user_agent = request.headers.getheader('User-Agent')
        referer = request.headers.getheader('Referer')
        payload = request.requestline + '\r\n' + str(request.headers)
        if request.command in ['POST', 'PUT']:
            try:
                length = request.headers.getheader('content-length')
                nbytes = int(length)
                # Cap the captured entity body at 1 KiB.
                nbytes = 1024 if nbytes > 1024 else nbytes
                entity_body = request.rfile.read(nbytes)
                payload = payload + '\r\n' + entity_body
            except:
                # Missing/garbage Content-Length: keep headers-only payload.
                pass
        # Decode to unicode (Python 2) so sqlite stores TEXT, silently
        # dropping undecodable bytes instead of raising.
        host = unicode(host, 'utf-8', 'ignore')
        url = unicode(url, 'utf-8', 'ignore')
        # `and` keeps None as None when the header was absent.
        user_agent = user_agent and unicode(user_agent, 'utf-8', 'ignore')
        referer = referer and unicode(referer, 'utf-8', 'ignore')
        payload = unicode(payload, 'utf-8', 'ignore')
        g_cursor.execute(
            'INSERT INTO httplog'
            ' (server_ip, server_port, client_ip, client_port, region, host, url, user_agent, referer, payload)'
            ' VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', (
                server_ip, server_port, client_ip, client_port,
                region, host, url, user_agent, referer, payload
            )
        )
    except Exception as e:
        logger.exception('sqlite insert error: %s', e)
    else:
        logger.info('client_ip: %s, url: %s', client_ip, url)


def dispatch(request):
    """Route an incoming request and return (status, headers, content).

    Blacklisted clients get a bare 403 and are NOT recorded; everything
    else is logged via insert_record() and matched against the
    routers.json table, falling back to (404, None, None).
    """
    black_list = get_black_list()
    client_ip, client_port = request.connection.getpeername()
    if g_use_x_real_ip is True:
        client_ip = request.headers.getheader('X-Real-IP', client_ip)
    path = urlparse.urlparse(request.path).path
    if any(ip_in_network(client_ip, network) for network in black_list):
        # logger.warning(): .warn is the deprecated alias of .warning.
        logger.warning('block %s, path: %s', client_ip, path)
        return (403, None, None)
    insert_record(request)
    for router in get_routers():
        if path == router['path']:
            return (router['status'], router['headers'], router['content'])
    return (404, None, None)


class TimeoutHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):

    """
    Abandon request handling when client has not responded for a
    certain time. This raises a socket.timeout exception.
    """

    # Per-connection inactivity timeout in seconds, armed in setup().
    timeout = 60
    # HTTP/1.1 so keep-alive works; Content-Length is always emitted.
    protocol_version = 'HTTP/1.1'

    def setup(self):
        # Arm the socket timeout before the base class wraps the socket
        # in buffered file objects.
        self.request.settimeout(self.timeout)
        BaseHTTPServer.BaseHTTPRequestHandler.setup(self)

    def process(self):
        """ process added """
        # Shared handler for every HTTP verb; dispatch() consults the
        # blacklist and router table and returns (status, headers, body).
        code, headers, content = dispatch(self)
        if code == 200:
            # NOTE(review): assumes a 200 router entry always supplies a
            # headers dict and content -- confirm against routers.json.
            self.send_response(200)
            for key, value in headers.items():
                self.send_header(key, value)
            content_type = headers.get('Content-Type')
            if not content_type:
                content_type = 'text/plain; charset=utf-8'
            self.send_header('Content-Type', content_type)
            self.send_header('Content-Length', str(len(content)))
            self.end_headers()
            # HEAD responses carry headers only, never a body.
            if self.command != 'HEAD':
                self.wfile.write(content)
        else:
            self.send_custom(code, None, headers, content)

    def do_GET(self):
        """ do_GET added """
        self.process()

    def do_HEAD(self):
        """ do_HEAD added """
        self.process()

    def do_OPTIONS(self):
        """ do_OPTIONS added """
        self.process()

    def do_POST(self):
        """ do_POST added """
        self.process()

    def do_PUT(self):
        """ do_PUT added """
        self.process()

    def do_DELETE(self):
        """ do_DELETE added """
        self.process()

    def send_response(self, code, message=None):
        """ send_response overridden """
        # Writes the status line immediately but buffers all headers in
        # self.response_headers (a dict) until end_headers(); later
        # send_header() calls for the same keyword override earlier ones.
        self.log_request(code)
        if message is None:
            if code in self.responses:
                message = self.responses[code][0]
            else:
                message = ''
        self.wfile.write('%s %d %s\r\n' % (self.protocol_version, code, message))

        self.response_headers = {}
        self.send_header('Server', self.version_string())
        self.send_header('Date', self.date_time_string())
        # Error responses close the connection; everything else keeps it
        # alive (HTTP/1.1 default).
        if code >= 400:
            self.send_header('Connection', 'close')
            self.close_connection = 1
        else:
            self.send_header('Connection', 'keep-alive')
            self.close_connection = 0

    def send_header(self, keyword, value):
        """ send_header overridden """
        # Buffer instead of writing; flushed by end_headers().
        self.response_headers[keyword] = value

    def end_headers(self):
        """ end_headers overridden"""
        # Emit the buffered headers in sorted key order, then the blank
        # line that terminates the header section.
        for key in sorted(self.response_headers.keys()):
            self.wfile.write('%s: %s\r\n' % (key, self.response_headers[key]))
        self.wfile.write('\r\n')

    def send_custom(self, code, message=None, headers=None, content=None):
        """ send_custom added """
        # Build a complete non-200 response; the default body is
        # "<code>, <message>".
        try:
            short, _ = self.responses[code]
        except KeyError:
            short, _ = '???', '???'
        if message is None:
            message = short
        self.send_response(code, message)
        if headers:
            for key, value in headers.items():
                self.send_header(key, value)
        if not content:
            content = '%d, %s' % (code, message)
            content_type = 'text/plain; charset=utf-8'
            self.send_header('Content-Type', content_type)
        # 204/304 must not carry a body, so advertise a zero length.
        if code in (204, 304):
            self.send_header('Content-Length', '0')
        else:
            self.send_header('Content-Length', str(len(content)))
        self.end_headers()
        # No body for HEAD, 1xx, 204 or 304 responses.
        if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
            self.wfile.write(content)

    def send_error(self, code, message=None):
        """ send_error overridden """
        # Route base-class errors through send_custom for uniform output.
        self.send_custom(code, message)

    def log_message(self, *args):
        """ log_message overridden """
        # Silence the default stderr access log; logging goes to sqlite.
        pass

    def address_string(self):
        """ address_string overridden """
        # Return the raw IP, avoiding the base class's reverse-DNS lookup.
        host, port = self.client_address[:2]
        return host

    def version_string(self):
        """ version_string overridden """
        # Advertise an IIS banner instead of the BaseHTTP/Python default.
        return 'Microsoft-IIS/8.5'


class ThreadedHTTPServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):

    """
    New features w/r to BaseHTTPServer.HTTPServer:
    - serves multiple requests simultaneously
    - catches socket.timeout and socket.error exceptions (raised from RequestHandler)
    """

    request_queue_size = 100
    allow_reuse_address = True
    daemon_threads = True

    def __init__(self, *args):
        BaseHTTPServer.HTTPServer.__init__(self, *args)

    def process_request_thread(self, request, client_address):
        """
        Overrides SocketServer.ThreadingMixIn.process_request_thread
        in order to catch socket.timeout
        """
        try:
            self.finish_request(request, client_address)
        except socket.timeout:
            # Idle client hit the handler's timeout -- just drop it.
            pass
        except Exception as e:
            # Log instead of silently swallowing; the previous bare
            # `except:` also hid SystemExit/KeyboardInterrupt.
            logger.exception('error handling request from %s: %s', client_address, e)
        finally:
            # Always release the socket: the original only closed it on
            # the generic-error path, leaking the fd on success/timeout
            # (ThreadingMixIn normally calls shutdown_request here).
            self.close_request(request)


def httplog(ip, port):
    global g_cursor
    sqlite_db = os.path.join(sys.path[0], 'data', 'db', 'httplog.db')
    with open(os.path.join(sys.path[0], 'sqls', 'httplog.sql'), 'rb') as f:
        sql = f.read()
    with sqlite3.connect(sqlite_db) as conn:
        conn.executescript(sql)
    g_cursor = SqliteMultithread(sqlite_db)

    t = threading.Thread(target=periodic_commit, args=(1,))
    t.setDaemon(True)
    t.start()

    httpd = ThreadedHTTPServer((ip, port), TimeoutHTTPRequestHandler)
    print 'Serving HTTP on %s port %d ...' % (ip, port)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        print '^C received, shutting down server'
    finally:
        g_cursor.close()
        g_cursor.join()


def time_converter(secs):
    """Convert an epoch timestamp to a struct_time in Asia/Shanghai (UTC+8).

    Installed as logging.Formatter.converter so log timestamps are local.
    """
    shanghai_offset = 8 * 3600
    return time.gmtime(secs + shanghai_offset)


def main():
    """Parse command-line options, configure logging and start the server."""
    global g_conf_dir, g_use_x_real_ip
    parser = argparse.ArgumentParser()
    parser.add_argument('-l', '--local-ip', default='0.0.0.0',
                        dest='local_ip', help='local ip address to bind to, default: 0.0.0.0')
    parser.add_argument('-p', '--local-port', default=8080, type=int,
                        dest='local_port', help='local port to bind to, default: 8080')
    parser.add_argument('-c', '--conf-dir', default='conf', dest='conf_dir',
                        help='conf directory, default: "conf"')
    parser.add_argument('--use-x-real-ip', dest='use_x_real_ip', action='store_true',
                        help='use x-real-ip as client ip')
    parser.add_argument('--log', dest='log_file', help='log file')
    opts = parser.parse_args()

    if os.path.isdir(os.path.join(sys.path[0], opts.conf_dir)):
        g_conf_dir = os.path.join(sys.path[0], opts.conf_dir)
    else:
        print '[error] conf dir "%s" not exist!' % opts.conf_dir
        # Exit non-zero so wrapping scripts can detect the failure
        # (the original sys.exit() reported success).
        sys.exit(1)

    if opts.use_x_real_ip:
        g_use_x_real_ip = True

    # Only the handler differs between file and console logging; the
    # formatter/level wiring is shared (it was duplicated verbatim before).
    if opts.log_file:
        handler = logging.handlers.RotatingFileHandler(
            filename=opts.log_file,
            mode='a',
            maxBytes=1 * 1024 * 1024,
            backupCount=5
        )
    else:
        handler = logging.StreamHandler()
    formatter = logging.Formatter(fmt='[%(asctime)s] %(levelname)s [%(name)s:%(lineno)d] "%(message)s"')
    # Render timestamps in Asia/Shanghai time.
    formatter.converter = time_converter
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    httplog(opts.local_ip, opts.local_port)


# Run the server only when executed as a script, not on import.
if __name__ == '__main__':
    main()
