#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import logging
import os
import pickle
import struct
import urlparse

from tornado import gen, httpclient, ioloop, tcpclient
from tornado.iostream import StreamClosedError

# Module-level logger for operational messages; query/response dumps go to the
# separate "db_dump" logger configured in Query.__init__.
logger = logging.getLogger(__name__)

class Query(object):
    """Pull HTTP requests from a relay server over TCP, replay them against a
    local query server, and stream the responses back over the same socket.

    Wire format in both directions: a 4-byte big-endian length prefix
    (``struct`` format ``"!i"``) followed by a pickled payload.

    SECURITY NOTE(review): ``pickle.loads`` is called on bytes read from the
    network; this executes arbitrary code if the relay peer is not fully
    trusted. Consider JSON or another data-only format.
    """

    def __init__(self, log_path, relay_host, relay_port,
                 query_server, result_server,
                 num_worker=10, send_header=False, io_loop=None):
        """
        :param log_path: directory for the rotating query log, or a falsy
            value to disable persistence entirely.
        :param relay_host: host of the relay server to pull requests from.
        :param relay_port: port of the relay server.
        :param query_server: base URL that relayed requests are replayed to.
        :param result_server: base URL of the result server (see result_url).
        :param num_worker: number of concurrent worker coroutines.
        :param send_header: when True, forward the original request headers.
        :param io_loop: tornado IOLoop to run on; defaults to the current one.
        """
        self.relay_host, self.relay_port = relay_host, relay_port
        self.query_server = query_server
        self.result_server = result_server
        self.num_worker = num_worker
        self.send_header = send_header
        # BUG FIX: "ioloop" was referenced without being imported (NameError),
        # and IOLoop() would have built a fresh, never-started loop anyway.
        # Default to the loop the process is actually running on.
        self.loop = io_loop or ioloop.IOLoop.current()
        self.running = False

        self.db_logger = None
        if log_path:
            from logging import handlers
            handler = handlers.TimedRotatingFileHandler(
                os.path.join(log_path, "query_log.txt"),
                when="D",
                backupCount=30)
            self.db_logger = logging.getLogger("db_dump")
            self.db_logger.setLevel(logging.INFO)
            self.db_logger.addHandler(handler)
            # Keep query dumps out of the root logger's handlers.
            self.db_logger.propagate = False

    @gen.coroutine
    def worker(self, n):
        """Single worker loop: connect to the relay, serve requests until the
        stream drops or an error occurs, then reconnect after a short random
        back-off. Runs until ``self.running`` is cleared.

        :param n: worker index, used only in log messages.
        """
        while self.running:
            # BUG FIX: bind before the try block so the generic error path
            # below cannot hit UnboundLocalError when connect() itself fails.
            stream = None
            try:
                stream = yield tcpclient.TCPClient(io_loop=self.loop).\
                    connect(self.relay_host, self.relay_port)
                logger.info("connected...")
                # Fetch requests: 4-byte length prefix, then a pickled dict of
                # HTTPRequest keyword arguments.
                while self.running:
                    data = yield stream.read_bytes(struct.calcsize("!i"))
                    data_len = struct.unpack("!i", data)[0]
                    if data_len == 0:
                        # Zero-length frame carries no payload; skip it.
                        continue
                    data = yield stream.read_bytes(data_len)
                    # SECURITY: pickle on untrusted network data is unsafe --
                    # see class docstring.
                    http_args = pickle.loads(data)
                    # BUG FIX: the dict may lack "url"; pop() then returns None
                    # and .startswith() would raise AttributeError.
                    origin_url = http_args.pop("url", None) or ""
                    if origin_url.startswith("http"):
                        # Absolute URL: keep only path (+ query string) and
                        # graft it onto the local query server.
                        r = urlparse.urlparse(origin_url)
                        if r.query:
                            path = "?".join([r.path, r.query])
                        else:
                            path = r.path
                        url = urlparse.urljoin(self.query_url, path)
                    else:
                        url = urlparse.urljoin(self.query_url, origin_url)
                    logger.debug("url:%s body:%s", url, http_args)

                    http_client = httpclient.AsyncHTTPClient(io_loop=self.loop)
                    # Merged the two previously duplicated fetch branches:
                    # they differed only in whether headers were stripped.
                    if not self.send_header:
                        http_args.pop("headers", None)
                    req = httpclient.HTTPRequest(url=url, **http_args)
                    response = yield http_client.fetch(req, raise_error=False)
                    logger.debug("code:%s", response.code)
                    data = pickle.dumps(dict(
                        code=response.code,
                        reason=response.reason,
                        # BUG FIX: dict(*[pairs]) was an obfuscated
                        # dict(pairs). NOTE: repeated header names collapse
                        # to the last value.
                        headers=dict(response.headers.get_all()),
                        body=response.body,
                    ))
                    # Send the result back, length-prefixed like the request.
                    yield stream.write(struct.pack("!i", len(data)) + data)
                    self.persist(origin_url, http_args.get("body"), response.body)
            except (StreamClosedError, IOError) as e:
                logger.warning("worker %d: %s", n, e)
            except Exception as e:
                logger.exception("unknown: %s", e)
                if stream is not None:
                    stream.close()

            import random
            seconds = random.randint(1, 3)
            logger.info("worker-%d reconnect after %ds", n, seconds)
            yield gen.sleep(seconds)

    @gen.coroutine
    def start_tunnel(self):
        """Start ``num_worker`` concurrent workers and wait for all of them."""
        self.running = True
        yield [self.worker(n) for n in range(self.num_worker)]

    @property
    def query_url(self):
        """Base URL that relayed requests are replayed against."""
        return self.query_server

    @property
    def result_url(self):
        """Base URL of the result server (not used by the worker loop here)."""
        return self.result_server

    def io_loop(self):
        """Return the IOLoop this instance runs on."""
        return self.loop

    def persist(self, uri, body, resp):
        """Append one CSV-style line -- quoted uri, JSON-escaped request body,
        base64-encoded response body -- to the rotating query log.
        No-op when logging was disabled (no log_path given).

        :param uri: original request URL as received from the relay.
        :param body: request body; may be None.
        :param resp: raw response body; may be None.
        """
        if not self.db_logger:
            return
        import base64
        from json.encoder import encode_basestring
        self.db_logger.info("\"%s\",%s,\"%s\"",
                            uri,
                            encode_basestring(body or ""),
                            base64.b64encode(resp or ""))