#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import os
import logging
import pickle
import tornado
import struct
from datetime import timedelta
from tornado import web, httpserver, ioloop, tcpserver, queues, httpclient, gen
from tornado.iostream import StreamClosedError

logger = logging.getLogger(__name__)


class RequestHandler(web.RequestHandler):
    """Proxies every incoming HTTP request through a pooled relay stream.

    The request (uri, method, body, headers) is pickled, sent over a TCP
    stream taken from the application's connection pool, and the pickled
    response coming back from the far end is replayed to the client.
    """

    @gen.coroutine
    def get(self):
        """Forward the current request over the relay and emit the reply.

        Any failure (timeout, closed stream, bad response) is reported to
        the client as a 500 with the exception text in the body.
        """
        logger.debug('handle %s request to %s', self.request.method,
                     self.request.uri)
        # An empty body must be sent as None for HTTPRequest semantics.
        body = self.request.body or None
        try:
            response = yield self.fetch_request(url=self.request.uri,
                                                method=self.request.method,
                                                body=body,
                                                headers=self.request.headers,
                                                follow_redirects=False,
                                                allow_nonstandard_methods=True)
            if not response:
                # Fix: use the module logger, not the root logging module.
                logger.warning("fetch_request fail %s", self.request.uri)
                raise IOError()
            self.handle_response(response=response)
        except Exception as e:
            self.set_status(500)
            self.write('Internal server error: %s' % str(e))

    @gen.coroutine
    def post(self):
        """POST is handled identically to GET.

        Fix: the coroutine returned by get() must be yielded; previously
        the future was discarded, so the request could be finished before
        the relayed response was ever written.
        """
        yield self.get()

    def handle_response(self, response):
        """Copy status, headers and body from the relayed response dict.

        ``response`` is a dict with keys "code", "reason", "headers" and
        "body" produced by the far end of the relay.
        """
        if response["code"] < 200 or response["code"] >= 300:
            # Any non-2xx result from the far side is surfaced as a 500.
            error = web.HTTPError(response["code"],
                                  reason=response.get("reason", None))
            self.set_status(500)
            self.write('Internal server error:\n' + str(error))
        else:
            self.set_status(response["code"], response["reason"])
            # Drop tornado's default headers so only relayed ones are sent.
            self._headers = tornado.httputil.HTTPHeaders()

            for header, v in response["headers"].items():
                # Length/encoding/connection headers are recomputed locally.
                if header not in (
                        'Content-Length', 'Transfer-Encoding',
                        'Content-Encoding',
                        'Connection'):
                    self.add_header(header, v)

            if response["body"]:
                self.set_header('Content-Length', len(response["body"]))
                self.write(response["body"])

    @gen.coroutine
    def fetch_request(self, **kwargs):
        """Send the pickled request over a pooled stream and await the reply.

        Returns the unpickled response dict, or None when no usable
        stream could be obtained within the timeout.
        """
        # HTTPHeaders subclasses dict in older tornado, so check the exact
        # type (not isinstance) to force conversion to a plain dict.
        if kwargs.get("headers") and type(kwargs["headers"]) is not dict:
            kwargs["headers"] = dict(kwargs["headers"].get_all())
        # NOTE(security): pickle is only acceptable because both relay
        # endpoints are trusted; never expose this wire protocol to
        # untrusted peers.
        data = pickle.dumps(kwargs)

        # Obtain an idle relay connection from the pool, skipping dead ones.
        stream = None
        try:
            while True:
                stream = yield self.application.connections.get(
                    timedelta(seconds=80))
                if not stream or stream.closed():
                    logger.debug("closed...")
                    continue
                try:
                    # Length-prefixed frame: 4-byte big-endian size + payload.
                    yield stream.write(struct.pack("!i", len(data)) + data)
                    data = yield stream.read_bytes(struct.calcsize("!i"))
                    body_len = struct.unpack("!i", data)[0]
                    data = yield stream.read_bytes(body_len)
                    break
                except StreamClosedError:
                    # Peer vanished mid-exchange; try another connection.
                    logger.debug("write closed...")
                    continue
        except Exception as e:
            logger.exception(str(e))
            logger.warning("timeout...")
            raise gen.Return()

        # Return the still-healthy stream to the pool for reuse.
        try:
            yield self.application.connections.put(stream,
                                                   timedelta(seconds=60))
        except queues.QueueFull:
            stream.close()
        resp_data = pickle.loads(data)
        raise gen.Return(resp_data)


class RelayServer(tcpserver.TCPServer):
    """TCP server that collects relay-agent connections into a shared pool."""

    def __init__(self, connections, **kwargs):
        """Remember the shared connection queue, then defer to TCPServer."""
        super(RelayServer, self).__init__(**kwargs)
        self.connections = connections

    @gen.coroutine
    def handle_stream(self, stream, address):
        """Enqueue each newly accepted agent stream; drop it when full."""
        logger.info("connection from %s", address)
        pool = self.connections
        try:
            yield pool.put(stream, timedelta(seconds=60))
        except queues.QueueFull:
            # No room in the pool within the deadline — discard the stream.
            stream.close()


class Application(web.Application):
    """Web application that routes every URI to RequestHandler."""

    def __init__(self, connections):
        """Keep the relay connection pool and register the catch-all route."""
        self.connections = connections
        routes = [(".*", RequestHandler), ]
        super(Application, self).__init__(routes, debug=False)


class Relay(object):
    """Wires the HTTP front-end and the relay TCP listener together."""

    def __init__(self, relay_port, http_port, io_loop=None):
        """Start both listeners on the given (or a fresh) IOLoop.

        relay_port: port where relay agents connect (TCP pool).
        http_port:  port serving proxied HTTP requests.
        io_loop:    optional IOLoop; a new one is created when omitted.
        """
        self.connections = queues.Queue()
        self.loop = io_loop if io_loop else ioloop.IOLoop()
        self.app = Application(self.connections)
        self.app.listen(port=http_port, io_loop=self.loop)
        self.relay_server = RelayServer(connections=self.connections,
                                        io_loop=self.loop)
        self.relay_server.listen(relay_port)

    def io_loop(self):
        """Return the IOLoop this relay runs on."""
        return self.loop
