# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: inkyu

import logging
import threading

import requests
import tornado.httpclient
from concurrent.futures import ThreadPoolExecutor


def create_async_http_client(rest_lib, **kwargs):
  """Factory for an async HTTP client backed by the requested REST library.

  Args:
    rest_lib: One of 'requests', 'pycurl', 'curl' or 'tornado.curl'.
    **kwargs: Forwarded verbatim to the chosen client's constructor.

  Returns:
    An AsyncHttpClient (requests + thread pool) or a TornadoAsyncHttpClient.
    Falls back to AsyncHttpClient if the curl backend cannot be imported.

  Raises:
    ValueError: If rest_lib is not a recognized library name.
  """
  logging.info('Chosen rest_lib: %s', rest_lib)
  if rest_lib == 'requests':
    logging.info('Using rest_lib: requests')
    return AsyncHttpClient(**kwargs)
  elif rest_lib in ('pycurl', 'curl', 'tornado.curl'):
    try:
      # configure() raises ImportError when pycurl is not installed.
      tornado.httpclient.AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient",
                                                   max_clients=100)
      logging.info('Using rest_lib: pycurl')
      return TornadoAsyncHttpClient(**kwargs)
    except ImportError as e:
      logging.error('ImportError %s\n Fallback to use requests rest_lib=requests.', e)
      logging.info('Using rest_lib: requests')
      return AsyncHttpClient(**kwargs)
  else:
    # Name the offending value so misconfiguration is easy to diagnose.
    raise ValueError('Unknown rest_lib: %r' % (rest_lib,))


class AsyncHttpClient(object):
  """Asynchronous HTTP client built on `requests` plus a thread pool.

  Each worker thread lazily creates and reuses its own requests.Session
  (sessions are not safe to share across threads); fetch() returns a
  concurrent.futures.Future that resolves to a requests.Response.
  """

  def __init__(self, *, max_connections=4, keep_alive=True, **kwargs):
    """Initializes the client.

    Args:
      max_connections: Number of worker threads, i.e. the maximum number of
        concurrently in-flight requests.
      keep_alive: If False, ask servers to close the connection after each
        request instead of keeping it persistent.
      **kwargs: Absorbed so call sites may pass options intended for other
        client implementations (see create_async_http_client).
    """
    self._kwargs = kwargs  # Absorb extra params
    self._executor = ThreadPoolExecutor(max_workers=max_connections)
    self._max_connections = max_connections
    self._threadlocal = threading.local()
    self._owned_sessions = []
    self._keep_alive = keep_alive
    self._logger = logging.getLogger(__name__)

  def close(self):
    """Closes all owned sessions and shuts down the worker pool. Idempotent."""
    if self._owned_sessions is not None:
      for session in self._owned_sessions:
        session.close()
      self._owned_sessions = None
    self._executor.shutdown(wait=False)

  def __del__(self):
    # getattr-guard: __del__ may run on a partially-constructed object if
    # __init__ raised before _owned_sessions was assigned.
    if getattr(self, '_owned_sessions', None) is not None:
      self.close()

  def _new_session(self):
    """Creates, records and returns a new Session for the calling thread."""
    session = requests.Session()
    if not self._keep_alive:
      # requests has no real Session.keep_alive switch; the supported way to
      # disable persistent connections is the `Connection: close` header.
      session.headers['Connection'] = 'close'
    self._owned_sessions.append(session)  # Python list.append is thread-safe.
    return session

  def _fetch_sync(self, *, timeout=7, **kwargs):
    """Runs one request on a worker thread using its thread-local Session.

    Args:
      timeout: Per-request timeout in seconds, passed to Session.send.
      **kwargs: Arguments for requests.Request (method, url, headers, ...).

    Returns:
      The requests.Response.
    """
    try:
      session = self._threadlocal.session
    except AttributeError:
      # First request on this thread: create and cache a dedicated Session.
      session = self._new_session()
      self._threadlocal.session = session

    request = requests.Request(**kwargs)
    prepared = session.prepare_request(request)
    response = session.send(prepared, timeout=timeout)
    return response

  def fetch(self, **kwargs):
    """Schedules a request; returns a Future of the requests.Response."""
    # noinspection PyProtectedMember
    qsize = self._executor._work_queue.qsize()
    if qsize > self._max_connections:
      # Backlog exceeds the pool size; requests are queuing up.
      self._logger.warning('Number of queued jobs (%s) is more than max workers (%s)!',
                           qsize,
                           self._max_connections)
    return self._executor.submit(self._fetch_sync, **kwargs)

  def get(self, url, **kwargs):
    """HTTP GET; returns a Future of the Response."""
    return self.fetch(method='GET', url=url, **kwargs)

  def post(self, url, **kwargs):
    """HTTP POST; returns a Future of the Response."""
    return self.fetch(method='POST', url=url, **kwargs)

  def put(self, url, **kwargs):
    """HTTP PUT; returns a Future of the Response."""
    return self.fetch(method='PUT', url=url, **kwargs)

  def delete(self, url, **kwargs):
    """HTTP DELETE; returns a Future of the Response."""
    return self.fetch(method='DELETE', url=url, **kwargs)

class TornadoAsyncHttpClient(object):
  """Asynchronous HTTP client backed by tornado.httpclient.

  Keyword arguments in requests.Request style are prepared by `requests`
  (URL/params/headers/body assembly) and then replayed through tornado's
  AsyncHTTPClient, so fetch() returns a tornado Future of an HTTPResponse.
  """

  def __init__(self, **kwargs):
    self._kwargs = kwargs  # Absorb extra params
    self._client = tornado.httpclient.AsyncHTTPClient()
    self._logger = logging.getLogger('TornadoAsyncHttpClient')

  def fetch(self, **kwargs):
    """Issues one request; kwargs follow requests.Request's signature."""
    timeout = kwargs.pop('timeout', None)
    # Let `requests` do the request preparation, then hand the prepared
    # pieces to tornado, which performs the actual network I/O.
    prepared = requests.Request(**kwargs).prepare()
    tornado_request = tornado.httpclient.HTTPRequest(
        method=prepared.method,
        url=prepared.url,
        headers=prepared.headers,
        body=prepared.body,
        connect_timeout=timeout,
        request_timeout=timeout,
        allow_nonstandard_methods=True,
    )
    return self._client.fetch(tornado_request)

  def get(self, url, **kwargs):
    """HTTP GET; returns a tornado Future."""
    return self.fetch(method='GET', url=url, **kwargs)

  def post(self, url, **kwargs):
    """HTTP POST; returns a tornado Future."""
    return self.fetch(method='POST', url=url, **kwargs)

  def put(self, url, **kwargs):
    """HTTP PUT; returns a tornado Future."""
    return self.fetch(method='PUT', url=url, **kwargs)

  def delete(self, url, **kwargs):
    """HTTP DELETE; returns a tornado Future."""
    return self.fetch(method='DELETE', url=url, **kwargs)
