﻿#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Changes made by Alden Torres
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# - Rewrite of SingleLinkNode, LockFreeStack from Julian M Bucknall article
#   http://www.boyet.com/articles/lockfreestack.html.
# - Rewrite of BufferManager, SocketAsyncEventArgsPool from MSDN documentation
#   examples.
# - Complete rewrite of IOStream, HTTPServer.

from System import *
from System.Net import *
from System.Net.Sockets import *
from System.Threading import *
from System.Text import *

"""A non-blocking, single-threaded HTTP server."""
import cgi
import errno
import functools
import logging
import time
import urlparse

def WriteStringToBuffer(str, count, buffer, offset):
	"""Copy the first count characters of str into buffer starting at offset.

	Each character is narrowed to a single byte with Convert.ToByte, so the
	string is expected to contain only single-byte-representable characters.
	"""
	for pos in range(count):
		buffer[offset + pos] = Convert.ToByte(str.Chars[pos])

class SingleLinkNode(object):
	"""One node of a singly linked list: a payload plus a link to its successor."""

	def __init__(self, next = None, item = None):
		# Note the (next, item) argument order -- callers pass positionally.
		self.item = item
		self.next = next
		
class LockFreeStack(object):
	"""A stack intended for concurrent push/pop via compare-and-swap.

	Rewrite of Julian M Bucknall's lock-free stack (see file header).
	self._head is a dummy sentinel node; self._head.next is the top of
	the stack. Both push and pop loop, retrying the CAS until no other
	thread has changed the top underneath them.
	"""
	def __init__(self):
		# Sentinel node: its .next field holds the actual top-of-stack pointer.
		self._head = SingleLinkNode()

	def push(self, item):
		"""Push item, retrying until the CAS succeeds."""
		new_node = SingleLinkNode(self._head.next, item)
		
		# If another thread moved the top since we read it, re-link and retry.
		while not self._cas(new_node.next, new_node):
			new_node.next = self._head.next

	def pop(self):
		"""Pop and return the top item, or None if the stack is empty."""
		node = self._head.next
		if node == None:
			return None
		
		# Retry until we unlink the node we actually observed as the top.
		while not self._cas(node, node.next):
			node = self._head.next
			if node == None:
				return None
			
		return node.item

	def _cas(self, comparand, new_value):
		# IronPython surfaces Interlocked.CompareExchange's byref first
		# argument as an extra return value, hence the tuple unpacking:
		# `location` is the value observed before the exchange, and the
		# second element is the post-call value of the ref slot, which is
		# written back into self._head.next here.
		# NOTE(review): that write-back is a plain attribute assignment, not
		# an atomic exchange on the field itself -- confirm this actually
		# preserves the lock-free guarantee under contention.
		location, self._head.next = Interlocked.CompareExchange(self._head.next, new_value, comparand)
		return comparand == location;

class BufferManager(object):
	"""This class creates a single large buffer which can be divided up
	and assigned to SocketAsyncEventArgs objects for use with each
	socket I/O operation.

	This enables buffers to be easily reused and guards against
	fragmenting heap memory.
	"""
	def __init__(self, total_bytes, buffer_size):
		self._total_bytes = total_bytes			# size of the single backing byte array
		self._buffer_size = buffer_size			# size of each per-operation slice
		self._current_index = 0					# next never-assigned offset in the big buffer
		self._free_index_pool = LockFreeStack()	# offsets returned by free_buffer, for reuse

		# create one big large buffer and divide that
		# out to each SocketAsyncEventArg object
		self._buffer = Array.CreateInstance(Byte, self._total_bytes)

	def set_buffer(self, args):
		"""Assigns a buffer from the buffer pool to the
		specified SocketAsyncEventArgs object.

		Returns True if the buffer was successfully set, else False.
		"""
		# Prefer a previously freed slice before carving a fresh one.
		index = self._free_index_pool.pop()
		if index is not None:
			args.SetBuffer(self._buffer, index, self._buffer_size)
		else:
			if (self._total_bytes - self._current_index) < self._buffer_size:
				# The backing buffer is exhausted.
				return False

			args.SetBuffer(self._buffer, self._current_index, self._buffer_size)
			self._current_index += self._buffer_size

		return True

	def free_buffer(self, args):
		"""Removes the buffer from a SocketAsyncEventArg object.

		This frees the buffer back to the buffer pool.
		"""
		# Remember the slice's offset so it can be handed to another socket.
		self._free_index_pool.push(args.Offset)
		args.SetBuffer(None, 0, 0)

class SocketAsyncEventArgsPool(object):
	"""Represents a collection of reusable SocketAsyncEventArgs objects."""
	def __init__(self):
		self._pool = LockFreeStack()

	def push(self, item):
		"""Add a SocketAsyncEventArgs instance to the pool.

		The "item" parameter is the SocketAsyncEventArgs instance
		to add to the pool.

		Raises ArgumentNullException if item is None.
		"""
		if item is None:
			raise ArgumentNullException("Items added to a SocketAsyncEventArgsPool cannot be null")

		self._pool.push(item)

	def pop(self):
		"""Removes a SocketAsyncEventArgs instance from the pool
		and returns the object removed from the pool.

		Returns None when the pool is empty.
		"""
		return self._pool.pop()
	
class IOStream(object):
	"""A utility class to write to and read from a non-blocking socket.

	We support three methods: write(), read_until(), and read_bytes().
	All of the methods take callbacks (since writing and reading are
	non-blocking and asynchronous). read_until() reads the socket until
	a given delimiter, and read_bytes() reads until a specified number
	of bytes have been read from the socket.

	A very simple (and broken) HTTP client using this class:

		s = Socket(...) and connect

		stream = IOStream(s)

		def on_headers(data):
			headers = {}
			for line in data.split("\r\n"):
				parts = line.split(":")
				if len(parts) == 2:
					headers[parts[0].strip()] = parts[1].strip()
			stream.read_bytes(int(headers["Content-Length"]), on_body)

		def on_body(data):
			print data
			stream.close()

		stream.write("GET / HTTP/1.0\r\n\r\n")
		stream.read_until("\r\n\r\n", on_headers)
	"""
	def __init__(self, socket, read_write_pool, max_buffer_size = 104857600, read_chunk_size = 4096):
		self.socket = socket
		self.read_write_pool = read_write_pool	# shared pool of SocketAsyncEventArgs
		self.max_buffer_size = max_buffer_size	# close the stream if the read buffer exceeds this
		self.read_chunk_size = read_chunk_size
		self._read_buffer = ""
		self._write_buffer = ""
		self._read_delimiter = None		# delimiter of a pending read_until()
		self._read_bytes = None			# byte count of a pending read_bytes()
		self._read_callback = None
		self._write_callback = None
		self._close_callback = None

	def read_until(self, delimiter, callback):
		"""Call callback when we read the given delimiter."""
		assert not self._read_callback, "Already reading"
		# Serve the request straight from the buffer when possible.
		loc = self._read_buffer.find(delimiter)
		if loc != -1:
			try:
				callback(self._consume(loc + len(delimiter)))
			except Exception as ex:
				logging.error("Callback error in read_until: " + ex.ToString())
			return
		self._check_closed()
		self._read_delimiter = delimiter
		self._read_callback = callback

		read_event_args = self.read_write_pool.pop()
		read_event_args.Completed += self._IO_Completed

		# ReceiveAsync returns False when it completed synchronously; the
		# Completed event will not fire in that case, so process inline.
		will_raise_event = self.socket.ReceiveAsync(read_event_args)
		if not will_raise_event:
			self._process_receive(read_event_args)

	def read_bytes(self, num_bytes, callback):
		"""Call callback when we read the given number of bytes."""
		assert not self._read_callback, "Already reading"
		if len(self._read_buffer) >= num_bytes:
			# Enough data is already buffered; no socket operation needed.
			try:
				callback(self._consume(num_bytes))
			except Exception as ex:
				logging.error("Callback error in read_bytes: " + ex.ToString())
			return
		self._check_closed()
		self._read_bytes = num_bytes
		self._read_callback = callback

		read_event_args = self.read_write_pool.pop()
		read_event_args.Completed += self._IO_Completed

		will_raise_event = self.socket.ReceiveAsync(read_event_args)
		if not will_raise_event:
			self._process_receive(read_event_args)

	def write(self, data, callback=None):
		"""Write the given data to this stream.

		If callback is given, we call it when all of the buffered write
		data has been successfully written to the stream. If there was
		previously buffered write data and an old write callback, that
		callback is simply overwritten with this new callback.
		"""
		self._check_closed()
		self._write_buffer += data
		self._write_callback = callback

		if self._write_buffer:
			write_event_args = self.read_write_pool.pop()
			write_event_args.Completed += self._IO_Completed

			# Copy as much pending data as fits in the pooled buffer slice;
			# the remainder is drained from _process_send as sends complete.
			num_bytes = min(self._write_buffer.Length, write_event_args.Count)
			WriteStringToBuffer(self._write_buffer, num_bytes, write_event_args.Buffer, write_event_args.Offset)
			write_event_args.SetBuffer(write_event_args.Buffer, write_event_args.Offset, num_bytes)
			self._write_buffer = self._write_buffer[num_bytes:]

			will_raise_event = self.socket.SendAsync(write_event_args)
			if not will_raise_event:
				self._process_send(write_event_args)

	def set_close_callback(self, callback):
		"""Call the given callback when the stream is closed."""
		self._close_callback = callback

	def close(self, e = None):
		"""Close this stream.

		e is the SocketAsyncEventArgs (if any) to recycle into the pool.
		"""
		if self.socket is not None:
			self._close_client_socket(e)
			self.socket = None
			if self._close_callback:
				try:
					self._close_callback()
				except Exception as ex:
					logging.error("Callback error in close: " + ex.ToString())

	def reading(self):
		"""Returns true if we are currently reading from the stream."""
		return self._read_callback is not None

	def writing(self):
		"""Returns true if we are currently writing to the stream."""
		return len(self._write_buffer) > 0

	def closed(self):
		"""Returns true if this stream has been closed."""
		return self.socket is None

	def _IO_Completed(self, sender, e):
		""" This method is called whenever a receive or send
		operation is completed on a socket.
		"""
		if e.LastOperation == SocketAsyncOperation.Receive:
			self._process_receive(e)
		elif e.LastOperation == SocketAsyncOperation.Send:
			self._process_send(e)
		else:
			raise ArgumentException("The last operation completed on the socket was not a receive or send")

	def _process_receive(self, e):
		"""This method is invoked when an asynchronous receive operation completes.
		If the remote host closed the connection, then the socket is closed.
		"""
		if e.BytesTransferred > 0 and e.SocketError == SocketError.Success:
			chunk = e.Buffer[e.Offset:e.Offset + e.BytesTransferred]

			if not chunk:
				self.close(e)
				return

			self._read_buffer += Encoding.UTF8.GetString(chunk)

			if len(self._read_buffer) >= self.max_buffer_size:
				logging.error("Reached maximum read buffer size")
				self.close(e)
				return

			if self._read_bytes:
				# A read_bytes() request is pending; satisfy it if possible.
				if len(self._read_buffer) >= self._read_bytes:
					num_bytes = self._read_bytes
					callback = self._read_callback
					# Clear state before the callback so it may start a new read.
					self._read_callback = None
					self._read_bytes = None
					try:
						callback(self._consume(num_bytes))
					except Exception as ex:
						logging.error("Callback error in _process_receive: " + ex.ToString())
						self.close(e)
						return
			elif self._read_delimiter:
				# A read_until() request is pending; look for the delimiter.
				loc = self._read_buffer.find(self._read_delimiter)
				if loc != -1:
					callback = self._read_callback
					delimiter_len = len(self._read_delimiter)
					self._read_callback = None
					self._read_delimiter = None
					try:
						callback(self._consume(loc + delimiter_len))
					except Exception as ex:
						logging.error("Callback error in _process_receive: " + ex.ToString())
						self.close(e)
						return

			# Keep a receive posted while the socket is open so pending (or
			# future) reads can complete; the callback above may have closed
			# the stream, hence the re-check.
			if self.socket is not None:
				will_raise_event = self.socket.ReceiveAsync(e)
				if not will_raise_event:
					self._process_receive(e)
		else:
			self.close(e)

	def _process_send(self, e):
		"""This method is invoked when an asynchronous send operation completes.
		"""
		if e.SocketError == SocketError.Success:
			if self._write_buffer:
				# More buffered data remains; refill the pooled slice and
				# post another send.
				num_bytes = min(self._write_buffer.Length, e.Count)
				WriteStringToBuffer(self._write_buffer, num_bytes, e.Buffer, e.Offset)
				e.SetBuffer(e.Buffer, e.Offset, num_bytes)
				self._write_buffer = self._write_buffer[num_bytes:]

				will_raise_event = self.socket.SendAsync(e)
				if not will_raise_event:
					self._process_send(e)

			if not self._write_buffer and self._write_callback:
				callback = self._write_callback
				self._write_callback = None
				try:
					callback()
				except Exception as ex:
					logging.error("Callback error in _process_send: " + ex.ToString())
					return
		else:
			self.close(e)

	def _consume(self, loc):
		# Split off and return the first loc characters of the read buffer.
		result = self._read_buffer[:loc]
		self._read_buffer = self._read_buffer[loc:]
		return result

	def _check_closed(self):
		"""Raise IOError if this stream has been closed."""
		if not self.socket:
			raise IOError("Stream is closed")

	def _close_client_socket(self, e):
		"""Shut down and close the socket associated with the client."""
		try:
			self.socket.Shutdown(SocketShutdown.Send)
		except Exception:
			# Shutdown throws if the client process has already closed the
			# connection; that is expected, so ignore it.
			# (Fixed: the original "except e:" used the event-args argument
			# -- or None -- as the exception class to catch, which itself
			# raised a TypeError whenever Shutdown actually failed.)
			pass

		self.socket.Close()
		# Free the SocketAsyncEventArg so they can be reused by another client
		if e is not None:
			e.Completed -= self._IO_Completed
			self.read_write_pool.push(e)
	
class HTTPServer(object):
	"""A non-blocking, single-threaded HTTP server.

    A server is defined by a request callback that takes an HTTPRequest
    instance as an argument and writes a valid HTTP response with
    request.write(). request.finish() finishes the request (but does not
    necessarily close the connection in the case of HTTP/1.1 keep-alive
    requests). A simple example server that echoes back the URI you
    requested:

        def handle_request(request):
           message = "You requested %s\n" % request.uri
           request.write("HTTP/1.1 200 OK\r\nContent-Length: %d\r\n\r\n%s" % (
                         len(message), message))
           request.finish()

        http_server = HTTPServer(handle_request)
        http_server.listen(local_end_point)

    HTTPServer is a very basic connection handler. Beyond parsing the
    HTTP request body and headers, the only HTTP semantics implemented
    in HTTPServer is HTTP/1.1 keep-alive connections. We do not, however,
    implement chunked encoding, so the request callback must provide a
    Content-Length header or implement chunked encoding for HTTP/1.1
    requests for the server to run correctly for HTTP/1.1 clients. If
    the request handler is unable to do this, you can provide the
    no_keep_alive argument to the HTTPServer constructor, which will
    ensure the connection is closed on every request no matter what HTTP
    version the client is using.

    If xheaders is True, we support the X-Real-Ip and X-Scheme headers,
    which override the remote IP and HTTP scheme for all requests. These
    headers are useful when running Tornado behind a reverse proxy or
    load balancer.
    """
	def __init__(self, request_callback, num_connections = 100, receive_buffer_size = 4096, no_keep_alive = False, xheaders = False):
		self.request_callback = request_callback
		self.num_connections = num_connections # the maximum number of connections the sample is designed to handle simultaneously
		self.receive_buffer_size = receive_buffer_size # buffer size to use for each socket I/O operation
		self.no_keep_alive = no_keep_alive
		self.xheaders = xheaders
		self._socket = None
		self._ops_to_pre_alloc = 2 # one read plus one write can be outstanding per socket

		# Allocate buffers such that the maximum number of sockets can have one outstanding read and
		# write posted to the socket simultaneously
		self._buffer_manager = BufferManager(receive_buffer_size * num_connections * self._ops_to_pre_alloc, receive_buffer_size)
		self._read_write_pool = SocketAsyncEventArgsPool()

		# Initializes by preallocating reusable buffers and context objects.
		# NOTE(review): the buffer is sized for num_connections *
		# _ops_to_pre_alloc slices but only num_connections event args are
		# preallocated here -- confirm whether the extra headroom is intended.
		for _ in range(self.num_connections):
			# Pre-allocate a set of reusable SocketAsyncEventArgs
			read_write_event_arg = SocketAsyncEventArgs()
			# assign a byte buffer from the buffer pool to the SocketAsyncEventArg object
			self._buffer_manager.set_buffer(read_write_event_arg)
			# add SocketAsyncEventArg to the pool
			self._read_write_pool.push(read_write_event_arg)

	def listen(self, local_end_point):
		"""Starts the server such that it is listening for
		incoming connection requests.
		"""
		assert not self._socket
		# create the socket which listens for incoming connections
		self._socket = Socket(local_end_point.AddressFamily, SocketType.Stream, ProtocolType.Tcp)
		self._socket.Bind(local_end_point)
		# start the server with a listen backlog of 100 connections
		self._socket.Listen(100)

		# post accepts on the listening socket
		self._start_accept(None)

	def _start_accept(self, accept_event_arg):
		"""Begins an operation to accept a connection request from the client.

		accept_event_arg is the context object to use when issuing
		the accept operation on the server's listening socket; pass None
		to allocate a fresh one.
		"""
		if accept_event_arg is None:
			accept_event_arg = SocketAsyncEventArgs()
			accept_event_arg.Completed += self._Accept_Completed
		else:
			# socket must be cleared since the context object is being reused
			accept_event_arg.AcceptSocket = None

		# AcceptAsync returns False on synchronous completion, in which case
		# the Completed event will not fire and we process inline.
		will_raise_event = self._socket.AcceptAsync(accept_event_arg)
		if not will_raise_event:
			self._process_accept(accept_event_arg)

	def _Accept_Completed(self, sender, e):
		""" This method is the callback method associated with Socket.AcceptAsync
		operations and is invoked when an accept operation is complete.
		"""
		self._process_accept(e)

	def _process_accept(self, e):
		"""Wrap the accepted socket in an IOStream/HTTPConnection pair,
		then immediately post the next accept.
		"""
		try:
			stream = IOStream(e.AcceptSocket, self._read_write_pool)
			address = "0.0.0.0"
			if e.AcceptSocket.RemoteEndPoint is not None:
				if e.AcceptSocket.RemoteEndPoint.Address is not None:
					address = e.AcceptSocket.RemoteEndPoint.Address.ToString()
			# HTTPConnection registers its own callbacks on the stream, so no
			# reference to it needs to be kept here.
			HTTPConnection(stream, address, self.request_callback,
							self.no_keep_alive, self.xheaders)
		except:
			# One broken connection must not take down the accept loop.
			logging.error("Error in connection callback", exc_info=True)

		# accept the next connection request
		self._start_accept(e)
		
class HTTPConnection(object):
    """Handles a connection to an HTTP client, executing HTTP requests.

    We parse HTTP headers and bodies, and execute the request callback
    until the HTTP connection is closed.
    """
    def __init__(self, stream, address, request_callback, no_keep_alive=False,
                 xheaders=False):
        self.stream = stream
        # Remote address string (HTTPServer._process_accept passes
        # RemoteEndPoint.Address.ToString(), not a (host, port) tuple).
        self.address = address
        self.request_callback = request_callback
        self.no_keep_alive = no_keep_alive
        self.xheaders = xheaders
        self._request = None
        self._request_finished = False
        # Kick off the request loop by reading the header block.
        self.stream.read_until("\r\n\r\n", self._on_headers)

    def write(self, chunk):
        """Write a chunk of the response; _on_write_complete runs when sent."""
        assert self._request, "Request closed"
        self.stream.write(chunk, self._on_write_complete)

    def finish(self):
        """Mark the current request finished; complete it once writes drain."""
        assert self._request, "Request closed"
        self._request_finished = True
        if not self.stream.writing():
            self._finish_request()

    def _on_write_complete(self):
        if self._request_finished:
            self._finish_request()

    def _finish_request(self):
        """Close or keep the connection alive per HTTP version and headers."""
        if self.no_keep_alive:
            disconnect = True
        else:
            connection_header = self._request.headers.get("Connection")
            if self._request.supports_http_1_1():
                # HTTP/1.1 defaults to keep-alive unless told otherwise.
                disconnect = connection_header == "close"
            elif ("Content-Length" in self._request.headers 
                    or self._request.method in ("HEAD", "GET")):
                # HTTP/1.0 keeps the connection only on explicit request.
                disconnect = connection_header != "Keep-Alive"
            else:
                disconnect = True
        self._request = None
        self._request_finished = False
        if disconnect:
            self.stream.close()
            return
        # Keep-alive: wait for the next request's headers.
        self.stream.read_until("\r\n\r\n", self._on_headers)

    def _on_headers(self, data):
        """Parse the request line and headers, then dispatch or read the body."""
        eol = data.find("\r\n")
        start_line = data[:eol]
        method, uri, version = start_line.split(" ")
        if not version.startswith("HTTP/"):
            raise Exception("Malformed HTTP version in HTTP Request-Line")
        headers = HTTPHeaders.parse(data[eol:])
        # self.address is already the remote IP string; the original passed
        # self.address[0], which supplied only its first character.
        self._request = HTTPRequest(
            connection=self, method=method, uri=uri, version=version,
            headers=headers, remote_ip=self.address)

        content_length = headers.get("Content-Length")
        if content_length:
            content_length = int(content_length)
            if content_length > self.stream.max_buffer_size:
                raise Exception("Content-Length too long")
            if headers.get("Expect") == "100-continue":
                self.stream.write("HTTP/1.1 100 (Continue)\r\n\r\n")
            self.stream.read_bytes(content_length, self._on_request_body)
            return

        self.request_callback(self._request)

    def _on_request_body(self, data):
        """Attach the body, parse form/multipart arguments, then dispatch."""
        self._request.body = data
        content_type = self._request.headers.get("Content-Type", "")
        if self._request.method == "POST":
            if content_type.startswith("application/x-www-form-urlencoded"):
                arguments = cgi.parse_qs(self._request.body)
                for name, values in arguments.iteritems():
                    # Drop empty values so handlers see only real input.
                    values = [v for v in values if v]
                    if values:
                        self._request.arguments.setdefault(name, []).extend(
                            values)
            elif content_type.startswith("multipart/form-data"):
                # Skip the 'multipart/form-data; boundary=' prefix (30 chars).
                boundary = content_type[30:]
                if boundary: self._parse_mime_body(boundary, data)
        self.request_callback(self._request)

    def _parse_mime_body(self, boundary, data):
        """Split a multipart/form-data body into request arguments and files."""
        # The closing delimiter is '--boundary--' plus CRLF(s); compute how
        # much to strip from the tail before splitting on part delimiters.
        if data.endswith("\r\n"):
            footer_length = len(boundary) + 6
        else:
            footer_length = len(boundary) + 4
        parts = data[:-footer_length].split("--" + boundary + "\r\n")
        for part in parts:
            if not part: continue
            eoh = part.find("\r\n\r\n")
            if eoh == -1:
                logging.warning("multipart/form-data missing headers")
                continue
            headers = HTTPHeaders.parse(part[:eoh])
            name_header = headers.get("Content-Disposition", "")
            if not name_header.startswith("form-data;") or \
               not part.endswith("\r\n"):
                logging.warning("Invalid multipart/form-data")
                continue
            # Part value sits between the blank line and the trailing CRLF.
            value = part[eoh + 4:-2]
            name_values = {}
            for name_part in name_header[10:].split(";"):
                name, name_value = name_part.strip().split("=", 1)
                name_values[name] = name_value.strip('"').decode("utf-8")
            if not name_values.get("name"):
                logging.warning("multipart/form-data value missing name")
                continue
            name = name_values["name"]
            if name_values.get("filename"):
                # File upload: record filename, body and (untrusted) type.
                ctype = headers.get("Content-Type", "application/unknown")
                self._request.files.setdefault(name, []).append(dict(
                    filename=name_values["filename"], body=value,
                    content_type=ctype))
            else:
                self._request.arguments.setdefault(name, []).append(value)


class HTTPRequest(object):
    """A single HTTP request.

    GET/POST arguments are available in the arguments property, which
    maps arguments names to lists of values (to support multiple values
    for individual names). Names and values are both unicode always.

    File uploads are available in the files property, which maps file
    names to list of files. Each file is a dictionary of the form
    {"filename":..., "content_type":..., "body":...}. The content_type
    comes from the provided HTTP header and should not be trusted
    outright given that it can be easily forged.

    An HTTP request is attached to a single HTTP connection, which can
    be accessed through the "connection" attribute. Since connections
    are typically kept open in HTTP/1.1, multiple requests can be handled
    sequentially on a single connection.
    """
    def __init__(self, method, uri, version="HTTP/1.0", headers=None,
                 body=None, remote_ip=None, protocol=None, host=None,
                 files=None, connection=None):
        self.method = method
        self.uri = uri
        self.version = version
        self.headers = headers or HTTPHeaders()
        self.body = body or ""
        if connection and connection.xheaders:
            # Behind a proxy/load balancer, trust the forwarded headers.
            self.remote_ip = headers.get("X-Real-Ip", remote_ip)
            self.protocol = headers.get("X-Scheme", protocol) or "http"
        else:
            self.remote_ip = remote_ip
            self.protocol = protocol or "http"
        self.host = host or headers.get("Host") or "127.0.0.1"
        self.files = files or {}
        self.connection = connection
        self._start_time = time.time()
        self._finish_time = None

        scheme, netloc, path, query, fragment = urlparse.urlsplit(uri)
        self.path = path
        self.query = query
        arguments = cgi.parse_qs(query)
        self.arguments = {}
        for name, values in arguments.iteritems():
            # Drop empty values so handlers see only real input.
            values = [v for v in values if v]
            if values: self.arguments[name] = values

    def supports_http_1_1(self):
        """Returns True if this request supports HTTP/1.1 semantics"""
        return self.version == "HTTP/1.1"

    def write(self, chunk):
        """Writes the given chunk to the response stream."""
        assert isinstance(chunk, str)
        self.connection.write(chunk)

    def finish(self):
        """Finishes this HTTP request on the open connection."""
        self.connection.finish()
        self._finish_time = time.time()

    def full_url(self):
        """Reconstructs the full URL for this request."""
        return self.protocol + "://" + self.host + self.uri

    def request_time(self):
        """Returns the amount of time it took for this request to execute."""
        if self._finish_time is None:
            return time.time() - self._start_time
        else:
            return self._finish_time - self._start_time

    def __repr__(self):
        # Fixed: "remote_ip" was listed twice in the attribute tuple.
        attrs = ("protocol", "host", "method", "uri", "version", "remote_ip",
                 "body")
        args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs])
        return "%s(%s, headers=%s)" % (
            self.__class__.__name__, args, dict(self.headers))


class HTTPHeaders(dict):
    """A dictionary that maintains Http-Header-Case for all keys.

    Lookups are case-insensitive: "content-type", "Content-Type" and
    "CONTENT-TYPE" all refer to the same stored header.
    """
    def __setitem__(self, name, value):
        dict.__setitem__(self, self._normalize_name(name), value)

    def __getitem__(self, name):
        return dict.__getitem__(self, self._normalize_name(name))

    def __contains__(self, name):
        # Keep membership tests consistent with __getitem__ normalization.
        return dict.__contains__(self, self._normalize_name(name))

    def get(self, name, default=None):
        # dict.get bypasses __getitem__, which made get() case-sensitive
        # while h[name] was not; normalize here for consistent lookups.
        return dict.get(self, self._normalize_name(name), default)

    def _normalize_name(self, name):
        """Convert a header name to canonical Http-Header-Case."""
        return "-".join([w.capitalize() for w in name.split("-")])

    @classmethod
    def parse(cls, headers_string):
        """Parse a raw block of "Name: value" lines into an HTTPHeaders.

        Note: expects exactly ": " (colon-space) between name and value,
        as produced by well-formed clients.
        """
        headers = cls()
        for line in headers_string.splitlines():
            if line:
                name, value = line.split(": ", 1)
                headers[name] = value
        return headers