#!/usr/bin/env python2.5
# encoding: utf-8

# http://docs.python.org/lib/module-BaseHTTPServer.html

from SQLInputStream import SQLInputStream
from SQLImportStream import SQLImportStream
from SQLOperatorStream import SQLOperatorStream

# import apsw
# import binascii
# import datetime
import math
import os
import re
import select
import simplejson
import sys
import time

from urllib import unquote_plus

from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from DBAPI2Server import DBAPI2Server

# Server identification, reported by DASERequestHandler in the HTTP
# "Server:" response header (see DASERequestHandler.server_version).
_version = '0.1'
_name    = 'DASEServer'

class DASEServer(HTTPServer, DBAPI2Server, object):
  """HTTP front-end exposing named event streams backed by a DB-API 2 engine.

  Streams are kept in the private ``_streams`` dict, keyed by name (exposed
  read-only through the ``streams`` property).  Each ``new*Stream`` method
  registers a stream object (SQLInputStream, SQLImportStream or
  SQLOperatorStream) under a unique name and returns it.

  SECURITY NOTE: the ``new*Stream`` methods compile caller-supplied
  expressions into SQL functions (via ``createfunction``) and interpolate
  stream names into SQL text -- only feed them trusted input.
  """

  # Argument names handed to user-supplied filter/aggregation expressions.
  argsForFilter = ('startTime', 'endTime', 'value')

  def __init__(self, server_address, engine='sqlite3', db_server=(':memory:',)):
    HTTPServer.__init__(self, server_address, DASERequestHandler)
    DBAPI2Server.__init__(self, engine, db_server)

    self._streams = {}
    # host:port string, used by the web UI's <base> tag (see do_GET).
    self.base = '%s:%i' % (server_address[0], server_address[1])

  def serve_until_quit(self):
    """Serve requests until ``self.quit`` is set (by the 'quit' action)."""
    self.quit = False

    while not self.quit:
      # 1-second timeout so the loop re-checks self.quit even when idle.
      rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
      if rd:
        self.handle_request()

  def requireUniqueStreamName(self, name):
    """Raise NameError if a stream called ``name`` is already registered."""
    if name in self._streams:
      raise NameError("Stream '" + name + "' already exists !")

  def newStream(self, name, duplicateFrom = False):
    """newStream(name) creates a new input Stream.

    If ``duplicateFrom`` is a stream, its events are copied into the new one.
    """
    self.requireUniqueStreamName(name)

    stream = SQLInputStream(self, name, duplicateFrom)
    self._streams[name] = stream

    return stream

  def newImportStream(self, name, query):
    """newImportStream(name, query) creates a stream fed by a SQL query."""
    self.requireUniqueStreamName(name)

    stream = SQLImportStream(self, name, query)
    self._streams[name] = stream

    return stream

  def newOperatorStream(self, name, leftStreamName, rightStreamName, expr):
    """newOperatorStream(name, leftStreamName, rightStreamName, expr)

    Creates a new OperatorStream that joins the two source streams on
    overlapping time intervals and combines their values with ``expr``.

    Sample expression (takes v, w as arguments) :
      'v + w'
    """
    self.requireUniqueStreamName(name)

    # NOTE(review): the previous `assert re.match('', expr)` matched every
    # string (including the empty one) and so validated nothing; dropped.

    fname = name + '_func'

    self.createfunction(fname, ('v', 'w'), expr)

    self._streams[name] = SQLOperatorStream(
      self, name,
      # Joined event id: "<leftId>_<rightId>".
      '`' + leftStreamName + '`.id || "_" || `' + rightStreamName + '`.id',
      # The joined event covers the intersection of the two intervals.
      'MAX(`' + leftStreamName + '`.startTime, `' + rightStreamName + '`.startTime)',
      'MIN(`' + leftStreamName + '`.endTime, `' + rightStreamName + '`.endTime)',
      fname + '(`' + leftStreamName + '`.value, `' + rightStreamName + '`.value)',
      '`' + leftStreamName + '` '
        'INNER JOIN `' + rightStreamName + '` '
        'ON   ( `' + leftStreamName + '`.startTime >= `' + rightStreamName + '`.startTime '
          'OR `' + leftStreamName + '`.endTime   >  `' + rightStreamName + '`.startTime) '
        'AND  ( `' + leftStreamName + '`.startTime <  `' + rightStreamName + '`.endTime '
          'OR `' + leftStreamName + '`.endTime   <= `' + rightStreamName + '`.endTime) ',
      '1')

    return self._streams[name]

  def newFilterStream(self, name, sourceStream, startTimeFilter = None, endTimeFilter = None, valueFilter = None, filter = None):
    """newFilterStream(name, sourceStream,
        startTimeFilter = None, endTimeFilter = None, valueFilter = None, filter = None)

    Creates a new OperatorStream that applies expressions to individual
    events of ``sourceStream``.  Each filter is a URL-quoted expression over
    the arguments (startTime, endTime, value); ``filter`` is a boolean
    expression selecting which events are kept.

    Sample filter expressions :

      startTimeFilter : 'startTime - 10'
      endTimeFilter   : 'endTime + (startTime - endTime)'
      valueFilter     : 'value + 1'
      filter          : 'startTime - endTime >= 100'
    """
    self.requireUniqueStreamName(name)

    def clause(suffix, expr, column):
      # Register `expr` as a SQL function and return the call applying it to
      # the source event's columns; fall back to the raw column when no
      # expression was supplied.
      if expr is None:
        return '`' + sourceStream + '`.' + column
      self.createfunction(name + suffix, self.argsForFilter, unquote_plus(expr))
      return name + suffix + '(`' + sourceStream + '`.startTime, `' + sourceStream + '`.endTime, `' + sourceStream + '`.value)'

    startTimeClause = clause('_startTimeFilter', startTimeFilter, 'startTime')
    endTimeClause   = clause('_endTimeFilter',   endTimeFilter,   'endTime')
    valueClause     = clause('_valueFilter',     valueFilter,     'value')

    if filter is not None:
      self.createfunction(name + '_filter', self.argsForFilter, unquote_plus(filter))
      whereClause = name + '_filter(`' + sourceStream + '`.startTime, `' + sourceStream + '`.endTime, `' + sourceStream + '`.value)'
    else:
      whereClause = '1'

    self._streams[name] = SQLOperatorStream(
      self, name,
      '`' + sourceStream + '`.id',
      startTimeClause,
      endTimeClause,
      valueClause,
      '`' + sourceStream + '`',
      whereClause)

    return self._streams[name]

  def newUnionStream(self, name, stream1Name, stream2Name):
    """newUnionStream(name, stream1Name, stream2Name)

    Creates a new OperatorStream that merges all events from two streams.
    Event ids are prefixed with the originating stream's name.
    """
    # FIX: this method was missing the uniqueness check every other
    # new*Stream performs, so a duplicate name silently replaced (and
    # orphaned) an existing stream.
    self.requireUniqueStreamName(name)

    self._streams[name] = SQLOperatorStream(
      self, name,
      ' "' + stream1Name + '" || "_" || id',
      'startTime',
      'endTime',
      'value',
      '`' + stream1Name + '`',
      # The WHERE clause piggy-backs a UNION onto the generated SELECT.
      '1 UNION '
      'SELECT "' + stream2Name + '" || "_" || id, startTime, endTime, value FROM `' + stream2Name + '`')

    return self._streams[name]

  def newAggregationStream(self, name, sourceStreamName, aggregationStartTimeIterator, aggregationEndTimeIterator, aggregationValueIterator, aggregationGrouper):
    """newAggregationStream(name, sourceStreamName, aggregationStartTimeIterator, aggregationEndTimeIterator, aggregationValueIterator, aggregationGrouper)

    Creates a new OperatorStream that aggregates events of one stream:
    events are grouped by ``aggregationGrouper`` and each group is reduced
    by the three aggregation iterators (one per event field).
    """
    # FIX: this method was missing the uniqueness check every other
    # new*Stream performs.
    self.requireUniqueStreamName(name)

    self.createaggregatefunction(name, aggregationStartTimeIterator, aggregationEndTimeIterator, aggregationValueIterator, aggregationGrouper)

    self.createfunction(name + "_grouper", self.argsForFilter, aggregationGrouper)

    self._streams[name] = SQLOperatorStream(
      self, name,
      'MIN(id)',
      name + "_startTimeAggregator(startTime, endTime, value)",
      name + "_endTimeAggregator(startTime, endTime, value)",
      name + "_valueAggregator(startTime, endTime, value)",
      '`' + sourceStreamName + '`',
      "1 GROUP BY " + name + "_grouper(startTime, endTime, value)")

    return self._streams[name]

  def newIteratorStream(self, name, sourceStreamName, iterator):
    """newIteratorStream(name, sourceStreamName, iterator)

    Creates a new input stream ``name`` by running ``iterator`` over every
    event of ``sourceStreamName``.  Events are tuples :

    event[0] = id,      event[1] = startTime,
    event[2] = endTime, event[3] = value.

    Sample iterator (the mutable default is the accumulator shared across
    calls for one run) :

    def sampleIterator(stream, event, current={'startTime': None, 'endTime': 0, 'value': 0}):
      if current['startTime'] == None:
        current['startTime'] = event[1]

      if event[2] - current['endTime'] < 60:
        if event[2] > current['endTime']:
          current['endTime'] = event[2]

        current['value'] += event[3]
      else:
        stream[current['startTime']:current['endTime']] = current['value']
        current['endTime']  += 1
        current['startTime'] = current['endTime']
        current['value']     = 0

      # We return the current event so as to be able to add the last event
      # We will add the event
      # (current['startTime'], current['endTime'], current['value'])
      # so we should be careful to always return a valid event here
      return current
      """
    stream    = self.newStream(name)
    lastEvent = None

    for event in iter(self._streams[sourceStreamName]):
      lastEvent = iterator(stream, event)

    # The iterator hands back its pending (not-yet-flushed) event; store it
    # so the final accumulated event is not lost.
    if lastEvent is not None:
      stream[lastEvent['startTime']:lastEvent['endTime']] = lastEvent['value']

    return stream

  def newDuplicateStream(self, fromStream, toStream):
    """newDuplicateStream(fromStream, toStream) copies all events from `fromStream` into a new stream `toStream`."""
    # Returns the new stream, for consistency with the other new*Stream
    # methods (backward-compatible: the previous version returned None).
    return self.newStream(toStream, duplicateFrom = self.streams[fromStream])

  def newNormalizeStream(self, fromStream, normalizedStream):
    """newNormalizeStream(fromStream, normalizedStream) normalizes a stream:
    contiguous/overlapping events with equal values are merged and events
    ending before the current one are dropped."""

    def normalizationIterator(stream, event, current={'startTime': None}):
      # NOTE: the mutable default is intentional -- it is the accumulator
      # shared across calls for this run (a fresh dict is created each time
      # newNormalizeStream defines this closure).
      if current['startTime'] is None:
        # First event: start accumulating, nothing to emit yet.
        current['startTime'] = event[1]
        current['endTime']   = event[2]
        current['value']     = event[3]

        return current

      if event[2] <= current['endTime']:
        # This event ends before the current one: out of order, ignore it.
        return current

      if event[1] > current['endTime']:
        # Gap between the events: flush the current one and restart.
        stream[current['startTime']:current['endTime']] = current['value']

        current['startTime'] = event[1]
        current['endTime']   = event[2]
        current['value']     = event[3]

        return current

      if event[3] == current['value']:
        # Same value, contiguous/overlapping: just extend the current event.
        if event[2] > current['endTime']:
          current['endTime'] = event[2]

        return current

      # Overlapping with a different value: flush, then start the new event
      # where the previous one ended (keeps the timeline contiguous).
      stream[current['startTime']:current['endTime']] = current['value']

      current['startTime'] = current['endTime']
      current['endTime']   = event[2]
      current['value']     = event[3]

      return current

    return self.newIteratorStream(
      normalizedStream,
      fromStream,
      normalizationIterator)

  @property
  def streams(self):
    """Read-only access to the name -> stream registry."""
    return self._streams


class DASERequestHandler(BaseHTTPRequestHandler):
  global _name, _version
  server_version = '%s/%s'%(_name, _version)
  
  error_message_format = 'An error occurred (%(code)i) : %(message)s ; %(explain)s.'
  
  regexps = {
    'stream':         r'([a-zA-Z0-9_\-]+)',
    
    'int':            r'([0-9]+)',
    'value':          r'([0-9]+)',
    
    'binExpr':        r'(.*)',
    'ternaryExpr':    r'(.*)',
    'quaternaryExpr': r'(.*)',
    'callback':       r'(.*)',
    
    'query':          r'(.*)',
  }
  
  pagesRegexpList = [
    r'()',
    r'(favicon)\.ico',
    r'(www)/(.*)\.html(\?.+)?',
    r'(js)/(.*)\.js(\?.+)?',
    r'(css)/(.*)\.css',
    r'(img)/(.*)\.(png|jpg|gif)',
    
    r'(new)/(stream)/%s'%(regexps['stream']),
    r'(new)/(import)/%s/%s'%(regexps['query'], regexps['stream']),
    r'(new)/(operator)/%s/%s/%s/%s'%(regexps['stream'], regexps['stream'], regexps['binExpr'], regexps['stream']),
    r'(new)/(filter)/%s/%s/%s/%s/%s/%s'%(regexps['stream'], regexps['ternaryExpr'], regexps['ternaryExpr'], regexps['ternaryExpr'], regexps['ternaryExpr'], regexps['stream']),
    r'(new)/(union)/%s/%s/%s'%(regexps['stream'], regexps['stream'], regexps['stream']),
    r'(new)/(aggregation)/%s/%s/%s/%s/%s/%s'%(regexps['stream'], regexps['quaternaryExpr'], regexps['quaternaryExpr'], regexps['quaternaryExpr'], regexps['ternaryExpr'], regexps['stream']),
    r'(new)/(iterator)/%s/%s/%s'%(regexps['stream'], regexps['callback'], regexps['stream']),
    r'(new)/(duplicate)/%s/%s'%(regexps['stream'], regexps['stream']),
    r'(new)/(normalize)/%s/%s'%(regexps['stream'], regexps['stream']),
    
    r'(get)/%s'%(regexps['stream']),
    
    r'(set)/%s/%s/%s/%s'%(regexps['stream'], regexps['int'], regexps['int'], regexps['value']),
    
    r'(delete)/%s'%(regexps['stream']),
    
    r'(list)',
    
    r'(quit)',
  ]
  
  availablePagesRegexp = re.compile(r'^/(%s)/?$'%(r'|'.join(pagesRegexpList)))
  
  statusCodes = {
    'Success':       0,
    'NameError':     1,
    'UnknownError': 99,
  }
  
  def buildResults(self, status, contents):
    return {
      'status':   self.statusCodes[status],
      'contents': contents
    }
  
  
  # Actions :
  
  def action_GET(self, action):
    try:
      stream = self.server.streams[action[0]]
      
      firstStartTime = stream.getFirstStart()
      lastEndTime    = stream.getLastEnd()
      
      events = stream.__iter__()
      
      # Attention !! On renvoie (pour flot) des données inversées !
      # Cela permet pour les plotter qui ne savent pas afficher la durée d'un
      # event de n'afficher que sa valeur de départ (et de faire une courbe
      # d'interpolation plus aisément d'ailleurs)
      return self.buildResults(
          'Success',
          [e for e in events] if events != None else [])
          # [(e[1], e[2], e[3]) for e in events] if events != None else [])
    except Exception, e:
      return self.buildResults(
        'UnknownError',
        e.__repr__())
  
  def action_LIST(self, action):
    try:
      return self.buildResults(
        'Success',
        self.server.streams.keys())
    except Exception, e:
      return self.buildResults(
        'UnknownError',
        e.__repr__())
    
  def action_NEW(self, action):
    try:
      output = self.newStreamActions[action[0]](self, *action[1:])
      
      return self.buildResults(
        'Success',
        output)
    except NameError, e:
      return self.buildResults(
        'NameError',
        e.__repr__())
    except Exception, e:
      return self.buildResults(
        'UnknownError',
        e.__repr__())
  
  def action_DELETE(self, action):
    try:
      self.server.streams[action[0]].delete()
      
      del self.server.streams[action[0]]
      
      return self.buildResults(
        'Success',
        action[0])
    except Exception, e:
      return self.buildResults(
        'UnknownError',
        e.__repr__())
  
  def action_SET(self, action):
    try:
      name      = action[0]
      startTime = action[1]
      endTime   = action[2]
      value     = action[3]
      
      self.server.streams[name][startTime:endTime] = value
      
      return self.buildResults(
        'Success',
        {'name':      name,
         'startTime': startTime,
         'endTime':   endTime,
         'value':     value})
    except NameError, e:
      return self.buildResults(
        'NameError',
        e.__repr__())
    except Exception, e:
      return self.buildResults(
        'UnknownError',
        e.__repr__())
  
  def action_QUIT(self, action):
    self.server.quit = True
    
    return self.buildResults(
      'Success',
      'Exiting...')
  
  
  pageToAction = {
    'new':    action_NEW,
    'get':    action_GET,
    'set':    action_SET,
    'delete': action_DELETE,
    'list':   action_LIST,
    'quit':   action_QUIT,
  }
  
  def action_NEW_STREAM(self, name):
    self.server.newStream(name)
    
    return name
  
  def action_NEW_IMPORT(self, query, name):
    self.server.newImportStream(name, unquote_plus(query))
    
    return name
  
  def action_NEW_OPERATOR(self, leftStream, rightStream, operation, name):
    self.server.newOperatorStream(name, leftStream, rightStream, unquote_plus(operation))
    
    return name
  
  def action_NEW_FILTER(self, sourceStream, startTimeFilter, endTimeFilter, valueFilter, filterCondition, name):
    self.server.newFilterStream(
      name,
      sourceStream,
      startTimeFilter, endTimeFilter, valueFilter, filterCondition)
    
    return name
  
  def action_NEW_UNION(self, leftStream, rightStream, name):
    self.server.newUnionStream(name, leftStream, rightStream)
    
    return name
  
  def action_NEW_AGGREGATION(self, sourceStream, aggregationStartTimeIterator, aggregationEndTimeIterator, aggregationValueIterator, aggregationGrouper, name):
    self.server.newAggregationStream(
      name,
      sourceStream,
      aggregationStartTimeIterator, aggregationEndTimeIterator,
      aggregationValueIterator, aggregationGrouper)
    
    return name
  
  def action_NEW_ITERATOR(self, sourceStream, iterator, name):
    self.server.newIteratorStream(name, sourceStream, iterator)
    
    return name
  
  def action_NEW_DUPLICATE(self, sourceStream, name):
    self.server.newDuplicateStream(sourceStream, name)
    
    return name
  
  def action_NEW_NORMALIZE(self, sourceStream, name):
    self.server.newNormalizeStream(sourceStream, name)
    
    return name
  
  newStreamActions = {
    'stream':      action_NEW_STREAM,
    'import':      action_NEW_IMPORT,
    'operator':    action_NEW_OPERATOR,
    'filter':      action_NEW_FILTER,
    'union':       action_NEW_UNION,
    'aggregation': action_NEW_AGGREGATION,
    'iterator':    action_NEW_ITERATOR,
    'duplicate':   action_NEW_DUPLICATE,
    'normalize':   action_NEW_NORMALIZE,
  }
  
  
  # ################################## #
  # Code de gestion des pages appelées #
  # ################################## #
  
  def do_GET(self):
    page = self.availablePagesRegexp.match(self.path)
    
    mimetype = None
    
    if page == None:
      self.send_error(404)
    else:
      page = [v for v in page.groups() if v != None]
      
      category = page[1]
      
      if category == '':
        # C'est la racine du serveur qui est demandée, on renvoie l'index du
        # serveur web
        
        category = 'www'
      
      if category in ['www', 'favicon', 'js', 'css', 'img']:
        mimetype = {
            'www':     'text/html',
            'favicon': 'image/x-icon',
            'js':      'text/javascript',
            'css':     'text/css',
            'img':     None
          }[category]
          
        # This is a static file
        if category == 'favicon':
          page = 'favicon.ico'
        else:
          page = list(page)[0]
          
          if page == '':
            page = 'www/index.html'
        
        queryStartIndex = page.find('?')
        
        if queryStartIndex != -1:
          page = page[0:queryStartIndex]
        
        try:
          f = open(os.path.dirname(__file__) + '/../DASEWebServer/' + page, 'r')
          
          output = f.read()
          
          f.close()
        except IOError, e:
          output = ''
        
        if page == 'www/index.html':
          # index.html utilise la balise <base> qu'il faut remplir de façon
          # dynamique
          output = output%(self.server.base)
      else:
        output = simplejson.dumps(self.pageToAction[page[1]](self, page[2:]))
      
      print >> self.wfile, 'HTTP/1.0 200 OK'
      
      if mimetype:
        self.send_header('Content-type', mimetype)
      
      self.end_headers()
      self.log_request(200, len(output));
      
      print >> self.wfile, output
  

