# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.

import re
import os
import sys
import sqlite3
import ConfigParser

import jsonpickle
import scipy
import numpy
import pygooglechart
from scipy import stats
import httplib
from urllib import urlencode

from datetime import datetime
UPTIME_RE = re.compile(' ([0-9]{2}:[0-9]{2}:[0-9]{2}) up ((([0-9]+) days, ([0-9]{2}):([0-9]{2}))|(([0-9]+) min)),  ([0-9]+) user[s]?,  load average: ([0-9\.]+), ([0-9\.]+), ([0-9\.]+)')
FILENAME_RE = re.compile('.+([0-9]{8})')


class NotImplementedException(Exception):
    """Raised by abstract methods that a subclass failed to override."""

class ParseException(Exception):
    """Raised when a line of uptime output does not match UPTIME_RE."""

class Uptime(object):
    """Value object for one uptime reading: timestamp, minutes of uptime,
    logged-in user count, and the 1/5/15-minute load averages."""

    def __init__(self, time=None, up_minutes=None, users=None,
                 load1=None, load5=None, load15=None):
        self.time = time
        self.up_minutes = up_minutes
        self.users = users
        self.load1 = load1
        self.load5 = load5
        self.load15 = load15

    def __repr__(self):
        return '<Uptime object at %s>' % (self.time,)
        

class LineParser(object):
    """Parse a single line of `uptime` output into its component fields.

    The fields are exposed as read-only properties backed by the match
    groups of the UPTIME_RE regular expression.
    """
    def __init__(self, line, date):
        """Parse the uptime line according to the UPTIME_RE regular expression.

        Input:
        line - the string representation of the output of uptime
        date - a Python date/datetime object holding the year, month, and date from the reading

        Raises:
        ParseException - if the regular expression does not find a match
        """
        self.line = line
        self.date = date
        match = UPTIME_RE.match(line)
        if match is None:
            raise ParseException()
        self.matches = match.groups()

    def up_minutes(self):
        """Total uptime in minutes, normalized from either the
        'N min' form or the 'N days, HH:MM' form of the uptime field."""
        if 'min' in self.matches[1]:
            return int(self.matches[7])
        days = int(self.matches[3])
        hours = int(self.matches[4])
        minutes = int(self.matches[5])
        return ((days * 24) + hours) * 60 + minutes
    up_minutes = property(up_minutes)

    def users(self):
        # number of logged-in users reported by uptime
        return int(self.matches[8])
    users = property(users)

    def load1(self):
        # 1-minute load average
        return float(self.matches[9])
    load1 = property(load1)

    def load5(self):
        # 5-minute load average
        return float(self.matches[10])
    load5 = property(load5)

    def load15(self):
        # 15-minute load average
        return float(self.matches[11])
    load15 = property(load15)

    def datetime(self):
        """Combine the supplied date with the HH:MM:SS wall-clock time
        captured from the uptime line."""
        d = self.date
        t = self.matches[0]
        return datetime(d.year, d.month, d.day,
                        int(t[0:2]), int(t[3:5]), int(t[6:8]))
    datetime = property(datetime)

    def uptime(self):
        """Build an Uptime value object from the parsed fields."""
        return Uptime(time=self.datetime,
                      up_minutes=self.up_minutes,
                      users=self.users,
                      load1=self.load1,
                      load5=self.load5,
                      load15=self.load15)
    uptime = property(uptime)

    def __repr__(self):
        # BUG FIX: original used `print self.line`, which returned None --
        # __repr__ must return a string.
        return self.line
    
class Parser(object):
    """Abstract class for Parsers, must be subclassed.

    Subclasses parse uptime output from some input (file, directory,
    stdin) and store the readings through self.db.
    """

    def __init__(self):
        # open the shared database backend; the db file path comes from
        # the Borg-style Config object
        self.db = DBBackend(Config().dbfile)

    def parse(self):
        """
        Must override.  Does not need to return anything.
        """
        raise NotImplementedException()
    
class FileParser(Parser):
    """Parses a file full of uptime output for a single date.
    Stores the result into the uptime database.

    The date must be on the end of the file, in the YYYYMMDD format.
    For example "hostA_20080731"
    """
    def __init__(self, filename):
        """
        Input:
        filename - ending in YYYYMMDD
        """
        # BUG FIX: was `super.__init__()`, which raises AttributeError on
        # the `super` builtin itself.
        super(FileParser, self).__init__()
        self.filename = filename

    def date(self):
        """Provide the date based upon the filename."""
        # FIXME will blow up when directory involved.  better regex, maybe specified
        match = FILENAME_RE.match(self.filename)
        date_str = match.group(1)
        return datetime(int(date_str[0:4]), int(date_str[4:6]), int(date_str[6:8]))

    def parse(self):
        """Opens the file and processes each line, storing each reading."""
        # compute the date once instead of re-parsing the filename per line
        file_date = self.date()
        f = open(self.filename)
        try:
            for line in f:
                lp = LineParser(line, file_date)
                # BUG FIX: was `db.store(...)` -- a NameError; the backend
                # lives on self.db (set up by Parser.__init__)
                self.db.store(lp.uptime)
        finally:
            # ensure the file is closed even if a line fails to parse
            f.close()
        # drop the backend reference so DBBackend.__del__ closes the db
        del(self.db)

class DirectoryParser(Parser):
    """Parses all the files in a directory and stores all the results in
    the database.  Ideal for processing stored results in bulk.
    """
    def __init__(self, dirpath):
        """
        Input:
        dirpath - the directory path to process.
        """
        # BUG FIX: was `super.__init__()`, which raises AttributeError.
        super(DirectoryParser, self).__init__()
        self.dirpath = dirpath
        # the database connection is not needed since we pass the
        # effort onto individual FileParsers
        del(self.db)

    def parse(self):
        """Run a FileParser over every file in the directory."""
        for filename in os.listdir(self.dirpath):
            # BUG FIX (was FIXME): use os.path.join instead of raw string
            # concatenation, which broke unless dirpath ended with a slash;
            # also avoid shadowing the `file` builtin
            fp = FileParser(os.path.join(self.dirpath, filename))
            fp.parse()
        
class StdinParser(Parser):
    """Parser that reads from stdin and stores the results in the database.
    Ideal for piping the results of uptime into uptimemonitor.py.
    """
    def __init__(self):
        # BUG FIX: was `super.__init__()`, which raises AttributeError.
        super(StdinParser, self).__init__()
        # remember the date once at startup so every line gets the same date
        self.startdate = datetime.today()
        # FIXME Possible bug when day rolls over and we are processing from previous day

    def date(self):
        """The date applied to every reading (captured at construction)."""
        return self.startdate

    def parse(self):
        """Read uptime lines from stdin and store each reading."""
        for line in sys.stdin.readlines():
            lp = LineParser(line, self.date())
            # BUG FIX: was `db.store(...)` -- a NameError; use self.db
            self.db.store(lp.uptime)
        # drop the backend reference so DBBackend.__del__ closes the db
        del(self.db)

class Backend(object):
    """Interface for persisting and retrieving Uptime objects."""

    def store(self, uptime):
        """Persist one Uptime reading.  Subclasses must override."""
        raise NotImplementedException()

    def getall(self):
        """Return all stored Uptime readings.  Subclasses must override."""
        raise NotImplementedException()

class WebBackend(Backend):
    """Storage backend that uploads the data to the UptimeMonitor webservice.

    Requires apikey to the UptimeMonitor webservice (http://<url>/register)
    """
    def __init__(self, url, servername, apikey):
        # BUG FIX: original read `self.url` / `self.apikey` without ever
        # assigning them, raising AttributeError on first use.
        self.url = url
        self.apikey = apikey
        self.servername = servername
        # NOTE(review): `Http` is not defined or imported in this module --
        # presumably httplib2.Http was intended; confirm and add the import.
        self.conn = Http()

    def store(self, uptime):
        """POST one json-pickled Uptime reading to the webservice."""
        json = jsonpickle.dumps(uptime)
        # BUG FIX: `servername` was a bare name (NameError); use the
        # attribute stored in __init__
        data = dict(apikey=self.apikey,
                    servername=self.servername,
                    uptime=json)
        resp, content = self.conn.request(self.url,
                                          'POST',
                                          urlencode(data))
        # TODO handle errors, log errors

    def getall(self):
        """Retrieval from the webservice is not supported."""
        raise NotImplementedException()

class DBBackend(Backend):
    """SQLite-backed storage for Uptime readings.

    One row per reading in the `uptime` table; the connection is closed
    when the backend object is garbage collected.
    """
    def __init__(self, dbfile):
        """Open (or create) the sqlite database at `dbfile`."""
        self._conn = sqlite3.connect(dbfile)

    def store(self, uptime):
        """Insert one Uptime reading into the `uptime` table."""
        # the `date` column is declared UNIQUE, so storing the same
        # timestamp twice raises sqlite3.IntegrityError
        self._execute('INSERT INTO uptime VALUES (?,?,?,?,?,?)',
                      vars=(uptime.time, uptime.up_minutes, uptime.users,
                            uptime.load1, uptime.load5, uptime.load15),
                      commit=True)

    def getall(self):
        """Return every stored reading as a list of Uptime objects,
        ordered by time."""
        q = self._execute('SELECT time,up,users,load1,load5,load15 FROM uptime ORDER BY time',
                          close=False)
        # BUG FIX: original closed an undefined cursor `c` (NameError) and
        # never returned the list it built; also renamed `all` to avoid
        # shadowing the builtin.
        results = []
        for r in q:
            results.append(Uptime(time=r[0], up_minutes=r[1], users=r[2],
                                  load1=r[3], load5=r[4], load15=r[5]))
        q.close()
        return results

    def _execute(self, sql, vars=None, close=True, commit=False):
        """Run `sql` on a fresh cursor, optionally committing and/or
        closing.  Returns the cursor (useful when close=False)."""
        c = self._conn.cursor()
        if vars:
            c.execute(sql, vars)
        else:
            c.execute(sql)
        if commit:
            self._conn.commit()
        if close:
            c.close()
        return c

    def __del__(self):
        # release the sqlite connection when the backend goes away
        self._conn.close()

    def createdb(cls, dbfile):
        """Create the `uptime` table schema in a new database file."""
        db = DBBackend(dbfile)
        db._execute('CREATE TABLE uptime (date TIMESTAMP UNIQUE, up INTEGER, users INTEGER, load1 REAL, load5 REAL, load15 REAL)')
        del(db)
    createdb = classmethod(createdb)

class Config(object):
    """Configuration file model.
    Implements the Borg pattern (http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66531):
    every instance shares one state dict, so Config() may be constructed
    anywhere and always sees the same settings.
    """
    # BUG FIX: this class was commented out even though Parser.__init__
    # and Analyzer.__init__ call Config(), which raised NameError.  The
    # commented version also initialized __shared_state from a method that
    # was not yet defined at class-body execution time; the defaults are
    # inlined here instead.
    __shared_state = {'dbfile': 'temp.db'}

    def __init__(self):
        # Borg pattern: all instances alias the same attribute dict
        self.__dict__ = self.__shared_state
        
class Analyzer(object):
    """Abstract base for analyses over the stored uptime readings.

    Loads every Uptime row from the configured database into self.data;
    subclasses implement analyze().
    """
    def __init__(self):
        # open the configured database and pull all readings up front
        self.db = DBBackend(Config().dbfile)
        self.data = self.db.getall()

    def analyze(self):
        """Must override.  Computes statistics from self.data."""
        raise NotImplementedException()

class DescriptiveStatistics(Analyzer):
    """Summary statistics over the 1-minute load averages of all readings."""

    def analyze(self):
        load1_values = [u.load1 for u in self.data]
        self.load = numpy.array(load1_values)
        self.stats = Stats(self.load)


class HourAnalyzer(Analyzer):
    """Groups the 1-minute load averages by hour of day and computes
    per-hour descriptive statistics into self.byhour_stats."""

    def analyze(self, data=None):
        """Populate self.byhour_stats, a dict mapping hour (0-23) to a
        Stats object over that hour's load1 readings.

        The `data` parameter is kept for backward compatibility with the
        original signature but is ignored; readings come from self.data.
        """
        byhour = {}
        for u in self.data:
            # group each reading's 1-minute load by the hour it was taken
            byhour.setdefault(u.time.hour, []).append(u.load1)
        self.byhour_stats = {}
        for hour, loads in byhour.items():
            # BUG FIX: original assigned to a bare `byhour_stats[k]`
            # (NameError); the results belong on self.byhour_stats
            self.byhour_stats[hour] = Stats(numpy.array(loads))
            
    
class Stats(object):
    """Descriptive statistics over a 1-d numeric array.

    NOTE(review): relies on scipy.stats.mean/std/median, which were
    deprecated and later removed from scipy in favor of the numpy
    equivalents -- this class only runs against the old scipy API.
    """
    def __init__(self, data):
        """
        data is a 1-d array to perform the stats on.

        Example values (illustrative; full float precision will differ):

        >>> import numpy
        >>> data = numpy.array([10,20,4,2,23,12])
        >>> s = Stats(data)
        >>> s.mean
        11.8333333333
        >>> s.std
        8.40039681602
        >>> s.median
        11.0
        >>> s.interquartile
        (5.5, 18.0)
        >>> s.count
        6
        """
        self.data = data
        self.mean = stats.mean(data)
        self.std = stats.std(data)
        self.skew = stats.skew(data)
        self.kurtosis = stats.kurtosis(data)
        self.median = stats.median(data)
        # (25th percentile, 75th percentile)
        self.interquartile = (stats.scoreatpercentile(data,25), stats.scoreatpercentile(data,75))
        self.max = numpy.max(data)
        self.min = numpy.min(data)
        self.count = len(data)

            
#def run():
#    filenames = os.listdir('data')
#    results = []
#    for f in filenames:
#        results.extend(parse_log('data/'+f))
#
#    byhour_avg, byhour_std = aggregate_times(results)
#    return results, byhour_avg, byhour_std

if __name__ == '__main__':
    # Usage: uptimemonitor.py -f FILE   (parse a stored log file)
    #        ... | uptimemonitor.py     (parse uptime lines from stdin)
    args = sys.argv
    # BUG FIX: guard the argument count -- the original raised IndexError
    # when run with no arguments (or with `-f` but no filename)
    if len(args) > 2 and args[1] == '-f':
        p = FileParser(args[2])
    else:
        p = StdinParser()
    p.parse()
    
    