#   This program is free software; you can redistribute it and/or modify
#   it under the terms of the version 3 of the GNU Lesser General Public License
#   as published by the Free Software Foundation.
#
#   This program is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#   GNU General Public License for more details.
#
#   You should have received a copy of the GNU Lesser General Public License
#   along with this program; if not, write to the Free Software
#   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
#
# Copyright (c) NEC Deutschland GmbH, NEC HPC Europe
#
# $Id$
import logging
import sys
import time
from aggmon.databases.metric.metric_store import MetricStore
from basic_types.metric import Metric
from basic_types.num_record import NS_IN_S
from basic_types.num_record import NumRecord
from basic_types.log_record import LogRecord
from pymongo import MongoClient, ASCENDING
from bson.objectid import ObjectId
from pymongo.son_manipulator import AutoReference, NamespaceInjector
from copy import deepcopy
from threading import Lock

__all__ = ["MongoDBMetricStore"]

# Constants
# max number of records in TS (capped collection); oldest records are
# overwritten once this count (or the byte size below) is reached
MAX_RECORDS_NUM = 100000
MAX_RECORDS_LOG = 100000
# average TS record size in bytes; multiplied with the max record count to
# size the capped collections at creation time
RECORD_SIZE_NUM = 255
RECORD_SIZE_LOG = 255
# duration (in seconds) a metric of type log is regarded as valid; after this
# an intermediate "UNKNOWN" record is inserted into the time series
METRIC_TTL_LOG = 900


class MongoDBMetricStore(MetricStore):
    """
    MongoDB backed metric store.

    NUM and LOG type metrics are kept as one metadata document per
    (host, name) pair (collections `num_metadata` / `log_metadata`) plus one
    capped collection per metric (prefix `numts` / `logts` + metadata _id)
    holding the individual time series records. Record times are stored in
    nanoseconds (seconds * NS_IN_S). JOB type metrics are stored as plain
    documents in a single collection and have no time series.

    host_name and port could be something like:
    "vmn1:27017"
    "vmn1", 27017
    ["vmn1:27017", "vmn2:27017"]
    "mongodb://vmn1:27017,vmn2:27017"
    """
    kwds_allowed = ["host_name", "port", "replicaset"]
    # class level locks serializing metadata queries across threads
    getLastMetricByMetricName_lock = Lock()
    getMetricNames_lock = Lock()

    def __init__( self, metric_database, args, host_name="localhost:27017", port=None, db_name="metric",
                  col_num_metadata="num_metadata", ts_num_prefix="numts",
                  col_log_metadata="log_metadata", ts_log_prefix="logts",
                  col_job_metric="job_metric", **kwds ):
        self.group_path = args[0]
        if port and not isinstance( port, int ):
            port = int( port )
        self.metric_database = metric_database
        logging.info( "Instantiating metrics db (%s) at %s" % (self.__class__.__name__, host_name) )
        # connect db server using default w (write concern) and j (journal flush)
        self.__client = MongoClient( host_name, port )
        self.__db = self.__client[db_name]
        self.__db.add_son_manipulator( NamespaceInjector() )
        self.__db.add_son_manipulator( AutoReference( self.__db ) )
        self.__db_name = db_name
        self.__col_num_metadata = self.__db[col_num_metadata]
        self.__col_log_metadata = self.__db[col_log_metadata]
        self.__col_job_metric = self.__db[col_job_metric]
        self.__col_num_metadata_name = col_num_metadata
        self.__col_log_metadata_name = col_log_metadata
        self.__col_job_metric_name = col_job_metric
        self.__ts_num_prefix = ts_num_prefix
        self.__ts_log_prefix = ts_log_prefix
        try:
            # metadata documents are unique per (host, name); job metrics are
            # additionally keyed by their value
            self.__col_num_metadata.ensure_index( [("host", ASCENDING), ("name", ASCENDING)], unique=True )
            self.__col_log_metadata.ensure_index( [("host", ASCENDING), ("name", ASCENDING)], unique=True )
            self.__col_job_metric.ensure_index( [("host", ASCENDING), ("name", ASCENDING), ("value", ASCENDING)], unique=True )
        except Exception as e:
            logging.warning( "Failed to ensure index: '%s'!" % str( e ) )

    def _initTimeSeriesInfo( self, host_name, metric_name, **options ):
        # nothing to pre-compute for the MongoDB backend
        return None

    def __MongoDBEncodeMetric( self, metric ):
        """
        Convert a Metric object into the document(s) stored in MongoDB.

        Side effect: metric._id is set (created or normalized to ObjectId).
        Job metrics are returned as one flat document. For NUM/LOG metrics a
        metadata dict is returned whose "ts_record" key holds the time series
        record (time in ns, value, optional log "output").
        """
        if "_id" not in metric.__dict__:
            metric._id = ObjectId()    # create new unique ID
        else:
            metric._id = ObjectId( metric._id )
        if self.metric_database.isJobType( metric ):
            # job metrics carry no separate TS record
            return metric.__dict__
        # TS record gets its own unique id; times are stored in nanoseconds
        record_dict = { "_id" : ObjectId(), "time" : metric.time * NS_IN_S, "value" : metric.value }
        metric_dict = deepcopy( metric.__dict__ )
        del metric_dict["time"]
        del metric_dict["value"]
        # care for LOG type metrics: "output" belongs into the TS record
        if "output" in metric_dict:
            record_dict["output"] = metric_dict["output"]
            del metric_dict["output"]
        metric_dict["ts_record"] = record_dict
        return metric_dict

    def __MongoDBDecodeMetric( self, metric_dict ):
        """
        Inverse of __MongoDBEncodeMetric: build a Metric object from a stored
        document. "_id" is stringified, TS record time converted back to
        seconds, log "output" restored as attribute.
        """
        if self.metric_database.isJobType( metric_dict ):
            return Metric( **metric_dict )
        record_dict = deepcopy( metric_dict["ts_record"] )
        del metric_dict["ts_record"]
        if "_id" in metric_dict:
            metric_dict["_id"] = str( metric_dict["_id"] )
        # ns -> s (integer division truncates, matching the original long())
        metric = Metric( time=int( record_dict["time"] / NS_IN_S ), value=record_dict["value"], **metric_dict )
        # care for LOG type metrics
        if "output" in record_dict:
            metric.output = record_dict["output"]
        return metric

    def addMetric( self, new_metric ):
        """
        Store one metric, dispatching on its type (NUM, LOG or JOB).

        Raises TypeError for unsupported value data types.
        """
        # in case metrics arrive with a zero time stamp add the current time
        # (BUG FIX: 'time' is the module; previous code called it directly)
        if new_metric.time == 0:
            new_metric.time = time.time()

        logging.debug( "%s handle metric, %s" % (self.__class__.__name__, str( new_metric.__dict__ )) )

        if self.metric_database.isNumType( new_metric ):
            self.__addNumMetric( new_metric )
        elif self.metric_database.isLogType( new_metric ):
            self.__addLogMetric( new_metric )
        elif self.metric_database.isJobType( new_metric ):
            self.__addJobMetric( new_metric )
        else:
            logging.error( "MetricDatabase: skipping: unsupported value data type '%s' for %s" % (type( new_metric.value ).__name__, repr( new_metric ))  )
            # TODO: remove this exception later, when we understood why this happens
            # (explicit now; the former bare 'raise' surfaced as a TypeError anyway)
            raise TypeError( "unsupported value data type '%s'" % type( new_metric.value ).__name__ )

    def __addNumMetric( self, new_metric ):
        # add NUM type metric: first occurrence creates the capped TS
        # collection, later ones only append a TS record + refresh metadata
        last_metric = self.getLastMetricByMetricName( new_metric.host, new_metric.name )
        if not last_metric:
            # insert new metric, create new TS collection, insert TS Record, insert metric incl. DBRef
            metric_dict = self.__MongoDBEncodeMetric( new_metric )
            try:
                self.__db.create_collection( self.__ts_num_prefix + str( metric_dict["_id"] ), size=RECORD_SIZE_NUM*MAX_RECORDS_NUM, max=MAX_RECORDS_NUM, capped=True )
                self.__db[self.__ts_num_prefix + str( metric_dict["_id"] )].insert( metric_dict["ts_record"] )
                self.__col_num_metadata.save( metric_dict )
                logging.debug( "Insert num metric, %s" % str( metric_dict ) )
            except Exception as e:
                logging.error( "Failed to insert num metric, %s" % str( e ) )
        else:
            # insert new TS Record, update metric DBRef
            new_metric._id = ObjectId( last_metric._id )
            metric_dict = self.__MongoDBEncodeMetric( new_metric )
            try:
                self.__db[self.__ts_num_prefix + str( metric_dict["_id"] )].insert( metric_dict["ts_record"] )
                self.__col_num_metadata.save( metric_dict )
                logging.debug( "Insert num record, %s" % str( metric_dict["ts_record"] ) )
            except Exception as e:
                logging.error( "Failed to insert num record, %s" % str( e ) )

    def __addLogMetric( self, new_metric ):
        # add LOG type metric: value changes append a record, unchanged values
        # only bump the time of the newest record; a TTL violation inserts an
        # intermediate "UNKNOWN" record first
        last_metric = self.getLastMetricByMetricName( new_metric.host, new_metric.name )
        if last_metric is None:
            # insert new metric, create new TS collection, insert TS Record, insert metric incl. DBRef
            metric_dict = self.__MongoDBEncodeMetric( new_metric )
            try:
                self.__db.create_collection( self.__ts_log_prefix + str( metric_dict["_id"] ), size=RECORD_SIZE_LOG*MAX_RECORDS_LOG, max=MAX_RECORDS_LOG, capped=True )
                self.__db[self.__ts_log_prefix + str( metric_dict["_id"] )].insert( metric_dict["ts_record"] )
                self.__col_log_metadata.save( metric_dict )
                logging.debug( "Insert new LogType metric, %s" % str( metric_dict ) )
            except Exception as e:
                logging.error( "Failed to insert new log record or save new log metric, %s" % str( e ) )
            return
        # check if metric TTL exceeded and insert intermediate record if so
        if last_metric.time < new_metric.time - METRIC_TTL_LOG:
            last_metric.value = "UNKNOWN"
            last_metric.time = last_metric.time + METRIC_TTL_LOG
            last_metric.output = "Metric TTL exceeded! Record inserted on %i." % int( time.time() )
            metric_dict = self.__MongoDBEncodeMetric( last_metric )
            try:
                self.__db[self.__ts_log_prefix + str( metric_dict["_id"] )].insert( metric_dict["ts_record"] )
                self.__col_log_metadata.save( metric_dict )
                logging.debug( "Insert intermediate log metric, %s" % str( metric_dict ) )
            except Exception as e:
                raise Exception( "Failed to insert intermediate log record or save log metric: %s" % str( e ) )
        if last_metric.value != new_metric.value or last_metric.output != new_metric.output:
            # insert new TS Record, update metric DBRef
            # (BUG FIX: keep the existing ObjectId; assigning the decoded
            # string id made save() insert a colliding duplicate document)
            new_metric._id = ObjectId( last_metric._id )
            metric_dict = self.__MongoDBEncodeMetric( new_metric )
            try:
                self.__db[self.__ts_log_prefix + str( metric_dict["_id"] )].insert( metric_dict["ts_record"] )
                self.__col_log_metadata.save( metric_dict )
                logging.debug( "Insert new log record, %s" % str( metric_dict["ts_record"] ) )
            except Exception as e:
                raise Exception( "Failed to insert new log record or save log metric: %s" % str( e ) )
        else:
            # value unchanged: only update time in the newest TS Record
            try:
                update = new_metric.time
                _id = ObjectId( last_metric._id )
                self.__db[self.__ts_log_prefix + str( _id )].find_and_modify( {"value" : last_metric.value, "output" : last_metric.output},
                                                                 {"$set" : {"time" : update}}, sort=[("time", ASCENDING)] )
                logging.debug( "Update time (%s) in last log record (value: %s, output: %s)" % (str( update ), str( last_metric.value ), str( last_metric.output)) )
            except Exception as e:
                raise Exception( "Failed to update time in log record: %s" % str( e ) )

    def __addJobMetric( self, new_metric ):
        # add JOB type metric: a single flat document, no time series
        metric_dict = self.__MongoDBEncodeMetric( new_metric )
        try:
            self.__col_job_metric.save( metric_dict )
            logging.debug( "Insert job metric, %s" % str( metric_dict ) )
        except Exception as e:
            logging.error( "Failed to save job metric, %s" % str( e ) )

    def clearMetric( self, host_name, metric_name ):
        """
        Remove a metric's metadata and drop its time series collection.

        Returns True on success, False when the metric does not exist or has
        an unknown type, None when no database connection is available.
        """
        if self.__db:
            metric = self.getLastMetricByMetricName( host_name, metric_name )
            if not metric:
                return False
            if self.metric_database.isJobType( metric ):
                # remove job metric; job metrics have no TS collection
                self.__col_job_metric.remove( {"host" : host_name, "name" : metric_name} )
                return True
            _id = str( metric._id )
            if self.metric_database.isNumType( metric ):
                # remove metric meta data record
                self.__col_num_metadata.remove( {"host" : host_name, "name" : metric_name} )
                ts_col_name = self.__ts_num_prefix + _id
            elif self.metric_database.isLogType( metric ):
                # remove metric meta data record
                self.__col_log_metadata.remove( {"host" : host_name, "name" : metric_name} )
                ts_col_name = self.__ts_log_prefix + _id
            else:
                # BUG FIX: formerly fell through with ts_col_name unbound
                return False
            # drop TS collection for that metric
            self.__db[ts_col_name].drop()
            return True

    def getHostNames( self, metric_types=("num", "log", "job") ):
        """Return the set of host names that have metrics of the given types."""
        hosts = set()
        try:
            if "num" in metric_types:
                hosts |= set( self.__col_num_metadata.distinct( "host" ) )
            if "log" in metric_types:
                hosts |= set( self.__col_log_metadata.distinct( "host" ) )
            # BUG FIX: the job collection was guarded by "num" instead of "job"
            if "job" in metric_types:
                hosts |= set( self.__col_job_metric.distinct( "host" ) )
        except Exception as e:
            logging.error( "Query getHostNames failed, %s" % str( e ) )
        return hosts

    def getLastMetricsByHostName( self, host_name ):
        """Return the newest metric of each name known for host_name."""
        # TODO: replace loop by a smarter query
        metrics = []
        try:
            # metadata is unique per (host, name), so limit(1) yields the
            # single metadata document (carrying the latest TS record)
            for name in self.getMetricNames( host_name ):
                metrics.extend( [self.__MongoDBDecodeMetric( m.to_dict() ) for m in self.__col_num_metadata.find(
                              {"name" : name, "host" : host_name},
                              ).sort( "time", 1 ).limit( 1 )] )
                metrics.extend( [self.__MongoDBDecodeMetric( m.to_dict() ) for m in self.__col_log_metadata.find(
                              {"name" : name, "host" : host_name},
                              ).sort( "time", 1 ).limit( 1 )] )
                metrics.extend( [self.__MongoDBDecodeMetric( m.to_dict() ) for m in self.__col_job_metric.find(
                              {"name" : name, "host" : host_name},
                              ).sort( "time", 1 ).limit( 1 )] )
        except Exception as e:
            logging.error("Query getLastMetricsByHostName failed, %s" % str( e ))
        return metrics

    def getLastMetricByMetricName( self, host_name, metric_name ):
        """
        Return the newest metric for (host_name, metric_name), or None.

        Logs an error (and returns None) if the name matches in more than one
        metric type collection.
        """
        # TODO: check if as_class attribute could be used
        with MongoDBMetricStore.getLastMetricByMetricName_lock:
            metric = None
            metrics = []
            try:
                metrics.extend( [self.__MongoDBDecodeMetric( m.to_dict() ) for m in self.__col_num_metadata.find(
                                        {"name" : metric_name, "host" : host_name} ).limit( 1 )] )
                metrics.extend( [self.__MongoDBDecodeMetric( m.to_dict() ) for m in self.__col_log_metadata.find(
                                        {"name" : metric_name, "host" : host_name} ).limit( 1 )] )
                metrics.extend( [self.__MongoDBDecodeMetric( m.to_dict() ) for m in self.__col_job_metric.find(
                                        {"name" : metric_name, "host" : host_name} ).limit( 1 )] )
            except Exception as e:
                logging.error( "Query getLastMetricByMetricName failed, %s" % str( e ) )
            if len( metrics ) > 1:
                logging.error( "Metric name %s is not unique for host %s!" % (metric_name, host_name) )
            elif metrics:
                metric = metrics[0]
            return metric

    def getMetricNames( self, host_name ):
        """Return the distinct metric names (all types) known for host_name."""
        with MongoDBMetricStore.getMetricNames_lock:
            names = []
            try:
                names.extend( self.__col_num_metadata.find( {"host" : host_name}, {"name" : 1} ).distinct( "name" ) )
                names.extend( self.__col_log_metadata.find( {"host" : host_name}, {"name" : 1} ).distinct( "name" ) )
                names.extend( self.__col_job_metric.find( {"host" : host_name}, {"name" : 1} ).distinct( "name" ) )
            except Exception as e:
                logging.error( "Query getMetricNames failed, %s" % str( e ) )
            return names

    def getRecordsByMetricName( self, host_name, metric_name, start_s=0, end_s=0, nsteps=0, step_s=0 ):
        """
        Return the TS records of a metric within [start_s, end_s] (seconds).

        end_s of 0/None means "no upper bound". Job metrics yield an empty
        list since they are not stored as time series.
        """
        # TODO: add averaging using nsteps and steps_s
        if start_s is None:
            start_s = 0
        # BUG FIX: records store time in nanoseconds, so the second-valued
        # bounds must be scaled; the unbounded case keeps sys.maxint which
        # still fits BSON's 64 bit integers
        start_ns = start_s * NS_IN_S
        if end_s == 0 or end_s is None:
            end_ns = sys.maxint
        else:
            end_ns = end_s * NS_IN_S
        records = []
        try:
            metric = self.getLastMetricByMetricName( host_name, metric_name )
            if not metric:
                return records
            _id = metric._id
            if self.metric_database.isNumType( metric ):
                records = [NumRecord( time_ns=r["time"], value=r["value"] ) for r in self.__db[self.__ts_num_prefix + str( _id )].find(
                                        {"time" : {"$gt" : start_ns, "$lt" : end_ns}} )]
            elif self.metric_database.isLogType( metric ):
                records = [LogRecord( time_ns=r["time"], value=r["value"] ) for r in self.__db[self.__ts_log_prefix + str( _id )].find(
                                        {"time" : {"$gt" : start_ns, "$lt" : end_ns}} )]
            elif self.metric_database.isJobType( metric ):
                # job type metrics are not stored as TS
                pass
        except Exception as e:
            logging.error("Record query failed, %s" % str( e ))
        return records

    def getJobMetrics( self, host_name=None, metric_name=None, start_s=0, end_s=0 ):
        """
        Return job metrics whose start or end time lies within [start_s, end_s]
        (seconds), optionally filtered by host and/or metric name.
        """
        if end_s == 0 or end_s is None:
            end_s = sys.maxint
        if start_s is None:
            start_s = 0
        # match records whose start or end time lies between start_s and end_s
        search = { "$or" : [{"start" : {"$gt" : start_s, "$lt" : end_s}}, {"end" : {"$gt" : start_s, "$lt" : end_s}}]}
        if host_name:
            search["host"] = host_name
        if metric_name:
            search["name"] = metric_name
        metrics = []
        try:
            metrics.extend( [self.__MongoDBDecodeMetric( m.to_dict() ) for m in self.__col_job_metric.find( search )] )
        except Exception as e:
            logging.error( "Query JobMetrics failed, %s" % str( e ) )
        return metrics

    def getSummary( self, path ):
        # TODO: not implemented for the MongoDB backend yet
        return None

    def getTimeSeriesType( self, host_name, metric_name ):
        """Return "NUM" or "LOG" for the metric's TS type, None otherwise."""
        metric = self.getLastMetricByMetricName( host_name, metric_name )
        # guard against unknown metrics instead of probing None's type
        if metric is None:
            return None
        if self.metric_database.isNumType( metric ):
            return "NUM"
        elif self.metric_database.isLogType( metric ):
            return "LOG"
        return None