#!/usr/bin/env python

## Copyright (C) 2008 Navid Sheikhol-Eslami <navid@navid.it>

## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.

## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.

## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

import os, sys, time, re, pdb, commands, glob, traceback
import datetime, copy, random
from threading import Thread, Lock, Semaphore
from helpers import *
import tarfile, tempfile, logging
from logst.sosreport import sosreport_class
import shelve

# Event "groupability" levels used by cluster_events_class.add(): how eagerly
# a new log line may be merged into an existing event carrying the same text.
EVENT_NOT_GROUPABLE    = 0   # never merged; every occurrence is its own event
EVENT_HOST_GROUPABLE   = 3   # merged only with lines from the same host
EVENT_ALWAYS_GROUPABLE = 5   # merged across all hosts

class cluster_class:
   """A set of hosts (host_class objects keyed by hostname), built by
   feeding files/URLs/tarballs/directories to add_log().  Base class of
   cluster_parser_class."""

   def __init__(self):

      self.hosts = {}               # hostname -> host_class
      self.daemon_log_counter = {}  # daemon name -> line count
      self.shutdown_requested = False
      self.logger = logging.getLogger('logpox')

   def is_master(self):

      # Deprecated entry point.  NOTE(review): string exceptions raise
      # TypeError on Python >= 2.6; the return below is unreachable.
      raise "Deprecated"

      return True

   def should_shutdown(self):
      """True once shutdown() has been requested (polled by worker loops)."""

      return self.shutdown_requested == True

   def shutdown(self):
      """Persist parser options, then flag every loop to stop."""

      self.logger.info("cluster: shutdown requested")
      self.store_parsers_options()
      self.shutdown_requested = True

   def host_names(self):
      # ksort() comes from helpers; presumably returns the dict's keys
      # sorted -- TODO confirm against helpers.py
      return ksort(self.hosts)

   def count_hosts(self):
      return len(self.hosts)

   def list_hosts(self):
      """Yield the host_class objects in hostname order."""
      for hostname in self.host_names():
         yield self.hosts[hostname]

   def get_filenames(self):
      """All log file names across every host."""
      toret = []
      for host in self.list_hosts():
         toret.extend(host.get_filenames())
      return toret

   def get_host(self, host):
      # raises KeyError for unknown hostnames
      return self.hosts[host]

   def get_logs_at_date(self, find_date):
      """Map hostname -> log line at/after find_date (see
      host_class.get_log_at_date; value may be None)."""

      log_ptrs = {}

      for hostname in self.host_names():
         log_ptrs[hostname] = self.get_host(hostname).get_log_at_date(find_date)

      return log_ptrs

   def get_dates_at_position(self, positions):
      """Map hostname -> date of the line at positions[hostname]."""

      toret = {}

      for hostname in self.host_names():
         toret[hostname] = self.get_host(hostname).get_date_at_position(positions[hostname])

      return toret

   def get_position_by_date(self, goto_date):
      """Map hostname -> position of the line at goto_date, or None when
      that host's logs end before the date."""
      toret = {}
      for host in self.hosts:

         log = self.hosts[host].get_log_at_date(goto_date)

         if log:
            toret[host] = log.position
         else:
            toret[host] = None

      return toret

   def get_log_by_id(self, log_id):
      """Resolve a "<hostname>_<position>" id (log_line_class.id()) to a line.

      FIXME(review): split("_", 2) yields up to three fields, so a hostname
      containing '_' makes this unpack fail -- probably wants rsplit("_", 1).
      """

      hostname, pos = log_id.split("_", 2)

      return self.get_host(hostname).get_line(pos)

   def get_line_by_internal_id(self, int_line_id):

      # (host_index, position)
      # FIXME(review): self.hosts is a dict -- calling it raises TypeError;
      # this looks like it wants an index into an ordered host list.

      return self.hosts(int_line_id[0]).get_line(int_line_id[1])

   def get_line_by_internal_id_range(self, int_line_id_range):

      # (host_index, position_start, position_stop)
      # FIXME(review): int_line_id is undefined here (the parameter is
      # int_line_id_range), and the stop position is never used.

      return self.hosts(int_line_id[0]).get_line(int_line_id[1])

   def size(self):
      """Total byte size of every log file on every host."""

      return sum ( [ host.size() for host in self.list_hosts() ] )

   def add_log(self, logname, from_where = ""):
      """Ingest a log source and register it with the owning host.

      Accepts a URL (fetched with wget), a tar archive (sosreport), a
      gzipped "messages" file, a directory, or a plain file; recurses for
      the container cases.  Returns the logfile_class on success, False on
      failure.  Temp files go to self.CONFIG["tmpdir"] (CONFIG is attached
      elsewhere -- not defined in this class).
      """

      if logname.startswith("http://") or logname.startswith("https://") or logname.startswith("ftp://"):

         # NOTE(review): mkstemp()[1] keeps only the path; the open file
         # descriptor returned in [0] is leaked.
         tmp_logname = tempfile.mkstemp(prefix = "wgetted_%s_" % os.path.basename(logname), dir = self.CONFIG["tmpdir"])[1]

         if os.system("""wget --quiet --no-check-certificate --output-document='%s' '%s'""" % (tmp_logname, logname) ) != 0:
            return False

         log = self.add_log(tmp_logname, from_where = logname)

         return log

      if os.path.isfile(logname) and tarfile.is_tarfile(logname):
         self.logger.info("""extracting archive "%s"...""" % logname)

         tmpdir = tempfile.mkdtemp(prefix = "untarred_%s_" % os.path.basename(logname), dir = self.CONFIG["tmpdir"])

         tar = tarfile.open(logname, "r")
         # remember the first member on the tar object itself; presumably
         # consumed later by sosreport_class -- TODO confirm
         tar.first = tar.next()
         hostname = None
         tmplog = None

         # pull every "messages*" member out of the archive and ingest it
         for fname in tar.getnames():
            if os.path.basename(fname).startswith("messages"):

               tar.extract(fname, tmpdir)

               tmplog = self.add_log( "%s/%s" % (tmpdir, fname), from_where = if_true(len(from_where) > 0, from_where, logname) )

               if not hostname and tmplog:
                  hostname = tmplog.hostname()

         if hostname:
            # the archive doubles as this host's sosreport
            self.get_host( hostname ).set_sos(tar)

         return tmplog

      elif os.path.isfile(logname) and os.path.basename(logname).startswith("messages") and logname[-3:] == ".gz":

         self.logger.info("""decompressing "%s"...""" % logname)
         tmp_logname = tempfile.mkstemp(prefix = "ungzipped_%s_" % os.path.basename(logname), dir = self.CONFIG["tmpdir"])[1]
         os.system("""gzip -dc '%s' > '%s'""" % (logname, tmp_logname))
         return self.add_log(tmp_logname)

      elif os.path.isdir(logname):

         # a plain directory or an unpacked sosreport tree
         toret = None

         for sos_log in glob.glob(logname + "/messages*"):
            toret = self.add_log(sos_log)

         for sos_log in glob.glob(logname + "/var/log/messages*"):
            toret = self.add_log(sos_log)

         if toret:
            self.get_host(toret.hostname()).set_sos(logname)

         return toret

      self.logger.info("""adding log "%s"...""" % logname)

      try:
         log = logfile_class(logname)
      except IOError:
         self.logger.error("""adding "%s" failed (file does not exist)""" % logname)
         return False
      except "Invalid_File":
         # NOTE(review): catching a string exception only works on Python < 2.6
         self.logger.error("""adding "%s" failed""" % logname)
         return False

      if log.time_end() < log.time_begin():
         self.logger.error("""adding "%s" failed (invalid timestamps)""" % logname)
         return False

      hostname = log.hostname()

      self.logger.info("""adding log "%s" for host %s""" % (log.fname, hostname) )

      if not self.hosts.has_key(hostname):   # has_key: Python 2 only
         self.hosts[hostname] = host_class(cluster = self)

      if self.hosts[hostname].add_log(log):
         log.set_parent_host( self.hosts[hostname] )

         if from_where:
            # keep track of the original source (URL/archive) in the display name
            log.set_name( "%s (from %s)" % (os.path.basename(log.fname), from_where) )

         return log

      return False

   def filename_to_host(self, filename):
      """Return the host_class owning filename, or None."""

      # find out what host the file belongs to
      for host in self.hosts:
         host = self.hosts[host]
         if host.get_filenames().count(filename) > 0:
            return host
      return None

   def time_begin(self):
      """Earliest timestamp across all hosts."""

      return min( [ host.time_begin() for host in self.hosts.values() ] )

   def time_end(self):
      """Latest timestamp across all hosts."""

      return max( [ host.time_end() for host in self.hosts.values() ] )

class cluster_hints_class:
   """Registry of regex-triggered hints: each hint decorates matching log
   messages with an overlib tooltip when rendered in the web UI."""

   def __init__(self):

      self.hints = []
      self.logger = logging.getLogger('logpox')

   class hint_class:
      """One hint: a wrapped/compiled regex, the tooltip text and an
      optional hostname filter (empty list = applies to every host)."""

      regex = None  # what to look for
      hint  = None  # associated hint
      hosts = []    # apply to these hosts only

   def add(self, regex, hint_txt, pre_regex = "", post_regex = "", hostnames = None):
      """Register a hint; exact duplicates (same pattern, same host filter)
      are silently dropped.

      FIX: hostnames previously defaulted to a shared mutable list ([]),
      aliasing the same object across calls; it now defaults to None.
      """

      if hostnames is None:
         hostnames = []

      if len(regex) == 0:
         self.logger.warn("invalid empty regex with hint_txt: %s" % hint_txt)
         return

      hint = self.hint_class()
      # groups: \1 = pre context, \2 = the hinted text, \3 = post context
      hint.regex     = "(%s)(%s)(%s)" % (pre_regex, regex, post_regex)
      # cheap literal substring (helpers.strings) used as a pre-filter
      # before paying for a full regex match in hintify_log()
      hint.quick_txt = strings(regex, 3)
      hint.regex_c   = re.compile(hint.regex)
      # single quotes escaped because the hint is embedded in a JS string
      hint.hint      = hint_txt.replace("'", "\\'")
      hint.hostnames = hostnames

      for old_hint in self.hints:
         if hint.regex == old_hint.regex and hint.hostnames == old_hint.hostnames:
            self.logger.debug("skipping duplicate regex: %s" % hint.regex)
            return

      self.logger.debug("adding hint %s -> %s (quick %s)" % (regex, hint_txt, hint.quick_txt))

      self.hints.append(hint)

   def list(self):
      """Return the raw list of registered hint objects."""

      return self.hints

   def hintify_log(self, log):
      """Return the HTML-escaped message of `log`, with the first matching
      hint wrapped in an overlib tooltip <span>.  Caches the outcome in the
      line's "has_hints" overlay attribute so later calls are cheap."""

      toret = html_quote( log.message() )

      # a previous run already established that no hint matches this line
      if log.get("has_hints") == False:
         return toret

      for hint in self.hints:

         if len(hint.hostnames) == 0 or log.hostname() in hint.hostnames:

            if not toret.find( hint.quick_txt ) >= 0:
               # let's first see if it's worth running a regex against it
               continue

            div_id = "hint_%d" % random.randint(1,9999999)

            (toret, nsubs) = hint.regex_c.subn(r"""\1<span class="hint" name="sample" id="%s" href="javascript:void(0);" onmouseover="return overlib('%s',TEXTPADDING,6,CAPTION,'Hint', WIDTH,200,BASE,2,REF,'%s',REFC,'UR',REFX,10,REFY,-25, FGCOLOR,'#ffffcc',BGCOLOR,'#333399',CGCOLOR,'#336699',CLOSECOLOR,'#ffccff', SHADOW,SHADOWX,2,SHADOWY,2);" onmouseout="nd();">\2</span>\3""" % (div_id, hint.hint, div_id ), toret )

            # eventually shouldn't return but keep adding hints
            if nsubs > 0:
               log.set("has_hints", True)
               break

      else:
         if len(self.hints) > 0:
            # we have hints, but nothing matched
            log.set("has_hints", False)

      return toret

class cluster_events_class:
   """Events distilled from log lines by the parsers.

   Each event aggregates one or more log lines; equal-text events may be
   merged depending on their groupability level (EVENT_* constants).
   """

   def __init__(self):

      self.events = []      # event_class objects, creation order until sort()
      self.events_ref = {}  # symbolic name -> event id (see event_class.set_ref)

   def get_by_ref(self, event_ref):
      """Resolve a symbolic reference ("starred", ...) to its event, or None."""

      # FIX: was `event_ref in self.events_ref.keys()` -- no need to build
      # a key list just for a membership test
      if event_ref in self.events_ref:
         return self.get(self.events_ref[event_ref])
      else:
         return None

   def get(self, event_id):
      """Linear lookup by event id; returns None when not found."""

      for event in self.events:
         if event.id == event_id:
            return event

   def add(self, text, log_lines, long_text = None, icon = None, groupable = EVENT_ALWAYS_GROUPABLE):
      """Attach log_lines to a matching existing event when grouping allows
      it, otherwise create a new event.  Returns the event either way."""

      for event in self.list():

         if event.groupable == EVENT_NOT_GROUPABLE:
            continue

         # same text, and long_text/icon either match or were not supplied
         if event.text == text and ( not long_text or event.long_text == long_text ) and ( event.icon == icon or not icon ):

            # host-groupable events only absorb lines from their own host
            if event.groupable == EVENT_HOST_GROUPABLE and log_lines[0].hostname() != event.logs[0].hostname():
               continue

            event.attach_log(log_lines)

            return event

      return self.create(text, log_lines = log_lines, long_text = long_text, icon = icon, groupable = groupable)

   def create(self, text, log_lines, long_text = None, icon = None, groupable = EVENT_ALWAYS_GROUPABLE):
      """Unconditionally create a new event and attach log_lines to it."""

      event = self.event_class(self)

      event.id = len(self.events)

      event.text = text

      event.groupable = groupable

      if icon:
         event.icon = icon

      if long_text:
         event.long_text = long_text

      for log in log_lines:
         event.attach_log(log)

      self.events.append(event)

      return event

   def __compare_events_younger(self, event1, event2):
      # Python 2 cmp-style comparator: order events by their start time
      if event1.time_begin() > event2.time_begin():
         return 1
      elif event1.time_begin() == event2.time_begin():
         return 0
      else:
         return -1

   def sort(self):
      """Sort events chronologically (Python 2 cmp-based list.sort)."""
      self.events.sort(self.__compare_events_younger)

   def list(self):
      return self.events

   def count(self):
      return len(self.events)

   def is_populated(self):

      # NOTE: events is initialized to a list and never set back to None,
      # so this is effectively always True; kept for interface compatibility
      return self.events != None

   def clear(self):

      self.events = []

   class event_class:
      """One event: descriptive text/icon plus the log lines backing it."""

      def __init__(self, parent_cluster_events):
         self.logs = []
         self.icon = "images/32px-Crystal_Clear_app_ktip.png"
         self.text = ""
         self.long_text = ""
         self.id = None
         self.parent = parent_cluster_events
         self.visible_in_browse = True
         self.groupable = EVENT_ALWAYS_GROUPABLE

      def get_long_text(self):
         """long_text may be a plain string or a callable taking the event."""

         if callable(self.long_text):
            return self.long_text(self)

         else:
            return self.long_text

      def set_ref(self, quickref_id):
         """Register a symbolic name for this event with the parent registry."""

         self.parent.events_ref[quickref_id] = self.id

      def attach_log(self, log):
         """Attach a log line (or a list of them), recording the event in
         the line's overlay so the UI can show the association."""

         if type(log) == list:
            for xlog in log:
               self.attach_log(xlog)
            return

         self.logs.append(log)

         log_events = log.get("events", [])
         log_events.append(self)
         log.set("events", log_events)

      def detach_log(self, log):
         """Detach the line with the same id(); the event removes itself
         from the registry when its last line goes.  True when removed.

         FIX: no longer removes from the list being iterated (it was safe
         before only because of the immediate return).
         """

         for inc, xlog in enumerate(self.logs):
            if log.id() == xlog.id():
               del self.logs[inc]
               if len(self.logs) == 0:
                  self.parent.events.remove(self)
               return True

         return False

      def time_begin(self):
         return min([ log.date() for log in self.logs ])

      def time_end(self):
         return max([ log.date() for log in self.logs ])

      def affected_hosts(self):
         """Unique hostnames of the attached lines, first-seen order."""
         toret = []
         for log in self.logs:
            if not log.hostname() in toret:
               toret.append(log.hostname())
         return toret

      def link_to_log(self, log):
         """Browse-view anchor for one of this event's lines."""
         return """/browse?date=%s#%s_%s""" % (log.timestamp(), log.hostname(), log.position)

class cluster_parser_class(cluster_class):
   """cluster_class plus the event-extraction machinery: pluggable parsers,
   the hint registry and the background parse threads."""

   def __init__(self):

      cluster_class.__init__(self)

      self.events = cluster_events_class()
      self.parsers = []
      self.daemon_log_counter = {}

      self.clear_events()

      self.hints = cluster_hints_class()

      # held for the whole of parse_hyper(); slow_internal_parser pauses on it
      self.parse_lock = Lock()

   def list_parsers(self):
      return self.parsers

   def register_parser(self, parser_class):
      """Add a parser instance, restoring its persisted disabled state.

      NOTE(review): the shelve handle is never closed here (unlike in
      store_parsers_options); has_key is Python 2 only.
      """

      storeme = shelve.open("%s/parsers_opts" % self.CONFIG["homedir"], writeback=True)

      if storeme.has_key("disabled") and parser_class.name() in storeme["disabled"]:
         parser_class.enabled = False

      self.parsers.append(parser_class)

   def store_parsers_options(self):
      """Persist every parser's disabled flag and option values to a shelve."""

      storeme = shelve.open("%s/parsers_opts" % self.CONFIG["homedir"], writeback=True)
      storeme["disabled"] = []
      storeme["options"] = {}

      for parser in self.list_parsers():

         if parser.is_disabled():
            storeme["disabled"].append(parser.name())

         storeme["options"][parser.name()] = {}

         for option in parser.list_options():

            storeme["options"][parser.name()][option.uid] = option.get_value()

      storeme.close()

   def get_parser(self, parser_name):
      """Look a parser up by its class name; None when unknown."""
      for parser in self.parsers:
         if parser.__class__.__name__ == parser_name:
            return parser

   def combine_parsers_regex_rules(self):
      """Union of the regex rules of every enabled parser."""
      toret = []
      for parser in self.parsers:
         if parser.is_enabled():
            toret.extend(parser.get_regex_rules())
      return toret

   def has_parsed(self):

      # events_populator is attached elsewhere (never set in this class) --
      # TODO confirm where it comes from
      return not self.is_parsing() and self.events_populator.has_parsed()

   def is_parsing(self):

      return self.parse_lock.locked() == True

   def clear_events(self):

      self.events.clear()

   def parse_hyper(self, threaded = True):
      """Run every enabled parser over the lines matching their combined
      regex rules, then hand each host's sosreport to sos-aware parsers.

      When threaded and no worker exists yet, re-invokes itself on a worker
      thread and returns the Thread; the worker re-enters with parse_t set
      and falls through to the real work below.
      """

      if threaded and (not hasattr(self,"parse_t") or self.parse_t == None):
         self.parse_t = Thread(target=self.parse_hyper, name='parse-thread', args = [True] )
         self.parse_t.start()
         return self.parse_t

      self.parse_lock.acquire()

      self.clear_events()

      self.logger.info("parsing begins")

      # single alternation of every enabled parser's rules: one search pass
      regex_plain = "(%s)" % "|".join( self.combine_parsers_regex_rules() )

      self.events_populator.search(regex_plain)

      for log in self.events_populator.fetch():

         parsed = False

         for parser_class in self.parsers:

            if parser_class.is_disabled():
               continue

            if parser_class.activate_check(log):

               try:
                  parser_class.run_parse(log)
               except:
                  # a broken parser must not kill the whole run
                  self.logger.error( "error while feeding %s:%s to %s" % ( log.host, log.position, parser_class.name() ) )
                  traceback.print_exc(file=sys.stdout)

               parsed = True

         if not parsed:
            self.logger.warn("strange log didnt match any parser: %s" % log)

      # give sos-aware parsers a shot at each host's sosreport
      for host in self.list_hosts():

         if not host.has_sos():
            continue

         for parser in self.parsers:
            if hasattr(parser, "sos_handler"):
               parser.sos_handler(host.sos)

      self.logger.info("parsing ends.")

      self.parse_lock.release()

      return True

   def search(self, regex, only_hosts = None, max_results = 100000):
      """One-off regex search across the cluster; returns matching lines."""

      xsearch = events_populator_class(self)
      xsearch.search(regex = regex, only_hosts = only_hosts, max_results = max_results)
      return [ log for log in xsearch.fetch() ]

   def slow_internal_parser(self, threaded = False):
      """Walk every line of every host (yielding to parse_hyper via
      parse_lock) to build the per-daemon line counters.

      NOTE(review): time.clock() measures CPU time on Unix (and was removed
      in Python 3.8), so the logged duration is not wall-clock time.
      """

      if threaded and (not hasattr(self,"slow_parse_t") or self.slow_parse_t == None):
         self.slow_parse_t = Thread(target=self.slow_internal_parser, name='slow-parse-thread', args = [True] )
         self.slow_parse_t.start()
         return self.slow_parse_t

      self.logger.info("slow parsing begins")
      time_begin = time.clock()

      counter = self.daemon_log_counter

      for host in self.list_hosts():

         log = host.get_line(0)

         while True:

            if self.should_shutdown():
               return False

            # parse_hyper has priority; back off while it holds the lock
            while self.parse_lock.locked():
               time.sleep(1)

            next_log = log.next()

            try: counter[log.daemon()] += 1
            except KeyError: counter[log.daemon()] = 1

            if not next_log:
               break

            log = next_log

      self.logger.info("slow parsing ends after %d seconds" % (time.clock() - time_begin) )

      return True

   def list_daemon_hits(self):
      """(daemon, count) pairs, most frequent first (helpers.dict_sort_by_value_r)."""

      return [ (key, self.daemon_log_counter[key]) for key in dict_sort_by_value_r(self.daemon_log_counter) ]

   def instance(self):
      """Return a per-request view of the cluster.

      NOTE(review): everything below the immediate `return self` is dead
      code -- the copy-on-instance machinery is effectively disabled and
      every caller shares the master object.
      """

      return self

      class cluster_instance_class(cluster_parser_class):

         def __init__(self, parent_cluster):

            self.parent_cluster = parent_cluster
            self.hosts = {}

            for host in self.parent_cluster.hosts:
               self.hosts[host] = host_class(self)

               for log in parent_cluster.hosts[host].logs:
                  logfile = copy.copy(log)
                  self.hosts[host].logs.append(logfile)
                  logfile.set_parent_host(self.hosts[host])

               self.hosts[host].sos = self.parent_cluster.hosts[host].sos

               self.hosts[host].log_overlay = self.parent_cluster.hosts[host].log_overlay

         def __getattr__(self, name):

            return getattr(self.parent_cluster, name)

         def is_master(self):

            return False

      return cluster_instance_class(parent_cluster = self)

class host_class:
   """All log files belonging to one host.

   The logfile_class objects are kept chronologically sorted in self.logs
   and exposed through one continuous "host position" space (byte offsets
   0 .. size()-1 across the concatenation of every file);
   convert_hostpos_to_filepos()/convert_filepos_to_hostpos() translate
   between the two coordinate systems.
   """

   def __init__(self, cluster):

      self.logger = logging.getLogger('logpox')

      self.logs = []            # logfile_class objects, oldest first
      self.logs_obj_cache = {}

      self.log_idx = 0 # first log
      self.log_ptr = 0 # first char

      self.sos = None           # sosreport_class once set_sos() succeeds

      self.parent_cluster = cluster

      self.lock = Lock()

      # host position -> {attribute: value} annotations (stars, hints, events)
      self.log_overlay = {}

   def __copy__(self):
      """Copy this host for a cluster instance: the logfile objects are
      copied (so each gets its own parent pointer), while sos and the
      overlay stay shared with the original.

      FIX(review): the previous implementation passed self as the cluster,
      indexed the non-existent self.hosts attribute and left a
      pdb.set_trace() behind; rebuilt after cluster_instance_class.__init__.
      """

      host = host_class(self.parent_cluster)

      for log in self.logs:
         logfile = copy.copy(log)
         host.logs.append(logfile)
         logfile.set_parent_host(host)

      host.sos = self.sos
      host.log_overlay = self.log_overlay

      return host

   def has_sos(self):
      """True when a sosreport was successfully attached to this host."""

      return hasattr(self, "sos") and self.sos != None

   def set_sos(self, sos):
      """Attach a sosreport (tar object or directory path).  Best-effort:
      failures are logged and the host simply stays without one."""

      try:
         self.sos = sosreport_class(sos, self)
      except:
         traceback.print_exc(file=sys.stdout)
         self.logger.warn("not a sosreport: %s" % sos)

   def has_log_overlay(self, position):
      """True if annotations exist for the line at this host position."""

      # FIX: membership test directly on the dict instead of .keys()
      return position in self.log_overlay

   def get_log_overlay(self, position):

      return self.log_overlay[position]

   def add_log_overlay(self, new_log):

      self.log_overlay[new_log.position] = new_log

   def get_first_line(self):

      return self.get_line(0)

   def get_last_line(self):

      return self.get_line(self.size()-1)

   def get_line(self, position):
      """Return the log_line_class at (or containing) the host position,
      with its position translated back into host coordinates."""

      idx, position = self.convert_hostpos_to_filepos(position)

      toret = self.get_file(idx).get_line(position)

      toret.position = self.convert_filepos_to_hostpos(idx, toret.position)

      return toret

   def get_date_at_position(self, position):

      return self.get_line(position).date()

   def find_line_start(self, position_start, xstep = 64):
      """Host-position of the beginning of the line containing position_start."""

      idx, relpos = self.convert_hostpos_to_filepos(position_start)
      # FIX: forward the caller's xstep instead of silently using the default
      return self.convert_filepos_to_hostpos(idx, self.get_file(idx).find_line_start(relpos, xstep) )

   def seek_and_readline(self, position):
      """Raw readline starting at the given host position."""

      idx, position = self.convert_hostpos_to_filepos(int(position))
      return self.get_file(idx).seek_and_readline(position)

   def seek_and_read(self, position, bytes = 128):
      """Raw read of up to `bytes` starting at the given host position."""

      idx, position = self.convert_hostpos_to_filepos(int(position))
      return self.get_file(idx).seek_and_read(position, bytes)

   def __str__(self):
      return self.hostname()

   def add_log(self, logfile):
      """Insert a logfile keeping self.logs chronologically sorted.

      Returns False for files already added; NOTE a file with exactly the
      same byte size as an existing one is treated as a duplicate (cheap
      heuristic against the same log reached via two paths).
      """

      if logfile.fname in self.get_filenames():
         return False

      for inc in range(0,len(self.logs)):

         if logfile.size() == self.logs[inc].size():
            self.logger.warn("skipping identical log file (already added).")
            return False

         if logfile.time_end() <= self.logs[inc].time_begin():
            self.logs.insert(inc, logfile)
            break

      else:
         self.logs.append(logfile)

      return True

   def hostname(self):
      """Hostname as reported by the first (oldest) log file."""

      return self.logs[0].hostname()

   def size(self):
      """Total byte size of all this host's log files."""

      return sum( [ logfile.size() for logfile in self.logs ] )

   def eof(self):

      # dead API; FIX: raise a real exception instead of a Python 2 string
      # exception, and drop the unreachable code that followed it
      raise RuntimeError("Deprecated")

   def time_begin(self):

      return self.logs[0].time_begin()

   def time_end(self):

      return self.logs[-1].time_end()

   def get_log_at_date(self, find_date):
      """Binary-search the earliest line whose date matches find_date.

      Returns the first line when our logs start after find_date, and None
      when they end before it.
      """

      if find_date < self.time_begin():
         # our logs begin after the date we are looking for
         return self.get_line(0)

      if find_date > self.time_end():
         # our logs finish before the date we are looking for
         return None

      # FIXME: remember some date/pos to speed this up

      pos_low = 0
      pos_high = self.size()

      # narrow the bisection range down to the single file covering find_date
      idx = 1
      while len(self.logs) > idx:

         if find_date < self.logs[idx].time_begin():

            pos_low  = self.convert_filepos_to_hostpos(idx-1, 0)
            pos_high = self.convert_filepos_to_hostpos(idx-1, self.logs[idx-1].size())
            break

         idx+=1

      # FIX: log was previously unbound when the range was already < 2 bytes
      log = self.get_line(pos_low)

      while pos_low < pos_high - 1:
         pos = int(pos_low + ((pos_high - pos_low) / 2) )

         log = self.get_line(pos)

         if log.date() < find_date:
            pos_low = pos
         elif log.date() > find_date:
            pos_high = pos
         if log.date() == find_date:
            # rewind to the first line carrying this exact timestamp
            while True:
               prev_log = log.previous()
               if not prev_log or prev_log.date() != log.date():
                  return log
               log = prev_log

      # the bisection undershot: walk forward to the first line past find_date
      counter = 0
      if log.date() < find_date:
         while True:
            next_log = log.next()
            if not next_log:
               break
            if next_log.date() > find_date:
               return next_log
            log = next_log

            counter+=1

            if counter > 1000:
               self.logger.error( "can't seek to given date, timestamps in files for %s are probably incorrect." % self.hostname() )
               break

      return log

   def search(self, regex, max_results = 1000):
      """Search only this host's logs via the cluster-wide search.

      FIX: used the non-existent self.cluster attribute and discarded the
      result; now delegates to parent_cluster and returns the matches.
      """

      return self.parent_cluster.search(regex, only_hosts = self.hostname(), max_results = max_results)

   def get_file(self, log_idx):

      return self.logs[log_idx]

   def fp(self):

      # dead API; see eof()
      raise RuntimeError("Deprecated")

   def get_filenames(self):
      """File names of this host's logs, in chronological order."""
      toret = []
      for log in self.logs:
         toret.append(log.fname)
      return toret

   def get_filenames_escaped(self):
      """Same as get_filenames(), regex-escaped for pattern building."""
      toret = []
      for log in self.logs:
         toret.append(re.escape(log.fname))
      return toret

   def dump_position_ranges(self):
      """Log the host-position range covered by each file (debug aid).

      FIX: routed through the logger instead of Python 2 print statements.
      """
      sumsize = 0
      self.logger.info("--- DUMP BEGINS FOR %s ---" % self.hostname())
      for inc in range(0, len(self.logs)):
         self.logger.info("idx %d\t%s\t(size %d)\t\t%d\t%d" % (inc, self.logs[inc].fname, self.logs[inc].size(), sumsize, sumsize + self.logs[inc].size()))
         sumsize += self.logs[inc].size()
      self.logger.info("--- DUMP ENDS FOR %s ---" % self.hostname())

   def convert_hostpos_to_filepos(self, offset):
      """Translate a host-wide position into (file index, offset-in-file).

      Raises ValueError when the position lies past the last file.
      """

      offset = int(offset)

      original_offset = offset

      sumsize = 0
      for inc in range(0, len(self.logs)):
         if offset < sumsize + self.logs[inc].size():
            offset -= sumsize
            return (inc, offset)
         sumsize += self.logs[inc].size()

      self.logger.error("cannot seek to %s" % original_offset)
      self.dump_position_ranges()

      # FIX: real exception instead of a Python 2 string exception
      raise ValueError("Invalid_Position")

   def filename_to_fileidx(self, filename):
      """Index in self.logs of the file with this name (ValueError if absent)."""

      return self.get_filenames().index(filename)

   def convert_filepos_to_hostpos(self, idx, filepos):
      """Translate (file index, offset-in-file) back into a host position."""

      sumsize = filepos

      if idx > 0:
         for inc in range(0, idx):
            sumsize += self.logs[inc].size()

      return sumsize

   def add_hint(self, regex, hint_txt, pre_regex, post_regex):
      """Register a hint that applies to this host only."""

      self.parent_cluster.hints.add(regex, hint_txt, pre_regex, post_regex, hostnames = [self.hostname()])

class logfile_class:
   """One syslog-style log file, accessed by byte position.

   A small pool of file handles lets concurrent readers (web threads plus
   the parser threads) seek independently without stepping on each other.
   """

   def __init__(self, fname = None, fp = None):
      """Open and sanity-check a log file.

      NOTE(review): still raises the legacy string exceptions
      "MissingArgument"/"Invalid_File" because cluster_class.add_log()
      catches "Invalid_File" by value; left unchanged for compatibility.
      """

      if not fname:
         # FIX: the fp-only variant never worked (fname is required for the
         # fp pool), so fail up front instead of crashing in abspath(None)
         raise "MissingArgument"

      self.fname = os.path.abspath(fname)
      self.__name = self.fname

      self.parent_host = None

      self.__init_fp_pool(2)

      if fp:
         self.fp = fp
      else:
         self.fp = open(fname, "r")

      if not self.validate():
         raise "Invalid_File"

      try:
         self.time_begin()
      except:
         raise "Invalid_File"

      if self.hostname() == "":
         raise "Invalid_File"

   def __init_fp_pool(self, pool_size):
      """Pre-open pool_size independent handles on the file."""

      self.fp_pool_free = []
      self.fp_pool_used = []

      while len(self.fp_pool_free) < pool_size:

         self.fp_pool_free.append( open(self.fname, "r") )

   def acquire_fp(self):
      """Take a handle from the pool; opens an extra one when starved."""

      try:
         fp = self.fp_pool_free.pop()
      except IndexError:
         logging.getLogger('logpox').warn( "fp pool for %s is about to starve (%s in use), creating fp" % (self.name(), len(self.fp_pool_used)) )
         fp = open(self.fname, "r")

      self.fp_pool_used.append(fp)

      return fp

   def release_fp(self, fp):
      """Return a handle obtained from acquire_fp() to the pool."""

      self.fp_pool_used.remove(fp)
      self.fp_pool_free.append(fp)

   def seek_and_readline(self, position):
      """One line read starting at the given byte position."""

      fp = self.acquire_fp()
      fp.seek(position)
      toret = fp.readline()
      self.release_fp(fp)
      return toret

   def seek_and_read(self, position, bytes):
      """Raw read of up to `bytes` starting at the given byte position."""

      fp = self.acquire_fp()
      fp.seek(position)
      toret = fp.read(bytes)
      self.release_fp(fp)
      return toret

   def get_line(self, position):
      """The log_line_class whose line contains the given byte position."""

      position = self.find_line_start(position)

      return log_line_class(self.parent_host, position, self.seek_and_readline(position))

   def set_name(self, name):

      self.__name = name

   def name(self):
      """Display name (may include the original URL/archive source)."""

      return self.__name

   def index(self):

      # FIXME(review): logfile_class has no parent_cluster attribute and
      # dicts have no .index(); this whole internal-id path looks broken.
      return self.parent_cluster.hosts.index(self)

   def set_parent_host(self, host):

      self.parent_host = host

   def validate(self):
      """A file is valid when its first line parses as a log line.

      FIX: removed the unreachable legacy implementation that followed the
      return statement.
      """

      return self.get_line(0).validate()

   def hostname(self):
      """Hostname of the first log line (the field after the 16-char
      "Mon DD HH:MM:SS " timestamp).

      FIX: the cache test used hasattr(self, "__hostname"), which never
      matches due to name mangling, so the value was recomputed on every
      call; cached under an unmangled attribute now.
      """

      if not hasattr(self, "_cached_hostname"):

         self._cached_hostname = self.seek_and_read(16, 128).split(" ", 1)[0]

      return self._cached_hostname

   def time_begin(self):
      """Timestamp of the first line.  (Unreachable legacy code removed.)"""

      return self.get_line(0).date()

   def time_end(self):
      """Timestamp of the last line: size()-3 lands inside the final line
      (before its trailing newline) and get_line snaps to its start.
      (Unreachable legacy code removed.)"""

      return self.get_line( self.size() - 3 ).date()

   def find_line_start(self, position_start, xstep = 64):
      """Byte offset of the start of the line containing position_start,
      found by scanning backwards xstep bytes at a time for a newline."""

      position = position_start

      fp = self.acquire_fp()

      while position > 0:
         if position - xstep < 0:
            # clamp the window to the start of the file
            xstep = position + 1
            position = 0
         else:
            position = position - xstep + 1

         fp.seek(position)
         jread = fp.read(xstep)

         newln = jread.rfind("\n")

         if newln >= 0 and newln < xstep:
            self.release_fp(fp)
            return position + newln + 1

      self.release_fp(fp)
      return 0

   def size(self):
      """Current byte size of the file on disk."""

      return os.path.getsize(self.fname)

   def eof(self):

      # dead API; FIX: real exception instead of a Python 2 string
      # exception, unreachable code dropped
      raise RuntimeError("Deprecated")

   def readline(self, max_bytes = -1):

      # dead API; see eof()
      raise RuntimeError("Deprecated")

   def seek(self,pos):

      # dead API; see eof()
      raise RuntimeError("Deprecated")

   def tell(self):

      # dead API; see eof()
      raise RuntimeError("Deprecated")

class log_line_class(object):
   """One syslog line, addressed by its owning host and byte offset.

   Wraps the raw line text with lazy parsing (timestamp, hostname, daemon,
   message), per-line metadata stored in the host's log_overlay, event
   attachment and HTML rendering for the browse view.
   """

   def __init__(self, host, position, line):
      # host: owning host object (may be None for synthetic filler lines);
      # position: byte offset of the line in the host's log file;
      # line: raw line text (None marks an empty placeholder row).
      self.host = host
      self.position = position
      self.line = line

      self.events = []

      self.parent_host = host

      # syslog timestamps carry no year: default to the current one, then
      # pull it back by one if that would place the line in the future
      self.year = time.localtime()[0]
      xdate = self.date()
      if xdate:
         self.year = self.__year_workaround( xdate )

   # groups: (timestamp) (hostname) (daemon) ([pid])? (:)? (message)
   __regex_c = re.compile(r"""^(... .. ..:..:..) (\S+) ([-_0-9a-zA-Z \.\/\(\)]+)(\[[0-9]+\])?(:)?\ *(.*)$""")

   def internal_id(self):
      # (host index, byte offset): unique within the cluster
      return ( self.host.index(), self.position )

   def id(self):
      # cached "host_position" string id (used as DOM/anchor id).
      # BUG FIX: the original checked hasattr(self, "__id"), but the
      # assignment self.__id is name-mangled, so the cache never hit.
      if not hasattr(self, "_id_cache"):
         self._id_cache = "%s_%d" % (self.host, self.position)
      return self._id_cache

   def tags(self):
      # user-visible overlay attributes for this line, minus internal keys
      if self.position not in self.parent_host.log_overlay:
         return []

      toret = {}

      for key, value in self.parent_host.log_overlay[self.position].items():
         if key in [ "starred", "has_hints", "events" ]: continue
         toret[key] = value

      return toret

   def get(self, attribute, default_value = None):
      # overlay lookup; any failure (no entry, no parent host) -> default
      try:    return self.parent_host.log_overlay[self.position][attribute]
      except: return default_value

   def tag(self, tagname, value = True):
      # alias for set()
      return self.set(tagname, value)

   def set(self, attribute, value):
      # store a per-line attribute in the host's overlay
      if self.position not in self.parent_host.log_overlay:
         self.parent_host.log_overlay[self.position] = {}

      self.parent_host.log_overlay[self.position][attribute] = value

   def toggle(self, attribute):
      # dispatch to a specialized toggle_<attribute>() when one exists
      if hasattr(self, "toggle_%s" % attribute):
         return getattr(self, "toggle_%s" % attribute) ()

      return self.__toggle(attribute)

   def __toggle(self, attribute):
      # generic boolean flip; returns the new value
      self.set(attribute, self.get(attribute) != True)
      return self.get(attribute)

   def toggle_starred(self):
      # flip the star flag and keep the "starred" event's log list in sync
      event = self.parent_host.parent_cluster.events.get_by_ref("starred")

      if not self.__toggle("starred"):
         if event:
            event.detach_log(self)
         return False

      if event:
         event.attach_log(self)
      else:
         event = self.add_event("Starred log lines", icon = "images/32px-Crystal_Clear_action_bookmark.png")
         event.set_ref("starred")
         event.visible_in_browse = False

      return True

   def validate(self):
      # a line is valid iff the syslog regex matched (timestamp group set)
      return self.parse()[0] != None

   def parse(self):
      # cached regex parse -> [timestamp, hostname, daemon, pid, colon, msg].
      # BUG FIX: hasattr(self, "__parse") never matched the mangled
      # attribute, so the regex re-ran on every call; cache unmangled.
      if not hasattr(self, "_parse_cache"):
         try:    self._parse_cache = log_line_class.__regex_c.findall(self.line)[0]
         except: self._parse_cache = [ None, None, None, None, None, None ]

      return self._parse_cache

   def __str__(self):
      return self.line

   def time(self):
      # date() wrapped with a loud diagnostic on unexpected failure
      try:
         return self.date()
      except:
         print("Error converting time, line was %s" % self.line)
         pdb.set_trace()

   def syslog_timestamp(self):
      # raw 15-char "Mon DD HH:MM:SS" prefix
      return self.line[0:15]

   def timestamp(self):
      # compact sortable form, e.g. 20080131235959
      return time.strftime("%Y%m%d%H%M%S", self.date())

   # month abbreviation -> month number.
   # BUG FIX: Sep/Oct were swapped in the original ('Oct': 9, 'Sep': 10),
   # mis-dating every September and October line.
   __month_map = {'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'Jun': 6, 'Jul': 7, 'Aug': 8, 'Sep': 9, 'Oct': 10, 'Nov': 11, 'Dec': 12}
   def __faster_syslog_strptime(self, syslog_tstamp):
      # hand-rolled "Mon DD HH:MM:SS" parser; much faster than time.strptime
      return datetime.datetime(self.year, self.__month_map[syslog_tstamp[0:3]], int(syslog_tstamp[4:6]), int(syslog_tstamp[7:9]), int(syslog_tstamp[10:12]), int(syslog_tstamp[13:15]) ).timetuple()

   def __year_workaround(self, xdate):
      # if assuming the current year puts the line in the future, the line
      # must belong to the previous year
      curdate = time.localtime()

      if xdate[0] == curdate[0] and xdate > curdate:
         return xdate[0] - 1

      return xdate[0]

   def date(self):
      """Return this line's date as a struct_time, or None if unparsable."""

      if self.get("real_date"):
         # a clock-skew-corrected date was recorded for this line
         return self.get("real_date")

      if not self.line:
         return None

      try:    return self.__faster_syslog_strptime(self.line[0:15])
      except: pass

      # fall back to the slow, tolerant stdlib parser
      try:    return time.strptime("%d %s" % (self.year, self.line[0:15]), "%Y %b %d %H:%M:%S")
      except: pass

      print("faster strptime failed on " + self.line[0:15])

      return None

   def hostname(self):
      # first token after the 15-char timestamp plus separating space
      return self.line[16:].split(" ", 1)[0]

   def daemon(self):
      return self.parse()[2]

   def message(self):
      return self.parse()[5]

   # get the previous log line
   def previous(self):

      # BUG FIX: hasattr(self, "__previous") never matched the mangled
      # attribute, so the lookup re-ran every call; cache unmangled.
      if hasattr(self, "_previous_cache"):
         return self._previous_cache

      if self.position == 0:
         # we are at beginning of file
         return None

      # NOTE(review): relies on host.get_line() snapping position - 16 back
      # to the previous line start -- TODO confirm
      self._previous_cache = self.host.get_line(self.position - 16)

      if self._previous_cache.date() > self.date():
         # earlier line carries a later timestamp: clock skew
         self.set_real_date( self._previous_cache.date() )

      return self._previous_cache

   # get the next log line
   def next(self):

      # BUG FIX: same mangled-hasattr caching problem as previous()
      if hasattr(self, "_next_cache"):
         return self._next_cache

      # NOTE(review): +3 presumably skips the line terminator -- confirm
      # against the on-disk log format
      newpos = self.position + len(self.line) + 3

      if newpos >= self.host.size():
         return None

      self._next_cache = self.host.get_line(newpos)

      if self._next_cache.date() < self.date():
         # next line carries an earlier timestamp: clock skew
         self._next_cache.set_real_date( self.date() )

      return self._next_cache

   def set_real_date(self, real_date):

      # FIXME: disabled clock skew detection until memory issues are addressed

      return

      self.set( "real_date", real_date )

      event = self.parent_host.parent_cluster.events.get_by_ref("clockskew")

      if event:
         event.attach_log(self)
      else:
         event = self.add_event("Clock skew - time adjustment activated")
         event.set_ref("clockskew")

   def html_print(self, highlight = None, onclick = False, link_to = False, show_time = False, show_hostname = False, show_events = True):
      """Render this line as an HTML fragment: event icons, optional
      timestamp/hostname, hint-ified message, tag links and the star
      toggle icon."""

      out = ""

      if show_events and self.has_events():

         for event in self.list_events():

            if not event.visible_in_browse:
               continue

            out += """<img src="/static/%s" style="width: 12px; height: 12px;" id="%s" href="javascript:void(0);" onmouseover="return overlib('%s', TEXTFONT,'Verdana', TEXTPADDING,6, WIDTH,200, BASE,2,REF,'%s', REFC,'UR', REFX,10, REFY,-25, FGCOLOR,'#eaeaea', BGCOLOR,'#333399', CGCOLOR,'#336699', CLOSECOLOR,'#ffccff', SHADOW,SHADOWX,2,SHADOWY,2);" onmouseout="nd();">""" % (event.icon, self.id(), event.text, self.id() )

      if show_time:
         out += self.syslog_timestamp()

      if show_hostname:
         out = "%s %s" % (out, self.hostname())

      out = "%s %s: %s" % ( out, self.daemon(), self.parent_host.parent_cluster.hints.hintify_log(self) )

      if link_to:
         out = """<a href="%s">%s</a>""" % (link_to, out)
      elif onclick:
         out = """<span onclick="%s">%s</span>""" % (onclick, out)

      # linkify known tag prefixes (bz -> bugzilla, it -> issue tracker)
      for tag in self.tags():
         if tag.startswith("bz"):
            out += """<a href="https://bugzilla.redhat.com/show_bug.cgi?id=%s" id="tag" target="_new">%s</a>""" % (tag[2:],tag)
         elif tag.startswith("it"):
            out += """<a href="https://enterprise.redhat.com/issue-tracker/?module=issues&action=view&tid=%s" id="tag" target="_new">%s</a>""" % (tag[2:],tag)
         else:
            out += """<span id="tag">%s</span>""" % tag

      out+= "<br>"

      # prepend the star/unstar toggle icon
      out = """<img id="star_%s" style="width: 12px; height: 12px;" onclick="javascript:rpc_star_log('%s');" src="/static/images/%s">%s\n""" % (self.id(), self.id(), if_true( self.get("starred") == True, "32px-Crystal_Clear_action_bookmark.png", "32px-Crystal_Clear_action_bookmark_Silver_half.png" ), out)

      return out

   def add_event(self, text, long_text = "", icon = None):
      # create a host-groupable event attached to this line
      return self.parent_host.parent_cluster.events.add(text, long_text = long_text, log_lines = [self], icon = icon, groupable = EVENT_HOST_GROUPABLE)

   def has_events(self):
      return self.get("events") != None

   def list_events(self):
      return self.get("events", [])

   def empty(self):
      # filler rows (multiplexed table rendering) carry line == None
      if self.line == None:
         return True
      else:
         return False

class cluster_view_class:
   """Iterate the logs of every host in a cluster side by side, one
   timestamp "row" at a time (used by the multiplexed browse view)."""

   def __init__(self, cluster, find_date = False, find_host_pos = False, max_rows = False, filter_daemons = None):
      # cluster: the cluster_class instance to view
      # find_date: start every host pointer at this date
      # find_host_pos: (hostname, byte offset) -- pin that host's pointer to
      #   an exact line (the other hosts still start at find_date)
      # max_rows: stop step_multiplex() after this many non-empty rows
      # filter_daemons: daemon names whose lines are skipped

      self.cluster = cluster
      self.max_rows = int(max_rows)
      # avoid the mutable-default-argument trap of the original ([])
      self.filter_daemons = filter_daemons or []

      self.row = 0
      self.count = 0

      if find_host_pos:
         # BUG FIX: the original referenced an undefined `hostname` here and
         # called .date() on the fetched line, then read .position off the
         # resulting struct_time
         tmp_log = cluster.get_host(find_host_pos[0]).get_line(find_host_pos[1])

         self.log_ptrs = cluster.get_logs_at_date(find_date)

         if tmp_log.position != self.log_ptrs[find_host_pos[0]].position:
            self.log_ptrs[find_host_pos[0]] = tmp_log

      elif find_date:
         self.log_ptrs = cluster.get_logs_at_date(find_date)

      else:
         self.log_ptrs = {}
         for hostname in cluster.host_names():
            self.log_ptrs[hostname] = cluster.get_host(hostname).get_first_line()

      # let's figure out the first current_time
      self.current_time = self.get_current_time()

   def get_current_time(self):
      """Earliest time() among the hosts' current log pointers, or None when
      every pointer is exhausted."""

      current_time = None

      for hostname in self.cluster.host_names():
         if not self.log_ptrs[hostname]:
            continue

         if not current_time or self.log_ptrs[hostname].time() < current_time:
            current_time = self.log_ptrs[hostname].time()

      return current_time

   def step_multiplex(self, skip_empty = False):
      """Generator: for each time row, yield one log line per host; filler
      lines (log_line_class(host, None, None)) mark hosts with nothing to
      show on that row unless skip_empty is set."""

      hosts = self.cluster.host_names()
      log_ptrs = self.log_ptrs

      while True:

         current_time = self.get_current_time()

         if not current_time:
            break

         empty_row = True

         for hostname in hosts:

            if not log_ptrs[hostname]:
               # this host is exhausted
               if not skip_empty:
                  yield log_line_class(self.cluster.get_host(hostname), None, None)

               continue

            if log_ptrs[hostname].daemon() in self.filter_daemons:
               # filtered daemon: advance past the line
               log_ptrs[hostname] = log_ptrs[hostname].next()
               if not skip_empty:
                  yield log_line_class(self.cluster.get_host(hostname), None, None)

               continue

            if log_ptrs[hostname].time() == current_time:
               # the line belongs to the current row
               yield log_ptrs[hostname]
               log_ptrs[hostname] = log_ptrs[hostname].next()
               empty_row = False

            elif skip_empty:
               continue

            else:
               yield log_line_class(self.cluster.get_host(hostname), None, None)

            self.count += 1

         if not empty_row:
            self.row += 1

         if self.max_rows and self.row >= self.max_rows:
            print("stop bc max_rows")
            # BUG FIX: `raise StopIteration` inside a generator is a
            # RuntimeError under PEP 479 (Python 3.7+); a plain return has
            # the intended effect
            return

         self.previous_current_time = current_time

   def next(self):
      # NOTE(review): this method was completely broken -- it referenced the
      # undefined globals `cluster` and `log_ptrs`, raised/caught string
      # exceptions (illegal since Python 2.6) and read the never-assigned
      # attribute `smart_exception`.  step_multiplex() is the maintained
      # iteration path; this repaired version just yields every line of the
      # current row and advances to the next one.

      if not self.current_time:
         self.current_time = self.get_current_time()
         if not self.current_time:
            return

      for hostname in self.cluster.host_names():

         ptr = self.log_ptrs[hostname]

         while ptr and ptr.time() == self.current_time:
            # this log is on this row, let's fetch all matching lines.
            yield ptr
            ptr = ptr.next()

         self.log_ptrs[hostname] = ptr

      self.current_time = self.get_current_time()

def calculate_log_rate_graph(cluster):
   """Sample each host's log at ~100 evenly spaced byte positions and return
   {hostname: {date: position}} for the log-rate graph.

   BUG FIX: for files smaller than 100 bytes the original computed
   pos_to_inc == 0 and span forever in the while loop; the increment is now
   clamped to at least one byte.
   """

   data = {}

   for host in cluster.list_hosts():

      hostname = host.hostname()
      size = host.size()
      pos = 0
      # never step by zero bytes
      pos_to_inc = max(1, int(size / 100))
      data[hostname] = {}

      while pos < size:

         log = host.get_line(pos)

         if not log:
            break

         data[hostname][log.date()] = pos

         pos += pos_to_inc

   return data

def calculate_log_rate_graph_old(cluster):
   """Legacy log-rate sampler: split the cluster's time span into 100
   intervals and return [(date, {host: lines_in_interval})], dropping empty
   counters."""

   time_begin = time.mktime(cluster.time_begin())
   time_end = time.mktime(cluster.time_end())

   log_rate = []
   for inc in range(1, 101):
      xdate = time.localtime( time_begin + ((time_end - time_begin) / 100 * inc) )
      log_rate.append( (xdate, cluster.get_position_by_date(xdate)) )

   # walk backwards, turning absolute positions into per-interval deltas
   # (py2-only `range(...).reverse()` replaced with a reversed range)
   for inc in range(len(log_rate) - 1, 0, -1):
      for host in cluster.host_names():

         # BUG FIX: the original subtracted even when the previous sample
         # was None, raising TypeError; guard both sides
         if log_rate[inc][1][host] != None and log_rate[inc-1][1][host] != None:

            log_rate[inc][1][host] -= log_rate[inc-1][1][host]

         if log_rate[inc][1][host] in [0, None]:
            # remove empty counters
            del log_rate[inc][1][host]

   del log_rate[0]

   return log_rate

class events_populator_class:
   """Run a background egrep over the cluster's log files and turn the
   matches into validated log_line_class results."""

   def __init__(self, master_cluster):

      self.cluster = master_cluster
      self.logger = self.cluster.logger
      self.__lock = Lock()
      self.grep = None
      self.__results = None

   def is_parsing(self):
      # True while the background grep process is still producing output
      if self.grep:
         return self.grep.is_running()
      else:
         return False

   def has_parsed(self):
      # True once a search() completed and results are available
      return not self.is_parsing() and self.__results != None

   def readline(self, line_match):
      """Per-line callback for `egrep -boH` output ("file:offset:match");
      resolve the match to a log line and queue it if it validates."""

      try:
         filename, position, match = line_match.split(":", 2)
      except ValueError:
         self.logger.error("error parsing output from grep %s" % line_match)
         return False

      self.logger.debug("match %s:%s (%s)" % (filename, position, match) )

      host = self.cluster.filename_to_host(filename)

      if not host:
         self.logger.error("error parsing output from grep, invalid filename: %s" % filename)
         return False

      log = host.get_line(
	host.convert_filepos_to_hostpos(host.filename_to_fileidx(filename), int(position))
      )

      if log.validate():
         self.__results.append(log)
      else:
         self.logger.debug('couldnt validate [%s]' % log.line)

      return True

   def fetch(self):
      """Generator: yield matched log lines as the grep produces them; ends
      when the grep is done and the queue is drained."""

      if self.__results is None:
         # search() was never run
         return

      while len(self.__results) > 0 or self.is_parsing():

         while len(self.__results) == 0 and self.is_parsing():
            time.sleep(0.1)

         if len(self.__results) == 0:
            # BUG FIX: `raise StopIteration` in a generator is a
            # RuntimeError under PEP 479 (Python 3.7+)
            return

         yield self.__results.pop(0)

   def search(self, regex, only_hosts = None, max_results = 0):
      """Run egrep for `regex` over the cluster's log files (restricted to
      `only_hosts` when given), feeding matches through readline()."""

      self.__lock.acquire()

      # release the lock even if the search blows up
      try:
         files = []
         hosts = list(self.cluster.hosts.keys())
         if only_hosts:
            # BUG FIX: the original removed only_hosts from the search set,
            # i.e. searched every host EXCEPT the requested ones
            hosts = [host for host in hosts if host in only_hosts]
         for host in hosts:
            files.extend( self.cluster.get_host(host).get_filenames_escaped() )
         files = " ".join(files)

         # BUG FIX: replace("'", "\'") was a no-op ("\'" == "'"); use the
         # POSIX idiom for embedding a single quote in a quoted argument
         quoted = regex.replace("'", "'\\''")
         cmdline = """egrep -boiaH --line-buffered %s '%s' %s""" % (if_true(max_results > 0, "-m %d" % max_results), quoted, files)

         self.__results = []

         self.grep = command_runner(cmdline, readline_callback = self.readline, timeout = 900)
         self.grep.run()
         self.grep.wait()
      finally:
         self.__lock.release()

      print("grep exited")

   def wait(self):
      # BUG FIX: __init__ always sets self.grep, so the original
      # hasattr(self, "grep") check passed and then called .wait() on None
      if self.grep:
         self.grep.wait()

      return True

def html_print_logs(logs, before = 2, after = 2, log_ids = None):
   """Generator: render `logs` as HTML divs with up to `before` context lines
   above and `after` below each matched line; context lines and matched lines
   use the same markup, matched lines additionally link to the browse view.

   BUG FIX: `raise StopIteration` inside a generator is a RuntimeError under
   PEP 479 (Python 3.7+); an empty-input generator just returns.
   """

   if not logs:
      return

   trailing = 0

   # work on a copy: matched entries are popped off as they are rendered
   logs = copy.copy(logs)

   if not log_ids:
      log_ids = [ tmplog.id() for tmplog in logs ]

   # collect up to `before` lines of leading context
   tmplog = logs[0].previous()
   before_logs = []

   while tmplog and before > 0:
      before_logs.insert(0, tmplog)
      tmplog = tmplog.previous()
      before -= 1

   yield """<div style='padding-bottom: 20px; font: normal normal 9px "Luxi Mono", LucidaTypewriter'>"""

   for tmplog in before_logs:
      yield """<div style="color: #9c9c9c">%s</div>\n""" % tmplog.html_print(show_time = True, show_hostname = True, show_events = True)

   del before_logs

   log = logs[0]

   while log:

      if log.id() in log_ids:
         # a matched line: reset the trailing-context counter
         trailing = 0
         del logs[0]
         log_ids.remove(log.id())
         yield """<div style="color: #9c9c9c">%s</div>\n""" % log.html_print( link_to = link_to_log(log), show_time = True, show_hostname = True, show_events = True)

      else:
         trailing += 1
         yield """<div style="color: #9c9c9c">%s</div>\n""" % log.html_print(show_time = True, show_hostname = True, show_events = True)

      if trailing == after:
         yield """</div>"""
         if len(logs) > 0:
            # NOTE(review): yields the sub-generator object itself, not its
            # items; callers must recurse -- preserved as-is
            yield html_print_logs(logs = logs)
         return

      log = log.next()

def link_to_log(log):
   # Deep link into the browse view: jump to the line's timestamp and anchor
   # on its "hostname_position" id.
   anchor = "%s_%s" % (log.hostname(), log.position)
   return "/browse?date=%s#%s" % (log.timestamp(), anchor)
