from config import *
import logging
import os
import os.path
import re


class Collection:
  """One filesystem folder that may hold an instrument data collection.

  Classifies the folder's contents, decides which instrument layout it
  matches (via the heuristics in decideInstrument) and partitions its
  files into 'recent' and 'expired' lists relative to the scan period.

  Relies on the ``Artifacts`` helper (brought in via ``config``) for
  per-instrument folder layouts and filename regexes.  NOTE(review):
  ``Artifacts`` is assumed to come from the ``config`` star import —
  confirm against config.py.
  """

  def __init__(self, folder, contents, period, map):
    """
    folder   -- pathname string of the folder being assessed
    contents -- list of absolute paths of the folder's entries
    period   -- (startTime, endTime) scan window (unix times)
    map      -- persistent dict of previously recognised collection
                folders: pathname -> {'time': ..., 'type': ...}
    """
    self._log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
    self.parent = folder  # a pathname string
    #self.lastModified = os.path.getctime(folder)
    self.lastModified = os.path.getmtime(folder) # unix last change time : win create time
    self.hierarchy = {
      'files' : {
        'recentFileList': [],
        'expiredFileList': [],
      },
      'folders' : {
      },
    }
    self.dataFiles = { }        # dataFolderName -> partitioned data file lists
    self.dataFilesExist = False
    self.dataCollectionMap = map
    self.period = period
    self.heldBackTime = self.period[0] # expected start time
    # 'in' works on py2 and py3; dict.has_key() is py2-only
    if folder in map:
      info = map[folder]
      self.heldBackTime = info['time'] # last upd time (fudged to 0 if data pending)
    self.flist = contents
    self.dataFormat = None      # instrument type, once decided
    self.dataFolder = None
    self.tempDataStore = { }    # dataFolderName -> [(path, mtime), ...] cache
    self.recentFileList = []
    self.expiredFileList = []
    self.recentFileCount = 0
    self.expiredFileCount = 0
    self.subFolderList = []     # subfolders still needing recursive scanning
    self.verbose = False
    self.dataCollectionHandler = None

  def checkCollectionFolderTimes(self):
    """Walk self.flist, queueing subfolders that need (re)scanning in
    self.subFolderList, and set self.dataFormat to this folder's
    instrument type if it can be determined (from the map, or by
    heuristics).  Leaves self.dataFormat as None when undecidable."""
    justTheFiles = []
    for f in self.flist:
      if os.path.isdir(f):
        if f not in self.dataCollectionMap:
          # its new, or old, not a datafolder and needs
          # recursive subfolder checking
          self.subFolderList.append(f)
        else:
          # its an OLD COLLECTION FOLDER
          # lets check (sub?)folder modification times
          collection = self.dataCollectionMap[f]
          if collection['time'] >= self.period[0] or collection['time'] == 0:
            # collection parent has been modified since our last scan
            # or, we fudged the time on previous pass
            self.subFolderList.append(f)
          else:
            instrument = collection['type']
            # check all subfolders of the known collection
            subFolders = Artifacts.getInstrumentFolders(instrument)
            for folder in subFolders:
              pathname = os.path.join(f, folder)
              if os.path.exists(pathname):
                modtime = os.path.getmtime(pathname)
                if modtime >= self.period[0]:
                  # recent modification to at least one subfolder
                  if self.lastModified < modtime:
                    self.lastModified = modtime
                  self.subFolderList.append(f)
                  break
          # end of OLD COLLECTION FOLDER
      elif os.path.isfile(f):
        justTheFiles.append(f)
      else:
        # not a file or folder (dangling symlink etc.) - ignore it
        continue

    collectType = None
    if self.parent in self.dataCollectionMap:
      collectType = self.dataCollectionMap[self.parent]['type']
    if not collectType:
      # cant actually decide without getting a subfolder filelist
      collectType = self.decideInstrument(justTheFiles)
      if not collectType:
        return
    self.dataFormat = collectType

  def decideInstrument(self, topLevelFiles):
    """Status: we don't know which instrument file layout we are dealing with
     or which are data files and recognised subfolders.
    We also need to be able to reject/ignore non-collection folders that
    are just hierarchical ancestors.
    This will be horribly heuristic.
    What could possibly go wrong?

    Returns the instrument name, or None when no plausible match.
    """
    subFolders = self.subFolderList
    subFolderNames = [os.path.basename(a) for a in subFolders]
    insts = Artifacts.getInstruments() # possibles
    scores = {}
    for inst in insts:
      idealFolders = Artifacts.getInstrumentFolders(inst)
      dataFolder = Artifacts.getInstrumentDataFolder(inst)
      if not dataFolder:
        # py2/py3-compatible raise (was the py2-only statement form)
        raise Exception("No data folder for instrument inst! " + inst)
      dfname, attribs = dataFolder
      datatypes = attribs['data']
      primaryData = datatypes[0]
      dataRegex = Artifacts.getREforFileType(primaryData[0])

      # score plausibility of this being a collection of "instrument" type:
      #   fcnt/ftot : how many of the layout's expected subfolders exist
      #   dfexists  : the instrument's data folder was found and scanned
      #   dfcnt     : files in it matching the primary data regex
      cnt = 0
      for fold in idealFolders:
        if fold in subFolderNames:
          cnt = cnt + 1
      scores[inst] = {
          'fcnt': cnt,
          'ftot': len(idealFolders),
          'dfexists': False,
          'dfcnt': 0,
      }

      if dfname not in self.tempDataStore:
        if dfname == ".":   # non-specific data folder
          dataFolderFiles = topLevelFiles
        else:               # maybe specific data folder!
          if not (dfname in subFolderNames):
            # definitely not this instrument
            continue
          dataFolder = os.path.join(self.parent, dfname)
          dataFolderFiles = self.getFolderContents(dataFolder)
        # cache as (path, mtime) pairs; a real list (not a lazy map()
        # iterator) so the cache can be iterated again later
        dataFolderFiles = [(a, os.path.getmtime(a)) for a in dataFolderFiles]
        self.tempDataStore[dfname] = dataFolderFiles

      dataFolderFiles = self.tempDataStore[dfname]
      count = sum(1 for a in dataFolderFiles if re.match(dataRegex, a[0]))
      scores[inst]['dfexists'] = True
      scores[inst]['dfcnt'] = count

      if count > 20:
        # unequivocally this inst type, and we have lost no information
        return inst

    # now rummage about with the scores to decide on the inst
    for inst in insts:
      stats = scores[inst]
      # guard: an instrument with no declared folders would divide by zero
      if stats['ftot'] == 0:
        continue
      frac = float(stats['fcnt']) / float(stats['ftot'])
      if frac > 0.5 and stats['ftot'] > 5:
        if stats['dfexists']:
          return inst
      if frac > 0.8 and stats['ftot'] > 5:
        return inst

    # check subdirectories for matching files
    # Shouldn't be that many other files, so efficiency is not paramount
    for inst in insts:
      scores[inst]['mcnt'] = 0
      if scores[inst]['fcnt'] <= 0:
        continue
      idealFolders = Artifacts.getInstrumentFolders(inst)
      matchFileCnt = 0
      for fold in idealFolders:
        if not (fold in subFolderNames):
          continue
        filePatts = Artifacts.getFolderFileREs(inst, fold)
        folder = os.path.join(self.parent, fold)
        folderFiles = self.getFolderContents(folder)
        for f in folderFiles:
          for format, regex in filePatts:
            if regex.match(f):
              matchFileCnt = matchFileCnt + 1
      scores[inst]['mcnt'] = matchFileCnt

    # rank instruments by match count, largest first
    # (sorted(key=..., reverse=True) replaces sort(cmp=...), removed in py3)
    instkeys = sorted(scores.keys(), key=lambda k: scores[k]['mcnt'], reverse=True)
    if not instkeys:
      return None

    maxInst = instkeys[0]
    # guard: a single-instrument configuration has no runner-up
    nextCnt = scores[instkeys[1]]['mcnt'] if len(instkeys) > 1 else 0
    # totally heuristic
    if scores[maxInst]['mcnt'] > 10:
      if scores[maxInst]['mcnt'] > nextCnt + 10:
        # a clear majority
        return maxInst
    # we could check file versions as a last fallback

    # totally ambiguous
    self._log.warning("No instrument match for folder: %s", self.parent)
    self._log.warning("Heuristic scores: %s", str(scores))
    return None

  def getFolderContents(self, folder):
    """ return a sorted list of absolute filepath strings """
    contents = sorted(os.listdir(folder))
    return [os.path.join(folder, a) for a in contents]

  def processFolderContents(self, contents, rePatternList, justTheFiles=True):
    """Return lists of relevant old and new files from contents.

    contents      -- list of (path, mtime) pairs
    rePatternList -- list of (formatName, compiledRegex) pairs; a file
                     is relevant when any regex matches its path
    justTheFiles  -- when False, recent entries are (path, mtime, format)
                     triples instead of bare paths

    Returns {'recentFileList': [...], 'expiredFileList': [...]}.
    """
    recentFileList = []
    expiredFileList = []
    for f, mtime in contents:
      if os.path.isdir(f):
        # we don't care about subfolders? seems shortsighted ...
        continue
    #  it may well be that new files were created in a folder between
    #  the last MINXTIME (start time) and the time of folder lastModification
    #  because we don't scrape the whole hierarchy instantaneously!
    #    MINXTIME  - - - - -    OLD.lastModified  - - - - -   Now
    #  If we try to add such files to ICAT, they would already have been injected
    #  and it would spit the dummy.
      # self.heldBackTime : last recorded upd time (fudged to 0 if data pending)
      if mtime < self.period[0] and self.heldBackTime != 0:
        for format, regex in rePatternList:
          if regex.match(f):
            expiredFileList.append(f)
            break
        #else: ignorable
      elif mtime < self.period[1]:
        for format, regex in rePatternList:
          if regex.match(f):
            if justTheFiles:
              recentFileList.append( f )
            else:
              recentFileList.append( (f, mtime, format) )
            break
        #else: ignorable
      else:
        # created very very recently i.e. during the
        # execution of this process,  so ignore
        # we'll pick them up next time this process is run!
        continue
    recentFileList.sort()
    return {
      'recentFileList': recentFileList,
      'expiredFileList': expiredFileList,
    }

  def _partitionByRE(self, entries, regex):
    """Split a list of (path, mtime) pairs into (matching, nonMatching)
    sublists according to regex.match on the path.  Replaces the old
    map(side-effect-lambda) idiom, which is a no-op under py3's lazy map
    and also shadowed the filter builtin."""
    matching = []
    nonMatching = []
    for entry in entries:
      if regex.match(entry[0]):
        matching.append(entry)
      else:
        nonMatching.append(entry)
    return matching, nonMatching

  def filesToBeProcessedQ(self):
    """Get New and old filenames and times for all collection folders.
       Stick partitioned dataset files in self.dataFiles dict.
       Put all others in self.hierarchy
       Return True when new files exist and not excluded
    """
    if not (self.dataFormat in Artifacts.getInstruments()):
      return False

    anyDataFilesExist = None
    instrument = self.dataFormat
    newFiles = 0
    expiredFiles = 0

    datafolderInfo = Artifacts.getInstrumentDataFolder(instrument)
    dataFolder = datafolderInfo[0]
    dataFileTypes = datafolderInfo[1]['data'][0]
    dataFileType = dataFileTypes[0]
    dataFolderRE = Artifacts.FILETYPES[dataFileType]['regexp']
    dataFileREs = [(dataFileType, dataFolderRE)]
    # this is true only if folder has no record in DATAMINX file.
    if dataFolder in self.tempDataStore:
      folderFiles = self.tempDataStore[dataFolder]
      # split list into matched/unmatched filename sub lists
      dataFiles, extraFiles = self._partitionByRE(folderFiles, dataFolderRE)
      partndDataFiles = self.processFolderContents(dataFiles, dataFileREs)
      self.dataFiles[dataFolder] = partndDataFiles
      newFiles = newFiles + len(partndDataFiles['recentFileList'])
      expiredFiles = expiredFiles + len(partndDataFiles['expiredFileList'])
      anyDataFilesExist = newFiles + expiredFiles
      # now check all other files in folder
      filePatts = Artifacts.getFolderFileREs(instrument, dataFolder)
      # created with 'data' as first pattern
      if filePatts[0][1] == dataFolderRE:
        filePatts = filePatts[1:]
      partndFiles = self.processFolderContents(extraFiles, filePatts, False)
      self.hierarchy['folders'][dataFolder] = partndFiles
      newFiles = newFiles + len(partndFiles['recentFileList'])
      expiredFiles = expiredFiles + len(partndFiles['expiredFileList'])

    subFolders = Artifacts.getInstrumentFolders(instrument)
    for folder in subFolders:
      if folder == dataFolder and dataFolder in self.tempDataStore:
        # handled separately earlier
        continue
      pathname = os.path.join(self.parent, folder)
      if os.path.exists(pathname):
        subFolCont = self.getFolderContents(pathname)
        if not subFolCont: # empty folder
          continue
        subFolCont = [(a, os.path.getmtime(a)) for a in subFolCont]
        filePatts = Artifacts.getFolderFileREs(instrument, folder)
        if folder == dataFolder:
          dataFiles, subFolCont = self._partitionByRE(subFolCont, dataFolderRE)
          # BUGFIX: previously passed [dataFolderRE], which crashes when
          # processFolderContents unpacks (format, regex) pairs; pass the
          # (type, regex) list exactly as the branch above does.
          partndDataFiles = self.processFolderContents(dataFiles, dataFileREs)
          self.dataFiles[folder] = partndDataFiles
          newDFiles = len(partndDataFiles['recentFileList'])
          expiredDFiles = len(partndDataFiles['expiredFileList'])
          anyDataFilesExist = newDFiles + expiredDFiles
          newFiles = newFiles + newDFiles
          expiredFiles = expiredFiles + expiredDFiles
          # now check all other files in folder
          # created with 'data' as first pattern
          if filePatts[0][1] == dataFolderRE:
            filePatts = filePatts[1:]
          # erk! what if it isn't ????
        partndFiles = self.processFolderContents(subFolCont, filePatts, False)
        self.hierarchy['folders'][folder] = partndFiles
        newFiles = newFiles + len(partndFiles['recentFileList'])
        expiredFiles = expiredFiles + len(partndFiles['expiredFileList'])
    self.recentFileCount = newFiles
    self.expiredFileCount = expiredFiles
    self.dataFilesExist = anyDataFilesExist

    del self.tempDataStore # no need to retain - unless called again?

    if newFiles > 0:
      return True
    return False

  def isADataCollectionQ(self):
    """True when this folder has been identified as a known instrument's
    data collection."""
    if self.dataFormat and \
      (self.dataFormat in Artifacts.getInstruments()):
      return True
    return False





class FolderRecurser:
  """Recursively walks a folder tree, wrapping each folder in a
  Collection to decide whether it holds instrument data.  When a
  collection with new data files is found, the registered
  dataCollectionHandler callback is invoked and (on success) the folder
  is recorded in dataCollectionMap; recursion stops at data folders.
  """

  def __init__(self, period, dataCollectionMap):
    """
    period            -- (startTime, endTime) scan window (unix times)
    dataCollectionMap -- persistent dict: folder -> {'time','type'}
    """
    self._log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
    self.verbose = False # verbose
    self.period = period
    self.dataCollectionMap = dataCollectionMap
    # BUGFIX: this attribute was never initialised, so processDataFiles()
    # raised AttributeError unless a caller assigned it externally.
    # Callers register a handler(folder, sample, mode, collection) here;
    # it should return a truthy value when the collection was fully handled.
    self.dataCollectionHandler = None

  def processSubFolders(self, folders):
    """ We avoid this bit for the root folder list, but need it
        for every subsubfolder
    """
    # note that in creating the folder list we already filtered out
    # data folders that are too old or too new
    for folder in folders:
      # this is the major bottleneck!
      contents = sorted(os.listdir(folder))
      contents = [os.path.join(folder, a) for a in contents]
      self.assessFolder(folder, contents)

  def assessFolder(self, folder, contents):
    """ We do not go into subfolders of folders that contain data
    """
    # analyse the folder and its contents
    f = Collection(folder, contents, self.period, self.dataCollectionMap)
    f.verbose = self.verbose
    f.checkCollectionFolderTimes()
    newFiles = f.filesToBeProcessedQ()

    if newFiles:
      ##  we have NEW datafiles!!!!
      # no need to descend further in FS hierarchy
      return self.processDataFiles(f)

    # otherwise no new data files
    if f.isADataCollectionQ():
      if self.verbose:
        self._log.info("--%6s -- ignoring   --%3d--%6d-- %s --%d" ,
          f.dataFormat, len(f.subFolderList), len(f.recentFileList), folder, len(f.expiredFileList) )
      return
    # recurse down non-datafile containing subfolders
    # note that subFolderList already eliminates previously
    # recognised but unchanged data folders
    self.processSubFolders(f.subFolderList)

    if self.verbose:
      self._log.info("--nodata -- ignoring   --%3d--%6d-- %s --%d" ,
         len(f.subFolderList), f.recentFileCount, folder, f.expiredFileCount )
    return

  def processDataFiles(self, f):
    """Invoke the registered handler for Collection f; if it reports the
    collection fully handled, record the folder and a 'last useful
    check' time in dataCollectionMap for the next invocation."""
    folder = f.parent

    mode = "create"
    if folder in self.dataCollectionMap:  # 'in' is py2/py3; has_key() is py2-only
      mode = "append"

    if not f.dataFilesExist:
      format = "--%6s --datapending --%3d--%6d-- %s --%d"
    else:
      format = "--%6s -- processing --%3d--%6d-- %s --%d"

    self._log.info(format, f.dataFormat,
         len(f.subFolderList), f.recentFileCount, folder, f.expiredFileCount )

    sample = folder.split(os.sep)[-1]  # leaf folder name used as the sample name

    handled = False
    if self.dataCollectionHandler:
      handled = self.dataCollectionHandler(folder, sample, mode, f)
      # call registered func
    if not handled:
      # If all we did was identify the collection type (dataFormat)
      # there would be a slight saving in identification time if we
      # recorded it for next invocation, but otherwise, would still
      # need to assess whole collection then anyway.
      #  i.e. we could fudge the 'time' parameter to precede the collection
      #  creation time, but it doesn't really save us much
      return

    # If the collection was handled properly, to completion,
    # then we need to record it in the DATAMINX record for next
    # invocation. We also record a "last useful check" time:
    # never later than the end of this scan period.
    if f.lastModified < self.period[1]:
      self.dataCollectionMap[folder] = {
        'time': f.lastModified,
        'type': f.dataFormat}
    else:
      self.dataCollectionMap[folder] = {
        'time': self.period[1],
        'type': f.dataFormat}
    return
  
