# -*- coding: UTF-8 -*-

import base64
import cPickle
import datetime
import fnmatch
import gzip
import hashlib
import os
import shutil
import sqlite3
import tempfile
import time
import types

from django.db import models
from django.core import serializers
from django.db.models.signals import pre_delete
from django.db.models import Q
from django.contrib.auth.models import User
from django.utils.encoding import smart_str # smart_str coerces unicode objects to byte strings
from django.conf import settings
from django.db import connection

from tagging.fields import TagField
from tagging.models import Tag

from ceemtu.common import *
from ceemtu.metadata import getMetadata

class Location(models.Model):
    """A geographic place with optional coordinates and address details."""
    name = models.CharField(max_length=200)
    latitude = models.CharField(max_length=50, null=True, blank=True)
    longitude = models.CharField(max_length=50, null=True, blank=True)
    country = models.CharField(max_length=50, null=True, blank=True)
    city = models.CharField(max_length=100, null=True, blank=True)
    zip = models.CharField(max_length=10, null=True, blank=True)
    county = models.CharField(max_length=100, null=True, blank=True)

class DatePeriod(models.Model):
    """
    A named date range; if ``recurring_date`` is set, the range repeats
    every year (same month/day window).
    """
    name = models.CharField(max_length = 500)
    description = models.TextField(null = True, blank = True)
    fromDate = models.DateField(db_index = True)
    toDate = models.DateField(db_index = True)
    recurring_date = models.BooleanField(help_text="Does this date repeat itself every year?")
    tags = TagField(null = True, blank = True)
    # NOTE(review): class-level mutable list shared by all instances; appears
    # unused in this file -- confirm external users before removing.
    _periods = []

    class Meta:
        ordering = ['-fromDate', 'toDate']
        verbose_name = "Dateperiod"
        verbose_name_plural = "Dateperiods"

    def __unicode__(self):
        return self.name

    def is_match(self, date):
        """
        Return True if *date* (a datetime) falls inside this period.

        For recurring periods, the [fromDate, toDate] window is projected
        onto every year from fromDate.year through the current year.
        """
        start = datetime.datetime(self.fromDate.year, self.fromDate.month, self.fromDate.day)
        end = datetime.datetime(self.toDate.year, self.toDate.month, self.toDate.day, 23, 59, 59)

        if not self.recurring_date:
            return start <= date <= end

        if date < start:
            return False

        span = end - start
        for year in range(self.fromDate.year, datetime.datetime.today().year + 1):
            try:
                window_start = datetime.datetime(year, self.fromDate.month, self.fromDate.day)
            except ValueError:
                # Fix: fromDate of Feb 29 projected onto a non-leap year
                # raised ValueError in the original; skip such years.
                continue
            if window_start <= date <= window_start + span:
                return True

        return False
   
class Category(models.Model):
    """
    A node in a category tree.  ``tree_text`` caches the full path as
    'Root :: Child :: Leaf' for display and ordering.
    """
    name = models.CharField(max_length=20, db_index = True)
    description = models.TextField(null = True)
    parent = models.ForeignKey('self', blank = True, null = True, related_name = 'child_set')
    tree_text = models.TextField(null = True)

    def save(self, *args, **kwargs):
        """
        Recompute ``tree_text`` from the parent chain, then save.
        """
        # Fix: the original scanned Category.objects.all() inside the while
        # loop without breaking, which could append duplicate levels or loop
        # forever; walking the parent FK directly is equivalent and safe.
        levels = [self.name.capitalize()]
        cat = self
        while cat.parent:
            cat = cat.parent
            levels.append(cat.name.capitalize())
        levels.reverse()
        self.tree_text = ' :: '.join(levels)
        super(Category, self).save(*args, **kwargs)

    def __unicode__(self):
        return self.tree_text and self.tree_text or self.name

    @classmethod
    def delete_all(cls):
        """Remove all categories (parents cleared first so parent_check allows it)."""
        Category.objects.all().update(parent = None)
        Category.objects.all().delete()

    @classmethod
    def get_categories(cls):
        """Return all categories with related rows pre-fetched."""
        # Fix: select_related must be called; the original returned the
        # bound method's attribute and raised AttributeError.
        return Category.objects.select_related().all()

    @classmethod
    def create_category(cls, tree_text, description = None):
        """
        Ensure the chain of categories described by 'root :: child :: leaf'
        exists and return the leaf category.

        NOTE(review): *description* is accepted but never stored -- confirm
        whether new categories should receive it.
        """
        category_tree = [cat.lower().strip() for cat in tree_text.split('::')]
        levels = []
        categories = Category.objects.all()

        for ct in category_tree:
            found = False
            for c in categories:
                if c.name == ct:
                    levels.append(c)
                    found = True
                    break  # fix: without break, duplicate names were appended repeatedly

            if not found:
                c = Category(name = ct)
                if levels:
                    c.parent = levels[-1]
                c.save()
                levels.append(c)

        return levels[-1]

    class Meta:
        ordering = ['tree_text', 'name']
        verbose_name = "Category"
        verbose_name_plural = "Categories"
        
class CategoryIsParent(Exception):
    """Raised when deleting a Category that is still another category's parent."""

def parent_check(sender, **kwargs):
    """
    pre_delete guard: refuse to delete a Category that still has children.

    Raises CategoryIsParent if any other category points at the instance.
    """
    category = kwargs['instance']
    # Fix: len(queryset) materializes every child row; COUNT(*) is enough.
    if Category.objects.filter(parent = category).count() != 0:
        raise CategoryIsParent()

pre_delete.connect(parent_check, sender=Category)

# Units for Archive.scan_interval_type (value stored, label displayed).
TIME_DURATION = (
    (0, 'Seconds'),
    (1, 'Minutes'),
    (2, 'Hours'),
    (3, 'Days')
)

# Severity levels for Log.log_type.
LOG_TYPE = (
    (0, 'INFO'),
    (1, 'WARNING'),
    (2, 'ERROR')
)

# Event kinds recorded in FileActivity.section.
FILE_ACTIVITY_CHOICES = (
    (0, 'Upload'),
    (1, 'Download'),
    (2, 'New file'),
    (3, 'Changed file'),
    (4, 'Deleted file'),
)

class FileActivity(models.Model):
    """
    Audit trail of file-related events (uploads, downloads, scan results).
    """
    section = models.IntegerField(choices = FILE_ACTIVITY_CHOICES)
    filename = models.CharField(max_length = 500)
    user = models.ForeignKey(User, null = True, blank = True)
    date_added = models.DateTimeField(default=datetime.datetime.now, editable = False)

    class Meta:
        ordering = ['-date_added', 'section']
        verbose_name = "File activity"
        # Fix: typo 'File activies'.
        verbose_name_plural = "File activities"

    @classmethod
    def _record(cls, section, filename, user):
        # Shared helper for the convenience constructors below.
        cls(section = section, filename = filename, user = user).save()

    @classmethod
    def upload(cls, username, filename):
        """Record an upload performed by *username*."""
        cls._record(0, filename, User.objects.get(username = username))

    @classmethod
    def download(cls, username, filename):
        """Record a download performed by *username*."""
        cls._record(1, filename, User.objects.get(username = username))

    @classmethod
    def new_file(cls, user, filename):
        """Record that a new file was discovered during a scan."""
        cls._record(2, filename, user)

    @classmethod
    def changed_file(cls, user, filename):
        """Record that an existing file was modified."""
        cls._record(3, filename, user)

    @classmethod
    def deleted_file(cls, user, filename):
        """Record that a file was removed."""
        cls._record(4, filename, user)

    def __unicode__(self):
        return "%s %s: %s" % (self.date_added, self.section, self.filename)
    
class Log(models.Model):
    """
    Application log entry stored in the database, optionally tied to a user
    and a referenced object id.
    """
    user = models.ForeignKey(User, null = True)
    ref_id = models.IntegerField(null = True)
    log_date = models.DateTimeField()
    section = models.CharField(max_length = 200)
    log_type = models.IntegerField(choices = LOG_TYPE)
    text = models.TextField()

    class Admin:
        pass

    @classmethod
    def _add(cls, log_type, usr, section, text, ref_id):
        # Shared helper: stamp the entry with the current time and save it.
        cls(user = usr, log_date = datetime.datetime.today(), section = section,
            text = text, log_type = log_type, ref_id = ref_id).save()

    @classmethod
    def error(cls, usr, section, text, ref_id = None):
        """Store an ERROR entry."""
        cls._add(2, usr, section, text, ref_id)

    @classmethod
    def warning(cls, usr, section, text, ref_id = None):
        """Store a WARNING entry."""
        cls._add(1, usr, section, text, ref_id)

    @classmethod
    def info(cls, usr, section, text, ref_id = None):
        """Store an INFO entry."""
        cls._add(0, usr, section, text, ref_id)
    

class FileExtension(models.Model):
    """
    One known file extension, with an optional ordering weight.
    """
    extension = models.CharField(max_length=20)
    sort_order = models.IntegerField(null=True)

    class Meta:
        ordering = ['-sort_order', 'extension']

    def __repr__(self):
        return self.extension

    def __unicode__(self):
        return self.extension

class FileGroup(models.Model):
    """
    A named collection of file extensions (e.g. an 'Images' group).
    """
    name = models.CharField(max_length=50)
    description = models.TextField(blank=True, null=True)
    extensions = models.ManyToManyField(FileExtension, related_name="filegroup")

    class Admin:
        pass

    class Meta:
        ordering = ['name']

    def __unicode__(self):
        return self.name

class FileSystemType(models.Model):
    """
    The kind of filesystem an archive's medium uses.
    """
    text = models.CharField(max_length=20)
    sort_order = models.IntegerField(null=True)

    class Admin:
        pass

    class Meta:
        ordering = ['-sort_order', 'text']
        verbose_name = "File System Type"
        verbose_name_plural = "File System Types"

    def __unicode__(self):
        return self.text

    def __repr__(self):
        return self.text

# Five-star quality rating (0 = unrated).
RATING_CHOICES = (
    (0, 'Not set'),
    (1, 'Bad'),
    (2, 'Mediocre'),
    (3, 'Good'),
    (4, 'Very good'),
    (5, 'Excellent')
)

# Visibility levels, from fully public to personal-only.
AVAILABILITY_CHOICES = (
    (None, 'Not set'),
    (0, 'Public'),
    (1, 'Requires authentication'),
    (2, 'Friend'),
    (3, 'Family'),
    (4, 'Personal')
)

# (month_number, month_name) pairs for all twelve months; names come from
# strftime('%B') and therefore follow the active locale.
# Fix: built with a generator expression so the loop variables no longer
# leak into the module namespace (the old loop left 'month' and 'date'
# behind as globals).
MONTHS_CHOICES = tuple(
    (month, datetime.datetime(1900, month, 1).strftime('%B'))
    for month in range(1, 13)
)

class Project(models.Model):
    """
    A project run by a manager, optionally tagged with categories.
    """
    title = models.CharField(max_length=200)
    manager = models.ForeignKey(User)
    description = models.TextField(null=True, blank=True)
    categories = models.ManyToManyField(Category, null=True, blank=True)

    def __unicode__(self):
        return '"%s", managed by %s' % (self.title, self.manager.username)
        
class CorruptPrimitiveArchive(Exception):
    """Raised when a primitive archive lacks a required setting (key in args)."""

class MustRegisterFirst(Exception):
    """Raised when an archive's owner has no registered User account."""

class Archive(models.Model):
    """
    
    """
    user = models.ForeignKey(User, editable = False)
    name = models.CharField(max_length=100)
    path = models.CharField(max_length=500)
    filegroups = models.ManyToManyField(FileGroup)
    filesystem = models.ForeignKey(FileSystemType, null = True, blank = True)
    media_label = models.CharField(max_length=100, null = True, blank = True)
    description = models.TextField(null = True, blank = True)
    tags = TagField(null = True, blank = True)
    generate_checksums = models.BooleanField(null = True, blank = True)
    extract_metadata = models.BooleanField(null = True, blank = True)
    ignore_files_below_size = models.IntegerField(blank = True, null = True, default = settings.IGNORE_FILES_BELOW_SIZE)

    last_update = models.DateTimeField(null = True)
    scan_interval = models.IntegerField(null = True, blank = True)
    scan_interval_type = models.IntegerField(null = True, blank = True, choices = TIME_DURATION)
    writable = models.BooleanField(null = True)
    available = models.BooleanField(null = True)
    
    #########################################################################
    #    Backup
    #########################################################################    
    rsync_options = models.CharField(max_length = 50, blank = True, null = True, default = '-av')
    rsync_target = models.CharField(max_length = 150, blank = True, null = True)
    rsync_enabled = models.BooleanField(null = True, blank = True)

    def rsync_cmd(self):
        if not self.rsync_enabled:
            return None
        #extensions = ','.join(['*.%s' % ext for ext in self.get_file_extension_dict().keys()])
        return ['rsync', self.rsync_options, self.path, self.rsync_target]#, '--include=%s' % extensions]
        
    def needs_scanning_after(self):
        if not self.available or not self.scan_interval or self.scan_interval_type == None:
            return datetime.datetime.today() + datetime.timedelta(days = 365)
        
        if not self.last_update:
            return datetime.datetime(1900, 1, 1)

        t = None        
        if self.scan_interval_type == 0:
            t = datetime.timedelta(seconds = self.scan_interval and self.scan_interval or 0)
        elif self.scan_interval_type == 1:
            t = datetime.timedelta(minutes = self.scan_interval and self.scan_interval or 0)
        elif self.scan_interval_type == 2:
            t = datetime.timedelta(hours = self.scan_interval and self.scan_interval or 0)
        elif self.scan_interval_type == 3:
            t = datetime.timedelta(days = self.scan_interval and self.scan_interval or 0)

        return self.last_update + t
    
    def get_tags(self):
        return Tag.objects.get_for_object(self) 
    
    class Meta:
        ordering = ['name']
        verbose_name = "Archive"
        verbose_name_plural = "Archives"

    def scanning_url(self):
        """
        
        """
        print self.id
        return '<a href="/archive/scan/%s">Scan</a>' % self.id
    scanning_url.allow_tags = True

    class Admin:
        pass

    def __repr__(self):
        return self.name

    def __unicode__(self):
        return self.name

    def get_entries(self):
        """
        Returns a list with all entries related to this archive. 
        """
        return FileEntry.objects.filter(archive = self)

    def delete_all(self):
        pass

    def serializeToDisk(self):
        if self.writable and self.available:
            print "Going to disk ..."
            all_objects = list(FileEntry.objects.filter(parent_archive = self)) + [self] + list(FileExtensions.objects.all()) # + photos, music, movies etc. etc.
            XMLSerializer = serializers.get_serializer("xml")
            xml_serializer = XMLSerializer()
            outputfile = os.path.join(self.path, '.ceemtu', '%s.xml' % self.name)
            if not os.path.exists(os.path.split(outputfile)[0]):
                os.makedirs(os.path.split(outputfile)[0])
            out = open(outputfile, "w")
            xml_serializer.serialize(all_objects, stream=out)
            return True

        return False

    @classmethod
    def process_entry(cls, current_archive, current_user, entry, file_extensions, word_index, filename_patterns):
        """
        
        """
        fe = FileEntry.createObjectFromDict(current_archive, entry)
        if not fe:
            return
        
        if fe.filesize < current_archive.ignore_files_below_size:
            return
        
        if not fe.extension.lower() in file_extensions.keys():
            return
        
        fe.checksum = entry.get('checksum', None)
        fe.extension_link = file_extensions[fe.extension.lower()]
        fe.pickled_metadata = entry.get('pickled_metadata', None)
        fe.save(False)
        
        #for wrd in entry.get('words', []):
        #    if wrd.isdigit():
        #        continue
        #    if not word_index.has_key(wrd):
        #        w = WordDef(word = wrd)
        #        w.save()
        #        word_index[wrd] = w
        #
        for pattern in filename_patterns:
            pattern.processFileEntry(fe)

    def get_file_extension_dict(self):
        file_extensions = {}
        for fg in self.filegroups.all():
            for fe in fg.extensions.all():
                file_extensions[fe.extension] = fe
        return file_extensions

    def get_word_index_dict(self):        
        word_index = {}
        for wo in WordDef.objects.all():
            word_index[wo] = wo
        return word_index

    @classmethod
    def open_primitive_archive(cls, compressed_archive, try_raw_files = False):
        pa = None
        if try_raw_files:
            path, fname = os.path.split(compressed_archive)
            fname, ext = os.path.splitext(fname)
            if os.path.exists(os.path.join(path, fname +'.db')):
                pa = PrimitiveArchive(fname, path)

        if not pa:
            pa = PrimitiveArchive.load_from_file(compressed_archive)

        data = pa.get_archive_settings()
        for key in ('username', 'email', 'path', 'name'):
            if not data.has_key(key):
                raise CorruptPrimitiveArchive(key)

        return (pa, data)
        
    @classmethod
    def get_archive_from_primitive_archive(cls, primtive_archive, archive_settings, debug = False):
        archives = Archive.objects.filter(user = archive_settings.get('username', None), name = archive_settings.get('archive', None))
        if archives.count() == 0:
            user = None
            try:
                user = User.objects.get(username = archive_settings.get('username'))
            except User.DoesNotExist:
                raise MustRegisterFirst()

            current_archive = Archive(
                user = user,
                name = archive_settings.get('name'),
                path = archive_settings.get('path'),
                description = archive_settings.get('description', None),
                last_update = datetime.datetime.today(),
                dirty = True,
            )    
            current_archive.save()
            for fg in FileGroup.objects.all():
                current_archive.filegroups.add(fg)

            current_archive.save()
            return (True, current_archive)
        
        return (False, archives[0])

    @classmethod
    def process_archive(cls, compressed_archive, try_raw_files = False, debug = False, use_raw_sql = False):
        """
        
        """
        pa, data = Archive.open_primitive_archive(compressed_archive, try_raw_files)
        new_archive, current_archive = Archive.get_archive_from_primitive_archive(pa, data, debug)
        if new_archive or use_raw_sql:
            SQL = """
            insert into cetcore_fileentry (archive_id, filepath, filename, extension, extension_link_id,
            filesize, created_datetime, modified_datetime, checksum, authentication_required,
            mature_content, personal_content, pickled_metadata, in_the_attic, date_added, tags)
            values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """
            cursor = connection.cursor()
            extensions = current_archive.get_file_extension_dict()
            errors = []
            for entry in pa.get_files():
                if entry.get('filesize') < current_archive.ignore_files_below_size:
                    continue
                if debug:
                    print entry.get('filepath', None), entry.get('filename', None)    
                params = [
                    current_archive.id,
                    entry.get('filepath', None),
                    entry.get('filename', None),
                    entry.get('extension', None),
                    extensions.get(entry.get('extension', None)),
                    entry.get('filesize', 0),
                    entry.get('created_datetime', None),
                    entry.get('modified_datetime', None),
                    entry.get('checksum', ''),
                    entry.get('authentication_required', False),
                    entry.get('mature_content', False),
                    entry.get('personal_content', False),
                    entry.get('pickled_metadata', None),
                    entry.get('in_the_attic', False),
                    datetime.datetime.today(),
                    'new'
                    ]
                try:
                    cursor.execute(SQL, params)
                    entry['processed'] = (True, datetime.datetime.today(), e)
                    pa.save_data(entry.get('complete_filename'), entry)
                except Exception, e:
                    entry['processed'] = (False, datetime.datetime.today(), e)
                    pa.save_data(entry.get('complete_filename'), entry)
                    errors.append((entry.get('complete_filename'), entry, e))

            if debug and errors:
                print errors
                print len(errors)

        else:    
            for entry in pa.get_files():
                if entry.get('filesize') < current_archive.ignore_files_below_size:
                    continue
                if debug:
                    print entry.get('filepath', None), entry.get('filename', None)
                Archive.process_entry(
                    current_archive,
                    current_archive.user,
                    entry,
                    current_archive.get_file_extension_dict(),
                    current_archive.get_word_index_dict(),
                    FilePattern.objects.all()
                )

        current_archive.serializeToDisk()
        return current_archive.id

# First, define the Manager subclass.
class ActiveFileEntryManager(models.Manager):
    """Manager that hides entries placed in the attic."""
    def get_query_set(self):
        # Fix: original referenced the misspelled 'ActiveFileEntrykManager',
        # raising NameError on first use.
        return super(ActiveFileEntryManager, self).get_query_set().filter(in_the_attic = False)

class SafeFileEntryManager(models.Manager):
    """Manager exposing only public, non-sensitive, non-attic entries."""
    def get_query_set(self):
        # Fix: original referenced the misspelled 'SafeFileEntrykManager',
        # raising NameError on first use.
        return super(SafeFileEntryManager, self).get_query_set().filter(
            in_the_attic = False, mature_content = False,
            personal_content = False, authentication_required = False)

class FileEntry(models.Model):
    """
    
    """    
    archive = models.ForeignKey(Archive)
    filepath = models.CharField(max_length=500)
    filename = models.CharField(max_length=256)
    extension = models.CharField(max_length=20)
    extension_link = models.ForeignKey(FileExtension, null = True)
    filesize = models.IntegerField()
    created_datetime = models.DateTimeField()
    modified_datetime = models.DateTimeField()
    checksum = models.CharField(max_length=128, null = True)
    authentication_required = models.BooleanField(null = True)
    mature_content = models.BooleanField(null = True)
    personal_content = models.BooleanField(null = True)
    pickled_metadata = models.TextField(null = True)
    tags = models.ManyToManyField(Tag, null = True, blank = True)
    in_the_attic = models.BooleanField()
    date_added = models.DateTimeField(default=datetime.datetime.now, editable = False)
    tags = TagField()

    objects = models.Manager() # The default manager.
    active_objects = ActiveFileEntryManager()
    safe_objects = SafeFileEntryManager()
    
    @classmethod
    def get_files_in_the_attic(cls, user):
        """
        
        """
        return FileEntry.objects.filter(Q(archive__user = user), Q(in_the_attic = True))

    @classmethod
    def get_file_entries(cls, user = None, include_mature_content = False):
        """
        
        """
        if not user:
            query_set = FileEntry.objects.filter(Q(personal_content = False), Q(authentication_required = False))
        else:
            query_set = FileEntry.objects.filter(Q(archive__user = user) | Q(personal_content = False))

        if not include_mature_content:
            query_set = query_set.filter(Q(mature_content = False))
            
        query_set = query_set.filter(Q(in_the_attic = False))

        return query_set

    def get_tags(self):
        """
        
        """
        return Tag.objects.get_for_object(self) 

    class Meta:
        #ordering = ['archive', 'filepath', 'filename']
        verbose_name = "File Entry"
        verbose_name_plural = "File Entries"

    class Admin:
        pass

    def save(self, serialize = True):
        """
        
        """
        if serialize:
            self.archive.serializeToDisk()
        super(FileEntry, self).save()
        
    def human_filesize(self):
        """
        
        """
        return humansize(self.filesize)

    def __repr__(self):
        return "%s.%s <%s>" % (os.path.join(self.filepath, self.filename), self.extension, self.archive)

    def __unicode__(self):
        return "%s.%s <%s>" % (os.path.join(self.filepath, self.filename), self.extension, self.archive)

    def entry_hash(self):
        """
        
        """
        return (hashlib.md5().update(self.filepath+self.filename).hexdigest(), self.id)
    
    def complete_filename(self):
        """
        Returns a complete filename.
        """
        filepath = self.filepath
        if len(filepath) > 0:
            if filepath[0] == os.sep:
                filepath = filepath[1:]
        return "%s.%s" % (os.path.join(self.archive.path, filepath, self.filename), self.extension)
        
    def partial_filename(self):
        """
        
        """
        return "%s.%s" % (os.path.join(self.filepath, self.filename), self.extension)

    def url(self):
        """
        
        """
        return '<a href="/file_entry/%s">Details</a>' % self.id
    url.allow_tags = True

    def get_words(self):
        """
        
        """
        words = {}

        try:
            for w in getWords(self.filepath):
                words[w.lower()] = None
        except Exception, e:
            pass

        try:
            for w in getWords(self.filename):
                words[w.lower()] = None
        except Exception, e:
            pass

        metadata = self.get_metadata()
        if metadata:
            for k,v in metadata.items():
                if type(v) != types.StringType or type(v) != types.UnicodeType:
                    continue
                try:
                    for w in getWords(v):
                        words[w.lower()] = None
                except Exception, e:
                    pass

        return words.keys()


    def get_metadata(self, flatten = True):
        result = {}
        if not self.pickled_metadata:
            return result

        try:
            metadata = cPickle.loads(base64.decodestring(str(self.pickled_metadata)))
        except Exception, e:
            Log.error('get_metadata', "%s - %s" % (self.id, e))
            return result

        if not metadata:
            return result

        if not flatten:
            return metadata

        for k,v in metadata.items():
            if type(v) == types.DictionaryType:
                for k2, v2 in v.items():
                    result['%s - %s' % (k, k2)] = v2
            else:
                result[k] = v
                
        for k,v in result.items():
            if type(v) in (types.StringType, types.UnicodeType):
                result[k] = v.strip()
        return result

    def exists(self):
        """
        Does the current filename exists on the available filesystem?
        """
        return os.path.exists(self.complete_filename())
    exists.boolean = True
    
    def place_in_attic(self):
        self.in_the_attic = True
        self.save()

    @classmethod
    def refreshFromFile(cls, file_entry, regenerate_checksum = False):
        pass

    @classmethod
    def createObjectFromDict(cls, archive, d):
        """
        
        """
        filepath = smart_str(d['filepath'])
        filename = smart_str(d['filename'])
        extension = d['extension']
        filesize = d['filesize']
        created_datetime = d['created_datetime']
        modified_datetime = d['modified_datetime']
        
        file_entries = FileEntry.objects.filter(archive = archive, filepath = filepath, filename = filename)
        if len(file_entries) != 0:
            file_entry = file_entries[0]
            # File exists, but has it changed and needs updating?
            if (file_entry.filesize != filesize) or (file_entry.modified_datetime < modified_datetime):
                return file_entry
            return None # unchanged, no need to proceed

        return FileEntry(
            archive = archive,
            filepath = filepath,
            filename = filename,
            extension = extension,
            filesize = filesize,
            created_datetime = created_datetime,
            modified_datetime = modified_datetime,
            checksum = d['checksum']
            )

class WordDef(models.Model):
    """
    A single word extracted from file names/metadata for indexing.
    """
    word = models.CharField(max_length=100)

    def __unicode__(self):
        return self.word

    def __repr__(self):
        return self.word
    
# File actions that can be queued against a FileEntry.
ACTION_TYPES = (
    (0, 'NONE'),
    (1, 'DELETE'),
    (2, 'MOVE'),
    (3, 'RENAME'),
    (4, 'COPY'),
    (5, 'MOVE_WITH_STRUCTURE'),
    (6, 'COPY_WITH_STRUCTURE'),
    )

# Lookup tables derived from ACTION_TYPES:
# MAPPING1: value -> label, MAPPING2: label -> value.
ACTION_TYPES_MAPPING1 = dict(ACTION_TYPES)
ACTION_TYPES_MAPPING2 = dict((label, value) for value, label in ACTION_TYPES)

class ActionNeedsArgument(Exception):
    """Raised when an action type requires an argument but none was given."""

class InvalidActionType(Exception):
    """Raised when an unknown action type name is supplied."""

class Action(models.Model):
    """
    
    """
    file_entry = models.ForeignKey(FileEntry)
    action_type = models.IntegerField(choices = ACTION_TYPES)
    argument = models.CharField(max_length=250, null = True)
    sort_order = models.IntegerField(null = True)
    
    class Meta:
        ordering = ['action_type', '-sort_order']
        verbose_name = "Action"
        verbose_name_plural = "Actions"

    class Admin:
        pass
        
    def __repr__(self):
        return "<Action:'%s' - '%s'>" % (ACTION_TYPES_MAPPING1[self.action_type], self.file_entry.filename)
    
    @classmethod
    def register_action(cls, file_entries, action_type, argument = None, sort_order = None):
        """
        Validates and registers an action related to suuplied file-entries.
        """
        try:
            action_type_as_int = ACTION_TYPES_MAPPING2[action_type]
        except KeyError, key:
            raise InvalidActionType(key)

        if action_type_as_int in (2,3,4,5,5,6) and not argument:
            raise ActionNeedsArgument()
        
        for file_entry in file_entries:
            a = Action(user = file_entry.archive.user, file_entry = file_entry, action_type = action_type_as_int, argument = argument, sort_order = sort_order)
            a.save()

    @classmethod    
    def clear_actions(cls):
        """
        Clears all actions from the database
        """
        Action.objects.all().delete()

    @classmethod
    def list_actions(cls, action_type = None):
        """
        Returns a list of actions
        """
        if action_type:
            try:
                action_type_as_int = ACTION_TYPES_MAPPING2[action_type]
            except KeyError, key:
                raise InvalidActionType(key)

            return Action.objects.filter(action_type = action_type_as_int)

        else:
            return Action.objects.all()

    @classmethod
    def process_actions(cls):
        """
        Processes all files with actions set and clears the database for actions when done
        """
        for action in self.list_actions():
            if not os.path.exists(action.file_entry.complete_filename()):
                if settings.VERBOSE: print  "No such file: %s." % action.file_entry.complete_filename()
                continue
            
            #if USER_DEFINED_ACTION_HANDLING:
            #    if rud.process_file(complete_filename, filename, filepath, action_type, action_argument):
            #        continue
            
            if action_type == ACTION_DELETE:
                try:
                    os.remove(action.file_entry.complete_filename())
                    if settings.VERBOSE: print  "Deleted %s." % action.file_entry.complete_filename()
                except Exception, e:
                    if settings.VERBOSE: print  "Error removing :%s because %s." % (action.file_entry.complete_filename(), e)
                    
            elif action_type == ACTION_MOVE:
                try:
                    new_filename = CheckTargetFilename(os.path.join(action_argument, action.file_entry.filename))
                    shutil.move(action.file_entry.complete_filename(), new_filename)
                    if settings.VERBOSE: print  "Moved %s to %s." % (action.file_entry.complete_filename(), new_filename)
                except Exception, e:
                    if settings.VERBOSE: print  "Error moving :%s because %s." % (action.file_entry.complete_filename(), e)
                 
            elif action_type == ACTION_RENAME:
                # parse action_argument
                # +STRING = filename+STRING.EXT
                # 
                raise NotImplemented()
                
            elif action_type == ACTION_COPY:
                try:
                    new_filename = CheckTargetFilename(os.path.join(action.argument, action.file_entry.filename))
                    shutil.copy(action.file_entry.complete_filename(), new_filename)
                    if settings.VERBOSE: print  "Copied %s to %s." % (action.file_entry.complete_filename(), new_filename)
                except Exception, e:
                    if settings.VERBOSE: print  "Error copying :%s because %s." % (action.file_entry.complete_filename(), e)
                    
            elif action_type == ACTION_MOVE_WITH_STRUCTURE:
                try:
                    new_filename = CheckTargetFilename(os.path.join(action.argument, action.file_entry.filepath, action.file_entry.filename))
                    shutil.move(action.file_entry.complete_filename(), new_filename)
                    if settings.VERBOSE: print  "Moved %s to %s." % (action.file_entry.complete_filename(), new_filename)
                except Exception, e:
                    if settings.VERBOSE: print  "Error moving with structure :%s because %s." % (action.file_entry.complete_filename(), e)
                
            elif action_type == ACTION_COPY_WITH_STRUCTURE:
                try:
                    new_filename = CheckTargetFilename(os.path.join(action.argument, action.file_entry.filepath, action.file_entry.filename))
                    shutil.copy(action.file_entry.complete_filename(), new_filename)
                    if settings.VERBOSE: print  "Copied %s to %s" % (action.file_entry.complete_filename(), new_filename)
                except Exception, e:
                    if settings.VERBOSE: print  "Error copying with structure:%s because %s." % (action.file_entry.complete_filename(), e)

        Action.clear_actions()
        if settings.VERBOSE: print  "Cleared all actions."


# Matching strategies selectable via FilePattern.pattern_type.
PATTERN_TYPES = (
    (0, 'Plain words'),
    (1, 'Plain words, partial matching'),
    (2, 'Regular expression'),
    (3, 'File pattern, case insensitive'),
    (4, 'File pattern, case sensitive'),
    (5, 'Match extensions')
)

from ceemtu.cetcore.pattern_methods import PATTERN_METHODS, PATTERN_METHODS_MAPPING

class FilePattern(models.Model):
    """
    
    """
    name = models.CharField(max_length=100)    
    pattern = models.CharField(max_length=200, null = True, blank = True)    
    pattern_type = models.IntegerField(choices = PATTERN_TYPES)
    description = models.TextField(null = True, blank = True)
    triggers_authentication_requirement = models.BooleanField()
    triggers_mature_content = models.BooleanField()
    triggers_personal_content = models.BooleanField()
    triggers_message_to_admin = models.BooleanField()

    match_extensions = models.ManyToManyField(FileExtension, null = True, blank = True)
    
    triggers_action = models.BooleanField()
    action_type = models.IntegerField(choices = ACTION_TYPES, null = True, blank = True)
    argument = models.CharField(max_length=250, null = True, blank = True)
    
    triggers_sql_statement = models.BooleanField()
    sql_statement = models.TextField(null = True, blank = True)

    triggers_add_to_tags = models.BooleanField()
    tags = models.ManyToManyField(Tag, null = True, blank = True)    
    
    execute_python_method = models.CharField(choices = PATTERN_METHODS, null = True, max_length = 50, blank = True)
    sort_order = models.IntegerField(null = True, blank = True)

    class Meta:
        ordering = ['-sort_order', 'pattern']
        verbose_name = "Pattern"
        verbose_name_plural = "Patterns"

    class Admin:
        pass
    
    def __repr__(self):
        return self.name

    def __unicode__(self):
        return self.name

    def isMatch(self, file_entry):
        """
        
        """
        if self.pattern_type in [0, 1]:
            words = file_entry.get_words()
            hits = 0
            word_patterns = [s.strip().lower() for s in self.pattern.replace(' ', ',').split(',')]
            for pw in word_patterns:
                if self.pattern_type == 0:
                    if pw in words:
                        hits = hits + 1
                elif self.pattern_type == 1:
                    for word in words:
                        if word.count(pw) > 0:
                            hits = hits + 1
            if hits == len(word_patterns):
                return True

        elif self.pattern_type == 2:
            return False

        elif self.pattern_type == 3:
            if fnmatch.fnmatch(file_entry.complete_filename(), self.pattern):
                return True

        elif self.pattern_type == 4:
            if fnmatch.fnmatchcase(file_entry.complete_filename(), self.pattern):
                return True
        
        elif self.pattern_type == 5:
            extensions = [e.extension for e in self.match_extensions.all()]
            if file_entry.extension.lower() in extensions:
                return True

        return False

    def processFileEntry(self, file_entry):
        """
        
        """
        if not self.isMatch(file_entry):
            return
        
        changed = False
        if  self.triggers_authentication_requirement:
            file_entry.authentication_required = True
            changed = True

        if self.triggers_mature_content:
            file_entry.mature_content = True;
            changed = True

        if self.triggers_personal_content:
            file_entry.personal_content = True;
            changed = True

        if self.triggers_message_to_admin:
            raise NotImplemented()
            
        if self.triggers_add_to_tags:
            for tag in self.tags.all():
                file_entry.tags.add(tag)
            changed = True

        if self.triggers_action:
            Action.register_action(file_entry.user, [file_entry], ACTION_TYPES_MAPPING1[self.action_type], self.argument and self.argument or None)

        if self.triggers_sql_statement:
            params = {
                #archive = models.ForeignKey(Archive)
                #filepath = models.CharField(max_length=500)
                #filename = models.CharField(max_length=256)
                #extension = models.CharField(max_length=20)
                #extension_link = models.ForeignKey(FileExtension, null = True)
                #filesize = models.IntegerField()
                #created_datetime = models.DateTimeField()
                #modified_datetime = models.DateTimeField()
                #checksum = models.CharField(max_length=128)
                #authentication_required = models.BooleanField(null = True)
                #mature_content = models.BooleanField(null = True)
                #personal_content = models.BooleanField(null = True)
                }
            #sql_statement = models.TextField(null = True)

        method = PATTERN_METHODS_MAPPING.get(self.execute_python_method, None)
        if method:
            print method.__doc__
            if method(self, file_entry):
                print "changed"
                changed = True

        if changed:
            file_entry.save()

    @classmethod
    def processPatterns(cls, file_entries = FileEntry.objects.all()):
        """

        """
        patterns = list(FilePattern.objects.all())
        for file_entry in file_entries:
            for pattern in patterns:
                pattern.processFileEntry(file_entry)
                


class FileStatus:
    """Enumeration of the states a cached file entry can be in."""
    unknown = 0    # not examined yet
    unchanged = 1  # identical to the cached record
    modified = 2   # size or modification time differs from the cache
    new = 3        # seen on disk for the first time
    deleted = 4    # cached but no longer present on disk
    duplicate = 5  # same checksum as another entry

def dict_factory(cursor, row):
    """sqlite3 row factory: map each column name in the cursor's
    description to the corresponding value of *row*."""
    column_names = [column[0] for column in cursor.description]
    return dict(zip(column_names, row))
    
import pprint

class InvalidArchivePath(Exception):
    """Raised when a PrimitiveArchive is opened on a path that does not exist."""
    pass

class PrimitiveArchive:
    
    def __init__(self, name, path):
        if not os.path.exists(path):
            raise InvalidArchivePath()
            
        self.path = path
        self.name = name
        self.diffpath = None
        self.extensions = [fe.extension for fe in FileExtension.objects.all()]
        self.username = None
        self.email = None
        self.generate_checksums = False
        self.extract_metadata = False
        self.ignore_files_below_size = 10*1024
        self.verbose = False
        self.log = []
        self.__dbfilename = os.path.join(self.path, self.name + '.db')
        try:
            self.__db = sqlite3.connect(self.__dbfilename, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
        except Exception, e:
            print e
            print self.__dbfilename

        if self.generate_checksums and self.extract_metadata:
            self.__db.isolation_level = 'EXCLUSIVE' # IMMEDIATE, DEFERRED, None
        else:
            self.__db.isolation_level = None
        self.create_db()
        self.__db.row_factory = dict_factory#sqlite3.Row
        self.__archive_status = None

    def get_params(self):
        return {
            'name': self.name,
            'path': self.path,
            'diffpath': self.diffpath,
            'extensions': self.extensions,
            'username': self.username,
            'email': self.email,
            'generate_checksums': self.generate_checksums,
            'extract_metadata': self.extract_metadata,
            'ignore_files_below_size': self.ignore_files_below_size
        }

    def create_db(self):
        sql1 = '''
            CREATE TABLE "ceemtu_primitive_filecache" (
            "id" integer NOT NULL PRIMARY KEY,
            "complete_filename" varchar(500) NOT NULL,
            "filepath" varchar(250) NOT NULL,
            "filename" varchar(250) NOT NULL,
            "extension" varchar(20) NOT NULL,
            "filesize" integer NOT NULL,
            "created_datetime" datetime NOT NULL,
            "modified_datetime" datetime NOT NULL,
            "checksum" varchar(128) NULL,
            "pickled_metadata" text NULL,
            "date_added" datetime NOT NULL,
            "filestatus" integer NULL,
            "processed" bool NULL,
            "process_msg" varchar(200)
            );'''
    
        sql2 = '''
            CREATE TABLE "ceemtu_primitive_archive" (
            "id" integer NOT NULL PRIMARY KEY,
            "name" varchar(200) NOT NULL,
            "path" varchar(250) NOT NULL,
            "diffpath" varchar(250) NULL,
            "extensions" text NULL,
            "username" varchar(150) NOT NULL,
            "email" varchar(150) NOT NULL,
            "generate_checksums" bool NULL,
            "extract_metadata" bool NULL,
            "ignore_files_below_size" integer NULL,
            "last_scan" datetime NULL,
            "status" integer NULL
            );
            '''
        
        sql3 = '''
            CREATE TABLE "ceemtu_primitive_log" (
            "id" integer NOT NULL PRIMARY KEY,
            "filename" varchar(500) NOT NULL,
            "msg" text NOT NULL,
            "date" datetime NOT NULL
            );'''
 
        cur = self.__db.cursor()
        for sql in (sql1, sql2, sql3):
            try:
                cur.execute(sql)
                self.inital_scanning = True
            except Exception, e:
                if self.verbose:
                    print "-", e
        
    def set_archive_settings(self):
        params = self.get_archive_settings()
        for key in params.keys():
            if hasattr(self, key):
                params[key] = getattr(self, key)
        params['extensions'] = ','.join(params.get('extensions', []))
        cur = self.__db.cursor()
        if params.get('id', None):
            sql = '''
            update ceemtu_primitive_archive set
            name = :name,
            path = :path,
            diffpath = :diffpath,
            extensions = :extensions,
            username = :username,
            email = :email,
            generate_checksums = :generate_checksums,
            ignore_files_below_size = :ignore_files_below_size
            where id = 1
            '''
        else:
            sql = '''
            insert into ceemtu_primitive_archive
            (name, path, diffpath, extensions, username, email, generate_checksums)
            values (:name, :path, :diffpath, :extensions, :username, :email, :generate_checksums)
            '''
        cur.execute(sql, params)

    def get_archive_settings(self):
        params = self.get_params()
        cur = self.__db.cursor()
        cur.execute('select * from ceemtu_primitive_archive where id = 1')
        result = cur.fetchone()
        if result:
            result['extensions'] = result['extensions'].split(',')
            return result
        return params

    def decode_metadata(self, data):
        try:
            data['metadata'] = cPickle.loads(base64.decodestring(data.get('pickled_metadata')))
        except Exception, e:
            print data
            print e
            data['metadata'] = None
        return data

    def load_data(self, complete_filename):
        cur = self.__db.cursor()
        cur.execute('select * from ceemtu_primitive_filecache where complete_filename = ?', [complete_filename])
        result = cur.fetchone()
        if not result:
            return result

        return self.decode_metadata(result)

    def save_data(self, data, initial_scan = False):
        try:
            md = data.get('metadata', None)
            data['pickled_metadata'] = md and base64.encodestring(cPickle.dumps(md)) or None
        except Exception, e:
            print data
            print e
            raise e

        complete_filename = str(data.get('complete_filename'))
        cur = self.__db.cursor()
        if data.has_key('id'): # update
            sql = '''
            update ceemtu_primitive_filecache set
            complete_filename = :complete_filename,
            filepath = :filepath,
            filename = :filename,
            extension = :extension,
            filesize = :filesize,
            created_datetime = :created_datetime,
            modified_datetime = :modified_datetime,
            checksum = :checksum,
            pickled_metadata = :pickled_metadata,
            filestatus = :filestatus,
            processed = :processed,
            process_msg = :process_msg
            where id = :id
            '''
        else: # insert
            sql = '''
            insert into ceemtu_primitive_filecache
            (complete_filename, filepath, filename, extension, filesize, created_datetime, modified_datetime,
            checksum, pickled_metadata, date_added, filestatus, processed, process_msg)
            values (:complete_filename, :filepath, :filename, :extension, :filesize, :created_datetime, :modified_datetime,
            :checksum, :pickled_metadata, :date_added, :filestatus, :processed, :process_msg)
            '''
        data['date_added'] = datetime.datetime.today()
        if not data.has_key('processed'):
            data['processed'] = None
        if not data.has_key('process_msg'):
            data['process_msg'] = None

        try:
            cur.execute(sql, data)
        except Exception, e:
            if self.verbose:
                print sql
                pprint.pprint(data)
            raise e

    def get_files(self):
        cur = self.__db.cursor()
        cur.execute("select * from ceemtu_primitive_filecache order by complete_filename")
        for f in cur:
            yield self.decode_metadata(f)
            
    def get_duplicates(self):
        cur = self.__db.cursor()
        sql = """select * from ceemtu_primitive_filecache where checksum in (
            select checksum from (
                select checksum, count(*) as a
                from ceemtu_primitive_filecache
                where checksum <> '' and checksum is not null
                group by checksum
                having count(*) > 1
                )
            )
        """
        cur.execute(sql)
        for f in cur:
            yield f#self.decode_metadata(f)

    def build_stats(self):
        cur = self.__db.cursor()
        cur.execute("select extension, sum(filesize) as TotalFileSize, count(*) as TotalFileCount from ceemtu_primitive_filecache group by extension order by extension")
        return list(cur.fetchall())

    def set_all_to_unchanged(self):
        cur = self.__db.cursor()
        cur.execute("update ceemtu_primitive_filecache set filestatus = ?", [FileStatus.unchanged])
        
    def check_filesystem(self):
        if self.verbose:
            print "Checking filesystem for changes: %s ..." % self.path,
        old_isolation_level = self.__db.isolation_level
        self.__db.isolation_level = None
        cur = self.__db.cursor()
        #cur.execute("update ceemtu_primitive_filecache set filestatus = ? where processed = ?", [FileStatus.unknown, False])

        for complete_filename in dirwalk(self.path, self.extensions):

            d = parse_filedata(self.path, complete_filename)
            if d.get('filesize', 0) < self.ignore_files_below_size:
                continue
    
            old_data = self.load_data(complete_filename)
            if not old_data:
                d['filestatus'] = FileStatus.new
                d['checksum'] = self.generate_checksums and generate_checksum(complete_filename) or None
                try:
                    d['metadata'] = self.extract_metadata and getMetadata(str(complete_filename)) or None
                except Exception, e:
                    d['metadata'] = None
                    print complete_filename
                    print e
                d['complete_filename'] = complete_filename
                d['processed'] = False
            else:
                for k,v in old_data.items():
                    if not d.has_key(k):
                        d[k] = v
                if (old_data.get('filesize', 0) != d.get('filesize')) or \
                 (old_data.get('modified_datetime', datetime.datetime(1900,1,1)) < d.get('modified_datetime')):
                    d['filestatus'] = FileStatus.modified
                    d['checksum'] = self.generate_checksums and generate_checksum(complete_filename) or None
                    try:
                        d['metadata'] = self.extract_metadata and getMetadata(str(complete_filename)) or None
                    except Exception, e:
                        d['metadata'] = None
                        print complete_filename
                        print e
                    d['processed'] = False
                else:
                    if d.get('processed', False):
                        d['filestatus'] = FileStatus.unchanged

            self.save_data(d)
        
        cur.execute("update ceemtu_primitive_filecache set filestatus = ? where filestatus = ? and processed = ?", [FileStatus.deleted, FileStatus.unknown, False])
        cur.execute("select count(*) as FileCount from ceemtu_primitive_filecache")
        return cur.fetchone()['FileCount']
        self.__db.isolation_level = old_isolation_level
        
    def get_changes(self, mark_as_processed = False):
        cur = self.__db.cursor()
        cur.execute("""
            select *
            from ceemtu_primitive_filecache
            where filestatus <> ?
            """, [FileStatus.unchanged])
        for entry in cur.fetchall():
            yield self.decode_metadata(entry)

        self.set_all_to_unchanged()
        #if mark_as_processed:
        #    cur.execute("update ceemtu_primitive_filecache set processed = ?", [True])
        cur.execute("update ceemtu_primitive_archive set last_scan = ?", [datetime.datetime.today()])

    def reset(self):
        cur = self.__db.cursor()
        cur.execute("update ceemtu_primitive_filecache set processed = null, filestatus = 0")
        
    @classmethod
    def load_from_file(cls, compressed_archive, tempfolder = None):
        tempdir = tempfolder and tempfolder or tempfile.gettempdir()
        archive = os.path.split(compressed_archive)[0].lower()
        outf_db = open(os.path.join(tempdir, archive + '.db'), 'wb')
        inputf = gzip.open(compressed_archive, 'rb', compresslevel = 5)
        try:
            d = inputf.read(1024*8)
            while d:
                outf_db.write(d)
                d = inputf.read(1024*8)                
        finally:
            outf_db.close()

        pa = PrimitiveArchive(archive, tempdir)
        return pa
    
    @classmethod
    def create_from_db(cls, archive):
        pa = PrimitiveArchive(archive.name, archive.path)
        pa.username = archive.user.username
        pa.email = archive.user.email
        dict = archive.get_file_extension_dict()
        pa.extensions = dict and dict.keys() or []
        pa.generate_checksums = archive.generate_checksums
        pa.extract_metadata = archive.extract_metadata
        pa.ignore_files_below_size = archive.ignore_files_below_size

        return pa

    def get_files_by_param(self, param, value):
        cur = self.__db.cursor()
        sql = "select * from ceemtu_primitive_filecache where %s = ?" % param
        cur.execute(sql, [value])
        for f in cur:
            yield self.decode_metadata(f)
    
    @classmethod
    def diffarchive(cls, archive1, archive2, quick_compare = False):
        def __comparison(_pa, _archive1, _archive2, quick_compare):
            for entry1 in _archive1.get_files():
                found = False
                files = _archive2.get_files_by_param('filesize', entry1.get('filesize'))
                for entry2 in files:
                    if quick_compare:
                        if entry1.get('created_datetime') == entry2.get('created_datetime') and \
                           entry1.get('modified_datetime') == entry2.get('modified_datetime') and \
                           entry1.get('filename') == entry2.get('filename'):
                            found = True
                            break
                    else:
                        checksum1 = entry1.get('checksum', None)
                        checksum2 = entry2.get('checksum', None)
                        if not checksum1:
                            checksum1 = generate_checksum(entry1.get('complete_filename'))
                            entry1['checksum'] = checksum1
                            _archive1.save_data(entry1.get('complete_filename'), entry1)
                        if not checksum2:
                            checksum2 = generate_checksum(entry2.get('complete_filename'))
                            entry2['checksum'] = checksum2
                            _archive2.save_data(entry2.get('complete_filename'), entry2)
                        if checksum1 == checksum2:
                            found = True
                            break
    
                new_entry = entry1.copy()
                if not found:
                    new_entry['processed'] = True
                    new_entry['process_msg'] = 'Quick compare: %s. Only in archive %s' % (quick_compare and 'yes' or 'no', _archive1.name)
                    new_entry['filestatus'] = FileStatus.new
                    del new_entry['id']
                else:
                    new_entry['processed'] = True
                    new_entry['process_msg'] = 'Quick compare: %s. Exists also in archive %s' % (quick_compare and 'yes' or 'no', _archive2.name)
                    new_entry['filestatus'] = FileStatus.duplicate
                    del new_entry['id']

                data = _pa.load_data(new_entry.get('complete_filename'))
                if not data:
                    _pa.save_data(new_entry.get('complete_filename'), new_entry)
            
        diffarchive_name = "%s_%s_diff" % (archive1.name, archive2.name)
        pa = PrimitiveArchive(diffarchive_name, archive1.path)
        pa.diffpath = archive2.path
        pa.username = archive1.username
        pa.email = archive1.email
        pa.extensions = archive1.extensions + archive2.extensions
        pa.generate_checksums = quick_compare == False
        __comparison(pa, archive1, archive2, quick_compare)
        __comparison(pa, archive2, archive1, quick_compare)

        return pa

    def synchronize_differences(self):
        if not self.diffpath:
            raise Exception('This does not seem to be a diffarchive.')
        for entry in self.get_files_by_param('filestatus', 3):
            print entry.get('complete_filename'), entry.get('filestatus')

    def create_package(self):
        self.check_filesystem()
        outf_gzip = os.path.join(self.path, self.name + '.cet')
        output = gzip.open(outf_gzip, 'wb', compresslevel = 5)
        try:
            inputf = open(self.__dbfilename)
            d = inputf.read(1024*8)
            while d:
                output.write(d)
                d = inputf.read(1024*8)                
        finally:
            output.close()
        return outf_gzip
    
    def report(self):
        print "="*80
        for k,v in self.get_archive_settings().items():
            print "%s\t:%s" % (k, v)
        print
        print "Files:"
        for entry in self.get_files():
            print "-"*80
            pprint.pprint(self.decode_metadata(entry))
#    
#class CoreCache:
#    
#    def __init__(self, verbose = False):
#        self.verbose = verbose
#        self.last_refresh = datetime.datetime.today()
#        self.refresh_rate = 1 # minutes between refresh
#        if self.verbose:
#            print "CoreCache initialized @ %s ...." % self.last_refresh
#        self.refresh()
#        
#    def refresh(self):
#        if self.verbose:
#            print "CoreCache refreshed ....",
#        self._date_periods = DatePeriod.objects.all()
#        self._categories = Category.objects.all()
#        self._file_extensions = FileExtension.objects.all()
#        self._file_system_types = FileSystemType.objects.all()
#        self._projects = Project.objects.all()
#        self._actions = Action.objects.all()
#        self._file_patterns = FilePattern.objects.all()
#        #self._build_pattern_methods()
#        if self.verbose:
#            print "done!"
#    
#    def __check_refresh(self):
#        now = datetime.datetime.today()
#        _delta = now - self.last_refresh
#        if _delta.seconds / 60 > self.refresh_rate:
#            self.refresh()
#            self.last_refresh = now
#            
#    def _get_date_periods(self):
#        self.__check_refresh()
#        return self._date_periods
#    date_periods = property(_get_date_periods)
#        
#    def _get_categories(self):
#        self.__check_refresh()
#        return self._categories
#    categories = property(_get_categories)
#
#    def _get_file_extensions(self):
#        self.__check_refresh()
#        return self._file_extensions
#    file_extensions = property(_get_file_extensions)
#
#    def _get_file_system_types(self):
#        self.__check_refresh()
#        return self._file_system_types
#    file_system_types = property(_get_file_system_types)
#
#    def _get_projects(self):
#        self.__check_refresh()
#        return self._projects
#    projects = property(_get_projects)
#
#    def _get_actions(self):
#        self.__check_refresh()
#        return self._actions
#    actions = property(_get_actions)
#
#    def _get_file_patterns(self):
#        self.__check_refresh()
#        return self._file_patterns
#    file_patterns = property(_get_file_patterns)
#
#    #def _build_pattern_methods():
#    #    self.PATTERN_METHODS = [('None', None)]
#    #    self.PATTERN_METHODS_MAPPING = {}
#    #    
#    #    def get_methods(pattern_methods_object, group):
#    #        for item in dir(pattern_methods_object):
#    #            element = getattr(pattern_methods_object, item)
#    #            if type(element) == types.MethodType:
#    #                self._PATTERN_METHODS.append((item, "%s : %s" % (group, element.__doc__ and element.__doc__ or item)))
#    #                self._PATTERN_METHODS_MAPPING[item] = element
#    #
#    #    get_methods(PhotoPatternMethods(), 'Photo')
#    #    get_methods(MoviePatternMethods(), 'Movie')
#    #    get_methods(MusicPatternMethods(), 'Music')
#    #    get_methods(CorePatternMethods(), 'Core')
#    #    self._PATTERN_METHODS = tuple(PATTERN_METHODS)
#    #
#    #def _get_pattern_methods(self):
#    #    self.__check_refresh()
#    #    return self._PATTERN_METHODS
#    #pattern_methods = property(_get_pattern_methods)
#    #
#    #def _get_pattern_methods_mapping(self):
#    #    self.__check_refresh()
#    #    return self._PATTERN_METHODS_MAPPING
#    #pattern_methods_mapping = property(_get_pattern_methods_mapping)