# -*- coding:utf-8 -*-

from __future__ import absolute_import
from django.db.models.signals import pre_delete
from django.db import models
from django.conf import settings
import os
import re
import numpy as np
from django.dispatch import receiver
from metlib.data.misc import seq2chunks
from metlib.datetime import *
from metlib.shell.fileutil import *
from metlib.misc.download import download
from metlib.misc.datatype import *
from metlib.misc.misc import gen_uuid32
from weblib.django_kits import *
from weblib.common.file_transporter import *

# Root directory where cached files are stored, one sub-directory per bucket.
# get_setting/MKDIR come from the project's weblib/metlib helpers (star-imported above).
FC_CACHE_ROOT = get_setting('FILECACHE_ROOT', os.path.join(get_setting('BASE_DIR'), 'filecache/caches'))
MKDIR(FC_CACHE_ROOT)

# Staging area: files are fetched here first, then MV'ed into the cache root.
# NOTE(review): this fallback uses settings.BASE_DIR directly while the line
# above goes through get_setting('BASE_DIR') -- presumably equivalent; confirm.
FC_TMP_ROOT = get_setting('FILECACHE_TMP', os.path.join(settings.BASE_DIR, 'filecache/caches/tmp'))
MKDIR(FC_TMP_ROOT)

# Create your models here.

# Default per-bucket quota in bytes (256 MB, decimal).
DEFAULT_FILECACHE_BUCKET_SIZE = 256000000

class FileCacheBucket(models.Model):
    """A named bucket of cached files with a byte-size quota.

    ``used_size`` is maintained incrementally; once it exceeds
    ``max_size * full_ratio``, entries are evicted (least recently
    accessed first) until the bucket shrinks to roughly
    ``max_size * clear_ratio``.
    """
    class Meta:
        pass

    # The name doubles as the primary key and as the on-disk sub-directory.
    name = models.CharField(max_length=128, default='default', unique=True, primary_key=True)
    max_size = models.BigIntegerField(default=DEFAULT_FILECACHE_BUCKET_SIZE)  # quota in bytes
    used_size = models.BigIntegerField(default=0)  # bytes currently accounted for
    full_ratio = models.FloatField(default=0.9)    # eviction trigger threshold
    clear_ratio = models.FloatField(default=0.4)   # eviction target level

    # Re-entrancy guard: clear_cache() deletes FileCache rows, whose pre_delete
    # handler calls update_used_size() again; this flag breaks the cycle.
    _bypass_update_used_size = False

    @property
    def free_size(self):
        """Remaining quota in bytes (may be negative when over-full)."""
        return self.max_size - self.used_size

    @property
    def count(self):
        """Number of FileCache entries currently recorded in this bucket."""
        return self.filecache_set.count()

    def __unicode__(self):
        return self.name

    def clean(self):
        # When max_size is edited through a form, recompute the real usage.
        self.update_used_size()

    def get(self, uri, when_not_exist=('create',)):
        """Return the FileCache entry for *uri*, fetching/creating it if absent.

        *when_not_exist* is a sequence of candidates tried in order; each is:
          * ``'create'``           -- create an empty (size 0) record and
                                      return it immediately;
          * ``(callable, kwargs)`` -- call ``callable(uri, tmp_fname, **kwargs)``;
          * ``callable``           -- call ``callable(uri, tmp_fname)``.
        A fetcher signals success by returning a non-None value; fetcher
        exceptions are swallowed so the next candidate can be tried
        (best-effort chain).

        Raises DatafileNotAvailable when no candidate produced the file.
        """
        now_dt = local_now()
        try:
            cached = self.filecache_set.get(pk=uri)
            cached.access_dt = now_dt
            cached.save()
            if not os.path.exists(cached.filepath):
                # Stale record: the file vanished from disk.  Drop the record
                # and fall through to the fetch/create path below.
                self.update_used_size(-cached.size)
                cached.delete()
                raise FileCache.DoesNotExist
        except FileCache.DoesNotExist:
            # Fetch the original file into a temp location, then register it.
            try:
                uufname = gen_uuid32()
                tmp_fname = os.path.join(FC_TMP_ROOT, self.name, uufname)
                cache_fname = os.path.join(FC_CACHE_ROOT, self.name, uufname)
                MKDIR(DIRNAME(tmp_fname))
                MKDIR(DIRNAME(cache_fname))
                fetch_dest = None

                if not isseq(when_not_exist):
                    when_not_exist = [when_not_exist]

                for wne in when_not_exist:
                    if wne == 'create':
                        cached = FileCache(uri=uri, bucket=self, storage_name=uufname,
                                           size=0,
                                           create_dt=now_dt, access_dt=now_dt, modify_dt=now_dt)
                        cached.save()
                        return cached
                    elif isinstance(wne, tuple) and len(wne) == 2 and callable(wne[0]):
                        try:
                            fetch_dest = wne[0](uri, tmp_fname, **wne[1])
                        except Exception:
                            # Best-effort: try the next fetcher candidate.
                            continue
                    elif callable(wne):
                        try:
                            fetch_dest = wne(uri, tmp_fname)
                        except Exception:
                            continue

                    if fetch_dest is not None:
                        break

                if fetch_dest is None:
                    raise DatafileNotAvailable(uri)

                size = filesize(tmp_fname)
                MV(tmp_fname, cache_fname)
                cached = FileCache(uri=uri, bucket=self, storage_name=uufname,
                                   size=size,
                                   create_dt=now_dt, access_dt=now_dt, modify_dt=now_dt)
                cached.save()
                self.update_used_size(size)
            except Exception:
                # Any failure in the fetch/register pipeline is reported as
                # the file being unavailable.  TODO(review): log the
                # underlying exception instead of discarding it.
                raise DatafileNotAvailable(uri)

        return cached

    def get_total_size(self):
        """Sum of all cached file sizes recorded in the database."""
        return np.sum(self.filecache_set.values_list('size', flat=True))

    def update_used_size(self, delta=None):
        """Adjust used_size by *delta*, or recompute it when delta is None.

        Triggers clear_cache() when usage crosses max_size * full_ratio.
        Returns the new used_size.
        """
        if self._bypass_update_used_size:  # re-entrancy guard (see above)
            return self.used_size
        if delta is None:
            self.used_size = self.get_total_size()
        else:
            self.used_size = self.used_size + delta
        if self.used_size > self.max_size * self.full_ratio:
            self._bypass_update_used_size = True
            self.clear_cache(by='access_dt')
            self.used_size = self.get_total_size()
            self._bypass_update_used_size = False
        self.save()
        return self.used_size

    def clear_cache(self, by='access_dt', ensure_free_size=None):
        """Evict old entries (ordered by *by*, oldest first) until the bucket
        holds at most ``max_size * clear_ratio`` bytes, or leaves at least
        *ensure_free_size* bytes free when that is the stricter target.

        Returns the number of deleted entries, or None when nothing needed
        to be evicted.
        """
        target_total_size = int(self.max_size * self.clear_ratio)
        if ensure_free_size and self.max_size - target_total_size < ensure_free_size:
            target_total_size = self.max_size - ensure_free_size

        # Newest first; everything past the cumulative-size cutoff is deleted.
        fcs = self.filecache_set.order_by(by).reverse()
        # Explicit None check: the original relied on Py2's None < int ordering.
        if ensure_free_size is not None and ensure_free_size >= self.max_size:
            # Impossible to satisfy by partial eviction: drop everything.
            to_delete = fcs[:]
        else:
            sizes = fcs.values_list('size', flat=True)
            if np.sum(sizes) <= target_total_size:
                return
            acc_sizes = np.add.accumulate(sizes)
            # First index whose running total exceeds the target.
            i = np.argmax(acc_sizes > target_total_size)
            to_delete = fcs[i:]
        to_delete = [tod.pk for tod in to_delete]
        # Delete in chunks to keep each SQL IN() clause bounded.
        for part in seq2chunks(to_delete, 100):
            fcs.filter(pk__in=part).delete()
        self.update_used_size()
        return len(to_delete)


class FileCache(models.Model):
    """A single cached file, identified by its source URI.

    The bytes live on disk at ``filepath``; this row tracks the size and
    the timestamps that drive the bucket's LRU-style eviction.
    """
    class Meta:
        pass

    uri = models.CharField(max_length=255, unique=True, default=gen_uuid32, primary_key=True)
    storage_name = models.CharField(max_length=32, default=gen_uuid32)  # on-disk file name
    bucket = models.ForeignKey(FileCacheBucket)

    is_link = models.BooleanField(default=False)
    filetype = models.CharField(max_length=32, default='', null=True, blank=True)
    ext = models.CharField(max_length=8, default='', blank=True)
    size = models.BigIntegerField(default=0)  # bytes; 0 until the file is fetched
    create_dt = models.DateTimeField(default=local_now)
    access_dt = models.DateTimeField(default=local_now)  # drives eviction order
    modify_dt = models.DateTimeField(default=local_now)

    @property
    def filepath(self):
        """Absolute path of the cached file on disk.

        ``bucket_id`` is the bucket's primary key (its name), which is also
        the sub-directory the file lives in.
        """
        return os.path.join(FC_CACHE_ROOT, self.bucket_id, self.storage_name)

    def update(self, size=None, filetype=None, ext=None, uri=None):
        """Update metadata fields.

        Bumps access_dt (and modify_dt when anything changed) and propagates
        any size change to the bucket's usage accounting.
        """
        modified = False
        size_delta = 0
        if size is not None:
            # TODO: a single cached file larger than the whole bucket will
            # cause serious problems in the bucket's eviction accounting.
            old_size = self.size
            self.size = size
            size_delta = self.size - old_size
            modified = True
        if filetype is not None:
            self.filetype = filetype
            modified = True
        if ext is not None:
            self.ext = ext
            modified = True
        if uri is not None:
            self.uri = uri
            modified = True

        self.access_dt = local_now()
        if modified:
            self.modify_dt = self.access_dt
        self.save()

        if size_delta != 0:
            self.bucket.update_used_size(size_delta)

    def open(self, mode='rb'):
        """Open the cached file and return the file object.

        Fix: *mode* was previously accepted but ignored -- the file was
        always opened with the default text-read mode.
        """
        self._f = open(self.filepath, mode)
        return self._f

    def __unicode__(self):
        return self.uri

@receiver(pre_delete, sender=FileCache, dispatch_uid='FC_DEL_DUID')
def on_FileCache_delete(**kwargs):
    """Remove the on-disk file and release its bytes from the bucket's
    quota whenever a FileCache row is about to be deleted."""
    fc = kwargs['instance']
    # Read size and bucket before touching the filesystem.
    size, bucket = fc.size, fc.bucket
    RM(fc.filepath)
    if size != 0:
        bucket.update_used_size(-size)


