from common import md5, memToByte
from config import globalConf

# Upper bound for the cache, converted to a byte count from the configured
# value (e.g. "64M") — TODO confirm memToByte's accepted formats in common.py.
maxCacheSize = memToByte(globalConf["maxCacheSize"])
# Each entry is a 4-item list: [md5(filename), weight, size, content].
# weight counts how often the entry was used; size is len(content).
cache = [] # elements: [md5(filename), weight, size, content]
# Running total of the `size` fields of every entry in `cache`.
currentCacheSize = 0
# Which column `cache` is currently sorted by (see _sortTo).
sorting = 0 # 0: By filename  1: By weight

def _sortTo(s):
    '''
        Re-sort the cache by the given key, if it is not already in that order.
        0: by md5 of the filename (enables binary search in _fileExist)
        1: by usage weight (so the least-used entry sits at the front
           and can be evicted first)
        A trie plus a priority queue would in theory do better than
        re-sorting the whole list, but this is good enough here.
    '''
    global sorting
    if sorting == s:
        return  # already ordered by the requested key
    sorting = s
    cache.sort(key = lambda entry: entry[s])

def _fileExist(fnm):
    '''
        Binary-search the cache for an entry whose md5 key equals fnm.
        Returns the entry's index, or -1 when it is not cached.
    '''
    _sortTo(0)  # binary search needs filename (md5) order
    lo, hi = 0, len(cache) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        key = cache[mid][0]
        if key == fnm:
            return mid
        elif key < fnm:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1
    
def loadFromFile(fnm):
    '''
        Read a file's content, serving it from the in-memory cache when
        possible so the disk is not touched on a hit.
        No error handling is done (a missing file raises OSError),
        matching the rest of this module.
    '''
    global currentCacheSize, sorting
    _md5 = md5(fnm)
    indx = _fileExist(_md5)
    if indx != -1: # file exists in cache
        cache[indx][1] += 1 # increase usage weight
        return cache[indx][3]

    # cache miss: load from disk (with-statement closes the handle,
    # which the old open(...).read() leaked)
    with open(fnm, "r") as f:
        tmp = f.read()
    l = len(tmp)

    if l > maxCacheSize:
        # File can never fit in the cache. Return it uncached — the old
        # code emptied the whole cache, then popped from the empty list
        # (IndexError) and would have stored an oversized entry anyway.
        return tmp

    if currentCacheSize + l > maxCacheSize: # not enough room: evict
        _sortTo(1) # least-used entries move to the front
        while currentCacheSize + l > maxCacheSize:
            t = cache.pop(0) # erase the least useful
            currentCacheSize -= t[2]

    cache.append([_md5, 1, l, tmp])
    currentCacheSize += l
    # The append breaks whatever order the cache was sorted in; reset the
    # flag so the next _sortTo() really re-sorts. Without this,
    # _fileExist would binary-search an unsorted list and miss entries.
    sorting = -1
    return tmp

def writeToFile(fnm, cont):
    '''
        Write cont to fnm on disk, and keep any existing cache entry in
        sync (content, size, and the module-wide size accounting).
        Files that are not already cached are deliberately not added.
    '''
    global currentCacheSize
    # with-statement closes the handle even if write() raises
    with open(fnm, "w") as f:
        f.write(cont)

    _md5 = md5(fnm)
    indx = _fileExist(_md5)
    if indx != -1: # file exists in cache
        cache[indx][1] += 1 # increase usage weight
        # The old code updated the content but left the stored size and
        # currentCacheSize stale, corrupting eviction accounting.
        currentCacheSize += len(cont) - cache[indx][2]
        cache[indx][2] = len(cont)
        cache[indx][3] = cont # update cached content

def appendToFile(fnm, cont):
    '''
        Append cont to fnm on disk, and keep any existing cache entry
        consistent with the file's new on-disk content.
    '''
    global currentCacheSize
    # with-statement closes the handle even if write() raises
    with open(fnm, "a") as f:
        f.write(cont)

    indx = _fileExist(md5(fnm))
    if indx != -1:
        # The old code left the cached copy stale after an append, so a
        # later loadFromFile returned outdated content.
        cache[indx][3] += cont
        cache[indx][2] += len(cont)
        currentCacheSize += len(cont)
