'''
Repository keeps sources, preprocessed sources and ASTs on the filesystem.

There are two repositories: a temporary one in /tmp and a permanent one in
the SALSA data path.
'''

from salsa.service import Service
from salsa.util.rpc import RPCProtocol, RPCInfo
from salsa.util.io import IOFeederReply
from salsa.util import LoggerSeverity

from salsa import SALSAException

import shelve
import os
import hashlib
import random
import string
import shutil

class RepositoryException(SALSAException):
    '''Raised for repository errors: duplicate files, unknown hashes,
    exhausted hash-generation attempts or invalid storage names.'''
    pass

class Repository(Service):
    '''
    Filesystem-backed file repository service.

    Each file is identified by a SHA-1 hash of its (name, tag) pair and is
    stored at <data_path>/<hash[:2]>/<hash[2:]>.  Two storages exist:
    'prm' (permanent; index kept in a shelve db under the SALSA data path)
    and 'tmp' (temporary; in-memory index, files under /tmp, wiped on start).
    '''

    def __init__(self):
        Service.__init__(self)

        # Permanent storage lives under the SALSA data path; temporary
        # storage is recreated from scratch on every service start.
        self.prm_data_path = self.core.abs_data_path(self.params.get('prm_data_path', 'repo/'))
        self.tmp_data_path = self.params.get('tmp_data_path', '/tmp/salsa_repo/')

        if os.path.exists(self.tmp_data_path):
            shutil.rmtree(self.tmp_data_path)

        os.makedirs(self.tmp_data_path)

        self.tmp_repo = {}
        self.db_connect()

        # Valid storage names; get_repo_path() resolves them to the
        # '<storage>_repo' / '<storage>_data_path' attributes above.
        self.storages = ('prm', 'tmp')

        self.logger.log(LoggerSeverity.LS_NOTICE, 'Repository is started')

    def db_connect(self):
        '''Open (creating if needed) the permanent repository shelve db.'''
        self.db_path = os.path.join(self.prm_data_path, 'repo.db')

        self.prm_repo = shelve.open(self.db_path)

        self.logger.log(LoggerSeverity.LS_NOTICE, 'Repository permanent db is connected to %s' % self.db_path)

    def gen_hash(self, name, tag, seed=''):
        '''Return the SHA-1 hex digest of name+tag+seed, used as a file id.'''
        # Encode explicitly: hashlib rejects str (unicode) input on Python 3.
        return hashlib.sha1((name + tag + seed).encode('utf-8')).hexdigest()

    def gen_new_hash(self, name, tag, repo):
        '''
        Generate a fresh hash for a new (name, tag) file.

        Retries with a random seed on hash collisions.  Raises
        RepositoryException if the same name/tag pair already exists, or if
        no free hash is found within the attempt budget.
        '''
        attempts = 5
        seed = ''

        while attempts:
            file_hash = self.gen_hash(name, tag, seed)

            attempts -= 1
            # Single random digit as the next seed; collisions (including a
            # repeated seed) are re-checked on every iteration.
            seed = random.choice(string.digits)

            if file_hash not in repo:
                break
            else:
                entry = repo[file_hash]

                if entry['name'] == name and entry['tag'] == tag:
                    raise RepositoryException('File %s:%s already exists (hash: %s)' % (name, tag, file_hash))
        else:
            # while/else: every attempt produced a colliding hash.
            self.logger.log(LoggerSeverity.LS_ALERT, 'Failed generating new hash! Repository may be full')
            raise RepositoryException('Failed generating new hash!')

        return file_hash

    def gen_path(self, hash, data_path):
        '''Return (dir_path, file_path) for a hash: first two hex chars name
        the directory, the rest name the file.'''
        dir_hash = hash[:2]
        name_hash = hash[2:]

        dir_path = os.path.join(data_path, dir_hash)
        file_path = os.path.join(dir_path, name_hash)

        return (dir_path, file_path)

    def get_repo_path(self, storage):
        '''Resolve a storage name to its (index, data_path) pair.

        Raises RepositoryException for unknown storage names.
        '''
        if storage in self.storages:
            repo = getattr(self, storage + '_repo')
            data_path = getattr(self, storage + '_data_path')

            return repo, data_path

        raise RepositoryException('Wrong storage name %s' % storage)

    def create_file(self, name, taskid, tag, storage='prm'):
        '''Create an empty repository file and register it; return its hash.

        taskid is recorded as the file's creator.  Callers obtain the real
        path via abs_repo_path() and write the content themselves.
        '''
        repo, data_path = self.get_repo_path(storage)

        hash = self.gen_new_hash(name, tag, repo)

        dir_path, file_path = self.gen_path(hash, data_path)

        try:
            if not os.path.exists(dir_path):
                os.makedirs(dir_path)

            # Touch an empty file; content is written later by the caller.
            with open(file_path, 'w'):
                pass
        except OSError as ose:
            self.logger.log(LoggerSeverity.LS_ALERT, 'Failed creating file %s; os error: %s' % (name, str(ose)))
            raise ose

        self.logger.log(LoggerSeverity.LS_DEBUG, 'Created file %s,%s,%s,%s -> %s,%s' %
                            (name, tag, taskid, storage, hash, file_path))

        repo[hash] = {'name': name, 'creator' : taskid, 'tag': tag}

        return hash

    def destroy_file(self, hash, storage='prm'):
        '''Remove a file from disk and drop it from the storage index.'''
        hash = str(hash)
        repo, data_path = self.get_repo_path(storage)

        if hash not in repo:
            # BUGFIX: the hash was never interpolated into the message.
            raise RepositoryException('Hash %s is not in repository!' % hash)

        entry = repo[hash]
        dir_path, file_path = self.gen_path(hash, data_path)

        try:
            os.remove(file_path)
        except OSError as ose:
            # BUGFIX: entries have no 'file' key (see create_file); log the
            # recorded name instead of raising KeyError in the handler.
            self.logger.log(LoggerSeverity.LS_ALERT, 'Failed destroying file %s; os error: %s' % (entry['name'], str(ose)))
            raise ose

        del repo[hash]

        return RPCProtocol.STATUS_OK

    def search_file(self, name):
        '''Stream info dicts for every file named `name` in any storage.'''
        def _search_file(self, name):
            # Generator: one {'storage', 'hash', 'name', 'creator', 'tag'}
            # dict per matching entry, across both storages.
            for storage in self.storages:
                repo, data_path = self.get_repo_path(storage)

                for hash, file in repo.items():
                    if file['name'] == name:
                        info = {'storage': storage, 'hash': hash}
                        info.update(file)
                        yield info

        return IOFeederReply(_search_file(self, name))

    def list_repo(self, storage='prm'):
        '''Stream all (hash, entry) pairs of the given storage.'''
        repo, data_path = self.get_repo_path(storage)

        return IOFeederReply(iter(repo.items()))

    def abs_repo_path(self, hash, storage='prm'):
        '''Return the absolute filesystem path of a stored file.'''
        repo, data_path = self.get_repo_path(storage)

        return self.gen_path(hash, data_path)[1]

    def do_stop(self):
        '''Service shutdown hook: flush and close the permanent db.'''
        self.prm_repo.close()

# Expose the repository operations to clients over RPC.
Repository.rpc_info = RPCInfo()
Repository.rpc_info.register(Repository, 'create_file', 'destroy_file', 'search_file', 'abs_repo_path', 'list_repo')