#!/usr/bin/env python
import collections
import os
import os.path
import shutil
import sys
import time

from Queue import Empty, Full
from threading import Lock

from Ganga.Utility.logging import getLogger
from Ganga.Lib.LCG.Utility import get_uuid
from Ganga.Core.GangaRepository import getRegistry, RegistryKeyError
from Ganga.Core.GangaThread.MTRunner import MTRunner, Data, Algorithm

from dq2.clientapi.DQ2 import DQ2
from dq2.common.DQException import *

from AutoD3PDMakerTask import AutoD3PDMakerTask
from AutoD3PDMakerUtility import AutoD3PDMakerQueue, AutoD3PDMakerError, readStrippedLines

## Utility functions belonging to task management
def selectTasksFromStores(taskStoreList, clauses):
    """
    selects tasks matching the searching clauses in multiple taskStores.

    The stores are queried concurrently through the Ganga MTRunner and the
    matching tasks of all stores are merged into a single flat list.
    """

    logger = getLogger('AutoD3PDMakerTaskManager')

    class _SelectAlgorithm(Algorithm):
        """runs the selection clauses against one task store per worker call"""

        def __init__(self, clauses):
            Algorithm.__init__(self)
            self.clauses = clauses

        def process(self, taskStore):
            matched = taskStore.selectTasks(self.clauses)

            ## keep only non-empty results, keyed by the store's dbpath
            if matched:
                self.__appendResult__(taskStore.dbpath, matched)

    ## one worker per store, capped at 10 threads
    nthreads = min(len(taskStoreList), 10)

    runner = MTRunner(name='ts_select',
                      algorithm=_SelectAlgorithm(clauses),
                      data=Data(collection=taskStoreList),
                      numThread=nthreads)
    runner.start()
    runner.join(-1)

    taskList = []
    for dbpath, matched in runner.getResults().items():
        logger.debug('*** %s ***' % dbpath)
        logger.debug(repr(matched))
        taskList += matched

    return taskList

def shapeNewTasks(taskDefinitionList):
    '''
    shapes the given task definition list to form proper AutoD3PDMakerTask list for adding to the task store.

    Each entry of taskDefinitionList may be:
      - a string holding a python dict literal (it is eval'ed),
      - a dict,
      - any other object whose repr() can be turned into a dict.

    Entries that cannot be converted are logged and skipped instead of
    aborting the whole list.
    '''

    logger = getLogger('AutoD3PDMakerTaskManager')

    tasks = []

    now = time.time()

    for l in taskDefinitionList:

        ## the whole per-entry shaping is guarded so that one malformed
        ## definition cannot abort the processing of the remaining entries
        try:
            ## convert String-based description to dict
            ## SECURITY NOTE: eval() on the definition text is dangerous if the
            ## task definition file comes from an untrusted source
            if isinstance(l, str):
                tl = eval(l)
            elif isinstance(l, dict):
                tl = l
            else:
                tl = dict( repr(l) )

            ## remove identity if it's already presented (the id should be given internally)
            if 'id' in tl:
                del tl['id']

            t = AutoD3PDMakerTask( repr(tl) )

            ## assign new task id and give initial values of some attributes
            t.id         = get_uuid()
            t.gtId       = -1
            t.gtId_pp    = -1
            t.lastUpdate = now

            try:
                ## if t.output is presented, copy it except certain attributes, e.g. ds
                out = t.output
                t.output = {}
                for k, v in out.items():
                    if k not in ['ds']:
                        t.output[k] = v
            except AttributeError:
                t.output = {}

            t.status     = AutoD3PDMakerTask.status_map['new']
            t.error      = ''

            ## set cloud/site/backend default values if they are not presented
            for attr, default in (('cloud', ''), ('site', ''), ('backend', 'LCG')):
                try:
                    getattr(t, attr)
                except AttributeError:
                    setattr(t, attr, default)

            t.progress   = {}
            tasks.append(t)
        except Exception:
            logger.error('cannot load new task: %s' % l)

    return tasks

def getNewTasksFromFile(taskDefinitionFile):
    '''
    gets AutoD3PDMakerTask list from a plain text task definition file.

    raises AutoD3PDMakerError when the file does not exist.
    '''

    logger = getLogger('AutoD3PDMakerTaskManager')

    ## guard clause: a missing definition file is a hard error
    if not os.path.exists(taskDefinitionFile):
        raise AutoD3PDMakerError('file not found: %s' % taskDefinitionFile)

    logger.info('Loading new tasks from task definition file ...')

    return shapeNewTasks(readStrippedLines(taskDefinitionFile))

class AutoD3PDMakerTaskManager:
    """
    AutoD3PDMaker task manager providing interfaces to operate tasks in the queue and in the task store.
    """
    def __init__(self, taskStore=None, taskHandler=None):
        """
        D3PDMaker task manager maintains and manages the tasks defined with the following attributes:

            - id       : the system generated task uniq id
            - rundef   : the location of the AMAAthena rundef.py file
            - amaconfig: the location of the AMA config file
            - ds       : the input dataset pattern
        """

        self.logger = getLogger('AutoD3PDMakerTaskManager')

        ## the task manager UUID
        self.id = get_uuid()

        ## the persistent task storage
        self.taskStore = taskStore

        ## the task handler
        self.taskHandler = taskHandler

        ## tasks in q_new: new tasks waiting for generation and running
        self.q_new      = AutoD3PDMakerQueue(fpathDump='d3pdmaker_mgr_%s_new.tasks' % self.id)
        self.q_new_lock = Lock()

        ## tasks in q_run: tasks in running status
        self.q_run      = AutoD3PDMakerQueue(fpathDump='d3pdmaker_mgr_%s_run.tasks' % self.id)
        self.q_run_lock = Lock()

        ## tasks in q_pause: tasks in 'pause' status, something wrong with the task
        self.q_pause      = AutoD3PDMakerQueue(fpathDump='d3pdmaker_mgr_%s_pause.tasks' % self.id)
        self.q_pause_lock = Lock()

        ## tasks in q_post: tasks to be started for post-processing
        self.q_post      = AutoD3PDMakerQueue(fpathDump='d3pdmaker_mgr_%s_post.tasks' % self.id)
        self.q_post_lock = Lock()

        ## tasks in q_fini: tasks running post-processing jobs
        self.q_fini      = AutoD3PDMakerQueue(fpathDump='d3pdmaker_mgr_%s_fini.tasks' % self.id)
        self.q_fini_lock = Lock()

        ## tasks in q_log: tasks to be archived as it's completed
        self.q_log      = AutoD3PDMakerQueue(fpathDump='d3pdmaker_mgr_%s_log.tasks' % self.id)
        self.q_log_lock = Lock()

        if self.taskStore:
            self.loadTasks()

    def verifyTasks(self, taskList):
        '''
        verifies the given taskList to check if they can be properly handled by the given taskHandler.

        returns a (goodTasks, badTasks) tuple.
        '''

        goodTasks = []
        badTasks  = []

        for t in taskList:
            if self.taskHandler.verifyTaskObj(t):
                goodTasks.append(t)
            else:
                ## tasks failing the verification are reported separately
                badTasks.append(t)

        return (goodTasks, badTasks)

    def makeReport(self, taskStoreList=None):
        '''
        make reports on the tasks in the given task stores (plus this
        manager's own task store).
        '''

        ## avoid a mutable default argument: a shared default list would
        ## accumulate task stores across successive calls
        if taskStoreList is None:
            taskStoreList = []

        if self.taskStore not in taskStoreList:
            taskStoreList.append(self.taskStore)

        tlist = selectTasksFromStores(taskStoreList, {})

        return self.taskHandler.makeTaskReport(tlist)

    def loadTasks(self):
        '''
        loads tasks from persistent task store to internal queues
        '''

        self.logger.info('Loading tasks from task store ...')

        ## 1. load new tasks
        for t in self.taskStore.selectTasks({'status':AutoD3PDMakerTask.status_map['new']}):
            self.addNewTask( t )

        ## 2. load run tasks
        for t in self.taskStore.selectTasks({'status':AutoD3PDMakerTask.status_map['running']}):
            self.addRunTask( t )

        ## 3. load pause tasks
        for t in self.taskStore.selectTasks({'status':AutoD3PDMakerTask.status_map['pause']}):
            self.addPauseTask( t )

        ## 4. load post tasks
        for t in self.taskStore.selectTasks({'status':AutoD3PDMakerTask.status_map['completed']}):
            self.addPostTask( t )

        ## 5. load post-processing tasks
        ppTasks = self.taskStore.selectTasks({'status':AutoD3PDMakerTask.status_map['finishing']})
        if not self.taskHandler.ppBatchMode:
            ## reset the post-processing status to 'completed' in order to rerun them if those post-processing tasks are not in batch mode
            for t in ppTasks:
                self.resetTasks({'id':t.id}, 'completed')
        else:
            ## load them in the q_fini
            for t in ppTasks:
                self.addFiniTask( t )

    def __getNextTaskFromQueue__(self, queue, lock):
        """common logic to get next task from a Queue object, or None if empty"""
        task = None

        lock.acquire()

        try:
            try:
                if not queue.empty():
                    task = queue.get_nowait()
            except Empty:
                pass
        finally:
            ## always release the lock, even on unexpected errors
            lock.release()

        return task

    def __getAllTasksFromQueue__(self, queue, lock):
        """common logic to get (and drain) all tasks currently in a Queue object"""
        tasks = []

        lock.acquire()

        try:
            try:
                while not queue.empty():
                    tasks.append( queue.get_nowait() )
            except Empty:
                pass
        finally:
            lock.release()

        return tasks

    def __addTasksToQueue__(self,tasks,queue,lock):
        """common logic to add multiple tasks to a Queue object, skipping duplicates"""

        lock.acquire()

        try:
            try:
                for t in tasks:
                    if t in queue.queue:
                        self.logger.warning('task %s already in the queue, skip adding it.' % t.id)
                    else:
                        queue.put_nowait(t)
            except Full:
                ## queue is full: remaining tasks are dropped (best-effort)
                pass
        finally:
            lock.release()

        return

    def __addNewTaskToQueue__(self, task, queue, lock):
        """common logic to add one task to a Queue object, skipping duplicates"""

        lock.acquire()

        try:
            try:
                if task in queue.queue:
                    self.logger.warning('task %s already in the queue, skip adding it.' % task.id)
                else:
                    queue.put_nowait(task)
            except Full:
                pass
        finally:
            lock.release()

        return

    def __removeTaskFromQueue__(self, task, queue, lock):
        """common logic to remove task from the queue if it exists in the queue"""

        lock.acquire()

        try:
            try:
                ## deque.remove() raises ValueError when the task is absent
                queue.queue.remove(task)
            except ValueError:
                ## task not in the queue: nothing to do
                pass
            except AttributeError:
                ## deque.remove() is only available from Python 2.5 onwards;
                ## fall back to rebuilding the underlying deque without the task
                queue.queue = collections.deque(
                    [t for t in queue.queue if t != task])
        finally:
            lock.release()

        return

    def getNextNewTask(self):
        """gets next new task"""
        t = self.__getNextTaskFromQueue__(self.q_new, self.q_new_lock)

        ## no new jobs in queue, now check the taskStore and add them in internal queue
        if not t and self.taskStore:
            for tt in self.taskStore.selectTasks({'status':AutoD3PDMakerTask.status_map['new']}):
                self.addNewTask( tt )

            ## try again to get the next new job from the queue
            t = self.__getNextTaskFromQueue__(self.q_new, self.q_new_lock)

        return t

    def listTasks(self, clauses=None):
        """lists tasks in task store given the clauses in key:value pairs"""

        if clauses is None:
            clauses = {}

        if not self.taskStore:
            ## without a task store there is nothing to list
            self.logger.error('task listing requires activated task store')
            return

        for t in self.taskStore.selectTasks(clauses):
            ## TODO: a better printing method should be provided
            print('%s' % repr(t))

        return

    def addNewTask(self, task, persistency=True):
        """adds new task (persisted to the task store when available)"""

        if self.taskStore and persistency:
            self.taskStore.updateTasks( [task] )

        return self.__addNewTaskToQueue__(task, self.q_new, self.q_new_lock)

    def delNewTaskFromQueue(self, task):
        """delete task from the internal queue holding new tasks"""
        return self.__removeTaskFromQueue__(task, self.q_new, self.q_new_lock)

    def getNextRunTask(self):
        """gets next running task"""
        return self.__getNextTaskFromQueue__(self.q_run, self.q_run_lock)

    def getAllRunTasks(self):
        """gets all running tasks"""
        return self.__getAllTasksFromQueue__(self.q_run, self.q_run_lock)

    def addRunTask(self, task):
        """adds run task"""

        return self.__addNewTaskToQueue__(task, self.q_run, self.q_run_lock)

    def delRunTaskFromQueue(self, task):
        """delete task from the internal queue holding running tasks"""
        return self.__removeTaskFromQueue__(task, self.q_run, self.q_run_lock)

    def getNextPauseTask(self):
        """gets next paused task"""
        return self.__getNextTaskFromQueue__(self.q_pause, self.q_pause_lock)

    def getAllPauseTasks(self):
        """gets all paused tasks"""
        return self.__getAllTasksFromQueue__(self.q_pause, self.q_pause_lock)

    def addPauseTask(self, task):
        """adds pause task"""
        return self.__addNewTaskToQueue__(task, self.q_pause, self.q_pause_lock)

    def delPauseTaskFromQueue(self, task):
        """delete task from the internal queue holding paused tasks"""
        return self.__removeTaskFromQueue__(task, self.q_pause, self.q_pause_lock)

    def getNextPostTask(self):
        """gets next task for post-processing"""
        return self.__getNextTaskFromQueue__(self.q_post, self.q_post_lock)

    def getAllPostTasks(self):
        """gets all tasks for post-processing"""
        return self.__getAllTasksFromQueue__(self.q_post, self.q_post_lock)

    def addPostTask(self, task):
        """adds a task for post-processing"""
        return self.__addNewTaskToQueue__(task, self.q_post, self.q_post_lock)

    def delPostTaskFromQueue(self, task):
        """delete task from the internal queue holding post-processing tasks"""
        return self.__removeTaskFromQueue__(task, self.q_post, self.q_post_lock)

    def getNextFiniTask(self):
        """gets next task still running post-processing"""
        return self.__getNextTaskFromQueue__(self.q_fini, self.q_fini_lock)

    def getAllFiniTasks(self):
        """gets all tasks still running post-processing"""
        return self.__getAllTasksFromQueue__(self.q_fini, self.q_fini_lock)

    def addFiniTask(self, task):
        """adds a task still running post-processing"""
        return self.__addNewTaskToQueue__(task, self.q_fini, self.q_fini_lock)

    def delFiniTaskFromQueue(self, task):
        """delete task from the internal queue holding tasks still running post-processing"""
        return self.__removeTaskFromQueue__(task, self.q_fini, self.q_fini_lock)

    def getNextLogTask(self):
        """gets next task to be logged"""
        return self.__getNextTaskFromQueue__(self.q_log, self.q_log_lock)

    def getAllLogTasks(self):
        """gets all tasks waiting to be logged"""
        return self.__getAllTasksFromQueue__(self.q_log, self.q_log_lock)

    def addLogTask(self, task, persistency=True):
        """adds log task"""

        ## add the log either in taskStore or to log queue if taskStore is not available
        if self.taskStore and persistency:
            self.taskStore.updateTasks( [task] )
        else:
            self.__addNewTaskToQueue__(task, self.q_log, self.q_log_lock)
        return

    def delLogTaskFromQueue(self, task):
        """delete task from the internal queue holding logging tasks"""
        return self.__removeTaskFromQueue__(task, self.q_log, self.q_log_lock)

    def selectTasks(self, clauses):
        """selects tasks matching the searching clauses in the taskStore"""

        taskList = []

        if not self.taskStore:
            self.logger.error("selectTasks works only with Task Store")
        else:
            taskList = self.taskStore.selectTasks(clauses)

        return taskList

    def __cleanupGridOutput__(self, taskObj):
        """
        remove output dataset from the Grid if it exists.

        best-effort: DQ2 failures are logged and do not abort the cleanup.
        always returns True.
        """

        try:
            ds = taskObj.output['ds']

            if ds:
                dq2c = DQ2()

                ## resolve datasets in container
                if ds.endswith('/'):

                    sub_dslist = dq2c.listDatasetsInContainer(ds)

                    self.logger.info('removing dataset from container: %s' % ds)
                    dq2c.deleteDatasetsFromContainer(ds, sub_dslist)

                    for sub_ds in sub_dslist:
                        self.logger.info('deleting dataset: %s' % sub_ds)
                        dq2c.deleteDatasetReplicas( sub_ds, deep=True, all=True, locations=[])
                        #dq2c.eraseDataset(sub_ds)

                    ## TODO: not sure if this is the good API to remove dataset container
                    dq2c.eraseDataset(ds)

                else:
                    self.logger.info('deleting dataset: %s' % ds)
                    dq2c.deleteDatasetReplicas( ds, deep=True, all=True, locations=[])

            del taskObj.output['ds']

        except DQException as e:
            self.logger.error(e)
        except Exception as e:
            ## e.g. no 'ds' key in taskObj.output: nothing to clean up,
            ## but log it instead of hiding the problem completely
            self.logger.debug('grid output cleanup skipped: %s' % e)

        return True

    def __cleanupLocalOutput__(self, taskObj):
        """
        remove local output if it exists. always returns True.
        """
        ## remove local disk copy if available
        try:
            if os.path.exists(taskObj.output['disk']):
                self.logger.debug('removing output disk copy: %s' % taskObj.output['disk'])
                shutil.rmtree(taskObj.output['disk'], ignore_errors=True)

        except (AttributeError, KeyError, OSError):
            pass

        ## remove the pointer to the local disk copy
        try:
            del taskObj.output['disk']
        except Exception:
            pass

        return True

    def __cleanupPPGangaTask__(self, taskObj):
        """
        remove GangaTask created by the post-processing job.
        """

        if self.taskHandler.ppBatchMode:

            try:
                if taskObj.status == AutoD3PDMakerTask.status_map['finishing'] and taskObj.gtId_pp != -1:
                    try:
                        trp = getRegistry('tasks').getProxy()
                        gt = trp(taskObj.gtId_pp)
                        ## if the GangaTask is still running, pause it first before removing it
                        if gt.status in ['running']:
                            gt.pause()
                        gt.remove(remove_jobs=True)
                    except RegistryKeyError:
                        pass
            except Exception as e:
                self.logger.warning(e)

            taskObj.gtId_pp = -1

        return True

    def __cleanupTaskFromQueues__(self, taskObj):
        """
        remove taskObj that has been presented in one of the internal queues.
        """
        self.delNewTaskFromQueue(taskObj)
        self.delRunTaskFromQueue(taskObj)
        self.delPauseTaskFromQueue(taskObj)
        self.delPostTaskFromQueue(taskObj)
        self.delLogTaskFromQueue(taskObj)
        self.delFiniTaskFromQueue(taskObj)

    def __cleanupGangaTask__(self, taskObj, deep=True):
        """
        remove GangaTasks as well as associated Ganga jobs.

        deep mode removes also local output as well as dq2 datasets if they are available.
        """

        ## remove task entry in the internal queues (go through all queues)
        self.logger.debug('removing task %s from internal queues' % taskObj.id)
        self.__cleanupTaskFromQueues__(taskObj)

        ## cleanup PP process
        self.logger.debug('removing post-processing GangaTask %s' % repr(taskObj.gtId_pp))
        self.__cleanupPPGangaTask__(taskObj)

        if deep:
            ## remove GangaTask and Ganga jobs
            if ( taskObj.gtId != -1 ):
                self.logger.debug('removing GangaTask %s' % repr(taskObj.gtId))

                try:
                    trp = getRegistry('tasks').getProxy()
                    gt = trp(taskObj.gtId)
                    ## if the GangaTask is still running, pause it first before removing it
                    if gt.status in ['running']:
                        gt.pause()
                    gt.remove(remove_jobs=True)
                except RegistryKeyError:
                    pass

            ## remove local disk copy if available
            self.__cleanupLocalOutput__(taskObj)

            ## remove Grid dataset if available
            self.__cleanupGridOutput__(taskObj)

            taskObj.gtId = -1

        return True

    def resetTasks(self, clauses, status):
        """
        resets tasks matching the searching clauses in the taskStore to a certain status
        as well as removing them from the task queues in memory.

        NB: this is the key method implementing the workflow of resetting task status.
        """

        ick = False

        if not status:
            self.logger.error('resetTask requires destination status')
            return False

        if status not in AutoD3PDMakerTask.status_map.values():
            self.logger.error('status %s is not recognized' % status)
            return False

        taskList = self.selectTasks(clauses)

        now = time.time()

        for t in taskList:
            self.logger.info("reseting task %s to status %s" % (t.id, status))

            ## pause the task
            ##  - pause the associated GangaTask
            ##  - kill the still running Ganga jobs
            ##  - remove task from the internal queues
            if status == AutoD3PDMakerTask.status_map['pause']:
                try:

                    trp = getRegistry('tasks').getProxy()

                    if (t.gtId != -1) and (trp(t.gtId).status != 'completed'):
                        self.logger.debug('pausing GangaTask: %s' % t.gtId)
                        trp(t.gtId).pause()

                        for sj in trp(t.gtId).getJobs(only_master_jobs=False):
                            if sj.status in ['running','submitted']:
                                sj.kill()

                        t.error      = 'task stopped'

                    t.status     = status
                    t.lastUpdate = now

                    self.__cleanupTaskFromQueues__(t)

                except RegistryKeyError:
                    self.logger.warning('nothing to pause')

            ## unfinish the task
            ##  - pause the associated post-processing GangaTask
            ##  - kill the still running Ganga jobs
            ##  - remove task from the internal queues
            elif status == AutoD3PDMakerTask.status_map['unfinished']:
                try:

                    trp = getRegistry('tasks').getProxy()

                    if (t.gtId_pp != -1) and (trp(t.gtId_pp).status != 'completed'):
                        self.logger.debug('pausing post-processing GangaTask: %s' % t.gtId_pp)
                        trp(t.gtId_pp).pause()

                        for sj in trp(t.gtId_pp).getJobs(only_master_jobs=False):
                            if sj.status in ['running','submitted']:
                                sj.kill()

                        t.error      = 'pp task stopped'

                    t.status     = status
                    t.lastUpdate = now

                    self.__cleanupTaskFromQueues__(t)

                except RegistryKeyError:
                    self.logger.warning('nothing to pause')

            ## resume the task
            ##  - resume the GangaTask
            ##  - remove task from the internal queues (it will be added later accordingly)
            elif status  == AutoD3PDMakerTask.status_map['running']:
                try:

                    trp = getRegistry('tasks').getProxy()

                    if ( t.gtId != -1 ) and (trp(t.gtId).status == 'pause'):
                        self.logger.debug('resuming GangaTask: %s' % t.gtId)
                        trp(t.gtId).run()

                    t.error      = ''
                    t.status     = status
                    t.lastUpdate = now

                    self.__cleanupTaskFromQueues__(t)

                except RegistryKeyError:
                    self.logger.warning('nothing to resume')

            ## resume the post-processing part of the task
            ##  - resume the post-processing GangaTask
            ##  - remove task from the internal queues (it will be added later accordingly)
            elif status  == AutoD3PDMakerTask.status_map['finishing']:
                try:

                    trp = getRegistry('tasks').getProxy()

                    if ( t.gtId_pp != -1 ) and (trp(t.gtId_pp).status == 'pause'):
                        self.logger.debug('resuming post-processing GangaTask: %s' % t.gtId_pp)
                        trp(t.gtId_pp).run()

                    t.error      = ''
                    t.status     = status
                    t.lastUpdate = now

                    self.__cleanupTaskFromQueues__(t)

                except RegistryKeyError:
                    self.logger.warning('nothing to resume')

            ## rerun the entire task
            ##  - cleanup everything deeply: GangaTasks, Ganga jobs, output, etc.
            ##  - remove task from the internal queues (this is done internally in __cleanupGangaTask__)
            elif status == AutoD3PDMakerTask.status_map['new']:
                self.__cleanupGangaTask__(t, deep=True)
                t.error      = ''
                t.status     = status
                t.lastUpdate = now

            ## rerun the post-processing part of the task
            ##  - cleanup outputs: grid replica and disk copy
            ##  - remove task from the internal queues
            ##  - remove the post-processing GangaTask and jobs
            else:
                if status == AutoD3PDMakerTask.status_map['completed']:
                    ## try to remove local disk copy if available
                    self.__cleanupLocalOutput__(t)

                    ## try to remove Grid disk copy if available
                    self.__cleanupGridOutput__(t)

                ## this will just remove task from internal queues
                self.__cleanupTaskFromQueues__(t)
                self.__cleanupPPGangaTask__(t)

                t.error      = ''
                t.status     = status
                t.lastUpdate = now

        ## persist the new task states; without a task store there is nothing
        ## to update and the reset is reported as failed
        if self.taskStore:
            ick = self.taskStore.updateTasks(taskList)
        else:
            self.logger.error('resetTasks requires activated task store')

        ## put the task into internal task queue accordingly
        if ick:

            if status == AutoD3PDMakerTask.status_map['new']:
                self.__addTasksToQueue__(taskList, self.q_new, self.q_new_lock)

            if status == AutoD3PDMakerTask.status_map['running']:
                self.__addTasksToQueue__(taskList, self.q_run, self.q_run_lock)

            if status == AutoD3PDMakerTask.status_map['pause']:
                self.__addTasksToQueue__(taskList, self.q_pause, self.q_pause_lock)

            if status == AutoD3PDMakerTask.status_map['completed']:
                self.__addTasksToQueue__(taskList, self.q_post, self.q_post_lock)

            if status == AutoD3PDMakerTask.status_map['finishing']:
                if self.taskHandler.ppBatchMode:
                    self.__addTasksToQueue__(taskList, self.q_fini, self.q_fini_lock)

        return ick


    def removeTasks(self, clauses, deep=True):
        """
        removes tasks matching the searching clauses in the taskStore as well as
        the possible entries in the task queues in memory.

        The default 'deep' mode deletion removes also the produced ntuples from local disk as
        well as the associated GangaTasks and Ganga jobs.
        """

        failedRemoval  = []
        successRemoval = []

        ## select tasks matching the criteria
        taskList = self.selectTasks(clauses)

        for t in taskList:

            self.logger.info("removing task %s ..." % t.id)

            self.__cleanupGangaTask__(t, deep=deep)

            ## remove task entry from the task store
            self.logger.debug('removing task %s from taskStore' % t.id)
            ick = self.taskStore.deleteTasks({'id':t.id})

            ## TODO: remove dq2 dataset if available

            if not ick:
                failedRemoval.append(t)
            else:
                successRemoval.append(t)

        return (successRemoval, failedRemoval)

    def dumpQueuedTasks(self, persistency=True):
        """dumps all tasks in internal queues"""

        if persistency and self.taskStore:
            ## ignoring internal queue dump as persistent task store is on
            pass
        else:
            ## dump new tasks
            self.q_new.dumpTasks(mode='w')
            ## dump running tasks
            self.q_run.dumpTasks(mode='w')
            ## dump paused tasks
            self.q_pause.dumpTasks(mode='w')
            ## dump tasks waiting for post-processing
            self.q_post.dumpTasks(mode='w')
            ## dump tasks under post-processing
            self.q_fini.dumpTasks(mode='w')
            ## dump task logs (it could be very large)
            self.q_log.dumpTasks(mode='w')