# -*- coding: utf-8 -*-

"""
    (c) 2020 - Copyright ...
    
    Authors:
        zPlus <zplus@peers.community>
"""


#from .. import database
from .. import settings

import celery
import logging
import os

import pagure.config
import pagure.lib.model
import pagure.lib.model_base

# When the plugin is imported by Pagure, the logging module already has a handler
# defined. However, when the Celery queue is started it lives in its own process,
# where no such handler exists. Celery does set up a handler of its own
# (see https://docs.celeryproject.org/en/latest/userguide/tasks.html#logging) so
# we reuse that one for logging task execution.
log = celery.utils.log.get_task_logger(__name__)
log.setLevel(settings.LOG_LEVEL)

# The message broker is taken from Pagure's own configuration.
broker_url = pagure.config.config.get('BROKER_URL', None)

# Without a broker, forgefed cannot schedule/process any activity
if not broker_url:
    log.critical('Broker not defined.')
    raise Exception('Broker not defined.')

# Lazy %-style arguments: the logging framework interpolates the message only
# if it is actually emitted (and this cannot raise on a non-str value).
log.info('Using broker: %s', broker_url)

# The Celery instance used to register forgefed tasks. The same URL is used
# both as broker and as result backend.
broker = celery.Celery('forgefed', broker=broker_url, backend=broker_url)
broker.conf.update({
    # Start from Pagure's own Celery configuration...
    **pagure.config.config["CELERY_CONFIG"],

    # ...then add these settings, which apply to all of our tasks by default.
    # See https://docs.celeryproject.org/en/latest/userguide/configuration.html#task-settings
    # and https://docs.celeryproject.org/en/stable/userguide/tasks.html#retrying
    # for more info.
    # TODO move these settings to config file.
    'task_annotations': {
        '*': {  'queue':             'forgefed',
                'autoretry_for':     [ Exception ],
                'retry_kwargs':      { 'max_retries': 20 },
                'retry_backoff':     True,     # Exponential backoff
                'retry_backoff_max': 60*60*24, # Do not delay for more than 24h
                'retry_jitter':      True,
                #'rate_limit':       '10/s'
    }}
})

class database_session:
    """
    Context Manager providing database sessions to Celery tasks.

    Because tasks are executed by independent Celery workers, they do not have
    access to the database sessions that are automatically created at the
    beginning of HTTP requests within Flask. So the job of this Context Manager
    is to create new database sessions that tasks can use:

        with database_session() as (pagure_db, forgefed_db):
            pagure_db.query()...
            forgefed_db.query()...
            ...

    On normal exit both sessions are committed; if the body raised an
    exception, both sessions are rolled back instead. In either case the
    sessions are closed afterwards.

    NOTE An alternative way of obtaining the same behavior would be with a
         decorator that contains a try...except...finally block.
    """

    def __init__(self):
        # Both sessions are created lazily in __enter__.
        self.pagure   = None  # Pagure's database session
        self.forgefed = None  # forgefed's database session

    def __enter__(self):
        # Imported locally: the module-level "from .. import database" is
        # commented out (presumably to avoid an import cycle at plugin load
        # time — TODO confirm), and without any import the "database" name
        # below would raise a NameError at task run time.
        from .. import database

        self.pagure   = pagure.lib.model_base.create_session(pagure.config.config['DB_URL'])
        self.forgefed = database.start_database_session()

        return (self.pagure, self.forgefed)

    def __exit__(self, exception_type, exception_value, exception_traceback):
        # If the task has raised an exception we need to rollback the session
        # first, in order not to leave uncommitted transactions hanging.
        if exception_type:
            self.pagure.rollback()
            self.forgefed.rollback()
        else:
            self.pagure.commit()
            self.forgefed.commit()

        # Close the database sessions
        self.pagure.remove()
        self.forgefed.remove()

        # Optionally we can return True to prevent the exception from bubbling up.
        # However, because these are Celery tasks, Celery will automatically
        # retry the task after some time so it's OK to pass the exception up
        # to Celery.
        # return True

from . import activity, notification, person, project, repository
