from minio import Minio

from airflow.configuration import conf
from airflow.utils.log.logging_mixin import LoggingMixin

'''
Synchronizes the DAG files that scheduled tasks depend on; used mainly by the scheduler.
'''


class S3DagBag(LoggingMixin):
    """Synchronize DAG files from a MinIO/S3 bucket into the local DAGs folder.

    Used mainly by the scheduler to pull the ``.py`` DAG definitions that
    scheduled tasks depend on.  Remote objects are tracked by their
    last-modified timestamp so unchanged files are not downloaded again.
    """

    # The server caps a single listing page; when a page comes back nearly
    # full we assume it was truncated and continue after the last key seen.
    _PAGE_LIMIT = 990

    def __init__(self):
        super().__init__()
        # object_name -> last_modified timestamp of the most recent download
        self.dags_update = {}
        self.s3_client = None  # lazily created Minio client (see get_s3_client)
        self.s3_bucket = None  # bucket parsed from core.dags_minio_folder
        self.s3_folder = None  # key prefix (folder) parsed from core.dags_minio_folder
        self.dags_folder = conf.get('core', 'dags_folder')

    def get_s3_client(self):
        """Lazily build and cache the Minio client.

        Parses ``core.dags_minio_folder`` (an ``s3://bucket/folder`` URL) into
        ``s3_bucket``/``s3_folder`` and normalizes both the remote prefix and
        the local ``dags_folder`` to end with ``/``.

        :return: the cached :class:`Minio` client, or ``None`` when no remote
            dags folder is configured.
        """
        if self.s3_client is None:
            # fallback=None: an absent option means "feature disabled",
            # not an error worth raising.
            dags_minio_folder = conf.get('core', 'dags_minio_folder', fallback=None)
            if dags_minio_folder and dags_minio_folder.startswith('s3://'):
                self.log.info('use remote dags folder sync %s', dags_minio_folder)
                self.s3_client = Minio(conf.get('minio', 'endpoint'),
                                       access_key=conf.get('minio', 'access_key'),
                                       secret_key=conf.get('minio', 'secret_key'),
                                       secure=False)
                # 's3://bucket/folder' -> 'bucket/folder'; maxsplit=1 keeps any
                # later '//' in the path intact instead of truncating the prefix.
                prefix = dags_minio_folder.split('//', 1)[1]
                if not prefix.endswith('/'):
                    prefix += '/'
                if not self.dags_folder.endswith('/'):
                    self.dags_folder += '/'
                # first path segment is the bucket, the rest is the folder prefix
                bucket, _, folder = prefix.partition('/')
                self.s3_bucket = bucket
                self.s3_folder = folder

        return self.s3_client

    def sync_to_local(self, key=None):
        """Download remote DAG files into the local DAGs folder.

        :param key: optional object key; when it falls under the configured
            remote folder, only that subtree is synced, otherwise the whole
            remote folder is synced.  Errors are logged, never raised
            (best-effort sync).
        """
        try:
            s3_client = self.get_s3_client()
            if s3_client is None:
                return  # remote sync not configured
            if key:
                idx = key.find(self.s3_folder)
                if idx >= 0:
                    # strip anything before the configured folder prefix
                    self.recursive_sync(s3_client, key[idx:])
                    return
            self.recursive_sync(s3_client, self.s3_folder)
        except Exception as e:
            self.log.exception(e)

    def recursive_sync(self, client, prefix, start_after=None):
        """Walk the remote ``prefix`` and download new or changed ``.py`` files.

        Descends into sub-folders (keys ending with ``/``), skips files whose
        last-modified timestamp matches the one recorded at the previous
        download, and continues listing past the server's page limit via
        ``start_after``.

        :param client: the Minio client returned by :meth:`get_s3_client`.
        :param prefix: remote key prefix to list.
        :param start_after: resume the listing after this key (pagination).
        """
        count = 0
        last_key = None

        for obj in client.list_objects(self.s3_bucket, prefix, start_after=start_after):
            count += 1
            last_key = obj.object_name

            if obj.object_name.endswith('/'):
                # directory placeholder: descend into it
                self.recursive_sync(client, obj.object_name)
            elif obj.object_name.endswith('.py'):
                timestamp = obj.last_modified.timestamp()
                if self.dags_update.get(obj.object_name) != timestamp:
                    self.dags_update[obj.object_name] = timestamp
                    # map the remote key under s3_folder onto the local dags folder
                    local_path = self.dags_folder + obj.object_name[len(self.s3_folder):]
                    client.fget_object(self.s3_bucket, obj.object_name, local_path)
                    self.log.info('download from s3: %s', local_path)

        # a near-full page suggests the listing was truncated; fetch the rest
        if count > self._PAGE_LIMIT and last_key:
            self.recursive_sync(client, prefix, start_after=last_key)
