# -*- coding: utf-8 -*-

"""
kg file job
"""

from __future__ import unicode_literals
from __future__ import absolute_import

from kgpipeline.job import KgJob
from kgpipeline.sparkutil import FileSystem, FileUtil, Path, HDFS, HADOOP_CONFIG, Configuration, copy_merge, jvm

import logging

logger = logging.getLogger(__name__)


class KgFileJob(KgJob):
    """
    HDFS FileSystem related actions: copy, copy_merge, delete

    Each action is a dict in ``config["actions"]`` with an ``action`` key of
    "copy", "copy_merge" or "delete", plus action-specific keys (see
    :meth:`process`).

    To work with a specific user for destination HDFS, we need the following Java utility:

    ```java
    package ai.ruyi.kgpipeline.util;

    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.UserGroupInformation;

    public class HadoopUtil {
        public static FileSystem getFileSystemByUser(String username, final Configuration conf) throws Exception {
            UserGroupInformation ugi = UserGroupInformation.createRemoteUser(username);
            return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
                public FileSystem run() throws Exception {
                    return FileSystem.get(conf);
                }
            });
        }
    }
    ```

    And the jar file must be submitted along with kgpipeline jobs.
    """

    def __init__(self, config, reader):
        """
        :param config: job config dict. Reads "actions" (list of action
            dicts), optional "dst_fs" (Hadoop configuration key/value pairs
            describing the destination FileSystem) and optional "username"
            (remote user for the destination FileSystem; requires the
            HadoopUtil jar shown in the class docstring).
        :param reader: passed through unchanged to KgJob.
        """
        super(KgFileJob, self).__init__(config, reader)
        if "actions" not in self.config:
            # logging.Logger.warn is deprecated; warning() is the supported name
            logger.warning("Empty file job!")
            self.actions = []
        else:
            self.actions = self.config["actions"]  # actions is a list of dicts
        # Always resolve the destination FileSystem so self.dst_fs exists even
        # for an empty job (previously it was left unset when "actions" was
        # missing, a latent AttributeError).
        if "dst_fs" in self.config:
            conf = Configuration()
            for k, v in self.config["dst_fs"].items():
                conf.set(k, v)
            username = self.config.get("username")
            if username:
                # Open the FS as the given remote user via the Java helper.
                HadoopUtil = jvm.ai.ruyi.kgpipeline.util.HadoopUtil
                self.dst_fs = HadoopUtil.getFileSystemByUser(username, conf)
            else:
                self.dst_fs = FileSystem.get(conf)
        else:
            self.dst_fs = HDFS

    def _clear_target(self, dst, raw_path):
        """Delete ``dst`` on the destination FS so a copy can overwrite it.

        :param dst: destination Path object.
        :param raw_path: original path string, used only in the error message.
        :raises Exception: if an existing target cannot be deleted.
        """
        if self.dst_fs.exists(dst):
            if not self.dst_fs.delete(dst, True):
                raise Exception("failed to override target {0} when deleting".format(raw_path))

    def process(self, inputs):
        """
        file job implementation: no inputs, no output

        Supported actions:
          - copy:       {"from", "to", "delete_source"?, "override"?}
          - copy_merge: {"from", "to", "delete_source"?, "override"?}
          - delete:     {"path", "recursive"?}

        :param inputs: unused; present to satisfy the KgJob interface.
        :returns: an empty list (file jobs produce no dataframes).
        :raises Exception: on an unknown action or a failed file operation.
        """
        logger.info("Start KgFileJob: {} ...".format(self.name))

        for ac in self.actions:
            action = ac["action"]
            if action == "copy":
                delete_source = ac.get("delete_source", False)
                override = ac.get("override", True)
                src = Path(ac["from"])
                dst = Path(ac["to"])
                if override:
                    self._clear_target(dst, ac["to"])
                if not FileUtil.copy(HDFS, src, self.dst_fs, dst, delete_source, override, HADOOP_CONFIG):
                    raise Exception("failed to execute copy action from {0} to {1}".format(ac["from"], ac["to"]))
            elif action == "copy_merge":
                delete_source = ac.get("delete_source", False)
                override = ac.get("override", True)
                src = Path(ac["from"])
                dst = Path(ac["to"])
                if override:
                    self._clear_target(dst, ac["to"])
                if not copy_merge(HDFS, src, self.dst_fs, dst, delete_source, HADOOP_CONFIG):
                    raise Exception("failed to execute copy_merge action from {0} to {1}".format(ac["from"], ac["to"]))
            elif action == "delete":
                recursive = ac.get("recursive", True)
                path = Path(ac["path"])
                if not self.dst_fs.delete(path, recursive):
                    raise Exception("failed to delete path {0}".format(ac["path"]))
            else:
                raise Exception("Unknown action {0}!!!".format(action))

        return []


# Register KgFileJob under the job type name "file" so KgJob can dispatch
# config entries of that type to this class.
KgJob.register("file", KgFileJob)
