# -*- coding: utf-8 -*-

from resource_management import *
from resource_management.core.logger import Logger
from resource_management.libraries.functions import default
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions.check_process_status import check_process_status
from resource_management.libraries.script.script import Script


class FlinkClient(Script):
    """Ambari client component that installs and configures Apache Flink."""

    def install(self, env):
        """Download the Flink tarball, unpack it under ``hdp_base_dir``,
        link it into /usr/hdp/current/flink and upload its lib/ jars to HDFS.

        Packages listed in metainfo.xml are installed by Ambari itself.
        """
        import params
        env.set_params(params)

        # Remove any stale symlink / install left over from a previous run.
        Execute('/bin/rm -rf /usr/hdp/current/flink')

        # Delete the previous installation directory (base_dir).
        Execute('/bin/rm -rf ' + params.flink_home)

        # Fetch and unpack the distribution, then drop the archive.
        Execute("wget -O flink.tgz {0}".format(params.flink_download_url))
        Execute('tar -zxvf flink.tgz -C {0} && rm -f flink.tgz'.format(params.hdp_base_dir))
        # Rename the extracted "flink-<version>" directory to flink_home.
        Execute('mv {0}/flink-* {1}'.format(params.hdp_base_dir, params.flink_home))

        # Create the conventional /usr/hdp/current symlink for Flink.
        Execute('/bin/ln -s {0} /usr/hdp/current/flink'.format(params.flink_home))

        # Re-create the dependency-jar directory in HDFS and upload lib/.
        # FIX: "-f" keeps the very first install from failing when the
        # HDFS path does not exist yet (rm -r alone exits non-zero).
        Execute("hdfs dfs -rm -r -f {0}".format(params.flink_dependency_jar), user='hdfs')
        Execute("hdfs dfs -mkdir {0}".format(params.flink_dependency_jar), user='hdfs')
        Execute("hdfs dfs -put {0}/lib/* {1}/".format(params.flink_home, params.flink_dependency_jar), user='hdfs')
        self.configure(env)

    def restart(self, env):
        """Regenerate flink-conf.yaml and log4j.properties on restart.

        BUG FIX: the original called ``env.set_params`` without arguments
        (a bare attribute access, i.e. a no-op), so template parameters
        were never bound before rendering the config files.
        """
        import params
        env.set_params(params)
        self._write_config_files(params)

    def configure(self, env):
        """Fix permissions, copy Hadoop client configs into Flink's conf
        directory and render the Flink configuration files."""
        import params
        env.set_params(params)
        Execute('chmod 755 {0} -R'.format(params.flink_home))
        Execute('chmod 777 -R {0}/lib'.format(params.flink_home))
        Execute('chmod 777 -R {0}/log'.format(params.flink_home))

        # Copy Hadoop client configuration so Flink can reach HDFS/YARN.
        for conf_file in ('hdfs-site.xml', 'yarn-site.xml', 'core-site.xml'):
            Execute('cp {0}/{1} {2}/conf'.format(params.hadoop_conf_dir, conf_file, params.flink_home))

        self._write_config_files(params)

    def _write_config_files(self, params):
        """Render flink-conf.yaml and log4j.properties from template params.

        Shared by ``restart`` and ``configure`` (previously duplicated).
        """
        # write out flink-conf.yaml
        properties_conf = InlineTemplate(params.flink_yaml_content)
        File(format(params.flink_home + "/conf/flink-conf.yaml"), content=properties_conf)

        # log4j property
        properties_log4j = InlineTemplate(params.flink_log4j)
        File(format(params.flink_home + "/conf/log4j.properties"), content=properties_log4j)


if __name__ == "__main__":
    FlinkClient().execute()






