"""
Sqoop数据同步类
待完善参数：
    -D mapred.job.queue.name
    -D mapred.job.name
"""
from my_utils.shell import Shell


class Sqoop(Shell):
    """Thin wrapper around the Sqoop CLI for MySQL/SAP-HANA -> Hive imports
    and Hive -> MySQL exports.

    Connection settings (hosts, ports, credentials, database names) are read
    from the mapping interface provided by the `Shell` base class
    (`self[...]` / `self.get(...)`); `self.ymd` supplies the run date used as
    the default partition value.
    """

    def sqoop(self, cmd):
        """Normalize *cmd* (collapse all runs of whitespace, including the
        newlines of the triple-quoted templates, to single spaces) and run it
        through the shell, alerting on failure."""
        return self.sh_cmd_and_alert(' '.join(cmd.split()))

    def _hive_import(self, connect, username, password, hive_tb, query,
                     partition_key, partition_value, driver=''):
        """Build and run the `sqoop import --hive-import` command shared by
        the MySQL and SAP entry points.

        :param connect: full JDBC connect string.
        :param driver: optional extra CLI fragment (e.g.
            ``--driver com.sap.db.jdbc.Driver``); an empty string is collapsed
            away by :meth:`sqoop`'s whitespace normalization.
        """
        return self.sqoop(r'''
        {sqoop} import
        --hive-import
        --hive-overwrite
        --null-string '\\N' --null-non-string '\\N'
        --num-mappers 1
        --hive-drop-import-delims
        --connect {connect}
        {driver}
        --username '{username}'
        --password '{password}'
        --hive-database {hive_db}
        --hive-table {hive_tb}
        --hive-partition-key {partition_key}
        --hive-partition-value {partition_value}
        --query '{query} AND $CONDITIONS'
        --target-dir /temp/sqoop/{hive_db}/{hive_tb}
        --delete-target-dir
        --compress
        '''.format(
            sqoop=self.get('sqoop', 'sqoop'),
            connect=connect,
            driver=driver,
            username=username,
            password=password,
            hive_db=self['hive_db'],
            hive_tb=hive_tb,
            partition_key=partition_key,
            # Default the partition value to today's run date.
            partition_value=partition_value or self.ymd,
            # Swap single quotes for double quotes so the query survives the
            # outer single-quote shell wrapping; `{ymd}` placeholders in the
            # query are substituted with the run date.
            query=query.replace("'", '"').format(ymd=self.ymd),
        ))

    def hive_import(self, hive_tb, query, partition_key='ymd', partition_value=None):
        """Import the result of a MySQL *query* into partition
        ``partition_key=partition_value`` of Hive table *hive_tb*.

        ``$CONDITIONS`` is appended to the query by Sqoop convention, so the
        query must end in a WHERE clause (e.g. ``WHERE 1=1``).
        """
        return self._hive_import(
            connect='jdbc:mysql://{host}:{port}/{db}?serverTimezone=Asia/Shanghai'.format(
                host=self.get('mysql_host', 'localhost'),
                port=self.get('mysql_port', '3306'),
                db=self['mysql_db'],
            ),
            username=self.get('mysql_user', 'root'),
            password=self['mysql_pwd'],
            hive_tb=hive_tb,
            query=query,
            partition_key=partition_key,
            partition_value=partition_value,
        )

    def hive_import_sap(self, hive_tb, query, partition_key='ymd', partition_value=None):
        """Import the result of a SAP HANA *query* into partition
        ``partition_key=partition_value`` of Hive table *hive_tb*.

        Prerequisite: the SAP/HANA JDBC driver jar must be present in
        `/opt/cloudera/parcels/CDH/lib/sqoop/lib`.
        """
        return self._hive_import(
            connect='jdbc:sap://{host}:{port}'.format(
                host=self['sap_host'],
                port=self['sap_port'],
            ),
            username=self['sap_user'],
            password=self['sap_pwd'],
            hive_tb=hive_tb,
            query=query,
            partition_key=partition_key,
            partition_value=partition_value,
            # SAP needs an explicit JDBC driver class; MySQL is auto-detected.
            driver='--driver com.sap.db.jdbc.Driver',
        )

    def sqoop_export(self, mysql_tb, export_dir, columns=''):
        """Export the HDFS directory *export_dir* into MySQL table *mysql_tb*.

        Omitting ``--columns`` exports all columns by default; passing it
        explicitly (via *columns*, e.g. ``"--columns a,b,c"``) is recommended
        to avoid obscure column-order bugs. Omitting ``--update-mode``
        defaults to ``updateonly``.
        """
        return self.sqoop(r'''
        {sqoop} export
        --connect jdbc:mysql://{host}:{port}/{database}
        --username '{username}'
        --password '{password}'
        --table {table}
        --num-mappers 1
        --input-fields-terminated-by '\001'
        --input-null-string '\\N'
        --input-null-non-string '\\N'
        --export-dir '{export_dir}'
        {columns}
        '''.format(
            sqoop=self.get('sqoop', 'sqoop'),
            host=self.get('mysql_host', 'localhost'),
            port=self.get('mysql_port', '3306'),
            database=self['mysql_db'],
            username=self.get('mysql_user', 'root'),
            password=self['mysql_pwd'],
            table=mysql_tb,
            export_dir=export_dir,
            columns=columns,
        ))


if __name__ == '__main__':
    # Demo: build (and echo) the MySQL -> Hive import command for a sample table.
    job = Sqoop(
        mysql_db='MySQL库',
        mysql_pwd='密码',
        hive_db='HIVE库',
    )
    print(job.hive_import(
        hive_tb='HIVE表',
        query='SELECT * FROM t WHERE 1=1',
    ))
