from datetime import timedelta
from airflow.operators.bash import BashOperator
from airflow.models import Variable
import pendulum
from spmi_analysis.dm.dm_piece_kg_part_jjj import spmi_dm__dm_piece_kg_part_jjj


# TiDB connection settings for the BigData cluster, read from Airflow
# Variables at DAG-parse time (keys: bigdata_tidb_<suffix>).
tidb_host, tidb_port, tidb_url, tidb_user, tidb_password = (
    Variable.get('bigdata_tidb_' + suffix)
    for suffix in ('host', 'port', 'url', 'user', 'password')
)

# ---------- Hive -> TiDB push configuration ----------
# Source Hive table; assumed to be partitioned by `dt` — if the table uses a
# different partitioning scheme, adjust the sqoop command accordingly.
hive_table = 'spmi_dm.dm_piece_kg_part_jjj'
# Target TiDB table.
tidb_table = 'spmi_dm.dm_piece_kg_part_jjj'
# Whether the TiDB table is partitioned: 'true' = partitioned,
# 'false' = not partitioned (string flag consumed by the shell script).
if_partitions = 'false'
# Number of days pushed per run.
interval_dt = 1
# Number of sqoop map tasks (max 100; 20-50 is usually enough to push
# hundreds of millions of rows within 15 minutes).
sqoop_maps = 10



# Push dm_piece_kg_part_jjj from Hive into TiDB via the sqoop wrapper script.
# NOTE: a bash_command ending in .sh is loaded as a Jinja template by Airflow,
# so the `params` mapping below is available inside execute.sh as {{ params.* }}.
tidb_dm_piece_kg_part_jjj = BashOperator(
    task_id='tidb_dm_piece_kg_part_jjj',
    bash_command="spmi_analysis/tidb/tidb_dm_piece_kg_part_jjj/execute.sh",
    # Scheduling / resource settings.
    execution_timeout=timedelta(hours=1),
    retries=0,
    pool='spmi_piece',
    pool_slots=1,
    email=['yushuo@jtexpress.com', 'yl_bigdata@yl-scm.com'],
    # Values templated into the sqoop export command.
    params={
        'tidb_host': tidb_host,
        'tidb_port': tidb_port,
        'tidb_url': tidb_url,
        'tidb_user': tidb_user,
        'tidb_password': tidb_password,
        'hive_table': hive_table,
        'tidb_table': tidb_table,
        'if_partitions': if_partitions,
        'interval_dt': interval_dt,
        'sqoop_maps': sqoop_maps,
    },
)


tidb_dm_piece_kg_part_jjj << spmi_dm__dm_piece_kg_part_jjj