# --- Imports (PEP 8: grouped stdlib / third-party / local, all before code) ---

# Standard library.
import datetime

# Third-party.
import pendulum
from airflow.models.dag import DAG
from airflow.utils.dates import days_ago  # NOTE(review): unused in this file — verify before removing

# Project-local.
from dagen.operators import SSHOperator

# All schedule/start times for this DAG are interpreted in China Standard Time.
local_tz = pendulum.timezone("Asia/Shanghai")


schedule_interval="0 4 * * *"

# Daily batch DAG. catchup is disabled, so missed runs between the start date
# and "now" are not backfilled, and the DAG is created in the unpaused state.
dag = DAG(
    dag_id="dag",
    catchup=False,
    start_date=datetime.datetime(2021, month=10, day=1, tzinfo=local_tz),
    schedule_interval=schedule_interval,
    is_paused_upon_creation=False,
    default_args={
        # Applied to every task in this DAG unless overridden per-task.
        "owner": "Avris",
        "depends_on_past": False,
        "retries": 2,
        "email": ["guanghu@tesla.com"],
        "email_on_failure": True,
        "email_on_retry": False,
    },
)

# Submits the rule-engine Spark job to YARN (cluster deploy mode) over SSH.
# NOTE(review): task_id 'test the ' has a trailing space and looks like a
# placeholder — confirm with the DAG owner; renaming it changes task identity.
ssh_hook_task = SSHOperator(
    dag=dag,
    task_id='test the ',
    ssh_conn_id='eden_yarn_stg',
    command='/opt/bigdata/spark/current/bin/spark-submit --master yarn --deploy-mode cluster --queue batch --driver-memory 3G --executor-memory 2G --num-executors 2 --executor-cores 4 --jars /opt/bigdata/jar/hudi-spark3-bundle_2.12-0.9.0.jar,/opt/bigdata/jar/spark-avro_2.12-3.0.1.jar --conf "spark.serializer=org.apache.spark.serializer.KryoSerializer" --conf "spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension" --class com.avris.rule.RuleEngine /mnt/app/cdp-rule-engine/msdp_test-1.0-SNAPSHOT.jar @DAG_ID root root pvg03s1bieap012.cb1.pvg03.tzla.net avris_dmp_oap ads_cdp.user_labels',
)
