# -*- coding:utf-8 -*-
__author__ = 'PC'

from datetime import datetime
import airflow
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import PythonOperator
from datetime import timedelta
from airflow.operators import SqoopOperator
from airflow.operators import SparkSqlOperator
from airflow.models import Variable

# -- DAG-level configuration --------------------------------------------------
# NOTE(review): datetime.now() is evaluated at DAG *parse* time, not at task
# execution time; Airflow re-parses this file continuously, so the partition
# value below can drift between parses. The {{ ds_nodash }} macro would pin it
# to the logical run date — TODO confirm templating reaches every consumer.
now = datetime.now()
today_yyyymmdd = now.strftime("%Y%m%d")

# Root directory holding the Spark SQL scripts, taken from an Airflow Variable.
spark_sql_file_path = Variable.get("SQL_FILE_PATH")

# Target schema/table for this full-ingest pipeline.
schema = 'dwb'
table = 'db_crm_user_profile'

# Manually-triggered DAG (schedule_interval=None); catchup=False means no
# backfill of historical runs.
dag = DAG(
    'full_ingest_crm_user_profile',
    schedule_interval=None,
    start_date=datetime(2019, 1, 20),
    catchup=False,
)

# Extra options forwarded to the sqoop import command. Flags that take no
# argument (e.g. --hive-import) map to an empty-string value. The feed lands
# in a Hive staging table partitioned by processing date, snappy-compressed.
sqoop_opt = {
    'hive-table': 'ods_{0}.{1}_feed'.format(schema, table),
    'delete-target-dir': '',
    'warehouse-dir': '/user/hive/warehouse/etl_input/',
    'hive-overwrite': '',
    'hive-import': '',
    'hive-drop-import-delims': '',
    'hive-partition-key': 'processing_dttm',
    'hive-partition-value': today_yyyymmdd,
    'compress': '',
    'compression-codec': 'org.apache.hadoop.io.compress.SnappyCodec',
}

# Placeholder substitutions applied to the load SQL scripts.
table_args = {'#part_value#': today_yyyymmdd}


def _ddl_task(task_id, sql_file):
    """Build a SparkSqlOperator that runs one DDL script for this table.

    The Spark job name mirrors the task_id; all DDL scripts live under the
    table's create/ directory.
    """
    return SparkSqlOperator(
        task_id=task_id,
        dag=dag,
        sql_file_path=spark_sql_file_path + schema + '/ods/' + table + '/create/' + sql_file,
        name=task_id,
        executor_cores=2,
        num_executors=2,
    )


# Create the staging schema plus the invalid/valid target tables.
spark_sql_create_schema = _ddl_task('create_schema', 'create.sql')
spark_sql_create_invalid = _ddl_task('create_invalid_table', 'create_invalid.sql')
spark_sql_create_valid = _ddl_task('create_valid_table', 'create_valid.sql')


# Full (non-incremental) sqoop import from the Greenplum source connection
# into the Hive staging table described by sqoop_opt. NULLs are encoded as \N
# and fields are delimited by \x01 to survive Hive ingestion.
sqoop_fully = SqoopOperator(
    task_id='full_ingest',
    dag=dag,
    conn_id='sqoop_src_gp',
    cmd_type='import',
    schema=schema,
    table=table + '_test',
    split_by='user_id',
    input_null_string='\\N',
    input_null_non_string='\\N',
    input_fields_terminated_by='\001',
    extra_import_options=sqoop_opt,
)

def _dml_task(task_id, sql_file):
    """Build a SparkSqlOperator that runs one load script for this table.

    Load scripts live under the table's fully/ directory and receive the
    partition-date placeholder substitutions in table_args.
    """
    return SparkSqlOperator(
        task_id=task_id,
        dag=dag,
        sql_file_path=spark_sql_file_path + schema + '/ods/' + table + '/fully/' + sql_file,
        name=task_id,
        executor_cores=2,
        num_executors=2,
        sql_args=table_args,
    )


# Split the ingested feed into the invalid and valid tables.
spark_sql_load_invalid = _dml_task('load_invalid_table', 'load_invalid.sql')
spark_sql_load_valid = _dml_task('load_valid_table', 'load_valid.sql')


# Task graph: create schema -> full ingest -> (create+load invalid | create+load valid)
spark_sql_create_schema.set_downstream(sqoop_fully)
sqoop_fully.set_downstream(spark_sql_create_invalid)
spark_sql_create_invalid.set_downstream(spark_sql_load_invalid)
sqoop_fully.set_downstream(spark_sql_create_valid)
spark_sql_create_valid.set_downstream(spark_sql_load_valid)