from datetime import timedelta, datetime
from airflow import DAG
from airflow.operators.python import PythonOperator
from airflow.providers.mysql.hooks.mysql import MySqlHook


class CustomMySqlHook(MySqlHook):
    """MySqlHook variant that always enables MySQL's LOCAL INFILE option.

    ``bulk_load`` issues ``LOAD DATA LOCAL INFILE``; without
    ``local_infile=True`` on the client connection the upload fails.
    """

    def __init__(self, *args, **kwargs):
        # Merge rather than mutate: always force local_infile on,
        # overriding any value the caller may have passed.
        super().__init__(*args, **{**kwargs, 'local_infile': True})


# Module-level hook instance shared by all task callables below.
# NOTE(review): the connection id 'mysql_connection' must be defined in
# Airflow's connection store for this DAG to run — confirm in the target
# deployment.
mysql_hook = CustomMySqlHook(mysql_conn_id='mysql_connection')


def update_data():
    """Shift every id in the ``course`` table up by 1000 via a direct UPDATE."""
    sql = "UPDATE course SET id = id + 1000;"
    mysql_hook.run(sql)


def import_task():
    """Bulk-load rows into the ``course`` table from a local CSV file."""
    source_path = "/home/jueyi/airflow/custom_data/data.csv"
    mysql_hook.bulk_load("course", source_path)


def export_task():
    """Dump the ``course`` table to a CSV file on the database server."""
    target_path = 'export/data.csv'
    mysql_hook.bulk_dump('course', target_path)


# Defaults applied to every task in the DAG defined below.
default_args = dict(
    owner='airflow',
    depends_on_past=False,
    start_date=datetime(2024, 8, 30),
    email_on_failure=False,
    email_on_retry=False,
    retries=1,
    retry_delay=timedelta(minutes=1),
)

# DAG definition: one run per day starting from default_args['start_date'].
# NOTE(review): `schedule_interval` is deprecated in newer Airflow releases
# in favor of `schedule` — confirm the target Airflow version before changing.
dag = DAG(
    'airflow_mysql_example',
    description='airflow mysql操作练习',
    default_args=default_args,
    schedule_interval=timedelta(days=1),
)

# Operator variables deliberately do NOT reuse the callable names: the
# original `import_task = PythonOperator(..., python_callable=import_task)`
# shadowed the function it wraps (it only worked because the right-hand
# side is evaluated before the name is rebound), which is fragile and
# confusing. Task ids and callables are unchanged.
update_op = PythonOperator(
    task_id='update_task',
    python_callable=update_data,
    dag=dag,
)
import_op = PythonOperator(
    task_id='import_task',
    python_callable=import_task,
    dag=dag,
)
export_op = PythonOperator(
    task_id='export_task',
    python_callable=export_task,
    dag=dag,
)

# Linear pipeline: shift ids, then bulk-load new rows, then dump the table.
update_op >> import_op >> export_op
