#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: believexin
# Created: 2021/7/8 9:36

from datetime import datetime, timedelta

import airflow
from airflow.operators.postgres_operator import PostgresOperator
from airflow.operators.dummy_operator import DummyOperator

# Default arguments inherited by every task in this DAG.
args = {
    'owner': 'airflow',
    'depends_on_past': False,
    # start_date determines the first date from which this DAG is eligible to run
    'start_date': datetime(2020, 1, 1),
    'email': ['airflow@example.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 2,
    'retry_delay': timedelta(minutes=5),
    # Other supported knobs, currently unused:
    # 'queue': 'bash_queue',
    # 'pool': 'test_archve',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

# DAG definition. NOTE(review): `airflow.operators.postgres_operator` /
# `dummy_operator` import paths above are the Airflow 1.x locations —
# presumably this targets Airflow 1.10.x; confirm before upgrading.
my_dag = airflow.DAG(
 # DAG id
 'to_table',
 # schedule_interval is the scheduling frequency; None means no schedule
 # (the DAG only runs when triggered manually)
 # schedule_interval=timedelta(hours=5),
 schedule_interval=None,
 # max number of task instances allowed to run concurrently in this DAG
 concurrency=1,
 # default task arguments
 default_args=args,

 # max number of active DAG runs at a time
 max_active_runs=1
)


# No-op boundary tasks marking the start and end of the workflow.
start_step = DummyOperator(task_id="start", dag=my_dag)
end_step = DummyOperator(task_id="finish", dag=my_dag)


def py_read_file(path='/usr/local/airflow/dags/user.txt'):
    """Build a multi-row INSERT statement for t_user from a CSV-like file.

    Each non-empty line of *path* must hold at least three comma-separated
    fields; the first three become one VALUES tuple.

    Args:
        path: File to read. Defaults to the original hard-coded DAG path,
            so existing zero-argument callers are unaffected.

    Returns:
        str: An ``insert into t_user values(...),(...)`` statement.

    Raises:
        ValueError: If the file yields no usable rows. (The original code
            silently returned a truncated, invalid statement in that case.)
    """
    rows = []
    # 'with' guarantees the handle is closed; the original leaked it.
    with open(path, 'r', encoding='utf-8') as f:
        for line in f:
            # strip() removes the trailing newline, which previously leaked
            # into the last field and ended up inside the SQL literal.
            line = line.strip()
            if not line:
                continue
            fields = line.split(",")
            # Double any single quotes so a value containing ' cannot break
            # out of its literal — minimal SQL-injection hardening, since
            # this statement is built as a plain string.
            vals = [field.replace("'", "''") for field in fields[:3]]
            rows.append(f"('{vals[0]}','{vals[1]}','{vals[2]}')")
    if not rows:
        raise ValueError(f"no rows found in {path}")
    return "insert into t_user values" + ",".join(rows)


# Task that executes the generated INSERT against Postgres.
# NOTE(review): py_read_file() runs at DAG-parse time (i.e. on every
# scheduler parse of this file), not at task-execution time — confirm the
# source file is always present and that parse-time I/O is acceptable.
to_table = PostgresOperator(
    task_id="to_table",
    postgres_conn_id="test_airflow",  # Airflow connection id of the target database
    sql=py_read_file(),
    pool="default_pool",
    dag=my_dag
)

# Task ordering: start -> load into table -> finish.
start_step >> to_table >> end_step
