from airflow import DAG
from airflow.operators.bash import BashOperator
from airflow.utils.dates import days_ago
from datetime import timedelta

# Values exposed to the task templates via `user_defined_macros`: paths on
# the host/inside the container and the MPI task count for the EFSO run.
params = dict(
	python3='/usr/bin/python3',                                # interpreter used inside the container
	work_root='~/containers/singularity/efso',                 # per-cycle work dirs are created under here
	data_root='/mnt/sda/work/efso/sample_data',                # bind-mounted into the container as /data
	efso_image='~/containers/singularity/efso/efso-1.0.sif',   # Singularity image to run
	ntasks=30,                                                 # MPI ranks for efso_driver.exe
)

# Default arguments applied to every task in the DAG.
args = dict(owner='airflow')

# Daily GRAPES-GEPS EFSO pipeline: create a per-cycle work directory,
# generate the namelist, run the EFSO driver under MPI inside a Singularity
# container, then load the observation-impact output into a database.
# NOTE(review): `days_ago` and the `execution_date` template variable are
# deprecated in newer Airflow releases (prefer a static, timezone-aware
# start_date and `logical_date`) — confirm the target Airflow version
# before migrating.
with DAG(
	dag_id='grapes_geps_efso',
	start_date=days_ago(2),
	schedule_interval='0 0 * * *',
	dagrun_timeout=timedelta(minutes=60),
	default_args=args,
	user_defined_macros=params,
) as dag:
	# Ensure the per-cycle working directory exists (tilde in work_root is
	# expanded by the shell since it starts the unquoted word).
	prepare = BashOperator(
		task_id='prepare',
		bash_command='''
{% set work_dir = work_root + '/' + execution_date.format("YYYYMMDDHH") %}
test -d {{work_dir}} || mkdir -p {{work_dir}}
'''
	)

	# Write the namelist for this cycle inside the container.
	write_namelist = BashOperator(
		task_id='write_namelist',
		bash_command='''
{% set work_dir = work_root + '/' + execution_date.format("YYYYMMDDHH") %}
cd {{work_dir}} && singularity run --bind {{data_root}}:/data {{efso_image}} \
{{python3}} /opt/efso/src/write_namelist.py --time {{execution_date.format("YYYYMMDDHH")}} \
--use-raob --use-amdar --use-profiler --fcst-hour 24
'''
	)

	# Run the EFSO driver with `ntasks` MPI ranks.
	run_efso = BashOperator(
		task_id='run_efso',
		bash_command='''
{% set work_dir = work_root + '/' + execution_date.format("YYYYMMDDHH") %}
cd {{work_dir}} && singularity run --bind {{data_root}}:/data {{efso_image}} \
mpiexec -np {{ntasks}} /opt/efso/build/efso_driver.exe namelist
'''
	)

	# Load the per-cycle obs_impact text output into the database.
	write_db = BashOperator(
		task_id='write_db',
		bash_command='''
{% set work_dir = work_root + '/' + execution_date.format("YYYYMMDDHH") %}
cd {{work_dir}} && singularity run --bind {{data_root}}:/data {{efso_image}} \
{{python3}} /opt/efso/src/write_db.py --time {{execution_date.format("YYYYMMDDHH")}} \
--input obs_impact.{{execution_date.format("YYYYMMDDHH")}}.txt
'''
	)

	# Strictly linear pipeline: each stage depends on the previous one.
	prepare >> write_namelist >> run_efso >> write_db
