from datetime import datetime

from sqlalchemy.orm import create_session

from airflow import DAG
from airflow.patsnap.service import task_service
from airflow.models import DagPickle, DagRun
from airflow.models.idata_task import Task

# task = Task.get_task(1)
# task_service.run_in_pod(task)
from airflow.utils import timezone
from typing import Optional, Set
from airflow.utils import dates

# run_dates = dag.get_run_dates(start_date=start_date, end_date=self.bf_end_date)
from airflow.utils.state import State



def _get_dag_run(run_date: datetime, dag: DAG) -> Optional[DagRun]:
    """Find an existing DagRun for ``run_date`` on ``dag``, honoring the
    max-active-runs limit.

    Args:
        run_date: Logical execution date to look up.
        dag: The DAG (or subdag) being backfilled.

    Returns:
        The matching DagRun if one exists, ``None`` if no run exists yet,
        and ``None`` early when the dag's max-active-runs cap would be
        exceeded by scheduling on this date.
    """
    # Subdags and schedule-less dags are exempt from the active-runs cap.
    respect_dag_max_active_limit = bool(dag.schedule_interval and not dag.is_subdag)

    current_active_dag_count = dag.get_num_active_runs(external_trigger=False)

    # Check if we are scheduling on top of an already existing dag_run;
    # we could find a "scheduled" run instead of a "backfill" one.
    runs = DagRun.find(dag_id=dag.dag_id, execution_date=run_date)
    run: Optional[DagRun]
    if runs:
        run = runs[0]
        if run.state == State.RUNNING:
            # A run for this date is already in flight — reuse it rather
            # than counting it against the active-runs limit.
            respect_dag_max_active_limit = False
    else:
        run = None

    # Enforce the max_active_runs limit for the dag; special cases were
    # already handled above via respect_dag_max_active_limit.
    max_active_runs_per_dag = 1
    if respect_dag_max_active_limit and current_active_dag_count >= max_active_runs_per_dag:
        return None

    # Original ended with a debug print and an implicit None; the caller
    # assigns this result, so return the run that was found (if any).
    return run

if __name__ == '__main__':
    # Dag graph under test; the backfill window is given in epoch milliseconds.
    dag_graph_id = 2294

    start_date = dates.from_milliseconds(1636646400000)
    end_date = dates.from_milliseconds(1636686189000)

    # Pickled dags are stored under the conventional 'dag-<id>' key.
    dag_pickle = DagPickle.get_by_dag('dag-{}'.format(dag_graph_id))
    dag = dag_pickle.pickle

    # Iterate the schedule dates directly — the intermediate list copy
    # ([d for d in run_dates]) added nothing.
    for next_run_date in dag.get_run_dates(start_date, end_date):
        # Resolve the run for the parent dag and every subdag at this date.
        for d in [dag] + dag.subdags:
            dag_run = _get_dag_run(run_date=next_run_date, dag=d)





