from datetime import datetime


from airflow import DAG
from airflow.models import DagPickle, DagRun

from typing import Optional, Set
from airflow.utils import dates

from airflow.utils.state import State


def _get_dag_run(run_date: datetime, dag: DAG) -> Optional[DagRun]:
    """
    Find an existing dag run for the given run date.

    If a dag run already exists for ``run_date`` it is returned; otherwise
    ``None`` is returned.  ``None`` is also returned when the
    max-active-runs limit applies and has already been reached.

    :param run_date: the execution date for the dag run
    :param dag: the DAG whose runs are inspected
    :return: the matching DagRun, or None
    """
    # Only enforce the active-run limit for scheduled, top-level DAGs;
    # unscheduled DAGs and subdags are exempt.
    respect_dag_max_active_limit = bool(dag.schedule_interval and not dag.is_subdag)

    current_active_dag_count = dag.get_num_active_runs(external_trigger=False)

    # Check if we are scheduling on top of an already existing dag_run;
    # we could find a "scheduled" run instead of a "backfill" one.
    runs = DagRun.find(dag_id=dag.dag_id, execution_date=run_date)
    run: Optional[DagRun] = runs[0] if runs else None

    # A run that is already RUNNING for this date does not count against
    # the active-run limit.
    if run is not None and run.state == State.RUNNING:
        respect_dag_max_active_limit = False

    # Enforce the max_active_runs limit for the dag; the special cases are
    # already handled via respect_dag_max_active_limit above.
    # NOTE(review): the limit is hard-coded to 1 rather than read from
    # dag.max_active_runs — confirm this is intentional.
    if respect_dag_max_active_limit and current_active_dag_count >= 1:
        return None

    # TODO: creating a new dag run (DagRunType.BACKFILL_JOB) when none
    # exists is not implemented yet; callers currently receive None in
    # that case.
    return run

if __name__ == '__main__':
    dag_graph_id = 2294

    # Backfill window, given as epoch milliseconds.
    start_date = dates.from_milliseconds(1636646400000)
    end_date = dates.from_milliseconds(1636686189000)

    # Load the pickled DAG for this graph and walk every run date in
    # the window, resolving (or skipping) a dag run for each one.
    dag_pickle = DagPickle.get_by_dag('dag-{}'.format(dag_graph_id))
    dag = dag_pickle.pickle

    for next_run_date in dag.get_run_dates(start_date, end_date):
        _get_dag_run(next_run_date, dag)
