import logging
from datetime import datetime

from kubernetes import client
from kubernetes.client.rest import ApiException
from sqlalchemy.orm.session import Session

from airflow.exceptions import AirflowException
from airflow.jobs.backfill_job import BackfillJob
from airflow.kubernetes.kube_client import get_kube_client
from airflow.kubernetes.kube_config import KubeConfig
from airflow.kubernetes.kubernetes_helper_functions import create_pod_id
from airflow.kubernetes.pod_generator import PodGenerator
from airflow.kubernetes.pod_launcher import PodLauncher
from airflow.utils import dates
from airflow.utils.session import provide_session

log = logging.getLogger(__name__)


def run_in_pod(dag_id, start_date: datetime, end_date: datetime, is_synchronous):
    """Launch a Kubernetes pod that runs ``airflow dags backfill`` for *dag_id*.

    :param dag_id: id of the DAG to backfill
    :param start_date: backfill window start (passed to ``-s`` as ISO-8601)
    :param end_date: backfill window end (passed to ``-e`` as ISO-8601)
    :param is_synchronous: when truthy, adds ``--synchronous`` to the CLI call
    """
    kube_config = KubeConfig()
    kube_client = get_kube_client()

    # Build the CLI invocation once; only the --synchronous flag varies.
    command = [
        'airflow', 'dags', 'backfill', dag_id,
        '-s', start_date.isoformat(),
        '-e', end_date.isoformat(),
    ]
    if is_synchronous:
        command.append('--synchronous')
    command += ['--reset-dagruns', '--local']

    base_worker_pod = PodGenerator.deserialize_model_file(kube_config.pod_template_file)

    pod = PodGenerator.construct_pod(
        namespace=kube_config.kube_namespace,
        scheduler_job_id='0',
        pod_id=create_pod_id(dag_id, 'backfill'),
        dag_id=dag_id,
        task_id='backfill',
        kube_image=kube_config.kube_image,
        try_number=1,
        date=start_date,
        args=command,
        pod_override_object=None,
        base_worker_pod=base_worker_pod,
        task_type="SparkSubmitOperator",
        bash_bind_machine=kube_config.bash_bind_machine,
    )

    log.info("Kubernetes running for command %s", command)

    launcher = PodLauncher(kube_client=kube_client)
    # Fire-and-forget: the pod is created asynchronously and not awaited here.
    launcher.run_pod_async(pod, **kube_config.kube_client_request_args)


def delete_pod(pod_id: str) -> None:
    """Delete the named pod in the configured namespace.

    A 404 from the API server (pod already gone) is treated as success;
    any other ``ApiException`` is re-raised.

    :param pod_id: name of the pod to delete
    """
    kube_client = get_kube_client()
    kube_config = KubeConfig()
    log.info("Deleting pod %s in namespace %s", pod_id, kube_config.kube_namespace)
    # Keep the try body minimal: only the API call can raise ApiException.
    try:
        kube_client.delete_namespaced_pod(
            pod_id,
            kube_config.kube_namespace,
            body=client.V1DeleteOptions(**kube_config.delete_option_kwargs),
            **kube_config.kube_client_request_args,
        )
    except ApiException as e:
        # If the pod is already deleted
        if e.status != 404:
            raise


@provide_session
def kill_pod(job_id: int, session: Session = None):
    """Look up the backfill job *job_id* and delete its Kubernetes pod.

    :param job_id: primary key of the ``BackfillJob`` row
    :param session: SQLAlchemy session (injected by ``@provide_session``)
    :raises AirflowException: if no job with that id exists
    """
    found = session.query(BackfillJob).filter(BackfillJob.id == job_id).first()
    if found is None:
        raise AirflowException('job_id not found')
    # The job's hostname field holds the pod name for backfill jobs.
    delete_pod(found.hostname)


@provide_session
def query_job(dag_id: str, session: Session = None):
    """Return the 20 most recent backfill jobs for *dag_id* as plain dicts.

    :param dag_id: id of the DAG whose jobs are listed
    :param session: SQLAlchemy session (injected by ``@provide_session``)
    :return: list of dicts with id/dag_id/state/hostname and millisecond timestamps
    """
    recent = (
        session.query(BackfillJob)
        .filter(BackfillJob.dag_id == dag_id)
        .order_by(BackfillJob.start_date.desc())
        .limit(20)
        .all()
    )
    results = []
    for job in recent:
        results.append({
            'id': job.id,
            'dag_id': dag_id,
            'state': job.state,
            'hostname': job.hostname,
            'start_date': dates.to_milliseconds(job.start_date),
            'end_date': dates.to_milliseconds(job.end_date),
        })
    return results
