from io import BytesIO
from minio import Minio, S3Error
from airflow import DAG
from airflow.operators.python import PythonOperator
from airflow.operators.bash import BashOperator
from airflow.providers.cncf.kubernetes.operators.spark_kubernetes import SparkKubernetesOperator
from datetime import datetime, timedelta

import pandas as pd
# Defaults applied to every task created under this DAG.
default_args = dict(
    owner='airflow',
    depends_on_past=False,
    start_date=datetime(2024, 11, 27),
    email_on_failure=False,
    email_on_retry=False,
    retries=0,  # fail fast: no automatic retries
)
# DAG definition: no schedule (manual trigger only); templates such as the
# Spark application manifest are resolved from /opt/airflow.
dag = DAG(
    dag_id='yz-dag',
    description='yz create a dag!',
    schedule_interval=None,  # run only when triggered manually
    template_searchpath=['/opt/airflow'],
    default_args=default_args,
)
# --- DAG-parse-time data load -------------------------------------------
# Downloads QSworld.csv from MinIO into the DataFrame `converted_data1`.
# NOTE(review): this runs on every scheduler parse of the DAG file, not
# when the DAG executes — consider moving the download into a task.
# SECURITY: credentials are hard-coded; move them into an Airflow
# Connection / secrets backend.
data_source = None
minio_client = Minio(
    "192.168.110.11:30990",
    access_key="TlBvQvkht9xTRIbLw6CD",
    secret_key="X8PWjkNjJWaE3jR62fDWuwINAEJKlmY4IIseEjeo",
    secure=False
)
if not minio_client.bucket_exists('yz-bucket'):
    # The original left data_source as None here and later crashed with an
    # opaque AttributeError on data_source.stream(); fail explicitly instead.
    raise FileNotFoundError("MinIO bucket 'yz-bucket' does not exist")
data_source = minio_client.get_object("yz-bucket", "QSworld.csv")
bytes_data = BytesIO()
try:
    # Copy the object body in 32 KiB chunks.
    for chunk in data_source.stream(32 * 1024):
        bytes_data.write(chunk)
finally:
    # Always close the HTTP response and return the connection to the
    # urllib3 pool (required by the MinIO SDK for get_object responses).
    data_source.close()
    data_source.release_conn()
bytes_data.seek(0)
converted_data1 = pd.read_csv(bytes_data)

def user_template(dataframe, a=None, b=None):
    """Print and return a preview of the first 10 rows of *dataframe*.

    Args:
        dataframe: pandas DataFrame to preview.
        a, b: unused placeholders. They now default to None because task1
            supplies only ``dataframe`` in op_kwargs, which made the
            original required parameters raise TypeError at execution time.

    Returns:
        list[dict]: up to 10 rows in ``to_dict(orient='records')`` form
        (returning the value also lets PythonOperator push it to XCom).
    """
    res = dataframe.head(10).to_dict(orient='records')
    print(res)
    return res

def test_func(c=None):
    """Smoke-test callable: print a marker value.

    ``c`` is unused; it defaults to None because task2 supplies no
    op_kwargs, which made the original required parameter raise
    TypeError at execution time.
    """
    print(123)

# task1: preview the first rows of the downloaded CSV.
task1 = PythonOperator(
    task_id='yz-task1',
    python_callable=user_template,
    op_kwargs={
        "dataframe": converted_data1,
        # user_template declares parameters a and b; supply them explicitly
        # so the call does not raise TypeError when the task executes.
        "a": None,
        "b": None,
    },
    dag=dag,
)
# task2: trivial smoke-test task.
task2 = PythonOperator(
    task_id='yz-task2',
    python_callable=test_func,
    op_kwargs={
        # test_func declares parameter c; supply it explicitly so the call
        # does not raise TypeError when the task executes.
        "c": None,
    },
    dag=dag,
)
# task3: submit the PySpark job via Spark-on-Kubernetes. The
# SparkApplication manifest is resolved through the DAG's
# template_searchpath (/opt/airflow).
task3 = SparkKubernetesOperator(
    dag=dag,
    task_id='yz-task3',
    namespace='default',
    application_file='spark_job_template.yaml',
    image='spark-pandas:3.5.3',
    code_path='local:///home/count_schools.py',
    random_name_suffix=True,  # unique app name per run
)
# task1 fans out: task2 and task3 both run after task1 completes.
task1 >> [task2, task3]
