import asyncio
import io
import json
import os
import time
from logging import Logger
from pathlib import Path
from typing import Optional

from dli.dli_client import DliClient
from dli.exception import DliException
from dli.table import Table, TableSchema

from prefect import get_run_logger, task
from prefect.blocks.abstract import SecretBlock
from prefect.blocks.core import Block
from prefect.exceptions import MissingContextError
from prefect.logging.loggers import get_logger, get_run_logger
from prefect.utilities.asyncutils import run_sync_in_worker_thread, sync_compatible
from pydantic import VERSION as PYDANTIC_VERSION
from pydantic import Field, SecretStr


class Column:
    """A single table column: its name and its DLI data-type string."""

    def __init__(self, name, data_type):
        self.name, self.data_type = name, data_type


# Register this block with:
#   prefect block register --file dli-sdk-python-1.0.8/dli_block.py
class DliBlock(Block):
    """Prefect block for Huawei Cloud DLI (Data Lake Insight).

    Stores AK/SK credentials plus endpoint/project configuration and exposes
    common DLI operations (database/table management, resource upload, data
    import/export, SQL execution and Spark batch jobs) as Prefect tasks.
    All blocking SDK calls are offloaded to a worker thread via
    ``run_sync_in_worker_thread`` so the async task bodies do not block the
    event loop.
    """

    _logo_url = "https://res.hc-cdn.com/console-ui-common/default/logo.svg"
    _block_type_name = "HuaweiCloud DLI"
    _documentation_url = ("https://support.huaweicloud.com/sdkreference-dli/dli_04_0033.html")

    # Only AK/SK authentication is supported by this block.
    auth_mode = 'aksk'

    huawei_cloud_access_key_id: Optional[SecretStr] = Field(
        default=None,
        description="AK",
        title="Huawei Cloud Access Key ID",
    )
    huawei_cloud_secret_access_key: Optional[SecretStr] = Field(
        default=None,
        description="SK",
        title="Huawei Cloud Access Key Secret",
    )
    # Temporary security token (currently unused by _get_dli_client).
    huawei_cloud_security_token: Optional[SecretStr] = Field(
        default=None,
        description="临时令牌",
        title="Huawei Cloud Security Token",
    )

    # Region code of the DLI service.
    region: Optional[SecretStr] = Field(
        default=None,
        description="服务器编码",
        title="Region",
    )
    # Project ID of the API credential.
    project_id: Optional[SecretStr] = Field(
        default=None,
        description="API凭证项目ID",
        title="Project_id",
    )
    # DLI service endpoint.
    end_point: str = Field(
        default="dli.cn-north-4.myhuaweicloud.com",
        description="终端地址",
        title="End Point",
    )
    # OBS service endpoint (optional).
    obs_endpoint: Optional[SecretStr] = Field(
        default=None,
        description="obs终端地址",
        title="Obs_endpoint",
    )
    # Huawei Cloud account name (currently unused by _get_dli_client).
    username: Optional[SecretStr] = Field(
        default=None,
        description="华为云账号名",
        title="Username",
    )
    # Default queue name (currently unused; tasks take queue_name explicitly).
    queue_name: Optional[SecretStr] = Field(
        default=None,
        description="队列名",
        title="Queue_name",
    )

    def _get_dli_client(self) -> DliClient:
        """Build a ``DliClient`` from the block's stored credentials.

        Returns:
            A configured ``DliClient`` using AK/SK authentication.

        Raises:
            ValueError: if any required field (access key id, secret access
                key, region, project id) is unset. ``obs_endpoint`` remains
                optional and is passed as ``None`` when absent.
        """
        required = {
            "huawei_cloud_access_key_id": self.huawei_cloud_access_key_id,
            "huawei_cloud_secret_access_key": self.huawei_cloud_secret_access_key,
            "region": self.region,
            "project_id": self.project_id,
        }
        missing = [name for name, value in required.items() if value is None]
        if missing:
            raise ValueError(f"please input: {', '.join(missing)}")

        return DliClient(
            region=self.region.get_secret_value(),
            project_id=self.project_id.get_secret_value(),
            auth_mode='aksk',
            ak=self.huawei_cloud_access_key_id.get_secret_value(),
            sk=self.huawei_cloud_secret_access_key.get_secret_value(),
            endpoint=self.end_point,
            # obs_endpoint is optional; only unwrap the secret when provided.
            obs_endpoint=self.obs_endpoint.get_secret_value() if self.obs_endpoint else None,
        )

    @property
    def logger(self) -> Logger:
        """Return the Prefect run logger, or a named logger outside a flow/task run."""
        try:
            return get_run_logger()
        except MissingContextError:
            return get_logger(self.__class__.__name__)

    @sync_compatible
    @task
    async def upload_resource(self, kind, obs_paths, group_name):
        """Upload a resource package to DLI.

        Args:
            kind: Package type. Supported values:
                ``jar`` (user jar file), ``pyfile`` (user Python file),
                ``file`` (user file), ``modelfile`` (user AI model file).
            obs_paths: OBS path(s) of the package, formed as
                ``{bucketName}.{obs domain}/{jarPath}/{jarName}``, e.g.
                "https://bucketname.obs.cn-north-1.myhuaweicloud.com/jarname.jar".
            group_name: Name of the resource group the package belongs to.
        """
        dli_client = self._get_dli_client()
        try:
            await run_sync_in_worker_thread(dli_client.upload_resource, kind, obs_paths, group_name)
            self.logger.info(f"Resource of kind '{kind}' uploaded successfully.")
        except DliException as e:
            self.logger.error(f"Failed to upload resource of kind '{kind}': {e}")

    @sync_compatible
    @task
    async def delete_resource(self, resource_name, group_name):
        """Delete resource package *resource_name* from group *group_name*."""
        try:
            dli_client = self._get_dli_client()
            await run_sync_in_worker_thread(dli_client.delete_resource, resource_name, group_name)
            self.logger.info(f"Resource '{resource_name}' in group '{group_name}' deleted successfully.")
        except DliException as e:
            self.logger.error(f"Failed to delete resource '{resource_name}' in group '{group_name}': {e}")

    @sync_compatible
    @task
    async def create_db(self, db_name: Optional[str] = None):
        """Create database *db_name*.

        Returns:
            The SDK database object on success, ``None`` on failure.
        """
        client = self._get_dli_client()
        try:
            db = await run_sync_in_worker_thread(client.create_database, db_name)
            self.logger.info(f"Database '{db_name}' created successfully.")
            return db
        except DliException as e:
            self.logger.error(f"Failed to create database '{db_name}': {e}")
            return None

    @sync_compatible
    @task
    async def delete_database(self, db_name: str):
        """Delete database *db_name*."""
        dli_client = self._get_dli_client()
        try:
            await run_sync_in_worker_thread(dli_client.delete_database, db_name)
            self.logger.info(f"Database '{db_name}' deleted successfully.")
        except DliException as e:
            self.logger.error(f"Failed to delete database '{db_name}': {e}")

    @sync_compatible
    @task
    async def list_all_dbs(self):
        """Log every database visible to the configured project."""
        client = self._get_dli_client()
        try:
            dbs = await run_sync_in_worker_thread(client.list_databases)
            self.logger.info("List of all databases:")
            for db in dbs:
                self.logger.info(db)
        except DliException as e:
            self.logger.error(f"Failed to list all databases: {e}")

    @sync_compatible
    @task
    async def create_obs_tbl(self, db_name, tbl_name,
                             data_location='obs://bucket/obj', data_format='csv'):
        """Create an OBS table with a fixed sample schema.

        Args:
            db_name: Database to create the table in.
            tbl_name: Name of the new table.
            data_location: OBS path backing the table data.
            data_format: Storage format of the table data.

        Returns:
            The SDK table object on success, ``None`` on failure.
        """
        # Sample schema covering the common DLI column data types.
        cols = [
            Column('col_1', 'string'),
            Column('col_2', 'string'),
            Column('col_3', 'smallint'),
            Column('col_4', 'int'),
            Column('col_5', 'bigint'),
            Column('col_6', 'double'),
            Column('col_7', 'decimal(10,0)'),
            Column('col_8', 'boolean'),
            Column('col_9', 'date'),
            Column('col_10', 'timestamp')
        ]
        tbl_schema = TableSchema(tbl_name, cols)
        try:
            table = await run_sync_in_worker_thread(
                self._get_dli_client().create_obs_table,
                db_name,
                tbl_schema,
                data_location,
                data_format
            )
            self.logger.info(f"OBS table '{tbl_name}' created successfully.")
            self.logger.info(table)
            return table
        except DliException as e:
            self.logger.error(f"Failed to create OBS table '{tbl_name}': {e}")
            return None

    @sync_compatible
    @task
    async def delete_tbls(self, db_name):
        """Delete every table in database *db_name*."""
        try:
            dli_client = self._get_dli_client()
            tbls = await run_sync_in_worker_thread(dli_client.list_tables, db_name)
            for tbl in tbls:
                await run_sync_in_worker_thread(dli_client.delete_table, db_name, tbl.name)
                self.logger.info(f"Table '{tbl.name}' in database '{db_name}' deleted successfully.")

        except DliException as e:
            self.logger.error(f"Failed to delete tables in database '{db_name}': {e}")

    @sync_compatible
    @task
    async def list_all_tbls(self, db_name):
        """Log the name of every table in database *db_name*."""
        try:
            dli_client = self._get_dli_client()
            tbls = await run_sync_in_worker_thread(dli_client.list_tables, db_name, with_detail=True)

            # Log all table names.
            for tbl in tbls:
                self.logger.info(tbl.name)

        except DliException as e:
            self.logger.error(f"Failed to list tables in database '{db_name}': {e}")

    @sync_compatible
    @task
    async def import_data(self, db_name, tbl_name, queue_name,
                          data_path='obs://bucket/obj/data.csv', data_format='csv'):
        """Import CSV data from OBS into table *tbl_name* of *db_name*.

        Args:
            db_name: Target database.
            tbl_name: Target table.
            queue_name: DLI queue that runs the import job.
            data_path: OBS path of the source data file.
            data_format: Format of the source data.
        """
        # CSV parsing options passed through to the DLI import job.
        options = {
            "with_column_header": True,
            "delimiter": ",",
            "quote_char": "\"",
            "escape_char": "\\",
            "date_format": "yyyy/MM/dd",
            "timestamp_format": "yyyy/MM/dd hh:mm:ss"
        }

        try:
            dli_client = self._get_dli_client()
            job_id, status = await run_sync_in_worker_thread(
                dli_client.import_table,
                tbl_name,
                db_name,
                data_path,
                data_format,
                queue_name=queue_name,
                options=options
            )

            self.logger.info(f"Import job for table '{tbl_name}' in database '{db_name}' started.")
            self.logger.info(f"Job ID: {job_id}")
            self.logger.info(f"Job Status: {status}")

        except DliException as e:
            self.logger.error(f"Failed to import data for table '{tbl_name}': {e}")

    @sync_compatible
    @task
    async def export_data(self, db_name, tbl_name, queue_name,
                          export_path='obs://bucket/obj'):
        """Export table *tbl_name* of *db_name* to OBS at *export_path*."""
        try:
            dli_client = self._get_dli_client()
            job_id, status = await run_sync_in_worker_thread(
                dli_client.export_table,
                tbl_name,
                db_name,
                export_path,
                queue_name=queue_name
            )

            self.logger.info(f"Export job for table '{tbl_name}' in database '{db_name}' started.")
            self.logger.info(f"Job ID: {job_id}")
            self.logger.info(f"Job Status: {status}")

        except DliException as e:
            self.logger.error(f"Failed to export data for table '{tbl_name}': {e}")

    @sync_compatible
    @task
    async def run_sql(self, db_name, queue_name,
                      sql='select * from tbl_dli_for_test',
                      export_path='obs://bucket/obj'):
        """Execute *sql* against *db_name*, log the rows, and export the result.

        Args:
            db_name: Database the query runs against.
            queue_name: DLI queue that executes the query.
            sql: SQL statement to execute.
            export_path: OBS path the query result is exported to.
        """
        try:
            dli_client = self._get_dli_client()
            sql_job = await run_sync_in_worker_thread(
                dli_client.execute_sql,
                sql,
                db_name,
                queue_name=queue_name
            )
            result_set = await run_sync_in_worker_thread(sql_job.get_result, queue_name=queue_name)
        except DliException as e:
            self.logger.error(f"Failed to execute SQL query: {e}")
            return

        # Nothing to log or export for an empty result.
        if result_set.row_count == 0:
            return

        # Log the query result rows.
        for row in result_set:
            self.logger.info(row)

        # Export the query result to OBS.
        try:
            status = await run_sync_in_worker_thread(
                sql_job.export_result,
                export_path,
                queue_name=queue_name
            )
            self.logger.info(f"Export SQL query result to OBS status: {status}")

        except DliException as e:
            self.logger.error(f"Failed to export SQL query result to OBS: {e}")

    @sync_compatible
    @task
    async def cancel_sql(self, job_id):
        """Cancel the running SQL job identified by *job_id*."""
        try:
            dli_client = self._get_dli_client()
            await run_sync_in_worker_thread(dli_client.cancel_sql, job_id)
            self.logger.info(f"SQL job with ID '{job_id}' canceled successfully.")
        except DliException as e:
            self.logger.error(f"Failed to cancel SQL job with ID '{job_id}': {e}")

    @sync_compatible
    @task
    async def submit_spark_batch_job(self, batch_queue_name, batch_job_info):
        """Submit a Spark batch job, poll it to completion, and log its driver log.

        Args:
            batch_queue_name: DLI queue the batch job runs on.
            batch_job_info: SDK job-description object for the batch job.
        """
        try:
            dli_client = self._get_dli_client()
            batch_job = await run_sync_in_worker_thread(
                dli_client.submit_spark_batch_job,
                batch_queue_name,
                batch_job_info
            )
        except DliException as e:
            self.logger.error(f"Failed to submit Spark batch job: {e}")
            return

        self.logger.info(f"Spark batch job submitted successfully. Job ID: {batch_job.job_id}")

        while True:
            # asyncio.sleep keeps the event loop free; time.sleep here would
            # block every other coroutine for the polling interval.
            await asyncio.sleep(3)
            job_status = await run_sync_in_worker_thread(batch_job.get_job_status)
            self.logger.info(f'Job status: {job_status}')
            if job_status in ('dead', 'success'):
                break

        # Fetch up to 500 lines of the driver log once the job is terminal.
        logs = await run_sync_in_worker_thread(batch_job.get_driver_log, 500)
        for log_line in logs:
            self.logger.info(log_line)

    @sync_compatible
    @task
    async def del_spark_batch(self, batch_id):
        """Delete the Spark batch job identified by *batch_id*."""
        try:
            dli_client = self._get_dli_client()
            resp = await run_sync_in_worker_thread(dli_client.del_spark_batch_job, batch_id)
            self.logger.info(resp.msg)
        except DliException as e:
            self.logger.error(f"Failed to delete Spark batch job with ID '{batch_id}': {e}")