from flask import Flask, request, jsonify
from airflow.api.client.local_client import Client
import time
import os
import psycopg2
from typing import List, Dict
import json

app = Flask(__name__)
# NOTE(review): SECRET_KEY is regenerated on every process start, so any
# signed cookies/sessions are invalidated across restarts, and multiple
# workers will each hold a different key — confirm this is intended.
app.config['SECRET_KEY'] = os.urandom(24)

# Airflow local API client used to trigger and poll the 'sql_executor' DAG.
# The single positional argument (api_base_url) is None for the local client.
client = Client(None)

class SQLExecutor:
    """Executes a batch of SQL statements against a PostgreSQL database.

    Each statement runs in its own transaction: a successful statement is
    committed immediately, a failing one is rolled back, and execution then
    continues with the next statement in the batch.
    """

    def __init__(self, host: str, port: int, database: str, user: str, password: str):
        # Connection parameters only; a fresh connection is opened per batch
        # in execute_sql_batch().
        self.host = host
        self.port = port
        self.database = database
        self.user = user
        self.password = password

    def execute_sql_batch(self, sql_list: List[str]) -> Dict:
        """Run every statement in ``sql_list`` and report per-statement results.

        Returns:
            A dict keyed by the SQL text, each value holding hit count,
            failure count, status, and (on failure) the error message.
            NOTE: duplicate statements in ``sql_list`` share one key, so a
            later duplicate overwrites the earlier result.

        Raises:
            Exception: if the database connection cannot be established.
        """
        results: Dict[str, Dict] = {}
        # Initialize handles to None instead of probing locals() later —
        # makes the cleanup path explicit and robust.
        conn = None
        cursor = None
        try:
            conn = psycopg2.connect(
                host=self.host,
                port=self.port,
                database=self.database,
                user=self.user,
                password=self.password,
            )
            cursor = conn.cursor()
        except Exception as e:
            # Close anything that was partially opened before re-raising.
            if cursor is not None:
                cursor.close()
            if conn is not None:
                conn.close()
            # Chain the original exception so the root cause is preserved.
            raise Exception(f"数据库连接失败: {str(e)}") from e

        try:
            for sql in sql_list:
                try:
                    cursor.execute(sql)
                    # rowcount may be -1 for statements without a row count;
                    # it is reported as-is, matching the original behavior.
                    results[sql] = {
                        "命中条数": cursor.rowcount,
                        "失败条数": 0,
                        "执行状态": "成功"
                    }
                    # Commit per statement so earlier successes survive a
                    # later failure in the same batch.
                    conn.commit()
                except Exception as e:
                    results[sql] = {
                        "命中条数": 0,
                        "失败条数": 1,
                        "执行状态": "失败",
                        "错误信息": str(e)
                    }
                    conn.rollback()
        finally:
            cursor.close()
            conn.close()

        return results

@app.route('/health', methods=['GET'])
def health_check():
    """Liveness probe: unconditionally reports the service as healthy."""
    payload = {'status': 'healthy'}
    return jsonify(payload), 200

@app.route('/execute_sql', methods=['POST'])
def trigger_sql_execution():
    """Trigger the ``sql_executor`` DAG with the posted SQL and wait for it.

    Expects a JSON body of the form ``{"sql": ...}``. Blocks the request,
    polling once per second, until the DAG run succeeds, fails, or the
    timeout elapses.

    Returns:
        200 with ``{'status': 'success', 'results': ...}`` on success,
        400 for a missing body or missing ``sql`` field,
        500 if the DAG run fails or an unexpected error occurs,
        504 if the DAG run does not finish within the timeout.
    """
    MAX_WAIT_SECONDS = 300  # give up after 5 minutes
    POLL_INTERVAL_SECONDS = 1
    try:
        # Extract the SQL payload from the request.
        data = request.get_json()
        if not data:
            return jsonify({'error': 'No JSON data provided'}), 400

        sql = data.get('sql')
        if not sql:
            return jsonify({'error': 'No SQL provided'}), 400

        # Kick off a DAG run carrying the SQL in its conf.
        dag_run = client.trigger_dag(
            dag_id='sql_executor',
            conf={'sql': sql}
        )

        # Use a monotonic clock for the deadline so the timeout is immune
        # to wall-clock adjustments (NTP steps, DST, manual changes).
        deadline = time.monotonic() + MAX_WAIT_SECONDS

        # Poll until the DAG run reaches a terminal state or we time out.
        while True:
            if time.monotonic() > deadline:
                return jsonify({'error': 'DAG execution timeout'}), 504

            # NOTE(review): get_dag_run_status/get_task_xcom are not part of
            # the stock airflow local Client API — confirm the deployed
            # client actually provides them.
            dag_run_status = client.get_dag_run_status(
                dag_id='sql_executor',
                run_id=dag_run.run_id
            )

            if dag_run_status == 'success':
                # Pull the executor task's results out of XCom.
                result = client.get_task_xcom(
                    dag_id='sql_executor',
                    task_id='execute_sql',
                    run_id=dag_run.run_id,
                    key='sql_results'
                )
                return jsonify({
                    'status': 'success',
                    'results': result
                })

            elif dag_run_status == 'failed':
                return jsonify({
                    'status': 'failed',
                    'error': 'DAG execution failed'
                }), 500

            time.sleep(POLL_INTERVAL_SECONDS)

    except Exception as e:
        # Route-level boundary: convert any unexpected error into a JSON 500.
        return jsonify({
            'status': 'error',
            'error': str(e)
        }), 500

@app.errorhandler(404)
def not_found(error):
    """Return a JSON body for 404s so API clients never see an HTML page."""
    body = {'error': 'Not found'}
    return jsonify(body), 404

@app.errorhandler(500)
def internal_error(error):
    """Return a JSON body for unhandled 500s, mirroring the 404 handler."""
    body = {'error': 'Internal server error'}
    return jsonify(body), 500

if __name__ == '__main__':
    # Binds on all interfaces (0.0.0.0) on port 5000, debug disabled.
    # NOTE(review): this is Flask's built-in development server — use a
    # production WSGI server (gunicorn/uwsgi) for real deployments.
    app.run(host='0.0.0.0', port=5000, debug=False) 