import os
import requests
from typing import Dict, Any, List, Optional
from pyspark import SparkConf, SparkContext
from pyspark.sql import SparkSession

class ClusterManager:
    """Facade over the REST/driver APIs of a Hadoop/Spark/Flink/YARN cluster.

    Endpoints default to localhost; reassign the ``*_url`` attributes after
    construction if the services run elsewhere. All query methods are
    best-effort: network failures are caught and reported in the return
    value, never raised to the caller.
    """

    # Seconds to wait on every REST call so an unreachable service cannot
    # hang the caller indefinitely (the original calls had no timeout).
    REQUEST_TIMEOUT = 30

    def __init__(self):
        # REST endpoints of the individual cluster services.
        self.hadoop_namenode_url = "http://localhost:9870"
        self.yarn_rm_url = "http://localhost:8088"
        self.spark_master_url = "http://localhost:8080"
        self.flink_jobmanager_url = "http://localhost:8181"

        # Create the Spark session eagerly so methods may assume it exists.
        self.spark = self._create_spark_session()

    def _create_spark_session(self) -> SparkSession:
        """Create (or reuse) a SparkSession attached to the standalone master."""
        return SparkSession.builder\
            .appName("ElectricPowerPrediction")\
            .master("spark://spark-master:7077")\
            .getOrCreate()

    def check_hdfs_status(self) -> Dict[str, Any]:
        """Query the NameNode JMX endpoint and report overall HDFS health.

        Returns:
            On success a dict with ``status: "healthy"`` plus
            ``total_capacity``/``used_capacity``; otherwise
            ``{"status": "error", "message": ...}``.
        """
        try:
            response = requests.get(
                f"{self.hadoop_namenode_url}/jmx",
                timeout=self.REQUEST_TIMEOUT,
            )
            if response.status_code == 200:
                data = response.json()
                # NOTE(review): reads Total/Used from the FIRST JMX bean only —
                # assumes the NameNode lists the capacity bean first; confirm
                # against the actual /jmx payload.
                return {
                    "status": "healthy",
                    "total_capacity": data.get("beans", [{}])[0].get("Total", 0),
                    "used_capacity": data.get("beans", [{}])[0].get("Used", 0)
                }
            return {"status": "error", "message": "无法获取HDFS状态"}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    def list_hdfs_directory(self, path: str) -> List[Dict[str, str]]:
        """List a directory via WebHDFS ``LISTSTATUS``.

        Args:
            path: HDFS path relative to the filesystem root (no leading
                slash needed — one is implied by the URL template).

        Returns:
            The ``FileStatus`` entries, or ``[]`` on any error.
        """
        try:
            response = requests.get(
                f"{self.hadoop_namenode_url}/webhdfs/v1/{path}?op=LISTSTATUS",
                timeout=self.REQUEST_TIMEOUT,
            )
            if response.status_code == 200:
                return response.json().get("FileStatuses", {}).get("FileStatus", [])
            return []
        except Exception as e:
            print(f"列出HDFS目录出错: {str(e)}")
            return []

    def submit_spark_job(self, job_path: str, job_args: Optional[List[str]] = None) -> str:
        """Register a Python file with the running Spark application.

        NOTE(review): despite the name, this does not spark-submit a new
        application — ``addPyFile`` only ships *job_path* to the executors of
        the session created in ``__init__``, and ``job_args`` is currently
        unused. Confirm intended semantics with the callers before relying
        on this as a real job-submission API.

        Args:
            job_path: Path to the ``.py``/``.zip`` file to distribute.
            job_args: Accepted for interface compatibility; not used.

        Returns:
            A human-readable status string (never raises).
        """
        try:
            if job_args is None:
                job_args = []

            # The "job id" reported is the application id of our own session.
            job_id = self.spark.sparkContext.applicationId
            self.spark.sparkContext.addPyFile(job_path)

            return f"作业已提交，ID: {job_id}"
        except Exception as e:
            return f"提交Spark作业失败: {str(e)}"

    def get_spark_job_status(self, job_id: str) -> Dict[str, Any]:
        """Fetch application info from the Spark master REST API.

        Args:
            job_id: Spark application id (e.g. ``app-2024...``).

        Returns:
            The application JSON, or ``{"status": "error", ...}`` on failure.
        """
        try:
            response = requests.get(
                f"{self.spark_master_url}/api/v1/applications/{job_id}",
                timeout=self.REQUEST_TIMEOUT,
            )
            if response.status_code == 200:
                return response.json()
            return {"status": "error", "message": "无法获取作业状态"}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    def submit_flink_job(self, jar_path: str, entry_class: str,
                        program_args: Optional[List[str]] = None) -> str:
        """Upload a job jar to the Flink JobManager and run it.

        Args:
            jar_path: Local filesystem path of the jar to upload.
            entry_class: Fully-qualified main class inside the jar.
            program_args: Optional CLI arguments joined with spaces.

        Returns:
            A human-readable status string (never raises).
        """
        try:
            if program_args is None:
                program_args = []

            data = {
                'entryClass': entry_class,
                'programArgs': ' '.join(program_args)
            }

            # Use a context manager so the jar's file handle is always closed;
            # the previous version opened it inline and leaked the descriptor.
            with open(jar_path, 'rb') as jar_file:
                files = {
                    'jarfile': ('job.jar', jar_file, 'application/x-java-archive')
                }
                response = requests.post(
                    f"{self.flink_jobmanager_url}/jars/upload",
                    files=files,
                    timeout=self.REQUEST_TIMEOUT,
                )

            if response.status_code == 200:
                jar_id = response.json().get('filename')
                run_response = requests.post(
                    f"{self.flink_jobmanager_url}/jars/{jar_id}/run",
                    json=data,
                    timeout=self.REQUEST_TIMEOUT,
                )

                if run_response.status_code == 200:
                    return f"作业已提交，ID: {run_response.json().get('jobid')}"

            return "提交Flink作业失败"
        except Exception as e:
            return f"提交Flink作业失败: {str(e)}"

    def get_flink_job_status(self, job_id: str) -> Dict[str, Any]:
        """Fetch job details from the Flink JobManager REST API.

        Args:
            job_id: Flink job id as returned by ``submit_flink_job``.

        Returns:
            The job JSON, or ``{"status": "error", ...}`` on failure.
        """
        try:
            response = requests.get(
                f"{self.flink_jobmanager_url}/jobs/{job_id}",
                timeout=self.REQUEST_TIMEOUT,
            )
            if response.status_code == 200:
                return response.json()
            return {"status": "error", "message": "无法获取作业状态"}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    def get_yarn_metrics(self) -> Dict[str, Any]:
        """Fetch cluster-wide metrics from the YARN ResourceManager.

        Returns:
            The ``clusterMetrics`` object, or ``{"status": "error", ...}``
            on failure.
        """
        try:
            response = requests.get(
                f"{self.yarn_rm_url}/ws/v1/cluster/metrics",
                timeout=self.REQUEST_TIMEOUT,
            )
            if response.status_code == 200:
                return response.json().get("clusterMetrics", {})
            return {"status": "error", "message": "无法获取YARN指标"}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    def list_yarn_applications(self, status: Optional[str] = None) -> List[Dict[str, Any]]:
        """List applications known to the YARN ResourceManager.

        Args:
            status: Optional comma-separated application states to filter by
                (passed through as the ``states`` query parameter).

        Returns:
            The list of application dicts, or ``[]`` on any error.
        """
        try:
            url = f"{self.yarn_rm_url}/ws/v1/cluster/apps"
            if status:
                url += f"?states={status}"

            response = requests.get(url, timeout=self.REQUEST_TIMEOUT)
            if response.status_code == 200:
                return response.json().get("apps", {}).get("app", [])
            return []
        except Exception as e:
            print(f"列出YARN应用出错: {str(e)}")
            return []