import json
import numpy as np
from pathlib import Path
from osgeo import gdal

from CreatOceanGrid.CreatOceanGrid import input_dir

class GridTIFFMergerGDAL:
    """Merge daily tide (1-band) and UV (4-band) GeoTIFFs into 5-band files.

    For each date folder present in both ``daily_tide`` and ``daily_uv``,
    every matching pair of tiffs is combined into one 5-band Float64 GeoTIFF
    under ``union/<date>/`` (band 1 = tide, bands 2-5 = UV), and a per-date
    DGGS layer-config JSON is written alongside.
    """

    def __init__(self):
        # DGGS grid level; also drives the per-partition grid size (2**level)
        # written into the JSON config.
        self.level = 6
        # Directory layout rooted at the project-configured input directory.
        self.baseDir = Path(input_dir)
        self.tideDir = self.baseDir / "daily_tide"
        self.uvDir = self.baseDir / "daily_uv"
        self.outputDir = self.baseDir / "union"

        # Ensure the output root exists (idempotent).
        self.outputDir.mkdir(exist_ok=True)

        # Register all GDAL format drivers.
        gdal.AllRegister()

    def createJsonConfig(self, dateFolder, outputDir):
        """Write the per-date DGGS layer config JSON into *outputDir*.

        The config describes a 5-band Float64 ISEA4H cell layer with ten
        base partitions. Returns ``True`` after the file is written.
        """
        # Per-partition grid size for this level (64 rows/cols at level 6).
        size = 1 << self.level
        col_max = size - 1

        # One entry per base partition 1..10; the fileName suffix is the
        # cumulative partition path: "_1", "_1_2", ..., "_1_2_..._10".
        part_param = [
            {
                "boundBox": {
                    "rowMin": 0,
                    "rowSize": size,
                    "colSize": size,
                    "colMax": col_max,
                    "basePartition": part,
                },
                "fileName": "_" + "_".join(str(p) for p in range(1, part + 1)),
            }
            for part in range(1, 11)
        ]

        json_data = {
            "name": "featureLayer",
            "level": self.level,
            "bandCount": 5,     # 1 tide band + 4 UV bands
            "dataType": 7,      # DGGS_Float64
            "elementType": 2,   # DGGSElementType_Cell
            "gridType": 0,      # DGGSGridType_DGGS_ISEA4H
            "partParam": part_param,
        }

        json_path = outputDir / f"{dateFolder}.json"
        with open(json_path, 'w', encoding='utf-8') as f:
            json.dump(json_data, f, indent=4, ensure_ascii=False)

        print(f"  已创建配置文件: {json_path}")
        return True

    def diagnoseSourceData(self, file_path):
        """Print a small sample of every band of *file_path* for debugging."""
        try:
            dataset = gdal.Open(str(file_path), gdal.GA_ReadOnly)
            if dataset is None:
                print("无法打开文件")
                return

            for i in range(1, dataset.RasterCount + 1):
                data = dataset.GetRasterBand(i).ReadAsArray()
                # Show the first few raw values of the band.
                print(f"前5个值: {data.flatten()[:5]}")

            dataset = None  # release the GDAL dataset handle

        except Exception as e:
            print(f"  诊断出错: {e}")

    def getDateFolders(self):
        """Return the sorted date-folder names present in BOTH inputs."""
        tideDates = {f.name for f in self.tideDir.iterdir() if f.is_dir()}
        uvDates = {f.name for f in self.uvDir.iterdir() if f.is_dir()}
        # Only dates that have both tide and UV data can be merged.
        return sorted(tideDates & uvDates)

    def getTiffFiles(self, dateFolder):
        """Return (tide_files, uv_files) sorted lists for one date folder."""
        tideFolder = self.tideDir / dateFolder
        uvFolder = self.uvDir / dateFolder

        tideFiles = sorted([f for f in tideFolder.glob("*.tiff") if f.is_file()])
        uvFiles = sorted([f for f in uvFolder.glob("*.tiff") if f.is_file()])

        return tideFiles, uvFiles

    def readRasterData(self, file_path, band_index=1):
        """Read one band of a raster.

        Returns ``(data, geotransform, projection, nodata)``; the nodata
        value is replaced by NaN in *data*. On any failure returns
        ``(None, None, None, None)``.
        """
        try:
            dataset = gdal.Open(str(file_path), gdal.GA_ReadOnly)
            if dataset is None:
                print(f"无法打开文件: {file_path}")
                return None, None, None, None

            band = dataset.GetRasterBand(band_index)
            data = band.ReadAsArray()
            nodata = band.GetNoDataValue()

            # Normalize the declared NoData value to NaN so downstream
            # merging can treat missing cells uniformly.
            if nodata is not None:
                data = np.where(data == nodata, np.nan, data)

            geotransform = dataset.GetGeoTransform()
            projection = dataset.GetProjection()

            dataset = None  # close the dataset

            return data, geotransform, projection, nodata

        except Exception as e:
            # Report the failure instead of swallowing it silently; callers
            # treat a None data array as "could not read".
            print(f"读取文件 {file_path} 时出错: {e}")
            return None, None, None, None

    def createOutputRaster(self, output_path, data_arrays, geotransform, projection, nodata_value=np.nan):
        """Write *data_arrays* as a multi-band Float64 GeoTIFF.

        All arrays must share the shape of ``data_arrays[0]``; NaNs are
        converted back to *nodata_value* on write. Returns True on success.
        """
        try:
            rows, cols = data_arrays[0].shape
            band_count = len(data_arrays)

            driver = gdal.GetDriverByName('GTiff')
            out_dataset = driver.Create(
                str(output_path),
                cols, rows,
                band_count,
                gdal.GDT_Float64  # keep full double precision
            )

            if out_dataset is None:
                print(f"无法创建输出文件: {output_path}")
                return False

            # Carry the geo-reference over from the source raster.
            out_dataset.SetGeoTransform(geotransform)
            out_dataset.SetProjection(projection)

            for i, data_array in enumerate(data_arrays):
                band = out_dataset.GetRasterBand(i + 1)

                # Map NaN back to the declared NoData value for storage.
                if np.any(np.isnan(data_array)):
                    data_array = np.where(np.isnan(data_array), nodata_value, data_array)

                band.WriteArray(data_array)
                band.SetNoDataValue(nodata_value)
                band.FlushCache()

            out_dataset = None  # close to flush everything to disk
            return True

        except Exception as e:
            print(f"创建输出文件 {output_path} 时出错: {e}")
            return False

    def mergeSingleFile(self, tideFile, uvFile, outputDir, filename):
        """Merge one tide/UV tiff pair into a 5-band file named *filename*."""

        # Tide data: single band; also supplies the geo-reference.
        tide_data, geotransform, projection, tide_nodata = self.readRasterData(tideFile, 1)
        if tide_data is None:
            print(f"  错误: 无法读取潮位文件 {tideFile}")
            return False

        # UV data: bands 1-4.
        uv_bands = []
        for band_idx in range(1, 5):
            uv_data, _, _, uv_nodata = self.readRasterData(uvFile, band_idx)
            if uv_data is None:
                print(f"  错误: 无法读取UV文件波段{band_idx} {uvFile}")
                return False
            uv_bands.append(uv_data)

        # Band order in the output: tide first, then the four UV bands.
        merged_bands = [tide_data] + uv_bands

        output_path = outputDir / filename

        # Prefer the tide file's declared NoData; fall back to -9999.
        nodata_value = tide_nodata if tide_nodata is not None else -9999.0
        success = self.createOutputRaster(
            output_path,
            merged_bands,
            geotransform,
            projection,
            nodata_value
        )

        if success:
            print(f"  已保存: {output_path}")
            return True
        else:
            print(f"  保存失败: {output_path}")
            return False

    def processDate(self, dateFolder):
        """Merge every tiff pair of one date folder and write its config."""
        print(f"处理日期: {dateFolder}")

        tideFiles, uvFiles = self.getTiffFiles(dateFolder)

        if len(tideFiles) != len(uvFiles):
            # Mismatched counts would pair the wrong files below, so skip
            # the whole date — but say so instead of failing silently.
            print(f"  警告: {dateFolder} 潮位/UV文件数量不匹配，已跳过")
            return

        dateOutputDir = self.outputDir / dateFolder
        dateOutputDir.mkdir(exist_ok=True)

        success_count = 0
        # Pairing relies on both lists being sorted the same way.
        for tideFile, uvFile in zip(tideFiles, uvFiles):
            filename = tideFile.name
            if self.mergeSingleFile(tideFile, uvFile, dateOutputDir, filename):
                success_count += 1

        # Only emit the config once at least one merged tiff exists.
        if success_count > 0:
            self.createJsonConfig(dateFolder, dateOutputDir)

    def run(self):
        """Run the whole merge pipeline over every matching date folder."""
        dateFolders = self.getDateFolders()

        if not dateFolders:
            print("未找到匹配的日期文件夹")
            return

        # The count is of date folders, not individual files.
        print(f"找到 {len(dateFolders)} 个日期文件夹需要处理")

        for dateFolder in dateFolders:
            self.processDate(dateFolder)

        print("所有文件合并完成！")

def main():
    """Entry point: build the merger and process every matching date."""
    GridTIFFMergerGDAL().run()

# Run the merge pipeline only when executed as a script (not on import).
if __name__ == "__main__":
    main()