package org.jeecg;

import org.geotools.data.DataStore;
import org.geotools.data.DataStoreFinder;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.locationtech.jts.geom.MultiPolygon;
import org.locationtech.jts.io.WKBWriter;
import org.opengis.feature.simple.SimpleFeature;

import java.io.File;
import java.sql.*;
import java.util.Map;
import java.util.UUID;

/**
 * Resumable bulk importer: loads the tail of a Shapefile (records whose "Id"
 * attribute lies in [START_ID, END_ID]) into a PostGIS table using plain JDBC
 * batch inserts with manual commits.
 *
 * <p>Flow: open JDBC connection (autocommit off) → open the shapefile via
 * GeoTools → iterate features, filtering by ID range → insert geometry as WKB
 * through {@code ST_GeomFromWKB} in batches of {@link #BATCH_SIZE} → verify the
 * final table row count against the expected total.
 */
public class Shp2PostGIS_JDBCBatch_Resumable {

    // Connection settings.
    // SECURITY NOTE(review): database credentials are hard-coded in source.
    // Move them to environment variables or an external config file before
    // committing/sharing this code.
    private static final String JDBC_URL = "jdbc:postgresql://123.56.202.52:5432/gisdb";
    private static final String USERNAME = "postgres";
    private static final String PASSWORD = "wwq20010215";
    private static final String TABLE_NAME = "geotype";

    // Batch size tuned for importing the final 8838 records.
    private static final int BATCH_SIZE = 1000;

    // SRID of the shapefile geometries (EPSG:32650 — WGS 84 / UTM zone 50N).
    private static final int SRID = 32650;

    // Inclusive ID range of the records to import (the last 8838 rows).
    private static final long START_ID = 1880001;
    private static final long END_ID = 1888838;

    // Rows already present in the table before this run; used for the final
    // integrity check (dbCount should equal EXISTING_ROWS + totalProcessed).
    private static final long EXISTING_ROWS = 1880000;

    // Running count of rows added to the batch during this run.
    private static int totalProcessed = 0;

    /**
     * Entry point. Opens the database and shapefile, streams the features in
     * the configured ID range into the target table, then verifies the count.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String shpPath = "F:\\地质数据\\地形坡度\\地形坡度底图.shp";

        Connection conn = null;
        PreparedStatement pstmt = null;
        SimpleFeatureIterator iterator = null;
        DataStore shpDataStore = null;
        WKBWriter wkbWriter = new WKBWriter();
        long programStart = System.currentTimeMillis();

        try {
            // 1. Initialize the database connection with manual commits so each
            //    batch is committed explicitly (resumable on failure).
            conn = DriverManager.getConnection(JDBC_URL, USERNAME, PASSWORD);
            conn.setAutoCommit(false);

            // 2. Load the Shapefile. Spatial index is skipped (pure sequential
            //    read) and memory-mapped buffers speed up large-file access.
            File shpFile = new File(shpPath);
            shpDataStore = DataStoreFinder.getDataStore(Map.of(
                    "url", shpFile.toURI().toURL(),
                    "create spatial index", Boolean.FALSE,
                    "memory mapped buffer", Boolean.TRUE
            ));
            if (shpDataStore == null) throw new RuntimeException("无法加载Shapefile数据源");

            String typeName = shpDataStore.getTypeNames()[0];
            SimpleFeatureCollection features = shpDataStore.getFeatureSource(typeName).getFeatures();
            long featureCount = features.size();
            System.out.println("Shapefile总记录数: " + featureCount);

            // 3. Prepare the insert statement (no ON CONFLICT clause — the
            //    target ID range is assumed empty for a resumed run).
            String insertSQL = "INSERT INTO " + TABLE_NAME + " (id, the_geom, id_orig, gridcode) " +
                    "VALUES (?, ST_GeomFromWKB(?, " + SRID + "), ?, ?)";
            pstmt = conn.prepareStatement(insertSQL);

            // 4. Import only records within the configured ID range.
            System.out.printf("开始导入最后 %d 条记录 (ID: %d 到 %d)%n",
                    (END_ID - START_ID + 1), START_ID, END_ID);
            iterator = features.features();
            int currentBatch = 0;

            while (iterator.hasNext()) {
                SimpleFeature feature = iterator.next();
                Long currentId = (Long) feature.getAttribute("Id");

                // Skip records outside the target ID range (or without an Id).
                if (currentId == null || currentId < START_ID || currentId > END_ID) {
                    continue;
                }

                // Geometry: guard against null — WKBWriter.write(null) would
                // throw an NPE and abort the whole import mid-batch.
                MultiPolygon geom = (MultiPolygon) feature.getAttribute("the_geom");
                if (geom == null) {
                    System.err.println("跳过空几何记录, Id=" + currentId);
                    continue;
                }

                // Surrogate primary key.
                pstmt.setObject(1, UUID.randomUUID());
                // Geometry as WKB, reprojected server-side via ST_GeomFromWKB.
                pstmt.setBytes(2, wkbWriter.write(geom));
                // Original shapefile Id.
                pstmt.setLong(3, currentId);
                // gridcode, with -1 as the sentinel for a missing value.
                Long gridcode = (Long) feature.getAttribute("gridcode");
                pstmt.setLong(4, gridcode != null ? gridcode : -1);

                pstmt.addBatch();
                totalProcessed++;
                currentBatch++;

                // Commit a full batch and report progress.
                if (currentBatch >= BATCH_SIZE) {
                    executeBatch(conn, pstmt, currentBatch);
                    currentBatch = 0;
                    System.out.printf("已导入: %d/%d 条 (%.1f%%)%n",
                            totalProcessed, (END_ID - START_ID + 1),
                            (totalProcessed * 100.0) / (END_ID - START_ID + 1));
                }
            }

            // Flush the final (partial) batch.
            if (currentBatch > 0) {
                executeBatch(conn, pstmt, currentBatch);
            }

            System.out.println("最后批次导入完成!");

            // 5. Verify the final record count against the expected total.
            long dbCount = getRecordCount(conn, TABLE_NAME);
            System.out.println("========================================");
            System.out.printf("数据库现有记录数: %d 条%n", dbCount);
            System.out.printf("本次成功导入记录: %d 条%n", totalProcessed);
            System.out.printf("预计总记录数: %d 条%n", EXISTING_ROWS + totalProcessed);
            System.out.println("========================================");

            if (dbCount == EXISTING_ROWS + totalProcessed) {
                System.out.println("✅ 数据完整性验证通过");
            } else {
                System.out.println("❌ 数据完整性验证失败! 请检查数据库");
            }

            long totalProgramTime = System.currentTimeMillis() - programStart;
            System.out.println("========================================");
            System.out.printf("程序执行完成! 总耗时: %d ms (%.1f 分钟)%n",
                    totalProgramTime, totalProgramTime / 60000.0);
            System.out.println("========================================");

        } catch (Exception e) {
            System.err.println("导入失败: " + e.getMessage());
            e.printStackTrace();
            // Roll back the uncommitted tail; previously committed batches stay.
            try {
                if (conn != null) conn.rollback();
            } catch (SQLException ex) {
                System.err.println("回滚失败: " + ex.getMessage());
            }
        } finally {
            closeResources(iterator, shpDataStore, pstmt, conn);
        }
    }

    /**
     * Executes and commits the pending batch, then logs count/duration/rate.
     *
     * @param conn      active connection (autocommit off); committed here
     * @param pstmt     prepared statement holding the pending batch
     * @param batchSize number of rows added to this batch (for context only)
     * @throws SQLException if the batch execution or commit fails
     */
    private static void executeBatch(Connection conn, PreparedStatement pstmt, int batchSize)
            throws SQLException {
        long start = System.currentTimeMillis();
        int[] results = pstmt.executeBatch();
        conn.commit();
        long duration = System.currentTimeMillis() - start;

        // Count successful inserts. Statement.SUCCESS_NO_INFO (-2) also means
        // success (many drivers return it for batches); only
        // Statement.EXECUTE_FAILED (-3) indicates a failed element.
        int insertedCount = 0;
        for (int result : results) {
            if (result >= 0 || result == Statement.SUCCESS_NO_INFO) {
                insertedCount++;
            }
        }

        // Guard against duration == 0 on sub-millisecond commits.
        double rate = insertedCount * 1000.0 / Math.max(duration, 1);
        System.out.printf("提交批次: %d条, 耗时: %d ms, 速率: %.0f 条/秒%n",
                insertedCount, duration, rate);
    }

    /**
     * Returns the current row count of {@code tableName}, or -1 if the query
     * returns no row. {@code tableName} is concatenated into the SQL, so it
     * must come from trusted code (here: the TABLE_NAME constant only).
     *
     * @param conn      active connection
     * @param tableName trusted table name (never user input)
     * @return row count, or -1 if the count query yields no result
     * @throws SQLException on query failure
     */
    private static long getRecordCount(Connection conn, String tableName) throws SQLException {
        String sql = "SELECT COUNT(*) FROM " + tableName;
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(sql)) {
            if (rs.next()) {
                return rs.getLong(1);
            }
        }
        return -1;
    }

    /**
     * Closes all resources, swallowing (but logging) individual failures so
     * one close error does not prevent the others from running.
     * Rolls back any uncommitted work before closing the connection.
     */
    private static void closeResources(SimpleFeatureIterator iterator, DataStore shpDataStore,
                                       PreparedStatement pstmt, Connection conn) {
        try {
            if (iterator != null) iterator.close();
        } catch (Exception e) {
            System.err.println("关闭iterator失败: " + e.getMessage());
        }
        try {
            if (shpDataStore != null) shpDataStore.dispose();
        } catch (Exception e) {
            System.err.println("关闭shpDataStore失败: " + e.getMessage());
        }
        try {
            if (pstmt != null) pstmt.close();
        } catch (SQLException e) {
            System.err.println("关闭pstmt失败: " + e.getMessage());
        }
        try {
            if (conn != null && !conn.isClosed()) {
                // Discard any uncommitted tail before closing (no-op after a
                // successful run where every batch was committed).
                if (!conn.getAutoCommit()) {
                    conn.rollback();
                }
                conn.close();
            }
        } catch (SQLException e) {
            System.err.println("关闭conn失败: " + e.getMessage());
        }
    }
}