package com.lxj.app.dws;

import com.lxj.utils.MyKafkaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import ru.yandex.clickhouse.ClickHouseConnection;
import ru.yandex.clickhouse.ClickHouseDataSource;
import ru.yandex.clickhouse.settings.ClickHouseProperties;

import java.sql.PreparedStatement;
import java.time.Duration;

/**
 * DWS-layer product statistics job.
 *
 * <p>Aggregates per-SKU metrics (UV, paying users, pay conversion rate, order count, GMV)
 * over 10-second tumbling event-time windows using Flink SQL, then writes each result row
 * to ClickHouse through a custom JDBC {@code RichSinkFunction}.
 */
public class DwsProductStatsWithCustomSink {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // Expire idle per-key state after 1 hour so unbounded GROUP BY state cannot grow forever.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofHours(1));

        // ... Kafka source-table creation code is the same as in the sibling job ...

        // Aggregation view: one row per (10s window, sku_id); the CASE guards the
        // conversion-rate division against a zero UV denominator.
        String productStatsViewSQL = "CREATE TEMPORARY VIEW product_stats_view AS\n" +
                "SELECT\n" +
                "  DATE_FORMAT(TUMBLE_START(rt, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') AS stt,\n" +
                "  DATE_FORMAT(TUMBLE_END(rt, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') AS edt,\n" +
                "  sku_id,\n" +
                "  MAX(sku_name) AS sku_name,\n" +
                "  COUNT(DISTINCT user_id) AS uv,\n" +
                "  COUNT(DISTINCT CASE WHEN order_id IS NOT NULL THEN user_id END) AS pay_user_count,\n" +
                "  CASE \n" +
                "    WHEN COUNT(DISTINCT user_id) = 0 THEN 0.0\n" +
                "    ELSE ROUND(COUNT(DISTINCT CASE WHEN order_id IS NOT NULL THEN user_id END) * 100.0 / COUNT(DISTINCT user_id), 2)\n" +
                "  END AS pay_convert_rate,\n" +
                "  COUNT(DISTINCT order_id) AS order_num,\n" +
                "  COALESCE(SUM(split_total_amount), 0) AS gmv,\n" +
                "  UNIX_TIMESTAMP() * 1000 AS ts\n" +
                "FROM combined_data\n" +
                "GROUP BY TUMBLE(rt, INTERVAL '10' SECOND), sku_id";

        tableEnv.executeSql(productStatsViewSQL);

        // Convert to a DataStream and attach the custom sink. A tumbling-window aggregation
        // emits append-only rows, so an append stream is safe here.
        Table resultTable = tableEnv.sqlQuery("SELECT * FROM product_stats_view");

        tableEnv.toAppendStream(resultTable, org.apache.flink.types.Row.class)
                .addSink(new ClickHouseSinkFunction());

        env.execute("DwsProductStatsWithCustomSink");
    }

    /**
     * Sink that inserts one aggregated row per {@link #invoke} into the
     * {@code product_stats} ClickHouse table via a prepared statement.
     *
     * <p>Column order matches the view: stt, edt, sku_id, sku_name, uv,
     * pay_user_count, pay_convert_rate, order_num, gmv, ts.
     *
     * <p>NOTE(review): ClickHouse performs poorly with single-row inserts; consider
     * buffering with addBatch()/executeBatch() if throughput becomes a problem.
     */
    public static class ClickHouseSinkFunction extends RichSinkFunction<org.apache.flink.types.Row> {
        private transient ClickHouseConnection connection;
        private transient PreparedStatement statement;

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            ClickHouseProperties properties = new ClickHouseProperties();
            // NOTE(review): credentials and the JDBC URL are hard-coded; externalize to
            // configuration before running in production.
            properties.setUser("default");
            properties.setPassword("");
            ClickHouseDataSource dataSource = new ClickHouseDataSource("jdbc:clickhouse://hadoop102:8123/gmall_dws", properties);
            connection = dataSource.getConnection();

            String sql = "INSERT INTO product_stats (stt, edt, sku_id, sku_name, uv, pay_user_count, pay_convert_rate, order_num, gmv, ts) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
            statement = connection.prepareStatement(sql);
        }

        @Override
        public void invoke(org.apache.flink.types.Row value, Context context) throws Exception {
            // MAX(sku_name) — and a NULL sku_id group key — can legally be NULL, so every
            // field is read null-safely instead of calling toString()/unboxing blindly,
            // which would throw an NPE and fail the job.
            statement.setString(1, asString(value.getField(0)));
            statement.setString(2, asString(value.getField(1)));
            statement.setString(3, asString(value.getField(2)));
            statement.setString(4, asString(value.getField(3)));
            statement.setLong(5, asLong(value.getField(4)));
            statement.setLong(6, asLong(value.getField(5)));
            statement.setBigDecimal(7, asDecimal(value.getField(6)));
            statement.setLong(8, asLong(value.getField(7)));
            statement.setBigDecimal(9, asDecimal(value.getField(8)));
            statement.setLong(10, asLong(value.getField(9)));

            statement.executeUpdate();
        }

        @Override
        public void close() throws Exception {
            // Nested try/finally so a failure closing the statement cannot leak the
            // connection, and super.close() always runs.
            try {
                if (statement != null) statement.close();
            } finally {
                try {
                    if (connection != null) connection.close();
                } finally {
                    super.close();
                }
            }
        }

        /** Null-safe string conversion; preserves SQL NULL instead of throwing NPE. */
        private static String asString(Object o) {
            return o == null ? null : o.toString();
        }

        /** Null-safe numeric read; tolerates Integer/Long boxing from the Row. */
        private static long asLong(Object o) {
            return o == null ? 0L : ((Number) o).longValue();
        }

        /** Null-safe decimal read; NULL aggregates become 0. */
        private static java.math.BigDecimal asDecimal(Object o) {
            return o == null ? java.math.BigDecimal.ZERO : new java.math.BigDecimal(o.toString());
        }
    }
}