package com.atguigu.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.bean.DwsTest;
import com.atguigu.gmall.realtime.utils.ClickHouseUtil;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import ru.yandex.clickhouse.ClickHouseConnection;
import ru.yandex.clickhouse.ClickHouseDataSource;
import ru.yandex.clickhouse.ClickHouseStatement;
import ru.yandex.clickhouse.settings.ClickHouseProperties;
import ru.yandex.clickhouse.settings.ClickHouseQueryParam;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

/*
 * Basic ClickHouse read operations are no different from any other JDBC driver.
 */
public class ClickhouseExample {

    /** Kafka bootstrap servers for the source topic. */
    private static final String KAFKA_BROKERS = "localhost:9092";
    /** Kafka consumer group id for this job. */
    private static final String GROUP_ID = "visitor_stats_app";
    /** Kafka topic carrying the DWS test records. */
    private static final String TEST_TOPIC = "dws_test";

    /**
     * Entry point: consumes JSON strings from the Kafka topic, maps each
     * record to a {@link DwsTest} bean and sinks the stream into the
     * ClickHouse table {@code t_order_mt}.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // TODO 0. Basic environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Exactly-once checkpoints every 5 s; a checkpoint that takes longer
        // than 60 s is aborted.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000);

        // TODO 1. Read data from the Kafka pv/uv/jump detail topic
        FlinkKafkaConsumer<String> testSource =
                MyKafkaUtil.getKafkaSource(KAFKA_BROKERS, TEST_TOPIC, GROUP_ID);
        DataStreamSource<String> testDs = env.addSource(testSource);

        // Expected JSON fields: id, sku_id, total_amount, create_time
        SingleOutputStreamOperator<DwsTest> dwsStatsDS = testDs.map(json -> {
            JSONObject jsonObject = JSON.parseObject(json);
            return new DwsTest(
                    jsonObject.getInteger("id"),
                    jsonObject.getString("sku_id"),
                    jsonObject.getBigDecimal("total_amount"),
                    jsonObject.getString("create_time"));
        });

        dwsStatsDS.print(); // debug output; remove or downgrade in production
        dwsStatsDS.addSink(ClickHouseUtil.getJdbcSink("insert into t_order_mt values(?,?,?,?)"));

        env.execute();
    }

    /**
     * Stand-alone JDBC read example against ClickHouse (not invoked by
     * {@link #main}; kept as reference code): runs a plain SELECT over
     * {@code t_order_mt} and prints the first column of each row,
     * demonstrating per-query session parameters.
     *
     * @throws SQLException if the connection or the query fails
     */
    private static void selectTest() throws SQLException {
        String url = "jdbc:clickhouse://192.168.0.102:8123/default";
        ClickHouseProperties properties = new ClickHouseProperties();
        properties.setSessionId("default-session-id");

        ClickHouseDataSource dataSource = new ClickHouseDataSource(url, properties);
        String sql = "select * from t_order_mt";

        // Per-query parameters override the data-source-level defaults
        // (here: a different session id for this single query).
        Map<ClickHouseQueryParam, String> additionalDBParams = new HashMap<>();
        additionalDBParams.put(ClickHouseQueryParam.SESSION_ID, "new-session-id");

        // try-with-resources closes result set, statement and connection
        // in reverse order even if the query throws.
        try (ClickHouseConnection conn = dataSource.getConnection();
             ClickHouseStatement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(sql, additionalDBParams)) {
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
        }
    }
}
