// NOTE(review): this whole file is commented-out dead code — prefer deleting it (it
// remains recoverable from version control). Even if re-enabled it would not compile:
// ScalarFunction is imported twice (two duplicate import lines below), and
// org.apache.flink.table.types.utils.LiteralTypeInformation does not exist in Flink.
//package com.atguigu;
//
//import org.apache.flink.api.common.typeinfo.TypeInformation;
//import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
//import org.apache.flink.streaming.connectors.cdc.mysql.MySQLSource;
////import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
//import org.apache.flink.table.api.EnvironmentSettings;
//import org.apache.flink.table.api.Table;
//import org.apache.flink.table.api.TableResult;
//import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
//import org.apache.flink.table.functions.ScalarFunction;
//import org.apache.flink.types.Row;
//import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
//import org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks;
//import org.apache.flink.streaming.api.watermark.Watermark;
//import org.apache.flink.streaming.api.datastream.DataStream;
//import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
//import org.apache.flink.streaming.util.serialization.JSONKeyValueDeserializationSchema;
//
//import java.sql.ResultSet;
//import java.util.Properties;
//import java.util.concurrent.TimeUnit;
//import org.apache.flink.table.api.DataTypes;
//import org.apache.flink.table.api.Expressions;
//import org.apache.flink.table.api.Schema;
//import org.apache.flink.table.api.TableSchema;
//import org.apache.flink.table.api.ValidationException;
//import org.apache.flink.table.functions.FunctionDefinition;
//import org.apache.flink.table.functions.FunctionIdentifier;
//import org.apache.flink.table.functions.ScalarFunction;
//import org.apache.flink.table.types.DataType;
//import org.apache.flink.table.types.inference.TypeInference;
//import org.apache.flink.table.types.logical.LogicalType;
//import org.apache.flink.table.types.logical.LogicalTypeRoot;
//import org.apache.flink.table.types.logical.RowType;
//import org.apache.flink.table.types.utils.DataTypeUtils;
//import org.apache.flink.table.types.utils.LiteralTypeInformation;
//import org.apache.flink.table.utils.TableSchemaUtils;
// NOTE(review): entire class is disabled (commented out). It mixes real Flink APIs with
// invented ones and will not compile if uncommented. Concrete issues are annotated
// inline below; recommend deleting the file rather than keeping it as a comment block.
//public class FlinkTableJoinExample {
//    public static void main(String[] args) throws Exception {
//        // Set up the Flink execution environment
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
//        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings);
//        // Set up the MySQL CDC source
// NOTE(review): there is no org.apache.flink.streaming.connectors.cdc.mysql.MySQLSource.
// The MySQL CDC source ships with the flink-cdc project (e.g.
// com.ververica.cdc.connectors.mysql.source.MySqlSource) and takes a
// DebeziumDeserializationSchema — not the JDBC-ResultSet-style "Deserializer" used below.
//        MySQLSource.Builder builder = MySQLSource.builder()
//                .hostname("localhost")
//                .port(3306)
//                .username("root")
//                .password("password")
//                .databaseList("test")
//                .tableList("test_table")
//                .deserializer(new MyDeserializer());
//
//        // Create a Flink Table from the CDC source
// NOTE(review): builder.build() yields a SourceFunction, not a DataStream — it must be
// wrapped via env.addSource(...) before fromDataStream(...). The comma-separated string
// field expression form ("id, name, ...") is also the long-deprecated expression API.
//        Table mysqlTable = tEnv.fromDataStream(builder.build(), "id, name, age, buy_time.rowtime");
//
//
//        // Set up the Kafka source
//        Properties properties = new Properties();
//        properties.setProperty("bootstrap.servers", "localhost:9092");
//        properties.setProperty("group.id", "test-group");
// NOTE(review): generic-type mismatch — JSONKeyValueDeserializationSchema deserializes to
// ObjectNode, so FlinkKafkaConsumer<String> does not compile. FlinkKafkaConsumer itself is
// deprecated in current Flink in favor of KafkaSource.
//        FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>("test-topic",
//                new JSONKeyValueDeserializationSchema(false),
//                properties);
//
//        // Create a Flink Table from the Kafka source
// NOTE(review): AssignerWithPunctuatedWatermarks is deprecated since Flink 1.11 — use
// WatermarkStrategy. The watermark expression "10 seconds" is also not valid SQL; it
// should be something like "pay_time - INTERVAL '10' SECOND". The declared schema
// (name/pay_time) cannot be derived from the consumer's actual element type either.
//        Table kafkaTable = tEnv.fromDataStream(env.addSource(kafkaConsumer)
//                        .name("Kafka Source")
//                        .assignTimestampsAndWatermarks(new CustomTimestampExtractor()),
//                Schema.newBuilder()
//                        .column("name", DataTypes.STRING())
//                        .column("pay_time", DataTypes.TIMESTAMP(3)).watermark("pay_time", "10 seconds")
//                        .build());
//
//        // Join the two tables on the "name" column
// NOTE(review): "name = name" is a self-comparison and ambiguous across the two inputs
// (both tables have a "name" column), and MAX(...) in select without a groupBy is invalid.
// String-based expressions were removed in newer Flink — the Expression DSL ($("name"))
// with proper aliasing and .groupBy(...) is required.
//        Table resultTable = mysqlTable.join(kafkaTable).where("name = name")
//                .select("name, MAX(buy_time) as max_buy_time");
//
//
//        // Print the result table
// NOTE(review): this validation section cannot compile — TypeInference.newBuilder() has
// no inputColumns(...) method and TypeInference has no getOutputType(); there is no
// TableSchemaUtils.validateSchema(LogicalType, String) either. TableSchema itself is
// deprecated in favor of ResolvedSchema.
//        TableSchema schema = resultTable.getSchema();
//        TypeInference typeInference = TypeInference.newBuilder().inputColumns(schema.getFieldNames(), schema.getFieldDataTypes()).build();
//        DataType producedDataType = typeInference.getOutputType();
//        TableSchemaUtils.validateSchema(producedDataType.getLogicalType(), "Result schema");
// NOTE(review): the function is registered after the query is already defined and is
// never referenced by it — dead registration.
//        tEnv.createTemporaryFunction("toString", new ToStringFunction());
//        TableResult tableResult = resultTable.execute();
//        tableResult.print();
//    }
//
//    // Custom deserializer to map MySQL columns to Flink fields
// NOTE(review): MySQLSource.Deserializer does not exist. CDC sources deserialize binlog
// SourceRecords via DebeziumDeserializationSchema; they never hand out a java.sql.ResultSet.
//    private static class MyDeserializer implements MySQLSource.Deserializer<Row> {
//        @Override
//        public void deserialize(Row record, ResultSet resultSet) throws Exception {
//            record.setField(0, resultSet.getLong("id"));
//            record.setField(1, resultSet.getString("name"));
//            record.setField(2, resultSet.getInt("age"));
//            record.setField(3, resultSet.getTimestamp("buy_time").toInstant());
//        }
//        @Override
// NOTE(review): getProducedType must return TypeInformation<Row>, but DataTypes.ROW(...)
// builds a DataType, and neither DataType nor LogicalType has a toInternalType() method —
// this does not compile.
//        public TypeInformation<Row> getProducedType() {
//            return DataTypes.ROW(
//                    DataTypes.FIELD("id", DataTypes.BIGINT()),
//                    DataTypes.FIELD("name", DataTypes.STRING()),
//                    DataTypes.FIELD("age", DataTypes.INT()),
//                    DataTypes.FIELD("buy_time", DataTypes.TIMESTAMP(3)).bridgedTo(java.time.Instant.class)
//            ).getLogicalType().toInternalType();
//        }
//    }
//    // Custom timestamp extractor for the Kafka source
//    private static class CustomTimestampExtractor implements AssignerWithPunctuatedWatermarks<Row> {
//        @Override
//        public long extractTimestamp(Row element, long previousElementTimestamp) {
// NOTE(review): getField(1) is the String "name" field per the row layout above, and
// getField returns Object — returning it as long does not compile. Presumably the
// pay_time field index with an explicit cast/conversion was intended.
//            return element.getField(1);
//        }
//        @Override
//        public Watermark checkAndGetNextWatermark(Row lastElement, long extractedTimestamp) {
//            return new Watermark(extractedTimestamp - TimeUnit.SECONDS.toMillis(10));
//        }
//    }
//    // Custom scalar function to convert a timestamp to a string
// NOTE(review): ScalarFunction has no getReturnType(DataType[]), getIdentifier(), or
// getTypeInference(DataType[]) methods to @Override — the real hook is
// getTypeInference(DataTypeFactory). InputTypeStrategies/TypeStrategies are top-level
// classes in org.apache.flink.table.types.inference, not nested in TypeInference, and
// LiteralTypeInformation does not exist. None of these overrides compile.
//    public static class ToStringFunction extends ScalarFunction {
//        public String eval(java.time.Instant timestamp) {
//            return timestamp.toString();
//        }
//        @Override
//        public DataType getReturnType(DataType[] signature) {
//            LogicalType timestampType = signature[0].getLogicalType();
//            if (timestampType.getTypeRoot() != LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE) {
//                throw new ValidationException("Timestamp expected.");
//            }
//            return DataTypes.STRING();
//        }
//        @Override
//        public FunctionIdentifier getIdentifier() {
//            return new FunctionIdentifier("toString", "default");
//        }
//        @Override
//        public boolean isDeterministic() {
//            return true;
//        }
//        @Override
//        public TypeInference getTypeInference(DataType[] signature) {
//            return TypeInference.newBuilder()
//                    .inputTypeStrategy(TypeInference.InputTypeStrategies.explicit(
//                            LiteralTypeInformation.of(signature[0].getLogicalType())))
//                    .outputTypeStrategy(TypeInference.OutputTypeStrategies.explicit(DataTypes.STRING()))
//                    .build();
//        }
//    }
//}