package com.edu.flink.table;

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.io.jdbc.JDBCInputFormat;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.io.Serializable;

/**
 * <p>
 * Demo: switching the Flink table data source to MySQL.
 * </p>
 *
 * @author jpge
 * @since 2024-04-23
 */
public class TableJdbc {

    /**
     * Reads every row of the MySQL {@code users} table through a
     * {@link JDBCInputFormat}, wraps the resulting stream in a Table API
     * {@link Table}, and prints the rows to stdout.
     *
     * <p>NOTE(review): the JDBC URL, username, and password are hard-coded
     * below — fine for a local demo, but they should come from configuration
     * (and never be committed) in anything real.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {

        // 1. Set up the streaming execution environment and its Table API bridge.
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment streamTableEnvironment = StreamTableEnvironment.create(streamExecutionEnvironment);

        // 2. Declare the column types the JDBC source will produce.
        //    NOTE(review): assumes `users` has exactly three columns in this
        //    order (string, string, boolean) — confirm against the table schema,
        //    since `select *` below relies on it.
        TypeInformation<?>[] fieldTypes = new TypeInformation<?>[]{
                BasicTypeInfo.STRING_TYPE_INFO,
                BasicTypeInfo.STRING_TYPE_INFO,
                BasicTypeInfo.BOOLEAN_TYPE_INFO
        };
        RowTypeInfo rowTypeInfo = new RowTypeInfo(fieldTypes);

        // 3. Configure the JDBC input format.
        //    NOTE(review): `com.mysql.jdbc.Driver` is the legacy Connector/J 5.x
        //    class; Connector/J 8.x deprecates it in favor of
        //    `com.mysql.cj.jdbc.Driver` (the old name still works but logs a
        //    warning). Left unchanged to avoid breaking a 5.x classpath.
        JDBCInputFormat jdbcInputFormat = JDBCInputFormat.buildJDBCInputFormat()
                .setDrivername("com.mysql.jdbc.Driver")
                .setDBUrl("jdbc:mysql://192.168.15.130:3306/nacos?characterEncoding=utf8")
                .setUsername("root")
                .setPassword("root")
                .setQuery("select * from users")
                .setRowTypeInfo(rowTypeInfo)
                .finish();

        // 4. Create a (bounded) DataStream from the JDBC source.
        DataStreamSource<Row> source = streamExecutionEnvironment.createInput(jdbcInputFormat);

        // 5. Bridge the stream into the Table API.
        Table table = streamTableEnvironment.fromDataStream(source);

        // 6. Convert back to an append-only stream of Rows and print each one
        //    with the given prefix; parallelism 1 keeps the output ordered.
        streamTableEnvironment.toAppendStream(table, Row.class).print("=============").setParallelism(1);

        // 7. Trigger job execution (Flink programs are lazy until execute()).
        streamExecutionEnvironment.execute();
    }
}
