package api.tableapi;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Json;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

/**
 * Reads sensor records from Kafka (source registered via SQL DDL)
 * and writes them to MySQL over JDBC (sink registered via SQL DDL).
 */
public class TableTest7_jdbc2 {

    public static void main(String[] args) throws Exception {
        // 1. Create the streaming environment and its table environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Kafka source settings.
        String topicFrom = "myTest";
        String kafkaServer = "192.168.36.130:9092,192.168.36.129:9092,192.168.36.128:9092";
        String kafkaFormat = "json";

        // MySQL sink settings.
        // NOTE(review): credentials are hard-coded for this demo; in real code
        // load them from configuration or environment variables instead.
        String db_url = "jdbc:mysql://192.168.36.129:3306/test?useUnicode=true&characterEncoding=UTF-8&useSSL=false";
        String db_driver = "com.mysql.cj.jdbc.Driver";
        String db_username = "root";
        String db_password = "root";
        String db_table = "flink_test";

        // 2. Register the Kafka source table via DDL (legacy "connector.*" properties).
        // StringBuilder instead of StringBuffer: this is single-threaded code,
        // so StringBuffer's per-call synchronization is pure overhead.
        StringBuilder sb1 = new StringBuilder();
        sb1.append("CREATE TABLE inputTable (");
        sb1.append("id STRING, `timestamp` BIGINT, temp DOUBLE");
        sb1.append(") WITH (");
        sb1.append("'connector.type' = 'kafka', ");
        sb1.append("'connector.version' = 'universal', ");
        sb1.append("'connector.topic' = '").append(topicFrom).append("', ");
        sb1.append("'connector.properties.bootstrap.servers' = '").append(kafkaServer).append("', ");
        sb1.append("'format.type' = '").append(kafkaFormat).append("'");
        sb1.append(")");
        System.out.println("inputTable SQL: " + sb1.toString());
        tableEnv.executeSql(sb1.toString());

        // 3. Register the JDBC sink table via DDL.
        StringBuilder sb2 = new StringBuilder();
        sb2.append("CREATE TABLE outputTable (");
        sb2.append("id varchar(255), temp double");
        sb2.append(") WITH (");
        sb2.append("'connector.type' = 'jdbc', ");
        sb2.append("'connector.url' = '").append(db_url).append("', ");
        sb2.append("'connector.table' = '").append(db_table).append("', ");
        sb2.append("'connector.driver' = '").append(db_driver).append("', ");
        sb2.append("'connector.username' = '").append(db_username).append("', ");
        sb2.append("'connector.password' = '").append(db_password).append("', ");
        sb2.append("'connector.write.flush.max-rows'   = '1'");
        sb2.append(")");
        System.out.println("outputTable SQL: " + sb2.toString());
        tableEnv.executeSql(sb2.toString());

        // 4. Submit the INSERT pipeline.
        // Option A: Table API insert
//        //Table transactions = tableEnv.from("inputTable");
//        Table transactions = tableEnv.sqlQuery("select id, temp from inputTable");
//        transactions.executeInsert("outputTable");
        // Option B: plain SQL INSERT.
        // executeSql submits the job asynchronously and returns a TableResult;
        // without await() the client main() would exit immediately, detaching
        // from the streaming job and hiding any failures from this process.
        tableEnv.executeSql("insert into outputTable(id,temp) select id, temp from inputTable")
                .await();
    }

}
