package com.atguigu.chapter11;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink SQL demo: registers a Kafka topic as a source table and a MySQL
 * table as a JDBC sink table via SQL DDL, then runs an INSERT ... SELECT
 * that copies rows with {@code vc > 3} from Kafka into MySQL.
 *
 * @author cjp
 * @version 1.0
 * @date 2021/3/12 9:30
 */
public class Flink12_SQL_KafkaToMySQL {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Connect to external systems purely through SQL DDL:
        // Kafka acts as the source, MySQL (via the JDBC connector) as the sink.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register a Kafka topic as the source table.
        tableEnv.executeSql("create table sensor_source (id string,ts bigint,vc int) with (" +
                "  'connector' = 'kafka'," +
                "  'topic' = 'topic_source'," +
                "  'properties.bootstrap.servers' = 'hadoop102:9092'," +
                "  'properties.group.id' = 'testGroup'," +
                "  'scan.startup.mode' = 'earliest-offset'," +
                "  'format' = 'csv')");

        // Register a MySQL table as the sink table (the original comment
        // mistakenly described this DDL as a Kafka topic).
        // NOTE(review): credentials are hard-coded in plain text; move them to
        // configuration (e.g. ParameterTool / environment variables) for
        // anything beyond a classroom demo.
        tableEnv.executeSql("create table mysql_sink (id string,ts bigint,vc int) with (" +
                "   'connector' = 'jdbc'," +
                "   'url' = 'jdbc:mysql://hadoop102:3306/test'," +
                "   'table-name' = 'sensor'," +
                "   'username'='root'," +
                "   'password'='000000')");

        // executeSql(INSERT ...) submits the job asynchronously and returns a
        // TableResult immediately. await() blocks this client process until the
        // job terminates, so a local mini-cluster run is not torn down the
        // moment main() returns.
        tableEnv.executeSql("insert into mysql_sink select * from sensor_source where vc > 3")
                .await();

        // env.execute() is intentionally NOT called: for Table/SQL pipelines,
        // executeSql() already submits the job.
    }
}


