

package org.example.flinksql.test;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.example.flinksql.test.custom.TransDatetimeFunc;


/**
 * Demo: stream read -> transform -> write.
 * Requires Kafka installed locally: start the broker, create the topic,
 * and produce a few test messages before running.
 * Pipeline: kafka -> trans -> jdbc
 */
public class TransformStreamData {

	public static void main(String[] args) throws Exception {
		// Build a streaming table environment on the Blink planner.
		EnvironmentSettings settings = EnvironmentSettings.newInstance()
				.useBlinkPlanner()
				.inStreamingMode()
				.build();
		TableEnvironment tableEnv = TableEnvironment.create(settings);

		// The Kafka broker must be configured with
		// advertised.listeners=PLAINTEXT://under.azure:9092, and the hosts file
		// must map "under.azure" to the broker's IP address.
		tableEnv.executeSql("CREATE TABLE t_records (" +
				"username STRING," +
				"action STRING," +
				"create_time BIGINT) WITH (" +
				"'connector' = 'kafka'," +
				"'topic' = 'sync_t_records'," +
				"'properties.bootstrap.servers' = 'under.azure:9092'," +
				"'properties.group.id' = 'flink-test'," +
				"'scan.startup.mode' = 'earliest-offset'," +
				"'format' = 'json'" +
				")");

		// TODO: when id is an auto-increment primary key, the sink table
		// does not need to declare a primary key.
		tableEnv.executeSql("CREATE TABLE t_logs (" +
				"username STRING," +
				"action STRING," +
				"create_time TIMESTAMP) WITH (" +
				"'connector' = 'jdbc'," +
				"'url' = 'jdbc:mysql://under.azure:3306/flink-test?useSSL=false'," +
				"'table-name' = 't_logs'," +
				"'username' = 'root'," +
				"'password' = '123456'" +
				")");

		// Register the UDF used in the query below; presumably it converts the
		// BIGINT create_time into a TIMESTAMP — confirm in TransDatetimeFunc.
		tableEnv.createFunction("transDT", TransDatetimeFunc.class);

		String transformSql =
				"select username,action,transDT(create_time) as create_time from t_records";
		Table transformed = tableEnv.sqlQuery(transformSql);
		// Do NOT print() the table here; doing so prevents the subsequent
		// executeInsert from writing any rows.
//		transformed.execute().print();
		transformed.executeInsert("t_logs").print();
	}
}
