package com.myflink.day10;

import com.myflink.bean.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.OldCsv;
import org.apache.flink.table.descriptors.Schema;

/**
 * @author Shelly An
 * @create 2020/9/27 9:34
 */
/**
 * Demo: register a DataStream as a Table, declare a filesystem sink table via the
 * legacy connector/descriptor API, and copy the stream into it with SQL.
 *
 * <p>Pipeline: read sensor lines from {@code input/sensor-data.log}, parse them into
 * {@link WaterSensor} records, assign bounded-out-of-orderness watermarks (3 s), then
 * {@code INSERT INTO} a CSV file sink at {@code output/flink.txt}.
 */
public class SQL_Create {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Read and parse the input: each line is "id,ts,vc" (CSV).
        DataStreamSource<String> socketDS = env.readTextFile("input/sensor-data.log");
        SingleOutputStreamOperator<WaterSensor> sensorDS = socketDS
                .map(new MapFunction<String, WaterSensor>() {
                    @Override
                    public WaterSensor map(String value) throws Exception {
                        String[] datas = value.split(",");
                        return new WaterSensor(datas[0], Long.valueOf(datas[1]), Integer.valueOf(datas[2]));
                    }
                })
                .assignTimestampsAndWatermarks(
                        // Allow events up to 3 seconds late; ts is in seconds, Flink wants millis.
                        new BoundedOutOfOrdernessTimestampExtractor<WaterSensor>(Time.seconds(3)) {
                            @Override
                            public long extractTimestamp(WaterSensor element) {
                                return element.getTs() * 1000L;
                            }
                        }
                );

        /*---------------------------------------------------------------------------------------*/
        // 1. Create the table execution environment.
//        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useOldPlanner() // legacy (pre-Blink) planner
                //.useBlinkPlanner()    // Blink planner alternative
                .inStreamingMode() // streaming is the default; batch is also possible
                .build();

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // 2. Convert the DataStream into a Table. Without explicit fields the POJO
        //    attribute names would be used.
        // Option 1: convert the stream directly into a Table object.
        Table table = tableEnv.fromDataStream(sensorDS, "id,ts,vc");
        // Option 2: register a temporary view and look it up by name.
        tableEnv.createTemporaryView("sensorTable", sensorDS, "id,ts,vc");
        Table sensorTable = tableEnv.from("sensorTable");

        // 3. Declare the sink table backed by the local filesystem.
        tableEnv
                // Filesystem connector; Kafka, Elasticsearch, etc. work the same way.
                .connect(new FileSystem().path("output/flink.txt"))
                // Storage format. The newer Csv format requires an extra dependency;
                // OldCsv ships with flink-table.
                .withFormat(new OldCsv().fieldDelimiter(";"))
                // Sink schema: column names and types. Names kept consistent with the
                // source fields (insertion matches by position/type, but consistent
                // names avoid confusion and the reserved word "timestamp").
                .withSchema(new Schema()
                        .field("id", DataTypes.STRING())
                        .field("ts", DataTypes.BIGINT())
                        .field("vc", DataTypes.INT())
                )
                .createTemporaryTable("fsTable");

        // Look up the sink table by its registered name.
        // NOTE: table names are case-sensitive — must match "fsTable" exactly
        // (the original "fstable" lookup failed at runtime).
        Table fsTable = tableEnv.from("fsTable");

        // Pipe the stream-backed table into the filesystem-backed sink table.
        tableEnv.sqlUpdate("INSERT INTO fsTable SELECT * FROM sensorTable");

        env.execute();
    }
}
