package com.mxnavi5.example.TableSql;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.table.types.DataType;
import org.apache.flink.types.Row;

import java.sql.PreparedStatement;
import java.sql.SQLException;

import static org.apache.flink.table.api.Expressions.$;

public class MyFileOutputTest {

    /**
     * Flink Table API / SQL example: reads sensor CSV records from the local
     * filesystem, registers them as a table, runs a Table-API aggregation and
     * SQL queries over it, and prints the results (print connector plus
     * DataStream print sinks).
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {

        Configuration configuration = new Configuration();
        // Pin the local web UI to a fixed port instead of a random free one.
        configuration.setString("rest.port", "9091");

        // Execution environment with the web UI enabled for local debugging.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(configuration);
        env.setParallelism(1);

        // Table environment in streaming mode.
        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // 2. Connect to the external system, read the data, and register a table.
        String filePath = "E:\\workspace\\flink-example\\src\\main\\resources\\sensor.txt";

        tableEnv.connect(new FileSystem().path(filePath))
                .withFormat(new Csv())
                .withSchema(new Schema()
                        .field("id", DataTypes.STRING())
                        .field("ts", DataTypes.BIGINT())
                        .field("temp", DataTypes.DOUBLE())
                ).createTemporaryTable("inputTable");

        // 3. Transformations.
        Table sensorTable = tableEnv.from("inputTable");

        // Table API: count rows per sensor id. The alias is "cnt" rather than
        // "count" — "count" is a reserved keyword in the expression parser and
        // fails validation when used as a field alias. Uses the typed
        // Expressions API ($) instead of the deprecated string expressions.
        Table aggTable = sensorTable
                .groupBy($("id"))
                .select($("id"), $("id").count().as("cnt"));

        // SQL equivalents of the same queries.
        Table resultSqlTable = tableEnv.sqlQuery(" select id, count(*) from inputTable group by id");
        Table resultSqlTable2 = tableEnv.sqlQuery("select id,ts,temp from inputTable where id='sensor_1'");

        // SQL DDL sink using the print connector; the INSERT below is submitted
        // as its own job by executeSql(), independent of env.execute().
        tableEnv.executeSql("create table sink_print ( id STRING, ts BIGINT,temp DOUBLE ) with ('connector' = 'print' )");
        tableEnv.executeSql("insert into  sink_print select id,ts,temp from inputTable where id='sensor_1'");

        // Append stream: a plain filter never retracts rows.
        tableEnv.toAppendStream(resultSqlTable2, Row.class).print("resultSqlTable2");

        // Retract stream: the grouped aggregation updates counts, so the stream
        // carries (add/retract) flags.
        tableEnv.toRetractStream(aggTable, Row.class).print("table");
        // Same aggregation expressed in SQL; print kept disabled as in the
        // original to avoid duplicate output.
//        tableEnv.toRetractStream(resultSqlTable, Row.class).print("sql");

        env.execute();
    }
}
