package com.jiayuan.cn.energy.test.one;

import com.jiayuan.cn.energy.test.AppFlink;
import com.jiayuan.cn.energy.test.ok.MqttConsumer;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

@Slf4j
public class FromData {

    /**
     * Minimal Flink streaming job: builds a bounded stream from an in-memory
     * collection of names, maps each name to a {@code ("name", value)} tuple,
     * logs each element, and prints the resulting stream to stdout.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Bounded source from an in-memory collection (env.fromData, Flink 1.19+;
        // replaces the deprecated fromCollection).
        Collection<String> names = Arrays.asList("Alice", "Bob", "Twn");
        DataStreamSource<String> stream = env.fromData(names);

        // Anonymous class rather than a lambda so Flink's type extractor can
        // infer Tuple2<String, String> without an explicit .returns(...) hint.
        DataStream<Tuple2<String, String>> dataStreamOut = stream
                .flatMap(new FlatMapFunction<String, Tuple2<String, String>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, String>> out) throws Exception {
                        log.info("value: " + value);
                        out.collect(Tuple2.of("name", value));
                    }
                })
                .setParallelism(1); // number of parallel subtasks for this operator

        dataStreamOut.print();
        env.execute();
    }
}