/*
package com.ruoyi.system.flink;

import lombok.Builder;
import lombok.Data;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.*;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.sql.*;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;


// Demo of the Flink DataStream API: environment setup, sources (collection,
// file, JDBC, Kafka) and transformation operators.
// NOTE(review): the entire file is wrapped in block comments (disabled code).
public class DataStreamAPI {

    StreamExecutionEnvironment env;   // shared execution environment, created lazily by getEnvironment()
    DataStreamSource<String> stream0; // comma-separated word lines, populated by getDataStream()
    DataStreamSource<Integer> stream1; // small integer sample, populated by getDataStream()
    DataStreamSource<Integer> stream2; // second integer sample, populated by getDataStream()

    public static void main(String[] args) {
        // intentionally empty - the demo methods are meant to be invoked individually
    }

    */
/**
     * StreamExecutionEnvironment is the foundation of every Flink program.
     * @return
     *//*

    // Returns the shared StreamExecutionEnvironment, the entry point of every
    // Flink program. The environment is created lazily on first call and cached
    // in the 'env' field.
    //
    // BUG FIX: the original constructed throwaway local/remote environments and
    // then returned null, so every caller received null; it now returns the
    // cached environment.
    //
    // Alternative factory methods, kept here for reference:
    //   StreamExecutionEnvironment.createLocalEnvironment()
    //       - always runs in the local JVM
    //   StreamExecutionEnvironment.createRemoteEnvironment(host, port, jarFiles)
    //       - submits the job to a remote cluster
    public StreamExecutionEnvironment getEnvironment(){
        if(env == null){
            // getExecutionEnvironment() picks local or cluster execution
            // automatically depending on the invocation context.
            env = StreamExecutionEnvironment.getExecutionEnvironment();
        }
        return env;
    }

    */
/**
     * Sets the runtime execution mode.
     *//*

    // Demonstrates the three runtime execution modes. Each call overwrites the
    // previous setting, so only the last one (AUTOMATIC) is actually effective.
    public void setRunTimeMode(){
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);// stream processing: records handled one by one, intermediate results emitted, e.g. (java,1)...(java,2)
        env.setRuntimeMode(RuntimeExecutionMode.BATCH);// batch processing: no intermediate results, only the final (java,2)
        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);// picks streaming or batch automatically depending on whether the input is an unbounded or bounded stream
    }

    // Demonstrates the different ways of creating a DataStream source: text
    // files (local file, directory, HDFS), in-memory collections, a custom
    // JDBC source and a Kafka consumer. Populates the stream0/1/2 fields that
    // transform() operates on.
    public void getDataStream(){
        String localFile = "";// single local file
        DataStreamSource<String> localSource = env.readTextFile(localFile);
        String localDir = "";// local directory
        DataStreamSource<String> localDirSource = env.readTextFile(localDir);
        String remoteFile ="hdfs://12.10.10.1:9000/root/input/test.txt";// HDFS data source
        DataStreamSource<String> remoteSource = env.readTextFile(remoteFile);

        // bounded in-memory collections as sources
        List<String> list = List.of("nacos,java,php", "naocs,python,java", "nacos,spring,mybatis", "nacos,sentinel,gateway");
        stream0 = env.fromCollection(list);
        List<Integer> list1 = List.of(1, 2, 3, 4, 5);
        stream1 = env.fromCollection(list1);
        List<Integer> list2 = List.of(10,5,1,20,2);
        stream2 = env.fromCollection(list2);

        env.addSource(new MyMysqlSource());// custom source reading from MySQL

        // Kafka connection parameters
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "172.16.10.159:9092");
        properties.setProperty("group.id", "flink-group");
        String topic = "test";

        // Kafka source
        // NOTE(review): FlinkKafkaConsumer is deprecated in recent Flink
        // releases in favor of KafkaSource - confirm the Flink version in use.
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<String>(topic, new SimpleStringSchema(), properties);
        env.addSource(consumer);// use Kafka as the data source


    }

    @Builder
    public class User {
        private Integer userId;
        private String userName;
        private String userRealName;
        private String userPwd;
        private String userTel;
        private String userEmail;
        private Integer userStatus;
        private Date userCreateTime;
        private Date userUpdateTime;
    }


    private class MyMysqlSource extends RichParallelSourceFunction<User> {
        private boolean close = false;

        @Override
        public void run(SourceContext<User> out) throws Exception {
            String url = "jdbc:mysql://192.168.100.88:3306/newframe?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&useSSL=false&allowMultiQueries=true&rewriteBatchedStatements=true";
            String sql = "select * from user_info";
            Connection conn = null;
            PreparedStatement ps = null;
            ResultSet rs = null;
            try {
                conn = DriverManager.getConnection(url, "root", "root");
                ps = conn.prepareStatement(sql);
            } catch (SQLException e) {
                e.printStackTrace();
            }

            while (!close) {
                rs = ps.executeQuery();
                while (rs.next()) {
                    Integer userId = rs.getInt("user_id");
                    String userName = rs.getString("user_name");
                    String userRealName = rs.getString("user_real_name");
                    User user = User.builder()
                        .userId(userId)
                        .userName(userName)
                        .userRealName(userRealName).build();
                    //收集数据
                    out.collect(user);
                }
                Thread.sleep(5000);
                cancel();
            }
            close(conn, ps, rs);
        }

        @Override
        public void cancel() {
            close = true;
        }

        public void close(Connection conn, PreparedStatement ps, ResultSet rs) throws Exception {
            if (conn != null) {
                conn.close();
            }
            if (ps != null) {
                ps.close();
            }
            if (rs != null) {
                rs.close();
            }
        }
    }

//    --------------- Transformation operators -------------

    // Demonstrates the main DataStream transformation operators on the demo
    // streams (stream0/1/2 must be populated by getDataStream() first):
    // flatMap/map/filter/keyBy/reduce, connect, union, and side outputs.
    public void transform(){
        stream0.flatMap(new RichFlatMapFunction<String, String>() {
            @Override
            public void flatMap(String value, Collector<String> out) throws Exception {
                // split each comma-separated line into individual words
                Arrays.stream(value.split(",")).forEach(v -> out.collect(v));
            }
        })
        .map(new RichMapFunction<String, String>() {
            @Override
            public String map(String value) throws Exception {
                return value.toUpperCase();
            }
        })
        .filter(new RichFilterFunction<String>() {
            @Override
            public boolean filter(String value) throws Exception {
                // drop the word "JAVA", keep everything else
                if(!"JAVA".equals(value))return true;
                return false;
            }
        })
        .keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String value) throws Exception {
                return value;
            }
        })
//      other keyed aggregations also exist: min, minBy, maxBy, sum, ...
        .reduce((v1,v2) ->v1+v2)
        .print();

//       connect joins two streams of possibly different types; each side is
//       handled separately via CoMapFunction / CoFlatMapFunction / CoProcessFunction etc.
        stream0.connect(stream1).map(new CoMapFunction<String, Integer, Object>() {
            @Override
            public Object map1(String s) throws Exception {
                return s.toLowerCase();
            }

            @Override
            public Object map2(Integer integer) throws Exception {
                return integer;
            }
        }).print("connect");// prints lines prefixed with "connect> "

//       union merges multiple streams that all share the same element type
        stream1.union(stream1).union(stream2).map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer value) throws Exception {
                return value * 2;
            }
        }).print("union");// prints lines prefixed with "union> "

        // side-output tags: "优秀" = excellent, "良好" = good (tag ids are runtime strings)
        OutputTag<Long> tag1 = new OutputTag<>("优秀", TypeInformation.of(Long.class));
        OutputTag<Long> tag2 = new OutputTag<>("良好", TypeInformation.of(Long.class));

        // route every element into one of the two side outputs by threshold
        // NOTE(review): stream1 holds 1..5, so with value >= 90 everything lands
        // in tag2 - the sample output below assumes score-like input; confirm.
        SingleOutputStreamOperator<Long> process = stream1
        .process(new ProcessFunction<Integer, Long>() {
            @Override
            public void processElement(Integer value, Context ctx, Collector<Long> out) throws Exception {
                if (value >= 90) {
                    ctx.output(tag1, Long.valueOf(value));
                } else {
                    ctx.output(tag2, Long.valueOf(value));
                }
            }
        });
        DataStream<Long> output1 = process.getSideOutput(tag1);
        DataStream<Long> output2 = process.getSideOutput(tag2);
        output1.print("优秀");
        output2.print("良好");
//        sample side-output lines, e.g.: 优秀> 100
//        and: 优秀> 90




    }

}
*/
