package flink;

import bean.columnBean;
import bean.commentBean;
import bean.orderInfo;
import function.RichMapSqlFunction;
import org.apache.commons.compress.utils.CharsetNames;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.CoGroupFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.ProcessingTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.CountTrigger;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

import java.nio.file.FileSystem;
import java.util.Locale;
import java.util.regex.Pattern;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Flink streaming job that builds column metadata for DDL/SQL generation.
 *
 * <p>Reads two local text files ('#'-separated):
 * <ul>
 *   <li>columon.txt — lines of {@code source#columnName#columnType} (Oracle-style types);</li>
 *   <li>comment.txt — lines of {@code source.columnName#comment}.</li>
 * </ul>
 * Oracle NUMBER types are mapped to Bigint (no scale) or Double (has a comma,
 * i.e. NUMBER(p,s)); everything else becomes String. The two streams are joined
 * with Flink SQL on {@code source || '.' || columnName} and the retract stream is
 * post-processed by {@link RichMapSqlFunction}.
 */
public class productCreateInsertSqlApp {

    // Compiled once: Pattern.matches(regex, input) recompiles the regex on every
    // record, which is wasted work inside a per-element map operator.
    private static final Pattern NUMBER_TYPE = Pattern.compile(".*NUMBER.*");
    // A comma inside the type means NUMBER(p,s), i.e. a scaled/decimal value.
    private static final Pattern HAS_SCALE = Pattern.compile(".*,.*");

    public static void main(String[] args) throws Exception {
        // Execution environment; parallelism 1 keeps file-read ordering deterministic.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // Read the two metadata files. NOTE(review): the timestamp assigner returns
        // wall-clock time, not an event time parsed from the record — presumably the
        // join below is effectively processing-time only; confirm this is intended.
        SingleOutputStreamOperator<String> columnStreamSource = env.readTextFile("C:\\Users\\Administrator\\Desktop\\columon.txt", CharsetNames.UTF_8)
                .assignTimestampsAndWatermarks(WatermarkStrategy.<String>forMonotonousTimestamps().withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                    @Override
                    public long extractTimestamp(String element, long recordTimestamp) {
                        return System.currentTimeMillis();
                    }
                }));
        SingleOutputStreamOperator<String> commentStreamSource = env.readTextFile("C:\\Users\\Administrator\\Desktop\\comment.txt", CharsetNames.UTF_8)
                .assignTimestampsAndWatermarks(WatermarkStrategy.<String>forMonotonousTimestamps().withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                    @Override
                    public long extractTimestamp(String element, long recordTimestamp) {
                        return System.currentTimeMillis();
                    }
                }));

        // Parse column lines: source#name#type -> columnBean, mapping Oracle NUMBER
        // to Bigint/Double and all other types to String.
        SingleOutputStreamOperator<columnBean> columnDs = columnStreamSource.map(lines -> {
            columnBean columnBean = new columnBean();
            String[] split = lines.split("#");
            boolean isNumber = NUMBER_TYPE.matcher(split[2]).matches();
            boolean hasScale = HAS_SCALE.matcher(split[2]).matches();
            columnBean.setSource(split[0].toLowerCase(Locale.ROOT));
            columnBean.setColumnName(split[1].toLowerCase(Locale.ROOT));
            if (isNumber && hasScale) {
                // NUMBER with a scale, e.g. NUMBER(10,2) -> floating point.
                columnBean.setColumnType("Double");
            } else if (isNumber) {
                // Plain NUMBER / NUMBER(p) -> integral.
                columnBean.setColumnType("Bigint");
            } else {
                columnBean.setColumnType("String");
            }
            return columnBean;
        });

        // Parse comment lines: "source.columnName#comment" -> commentBean. The key is
        // lower-cased to match the lower-cased source/name on the column side.
        SingleOutputStreamOperator<commentBean> commentDs = commentStreamSource.map(lines -> {
            commentBean commentBean = new commentBean();
            String[] split = lines.split("#");
            String key = split[0].toLowerCase(Locale.ROOT);
            commentBean.setSource(key);
            commentBean.setColumnComment(split[1]);
            return commentBean;
        });

        // Register both streams as temporary views for the SQL join below.
        tableEnv.createTemporaryView("columnDs", columnDs);
        tableEnv.createTemporaryView("commentDs", commentDs);

        // Left join so columns without a comment are still emitted; the join key is
        // "source.columnName" reconstructed on the column side.
        String sql = "\tselect \n" +
                "\t   trim(a.columnName),\n" +
                "\t   trim(a.columnType),\n" +
                "\t   trim(b.columnComment)\n" +
                "\t   from columnDs a\n" +
                "\t   left join commentDs b\n" +
                "\t   on trim(a.source)||'.'||trim(a.columnName)=trim(b.source)";
        Table joinTable = tableEnv.sqlQuery(sql);
        // Retract stream because the left join can emit updates as comments arrive.
        DataStream<Tuple2<Boolean, Row>> tuple2DataStream = tableEnv.toRetractStream(joinTable, Row.class);
        // RichMapSqlFunction turns each joined row into the final SQL text.
        SingleOutputStreamOperator<String> generatedSql = tuple2DataStream.map(new RichMapSqlFunction());

        // Launch the job.
        env.execute("productCreateInsertSqlApp");
    }
}
