package com.framework.flink;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.jdbc.JDBCInputFormat;
import org.apache.flink.api.java.io.jdbc.JDBCOutputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.BatchTableEnvironment;
import org.apache.flink.types.Row;

import java.sql.Types;
import java.util.ArrayList;
import java.util.List;

/**
 * MySQL sink demo based on the legacy Flink DataSet API.
 * Reads table {@code test2}, runs a pairwise interpolation over consecutive
 * rows ({@code InterFunction}), then inserts the result into {@code test3}.
 * @author xianggj
 * @Date 2021/10/19 13:51
 **/
public class FlinkTestMysqlOutput {

    public static void main(String[] args) throws Exception {
        getDatas();
    }

    /**
     * Reads all rows from MySQL table {@code test2}, feeds each pair of
     * consecutive rows through {@code InterFunction.reduce}, and writes the
     * interpolated result set into table {@code test3} via JDBC batch inserts.
     *
     * @throws Exception if the JDBC connection fails or job execution fails
     */
    private static void getDatas() throws Exception {
        // Column types of test2: (id INT, name VARCHAR, age INT).
        TypeInformation<?>[] fieldTypes = new TypeInformation[]{
                BasicTypeInfo.INT_TYPE_INFO,
                BasicTypeInfo.STRING_TYPE_INFO,
                BasicTypeInfo.INT_TYPE_INFO
        };

        RowTypeInfo rowTypeInfo = new RowTypeInfo(fieldTypes);
        // NOTE(review): credentials are hard-coded; move them to external
        // configuration / a secrets store before production use.
        JDBCInputFormat jdbcInputFormat = JDBCInputFormat.buildJDBCInputFormat()
                .setDrivername("com.mysql.jdbc.Driver")
                .setDBUrl("jdbc:mysql://192.168.1.37:3306/test")
                .setUsername("root")
                .setPassword("123456")
                .setQuery("select *  from test2")
                .setRowTypeInfo(rowTypeInfo)
                .finish();

        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSource<Row> source = env.createInput(jdbcInputFormat);
        BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env);
        // Register the JDBC source as a temporary view so it can be queried by SQL.
        tableEnv.createTemporaryView("test2", source);
        Table query = tableEnv.sqlQuery("select * from test2");
        query.printSchema();
        DataSet<Row> result = tableEnv.toDataSet(query, Row.class);

        JDBCOutputFormat jdbcOutput = JDBCOutputFormat.buildJDBCOutputFormat()
                .setDrivername("com.mysql.jdbc.Driver")
                .setDBUrl("jdbc:mysql://192.168.1.37:3306/test")
                .setUsername("root")
                .setPassword("123456")
                .setQuery("INSERT INTO test3 (id, name, age) VALUES (?, ?, ?)")
                // SQL types of the three insert placeholders.
                .setSqlTypes(new int[]{Types.INTEGER, Types.VARCHAR, Types.INTEGER})
                // Flush the JDBC batch every 30 rows.
                .setBatchInterval(30)
                .finish();

        // Field positions handed to InterFunction (0 = id, 2 = age) —
        // presumably the numeric columns to interpolate; confirm against
        // InterFunction's contract.
        List<Integer> interList = new ArrayList<>();
        interList.add(0);
        interList.add(2);
        RowTypeInfo type = (RowTypeInfo) result.getType();
        TypeInformation<?>[] resultFieldTypes = type.getFieldTypes();

        // collect() materializes the DataSet on the client and triggers a
        // separate job execution — unavoidable here because InterFunction
        // needs pairwise access to consecutive rows.
        List<Row> rows = result.collect();
        InterFunction interFunction = new InterFunction(
                interList.toArray(new Integer[0]), resultFieldTypes, rows.size());
        // Feed every pair of consecutive rows (i, i+1) to the reducer.
        for (int i = 0; i + 1 < rows.size(); i++) {
            interFunction.reduce(new Tuple2<>(i, rows.get(i)),
                    new Tuple2<>(i + 1, rows.get(i + 1)));
        }
        // Rebuild a DataSet from the interpolated rows, preserving the schema.
        List<Row> interpolated = interFunction.getResList();
        result = env.fromCollection(interpolated, type);

        result.output(jdbcOutput);

        // output() is lazy on the DataSet API; execute() submits the job.
        env.execute("执行操作");
    }

    /**
     * Maps a Row to the same Row with field 0 incremented.
     * NOTE(review): the name says "add one" but the code adds 3 — confirm the
     * intended offset. Currently only referenced from commented-out code.
     */
    public static class IdAddOneSplitter implements MapFunction<Row, Row> {
        @Override
        public Row map(Row row) throws Exception {
            Object field = row.getField(0);
            row.setField(0, Integer.valueOf(field.toString()) + 3);
            return row;
        }
    }
}
