package api.sink;

import api.beans.ErrorDataJdbcSink;
import api.beans.ErrorDataJdbcSink2;
import api.beans.ErrorSinkData;
import api.beans.SensorReading;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import utils.DateUtils;
import utils.FlinkDataType;
import utils.JsonUtils;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Properties;

/**
 * Flink streaming job: consumes JSON records from a Kafka topic, validates them
 * against an expected key/type schema, routes valid records into a Table API view
 * that is inserted into a MySQL table via the JDBC connector, and routes invalid
 * records (plus task metadata) to an error JDBC sink through a side output.
 */
public class SinkTest4 {

    private static final Logger logger = LoggerFactory.getLogger(SinkTest4.class);

    // JDBC settings for the Flink JDBC table sink.
    // NOTE(review): credentials are hard-coded; consider externalizing to configuration.
    private static final String DB_URL = "jdbc:mysql://192.168.36.129:3306/zzb?useUnicode=true&characterEncoding=UTF-8&useSSL=false";
    private static final String DB_DRIVER = "com.mysql.jdbc.Driver";
    private static final String DB_USERNAME = "root";
    private static final String DB_PASSWORD = "root";

    public static void main(String[] args) throws Exception {

        /** Execution environment. */
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        env.setParallelism(1);

        String servers = "192.168.36.130:9092,192.168.36.129:9092,192.168.36.128:9092";
        String topic = "myTest";

        // Expected JSON keys and their Flink type names for incoming Kafka records.
        List<String> dataKeys = Arrays.asList("t_id", "t_key", "t_val");
        String[] kafkaFields = dataKeys.toArray(new String[0]);
        List<String> dataTypes = Arrays.asList("string", "string", "string");
        TypeInformation<?>[] kafkaTypes = new TypeInformation[dataTypes.size()];
        for (int i = 0, len = dataTypes.size(); i < len; i++) {
            kafkaTypes[i] = FlinkDataType.getTypeInformation(dataTypes.get(i));
        }

        // Kafka consumer configuration.
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", servers);
        props.setProperty("group.id", "flink-test-SinkTest4");
        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), props);

        /** 1. Side-output tag for records that fail validation. */
        OutputTag<String> outputTag = new OutputTag<String>("error-out"){};

        /** 2. Split the stream: valid records to the main output, bad ones to the side output. */
        DataStream<String> dataStream = env.addSource(kafkaConsumer);
        SingleOutputStreamOperator<String> inputStream = dataStream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, Context ctx, Collector<String> out) {
                logger.info("Start......");
                logger.info("data:{}", value);
                // Null/empty records go straight to the error side output.
                if (value == null || value.isEmpty()) {
                    logger.info("out to error-output...");
                    ctx.output(outputTag, value);
                    // BUG FIX: the original fell through to the JSON check below, which
                    // also fails for null/empty input and emitted the same record to the
                    // side output a second time. Return here to emit it exactly once.
                    return;
                }
                // Default JSON schema validation (keys present and types match).
                if (JsonUtils.kafkaStreamDataCheck(value, dataKeys, dataTypes)) {
                    logger.info("out to main...");
                    out.collect(value);
                } else {
                    logger.info("out to error-output...");
                    ctx.output(outputTag, value);
                }
            }
        });

        // Main stream: parse each validated JSON record into a Row matching kafkaFields/kafkaTypes.
        DataStream<Row> mainStream = inputStream.map(new MapFunction<String, Row>() {
            @Override
            public Row map(String value) throws Exception {
                logger.info("mainStream getData:{}", value);
                Row row = new Row(dataKeys.size());
                JSONObject object = JSON.parseObject(value);
                for (int i = 0; i < dataKeys.size(); i++) {
                    String key = dataKeys.get(i);
                    row.setField(i, FlinkDataType.transformValue(kafkaTypes[i].getTypeClass(), object.getString(key)));
                }
                return row;
            }
        }).returns(new RowTypeInfo(kafkaTypes, kafkaFields));
        // Register the valid-record stream as a table named after the topic.
        tEnv.createTemporaryView(topic, mainStream);

        // Side-output handling: enrich each failed record with static task metadata.
        String conf = "{\"sourceType\":\"kafka\",\"taskId\":\"testId100004\",\"taskName\":\"ZZBFlinkTest\"}";
        ErrorSinkData basicData = JSON.parseObject(conf, ErrorSinkData.class);
        logger.info("ErrorSinkData:{}", JSON.toJSONString(basicData));
        DataStream<Row> rowOutputStream = inputStream.getSideOutput(outputTag).map((MapFunction<String, Row>) value -> {
            logger.info("sideOutputStream getData:{}", value);
            // NOTE(review): the Row is sized for 11 fields but only indices 0-7 are
            // populated; fields 8-10 remain null — confirm ErrorDataJdbcSink2 expects this.
            Row row = new Row(11);
            row.setField(0, basicData.getTaskId());
            row.setField(1, basicData.getTaskName());
            row.setField(2, basicData.getBasicTableId());
            row.setField(3, basicData.getBasicTableName());
            row.setField(4, "0");                      // kafka offset placeholder
            row.setField(5, basicData.getSourceType());
            row.setField(6, basicData.getDsId());      // f_dsid
            row.setField(7, value);                    // the raw failing record
            return row;
        });
        rowOutputStream.addSink(new ErrorDataJdbcSink2("testJob"));

        // DDL for the JDBC output table backing the INSERT below.
        StringBuilder ddl = new StringBuilder();
        ddl.append("CREATE TABLE flink_tg_test (");
        ddl.append("tg_id VARCHAR(32),tg_key VARCHAR(100),tg_value VARCHAR(100) ");
        ddl.append(") WITH (");
        ddl.append("'connector.type' = 'jdbc', ");
        ddl.append("'connector.url' = '").append(DB_URL).append("', ");
        ddl.append("'connector.table' = 'flink_tg_test', ");
        ddl.append("'connector.driver' = '").append(DB_DRIVER).append("', ");
        ddl.append("'connector.username' = '").append(DB_USERNAME).append("', ");
        ddl.append("'connector.password' = '").append(DB_PASSWORD).append("', ");
        ddl.append("'connector.write.flush.max-rows'   = '1'");
        ddl.append(")");
        logger.info("outputTable SQL: {}", ddl);
        tEnv.executeSql(ddl.toString());
        String sql = "INSERT INTO   flink_tg_test(tg_id, tg_key, tg_value) select   t_id,   t_key,   t_val from   myTest";
        // executeSql submits the INSERT as its own Flink job.
        tEnv.executeSql(sql);

        // Submit the DataStream part of the pipeline (side-output error sink)
        // without blocking, since executeSql above already launched the table job.
        env.executeAsync();
    }
}
