package com.itcast.flink.connectors.jdbc;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Streaming job that reads newline-delimited, tab-separated access-log
 * records from a socket, parses each line into an {@code AccessLog} bean,
 * and writes the stream to a database through a custom JDBC sink.
 *
 * <p>Expected input field order per line: ip, time, type, api.
 *
 * @author zhanghz001
 * @since 2021-07-23
 */
public class ZhzCustomSinkApplication {
    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Socket text source: one record per '\n'-terminated line.
        DataStreamSource<String> socketStr = env.socketTextStream("192.168.23.128", 9911, "\n");
        // Parse each raw line into an AccessLog bean.
        SingleOutputStreamOperator<AccessLog> outputStream = socketStr.map(new MapFunction<String, AccessLog>() {
            @Override
            public AccessLog map(String value) throws Exception {
                System.out.println("receive==> " + value);
                // Fields are tab-separated: [0]=ip, [1]=time, [2]=type, [3]=api.
                String[] fields = value.split("\t");
                AccessLog log = new AccessLog();
                // Assign each field directly instead of looping over indices.
                // Short records leave the trailing properties unset, matching
                // the original loop's behavior; extra fields are ignored.
                if (fields.length > 0) {
                    log.setIp(fields[0]);
                }
                if (fields.length > 1) {
                    log.setTime(fields[1]);
                }
                if (fields.length > 2) {
                    log.setType(fields[2]);
                }
                if (fields.length > 3) {
                    log.setApi(fields[3]);
                }
                return log;
            }
        });
        // Write the parsed stream to the database via the custom sink.
        outputStream.addSink(new MySQLSinkFunction());
        // Submit the job to the Flink runtime.
        env.execute("jdbc sink stream job");
    }
}
