package car;

import bean.AreaControl;
import bean.MonitorInfo;
import day4.Constants;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.Size;

import java.sql.PreparedStatement;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

/**
 * Flink streaming job: counts distinct vehicles per area over 10-minute
 * tumbling processing-time windows, reading monitor records from the Kafka
 * topic "topic-car" and (once enabled) persisting each window's result into
 * the t_area_control table via the JDBC sink.
 *
 * NOTE(review): the windowing/sink pipeline below is still commented out —
 * only the Kafka source is currently wired into the job graph. The file had
 * been corrupted by an accidental cut-and-paste that spliced the pipeline
 * code into the middle of the "hadoop10:9092" string literal; it has been
 * reassembled here.
 */
public class Test5_VehicleDistribution {
    public static void main(String[] args) throws Exception {
        // Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//        env.setParallelism(1); // set parallelism
        Properties properties = new Properties();
        // BUG FIX: the original called getProperty(key, default), which only
        // READS a value (returning the default) and discards the result — the
        // consumer config was never populated, so the Kafka consumer had no
        // bootstrap servers or group id. setProperty() actually stores them.
        properties.setProperty("bootstrap.servers", "hadoop10:9092");
        properties.setProperty("group.id", "g5");

        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("topic-car", new SimpleStringSchema(), properties);
        DataStreamSource<String> ds1 = env.addSource(consumer);
/*
        // Disabled pipeline: key by area id, collect the distinct car ids per
        // 10-minute window, then batch-insert one AreaControl row per window.
        // NOTE(review): ds1 is a DataStream<String>; this snippet presumably
        // expects a parsed MonitorInfo stream — confirm the missing map step
        // before re-enabling.
        ds1.keyBy(value -> value.getAreaId())
                .window(TumblingProcessingTimeWindows.of(Time.minutes(10)))
                .apply(new WindowFunction<MonitorInfo, AreaControl, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<MonitorInfo> input, Collector<AreaControl> out) throws Exception {
                        // Distinct car ids observed in this window.
                        Set<String> set = new HashSet<>();
                        for (MonitorInfo monitorInfo : input) {
                            set.add(monitorInfo.getCar());
                        }
                        String start = DateFormatUtils.format(window.getStart(), Constants.D1);
                        String end = DateFormatUtils.format(window.getEnd(), Constants.D1);
                        out.collect(new AreaControl(null, s, set.size(), start, end));
                    }
                }).addSink(JdbcSink.sink("INSERT INTO t_area_control values (null,?,?,?,?)",
                    (ps, value) -> {
                        ps.setString(1, value.getAreaId());
                        ps.setInt(2, value.getCarCount());
                        ps.setString(3, value.getWindowStart());
                        ps.setString(4, value.getWindowEnd());
                    },
                    JdbcExecutionOptions.builder().withBatchSize(10).withBatchIntervalMs(5000).build(),
                    new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                            .withUsername(Constants.USERNAME)
                            .withPassword(Constants.PASSWORD)
                            .withUrl(Constants.URL)
                            .withDriverName(Constants.DRIVER).build()));
*/

        // Submit the job; blocks until the streaming job terminates.
        env.execute();
    }
}
