package org.example.flinketl.main;
import java.util.Properties;
import java.util.Timer;
import java.util.TimerTask;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.example.flinketl.util.ClickHouseInsertUtil;
import org.example.flinketl.util.MapAndFilter;
import org.example.flinketl.util.MyConfig;



public class Consumer {
  // NOTE(review): these counters are incremented inside the flatMap operator.
  // In a distributed Flink deployment the operator runs on TaskManager JVMs,
  // so these driver-side statics will not observe those increments, and `++`
  // on a long is not atomic under parallelism > 1. Consider Flink metric
  // Counters instead. Kept as public static longs because they are public API.
  public static long count = 0;
  public static long rate = 0;

  /**
   * Entry point: consumes messages from the configured Kafka topic, transforms
   * each record via {@code MapAndFilter.flatmap}, and writes the results to
   * ClickHouse through {@code ClickHouseInsertUtil}.
   *
   * @param args command-line arguments (unused)
   * @throws Exception if the Flink job fails to build or execute
   */
  public static void main(String[] args) throws Exception {
    // Diamond operator instead of a raw-type constructor (avoids an unchecked
    // assignment warning).
    ClickHouseInsertUtil<Object> clickHouseUtil = new ClickHouseInsertUtil<>();
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    Properties props = new Properties();
    props.put("bootstrap.servers", MyConfig.kafkaUrl);
    props.put("group.id", MyConfig.groupId);
    FlinkKafkaConsumer<String> consumer =
        new FlinkKafkaConsumer<>(MyConfig.topic, new SimpleStringSchema(), props);

    // Always consume from the beginning of the topic. The original code also
    // called setStartFromGroupOffsets() first, but setStartFromEarliest()
    // overrides it (only the last startup-mode call takes effect), so the
    // redundant, misleading call was removed.
    consumer.setStartFromEarliest();

    DataStreamSource<String> source = env.addSource(consumer);

    source
        .flatMap(new FlatMapFunction<String, Object>() {
          @Override
          public void flatMap(String value, Collector<Object> out) throws Exception {
            // See the thread-safety note on the fields above.
            count++;
            rate++;
            MapAndFilter.flatmap(value, out);
          }
        })
        .addSink(clickHouseUtil);

    env.execute("FlinkEtl");
  }
}
