package tk.xboot.kfk;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.ValueMapper;

import java.io.IOException;
import java.time.LocalDate;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.stream.Collectors;

public class KfkStream {

    // Jackson's ObjectMapper is thread-safe and expensive to construct; share one instance.
    private static final ObjectMapper mapper = new ObjectMapper();
    // Sampling stride for the 3-months-to-15-days window: keep one record per 6 hours.
    // Assumes the source emits hourly records (24/day), so 24/6 = every 4th record.
    // NOTE(review): confirm the source cadence really is hourly.
    private static final int mod = 24 / 6;

    private static final String SOURCE_TOPIC = "firm-offer-ratio-prod";
    private static final String SINK_TOPIC = "test"; // "firm-offer-etl-prod"

    /**
     * Entry point. Builds and runs a Kafka Streams topology that:
     * <ol>
     *   <li>consumes JSON strings from {@link #SOURCE_TOPIC},</li>
     *   <li>downsamples the time-series lists embedded in each record via
     *       {@link #reHash(List)} (and drops {@code monthRatioModels} entirely),</li>
     *   <li>re-serializes the record and produces it to {@link #SINK_TOPIC}.</li>
     * </ol>
     * Blocks until the JVM receives a shutdown signal.
     */
    public static void main(String[] args) {
        Properties conf = new Properties();
        conf.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-pipe");
        conf.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "kfk1.bicoin.com.cn:9092,kfk2.bicoin.com.cn:9092,kfk3.bicoin.com.cn:9092");
        conf.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        conf.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        final org.apache.kafka.streams.StreamsBuilder sb = new org.apache.kafka.streams.StreamsBuilder();

        // Source stream from the input Kafka topic.
        KStream<String, String> source = sb.stream(SOURCE_TOPIC);

        source.mapValues(new ValueMapper<String, Object>() {
            @Override
            public Object apply(String s) {
                try {
                    Map<String, Object> data = mapper.readValue(s, HashMap.class);

                    // BUG FIX: the original passed the first list ("balanceModels") to
                    // every reHash call, clobbering benifitModels/incomeModels/weekRatioModels
                    // with balance data. Each list is now downsampled itself.
                    List<LinkedHashMap<String, Object>> list = (List<LinkedHashMap<String, Object>>) data.get("balanceModels");
                    data.put("balanceModels", reHash(list));

                    List<LinkedHashMap<String, Object>> list2 = (List<LinkedHashMap<String, Object>>) data.get("benifitModels");
                    data.put("benifitModels", reHash(list2));

                    List<LinkedHashMap<String, Object>> list3 = (List<LinkedHashMap<String, Object>>) data.get("incomeModels");
                    data.put("incomeModels", reHash(list3));

                    // monthRatioModels is intentionally dropped (previously downsampled;
                    // now replaced with an empty list).
                    data.put("monthRatioModels", Collections.emptyList());

                    List<LinkedHashMap<String, Object>> list5 = (List<LinkedHashMap<String, Object>>) data.get("weekRatioModels");
                    data.put("weekRatioModels", reHash(list5));

                    return mapper.writeValueAsString(data);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                // NOTE(review): a parse failure forwards null to the sink topic; consider
                // filtering nulls before .to(...) if downstream consumers can't handle them.
                return null;
            }
        }).to(SINK_TOPIC);

        final Topology topology = sb.build();
        System.out.println(topology.describe());
        final KafkaStreams streams = new KafkaStreams(topology, conf);
        final CountDownLatch latch = new CountDownLatch(1);
        // Close the streams client cleanly when the JVM shuts down, then release main().
        Runtime.getRuntime().addShutdownHook(new Thread("streams-shutdown-hook") {
            @Override
            public void run() {
                streams.close();
                latch.countDown();
            }
        });
        try {
            streams.start();
            latch.await();
        } catch (Throwable throwable) {
            System.exit(1);
        }
    }

    /**
     * Downsamples a chronological list of records by the age encoded in each record's
     * {@code "ctime"} field (assumed to be an ISO {@code yyyy-MM-dd}-style string so that
     * lexicographic comparison equals chronological comparison — TODO confirm):
     * <ul>
     *   <li>older than 3 months (boundary day included): one record per distinct ctime,
     *       in ascending ctime order;</li>
     *   <li>between 3 months (exclusive) and 15 days (inclusive) ago: every {@code mod}-th
     *       record, source order preserved;</li>
     *   <li>within the last 15 days (boundary day excluded): all records.</li>
     * </ul>
     *
     * @param list source records, each carrying a {@code "ctime"} key; may be null/empty
     * @return the concatenation of the three windows above (mutable list)
     */
    private static List<LinkedHashMap<String, Object>> reHash(List<LinkedHashMap<String, Object>> list) {
        // Robustness: tolerate a missing or empty model list in the incoming JSON
        // (the original NPE'd on a null list).
        if (list == null || list.isEmpty()) {
            return new ArrayList<>();
        }

        // Window 1: on/before (today - 3 months). Group by ctime, keep the first record
        // of each day, sorted ascending by ctime.
        LocalDate td3M = LocalDate.now().minusMonths(3);
        List<LinkedHashMap<String, Object>> lst3M = list.stream()
                .filter(o -> td3M.toString().compareTo(o.get("ctime").toString()) >= 0)
                .collect(Collectors.groupingBy(e -> e.get("ctime")))
                .entrySet().stream()
                .sorted(Comparator.comparing(m -> m.getKey().toString()))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (oldVal, newVal) -> oldVal, LinkedHashMap::new))
                .values().stream().map(o -> o.get(0)).collect(Collectors.toList());

        // Window 2: (today - 3 months, today - 15 days]. Source order is preserved.
        LocalDate td15D = LocalDate.now().minusDays(15);
        List<LinkedHashMap<String, Object>> lst3MonTo15D = list.stream()
                .filter(o -> td3M.toString().compareTo(o.get("ctime").toString()) < 0
                        && td15D.toString().compareTo(o.get("ctime").toString()) >= 0)
                .collect(Collectors.toList());

        // Keep every mod-th record of window 2 (one sample per 6 hours for hourly data).
        List<LinkedHashMap<String, Object>> lst6H = new ArrayList<>();
        for (int i = 0; i < lst3MonTo15D.size(); i += mod) {
            lst6H.add(lst3MonTo15D.get(i));
        }

        // Window 3: strictly within the last 15 days — keep everything.
        List<LinkedHashMap<String, Object>> lst15D = list.stream()
                .filter(o -> td15D.toString().compareTo(o.get("ctime").toString()) < 0)
                .collect(Collectors.toList());

        lst3M.addAll(lst6H);
        lst3M.addAll(lst15D);
        return lst3M;
    }
}
