package com.k2data.processors;
//
//import com.k2data.common.Constants;
//import org.apache.kafka.common.serialization.Serdes;
//import org.apache.kafka.streams.KafkaStreams;
//import org.apache.kafka.streams.StreamsConfig;
//import org.apache.kafka.streams.processor.TopologyBuilder;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
//
//import java.util.Properties;
//
///**
// * Created by luoqifei on 17-1-17.
// * Just an adapter for JDK 1.8 (lambda syntax).
// */
//public class TransformProcessorLambda {
//    private static Logger logger = LoggerFactory.getLogger(TransformProcessorLambda.class);
//    private TopologyBuilder builder = new TopologyBuilder();
//    private String sourceMataData = "SOURCE";
//    private Properties conf = new Properties();
//    private StreamsConfig streamConfig;
//    public static String zkUrl;
//    public static String zkNs;
//
//    public TransformProcessorLambda() {
//    }
//
//    public void buildTopology(String sourceTopic) {
//        //add state store
////        StateStoreSupplier anomalyStore = Stores.create(Constants.ANOMALY_STATE_STORE_NAME)
////                .withKeys(Serdes.String())
////                .withValues(Serdes.String())
////                .persistent()
////                .build();
//
//
//        //set topo source data by topic
//        builder.addSource(sourceMataData, sourceTopic)
//                //add first processor ,it trans to json
//                .addProcessor("TRANSFORM-PROCESS", () -> new TransformProcessor(), sourceMataData)
//                //add state store
//                //.addStateStore(anomalyStore,"TRANSFORM-PROCESS")
//                //.addProcessor("ANOMALY-PROCESS",() ->  new AnomalyProcessor(),"TRANSFORM-PROCESS")
//                //result send to topic
//                .addSink("SINK1", Constants.KAFKA_TOPIC_NAME_ADAPTER, "TRANSFORM-PROCESS");
//        //.addSink("SINK2","huineng-anomaly","ANOMALY-PROCESS");
//        logger.info("kafka stream result send to topic:" + Constants.KAFKA_TOPIC_NAME_ADAPTER);
//    }
//
//    public void run() {
//        KafkaStreams streams = new KafkaStreams(builder, streamConfig);
//        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
//        streams.start();
//    }
//
//    public void setConfig(String kafkaUrl, String zkUrl) {
//        conf.put(StreamsConfig.APPLICATION_ID_CONFIG, "huineng-kstreams-application");
//        conf.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaUrl);
//        conf.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, zkUrl);
//        conf.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
//        conf.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
//        streamConfig = new StreamsConfig(conf);
//    }
//
//    public static void main(String args[]) {
////        if (args.length < 2) {
////            System.out.println("Usage:  ZookeeperUrl, namespace.  for example: 192.168.1.1:2181 zk_test ");
////            System.exit(1);
////        }
////        Configuration conf;
////        ZookeeperService zkService;
////        zkUrl = args[0];
////        zkNs = args[1];
////        zkService = new ZookeeperService(zkUrl, zkNs);
////        zkService.start();
////        conf = new Configuration();
////        try {
////            conf.loadFromZookeeper(zkService);
////        } catch (Exception e) {
////            e.printStackTrace();
////        }
////        zkService.close();
////        logger.info("zkUrl:" + zkUrl + " . zk namespace:" + zkNs);
////        logger.info("kafka stream source topic is:" + Constants.MATA_TOPIC_NAME);
////
//        TransformProcessorLambda kmxTopology = new TransformProcessorLambda();
//        kmxTopology.buildTopology(Constants.MATA_TOPIC_NAME);
////        if (StringUtils.isBlank(conf.getString(ParamNames.METADATA_BROKER_LIST))) {
////            System.err.println("KMX conf Not set env " + ParamNames.METADATA_BROKER_LIST.toUpperCase().replace('.', '_').trim());
////            System.exit(1);
////        }
////        logger.info("kafka broker list is:" + conf.getString(ParamNames.METADATA_BROKER_LIST));
//        kmxTopology.setConfig("localhost:9092", "localhost:2181");
//        //kmxTopology.setConfig(conf.getString(ParamNames.METADATA_BROKER_LIST), zkUrl);
//        try {
//            kmxTopology.run();
//        } catch (Exception e) {
//            logger.error("error happen when trans csv to json.");
//            e.printStackTrace();
//        }
//
//    }
//}
