package consumer;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import pros.propties;
import testjson.Hbasemain;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

public class HbaseConsumer {
    protected static final Log log = LogFactory.getLog(HbaseConsumer.class);

    /** External config location: &lt;working-dir&gt;/conf/0327.properties. */
    private static final String confPath =
            System.getProperty("user.dir") + File.separator + "conf/0327.properties";

    /**
     * Loads configuration, preferring the external {@code conf/0327.properties}
     * file and falling back to the classpath resource of the same name when the
     * file does not exist.
     *
     * @return the loaded properties; empty if neither source could be read
     */
    private static Properties properties() {
        Properties properties = new Properties();
        File file = new File(confPath);
        if (file.exists()) {
            // External file takes precedence; try-with-resources closes the stream
            // (the original leaked this FileInputStream).
            try (InputStream in = new FileInputStream(file)) {
                properties.load(in);
            } catch (IOException e) {
                log.error("Failed to load properties from " + confPath, e);
            }
        } else {
            // Fall back to the bundled classpath resource. getResourceAsStream
            // returns null when the resource is absent, so guard against NPE.
            try (InputStream in =
                         HbaseConsumer.class.getClassLoader().getResourceAsStream("0327.properties")) {
                if (in == null) {
                    log.error("Classpath resource 0327.properties not found");
                } else {
                    properties.load(in);
                }
            } catch (IOException e) {
                log.error("Failed to load classpath resource 0327.properties", e);
            }
        }
        return properties;
    }

    /**
     * Polls the configured Kafka topic forever and forwards every record value
     * (a Maxwell-style JSON payload, judging by the sample in version history)
     * to {@link Hbasemain#testJSON(String)} for persistence into HBase.
     * Terminates the JVM with a nonzero status when the Kafka broker/topic
     * configuration is missing.
     */
    private static void consumerkafka() {
        // Kafka and ZooKeeper configuration.
        Properties properties = properties();
        String brokers = properties.getProperty("kafka.brokers");
        String topics = properties.getProperty("kafka.topics");
        String zookeeperserver = properties.getProperty("zookeeperlist.server");
        String zookeeperport = properties.getProperty("zookeeperlist.port");
        log.info("kafka.brokers:" + brokers);
        log.info("kafka.topics:" + topics);
        log.info("zookeeperlist.server:" + zookeeperserver);
        log.info("zookeeperlist.port:" + zookeeperport);
        if (StringUtils.isEmpty(brokers) || StringUtils.isEmpty(topics)) {
            // Missing configuration is an error — exit with a nonzero status
            // (the original used exit(0), which signals success to callers).
            log.error("未配置Kafka信息...");
            System.exit(1);
        }

        propties pro = new propties();
        // Diamond operator instead of the original raw-type construction.
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(pro.properties1());
        consumer.subscribe(Arrays.asList(topics));
        Hbasemain test = new Hbasemain(zookeeperserver, zookeeperport);

        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    log.info("kafka record: " + record.value());
                    // Persist the JSON payload into HBase.
                    test.testJSON(record.value());
                }
            }
        } finally {
            // Release sockets and trigger a clean group leave if the loop ever
            // exits (e.g. via an unchecked exception from testJSON).
            consumer.close();
        }
    }

    public static void main(String[] args) {
        consumerkafka();
    }
}
