
package com.sailing.lianxi.db;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import com.sailing.lianxi.common.Constants;
import com.sailing.lianxi.common.ObjTool;

public class ConsumerDemo {
    // NOTE(review): database credentials are hard-coded; externalize to configuration
    // (env vars / properties file) before any non-demo use.
    private static final String driver = "oracle.jdbc.driver.OracleDriver"; // JDBC driver class
    private static final String url = "jdbc:oracle:thin:@//172.20.46.94:1521/orcl"; // connection string
    private static final String userName = "RYGK"; // DB user
    private static final String passWord = "sailing123"; // DB password

    /** Column names of target table WG_TEST, in the order used for the INSERT. */
    static final String[] columns = {"ID","XM","SFZ","CSRQ","XBDM","XBMC",
            "MZDM","MZMC","YYCSDM","YYCSMC","SWKSSJ",
            "XWSJ","RWSJ","SWZDH","JLRKSJ","ZHGXSJ","RKSJ"
           };

    /**
     * Consumes byte-array records from the Kafka topic {@code Constants.TOPIC_NAME},
     * deserializes each record value into a {@code Map} via {@code ObjTool.ByteToObject},
     * batch-inserts the accumulated maps into Oracle, and only then commits the Kafka
     * offsets manually (at-least-once delivery: a crash between insert and commit
     * re-delivers the batch).
     *
     * <p>Created by wanggang, 2018-04-12.
     */
    public static void consumerData(){
        long beginTime = System.currentTimeMillis();
        System.out.println("开始时间："+new Date());
        Properties props = new Properties();
        // Kafka broker list
        props.put("bootstrap.servers", Constants.KAFKA_SERVERS);
        // consumer group id
        props.put("group.id", "2");
        /*
         * auto.offset.reset:
         *   earliest - resume from the committed offset; if none, consume from the beginning
         *   latest   - resume from the committed offset; if none, consume only new records
         *   none     - throw if any partition lacks a committed offset
         * This setting only matters when the group has no committed offsets yet.
         */
        props.put("auto.offset.reset", "earliest");
        // offsets are committed manually, only after a successful DB insert
        props.put("enable.auto.commit", "false");
        // if no heartbeat within this window the consumer is considered dead and
        // its partitions are reassigned to other group members
        props.put("session.timeout.ms", "30000");
        // key deserializer
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // value deserializer
        props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        // max fetch size. NOTE(review): "fetch.message.max.bytes" is a legacy (0.8-era)
        // old-consumer property; the new consumer uses "max.partition.fetch.bytes" —
        // confirm against the Kafka client version actually on the classpath.
        props.put("fetch.message.max.bytes", 1024 * 1024 * 100);
        // NOTE(review): the while(true) below only exits via exception, so each iteration
        // of this loop runs one consumer until it fails, then starts the next one.
        for (int i = 0; i < Constants.KAFKA_PARTITION_NUM; i++) {
            // create the consumer and subscribe to the topic
            KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<String, byte[]>(props);
            consumer.subscribe(Arrays.asList(Constants.TOPIC_NAME));
            // records accumulated since the last successful insert/commit
            List<Map<String, Object>> dataList = new ArrayList<Map<String, Object>>();
            try {
                while (true) {
                    // poll with a 1-second timeout
                    ConsumerRecords<String, byte[]> records = consumer.poll(1000);
                    System.out.println("每次拉取数据量="+records.count());
                    for (ConsumerRecord<String, byte[]> record : records) {
                        // NOTE(review): per-record deserialization should ideally be handed
                        // off to a worker pool rather than done on the polling thread
                        @SuppressWarnings("unchecked")
                        Map<String, Object> map = (Map<String, Object>) ObjTool.ByteToObject(record.value());
                        dataList.add(map);
                    }
                    long beginTime2 = System.currentTimeMillis();
                    if (!dataList.isEmpty()) {
                        insertData(dataList);
                        dataList.clear();
                        // commit offsets only after the batch is safely in the DB
                        consumer.commitSync();
                    }
                    System.out.println("插入数据耗时："+(System.currentTimeMillis()-beginTime2)/1000);
                }
            } catch (Exception e) {
                System.out.println("消费失败....");
                e.printStackTrace();
            } finally {
                consumer.close();
            }
        }
        long endTime = System.currentTimeMillis();
        System.out.println("消费耗时："+(endTime-beginTime)/1000);
    }

    /**
     * Batch-inserts the given rows into table WG_TEST in a single transaction.
     * Each map supplies values keyed by the names in {@link #columns}; a missing
     * key inserts NULL for that column. Rolls back the transaction on failure.
     *
     * @param list rows to insert; each map is one row keyed by column name
     */
    public static void insertData(List<Map<String, Object>> list) {
        long beginTime = System.currentTimeMillis();
        Connection conn = null;
        PreparedStatement pstat = null;
        String tableName = "WG_TEST";
        // Build: insert into WG_TEST ("ID","XM",...) values(?,?,...)
        // Column names are quoted identifiers; values are bound via '?' placeholders
        // (parameterized — no value ever concatenated into the SQL string).
        StringBuilder sql = new StringBuilder("insert into " + tableName + " (");
        StringBuilder paramSql = new StringBuilder(" values(");
        for (int i = 0; i < columns.length; i++) {
            sql.append('"').append(columns[i]).append('"');
            paramSql.append('?');
            if (i < columns.length - 1) {
                sql.append(',');
                paramSql.append(',');
            }
        }
        sql.append(')');
        paramSql.append(')');
        String resultSql = sql.append(paramSql).toString();
        try {
            conn = DBUtil.getConnect(userName, passWord, driver, url);
            // commit the whole batch atomically
            conn.setAutoCommit(false);
            pstat = conn.prepareStatement(resultSql);
            for (Map<String, Object> map : list) {
                for (int k = 0; k < columns.length; k++) {
                    pstat.setObject(k + 1, map.get(columns[k]));
                }
                pstat.addBatch();
            }
            pstat.executeBatch();
            conn.commit();
            long endTime = System.currentTimeMillis();
            System.out.println("插入数据量："+list.size()+",插入用时："+(endTime-beginTime));
        } catch (Exception e) {
            System.out.println("插入数据失败");
            e.printStackTrace();
            // undo the partially-executed batch so the connection is not left
            // mid-transaction (autoCommit is off)
            if (conn != null) {
                try {
                    conn.rollback();
                } catch (Exception rollbackEx) {
                    rollbackEx.printStackTrace();
                }
            }
        } finally {
            DBUtil.closeDB(conn, pstat, null);
        }
    }

    public static void main(String[] args) {
        consumerData();
    }
}

