package com.etl;

import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;

public class Hive2RMDS {

	/** insert_model value "0": stream Hive rows and insert in batches of BATCH_SIZE_INT. */
	private static final String INSERT_MODEL_BATCH = "0";
	/** insert_model value "1": load the whole Hive result set into memory, then insert once. */
	private static final String INSERT_MODEL_FULL = "1";

	private static int BATCH_SIZE_INT = 5000;                  // effective batch size (rows per executeBatch)
	private static String batch_size = "";                     // raw --batch_size argument (set via reflection)
	private static String insert_model = INSERT_MODEL_BATCH;   // processing mode, defaults to batched

	// Hive source parameters
	private static String hive_driver = "org.apache.hive.jdbc.HiveDriver";
	private static String hive_url = null;
	private static String hive_username = "hive2";
	private static String hive_password = "";
	private static String hive_hql = null;

	// Target RDBMS parameters
	private static String rdms_driver = "com.mysql.jdbc.Driver";
	private static String rdms_url = null;
	private static String rdms_username = null;
	private static String rdms_password = null;
	private static String rdms_presql = null;
	private static String rdms_tableName = null;
	private static String rdms_columnNames = null;

	/**
	 * Entry point. Parses "--key value" pairs from the command line, applies them
	 * to the static configuration fields, optionally runs a preparatory SQL
	 * statement against the target database, then copies the Hive query result
	 * into the target table either in batches or in one shot, depending on
	 * {@code insert_model}.
	 */
	public static void main(String[] args) {

		Map<String,String> map = getParams(args);
		init(map);

		// Optional SQL executed against the target RDBMS before loading (e.g. truncate).
		if (map.containsKey("rdms_presql")) {
			exePreSql();
		}

		// Constant-first equals() avoids an NPE if insert_model was explicitly nulled.
		if (INSERT_MODEL_BATCH.equals(insert_model)) {
			batchInsertRdms();
		} else if (INSERT_MODEL_FULL.equals(insert_model)) {
			fullInsertRdms();
		}
	}

	/**
	 * Copies recognized parameters from the parsed argument map onto the static
	 * configuration fields via reflection, echoes them (passwords masked), and
	 * validates batch_size.
	 *
	 * @param map parsed command-line parameters, keyed by field name
	 */
	private static void init(Map<String,String> map) {

		String[] paramNames = {"hive_driver","hive_url","hive_username","hive_password","hive_hql",
							   "rdms_driver","rdms_url","rdms_username","rdms_password",
							   "rdms_presql","rdms_tableName","rdms_columnNames","insert_model","batch_size"};
		for (String paramName : paramNames) {
			if (map.containsKey(paramName)) {
				try {
					Field field = Hive2RMDS.class.getDeclaredField(paramName);
					field.setAccessible(true);
					// All target fields are static, so the instance argument is null.
					field.set(null, map.get(paramName));
				} catch (IllegalArgumentException | IllegalAccessException | NoSuchFieldException | SecurityException e) {
					e.printStackTrace();
				}
			}
		}

		// Echo the effective parameters; never print credentials in clear text.
		System.out.println("解析后参数如下：");
		for (String paramName : paramNames) {
			if (map.containsKey(paramName)) {
				if (paramName.contains("password")) {
					System.out.println("key:" + paramName + " value:********");
				} else {
					System.out.println("key:" + paramName + " value:" + map.get(paramName));
				}
			}
		}

		// Apply batch_size only when it parses to a positive integer; otherwise
		// keep the default (a zero or negative size would silently disable batching).
		String batchSizeParams = map.get("batch_size");
		if (batchSizeParams != null && !batchSizeParams.trim().isEmpty()) {
			try {
				int parsed = Integer.parseInt(batchSizeParams.trim());
				if (parsed > 0) {
					BATCH_SIZE_INT = parsed;
				} else {
					System.out.println("batch_size参数有误，使用默认批处理数:" + BATCH_SIZE_INT);
				}
			} catch (NumberFormatException e) {
				System.out.println("batch_size参数有误，使用默认批处理数:" + BATCH_SIZE_INT);
			}
		}
	}

	/**
	 * Parses command-line arguments of the form {@code --key value} (the leading
	 * "--" is optional) into a map. A trailing key without a value is ignored.
	 *
	 * @param params raw command-line arguments
	 * @return key/value map of the parsed parameters
	 */
	private static Map<String,String> getParams(String[] params) {
		Map<String,String> map = new HashMap<String,String>();
		String key = null;
		for (int i = 0; i < params.length; i++) {
			if (i % 2 == 0) {
				key = params[i].startsWith("--") ? params[i].substring(2) : params[i];
			} else {
				map.put(key, params[i]);
				key = null;
			}
		}
		return map;
	}

	/**
	 * Full-load mode: reads the entire Hive result set into memory, then inserts
	 * it into the target table in a single transaction.
	 */
	private static void fullInsertRdms() {

		System.out.println("开始全量读取执行 ... ... ");
		List<Map<String,String>> list = getHiveList();
		if (list != null && !list.isEmpty()) {
			try {
				insertRdms(list);
			} catch (SQLException e) {
				e.printStackTrace();
			}
		} else {
			System.out.println("hive2中未找到记录");
		}
	}

	/**
	 * Batch mode: streams the Hive result set and flushes a transactional insert
	 * every BATCH_SIZE_INT rows, so memory usage stays bounded. Rows from a batch
	 * that fails to insert are not counted in the reported total.
	 */
	private static void batchInsertRdms() {

		Connection hiveconn = null;
		ResultSet hivers = null;
		Statement hiveps = null;

		System.out.println("开始分批次读取执行 ... ... 每批次最大处理数：" + BATCH_SIZE_INT);
		String[] columns = trimRdms_columnNames(rdms_columnNames);
		int totalSize = 0;
		try {
			hiveconn = DBUtil.getConnection(hive_driver, hive_url, hive_username, hive_password);
			hiveps = hiveconn.createStatement();
			hivers = hiveps.executeQuery(hive_hql);

			List<Map<String,String>> list = new ArrayList<Map<String,String>>();
			while (hivers.next()) {
				list.add(readRow(hivers, columns));

				// Flush once a full batch has accumulated.
				if (list.size() == BATCH_SIZE_INT) {
					insertRdms(list);
					totalSize += BATCH_SIZE_INT;
					list.clear();
				}
			}

			// Flush the final partial batch, if any.
			if (!list.isEmpty()) {
				insertRdms(list);
				totalSize += list.size();
				list.clear();
			}
		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			DBUtil.close(hivers, hiveps, hiveconn);
		}
		System.out.println("hive中获取到记录数:" + totalSize);
	}

	/**
	 * Reads all rows produced by hive_hql into memory.
	 *
	 * @return the rows as column-name/value maps, or {@code null} if the query failed
	 */
	private static List<Map<String,String>> getHiveList() {

		Connection hiveconn = null;
		ResultSet hivers = null;
		Statement hiveps = null;
		String[] columns = trimRdms_columnNames(rdms_columnNames);
		List<Map<String,String>> list = null;
		try {
			hiveconn = DBUtil.getConnection(hive_driver, hive_url, hive_username, hive_password);
			hiveps = hiveconn.createStatement();
			hivers = hiveps.executeQuery(hive_hql);

			list = new ArrayList<Map<String,String>>();
			while (hivers.next()) {
				list.add(readRow(hivers, columns));
			}
		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			DBUtil.close(hivers, hiveps, hiveconn);
		}
		System.out.println("hive中获取到记录数:" + (list != null ? list.size() : 0));
		return list;
	}

	/**
	 * Extracts one result-set row as a column-name/value map.
	 *
	 * @param rs      positioned result set
	 * @param columns column names to read
	 * @return the row values keyed by column name
	 * @throws SQLException if a column cannot be read
	 */
	private static Map<String,String> readRow(ResultSet rs, String[] columns) throws SQLException {
		Map<String,String> row = new HashMap<String,String>();
		for (String column : columns) {
			row.put(column, rs.getString(column));
		}
		return row;
	}

	/**
	 * Inserts the given rows into rdms_tableName as one JDBC batch inside a single
	 * transaction; rolls back and rethrows on failure so callers can account for
	 * rows that were not persisted.
	 *
	 * @param list rows to insert, keyed by (trimmed) column name
	 * @throws SQLException if the connection, batch execution or commit fails
	 */
	private static void insertRdms(List<Map<String,String>> list) throws SQLException {

		Connection conn = null;
		PreparedStatement ps = null;
		ResultSet rs = null;

		try {
			String[] columns = trimRdms_columnNames(rdms_columnNames);
			conn = DBUtil.getConnection(rdms_driver, rdms_url, rdms_username, rdms_password);
			System.out.println("Connect:" + conn.toString());
			// Build the parameterized insert from the trimmed column list so the SQL
			// carries no stray whitespace around column names.
			String sql = getInsertSqlString(rdms_tableName, String.join(",", columns).toUpperCase());
			System.out.println("sql:" + sql);
			conn.setAutoCommit(false);

			ps = conn.prepareStatement(sql);
			for (Map<String,String> map : list) {
				// Bind values in the same order as the column list.
				for (int i = 1; i <= columns.length; i++) {
					ps.setObject(i, map.get(columns[i - 1]));
				}
				ps.addBatch();
			}

			int[] result = ps.executeBatch();
			conn.commit();

			System.out.println("insert : " + result.length);
		} catch (SQLException e) {
			// Undo the partially executed batch, then let the caller decide what to do.
			if (conn != null) {
				try {
					conn.rollback();
				} catch (SQLException ignored) {
					// Rollback failure is secondary; the original exception is rethrown below.
				}
			}
			throw e;
		} finally {
			DBUtil.close(rs, ps, conn);
		}
	}

	/**
	 * Builds a parameterized insert statement:
	 * {@code insert into <table>(<cols>) values(?,?,...)}.
	 *
	 * @param tableName  target table
	 * @param fieldNames comma-separated column list
	 * @return the SQL text with one "?" placeholder per column
	 */
	private static String getInsertSqlString(String tableName, String fieldNames) {
		int size = fieldNames.split(",").length;
		StringBuilder sb = new StringBuilder();
		sb.append("insert into ").append(tableName).append("(").append(fieldNames).append(") ")
		  .append("values(").append(String.join(",", Collections.nCopies(size, "?"))).append(")");
		return sb.toString();
	}

	/**
	 * Executes rdms_presql (e.g. a cleanup/truncate statement) against the target
	 * database before the load starts.
	 */
	private static void exePreSql() {
		Connection conn = null;
		Statement ps = null;
		ResultSet rs = null;
		try {
			conn = DBUtil.getConnection(rdms_driver, rdms_url, rdms_username, rdms_password);
			ps = conn.createStatement();
			int size = ps.executeUpdate(rdms_presql);
			System.out.println("pre sql process record size : " + size);
		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			DBUtil.close(rs, ps, conn);
		}
	}

	/**
	 * Splits rdms_columnNames on "," and trims each entry.
	 *
	 * @param rdms_columnNames comma-separated column list; must not be null
	 * @return the trimmed column names
	 * @throws IllegalStateException if the column list was not supplied
	 */
	private static String[] trimRdms_columnNames(String rdms_columnNames) {
		if (rdms_columnNames == null) {
			// Fail fast with a clear message instead of a bare NullPointerException.
			throw new IllegalStateException("rdms_columnNames is required");
		}
		List<String> trimmed = new ArrayList<String>();
		for (String column : rdms_columnNames.split(",")) {
			trimmed.add(column.trim());
		}
		return trimmed.toArray(new String[trimmed.size()]);
	}
}
