package com.gtmc.mould.impala;

import com.cloudera.impala.jdbc41.DataSource;
import com.cloudera.impala.jdbc41.Driver;
import com.gtmc.mould.jsonstr.JsonArrResolving;
import com.gtmc.mould.kafka.PaasKafkaConsumer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

import java.io.IOException;
import java.security.PrivilegedAction;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.UUID;
import java.util.logging.Logger;

/**
 * Consumes Kafka records and writes them into Impala.
 *
 * @author MaLiang
 * @version 1.0
 * @since 2020/8/6 16:59
 **/

//Inserts quoted-mould data into Impala (ods.ods_mo_quoted)
public class ImpalaSink {
    static String jdbc_driver = "com.cloudera.impala.jdbc41.Driver";
    /**
     * Kerberos-related parameters carried in the JDBC URL:
     *  AuthMech:        0 = no auth, 1 = Kerberos, 2 = user name, 3 = user name + password, 6 = Hadoop delegated auth
     *  KrbRealm:        realm name defined by your KDC
     *  KrbHostFQDN:     FQDN of the server host (hostname or DNS-resolved name)
     *  KrbServiceName:  service principal of the HiveServer2/Impala daemon (defaults to "hive")
     *  KrbAuthType:     0 = Subject-based, 1 = JAAS-based, 2 = current client's ticket-cache
     */
    //Local test URL
    //private static String conn_url = "jdbc:impala://192.168.10.111:21050/ods";
    /**
     * Intranet test URL (Impala JDBC, Kerberos AuthMech=1).
     */
    private static String conn_url = "jdbc:impala://172.16.136.59:21050/ods;AuthMech=1;KrbHostFQDN=qfwa0468;KrbServiceName=impala;KrbRealm=GTMC.COM";
    //Production URL
    //static String conn_url = "jdbc:impala://192.168.10.111:21050/ods";
    //NOTE(review): credentials are hard-coded; they should be moved to external configuration.
    private static String user = "dintm";
    private static String pwd = "pass1234";
    private static Connection conn = null;
    private static PreparedStatement ps = null;
    //Timestamp layout for created_time. DateTimeFormatter is immutable and
    //thread-safe, unlike the SimpleDateFormat the code previously created per call.
    private static final DateTimeFormatter CREATED_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    Logger logger = Logger.getLogger(ImpalaSink.class.getName());


    /**
     * Inserts one quoted-mould record into {@code ods.ods_mo_quoted}.
     * <p>
     * Performs Kerberos authentication, opens the connection inside the
     * authenticated context, binds all column values positionally in the same
     * order as the column list of the INSERT statement, and executes it.
     * {@code created_time} is set to the current system time and
     * {@code created_user_name} is fixed to "dintm".
     * <p>
     * All exceptions are caught and logged (best-effort sink); the statement
     * and connection are always released in the {@code finally} block.
     */
    public  void SinkImpala(String key, String tierOnePartNo, String partNo, Double partLength, Double partWidth, Double partHeight, Double partThickness, String partMaterial, String modelTypeName, String mouldMaterial, Double mouldLength, Double mouldWidth, Double mouldHeight, Double mouldWeight, int equipmentTonnage, int takeNum, String equipmentTypeName, String processName, int camNum, int mouldStepNum, Double emptyCoefficient, int lifeNum, String manufacturer, String unitPrice, String quoteTime, String supplier, String dataStatus){
        try {
            logger.info("==========调用Kerberos认证返回数据库连接==========");
            kerberosAuthenticationAndInitImpalaConnection();
            //getConnect();
            //Current system time, formatted for the created_time column
            String sysdate = LocalDateTime.now().format(CREATED_TIME_FORMAT);
            String created_user_name = "dintm";
            //BUG FIX: the original allocated a fresh ImpalaSink just to call
            //getUUID(); an instance method of this class can be called directly.
            String id = getUUID();
            String insert_sql = "insert into ods.ods_mo_quoted(" +
                    "id," +
                    "quoted_key," +
                    "tier_one_part_no," +
                    "part_no," +
                    "part_length," +
                    "part_width," +
                    "part_height," +
                    "part_thickness," +
                    "part_material," +
                    "mould_type," +
                    "mould_material," +
                    "mould_length," +
                    "mould_width," +
                    "mould_height," +
                    "mould_weight," +
                    "equipment_tonnage," +
                    "take_num," +
                    "equipment_type," +
                    "process_name," +
                    "cam_num," +
                    "mould_step_num," +
                    "empty_coefficient," +
                    "life_num," +
                    "manufacture," +
                    "unit_price," +
                    "quote_time," +
                    "supplier," +
                    "data_status," +
                    "created_time," +
                    "created_user_name) " +
                    "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
            logger.info("==========执行的insert_sql==========:"+insert_sql);
            ps = conn.prepareStatement(insert_sql);
            ps.setString(1,id);
            ps.setString(2,key);
            ps.setString(3,tierOnePartNo);
            ps.setString(4,partNo);
            ps.setDouble(5,partLength);
            ps.setDouble(6,partWidth);
            ps.setDouble(7,partHeight);
            ps.setDouble(8,partThickness);
            ps.setString(9,partMaterial);
            ps.setString(10,modelTypeName);
            ps.setString(11,mouldMaterial);
            ps.setDouble(12,mouldLength);
            ps.setDouble(13,mouldWidth);
            ps.setDouble(14,mouldHeight);
            ps.setDouble(15,mouldWeight);
            //BUG FIX: equipmentTonnage is an int; the original bound it with
            //setDouble, inconsistent with the other int parameters below.
            ps.setInt(16,equipmentTonnage);
            ps.setInt(17,takeNum);
            ps.setString(18,equipmentTypeName);
            ps.setString(19,processName);
            ps.setInt(20,camNum);
            ps.setInt(21,mouldStepNum);
            ps.setDouble(22,emptyCoefficient);
            ps.setInt(23,lifeNum);
            ps.setString(24,manufacturer);
            ps.setString(25,unitPrice);
            ps.setString(26,quoteTime);
            ps.setString(27,supplier);
            ps.setString(28,dataStatus);
            ps.setString(29,sysdate);
            ps.setString(30,created_user_name);
            ps.executeUpdate();
            logger.info("==========数据写入hive成功==========");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            //BUG FIX: the original closed ps/conn only on the success path,
            //leaking both whenever any exception was thrown above.
            closeQuietly();
        }
    }

    /**
     * Returns a random UUID with the dashes stripped, used as the row id.
     */
    public  String getUUID(){
        UUID uuid=UUID.randomUUID();
        String ID=uuid.toString().replace("-","");
        System.out.println("ID is :"+ID);
        return ID;

    }

    /**
     * Opens the Impala JDBC connection.
     * <p>
     * Must be called from inside the authenticated UserGroupInformation
     * context (see {@link #kerberosAuthenticationAndInitImpalaConnection()}),
     * otherwise the driver fails with
     * "Unable to obtain Principal Name for authentication".
     *
     * @throws SQLException if the driver class is missing or the connection
     *                      cannot be established
     */
    public static void getConnect() throws SQLException {
        try {
            Class.forName(jdbc_driver);
        }
        catch (ClassNotFoundException e){
            //BUG FIX: the original swallowed every exception here, leaving
            //conn == null and causing a NullPointerException later in
            //SinkImpala. Rethrow under the already-declared SQLException.
            throw new SQLException("Impala JDBC driver not found: " + jdbc_driver, e);
        }
        conn = DriverManager.getConnection(conn_url,user,pwd);
    }

    /**
     * Performs a Kerberos keytab login and then initializes the Impala JDBC
     * connection inside the authenticated context ({@code doAs}). The
     * krb5.conf and keytab files are expected in the process working
     * directory when deployed on the server.
     *
     * @throws IOException if the keytab login fails
     */
    public static void kerberosAuthenticationAndInitImpalaConnection() throws IOException {
        //Local-debug file paths:
/*        String rootPath = System.getProperty("user.dir").toString()+"\\src\\main\\Krb5Conf\\";
        String krb5_conf=rootPath+"krb5.conf";
        String krb5_keytab = rootPath+"dintm_dev.keytab";*/
        //Paths used when deployed on the server
        String rootPath = System.getProperty("user.dir");
        System.out.println("=================================rootPath:"+rootPath);
        String krb5_conf=rootPath+"/krb5.conf";
        String krb5_keytab = rootPath+"/dintm_dev.keytab";
        String krb5_user = "dintm@GTMC.COM";
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        System.setProperty("java.security.krb5.conf",krb5_conf);
        System.setProperty("java.security.krb5.realm","GTMC.COM");
        System.setProperty("java.security.krb5.kdc","qfwa0466");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(krb5_user, krb5_keytab);
        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
        //The connection must be created under doAs so the driver can pick up
        //the Kerberos credentials of the logged-in user.
        loginUser.doAs(new PrivilegedAction<Void>() {
            @Override
            public Void run(){
                try {
                    getConnect();
                }
                catch (SQLException e){
                    System.out.println("==========fail init Connection==========");
                    e.printStackTrace();
                }
                return null;
            }
        });

    }

    /**
     * Releases the shared statement and connection, ignoring close-time
     * errors (there is nothing useful to do about them at this point).
     */
    private static void closeQuietly() {
        if (ps != null) {
            try { ps.close(); } catch (SQLException ignored) { /* best-effort close */ }
            ps = null;
        }
        if (conn != null) {
            try { conn.close(); } catch (SQLException ignored) { /* best-effort close */ }
            conn = null;
        }
    }

}
