package com.huawei.dli.flink.sye.sink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.huawei.dli.flink.sye.config.FlinkOggMetaConfig;
import com.huawei.dli.flink.sye.utils.LoginUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scalikejdbc.IsolationLevel;

import java.security.PrivilegedExceptionAction;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * Flink sink that writes OGG-style JSON change records into HBase.
 *
 * <p>Each incoming record is a JSON document with a {@code table} field (source table name)
 * and an {@code after} object holding the column values. The source table is mapped to a
 * target HBase table via {@code sourceTargetMap}; the rowkey column comes from
 * {@code rowkeysMap} and the columns to persist from {@code tableColumnMap}. All values are
 * written under the single column family {@code data}.
 *
 * <p>Authentication is Kerberos-based: keytab and krb5.conf are extracted to {@code tmpDir}
 * on {@link #open(Configuration)} and deleted again on {@link #close()}.
 */
public class HBaseSinkString extends RichSinkFunction<String> {

  private static final Logger logger = LoggerFactory.getLogger(HBaseSinkString.class);

  private String userPrincipal;
  // Working directory where the keytab/krb5.conf are materialized at runtime.
  private String tmpDir = "/tmp/hbaseSink.";
  private org.apache.hadoop.conf.Configuration config = null;
  private Connection conn = null;

  // Source table name -> rowkey column name.
  public Map<String, String> rowkeysMap = null;
  // Target HBase table names; missing tables are auto-created in connect().
  public List<String> targetTableNames = null;
  // Source table name -> list of columns to extract from the "after" payload.
  public Map<String, List<String>> tableColumnMap = null;
  // Source table name -> target HBase table name.
  public Map<String, String> sourceTargetMap = null;

  // All data is written under this single column family.
  private String familyName = "data";

  /**
   * @param principal         Kerberos user principal used for the HBase login.
   * @param flinkOggMetaConfig carries the source/target table, rowkey and column mappings.
   */
  public HBaseSinkString(String principal, FlinkOggMetaConfig flinkOggMetaConfig) {
    this.userPrincipal = principal;
    this.rowkeysMap = flinkOggMetaConfig.rowkeysMap;
    this.targetTableNames = flinkOggMetaConfig.targetTableNames;
    this.tableColumnMap = flinkOggMetaConfig.tableColumnMap;
    this.sourceTargetMap = flinkOggMetaConfig.sourceTargetMap;
  }

  /**
   * Builds the Kerberos-enabled HBase configuration and opens the connection.
   */
  @Override
  public void open(Configuration parameters) throws Exception {
    config = HBaseConfiguration.create();
    config.set("hadoop.security.authentication", "kerberos");
    config.set("hadoop.rpc.protection", "privacy");
    config.addResource(this.getClass().getResource("/hbase-site.xml"));
    connect();
  }

  /**
   * Parses one JSON change record and writes its "after" image to the mapped HBase table.
   * Records that cannot be mapped (unknown source table, missing "after" block, missing
   * rowkey value) are skipped; any failure is logged without failing the job, matching the
   * original best-effort semantics.
   */
  @Override
  public void invoke(String value, Context context) throws Exception {
    try {
      JSONObject jsonObject = JSON.parseObject(value);
      if (jsonObject == null) {
        return;
      }
      String sourceTable = jsonObject.getString("table");
      // Resolve the target HBase table for this source table; skip unmapped tables.
      String targetTable = sourceTargetMap.get(sourceTable);
      if (targetTable == null) {
        return;
      }
      // Only the post-image ("after") is written; deletes/before-images are ignored.
      JSONObject afterData = jsonObject.getJSONObject("after");
      if (afterData == null) {
        return;
      }
      String rowkeyColumn = rowkeysMap.get(sourceTable);
      String rowkey = afterData.getString(rowkeyColumn);
      if (rowkey == null) {
        // Guard: Bytes.toBytes(null) would throw NPE; skip records without a rowkey.
        logger.warn("record from table {} is missing rowkey column {}, skipped",
            sourceTable, rowkeyColumn);
        return;
      }
      // Table instances are lightweight but must be closed after each use; the previous
      // code leaked one Table per record by stashing it in a field.
      try (Table table = conn.getTable(TableName.valueOf(targetTable))) {
        Put put = new Put(Bytes.toBytes(rowkey));
        for (String column : tableColumnMap.get(sourceTable)) {
          String content = afterData.getString(column);
          if (content != null) {
            put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(column), Bytes.toBytes(content));
          }
        }
        table.put(put);
      }
    } catch (Exception e) {
      // Best-effort sink: log with full stack trace but do not fail the job.
      logger.error("invoke catch exception", e);
    }
  }

  /**
   * Closes the HBase connection and removes the extracted Kerberos credential files.
   */
  @Override
  public void close() throws Exception {
    super.close();
    try {
      if (conn != null && !conn.isClosed()) {
        conn.close();
      }
      LoginUtil.deleteFile(tmpDir, "user.keytab");
      LoginUtil.deleteFile(tmpDir, "krb5.conf");
    } catch (Exception e) {
      logger.error("close catch exception", e);
    }
  }

  /**
   * Performs the Kerberos login and opens the HBase connection as the logged-in user,
   * auto-creating any missing target tables (their namespace must already exist).
   */
  private void connect() throws Exception {
    String userKeytabFile = LoginUtil.createFile(tmpDir, "user.keytab", this.getClass());
    String krb5File = LoginUtil.createFile(tmpDir, "krb5.conf", this.getClass());
    LoginUtil.setJaasConf(LoginUtil.Module.ZOOKEEPER.getName(), userPrincipal, userKeytabFile);
    LoginUtil.login(userPrincipal, userKeytabFile, krb5File, config);
    UserGroupInformation.getLoginUser().doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        conn = ConnectionFactory.createConnection(config);
        // Admin is AutoCloseable and was previously leaked.
        try (Admin admin = conn.getAdmin()) {
          for (String hbaseTableName : targetTableNames) {
            TableName tableName = TableName.valueOf(hbaseTableName);
            if (!admin.tableExists(tableName)) {
              admin.createTable(
                  new HTableDescriptor(tableName).addFamily(new HColumnDescriptor(familyName)));
            }
          }
          logger.info("batch hbase connect success");
        } catch (Exception e) {
          logger.error("batch hbase connect failed", e);
          throw e;
        }
        return null;
      }
    });
  }

}
