package com.sg.java.apps;

import com.sg.java.PropertiesUtil;
import com.sg.java.ResourcePath;
import com.sg.java.util.DateUtils;
import com.sg.java.util.HBaseUtils;
import com.sg.java.util.HdfsUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * Bulk-loads curve data (voltage / current / power) from tab-separated files on
 * HDFS into date-partitioned HBase tables.
 *
 * <p>Invoked with a single argument of comma-separated {@code <hdfsDir>:<type>}
 * pairs, e.g. {@code vol_1314:U,cur_1314:I,power_1314:P}. Each pair is processed
 * in parallel; the shared HBase {@link Connection} is documented thread-safe.
 */
public class GetDataFromHdfsFileToHBase {

    private static final FileSystem fs = HdfsUtils.fs;

    private static final Logger log = LoggerFactory.getLogger(GetDataFromHdfsFileToHBase.class);

    /** Common HDFS path prefix under which the input directories live. */
    private static final String prefix = HdfsUtils.hdfsPathPrefix;

    // "info" column family and its metadata qualifiers.
    public static       byte[]               info_column          = Bytes.toBytes("info");
    public static       byte[]               METER_ID_qualifier   = Bytes.toBytes("METER_ID");
    public static       byte[]               DATA_DATE_qualifier  = Bytes.toBytes("DATA_DATE");
    public static       byte[]               ORG_NO_qualifier     = Bytes.toBytes("ORG_NO");
    // Measurement column families, one per curve type (voltage / current / power).
    public static       byte[]               U_column             = Bytes.toBytes("U");
    public static       byte[]               I_column             = Bytes.toBytes("I");
    public static       byte[]               P_column             = Bytes.toBytes("P");
    // Type-specific discriminator qualifiers (U/I use PHASE_FLAG, P uses DATA_TYPE).
    public static       byte[]               PHASE_FLAG_qualifier = Bytes.toBytes("PHASE_FLAG");
    public static       byte[]               DATA_TYPE_qualifier  = Bytes.toBytes("DATA_TYPE");
    public static       byte[]               emptyValue           = Bytes.toBytes("");
    /** Maps timepoint index (1..96) to its HH:mm:ss qualifier bytes. */
    public static final Map<Integer, byte[]> map                  = DateUtils.pointHHmmssToMap();

    /** Number of Puts buffered per date before they are flushed to HBase. */
    public static int putsBatch = 5000;

    /**
     * Entry point: parses the {@code dir:type} pairs, fans them out in parallel
     * and guarantees the HBase connection and the FileSystem are closed.
     *
     * @param args args[0] = comma-separated {@code <hdfsDir>:<type>} pairs
     * @throws Exception on connection/close failures; per-pair I/O failures are
     *                   rethrown as {@link RuntimeException} from the stream
     */
    public static void main(String[] args) throws Exception {
        long s = System.currentTimeMillis();
        // FIX: fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 1 || StringUtils.isBlank(args[0])) {
            throw new IllegalArgumentException(
                    "Expected one argument of <hdfsDir>:<type> pairs, e.g. vol_1314:U,cur_1314:I,power_1314:P");
        }
        //vol_1314:U,cur_1314:I,power_1314:P
        String[] split = args[0].split(",");
        Connection hBaseConn = HBaseUtils.getHBaseConn(PropertiesUtil.createPropertiesFromResource(ResourcePath.hbase_properties));
        try {
            Arrays.stream(split).parallel().forEach(
                    item -> {
                        final String[] strings = item.split(":");
                        try {
                            readAndWrite(prefix + "/" + strings[0], strings[1], hBaseConn);
                        } catch (IOException e) {
                            throw new RuntimeException(e);
                        }
                    }
            );
        } finally {
            // FIX: close resources even when a worker throws (previously leaked on failure).
            hBaseConn.close();
            fs.close();
        }
        log.info("全部任务结束，总耗时:{}", System.currentTimeMillis() - s);
    }

    /**
     * Reads every file under {@code directoryPathString}, converts each line into
     * a {@link Put} and writes it to the HBase table matching the line's
     * DATA_DATE, flushing in batches of {@link #putsBatch}.
     *
     * <p>Each line is expected to carry 100 tab-separated columns: 96 timepoint
     * values followed by METER_ID, DATA_DATE, ORG_NO and the type discriminator
     * — NOTE(review): assumed from the indexing below; confirm against the
     * upstream export format. Only DATA_DATE values 20220813/20220814 are
     * supported (hard-coded table partitions).
     *
     * @param directoryPathString absolute HDFS directory holding the input files
     * @param type                measurement type: "U", "I" or "P"
     * @param hBaseConn           shared HBase connection (thread-safe)
     * @throws IOException      on HDFS read failures
     * @throws RuntimeException on HBase write failures or an unknown type
     */
    private static void readAndWrite(String directoryPathString, String type, Connection hBaseConn) throws IOException {
        FileStatus[] status = fs.listStatus(new Path(directoryPathString));
        byte[] typeColumn;
        byte[] uniqueQualifier;
        // One Put buffer and one target table per supported DATA_DATE partition.
        Map<String, List<Put>> puts = new HashMap<>();
        puts.put("20220813", new ArrayList<>(putsBatch));
        puts.put("20220814", new ArrayList<>(putsBatch));
        Map<String, Table> tables = new HashMap<>();
        switch (type) {
            case "U":
                typeColumn = U_column;
                uniqueQualifier = PHASE_FLAG_qualifier;
                tables.put("20220813", hBaseConn.getTable(TableName.valueOf(HBaseUtils.withNamespace("cms_volt_curve_20220813"))));
                tables.put("20220814", hBaseConn.getTable(TableName.valueOf(HBaseUtils.withNamespace("cms_volt_curve_20220814"))));
                break;
            case "I":
                typeColumn = I_column;
                uniqueQualifier = PHASE_FLAG_qualifier;
                tables.put("20220813", hBaseConn.getTable(TableName.valueOf(HBaseUtils.withNamespace("cms_cur_curve_20220813"))));
                tables.put("20220814", hBaseConn.getTable(TableName.valueOf(HBaseUtils.withNamespace("cms_cur_curve_20220814"))));
                break;
            case "P":
                typeColumn = P_column;
                uniqueQualifier = DATA_TYPE_qualifier;
                tables.put("20220813", hBaseConn.getTable(TableName.valueOf(HBaseUtils.withNamespace("cms_power_curve_20220813"))));
                tables.put("20220814", hBaseConn.getTable(TableName.valueOf(HBaseUtils.withNamespace("cms_power_curve_20220814"))));
                break;
            default:
                throw new RuntimeException("Unknown type");
        }
        long s = System.currentTimeMillis();
        long finalTotalCount = 0;
        try {
            for (FileStatus file : status) {
                log.info("标识类型:{}，正在读取文件名:{}", type, file.toString());
                // FIX: FileSystem#open takes a Path, not a FileStatus — use file.getPath().
                // try-with-resources replaces the manual IOUtils.closeStream calls, and the
                // charset is pinned to UTF-8 so decoding no longer depends on the platform default.
                try (FSDataInputStream in = fs.open(file.getPath());
                     BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
                    String dataLine;
                    long tempCount = 0;   // lines read since the last batch flush
                    long totalCount = 0;  // lines read from this file
                    while ((dataLine = reader.readLine()) != null) {
                        tempCount++;
                        totalCount++;
                        finalTotalCount++;
                        String[] dataCol = dataLine.split("\t");
                        String id = dataCol[96];
                        String ds = dataCol[97];
                        String orgNo = dataCol[98];
                        String unique = dataCol[99];
                        List<Put> bucket = puts.get(ds);
                        if (bucket == null) {
                            // FIX: fail with a clear message instead of an opaque NPE when a line
                            // carries a DATA_DATE that has no matching table partition.
                            throw new IllegalStateException("Unexpected DATA_DATE '" + ds + "' in " + file.getPath());
                        }
                        Put put = new Put(Bytes.toBytes(id + "-" + ds + "-" + unique));
                        put.addColumn(info_column, METER_ID_qualifier, StringUtils.isNotBlank(id) ? Bytes.toBytes(id) : emptyValue);
                        put.addColumn(info_column, DATA_DATE_qualifier, StringUtils.isNotBlank(ds) ? Bytes.toBytes(ds) : emptyValue);
                        put.addColumn(info_column, ORG_NO_qualifier, StringUtils.isNotBlank(orgNo) ? Bytes.toBytes(orgNo) : emptyValue);
                        put.addColumn(info_column, uniqueQualifier, StringUtils.isNotBlank(unique) ? Bytes.toBytes(unique) : emptyValue);
                        // Columns 0..95 are the 96 timepoint values; blank cells are skipped.
                        for (int i = 0; i < 96; i++) {
                            if (StringUtils.isNotBlank(dataCol[i])) {
                                put.addColumn(typeColumn, map.get(i + 1), Bytes.toBytes(dataCol[i]));
                            }
                        }
                        bucket.add(put);
                        // Flush every putsBatch (5000) lines to bound memory use.
                        if (tempCount == putsBatch) {
                            flushBuffers(puts, tables);
                            log.info("标识类型:{}\t此文件此次读取:{}\t此文件此次写入:{}\t此文件总读取写入:{}", type, tempCount, tempCount, totalCount);
                            tempCount = 0;
                        }
                    }
                    // End of file: flush whatever remains below the batch threshold.
                    flushBuffers(puts, tables);
                }
            }
        } finally {
            // FIX: previously the tables leaked when an exception escaped the read loop.
            tables.forEach((_ds, table) -> {
                try {
                    table.close();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });
        }
        log.info("标识类型:{}任务结束，总耗时:{}，总条数:{}", type, System.currentTimeMillis() - s, finalTotalCount);
    }

    /**
     * Writes every non-empty buffered Put list to its date's table and clears it.
     * HBase {@link IOException}s are rethrown unchecked, matching the callers'
     * existing error-handling style.
     */
    private static void flushBuffers(Map<String, List<Put>> puts, Map<String, Table> tables) {
        puts.forEach((ds, list) -> {
            if (!list.isEmpty()) {
                try {
                    tables.get(ds).put(list);
                    list.clear();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }
}
