package com.sg.java.apps;

import com.sg.java.PropertiesUtil;
import com.sg.java.ResourcePath;
import com.sg.java.util.DateUtils;
import com.sg.java.util.HBaseUtils;
import com.sg.java.util.HdfsUtils;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;
import java.util.StringJoiner;

/**
 * Exports an HBase table to a tab-separated text file on HDFS.
 *
 * <p>Usage: {@code HBaseToHdfs <tableName> <hdfsPath>}. The table name selects which
 * value column family and flag qualifier are exported: names containing "vol" use
 * {@code U}/{@code PHASE_FLAG}, names containing "power" use {@code P}/{@code DATA_TYPE},
 * anything else uses {@code I}/{@code PHASE_FLAG}.
 *
 * <p>Each output line holds the 96 per-interval values (15-minute points of one day,
 * per {@link DateUtils#pointHHmmssToMap()}) followed by METER_ID, DATA_DATE, ORG_NO and
 * the flag column, joined by tabs. Missing cells are written as the string "null"
 * (Bytes.toString(null) → null → StringJoiner renders "null") — same as the original
 * behavior; downstream consumers presumably rely on this. TODO confirm.
 */
public class HBaseToHdfs {

    private static final Logger log = LoggerFactory.getLogger(HBaseToHdfs.class);

    /** Rows requested per scan page; paging stops when a page comes back short. */
    public static int batch = 500000;

    // Column family and qualifier byte constants for the source tables.
    public static       byte[]               info_column          = Bytes.toBytes("info");
    public static       byte[]               METER_ID_qualifier   = Bytes.toBytes("METER_ID");
    public static       byte[]               DATA_DATE_qualifier  = Bytes.toBytes("DATA_DATE");
    public static       byte[]               ORG_NO_qualifier     = Bytes.toBytes("ORG_NO");
    public static       byte[]               U_column             = Bytes.toBytes("U");   // voltage
    public static       byte[]               I_column             = Bytes.toBytes("I");   // current
    public static       byte[]               P_column             = Bytes.toBytes("P");   // power
    public static       byte[]               PHASE_FLAG_qualifier = Bytes.toBytes("PHASE_FLAG");
    public static       byte[]               DATA_TYPE_qualifier  = Bytes.toBytes("DATA_TYPE");
    public static       byte[]               emptyValue           = Bytes.toBytes("");
    /** Maps interval index 1..96 to the HHmmss qualifier bytes for that time point. */
    public static final Map<Integer, byte[]> map                  = DateUtils.pointHHmmssToMap();


    /**
     * Entry point: scans the named HBase table page by page and writes every row
     * as one tab-separated line to the given HDFS path (overwriting any existing file).
     *
     * @param args {@code args[0]} = table name (without namespace),
     *             {@code args[1]} = target HDFS path (prefix added if absent)
     * @throws Exception on any HBase or HDFS failure
     */
    public static void main(String[] args) throws Exception {
        String name     = args[0];
        String hdfsPath = args[1].trim();
        if (!hdfsPath.startsWith(HdfsUtils.hdfsPathPrefix)) {
            hdfsPath = HdfsUtils.hdfsPathPrefix + hdfsPath;
        }

        // Choose which value column and which flag qualifier this table exports.
        byte[] valueColumn;
        byte[] flagQualifier;
        if (name.contains("vol")) {
            valueColumn   = U_column;
            flagQualifier = PHASE_FLAG_qualifier;
        } else if (name.contains("power")) {
            valueColumn   = P_column;
            flagQualifier = DATA_TYPE_qualifier;
        } else {
            valueColumn   = I_column;
            flagQualifier = PHASE_FLAG_qualifier;
        }

        TableName tableName = TableName.valueOf(HBaseUtils.withNamespace(name));
        Path      path      = new Path(hdfsPath);

        // try-with-resources guarantees the connection, table and output streams are
        // released even when the scan or the write fails mid-way (the original code
        // leaked all of them on any exception).
        try (Connection conn = HBaseUtils.getHBaseConn(
                     PropertiesUtil.createPropertiesFromResource(ResourcePath.hbase_properties));
             Table table = conn.getTable(tableName);
             FSDataOutputStream os = HdfsUtils.fs.create(path, true);
             PrintWriter pw = new PrintWriter(os)) {

            long   total   = 0;
            long   count;
            byte[] nextRow = null;   // last row key seen; resume point for the next page

            do {
                count = 0;
                Scan scan = new Scan();
                scan.setLimit(batch);
                if (nextRow != null) {
                    // Exclusive start row: continue strictly after the last exported key.
                    scan.withStartRow(nextRow, false);
                }
                scan.setCaching(1000);
                // Close each page's scanner — the original leaked one scanner per page.
                try (ResultScanner scanner = table.getScanner(scan)) {
                    for (Result result : scanner) {
                        count++;
                        nextRow = result.getRow();
                        StringJoiner sj = new StringJoiner("\t");
                        // 96 time points of the day, in qualifier order 1..96.
                        for (int i = 1; i <= 96; i++) {
                            sj.add(Bytes.toString(result.getValue(valueColumn, map.get(i))));
                        }
                        sj.add(Bytes.toString(result.getValue(info_column, METER_ID_qualifier)));
                        sj.add(Bytes.toString(result.getValue(info_column, DATA_DATE_qualifier)));
                        sj.add(Bytes.toString(result.getValue(info_column, ORG_NO_qualifier)));
                        sj.add(Bytes.toString(result.getValue(info_column, flagQualifier)));
                        pw.println(sj);
                    }
                }
                total += count;
                log.info("已写入:{}", total);
            } while (count == batch);   // a short page means the table is exhausted

            pw.flush();
            // PrintWriter swallows IOExceptions; surface silent write failures explicitly.
            if (pw.checkError()) {
                throw new IOException("write to HDFS failed: " + hdfsPath);
            }
        }
    }

}
