package org.leafDomain.file.impl;

import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.leafDomain.file.FileSource;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class FileSourceImpl implements FileSource {

    private final Properties properties;
    private BufferedReader reader;
    private int batchSize;
    /** Literal field separator for "txt"/"csv" files (NOT interpreted as a regex). */
    private String fieldDelimiter;
    /** Row-key position: field index for "txt"/"csv", capture-group id for "regex". */
    private Integer rowKeyIndex;
    private String fileType;
    /** JSON key whose value becomes the row key ("json" type only). */
    private String rowKeyCol;
    private Pattern patRegex;
    private int headerLineCount;

    /** "txt"/"csv"/"regex": column family -> (column -> field index / regex group id). */
    private Map<String, Map<String, Integer>> txtOrRegexMap;
    /** "json": column family -> (column -> source JSON key). */
    private Map<String, Map<String, String>> rdbOrJsonMap;


    public FileSourceImpl(Properties properties) {
        this.properties = properties;
    }

    /**
     * Parses the configuration and opens the source file.
     * All config values are resolved BEFORE the reader is opened, so a bad
     * config value can no longer leak an open file handle (the original
     * opened the reader first).
     *
     * @throws Exception if a required config item is missing/invalid or the file is unreadable
     */
    @Override
    public void init() throws Exception {
        batchSize = Integer.parseInt(checkAndGetConfig("file.source.batchSize"));
        fileType = fileType();
        switch (fileType) {
            case "txt": case "csv":
                fieldDelimiter = fileFieldsDelimiter();
                txtOrRegexMap = fileFieldsHBaseColumnMapping();
                headerLineCount = headerLineCount();
                break;
            case "regex":
                patRegex = Pattern.compile(regexStr());
                txtOrRegexMap = fileFieldsHBaseColumnMapping();
                headerLineCount = headerLineCount();
                break;
            case "json":
                // json lines carry their own keys; headerLineCount stays 0
                rdbOrJsonMap = hbaseJsonColumnsMapping();
                break;
        }
        reader = new BufferedReader(new FileReader(filePath()));
        // Skip header lines, tolerating a file shorter than the declared header.
        for (int i = 0; i < headerLineCount; i++) {
            if (reader.readLine() == null) {
                break;
            }
        }
    }

    /**
     * Clears {@code container} and refills it with up to {@code batchSize} Puts.
     * The original loop condition {@code ++count < batchSize} pre-incremented
     * before comparing, so each batch held at most {@code batchSize - 1} rows.
     *
     * @param container reused output buffer; cleared on entry
     * @return true if at least one row was read
     * @throws IOException if reading the underlying file fails
     */
    @Override
    public boolean read(List<Put> container) throws IOException {
        container.clear();
        String line;
        int count = 0;
        while (count < batchSize && (line = reader.readLine()) != null) {
            container.add(toPut(line));
            count++;
        }
        return !container.isEmpty();
    }

    @Override
    public int batchSize() {
        return batchSize;
    }

    @Override
    public Properties config() {
        return properties;
    }

    @Override
    public void close() {
        closeAll(reader);
    }


    /**
     * Converts one text field to HBase bytes, inferring the value type:
     * all-digits -> long, decimal -> double, "true"/"false" (any case) -> boolean,
     * anything else -> string bytes.
     * The original decimal pattern {@code \d*.\.\d+} contained a stray {@code .}
     * wildcard: it rejected ".5" yet fully matched garbage such as "a.5", which
     * then threw NumberFormatException inside Double.parseDouble.
     */
    private byte[] txtFieldToBytes(String field) {
        if (field.matches("\\d+")) {
            return Bytes.toBytes(Long.parseLong(field));
        } else if (field.matches("\\d*\\.\\d+")) {
            return Bytes.toBytes(Double.parseDouble(field));
        } else if (field.toLowerCase().matches("true|false")) {
            return Bytes.toBytes(Boolean.parseBoolean(field.toLowerCase()));
        } else {
            return Bytes.toBytes(field);
        }
    }

    /**
     * Dispatches a raw line to the parser matching the configured file type.
     *
     * @throws RuntimeException if the file type is unsupported
     */
    private Put toPut(String line) {
        switch (fileType) {
            case "txt": case "csv":
                return txtLineToPut(line);
            case "json":
                return jsonLineToPut(line);
            case "regex":
                return regexLineToPut(line);
            default:
                throw new RuntimeException("FileType 不支持异常" + fileType);
        }
    }

    /**
     * Parses a delimiter-separated (txt/csv) line into a Put.
     * The delimiter is quoted because {@link String#split(String, int)} takes a
     * regex — a literal "|" or "." delimiter would otherwise split on every
     * character. Limit -1 keeps trailing empty fields so a row ending in an
     * empty column does not shrink the array and trigger an index error.
     */
    private Put txtLineToPut(String line) {
        String[] fields = line.split(Pattern.quote(fieldDelimiter), -1);
        Put put = new Put(txtFieldToBytes(fields[rowKeyIndex]));
        for (Map.Entry<String, Map<String, Integer>> cfEntry : txtOrRegexMap.entrySet()) {
            byte[] cf = Bytes.toBytes(cfEntry.getKey());
            for (Map.Entry<String, Integer> colEntry : cfEntry.getValue().entrySet()) {
                int fieldIndex = colEntry.getValue();
                put.addColumn(cf, Bytes.toBytes(colEntry.getKey()), txtFieldToBytes(fields[fieldIndex]));
            }
        }
        return put;
    }

    /**
     * Parses one JSON line into a Put, mapping JSON keys to HBase columns
     * according to {@link #rdbOrJsonMap}.
     */
    private Put jsonLineToPut(String line) {
        JSONObject jo = JSON.parseObject(line);
        Put put = new Put(toBytes(jo.get(rowKeyCol)));
        for (Map.Entry<String, Map<String, String>> cfEntry : rdbOrJsonMap.entrySet()) {
            byte[] cf = Bytes.toBytes(cfEntry.getKey());
            for (Map.Entry<String, String> colEntry : cfEntry.getValue().entrySet()) {
                Object jsonField = jo.get(colEntry.getValue());
                put.addColumn(cf, Bytes.toBytes(colEntry.getKey()), toBytes(jsonField));
            }
        }
        return put;
    }

    /**
     * Parses one line via the configured regex, mapping capture groups to
     * HBase columns according to {@link #txtOrRegexMap}.
     *
     * @throws RuntimeException if the line does not match the pattern
     */
    private Put regexLineToPut(String line) {
        Matcher mat = patRegex.matcher(line);
        if (!mat.find()) {
            throw new RuntimeException("正则表达式与行不匹配异常: " + patRegex.pattern());
        }
        Put put = new Put(txtFieldToBytes(mat.group(rowKeyIndex)));
        for (Map.Entry<String, Map<String, Integer>> cfEntry : txtOrRegexMap.entrySet()) {
            byte[] cf = Bytes.toBytes(cfEntry.getKey());
            for (Map.Entry<String, Integer> colEntry : cfEntry.getValue().entrySet()) {
                int groupId = colEntry.getValue();
                put.addColumn(cf, Bytes.toBytes(colEntry.getKey()), txtFieldToBytes(mat.group(groupId)));
            }
        }
        return put;
    }


    // ---- config accessors -------------------------------------------------

    private String fileType() {
        return checkAndGetConfig("file.source.type");
    }

    private String fileFieldsDelimiter() {
        return checkAndGetConfig("file.fields.delimiter");
    }

    /**
     * Resolves and validates the source file path.
     *
     * @throws RuntimeException if the path does not exist, is not a regular file, or is unreadable
     */
    private File filePath() {
        String path = checkAndGetConfig("file.path");
        File file = new File(path);
        if (!file.exists() || !file.isFile() || !file.canRead()) {
            throw new RuntimeException(String.format("FileSource %s 不存在、不是文件或不可读", path));
        }
        return file;
    }

    private String regexStr() {
        return checkAndGetConfig("line.regex");
    }

    private Integer headerLineCount() {
        return Integer.parseInt(checkAndGetConfig("file.header.line.count"));
    }

    /**
     * Mapping for "json" sources: column family -> (hbase column -> json key).
     * The reserved ROWKEY entry is extracted into {@link #rowKeyCol}; the
     * original dereferenced it unchecked and NPE'd without a message when the
     * config lacked a ROWKEY mapping.
     */
    private Map<String, Map<String, String>> hbaseJsonColumnsMapping() {
        Map<String, Map<String, String>> map = hBaseColumnsMapping("json.hbase.columns.mapping");
        Map<String, String> rowKeyEntry = map.remove("ROWKEY");
        if (rowKeyEntry == null || rowKeyEntry.get("ROWKEY") == null) {
            throw new RuntimeException("json.hbase.columns.mapping 缺少 ROWKEY 映射");
        }
        rowKeyCol = rowKeyEntry.get("ROWKEY");
        return map;
    }

    /**
     * Mapping for "txt"/"csv"/"regex" sources, parsed from entries of the form
     * "cf:col->index" separated by commas; the reserved "ROWKEY->index" entry
     * sets {@link #rowKeyIndex}. Whitespace around tokens is tolerated.
     *
     * @throws RuntimeException if no ROWKEY entry is present (the original
     *         silently left {@code rowKeyIndex} null and NPE'd later)
     */
    private Map<String, Map<String, Integer>> fileFieldsHBaseColumnMapping() {
        Map<String, Map<String, Integer>> map = new HashMap<>();
        String item = checkAndGetConfig("file.hbase.columns.mapping");
        for (String mapping : item.split(",")) {
            String[] parts = mapping.split("->");
            String target = parts[0].trim();
            int fieldIndex = Integer.parseInt(parts[1].trim());
            if (target.equals("ROWKEY")) {
                rowKeyIndex = fieldIndex;
            } else {
                String[] cfCol = target.split(":");
                map.computeIfAbsent(cfCol[0], k -> new HashMap<>()).put(cfCol[1], fieldIndex);
            }
        }
        if (rowKeyIndex == null) {
            throw new RuntimeException("file.hbase.columns.mapping 缺少 ROWKEY 映射");
        }
        return map;
    }

}
