package org.sxp.common.utils.datax.reader;


import cn.hutool.core.math.MathUtil;
import cn.hutool.core.util.ArrayUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Maps;
import org.sxp.common.dto.datax.reader.DataXHiveReader;
import org.sxp.common.dto.datax.reader.DataXRDMSReader;
import org.sxp.common.utils.SpringContextUtils;
import org.sxp.modules.datasource.entity.DatabaseDO;
import org.sxp.modules.datasource.service.DatabaseService;
import org.sxp.modules.dataworks.dto.SyncDTO;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;

/**
 * Hive (HDFS) reader builder class: assembles the "reader" section of a
 * DataX job configuration for sources read through the hdfsreader plugin.
 *
 * @author zhouhongfa@gz-yibo.com
 * @ClassName HiveReader
 * @Version 1.0
 * @since 2019/7/30 23:07
 */
public class HiveReader extends BaseReader implements IDataxReader {

    public static final String INT = "int";
    public static final String LONG = "long";

    public HiveReader(SyncDTO syncDTO) {
        super(syncDTO);
    }

    @Override
    public String getName() {
        return "hdfsreader";
    }

    @Override
    public Map<String, Object> build() {
        DatabaseService databaseService = SpringContextUtils.getBean(DatabaseService.class);
        DatabaseDO jobDatasource = databaseService.getById(syncDTO.getSourceDb());
        Map<String, Object> readerObj = Maps.newLinkedHashMap();
        readerObj.put("name", getName());
        Map<String, Object> parameterObj = Maps.newLinkedHashMap();

        List<Map<String, Object>> columnMap = new ArrayList<>();
        for (int i = 0; i < syncDTO.getSourceField().size(); i++) {
            JSONObject jsonObject = syncDTO.getSourceField().get(i);
            if(StrUtil.isNotBlank(jsonObject.getString("name"))){
                JSONObject newObj = new JSONObject();
                if(StrUtil.isNotBlank(jsonObject.getString("value"))){
                    newObj.put("value", jsonObject.getString("value"));
                }else{
                    newObj.put("index", jsonObject.getInteger("index") != null ? jsonObject.getInteger("index") : i);
                }
                newObj.put("type", convertType(jsonObject.getString("type")));
                columnMap.add(newObj);
            }else{
                columnMap.add(jsonObject);
            }
        }

        if(StrUtil.isNotBlank(syncDTO.getSourceHadoopConfig())){
            parameterObj.put("hadoopConfig", JSON.parseObject(syncDTO.getSourceHadoopConfig()));
        }
        parameterObj.put("path", syncDTO.getSourcePath());
        parameterObj.put("defaultFS", jobDatasource.getHdfsUrl());
        parameterObj.put("column", columnMap);
        parameterObj.put("fileType", syncDTO.getSourceFileType());
        parameterObj.put("encoding", syncDTO.getSourceEncoding());
        parameterObj.put("fieldDelimiter", syncDTO.getSourceFieldDelimiter());

        readerObj.put("parameter", parameterObj);
        return readerObj;
    }

    @Override
    public void backBuild() {
        String removeSpace = StrUtil.trim(syncDTO.getDataxJson());

        Matcher matcher = pattern.matcher(removeSpace);
        String value = null;
        if (matcher.find()) {
            value = matcher.group(1);
        }
        if(JSONUtil.isJson(value)){
            DataXHiveReader reader = JSON.parseObject(value, DataXHiveReader.class);
            if(StrUtil.isNotBlank(reader.getParameter().getPath())){
                syncDTO.setSourcePath(reader.getParameter().getPath());
            }
            if(ArrayUtil.isNotEmpty(reader.getParameter().getFieldDelimiter())){
                syncDTO.setSourceFieldDelimiter(reader.getParameter().getFieldDelimiter());
            }
            if(ArrayUtil.isNotEmpty(reader.getParameter().getFileType())){
                syncDTO.setSourceFileType(reader.getParameter().getFileType());
            }
        }
    }

    private String convertType(String type){
        if(type.toLowerCase().endsWith(INT)){
            return LONG;
        }
        return type;
    }
}
