package com.haisen.service.impl;

import com.alibaba.fastjson.JSON;
import com.haisen.core.utils.RandomUtil;
import com.haisen.dataobject.entity.ColumnsMap;
import com.haisen.dataobject.entity.FileQueue;
import com.haisen.dataobject.entity.SourceFile;
import com.haisen.dataobject.entity.TblField;
import com.haisen.dataobject.mapper.base.ColumnsMapMapper;
import com.haisen.dataobject.mapper.base.FileQueueMapper;
import com.haisen.dataobject.mapper.base.SourceFileMapper;
import com.haisen.dataobject.mapper.base.TblFieldMapper;
import com.haisen.poi.basic_poi.BigExcel2TextUtil;
import com.haisen.service.ETLEngineService;
import com.haisen.service.FileQueueService;
import com.haisen.service.HadoopTemplateService;
import com.haisen.service.HiveJdbcTemplateService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

@Service
@Slf4j
public class ETLEngineServiceImp implements ETLEngineService {

    // NOTE(review): this is a Spring singleton, but it keeps per-run state in
    // instance fields (src_path, excelTitleMap, tbFieldList, ...). Concurrent
    // process() calls would corrupt each other's state — confirm single-threaded
    // usage, or move this state into a per-invocation context object.

    // Per-run file locations, populated by extract() and read by transform().
    String src_path = null;
    String src_fileName = null;
    String dest_path = null;
    String dest_fileName = null;
    // Excel source layout: column index -> "chineseTitle:type" (from the title row).
    Map<Integer, String> excelTitleMap = new HashMap<>();
    // Table field metadata derived from the Excel title row.
    List<TblField> tbFieldList = new ArrayList<>();

    // Hive warehouse root directory in HDFS.
    String hivewarehouse = "/user/hive/warehouse";

    // Column-name standardisation (Chinese alias -> English column name).
    @Autowired
    private ColumnsMapMapper columnsMapMapper;
    // Persists table field metadata.
    @Autowired
    private TblFieldMapper tblFieldMapper;

    @Autowired
    private SourceFileMapper sourceFileMapper;

    @Autowired
    private FileQueueService fileQueueService;

    @Autowired
    private HadoopTemplateService hadoopTemplateService;

    @Autowired
    private HiveJdbcTemplateService hiveJdbcTemplateService;

    /**
     * Runs the ETL pipeline for one source file: extract the Excel content to a
     * delimited text file, then create the Hive table and load the data.
     *
     * @param sourceFile absolute path of the source Excel file
     * @param location   HDFS sub-directory (under the warehouse root) for the table
     * @param tableName  target Hive table name
     * @param cdate      processing date recorded with the source file
     */
    @Override
    public void process(String sourceFile, String location, String tableName, String cdate) {
        // 1. Extract the data and the column layout from the Excel source.
        extract(sourceFile);
        // 2. Transform: normalise column names, validate, create table, load.
        transform(location, tableName, cdate);
    }

    /**
     * Extracts the Excel source into a delimited text file next to it and
     * captures the title row as {@code excelTitleMap} (index -> "name:type").
     *
     * @param sourceFile path of the source Excel file; must have a file extension
     * @throws IllegalArgumentException if the file name has no extension
     */
    @Override
    public void extract(String sourceFile) {
        // BUGFIX: the original threw an Exception, caught it immediately
        // (printStackTrace) and then kept processing the unusable file.
        // Fail fast instead so process() stops here.
        if (sourceFile.indexOf(".") < 1) {
            throw new IllegalArgumentException("文件不能识别，没有后缀！！");
        }
        src_path = sourceFile.substring(0, sourceFile.lastIndexOf("/"));
        src_fileName = sourceFile.substring(sourceFile.lastIndexOf("/") + 1);
        String src_pathfile = src_path + "/" + src_fileName;
        dest_path = src_path;
        // Random target file name avoids collisions between runs.
        dest_fileName = RandomUtil.getRandomFileName() + ".txt";
        String dest_pathfile = dest_path + "/" + dest_fileName;
        BigExcel2TextUtil.getInstance().readExcel2Txt(src_pathfile, dest_pathfile);
        excelTitleMap = BigExcel2TextUtil.getInstance().getTableFields();
        log.info("源文件路径={},文件名={}；目标路径文件名={},字段名称={}", src_path, src_pathfile, dest_pathfile, JSON.toJSON(excelTitleMap));
    }

    /**
     * Transforms the extracted data: maps Excel titles to table fields,
     * validates the mapping, creates the Hive table, uploads the text file to
     * HDFS and persists the processing metadata.
     *
     * @param location  HDFS sub-directory for the table
     * @param tableName target Hive table name
     * @param cdate     processing date recorded with the source file
     */
    @Override
    public void transform(String location, String tableName, String cdate) {
        // 1. Map the Excel title row to table fields (helper clears the list too).
        tbFieldList.clear();
        tranExcelTitle2Field(tableName);
        log.info("表格源数据{}", JSON.toJSONString(tbFieldList));
        // 2. Validate the mapping; only proceed when every title is mapped.
        if (checkDDL(tableName)) {
            // 2.1 Build and execute the CREATE TABLE statement.
            String creatTable = InitDDL(location, tableName);
            log.info("{}", creatTable);
            String result = hiveJdbcTemplateService.createTable(creatTable);
            log.info("{}", result);
            // todo: the queue row is updated twice per run (here and in checkDDL) — optimise.
            FileQueue fileQueueFaild = new FileQueue();
            fileQueueFaild.setTblName(tableName);
            // BUGFIX: indexOf("error") > 0 missed an error message that starts
            // at index 0; contains() covers every position.
            if (result.contains("error")) {
                fileQueueFaild.setResult(2);
                fileQueueFaild.setRemark("建表语法有错误:" + result);
                fileQueueService.updateByTblName(fileQueueFaild);
                return;
            } else {
                // NOTE(review): no result code is set on success here (checkDDL
                // already wrote 1) — confirm updateByTblName skips null fields.
                fileQueueFaild.setRemark("建表成功：");
                fileQueueService.updateByTblName(fileQueueFaild);
            }

            // 2.2 Upload the extracted text file into the table's HDFS location.
            String localPathFile = dest_path + "/" + dest_fileName;
            String hdfsPath = hivewarehouse + "/" + location + "/" + tableName;
            hadoopTemplateService.uploadFile(localPathFile, hdfsPath);
            // 2.3 Record where the file was processed from and to.
            sourceFileMapper.insert(new SourceFile(cdate, src_fileName, src_path, dest_fileName, hdfsPath, tableName, 0, 0, creatTable));
            // 2.4 Persist the field metadata.
            log.info("插入元数据={}", JSON.toJSONString(tbFieldList));
            for (TblField tblField : tbFieldList) {
                tblFieldMapper.insertSelective(tblField);
            }
        }
    }

    /**
     * Maps each Excel title ("chineseName:type") to a {@link TblField},
     * translating the Chinese name to its standard English column name via the
     * columns_map table. An unmapped title gets an empty column name, which
     * {@link #checkDDL(String)} later reports as a failure.
     *
     * @param tableName target table the fields belong to
     */
    private void tranExcelTitle2Field(String tableName) {
        // Reset per-run state.
        tbFieldList.clear();
        // Chinese -> English column-name mapping, loaded once per run.
        List<ColumnsMap> columnsMapList = columnsMapMapper.selectAll();
        for (Map.Entry<Integer, String> entry : excelTitleMap.entrySet()) {
            String[] fieldNameAndType = entry.getValue().split(":");
            // Collect standard names whose alias list contains the Chinese title.
            List<String> fieldNameList = columnsMapList.stream()
                    .filter(o -> o.getAliasList().contains(fieldNameAndType[0]))
                    .map(ColumnsMap::getName)
                    .collect(Collectors.toList());
            log.info("{}", JSON.toJSONString(fieldNameList));
            // First match wins; empty string marks "no mapping found".
            String columnName = fieldNameList.isEmpty() ? "" : fieldNameList.get(0);
            tbFieldList.add(new TblField(tableName, fieldNameAndType[0], columnName, fieldNameAndType[1], entry.getKey()));
        }
    }

    /**
     * Checks that every Excel title has an English column mapping. On failure
     * the file-queue row is marked failed (result=2) with the list of missing
     * mappings; on success it is marked ok (result=1).
     *
     * @param tableName table whose field mappings are checked
     * @return true when all fields are mapped, false otherwise
     */
    private boolean checkDDL(String tableName) {
        StringBuilder sb = new StringBuilder();
        for (TblField tblField : tbFieldList) {
            if (StringUtils.isEmpty(tblField.getColumnName())) {
                sb.append("字段映射不存在->");
                sb.append(tblField.getCcomment() + ":" + tblField.getColumnName() + "," + tblField.getTypeName() + "\n");
            }
        }
        FileQueue fileQueueFaild = new FileQueue();
        fileQueueFaild.setTblName(tableName);
        if (sb.length() > 0) {
            log.info("字段映射不存在：{}", sb.toString());
            fileQueueFaild.setResult(2);
            fileQueueFaild.setRemark(sb.toString());
            // Record the failure in the file queue.
            fileQueueService.updateByTblName(fileQueueFaild);
            return false;
        }
        fileQueueFaild.setResult(1);
        fileQueueFaild.setRemark("字段映射成功");
        fileQueueService.updateByTblName(fileQueueFaild);
        return true;
    }

    /**
     * Builds the Hive CREATE TABLE DDL for the mapped fields.
     *
     * @param location  HDFS sub-directory under the warehouse root
     * @param tableName table to create
     * @return the CREATE TABLE statement
     */
    private String InitDDL(String location, String tableName) {
        StringBuilder sb = new StringBuilder();
        sb.append("create table " + tableName + "(\n");
        for (TblField tblField : tbFieldList) {
            sb.append(tblField.getColumnName() + " " + tblField.getTypeName() + " comment '" + tblField.getCcomment() + "',\n");
        }
        // BUGFIX: remove the trailing comma only when at least one column line
        // was appended; the old length-based check deleted '(' when the field
        // list was empty.
        if (!tbFieldList.isEmpty()) {
            sb.deleteCharAt(sb.length() - 2);
        }
        sb.append(") COMMENT '" + src_fileName + "'\n");
        sb.append("ROW FORMAT DELIMITED\n");
        sb.append("FIELDS TERMINATED BY '|'\n");
        sb.append("COLLECTION ITEMS TERMINATED BY '-'\n");
        sb.append("MAP KEYS TERMINATED BY ':'\n");
        sb.append("LINES TERMINATED BY '\\n'");
        sb.append("\n");
        sb.append("LOCATION 'hdfs://nameservice1" + hivewarehouse + "/" + location + "/" + tableName + "'\n");
        return sb.toString();
    }

    /** Load phase — not implemented; loading happens in transform() (2.2). */
    @Override
    public void load() {

    }
}
