package avicit.bdp.dds.api.service;

import avicit.bdp.common.base.BaseService;
import avicit.bdp.common.dto.StorageResourceConf;
import avicit.bdp.common.dto.response.dds.UdfFuncDTO;
import avicit.bdp.common.service.dto.CommonTypeDTO;
import avicit.bdp.common.service.service.CommonTypeService;
import avicit.bdp.common.utils.BdpLogUtil;
import avicit.bdp.common.utils.enums.CommonTypeEnums;
import avicit.bdp.common.utils.uploads.FileAdapterUtils;
import avicit.bdp.common.utils.uploads.IFileAdapter;
import avicit.bdp.core.constant.Constants;
import avicit.bdp.dds.dao.entity.Resource;
import avicit.bdp.dds.dao.entity.UdfFunc;
import avicit.bdp.dds.dao.mapper.UdfFuncMapper;
import avicit.platform6.commons.utils.ComUtil;
import avicit.platform6.core.exception.BusinessException;
import avicit.platform6.core.rest.msg.QueryRespBean;
import com.github.pagehelper.Page;
import com.google.common.collect.Lists;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;

/**
 * Service managing UDF (user defined function) definitions and the resource files
 * that back them on the HDFS storage engine.
 *
 * @author felix project service
 */
@Service
public class UdfFuncService extends BaseService<UdfFuncMapper, UdfFunc> {

  private static final Logger logger = LoggerFactory.getLogger(UdfFuncService.class);

  /**
   * Directory-name formatter for upload paths. Unlike {@code SimpleDateFormat},
   * {@code DateTimeFormatter} is thread-safe and may be cached as a constant.
   */
  private static final DateTimeFormatter DATE_DIR_FORMATTER =
      DateTimeFormatter.ofPattern("yyyy-MM-dd");

  @Autowired private DdsResourceService resourceService;

  @Autowired private CommonTypeService commonTypeService;

  /**
   * 新增UDF — persists a new UDF definition and uploads its file to HDFS.
   *
   * @param name display name of the UDF
   * @param funcName function name registered in the engine
   * @param typeId category (common type) id
   * @param className fully qualified implementation class name
   * @param type UDF type (0--hiveUDF; 1--sparkUDF, see {@link #getTypeAndUdfFuncList})
   * @param argTypes argument type list
   * @param remark free-form remark
   * @param file uploaded resource file, at most 1GB
   * @param projectId owning project id
   * @return the generated udf id
   * @throws BusinessException if the file exceeds 1GB, the project's storage engine is
   *     not HDFS, or any upload/persistence step fails
   */
  @Transactional(rollbackFor = Exception.class)
  public String insertUDF(
      String name,
      String funcName,
      String typeId,
      String className,
      Integer type,
      String argTypes,
      String remark,
      MultipartFile file,
      String projectId) {
    try {
      // Reject oversized uploads before doing any other work.
      if (file.getSize() > Constants.GB_IN_BYTES) {
        logger.error("file [{}] size is too large, more than 1GB.", file.getOriginalFilename());
        throw new BusinessException("文件大于1G，不能上传");
      }

      // UDF is only supported on the HDFS storage engine.
      StorageResourceConf conf = FileAdapterUtils.getStorageResourceByProjectId(projectId);
      if (!FileAdapterUtils.TYPE_HDFS.equals(conf.getType())) {
        throw new BusinessException("minio存储引擎目前不支持UDF的增加！UDF目前只支持hdfs存储引擎。");
      }

      String udfId = ComUtil.getId();
      UdfFunc udfFunc =
          buildUdfFunc(udfId, name, funcName, typeId, className, type, argTypes, remark, projectId);

      String resourceId = ComUtil.getId();
      Resource resource = buildResource(resourceId, file, projectId);
      resource.setEngineResourceId(conf.getId());

      String dateDir = LocalDate.now().format(DATE_DIR_FORMATTER);
      // On a duplicate file name, append a random sub-directory so the upload
      // cannot overwrite an existing file.
      if (isSameResourceName(projectId, resource.getName())) {
        dateDir += Constants.SINGLE_SLASH + UUID.randomUUID();
      }

      // Full HDFS path: <top>/<projectId>/<date[/uuid]>/<originalFilename>
      String fullName =
          Constants.UDF_UPLOAD_TOP_PATH
              + Constants.SINGLE_SLASH
              + projectId
              + Constants.SINGLE_SLASH
              + dateDir
              + Constants.SINGLE_SLASH
              + file.getOriginalFilename();

      IFileAdapter fileAdapter = FileAdapterUtils.getFileAdapterByProject(projectId);
      fileAdapter.upload(file.getInputStream(), fullName);

      resource.setFullName(fullName);
      resourceService.insertSelective(resource);
      udfFunc.setResourceId(resourceId);
      insertSelective(udfFunc);
      // 记录日志
      BdpLogUtil.log4Insert(udfFunc);
      return udfId;
    } catch (BusinessException e) {
      // Do not re-wrap business errors: keep the original message and cause chain.
      throw e;
    } catch (Exception e) {
      throw new BusinessException(e.getMessage(), e);
    }
  }

  /** Assembles a new {@link UdfFunc} entity from the request fields; no persistence. */
  private static UdfFunc buildUdfFunc(
      String udfId,
      String name,
      String funcName,
      String typeId,
      String className,
      Integer type,
      String argTypes,
      String remark,
      String projectId) {
    UdfFunc udfFunc = new UdfFunc();
    udfFunc.setId(udfId);
    udfFunc.setName(name);
    udfFunc.setFuncName(funcName);
    udfFunc.setTypeId(typeId);
    udfFunc.setClassName(className);
    udfFunc.setType(type);
    udfFunc.setArgTypes(argTypes);
    udfFunc.setRemark(remark);
    udfFunc.setProjectId(projectId);
    return udfFunc;
  }

  /** Assembles the {@link Resource} entity describing the uploaded UDF file; no persistence. */
  private static Resource buildResource(String resourceId, MultipartFile file, String projectId) {
    Resource resource = new Resource();
    resource.setId(resourceId);

    String originalFilename = file.getOriginalFilename();
    // Suffix is everything after the last '.'; null-safe for a missing file name.
    String fileSuffix =
        originalFilename == null
            ? null
            : originalFilename.substring(originalFilename.lastIndexOf('.') + 1);

    resource.setSize(file.getSize());
    resource.setName(originalFilename);
    resource.setFileSuffix(fileSuffix);
    // Plain file (not a directory), typed as UDF.
    resource.setDirectory(Resource.DIRECTORY_FILE);
    resource.setType(Resource.TYPE_UDF);
    resource.setProjectId(projectId);
    return resource;
  }

  /**
   * 判断资源名称是否重复 — true if a UDF resource with the same file name already
   * exists in the project.
   *
   * @param projectId project scope
   * @param fileName file name to check
   * @return 是否重复
   */
  private boolean isSameResourceName(String projectId, String fileName) {
    Resource resource = new Resource();
    resource.setName(fileName);
    // Named constant instead of the magic number 1, matching what insertUDF writes.
    resource.setType(Resource.TYPE_UDF);
    resource.setProjectId(projectId);
    return CollectionUtils.isNotEmpty(resourceService.selectList(resource));
  }

  /**
   * 修改UDF — updates an existing UDF definition.
   *
   * @param udfFunc new field values; {@code id} identifies the record
   * @throws BusinessException if the record does not exist or the update fails
   */
  @Transactional(rollbackFor = Exception.class)
  public void updateUDF(UdfFunc udfFunc) {
    try {
      updateByPrimaryKey(getUpdateDto(udfFunc));
    } catch (BusinessException e) {
      // Preserve the specific "数据不存在" message instead of re-wrapping it.
      throw e;
    } catch (Exception e) {
      throw new BusinessException(e.getMessage(), e);
    }
  }

  /** Loads the stored record, logs the change, and copies the editable fields onto it. */
  private UdfFunc getUpdateDto(UdfFunc udfFunc) {
    UdfFunc oldDTO = selectByPrimaryKey(udfFunc.getId());
    if (oldDTO == null) {
      throw new BusinessException("数据不存在");
    }

    // 记录日志 — capture old vs. new before mutating the loaded entity.
    BdpLogUtil.log4Update(udfFunc, oldDTO);

    // typeId/projectId/resourceId are deliberately not editable through this path.
    oldDTO.setName(udfFunc.getName());
    oldDTO.setFuncName(udfFunc.getFuncName());
    oldDTO.setClassName(udfFunc.getClassName());
    oldDTO.setType(udfFunc.getType());
    oldDTO.setArgTypes(udfFunc.getArgTypes());
    oldDTO.setRemark(udfFunc.getRemark());
    return oldDTO;
  }

  /**
   * 删除UDF — removes the UDF record, its resource record, and the backing HDFS file.
   *
   * @param id udf id
   * @throws BusinessException if the id is blank, the record or its resource row is
   *     missing, or the HDFS delete fails
   */
  @Transactional(rollbackFor = Exception.class)
  public void deleteUDF(String id) {
    if (StringUtils.isBlank(id)) {
      throw new BusinessException("删除失败！传入的参数主键为null");
    }
    try {
      UdfFunc udfFunc = selectByPrimaryKey(id);
      if (udfFunc == null) {
        throw new BusinessException("删除失败！对象不存在");
      }

      // 记录日志
      BdpLogUtil.log4Delete(udfFunc);

      Resource resource = resourceService.selectByPrimaryKey(udfFunc.getResourceId());
      // Fail with a clear business error instead of an NPE when the resource row is gone.
      if (resource == null) {
        throw new BusinessException("删除失败！对象不存在");
      }

      IFileAdapter fileAdapter = FileAdapterUtils.getFileAdapterByProject(resource.getProjectId());

      // Only attempt the HDFS delete when the file actually exists.
      if (fileAdapter.exists(resource.getFullName())) {
        boolean status = fileAdapter.delete(resource.getFullName(), false);
        if (!status) {
          throw new BusinessException("hdfs删除文件失败");
        }
      }
      resourceService.deleteByPrimaryKey(udfFunc.getResourceId());
      deleteByPrimaryKey(id);
    } catch (BusinessException e) {
      // Keep the specific failure message rather than wrapping it again.
      throw e;
    } catch (Exception e) {
      throw new BusinessException(e.getMessage(), e);
    }
  }

  /**
   * Checks whether another UDF in the project already uses the given display name.
   *
   * @param id id of the record being edited; blank/null for a brand-new record
   * @param name display name to check
   * @param projectId project scope
   * @return true if a different record with the same name exists
   * @throws BusinessException if name or projectId is blank
   */
  public Boolean isSameName(String id, String name, String projectId) {
    if (StringUtils.isAnyBlank(name, projectId)) {
      throw new BusinessException("请检查参数!");
    }
    return containsOtherRecord(this.mapper.getName(name, projectId), id);
  }

  /**
   * Same check as {@link #isSameName} but for the engine-level function name.
   *
   * @param id id of the record being edited; blank/null for a brand-new record
   * @param funcName function name to check
   * @param projectId project scope
   * @return true if a different record with the same function name exists
   * @throws BusinessException if funcName or projectId is blank
   */
  public Boolean isSameFuncName(String id, String funcName, String projectId) {
    if (StringUtils.isAnyBlank(funcName, projectId)) {
      throw new BusinessException("请检查参数!");
    }
    return containsOtherRecord(this.mapper.getFuncName(funcName, projectId), id);
  }

  /**
   * True when the list holds a record whose id differs from {@code id}; with a blank
   * id (new record) any hit counts as a duplicate. Shared by the two isSame* checks.
   */
  private boolean containsOtherRecord(List<UdfFunc> list, String id) {
    if (CollectionUtils.isEmpty(list)) {
      return false;
    }
    if (StringUtils.isBlank(id)) {
      return true;
    }
    for (UdfFunc dto : list) {
      if (!dto.getId().equals(id)) {
        return true;
      }
    }
    return false;
  }

  /**
   * 查询右侧数据 — pages the UDFs under a category (including all child categories).
   *
   * @param typeId category id
   * @param projectId project scope
   * @param keyWords keyword filter
   * @param pageNo page number
   * @param pageSize page size
   * @return 右侧数据 — paged UDF list with file names stripped to their base name
   * @throws BusinessException if the project has no storage resource configured
   */
  public QueryRespBean<UdfFunc> getList(
      String typeId, String projectId, String keyWords, Integer pageNo, Integer pageSize) {
    QueryRespBean<UdfFunc> queryRespBean = new QueryRespBean<>();
    try {
      StorageResourceConf storageResourceConf =
          FileAdapterUtils.getStorageResourceByProjectId(projectId);
      if (storageResourceConf == null) {
        logger.error("存储资源未设置，请先设置存储资源！");
        throw new BusinessException("存储资源未设置，请先设置存储资源！");
      }

      // Query the selected category plus all of its children. Copy into a fresh
      // list so we never mutate (or NPE on) whatever getChildrenIds returned.
      List<String> typeIds = Lists.newArrayList();
      List<String> childrenIds =
          commonTypeService.getChildrenIds(CommonTypeEnums.UDF, typeId, projectId);
      if (childrenIds != null) {
        typeIds.addAll(childrenIds);
      }
      typeIds.add(typeId);

      com.github.pagehelper.PageHelper.startPage(pageNo, pageSize);
      Page<UdfFunc> dataList =
          this.mapper.getList(typeIds, storageResourceConf.getId(), projectId, keyWords);

      // Strip the directory portion so the UI shows only the bare file name.
      List<UdfFunc> udfFuncList = dataList.getResult();
      if (CollectionUtils.isNotEmpty(udfFuncList)) {
        for (UdfFunc udfFunc : udfFuncList) {
          String name = udfFunc.getFileName();
          if (StringUtils.isNotBlank(name)) {
            udfFunc.setFileName(name.substring(name.lastIndexOf(Constants.SINGLE_SLASH) + 1));
          }
        }
      }
      queryRespBean.setResult(dataList);
      BdpLogUtil.log4Query(queryRespBean);
      return queryRespBean;
    } catch (BusinessException e) {
      // Preserve the specific configuration error instead of re-wrapping it.
      throw e;
    } catch (Exception e) {
      throw new BusinessException(e.getMessage(), e);
    }
  }

  /**
   * 查询UDF分类树 — the category tree with each node's UDFs attached.
   *
   * @param projectId projectId
   * @param type 类型：0--hiveUDF；1--sparkUDF
   * @return 流程定义分类树
   */
  public List<CommonTypeDTO> getTypeAndUdfFuncList(String projectId, Integer type) {
    List<CommonTypeDTO> result = commonTypeService.findCommonType(CommonTypeEnums.UDF, projectId);
    if (CollectionUtils.isNotEmpty(result)) {
      List<UdfFunc> udfFuncList = this.mapper.getListByProjectIdAndType(projectId, type);
      if (CollectionUtils.isNotEmpty(udfFuncList)) {
        // groupingBy already yields a usable Map; no need to copy it into a HashMap.
        Map<String, List<UdfFunc>> map =
            udfFuncList.stream().collect(Collectors.groupingBy(UdfFunc::getTypeId));
        for (CommonTypeDTO commonTypeDTO : result) {
          handleTypeWithUdfFunc(map, commonTypeDTO);
        }
      }
    }
    return result;
  }

  /** Recursively attaches the UDFs grouped by typeId to the matching tree nodes. */
  private void handleTypeWithUdfFunc(Map<String, List<UdfFunc>> map, CommonTypeDTO commonTypeDTO) {
    List<UdfFunc> funcs = map.get(commonTypeDTO.getId());
    if (funcs != null) {
      commonTypeDTO.setUdfFuncList(
          funcs.stream().map(UdfFuncService::toDto).collect(Collectors.toList()));
    }
    List<CommonTypeDTO> children = commonTypeDTO.getChildren();
    if (CollectionUtils.isNotEmpty(children)) {
      for (CommonTypeDTO child : children) {
        handleTypeWithUdfFunc(map, child);
      }
    }
  }

  /** Maps the entity to the lightweight DTO exposed in the category tree. */
  private static UdfFuncDTO toDto(UdfFunc item) {
    UdfFuncDTO dto = new UdfFuncDTO();
    dto.setId(item.getId());
    dto.setName(item.getName());
    dto.setClassName(item.getClassName());
    dto.setFuncName(item.getFuncName());
    dto.setResourceId(item.getResourceId());
    dto.setType(item.getType());
    return dto;
  }
}
