package com.sh.data.engine.domain.datadev.offline.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.sh.data.engine.domain.datadev.offline.service.DatadevFunctionService;
import com.sh.data.engine.domain.datadev.offline.service.DatadevResourceService;
import com.sh.data.engine.domain.shims.hive.util.HiveUtil;
import com.sh.data.engine.infrastructure.config.HDFSConfig;
import com.sh.data.engine.repository.dao.datadev.DatadevFunctionMapper;
import com.sh.data.engine.repository.dao.datadev.entity.DatadevFunctionEntity;
import com.sh.data.engine.repository.dao.datadev.entity.DatadevResourceEntity;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.*;
import java.util.stream.Collectors;

@Service
public class DatadevFunctionServiceImpl
    extends ServiceImpl<DatadevFunctionMapper, DatadevFunctionEntity>
    implements DatadevFunctionService {

    @Autowired
    private DatadevResourceService datadevResourceService;

    @Autowired
    private HDFSConfig hdfsConfig;

    /**
     * Builds Hive {@code CREATE FUNCTION} statements for the given UDF names within a project.
     *
     * <p>Functions listed in {@code functionHasCreated} are skipped, as are functions whose
     * backing jar resource cannot be resolved.
     *
     * @param functionNames      names of the functions to create; may be null or empty
     * @param projectId          id of the project that owns the functions
     * @param functionHasCreated names of functions already created (skipped); may be null
     * @return one CREATE FUNCTION SQL statement per resolvable function; never null
     */
    @Override
    public List<String> getHiveCreateFunctionSql(
        Set<String> functionNames, Long projectId, Set<String> functionHasCreated) {
        if (CollectionUtils.isEmpty(functionNames)) {
            // Type-safe empty list instead of the raw-typed Collections.EMPTY_LIST.
            return Collections.emptyList();
        }

        List<DatadevFunctionEntity> entityList =
            list(
                new LambdaQueryWrapper<DatadevFunctionEntity>()
                    .eq(DatadevFunctionEntity::getProjectId, projectId)
                    .in(DatadevFunctionEntity::getFunctionName, functionNames));

        if (CollectionUtils.isEmpty(entityList)) {
            return Collections.emptyList();
        }

        // Drop null rows and functions that were already created in this session.
        final List<DatadevFunctionEntity> pending =
            entityList.stream()
                .filter(Objects::nonNull)
                .filter(
                    e ->
                        functionHasCreated == null
                            || !functionHasCreated.contains(e.getFunctionName()))
                .collect(Collectors.toList());

        final List<Long> resourceNodeIdList =
            pending.stream()
                .map(DatadevFunctionEntity::getResourceId)
                .collect(Collectors.toList());

        // Guard: MyBatis-Plus renders .in(...) on an empty collection as invalid "IN ()" SQL,
        // which the original code could hit when every function was filtered out above.
        if (CollectionUtils.isEmpty(resourceNodeIdList)) {
            return Collections.emptyList();
        }

        final List<DatadevResourceEntity> resourceConfigList =
            datadevResourceService.list(
                new LambdaQueryWrapper<DatadevResourceEntity>(DatadevResourceEntity.class)
                    .in(DatadevResourceEntity::getNodeId, resourceNodeIdList));

        // Keep the first entity on duplicate node ids; the original two-arg toMap would
        // throw IllegalStateException if the query ever returned a duplicate key.
        final Map<Long, DatadevResourceEntity> resourceMap =
            resourceConfigList.stream()
                .collect(
                    Collectors.toMap(
                        DatadevResourceEntity::getNodeId, x -> x, (first, second) -> first));

        List<String> sqls = new ArrayList<>(pending.size());
        for (DatadevFunctionEntity functionConfig : pending) {
            DatadevResourceEntity resource = resourceMap.get(functionConfig.getResourceId());
            if (resource == null) {
                // Backing jar resource record is missing — skip rather than emit broken SQL.
                continue;
            }

            String functionName = functionConfig.getFunctionName();
            // getClassName() holds the fully qualified UDF class; "packageName" was misleading.
            String className = functionConfig.getClassName();
            // Full jar location: HDFS base URL + resource file path (typo "jarParth" fixed).
            String jarPath =
                String.format("%s%s", hdfsConfig.getHdfsUrl(), resource.getFilePath());

            sqls.add(HiveUtil.getCreateFunctionSql(functionName, className, jarPath));
        }

        return sqls;
    }

    /**
     * Looks up the function definition bound to the given node id.
     *
     * @param nodeId node id of the function node
     * @return the matching entity, or null when none exists
     */
    @Override
    public DatadevFunctionEntity getDatadevFunctionByNodeId(Long nodeId) {
        return getOne(
            new LambdaQueryWrapper<DatadevFunctionEntity>()
                .eq(DatadevFunctionEntity::getNodeId, nodeId));
    }
}
