package org.apache.dolphinscheduler.api.service.impl;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.io.Files;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ModelService;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.RegexUtils;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.common.utils.SparkSqlUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.ModelMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.service.memoryaddress.MemoryAddressService;
import org.apache.dolphinscheduler.spi.enums.ResourceType;
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.ml.PipelineModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.io.ByteArrayOutputStream;
import java.util.*;

@Service
public class ModelServiceImpl extends BaseServiceImpl implements ModelService {

    private static final Logger logger = LoggerFactory.getLogger(ModelServiceImpl.class);

    @Autowired
    private MemoryAddressService memoryAddressService;

    @Autowired
    private UsersService usersService;

    @Autowired
    private ModelMapper modelMapper;

    @Autowired
    private TenantMapper tenantMapper;

    /**
     * Saves a model record that points at the node's existing output path on HDFS
     * and flags the node as having a saved model.
     *
     * @param model model to persist; its nodeId is used to look up the output path
     * @return result map carrying {@link Status#SUCCESS} on completion
     * @throws ServiceException if the node has no recorded output path
     */
    @Transactional
    public Map<String, Object> insertModel(Model model) {
        List<MemoryAddress> addresses = memoryAddressService.getOutPathByNodeId(model.getNodeId());
        if (addresses == null || addresses.isEmpty()) {
            // guard the former unchecked get(0): fail with a meaningful message
            throw new ServiceException(
                    String.format("no output path found for node id: %d", model.getNodeId()));
        }
        model.setAddress(addresses.get(0).getOutPath());
        modelMapper.insert(model);
        // mark the node so the front end knows a model was saved from it
        memoryAddressService.updateModelTag(1, model.getNodeId());

        Map<String, Object> result = new HashMap<>();
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * Loads the Spark {@link PipelineModel} produced by the node, re-saves it to
     * the uploading user's HDFS directory, and records the new address.
     *
     * @param model model to persist; nodeId locates the pipeline, userId the owner
     * @return result map carrying {@link Status#SUCCESS} on completion
     * @throws ServiceException if the node has no recorded output path
     */
    @Transactional
    public Map<String, Object> insertModels(Model model) {
        List<MemoryAddress> addresses = memoryAddressService.getOutPathByNodeId(model.getNodeId());
        if (addresses == null || addresses.isEmpty()) {
            throw new ServiceException(
                    String.format("no output path found for node id: %d", model.getNodeId()));
        }
        PipelineModel pipelineModel = PipelineModel.load(addresses.get(0).getOutPath());
        User user = usersService.queryUser(model.getUserId());
        String path = SparkSqlUtils.getInstance().toHDFS(pipelineModel, user.getUserName());
        model.setAddress(path);
        modelMapper.insert(model);

        Map<String, Object> result = new HashMap<>();
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * Builds the "model service" menu tree for a user: a fixed "model update"
     * entry followed by one entry per saved model.
     *
     * @param userId owner whose models populate the menu
     * @return result map with {@link Status#SUCCESS} and the menu under "data"
     */
    public Map<String, Object> viewModel(int userId) {
        Map<String, Object> result = new HashMap<>();
        List<Model> modelList = modelMapper.getListByUserId(userId);

        List<Submenu> list = new ArrayList<>();
        // fixed first entry: the model-update action
        Submenu modelUpdate = new Submenu();
        modelUpdate.setName("模型更新");
        modelUpdate.setType("SPARKSCALA");
        modelUpdate.setSecondType("ModelUpdate");
        modelUpdate.setShow(true);
        list.add(modelUpdate);

        // one entry per saved model; secondType encodes the position ("Model0", "Model1", ...)
        for (int i = 0; i < modelList.size(); i++) {
            Submenu submenu = new Submenu();
            submenu.setName(modelList.get(i).getAlias());
            submenu.setType("SAVEDMODEL");
            submenu.setSecondType("Model" + i);
            submenu.setShow(true);
            list.add(submenu);
        }

        Menu menu = new Menu();
        menu.setName("模型服务");
        menu.setShow(true);
        menu.setType("modelService");
        menu.setChildren(list);

        putMsg(result, Status.SUCCESS);
        result.put("data", menu);
        return result;
    }

    /**
     * Checks whether a model alias is still free for the given user.
     *
     * @param alias     candidate model alias
     * @param type      resource type (used for logging only)
     * @param loginUser user attempting the creation
     * @return {@link Status#SUCCESS} if free, {@link Status#RESOURCE_EXIST} otherwise
     */
    public Result<Object> verifyModelName(String alias, ResourceType type, User loginUser) {
        Result<Object> result = new Result<>();
        putMsg(result, Status.SUCCESS);
        List<Model> modelList = modelMapper.getListByName(alias, loginUser.getId());
        if (!modelList.isEmpty()) {
            logger.error("resource type:{} name:{} has exist, can't create again.",
                    type, RegexUtils.escapeNRT(alias));
            putMsg(result, Status.RESOURCE_EXIST);
        }
        return result;
    }

    /**
     * Deletes a model row and, best-effort, its backing HDFS directory.
     * HDFS failures are logged but never block deleting the database record.
     *
     * @param id model id
     * @return result map carrying {@link Status#SUCCESS}
     */
    public Map<String, Object> deleteModelById(int id) {
        Map<String, Object> result = new HashMap<>();
        List<Model> modelList = modelMapper.getListById(id);
        try {
            if (!modelList.isEmpty()) {
                String hdfsFilename = stripDefaultFs(modelList.get(0).getAddress());
                HadoopUtils.getInstance().delete(hdfsFilename, true);
            }
        } catch (Exception e) {
            // keep the full stack trace; the former e.getMessage()-only log lost it
            logger.error("delete hdfs file for model id {} failed", id, e);
        }
        modelMapper.deleteModelById(id);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * Uploads a model file to HDFS and records it in the model table.
     *
     * @param loginUser   uploading user (owner of the new model)
     * @param alias       display alias for the model
     * @param description model description
     * @param file        uploaded file
     * @param type        resource type used to build the HDFS target path
     * @return result map carrying {@link Status#SUCCESS}
     * @throws ServiceException if the HDFS upload fails (transaction rolls back)
     */
    @Transactional
    public Map<String, Object> uploadModel(User loginUser, String alias, String description,
                                           MultipartFile file, ResourceType type) {
        Map<String, Object> result = new HashMap<>();
        String defaultFS = PropertyUtils.getString(Constants.FS_DEFAULTFS);

        String fullName = "/" + file.getOriginalFilename();
        Model model = new Model();
        model.setUserId(loginUser.getId());
        model.setDescribe(description);
        model.setModelName(file.getOriginalFilename());
        model.setAlias(alias);
        String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
        // target path of the file on HDFS
        String hdfsFilename = HadoopUtils.getHdfsFileName(type, tenantCode, fullName);
        model.setAddress(defaultFS + hdfsFilename);

        // copy the file to HDFS; failure aborts before the DB insert
        if (!upload(loginUser, fullName, file, type)) {
            logger.error("upload resource: {} file: {} failed.",
                    RegexUtils.escapeNRT(file.getName()), RegexUtils.escapeNRT(file.getOriginalFilename()));
            putMsg(result, Status.HDFS_OPERATION_ERROR);
            throw new ServiceException(String.format("upload resource: %s file: %s failed.",
                    file.getName(), file.getOriginalFilename()));
        }
        putMsg(result, Status.SUCCESS);
        modelMapper.insert(model);
        return result;
    }

    /**
     * Copies an uploaded model file to HDFS via a temporary local file.
     *
     * @param loginUser login user (its tenant determines the HDFS directory)
     * @param fullName  target file name ("/originalFilename")
     * @param file      uploaded file
     * @param type      resource type used to build the HDFS path
     * @return true on success, false if any step failed (already logged)
     */
    private boolean upload(User loginUser, String fullName, MultipartFile file, ResourceType type) {
        // query tenant to locate the per-tenant HDFS directory
        String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode();
        // random local staging file name
        String localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString());

        String hdfsFilename = HadoopUtils.getHdfsFileName(type, tenantCode, fullName);
        String resourcePath = HadoopUtils.getHdfsDir(type, tenantCode);
        try {
            // create the tenant dir lazily on first upload
            if (!HadoopUtils.getInstance().exists(resourcePath)) {
                createTenantDirIfNotExists(tenantCode);
            }
            org.apache.dolphinscheduler.api.utils.FileUtils.copyInputStreamToFile(file, localFilename);
            // copyLocalToHdfs(src, dst, deleteSource=true, overwrite=true)
            HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true);
        } catch (Exception e) {
            FileUtils.deleteFile(localFilename);
            logger.error(e.getMessage(), e);
            return false;
        }
        return true;
    }

    /**
     * Resolves the HDFS path (without the fs.defaultFS prefix) of a model.
     *
     * @param id model id
     * @return HDFS-relative path, or "" when the model has no address
     */
    public String getHdfsFilename(int id) {
        List<Model> model = modelMapper.getListById(id);
        String address = "";
        if (!model.isEmpty()) {
            address = model.get(0).getAddress();
        }
        return stripDefaultFs(address);
    }

    /**
     * Strips the configured fs.defaultFS prefix (e.g. "hdfs://host:port") from a
     * full address. Returns the address unchanged when it does not carry the
     * prefix, and "" for null — the former unconditional substring threw
     * StringIndexOutOfBoundsException on short or empty addresses.
     */
    private String stripDefaultFs(String address) {
        if (address == null) {
            return "";
        }
        String defaultFS = PropertyUtils.getString(Constants.FS_DEFAULTFS);
        if (defaultFS != null && address.startsWith(defaultFS)) {
            return address.substring(defaultFS.length());
        }
        return address;
    }

    /**
     * Pages through a user's models.
     *
     * @param userId      owner of the models
     * @param pageSize    records per page
     * @param currentPage 1-based page number
     * @param searchVal   optional filter term
     * @return result with a {@link PageInfo} payload; always carries SUCCESS
     *         (the former version returned a status-less body for out-of-range pages)
     */
    public Result queryModelListPaging(int userId, int pageSize, int currentPage, String searchVal) {
        Result result = new Result();
        List<Model> listModel = modelMapper.getListByUserId(userId);
        // offset of the first record on the requested page
        int offset = (currentPage * pageSize) - pageSize;
        PageInfo<Model> pageInfo = new PageInfo<>(offset, pageSize);
        pageInfo.setTotal(listModel.size());
        if (offset <= listModel.size()) {
            List<Model> pageList = modelMapper.queryModelPaging(pageSize, userId, offset, searchVal);
            pageInfo.setTotalList(pageList);
            pageInfo.setStart(offset);
            pageInfo.setCurrentPage(currentPage);
        } else {
            // out-of-range page: return an empty page instead of a status-less result
            pageInfo.setTotalList(new ArrayList<>());
        }
        result.setData(pageInfo);
        putMsg(result, Status.SUCCESS);
        return result;
    }
}
