package com.sbtr.business.feature.service.impl;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollectionUtil;
import com.alibaba.druid.sql.builder.UpdateBuilder;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.activerecord.Model;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.sbtr.business.feature.dto.FeDatafileDto;
import com.sbtr.business.feature.dto.FeDatafileUploadDto;
import com.sbtr.business.feature.entity.*;
import com.sbtr.business.feature.mapper.FeDatafileMapper;
import com.sbtr.business.feature.mapper.FeOperatorMapper;
import com.sbtr.business.feature.service.FeDatafieldService;
import com.sbtr.business.feature.service.FeDatafileService;
import com.sbtr.business.utils.AnalyzeUtil;
import com.sbtr.business.utils.OperatorUtil;
import com.sbtr.common.PageDto;
import com.sbtr.common.file.CommonAnalyzeService;
import com.sbtr.common.file.FileService;
import com.sbtr.common.file.dto.AnalyzeDto;
import com.sbtr.common.file.dto.FileContentDto;
import com.sbtr.common.file.dto.FileDto;
import com.sbtr.exception.BDException;
import com.sbtr.util.BeanCopyUtils;
import com.sbtr.util.PageDtoUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import javax.annotation.PostConstruct;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Service implementation for the feature-engineering raw data file / field list
 * (特征工程原始数据文件字段列表).
 *
 * <p>Responsibilities: uploading and registering raw data files, running the
 * common file analyzer over them, inferring column types from sampled rows,
 * and executing uploaded SQL scripts against the configured datasource.
 *
 * @author dgl
 * @since 2023-03-17
 */
@Transactional
@Service
public class FeDatafileServiceImpl extends ServiceImpl<FeDatafileMapper, FeDatafile> implements FeDatafileService {

    @Autowired
    FeDatafileMapper feDatafileMapper;
    @Autowired
    FeOperatorMapper feOperatorMapper;
    @Autowired
    FeDatafieldService feDatafieldService;
    @Autowired
    private CommonAnalyzeService commonAnalyzeService;
    @Autowired
    private FileService fileService;
    @Autowired
    AnalyzeUtil analyzeUtil;
    @Autowired
    OperatorUtil operatorUtil;

    @Value("${spring.datasource.username}")
    String username;

    @Value("${spring.datasource.password}")
    String password;

    @Value("${spring.datasource.url}")
    String url;

    /**
     * Executes every ';'-separated statement contained in the uploaded script.
     *
     * @param file uploaded SQL script; read as UTF-8
     * @throws BDException if reading the upload or executing any statement fails
     */
    @Override
    public void execSQL(MultipartFile file) {
        try (Reader reader = new InputStreamReader(file.getInputStream(), StandardCharsets.UTF_8)) {
            runSqlScript(reader);
        } catch (IOException e) {
            throw new BDException(e.getMessage());
        }
    }

    /**
     * Executes every ';'-separated statement contained in a SQL script file.
     *
     * @param sql NOTE: despite the name this is a <em>path</em> to a script
     *            file on the local filesystem, not SQL text; read as UTF-8
     * @throws BDException if the file cannot be read or a statement fails
     */
    @Override
    public void execSQL2(String sql) {
        try (Reader reader = new InputStreamReader(new FileInputStream(sql), StandardCharsets.UTF_8)) {
            runSqlScript(reader);
        } catch (IOException e) {
            throw new BDException(e.getMessage());
        }
    }

    /**
     * Reads an entire SQL script from {@code reader}, splits it on ';' and runs
     * each non-blank statement on a fresh JDBC connection.
     *
     * <p>SECURITY NOTE: this executes arbitrary caller-supplied SQL against the
     * application datasource; callers must restrict who can reach it.
     *
     * @throws BDException wrapping any driver/IO/SQL failure (cause message only —
     *                     BDException as used in this codebase takes a message;
     *                     TODO: chain the cause if BDException supports it)
     */
    private void runSqlScript(Reader reader) {
        try {
            // Driver registration must precede getConnection (the original code
            // loaded the driver after connecting, which had no effect). With a
            // JDBC 4+ driver this call is redundant but harmless.
            Class.forName("com.mysql.jdbc.Driver");
        } catch (ClassNotFoundException e) {
            throw new BDException(e.getMessage());
        }
        try (Connection conn = DriverManager.getConnection(url, username, password);
             Statement stmt = conn.createStatement();
             BufferedReader br = new BufferedReader(reader)) {

            StringBuilder sb = new StringBuilder();
            String line;
            while ((line = br.readLine()) != null) {
                sb.append(line).append(System.lineSeparator());
            }

            for (String sqlStatement : sb.toString().split(";")) {
                // isBlank already covers whitespace-only fragments such as "\r\n"
                if (StringUtils.isBlank(sqlStatement)) {
                    continue;
                }
                stmt.executeUpdate(sqlStatement);
            }
        } catch (Exception e) {
            throw new BDException(e.getMessage());
        }
    }

    /**
     * Pages through data files matching the filter fields of {@code feDatafileDto}.
     *
     * @param feDatafileDto filter + paging parameters (page / pageSize)
     * @return one page of matching {@link FeDatafile} rows
     */
    @Override
    public PageDto<FeDatafile> pageList(FeDatafileDto feDatafileDto) {
        IPage<FeDatafile> page = new Page<>();
        page.setSize(feDatafileDto.getPageSize());
        page.setCurrent(feDatafileDto.getPage());
        page = feDatafileMapper.selectPages(page, BeanUtil.beanToMap(feDatafileDto));
        return PageDtoUtils.getPageDtoByIPage(page, FeDatafile.class);
    }

    /**
     * Runs the common analyzer over the stored file for the given data-file id.
     *
     * @param id primary key of the {@link FeDatafile} record
     * @return parsed title + content of the file
     */
    @Override
    public FileContentDto analyze(Long id) {
        FeDatafile feDatafile = feDatafileMapper.selectById(id);
        // Analyze the persisted file (bucketDir/fileName2 is the stored path).
        return commonAnalyzeService.common(toAnalyzeDto(feDatafile));
    }

    /**
     * Analyzes the data file bound to a processing node and returns its rows
     * plus the inferred column types.
     *
     * @param id processing (operator) id
     * @return map with "data" (rows) and "type" (inferred {@link FeDatafield}s);
     *         empty map when no file is bound or the file has no content
     */
    @Override
    public Map<String, Object> getTreeByProcessId(Long id) {
        QueryWrapper<FeDatafile> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("processing_id", id);
        queryWrapper.last("limit 1");

        FeDatafile feDatafile = feDatafileMapper.selectOne(queryWrapper);
        if (feDatafile == null) {
            return new HashMap<>();
        }
        FileContentDto f = commonAnalyzeService.common(toAnalyzeDto(feDatafile));
        if (CollectionUtils.isEmpty(f.getContent())) {
            return new HashMap<>();
        }
        return toDataTypeResult(f.getListMap(), buildDatafields(f, feDatafile.getId()));
    }

    /**
     * Re-analyzes a data file, replaces its persisted field definitions and
     * pushes data/title snapshots into the operator cache.
     *
     * @param id primary key of the {@link FeDatafile} record
     * @return the analyzed {@link FeDatafile}
     * @throws BDException if the file has no content or saving fields fails
     */
    @Override
    public FeDatafile analyzeSave(Long id) {
        FeDatafile feDatafile = feDatafileMapper.selectById(id);

        // Drop any previously-saved field definitions before re-analyzing.
        QueryWrapper<FeDatafield> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("datafile_id", id);
        List<Long> ids = feDatafieldService.list(queryWrapper).stream()
                .map(FeDatafield::getId)
                .collect(Collectors.toList());
        if (CollectionUtils.isNotEmpty(ids)) {
            feDatafieldService.removeByIds(ids);
        }

        FileContentDto f = commonAnalyzeService.common(toAnalyzeDto(feDatafile));
        // Guard added: the original dereferenced getContent().get(0) and threw a
        // raw IndexOutOfBoundsException on an empty file; fail like uploadAnalyze.
        if (CollectionUtils.isEmpty(f.getContent())) {
            throw new BDException("数据读取为空");
        }

        // Infer each column's type from the first content row only.
        List<FeDatafield> list = new ArrayList<>();
        Map<Integer, String> firstRow = f.getContent().get(0);
        for (int i = 0; i < firstRow.size(); i++) {
            String param = firstRow.get(i);
            if (param == null) {
                continue; // column without a sample value: skip it
            }
            FeDatafield feDatafield = new FeDatafield();
            feDatafield.setCreateTime(new Date());
            feDatafield.setName(f.getTitle().get(i));
            feDatafield.setType(analyzeUtil.analyze(param, 1));
            feDatafield.setDatafileId(feDatafile.getId());
            list.add(feDatafield);
        }

        if (!feDatafieldService.saveBatch(list)) {
            throw new BDException("分析失败");
        }

        // Map saved field ids to names, preserving the file's title order.
        LinkedHashMap<Long, String> titleMap = new LinkedHashMap<>();
        for (String title : f.getTitle()) {
            for (FeDatafield feDatafield : list) {
                if (feDatafield.getName().equals(title)) {
                    titleMap.put(feDatafield.getId(), feDatafield.getName());
                    break;
                }
            }
        }

        operatorUtil.putData(feDatafile.getProcessingId().toString(), f.getListMap());
        operatorUtil.putTitle(feDatafile.getProcessingId().toString(), titleMap);

        return feDatafile;
    }

    /**
     * Uploads a new raw data file for a processing node, resets the node's
     * downstream operator tree, persists the file record and returns the
     * analyzed rows plus inferred column types.
     *
     * @param feDatafileUploadDto upload metadata (processing id, item info, ...)
     * @param file                the uploaded file
     * @param username            creator recorded on the new record
     * @return map with "data" (rows) and "type" (inferred {@link FeDatafield}s)
     * @throws BDException if the uploaded file yields no readable data
     */
    @Override
    public Map<String, Object> uploadAnalyze(FeDatafileUploadDto feDatafileUploadDto, MultipartFile file, String username) {
        Long processingId = feDatafileUploadDto.getProcessingId();

        // Reset operator info: remove any data file already bound to this node.
        UpdateWrapper<FeDatafile> deleteWrapper = new UpdateWrapper<>();
        deleteWrapper.eq("processing_id", processingId);
        feDatafileMapper.delete(deleteWrapper);

        FeOperator feOperator = feOperatorMapper.selectById(processingId);

        QueryWrapper<FeOperator> operatorWrapper = new QueryWrapper<>();
        operatorWrapper.eq("canvas_id", feOperator.getCanvasId());
        List<FeOperator> feOperators = feOperatorMapper.selectList(operatorWrapper);

        operatorUtil.cleanTreeChildren(feOperators, feOperator.getId());

        // Upload the file and build the new record from the storage result.
        FileDto fileDto = fileService.uploadFile(file, new FileDto("feature"));
        FeDatafile feDatafile = BeanCopyUtils.copyProperties(fileDto, FeDatafile.class);
        feDatafile.setProcessingId(processingId);
        feDatafile.setFileName(file.getOriginalFilename());
        feDatafile.setEquipentityId(feDatafileUploadDto.getEquipentityId());
        feDatafile.setIteminfoId(feDatafileUploadDto.getIteminfoId());
        feDatafile.setFileDesc(feDatafileUploadDto.getFileDesc());
        feDatafile.setItemname(feDatafileUploadDto.getItemname());
        feDatafile.setCreater(username);
        feDatafile.setCreateTime(new Date());

        // Detach any record still pointing at this node; the old file itself is
        // kept so it can later be cached or cleaned up.
        QueryWrapper<FeDatafile> feDatafileQueryWrapper = new QueryWrapper<>();
        feDatafileQueryWrapper.eq("processing_id", processingId);
        if (CollectionUtils.isNotEmpty(feDatafileMapper.selectList(feDatafileQueryWrapper))) {
            UpdateWrapper<FeDatafile> updateWrapper = new UpdateWrapper<>();
            updateWrapper.eq("processing_id", processingId);
            FeDatafile detached = new FeDatafile();
            detached.setProcessingId(0L);
            detached.update(updateWrapper);
        }
        feDatafile.insert();

        // Analyze the freshly uploaded file.
        FileContentDto f = commonAnalyzeService.common(
                new AnalyzeDto().setPath(fileDto.getPath()).setTxtMethod(feDatafileUploadDto.getTxtMethod()));
        if (CollectionUtils.isEmpty(f.getContent()) || f.getContent().get(0).size() == 0 || f.getTitle().size() == 0) {
            throw new BDException("数据读取为空");
        }

        return toDataTypeResult(f.getListMap(), buildDatafields(f, feDatafile.getId()));
    }

    /** Builds the analyzer request for a persisted file (bucketDir/fileName2). */
    private AnalyzeDto toAnalyzeDto(FeDatafile feDatafile) {
        return new AnalyzeDto()
                .setPath(feDatafile.getBucketDir() + "/" + feDatafile.getFileName2())
                .setTxtMethod(feDatafile.getTxtMethod());
    }

    /**
     * Infers one {@link FeDatafield} per title column by sampling up to the
     * first 100 content rows.
     * Type priority: datetime > date > float > integer > string
     * (日期时间型 > 日期型 > 浮点型 > 整数型 > 字符型, decided by AnalyzeUtil).
     */
    private List<FeDatafield> buildDatafields(FileContentDto f, Long datafileId) {
        // column title -> sampled values (up to 100 rows)
        Map<String, List<String>> filterParam = new HashMap<>();
        int sampleRows = Math.min(100, f.getContent().size());
        for (int i = 0; i < sampleRows; i++) {
            for (Map.Entry<Integer, String> entry : f.getContent().get(i).entrySet()) {
                String title = f.getTitle().get(entry.getKey());
                filterParam.computeIfAbsent(title, k -> new ArrayList<>())
                        .add(entry.getValue() == null ? "" : entry.getValue());
            }
        }

        // column title -> inferred type
        Map<String, String> paramType = new HashMap<>();
        for (Map.Entry<String, List<String>> entry : filterParam.entrySet()) {
            paramType.put(entry.getKey(), analyzeUtil.analyzes(entry.getValue(), 1));
        }

        List<FeDatafield> list = new ArrayList<>();
        for (String title : f.getTitle()) {
            FeDatafield feDatafield = new FeDatafield();
            feDatafield.setCreateTime(new Date());
            feDatafield.setName(title);
            feDatafield.setType(paramType.get(title));
            feDatafield.setDatafileId(datafileId);
            list.add(feDatafield);
        }
        return list;
    }

    /** Packs analyzed rows and inferred field types into the API result shape. */
    private Map<String, Object> toDataTypeResult(List<LinkedHashMap<String, String>> data, List<FeDatafield> list) {
        Map<String, Object> r = new HashMap<>();
        r.put("data", data);
        r.put("type", list);
        return r;
    }
}

