package com.hebeu.xwl.service;


import cn.dev33.satoken.stp.StpUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.toolkit.SqlRunner;
import com.hebeu.xwl.config.DynamicTableTreadLocal;
import com.hebeu.xwl.dao.BatchDao;
import com.hebeu.xwl.dao.LogDao;
import com.hebeu.xwl.dao.UserDao;
import com.hebeu.xwl.dao.hivedao.ArticleDao;
import com.hebeu.xwl.pojo.*;
import com.hebeu.xwl.utils.UUIDUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;

@Service
public class BatchService {

    @Autowired
    private ArticleDao articleDao;

    @Autowired
    private BatchDao batchDao;

    @Autowired
    private UserDao userDao;

    @Autowired
    private LogDao logDao;

    /**
     * Creates a new dataset (batch) for a user and its backing Hive table.
     *
     * <p>Enforces the per-user dataset quota ({@code User.maxBatch}); on success the
     * batch row is inserted with a freshly generated unique table index and a Hive
     * table of that name is created.
     *
     * @param batch the dataset to create; {@code userId} must be set
     * @return code 0 on success, code 101 when the user's dataset quota is reached,
     *         or {@code null} when the dynamic table name could not be set
     *         (kept from the original contract — callers may null-check;
     *         FIXME: an error Result would be friendlier)
     */
    public Result addBatch(Batch batch) {
        User user = userDao.selectById(batch.getUserId());
        // Count how many datasets this user already owns.
        LambdaQueryWrapper<Batch> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(Batch::getUserId, batch.getUserId());
        int batchNum = Math.toIntExact(batchDao.selectCount(wrapper));
        if (batchNum >= user.getMaxBatch()) {
            return new Result(101, "该用户数据集数量已达上限");
        }
        // Unique physical table name: "user<uid>_batch<uuid>".
        String index = "user" + batch.getUserId() + "_" + "batch" + UUIDUtil.getUUID();
        batch.setIndex(index);
        batchDao.insert(batch);
        // Route the subsequent Hive DDL to the new table name.
        try {
            DynamicTableTreadLocal.INSTANCE.setTableName(index);
        } catch (Exception e) {
            // NOTE(review): the batch row has already been inserted at this point,
            // so a failure here leaves an orphan row without a Hive table — TODO confirm
            // whether a compensating delete (or @Transactional) is wanted.
            return null;
        }
        articleDao.hiveCreatTable(index);
        insertLog(batch.getUserId(), "创建数据集" + index + "");
        return new Result(0, "success");
    }

    /**
     * Deletes the given datasets: drops each backing Hive table, removes the batch
     * rows in one call, and records an operation log entry.
     *
     * @param userId the acting user (used only for the log entry)
     * @param ids    ids of the datasets to delete; an empty array is a no-op
     *               apart from the log entry
     */
    public void deleteBatch(int userId, int[] ids) {
        List<Integer> deleteIds = new ArrayList<>();
        for (int id : ids) {
            deleteIds.add(id);
            Batch batch = batchDao.selectById(id);
            String index = batch.getIndex();
            try {
                DynamicTableTreadLocal.INSTANCE.setTableName(index);
            } catch (Exception ignored) {
                // Best effort: still attempt to drop the table even if routing failed.
            }
            articleDao.dropTable(index);
        }
        batchDao.deleteBatchIds(deleteIds);
        // BUG FIX: ids.toString() on an int[] printed the identity hash (e.g. "[I@1a2b..."),
        // not the ids; Arrays.toString renders the actual id list.
        insertLog(userId, "删除数据集,被删除的数据集id为：" + Arrays.toString(ids));
    }

    /**
     * Clones an existing dataset into a brand-new one.
     *
     * @param batch       the new dataset; {@code userId} is required, and
     *                    {@code description}/{@code name} should be given defaults
     *                    by the caller
     * @param batchId_old id of the dataset to clone from
     * @return code 0 on success, code 101 when the user's dataset quota is reached
     */
    public Result cloneBatch(Batch batch, int batchId_old) {
        Result result = new Result();
        User user = userDao.selectById(batch.getUserId());
        // Quota check, same rule as addBatch.
        LambdaQueryWrapper<Batch> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(Batch::getUserId, batch.getUserId());
        int batchNum = Math.toIntExact(batchDao.selectCount(wrapper));
        if (batchNum >= user.getMaxBatch()) {
            result.setCode(101);
            result.setMsg("该用户数据集数量已达上限");
            return result;
        }
        // New unique table name for the clone.
        String index = "user" + batch.getUserId() + "_" + "batch" + UUIDUtil.getUUID();
        batch.setIndex(index);
        Batch source = batchDao.selectById(batchId_old);
        String index_old = source.getIndex();
        // NOTE(review): unlike addBatch/mergeBatch, no DynamicTableTreadLocal routing
        // happens before this Hive call — presumably clone() addresses both tables
        // explicitly; verify against ArticleDao.
        articleDao.clone(index, index_old);
        // The clone carries over the source's article count.
        batch.setArticlenum(source.getArticlenum());
        batchDao.insert(batch);

        result.setCode(0);
        result.setMsg("success");
        insertLog(batch.getUserId(),
                "克隆id为" + batchId_old + "的数据集" + index + ",新的数据集索引为" + index);
        return result;
    }

    /**
     * Merges two existing datasets into a new one and deletes the sources
     * (both their Hive tables and their batch rows).
     *
     * @param batch      the new merged dataset; {@code userId} must be set
     * @param batchId_01 id of the first source dataset
     * @param batchId_02 id of the second source dataset
     * @return code 0 on success, or {@code null} when the dynamic table name could
     *         not be set (kept from the original contract)
     */
    public Result mergeBatch(Batch batch, int batchId_01, int batchId_02) {
        Result result = new Result();
        String index = "user" + batch.getUserId() + "_" + "batch" + UUIDUtil.getUUID();
        batch.setIndex(index);
        batchDao.insert(batch);
        // Route subsequent Hive statements to the new table name.
        try {
            DynamicTableTreadLocal.INSTANCE.setTableName(index);
        } catch (Exception e) {
            return null;
        }
        Batch batch1 = batchDao.selectById(batchId_01);
        String index_01 = batch1.getIndex();
        // BUG FIX: the second source was loaded with batchId_01, so the merge always
        // combined the first dataset with itself and batchId_02's Hive table was
        // never dropped (while its row WAS deleted below). Load batchId_02 instead.
        Batch batch2 = batchDao.selectById(batchId_02);
        String index_02 = batch2.getIndex();
        int mergedArticleCount = batch1.getArticlenum() + batch2.getArticlenum();
        // Re-read the freshly inserted row by its unique index and store the
        // combined article count.
        LambdaQueryWrapper<Batch> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(Batch::getIndex, index);
        Batch inserted = batchDao.selectList(wrapper).get(0);
        inserted.setArticlenum(mergedArticleCount);
        batchDao.updateById(inserted);
        // Merge both source tables into the new one, then remove the sources.
        articleDao.merge(index, index_01, index_02);
        articleDao.dropTable(index_01);
        articleDao.dropTable(index_02);
        batchDao.deleteById(batchId_01);
        batchDao.deleteById(batchId_02);

        result.setCode(0);
        result.setMsg("success");
        insertLog(batch.getUserId(),
                "合并id为" + batchId_01 + "与id为：" + batchId_02 + "的数据集" +
                        ",新的数据集索引为" + index);
        return result;
    }

    /**
     * Updates a dataset's mutable fields and logs the before/after description
     * and name.
     *
     * @param batch the dataset to update; {@code batchId} and {@code userId} must be set
     */
    public void updateBatch(Batch batch) {
        // Read the current row first so the log can record the pre-update values.
        Batch beforeBatch = batchDao.selectById(batch.getBatchId());
        batchDao.updateById(batch);

        insertLog(batch.getUserId(), "更新id为" + batch.getBatchId() + "的数据集，"
                + "更新前：描述为：" + beforeBatch.getDescription() + ",名字为：" + beforeBatch.getName()
                + "更新后：描述为：" + batch.getDescription() + ",名字为：" + batch.getName());
    }

    /**
     * Lists all datasets owned by a user; the lookup itself is logged.
     *
     * @param userId owner of the datasets
     * @return the user's datasets (possibly empty)
     */
    public List<Batch> findAll(int userId) {
        insertLog(userId, "查看所有数据集");
        LambdaQueryWrapper<Batch> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(Batch::getUserId, userId);
        return batchDao.selectList(wrapper);
    }

    /**
     * Fetches one dataset by id; the lookup itself is logged.
     *
     * @param userId  the acting user (used only for the log entry)
     * @param batchId id of the dataset to fetch
     * @return the dataset row
     */
    public Batch findById(int userId, int batchId) {
        Batch batch = batchDao.selectById(batchId);
        // NOTE(review): selectById returns null for an unknown id, which would NPE
        // below — TODO confirm callers guarantee the id exists.
        insertLog(userId, "查看id为" + batch.getBatchId() + ",描述为："
                + batch.getDescription() + ",命名为：" + batch.getName() + "的数据集");
        return batch;
    }

    /**
     * Returns the physical Hive table index for a dataset.
     *
     * @param batchId id of the dataset
     * @return the dataset's table index string
     */
    public String findIndex(int batchId) {
        return batchDao.selectById(batchId).getIndex();
    }

    /**
     * Inserts an operation-log row for the given user. Timestamps are stored as
     * Unix epoch seconds, matching the original {@code Date.getTime()/1000} scheme.
     */
    private void insertLog(int userId, String record) {
        Log log = new Log();
        log.setUserId(userId);
        log.setRecord(record);
        log.setTime(System.currentTimeMillis() / 1000);
        logDao.insert(log);
    }

}
