package cn.getech.data.development.controller;

import cn.getech.data.development.config.properties.DataDevelopmentConfig;
import cn.getech.data.development.constant.DataDevelopmentBizExceptionEnum;
import cn.getech.data.development.constant.LastRunState;
import cn.getech.data.development.constant.PermissionTypeEnum;
import cn.getech.data.development.constant.StorageEngineEnum;
import cn.getech.data.development.entity.DataExplore;
import cn.getech.data.development.entity.TableFieldInfo;
import cn.getech.data.development.entity.TableInfo;
import cn.getech.data.development.service.DataExploreService;
import cn.getech.data.development.service.TableInfoService;
import cn.getech.data.development.utils.*;
import cn.getech.data.intelligence.common.exception.RRException;
import cn.getech.data.intelligence.common.utils.*;
import cn.getech.data.intelligence.common.validator.ValidatorUtils;
import cn.getech.system.center.annotation.SysLog;
import cn.getech.system.center.constant.OperationModulesEnum;
import cn.getech.system.center.constant.OperationTypeEnum;
import cn.getech.system.center.constant.module.ModuleEnum;
import cn.getech.system.center.utils.ShiroUtils;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.google.common.collect.Sets;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.*;

import javax.servlet.http.HttpServletResponse;
import java.io.OutputStream;
import java.net.URLEncoder;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * <p>
 * 数据探索
 * </p>
 *
 * @author zenith
 * @since 2019-07-08
 */
@Slf4j
@Api(tags = "数据探索")
@RestController
@RequestMapping("/bdp/dataExplore")
@Slf4j
@Api(tags = "数据探索")
@RestController
@RequestMapping("/bdp/dataExplore")
public class DataExploreController {

    @Autowired
    private DataExploreService dataExploreService;

    @Autowired
    private DataDevelopmentConfig hiveConfig;

    @Autowired
    private HdfsUserUtil hdfsUserUtil;

    @Autowired
    private TableInfoService tableInfoService;

    /** Hive database that holds the temporary result tables of explore runs. */
    @Value("${spring.hive.exploreDB:explore}")
    private String exploreDB;

    /**
     * Executors of the explore tasks that are still running, keyed by DataExplore id.
     * Written by request threads ({@link #run}/{@link #stop}) and by the worker
     * threads themselves, so it must be a ConcurrentHashMap (the original plain
     * HashMap was not thread-safe). An entry is removed either by {@link #stop}
     * or by the worker when the task finishes.
     */
    private static final Map<Integer, ExecutorService> poolMap = new ConcurrentHashMap<>();

    /**
     * Paged listing of the current user's explore records, newest first.
     *
     * @param params  extra filter parameters (e.g. {'tableName':'t_user_tbl'})
     * @param curPage 1-based page number
     * @param limit   page size
     * @param sql     SQL text filter
     */
    @ApiOperation("获取列表")
    @GetMapping("/list")
    //@SysLog(value="获取探索列表",type= OperationTypeEnum.SELECTCODE, modulesType = OperationModulesEnum.BDPMODELCODE,platformModuleType = ModuleEnum.DATA_EXPLORE_MODULE_CODE)
    //todo 0925
    //@RequiresPermissions({"bdp:dateExpore:list"})
    public R list(@ApiParam(value = "{'tableName':'t_user_tbl'}") @RequestParam Map<String, Object> params,
                  @RequestParam("curPage") Integer curPage,
                  @RequestParam("limit") Integer limit,
                  @RequestParam("sql") String sql) {
        // Only the caller's own records, ordered by id descending.
        params.put("createPer", ShiroUtils.getUserId().intValue());
        params.put("sidx", "id");
        params.put("order", "desc");
        params.put(Constant.PAGE, curPage.toString());
        params.put(Constant.LIMIT, limit.toString());
        params.put("sql", sql);
        PageUtils page = dataExploreService.queryPage(params);
        List<DataExplore> dataExploreList = (List<DataExplore>) page.getList();
        for (DataExplore dataExplore : dataExploreList) {
            if (dataExplore.getState() != null) {
                // Resolve the numeric state to its display name for the UI.
                dataExplore.setStateName(LastRunState.ObjOf(dataExplore.getState()).getName());
            }
        }
        return R.okWithPage(page);
    }

    /**
     * Stops a running explore task: marks the record as STOP and shuts down
     * its executor. Returns an error if the task has already ended.
     */
    @ApiOperation("停止sql任务")
    @PostMapping("/stop")   // /data-development/bdp/dataExplore/stop
    @SysLog(value="停止sql任务",type= OperationTypeEnum.STOPCODE, modulesType = OperationModulesEnum.BDPMODELCODE,platformModuleType = ModuleEnum.DATA_EXPLORE_MODULE_CODE)
    public R stop(@RequestBody DataExplore explore) {
        DataExplore exp = dataExploreService.getById(explore.getId());
        if (exp != null) {
            exp.setState(LastRunState.STOP.getCode());
            exp.setMsg("success");
            // Elapsed seconds since the task record was created.
            exp.setExecuteTime(((float) (DateUtil.parseDateTime(DateUtil.now()).getTime() - exp.getCreateTime().getTime())) / 1000);
            dataExploreService.updateById(exp);
            // Atomically detach the executor; a missing entry means the task already ended.
            ExecutorService pool = poolMap.remove(explore.getId());
            if (pool != null) {
                pool.shutdownNow();
            } else {
                return R.error("任务已经停止");
            }
        }
        return R.ok();
    }

    /**
     * Runs an explore SQL asynchronously: persists the record as RUNNING, then on a
     * single-thread executor materialises the query into a result table
     * {exploreDB}.{uuid} (Hive by default, Kudu/Impala when the search engine is KUDU)
     * and finally updates the record to SUCCESS or FAIL.
     */
    @SysLog(value="运行sql任务",type= OperationTypeEnum.RUNCODE, modulesType = OperationModulesEnum.BDPMODELCODE,platformModuleType = ModuleEnum.DATA_EXPLORE_MODULE_CODE)
    @ApiOperation("运行sql任务")
    @PostMapping("/run")
    public R run(@RequestBody DataExplore explore) {
        DateTime dateTime = DateUtil.parseDateTime(DateUtil.now());
        ValidatorUtils.validateEntity(explore);
        if (!new SqlCheckUtil().sqlCheck(explore.getSql())) {
            return R.error("SQL不合法");
        }
        explore.setCreatePer(ShiroUtils.getUserId().intValue());
        explore.setCreateTime(DateUtil.date());
        explore.setState(LastRunState.RUNNING.getCode());
        // Result table name must be a valid identifier, so strip the dashes.
        String resultTableName = UUID.randomUUID().toString().replaceAll("-", "");
        explore.setResultTableName(resultTableName);
        try {
            // Bind the ranger user of the current platform user before touching HDFS/Hive.
            hdfsUserUtil.setRangerUser(ShiroUtils.getUserId());
            if (!dataExploreService.save(explore)) {
                return R.error(DataDevelopmentBizExceptionEnum.SERVER_ERROR.getMessage());
            }
            // Drop only a trailing ';' — the previous replace(";", "") removed every
            // semicolon and corrupted queries containing ';' inside string literals.
            String trimmedSql = explore.getSql().trim();
            if (trimmedSql.endsWith(";")) {
                explore.setSql(trimmedSql.substring(0, trimmedSql.length() - 1));
            }
            ExecutorService myExecutor = Executors.newSingleThreadExecutor();
            poolMap.put(explore.getId(), myExecutor);
            myExecutor.execute(() -> {
                String url = hiveConfig.getHiveserverurl();
                String sql = String.format("CREATE TABLE %s.%s\n" +
                        "   ROW FORMAT SERDE \"org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe\"\n" +
                        "   STORED AS RCFile\n" +
                        "   AS\n" +
                        "%s", exploreDB, resultTableName, explore.getSql());
                if (StorageEngineEnum.KUDU.getCode().equals(explore.getSearchEngine())) {
                    url = hiveConfig.getKuduJdbcUrl();
                    if (explore.getSql().toUpperCase().contains("REFRESH")
                            || explore.getSql().toUpperCase().contains("INVALIDATE METADATA")) {
                        // Metadata statements (e.g. "REFRESH test.test1000") run verbatim.
                        sql = explore.getSql();
                    } else {
                        sql = String.format("CREATE TABLE %s.%s \n" +
                                "   AS \n" +
                                "%s", exploreDB, resultTableName, explore.getSql());
                    }
                }
                HiveTableUtil hiveTableUtilHdfs = null;
                try {
                    // Connection carries the current user's ranger credentials so
                    // table creation is permission-checked.
                    hiveTableUtilHdfs = new HiveTableUtil(url, "root.default", "default",
                            hdfsUserUtil.getRangerUserName(), hdfsUserUtil.getRangerUserPassword());
                    hiveTableUtilHdfs.execute(sql);
                    // Still registered ⇒ the task was not stopped by the user meanwhile.
                    if (poolMap.containsKey(explore.getId())) {
                        markFinished(resultTableName, LastRunState.SUCCESS, "success", dateTime);
                    }
                } catch (Exception e) {
                    log.error("运行失败：sql:\n{}", explore.getSql(), e);
                    if (poolMap.containsKey(explore.getId())) {
                        markFinished(resultTableName, LastRunState.FAIL, e.getMessage(), dateTime);
                    }
                    throw new RRException(e.getMessage());
                } finally {
                    // Null-check: the old code NPE'd here when the HiveTableUtil
                    // constructor itself threw.
                    if (hiveTableUtilHdfs != null) {
                        hiveTableUtilHdfs.close();
                    }
                    // Release the executor; without this every run leaked one live
                    // thread and one poolMap entry forever.
                    ExecutorService finished = poolMap.remove(explore.getId());
                    if (finished != null) {
                        finished.shutdown();
                    }
                }
            });
        } catch (Exception e) {
            log.error("运行失败：sql:\n{}", explore.getSql(), e);
            return R.error("执行SQL错误:" + e.getMessage());
        }
        return R.ok();
    }

    /**
     * Persists the terminal state of a finished explore task, looked up by its
     * result table name (extracted from the duplicated success/fail blocks in run()).
     */
    private void markFinished(String resultTableName, LastRunState state, String msg, DateTime startTime) {
        DataExplore exp = dataExploreService.getOne(new QueryWrapper<DataExplore>()
                .eq("result_table_name", resultTableName));
        if (exp != null) {
            exp.setState(state.getCode());
            exp.setMsg(msg);
            // Elapsed seconds since the run started.
            exp.setExecuteTime(((float) (DateUtil.parseDateTime(DateUtil.now()).getTime() - startTime.getTime())) / 1000);
            dataExploreService.updateById(exp);
        }
    }

    /**
     * Previews one page of the result table produced by a finished explore task.
     * Results older than three months are treated as expired.
     */
    @SysLog(value="结果数据预览",type= OperationTypeEnum.SELECTCODE, modulesType = OperationModulesEnum.BDPMODELCODE,platformModuleType = ModuleEnum.DATA_EXPLORE_MODULE_CODE)
    @ApiOperation("结果数据预览")
    @GetMapping("/dataPreview")
//    @RequiresPermissions("bdp:dataExplore:dataPreview")
    public R dataPreview(@RequestParam Map<String, Object> params) {
        int id = Integer.parseInt(params.get("id").toString());
        int pageSize = Integer.parseInt(params.get("limit").toString());
        int currPage = Integer.parseInt(params.get("page").toString());
        DataExplore dataExplore = dataExploreService.getById(id);
        if (dataExplore.getCreateTime() != null) {
            // Result tables are kept for three months only.
            Date expireDate = DateUtils.addDateMonths(dataExplore.getCreateTime(), 3);
            if (expireDate.getTime() < System.currentTimeMillis()) {
                return R.error("已过期，请重新查询");
            }
        }
        String tableName = dataExplore.getResultTableName();
        String sql = "select * from " + tableName;
        // Tenant-level row cap: non-admin users may be limited to fewer rows per page.
        Integer userId = dataExploreService.isTenantAdmin(ShiroUtils.getTenantId(), ShiroUtils.getUserId().intValue());
        Integer limit = dataExploreService.getSqlLimit(ShiroUtils.getTenantId(), userId);
        if (userId != null && limit != null && pageSize > limit) {
            params.put("limit", limit);
        }
        params.put("sql", sql);
        String url = hiveConfig.getHiveserverurl();
        if (StorageEngineEnum.KUDU.getCode().equals(dataExplore.getSearchEngine())) {
            url = hiveConfig.getKuduJdbcUrl();
            params.put("searchEngine", StorageEngineEnum.KUDU.getCode());
        }
        HiveTableUtil hiveTableUtil = new HiveTableUtil(url, exploreDB, "hdfs", "hdfs");
        Map<String, Object> resultMap;
        try {
            params.put("tableName", dataExplore.getResultTableName());
            resultMap = hiveTableUtil.dataPreview(params);
        } finally {
            hiveTableUtil.close(); // connection was leaked before
        }
        List<JSONObject> data = (List<JSONObject>) resultMap.get("data");
        List<String> columnList = (List<String>) resultMap.get("columnList");
        // Total count is served by the dedicated /dataPreview/{id} endpoint; 0 here on purpose.
        int totalCount = 0;
        PageUtils page = new PageUtils(data, totalCount, pageSize, currPage);
        R r = R.okWithPage(page);
        // Ship the column headers alongside the page data.
        r.put("titleList", columnList);
        return r;
    }

    /**
     * Total row count of the result table of an explore task; complements
     * {@link #dataPreview}, which always reports 0.
     */
    @ApiOperation("数据预览总条数")
    @GetMapping("/dataPreview/{id}")
    //@SysLog(value="数据预览总条数",type= OperationTypeEnum.SELECTCODE, modulesType = OperationModulesEnum.BDPMODELCODE,platformModuleType = ModuleEnum.DATA_EXPLORE_MODULE_CODE)
    public R dataPreviewRowNum(@PathVariable("id") Integer id) {
        DataExplore dataExplore = dataExploreService.getById(id);
        String url = hiveConfig.getHiveserverurl();
        if (StorageEngineEnum.KUDU.getCode().equals(dataExplore.getSearchEngine())) {
            url = hiveConfig.getKuduJdbcUrl();
        }
        HiveTableUtil hiveTableUtil = new HiveTableUtil(url, exploreDB, "hdfs", "hdfs");
        Long totalCount;
        try {
            TableInfo tableInfo = new TableInfo();
            tableInfo.setTableName(dataExplore.getResultTableName());
            totalCount = hiveTableUtil.dataPreviewRowNum(tableInfo);
        } finally {
            hiveTableUtil.close(); // connection was leaked before
        }
        Map<String, Object> map = new HashMap<>();
        map.put("totalCount", totalCount);
        return R.ok(map);
    }

    /**
     * Builds the export SQL for a result table, restricting non-admin users to the
     * columns they hold EXPORT permission on; when no permitted field info exists at
     * all, " limit 0" is appended so an empty file is exported.
     *
     * <p>Currently unused by {@link #downloadQuery} — see the note there.
     */
    private String wrapSql(DataExplore dataExplore) {
        String tableName = dataExplore.getResultTableName();
        String sql = "select * from " + tableName;
        Long userId = ShiroUtils.getUserId();
        if (!DataPermissionUtil.isAdmin(userId.intValue())) { // admins bypass column checks
            SqlParseUtil.QueryTable table = SqlParseUtil.parseHiveql(dataExplore.getSql());
            Set<String> columns = table.getColumnNames();
            if (CollectionUtils.isNotEmpty(columns)) {
                List<TableFieldInfo> fieldInfoList = tableInfoService.selectListByDbNamesAndTableNames(
                        table.getDbNames(), table.getTableNames(), PermissionTypeEnum.EXPORT.getType());
                if (CollectionUtils.isNotEmpty(fieldInfoList)) {
                    Set<String> columnNames = Sets.newLinkedHashSet();
                    if (columns.contains("*")) {
                        columnNames.add("*");
                    } else {
                        // Keep only the queried columns the user may export.
                        for (TableFieldInfo tableFieldInfo : fieldInfoList) {
                            if (columns.contains(tableFieldInfo.getFieldName())) {
                                columnNames.add(tableFieldInfo.getFieldName());
                            }
                        }
                    }
                    if (CollectionUtils.isNotEmpty(columnNames)) {
                        sql = SqlParseUtil.generateSql(columnNames, dataExplore.getResultTableName());
                    }
                } else {
                    sql = sql + " limit 0"; // no permitted fields ⇒ export empty data
                }
            }
        }
        return sql;
    }

    /**
     * Streams the result table of an explore task to the client as a CSV attachment.
     * Expired results (older than three months) and errors are reported as a one-line
     * CSV instead of an HTTP error, so the download itself always succeeds.
     */
    @ApiOperation("下载查询数据")
    @SysLog(value="下载查询数据",type= OperationTypeEnum.DOWNLOADCODE, modulesType = OperationModulesEnum.BDPMODELCODE,platformModuleType = ModuleEnum.DATA_EXPLORE_MODULE_CODE)
    @GetMapping("/downloadQuery")
    public void downloadQuery(@RequestParam Map<String, Object> params, HttpServletResponse response) throws Exception {
        int id = Integer.parseInt(params.get("id").toString());
        DataExplore dataExplore = dataExploreService.getById(id);
        String tableName = dataExplore.getResultTableName();
        String sql = "select * from " + tableName;
        // Tenant-level export row cap for non-admin users.
        Integer userId = dataExploreService.isTenantAdmin(ShiroUtils.getTenantId(), ShiroUtils.getUserId().intValue());
        Integer limit = dataExploreService.getSqlLimit(ShiroUtils.getTenantId(), userId);
        if (userId != null && limit != null) {
            sql = sql + " limit " + limit;
        }
        // Column-permission wrapping (wrapSql) is disabled: aliased columns in
        // complex SQL cannot be mapped back to their field permissions.
        //String sql = wrapSql(dataExplore);
        log.info("导出sql:{}\n", sql);
        List<String> column = new ArrayList<>();
        // Attachment headers: the file is named dataExplore<id>.csv.
        String fileNameOlder = "dataExplore" + id;
        response.setHeader("Content-Type", "application/octet-stream");
        String fileName = String.format("%s.csv", URLEncoder.encode(fileNameOlder, "utf-8"));
        response.setHeader("Content-Disposition", "attachment;filename=" + fileName);
        response.setHeader("filename", fileName);
        response.setHeader("Access-Control-Expose-Headers", "Content-Disposition");
        response.setCharacterEncoding("UTF-8");
        OutputStream outputStream = response.getOutputStream();
        if (dataExplore.getCreateTime() != null) {
            // Result tables older than three months are expired.
            Date expireDate = DateUtils.addDateMonths(dataExplore.getCreateTime(), 3);
            if (expireDate.getTime() < System.currentTimeMillis()) {
                ArrayList<String> list = new ArrayList<>();
                list.add("已过期，请重新查询");
                BigDataExcelUtil.geneteData(list, null, outputStream);
                outputStream.close();
                return;
            }
        }
        if (sql.contains(" limit 0")) {
            ArrayList<String> list = new ArrayList<>();
            list.add("无字段导出权限");
            BigDataExcelUtil.geneteData(list, null, outputStream);
            outputStream.close();
            return;
        }
        String url = hiveConfig.getHiveserverurl();
        if (StorageEngineEnum.KUDU.getCode().equals(dataExplore.getSearchEngine())) {
            url = hiveConfig.getKuduJdbcUrl();
        }
        HiveTableUtil hiveTableUtil = new HiveTableUtil(url, exploreDB, "hdfs", "hdfs");
        try {
            ResultSet result = hiveTableUtil.getResultSet(sql);
            ResultSetMetaData tsmd = result.getMetaData();
            String prefix = tableName + ".";
            for (int i = 1; i <= tsmd.getColumnCount(); i++) {
                // Strip the "<table>." prefix literally; the old replaceAll used the
                // table name as a regex, where '.' matched any character.
                String columnName = tsmd.getColumnName(i);
                if (columnName.startsWith(prefix)) {
                    columnName = columnName.substring(prefix.length());
                }
                column.add(columnName);
            }
            // Excel caps a sheet at 16384 columns.
            if (column.size() > 16384) {
                throw new RRException("列数超出excel支持的最大值");
            }
            BigDataExcelUtil.geneteData(column, result, outputStream);
        } catch (RRException e) {
            ArrayList<String> list = new ArrayList<>();
            list.add(e.getMessage());
            BigDataExcelUtil.geneteData(list, null, outputStream);
        } catch (Exception e) {
            ArrayList<String> list = new ArrayList<>();
            list.add("sql解析失败：" + sql);
            BigDataExcelUtil.geneteData(list, null, outputStream);
            log.error("解析sql错误：", e);
        } finally {
            hiveTableUtil.close(); // was leaked on every error path before
            outputStream.close();
        }
    }

    /**
     * Returns the {@code curr}-th (1-based) chunk when {@code list} is split into
     * consecutive chunks of size {@code n}; the last chunk may be shorter.
     *
     * <p>NOTE(review): despite the original "将一个list均分成n个list" comment, {@code n}
     * acts as the chunk <em>size</em>, not the number of chunks, and an out-of-range
     * {@code curr} throws IndexOutOfBoundsException — callers should confirm the
     * intent. (Behavior preserved; only dead locals were removed.)
     *
     * @param list source list
     * @param n    chunk size
     * @param curr 1-based chunk index
     * @return the requested chunk as a sub-list view, or {@code null} for an empty list
     */
    public <T> List<T> averageAssign(List<T> list, int n, int curr) {
        if (list.size() == 0) {
            return null;
        }
        int number = list.size() / n; // index of the (possibly shorter) trailing chunk
        int i = curr - 1;
        if (i == number) {
            return list.subList(i * n, list.size());
        }
        return list.subList(i * n, (i + 1) * n);
    }

}

