package com.loongtech.bi.action.newsystem;

import com.loongtech.bi.action.authManagement.ResultData;
import com.loongtech.bi.manager.flow.HadoopQueryManager;
import com.loongtech.bi.manager.system.SysLogManager;
import com.loongtech.bi.support.Session;
import com.loongtech.core.util.RetCode;
import org.springframework.context.annotation.Scope;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import javax.annotation.Resource;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Generic read-only SQL query endpoint (通用SQL查询).
 * <p>
 * Accepts an ad-hoc SQL string, rejects statements that would modify data,
 * caps unbounded SELECTs at 1000 rows, and delegates execution to
 * {@link HadoopQueryManager}. Successful queries are written to the audit log.
 * <p>
 * NOTE(review): this endpoint executes caller-supplied SQL verbatim, so it must
 * only be reachable by trusted/authenticated operators — the keyword blacklist
 * below is defense in depth, not a substitute for access control.
 *
 * Author: yuri
 * Date: 12:11 2017/5/23
 */
@RestController
@Scope("prototype")
@RequestMapping("/hivequery")
public class HiveQueryAction {

    private static final Logger LOG = Logger.getLogger(HiveQueryAction.class.getName());

    /** Statement prefixes that mutate data/schema and are therefore rejected. */
    private static final String[] FORBIDDEN_PREFIXES = {
            "insert ", "update ", "delete ", "drop ", "truncate ", "alter ", "create "
    };

    @Resource
    private Session session;
    @Resource
    private SysLogManager sysLogManager;
    @Resource
    private HadoopQueryManager hadoopQueryManager;

    /**
     * Executes a read-only SQL query and returns its header columns and rows.
     *
     * @param sql the SQL statement to run; must not be a mutating statement
     * @return {@code eSuccess} with a (columns, rows) entry on success;
     *         {@code eEmptyQuery} for a blank statement;
     *         {@code eNotAllowOperation} for a forbidden statement or an
     *         empty/headerless result; a generic error on execution failure
     */
    @RequestMapping(value = "list.do", method = {RequestMethod.GET, RequestMethod.POST})
    public ResultData<?> search(@RequestParam String sql) {
        if (null == sql || sql.trim().isEmpty()) {
            return new ResultData<>(RetCode.eEmptyQuery.getErrorcode(), RetCode.eEmptyQuery.getErrorMsg());
        }
        // Keep the caller's original casing: lowercasing the whole statement (the
        // previous behavior) corrupted case-sensitive string literals such as
        // `where name = 'Alice'`. Use a lowered copy ONLY for keyword checks.
        // Locale.ROOT avoids locale-sensitive case mapping (e.g. Turkish dotless i).
        sql = sql.trim();
        String lowered = sql.toLowerCase(Locale.ROOT);
        // Reject statements that modify data or schema.
        for (String prefix : FORBIDDEN_PREFIXES) {
            if (lowered.startsWith(prefix)) {
                return new ResultData<>(RetCode.eNotAllowOperation.getErrorcode(), RetCode.eNotAllowOperation.getErrorMsg());
            }
        }
        // Reject stacked statements ("select ...; drop ..."): a ';' is only allowed
        // as the final character. Hive likely refuses these anyway — defense in depth.
        int semi = lowered.indexOf(';');
        if (semi >= 0 && !lowered.substring(semi + 1).trim().isEmpty()) {
            return new ResultData<>(RetCode.eNotAllowOperation.getErrorcode(), RetCode.eNotAllowOperation.getErrorMsg());
        }
        // A SELECT without an explicit LIMIT gets a hard 1000-row cap so an
        // unbounded query cannot flood the service.
        if (lowered.contains("select") && !lowered.contains("limit")) {
            if (sql.endsWith(";")) {
                sql = sql.substring(0, sql.length() - 1) + " limit 1000 ;";
            } else {
                sql = sql + " limit 1000 ;";
            }
        }
        // Execute and audit. A null/empty header or null row list is treated as
        // a disallowed operation, matching the original behavior.
        Map.Entry<List<String>, List<Object[]>> ret;
        try {
            ret = hadoopQueryManager.executeSQL(sql);
            if (null == ret || null == ret.getKey() || ret.getKey().isEmpty() || null == ret.getValue()) {
                return new ResultData<>(RetCode.eNotAllowOperation.getErrorcode(), RetCode.eNotAllowOperation.getErrorMsg());
            }
            sysLogManager.addLog(session.getUserName(), "查询执行成功,sql=" + sql);
        } catch (Exception ex) {
            // Log with full stack trace and the offending SQL instead of
            // printStackTrace(), which loses context and bypasses log routing.
            LOG.log(Level.WARNING, "SQL execution failed, sql=" + sql, ex);
            return ResultData.otherException();
        }
        return new ResultData<>(RetCode.eSuccess.getErrorcode(), ret);
    }
}
