package com.stone.ctrl;

import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.sql.SparkSession;

import com.stone.util.AesEncryptUtils;
import com.stone.util.DateUtils;

public class SparkRun {

	private static final Log log = LogFactory.getLog(SparkRun.class);

	/** Timestamp pattern of the optional third CLI argument (execution time). */
	private static final String EXEC_TIME_PATTERN = "yyyyMMddHHmmss";

	/**
	 * Entry point: decrypts a Spark SQL script passed as {@code args[0]},
	 * substitutes date/time placeholders, then executes each ';'-separated
	 * statement on a Hive-enabled SparkSession.
	 *
	 * <p>Arguments:
	 * <ul>
	 *   <li>{@code args[0]} — AES-encrypted SQL script (may contain multiple
	 *       ';'-separated statements)</li>
	 *   <li>{@code args[1]} — AES-encrypted local-parameter spec, e.g.
	 *       {@code yyyy-MM-dd-1/24##${yyyy-MM-dd-1/24}##1##day_hour_delay@@HH-1/24##${HH-1/24}##1##hour}</li>
	 *   <li>{@code args[2]} — (optional) execution time in yyyyMMddHHmmss;
	 *       when absent the current time is used (legacy mode)</li>
	 *   <li>{@code args[3]} — (required together with {@code args[2]}) date
	 *       format for the built-in week/month/year placeholder replacement</li>
	 * </ul>
	 *
	 * @throws Exception if arguments are missing/invalid or any statement fails
	 */
	public static void main(String[] args) throws Exception {

		// Original code checked only length == 0 but then read args[1]
		// unconditionally, producing a bare ArrayIndexOutOfBoundsException
		// for a single-argument invocation. Require both mandatory args.
		if (args == null || args.length < 2) throw new Exception("未传入参数");

		SparkSession spark = null;
		try {
			String sparkSql = AesEncryptUtils.decrypt(args[0]);

			System.out.println("===========SQL替换之前=================");
			System.out.println(sparkSql);
			System.out.println("======================================");

			String localParams = StringUtils.trim(AesEncryptUtils.decrypt(args[1]));

			Date date;
			if (args.length == 2) { // legacy mode: no execution time supplied
				date = new Date();
			} else {
				// args[2] is present, so args[3] (the date format) must be too;
				// the original read args[3] blindly and could throw AIOOBE.
				if (args.length < 4) {
					throw new Exception("未传入参数");
				}
				String execTime = args[2];
				System.out.println("startTime："+execTime);

				// SimpleDateFormat is not thread-safe, but this instance is
				// method-local and used exactly once, which is safe.
				SimpleDateFormat sdf = new SimpleDateFormat(EXEC_TIME_PATTERN);
				date = sdf.parse(execTime);

				String biz_format = args[3];
				System.out.println("查询日期格式："+biz_format);

				sparkSql = replaceTime(sparkSql, date, biz_format);
			}

			//yyyy-MM-dd-1/24##${yyyy-MM-dd-1/24}##1##day_hour_delay@@HH-1/24##${HH-1/24}##1##hour
			log.info("localParams:"+localParams);
			sparkSql = applyLocalParams(sparkSql, localParams, date);

			System.out.println("===========SQL替换之后=================");
			System.out.println(sparkSql);
			System.out.println("======================================");

			spark = SparkSession.builder()
					.enableHiveSupport()
					.getOrCreate();

			spark.sql("set hive.exec.dynamic.partition.mode=nonstrict");

			executeStatements(spark, sparkSql);
		} catch (Exception e) {
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
			// Preserve the cause chain; the original rethrow kept only the
			// message, discarding the stack trace of the real failure.
			throw new Exception(e.getMessage(), e);
		} finally {
			if (spark != null) {
				spark.close();
			}
		}
	}

	/**
	 * Applies the "@@"-separated local-parameter replacements to the SQL.
	 * Each entry has the form {@code prop##placeholder##delay##name}, where
	 * {@code name} is one of "hour", "day_hour_delay" or "day_delay".
	 *
	 * @param sparkSql    SQL text containing zero or more placeholders
	 * @param localParams raw parameter spec (may be blank, then a no-op)
	 * @param date        reference time the delays are computed from
	 * @return the SQL with every matched placeholder substituted
	 */
	private static String applyLocalParams(String sparkSql, String localParams, Date date) {
		if (StringUtils.isBlank(localParams)) {
			return sparkSql;
		}
		for (String localParam : localParams.split("@@")) {
			String[] params = localParam.split("##");
			String prop = params[0];   // e.g. yyyy-MM-dd-1/24
			String format = params[1]; // e.g. ${yyyy-MM-dd-1/24}
			int delay = Integer.parseInt(params[2]);
			String name = params[3];   // hour | day_hour_delay | day_delay
			if (!sparkSql.contains(format)) {
				continue;
			}
			String[] propSplit = prop.split("-");
			if (StringUtils.equals(name, "hour")) {
				sparkSql = sparkSql.replace(format, DateUtils.getDelayHour(delay, propSplit[0], date));
			} else {
				// Rebuild the date pattern from prop: with 3 '-'-segments keep
				// it whole, with 4 segments drop the trailing delay suffix.
				String propStr = propSplit[0];
				if (propSplit.length == 3) {
					propStr = prop;
				} else if (propSplit.length == 4) {
					propStr = prop.substring(0, prop.lastIndexOf("-"));
				}
				if (StringUtils.equals(name, "day_hour_delay")) {
					sparkSql = sparkSql.replace(format, DateUtils.getDelayHour(delay, propStr, date));
				} else if (StringUtils.equals(name, "day_delay")) {
					sparkSql = sparkSql.replace(format, DateUtils.getDelayDay(delay, propStr, date));
				}
			}
		}
		return sparkSql;
	}

	/**
	 * Executes every ';'-separated statement, collecting failures so all
	 * statements get a chance to run before the job is reported as failed.
	 *
	 * @throws Exception aggregating the messages of every failed statement
	 */
	private static void executeStatements(SparkSession spark, String sparkSql) throws Exception {
		boolean isError = false;
		// No shared state here, so StringBuilder beats the original StringBuffer.
		StringBuilder errInfo = new StringBuilder();
		for (String sql : sparkSql.split(";")) {
			if (StringUtils.isBlank(sql)) {
				continue; // a trailing ';' yields an empty fragment; skip it
			}
			try {
				spark.sql(sql);
			} catch (Exception e) {
				if (log.isErrorEnabled()) {
					log.error(e.getMessage(), e);
				}
				isError = true;
				if (errInfo.length() > 0) {
					errInfo.append('\n'); // separate messages of multiple failures
				}
				errInfo.append(e.getMessage());
			}
		}
		if (isError) {
			throw new Exception(errInfo.toString());
		}
	}

	/**
	 * Replaces the built-in natural week/month/year placeholders with the
	 * [start, end] boundaries computed relative to {@code date}.
	 *
	 * @param sparkSql   SQL text containing zero or more ${...} placeholders
	 * @param date       reference date the periods are computed from
	 * @param biz_format output date format forwarded to DateUtils
	 * @return the SQL with every placeholder substituted
	 */
	private static String replaceTime(String sparkSql, Date date, String biz_format) {
		// Hoist each boundary-pair lookup: the original called every
		// DateUtils.getNLast* helper twice (once per array element).
		// Natural week (current and previous)
		String[] week = DateUtils.getNLastWeek(biz_format, date, 0);
		String[] lastWeek = DateUtils.getNLastWeek(biz_format, date, 1);
		sparkSql = sparkSql.replace("${weekstart}", week[0]);
		sparkSql = sparkSql.replace("${weekend}", week[1]);
		sparkSql = sparkSql.replace("${weekstart-1}", lastWeek[0]);
		sparkSql = sparkSql.replace("${weekend-1}", lastWeek[1]);

		// Natural month (current and previous)
		String[] month = DateUtils.getNLastMonth(biz_format, date, 0);
		String[] lastMonth = DateUtils.getNLastMonth(biz_format, date, 1);
		sparkSql = sparkSql.replace("${monthstart}", month[0]);
		sparkSql = sparkSql.replace("${monthend}", month[1]);
		sparkSql = sparkSql.replace("${monthstart-1}", lastMonth[0]);
		sparkSql = sparkSql.replace("${monthend-1}", lastMonth[1]);

		// Natural year (current and previous)
		String[] year = DateUtils.getNLastYear(biz_format, date, 0);
		String[] lastYear = DateUtils.getNLastYear(biz_format, date, 1);
		sparkSql = sparkSql.replace("${yearstart}", year[0]);
		sparkSql = sparkSql.replace("${yearend}", year[1]);
		sparkSql = sparkSql.replace("${yearstart-1}", lastYear[0]);
		sparkSql = sparkSql.replace("${yearend-1}", lastYear[1]);

		return sparkSql;
	}
}