package com.red.wood.task;

import java.text.SimpleDateFormat;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.sql.SparkSession;

import com.red.wood.model.SqlInfo;
import com.red.wood.util.CommonUtils;
import com.red.wood.util.DateUtil;

public class VerificationTask {

	private static final Log log = LogFactory.getLog(VerificationTask.class);

	/**
	 * Driver entry point for the daily verification pipeline.
	 *
	 * <p>Pipeline steps (all for a single day partition):
	 * <ol>
	 *   <li>build the IP anomaly base table ({@code IpVerifyBaseTask})</li>
	 *   <li>build the domain anomaly base table ({@code DomainVerifyBaseTask})</li>
	 *   <li>export both Hive tables to Oracle via Sqoop ({@code HiveToDbTask})</li>
	 *   <li>run anomaly statistics ({@code StatisticsTask})</li>
	 * </ol>
	 *
	 * @param args optional; {@code args[0]} is the target day in {@code yyyy-MM-dd}
	 *             format. When absent, yesterday's date is used
	 *             (per {@code DateUtil.getYesterday()} — assumed to return
	 *             yesterday as a {@code java.util.Date}; TODO confirm).
	 */
	public static void main(String[] args) {

		SparkSession spark = SparkSession.builder()
				.appName("VerificationTask")
				.enableHiveSupport()
				.getOrCreate();
		try {
			SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
			String day = "";
			if (args.length > 0) {
				day = args[0];
				if (!CommonUtils.isValidDate(day)) {
					// BUGFIX: previously exited silently with status 0, so schedulers
					// treated bad input as success, and System.exit skipped the
					// finally block, leaking the SparkSession. Now: log the rejected
					// value, stop Spark explicitly, and exit with a failure status.
					log.error("Invalid date argument: " + day + " (expected yyyy-MM-dd)");
					spark.stop();
					System.exit(1);
				}
			} else {
				day = sdf.format(DateUtil.getYesterday()); // default: process yesterday's data
			}

			log.info("导入时间：" + day);

			SqlInfo sqlInfo = CommonUtils.getSqlInfo();

			// Build the IP anomaly base table.
			IpVerifyBaseTask.ipTask(spark, sqlInfo, day);

			// Build the domain anomaly base table.
			DomainVerifyBaseTask.domainTask(spark, sqlInfo, day);

			// Export the IP anomaly table to Oracle.
			HiveToDbTask.exeHiveSqoop("t_dm_ip_verify_base", day);

			// Export the domain anomaly table to Oracle.
			HiveToDbTask.exeHiveSqoop("t_dm_domain_verify_base", day);

			// Anomaly statistics over the freshly built tables.
			StatisticsTask.doTask(sqlInfo, day);

		} catch (Exception e) {
			// Boundary catch: this is a batch driver, so log any failure with its
			// full stack trace rather than letting it escape unlogged.
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
		} finally {
			if (spark != null) {
				spark.stop(); // always release the Spark session/cluster resources
			}
		}
	}
}