package com.red.wood.task;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.SparkSession;

import com.red.wood.util.DateUtil;
import com.red.wood.util.JarToolUtil;
import com.red.wood.util.Reader;
import com.red.wood.util.SysProps;

import scala.reflect.ClassTag$;

public class DwdTask {

	private static final Log log = LogFactory.getLog(DwdTask.class);

	/**
	 * Entry point for the DWD-layer batch job.
	 *
	 * <p>Builds a Hive-enabled {@link SparkSession}, loads the geo-IP database
	 * ({@code ipiptest.ipdb}), broadcasts it to the executors, and runs the four
	 * province-level parse stages (IRCS/CDN active IPs and domains) for one day.
	 *
	 * @param args optional; {@code args[0]} is the target day in {@code yyyy-MM-dd}
	 *             form (e.g. 2018-10-10). When absent or empty, yesterday is used.
	 * @throws IOException if the geo-IP database file cannot be opened
	 */
	public static void main(String[] args) throws IOException {
		SparkSession spark = SparkSession.builder()
				.appName("dwdTask")
				.enableHiveSupport()
				.getOrCreate();

		try {
			// The geo-IP database ships alongside the jar in production; on
			// Windows (local dev) it is resolved from the filesystem root path.
			String path = File.separator + "ipiptest.ipdb";
			Reader db = JarToolUtil.isWindows()
					? new Reader(path)
					: new Reader(JarToolUtil.getJarDir() + path);

			// Target day, yyyy-MM-dd; defaults to yesterday when no (non-empty)
			// argument is supplied.
			String day = (args.length > 0 && !args[0].isEmpty())
					? args[0]
					: new SimpleDateFormat("yyyy-MM-dd").format(DateUtil.getYesterday());

			log.info("导入时间：" + day);

			SysProps sys = SysProps.getInstance();

			// Broadcast the geo-IP reader once so all executors share one copy
			// instead of serializing it with every task closure.
			Broadcast<Reader> broadcast = spark.sparkContext().broadcast(db, ClassTag$.MODULE$.apply(Reader.class));

			IrcsProvinceParse.ircsActiveIp(spark, broadcast, day, sys);
			IrcsProvinceParse.ircsActiveDomain(spark, broadcast, day, sys);
			CdnProvinceParse.cdnActiveIp(spark, broadcast, day, sys);
			CdnProvinceParse.cdnActiveDomain(spark, broadcast, day, sys);
		} finally {
			// Release the SparkSession even when a parse stage fails; without
			// this the session leaked on any exception.
			spark.stop();
		}
	}
}
