package com.red.wood.task;

import java.io.File;
import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import com.red.wood.model.IrcsActiveIp;
import com.red.wood.util.CommonUtils;
import com.red.wood.util.JarToolUtil;
import com.red.wood.util.Reader;

import scala.reflect.*;

public class DwdBeanTask {

	private static final Log log = LogFactory.getLog(DwdBeanTask.class);

	/** Partition date used when no command-line argument is supplied (original hard-coded value). */
	private static final String DEFAULT_DS = "2018-10-10";

	// Entry-point utility class; not meant to be instantiated.
	private DwdBeanTask() {
	}

	/**
	 * Spark batch job: reads active-IP rows from the ODS hive table, enriches each
	 * row with a province id resolved from a broadcast ipdb database, and writes
	 * the result into the matching DWD partition.
	 *
	 * @param args optional; {@code args[0]} may override the partition date (ds).
	 *             Omitted args keep the original behavior (ds = 2018-10-10).
	 * @throws IOException if the ipdb file cannot be opened by {@link Reader}
	 */
	public static void main(String[] args) throws IOException {
		long start = System.currentTimeMillis();
		// One ds value shared by the read and the write so the two SQL
		// statements can never reference different partitions.
		// NOTE(review): ds comes from trusted operator input; it is interpolated
		// into SQL, so do not expose this argument to untrusted callers.
		final String ds = (args != null && args.length > 0 && !args[0].isEmpty()) ? args[0] : DEFAULT_DS;

		SparkSession spark = SparkSession.builder()
				.appName("dwdTask")
				.enableHiveSupport()
				.getOrCreate();
		try {
			Dataset<Row> dataset = spark.sql(
					"select ircsid,ip,firsttimestr,lasttimestr,block,liveness,isinipseg,port,protocol"
					+ " from isdms.t_ods_ircs_activeip where ds='" + ds + "'");

			// On Windows (dev machine) the ipdb lives at the drive root; on the
			// cluster it sits next to the deployed jar.
			String path = File.separator + "ipiptest.ipdb";
			Reader db = JarToolUtil.isWindows()
					? new Reader(path)
					: new Reader(JarToolUtil.getJarDir() + path);

			// Broadcast the ip database once rather than serializing it with every task.
			final Broadcast<Reader> broadcast =
					spark.sparkContext().broadcast(db, ClassTag$.MODULE$.apply(Reader.class));

			JavaRDD<IrcsActiveIp> rdd = dataset.toJavaRDD().map(new Function<Row, IrcsActiveIp>() {

				private static final long serialVersionUID = 1L;

				@Override
				public IrcsActiveIp call(Row row) {
					return toIrcsActiveIp(row, broadcast);
				}
			});

			Dataset<Row> ipDataset = spark.createDataFrame(rdd, IrcsActiveIp.class);
			ipDataset.createOrReplaceTempView("t_ircs_activeip");
			spark.sql("insert overwrite table isdms.t_dwd_ircs_activeip partition (ds='" + ds
					+ "') select * from t_ircs_activeip");

			long elapsedMinutes = (System.currentTimeMillis() - start) / 1000 / 60;
			log.info(elapsedMinutes + "分钟");
			System.out.println(elapsedMinutes + "分钟");
		} finally {
			// Always release the Spark context, even when a stage fails.
			spark.stop();
		}
	}

	/**
	 * Maps one ODS row to an {@link IrcsActiveIp} bean, resolving the province id
	 * through the broadcast ipdb reader. Read/parse order matches the original
	 * code exactly: on any failure the error is logged and the bean is returned
	 * with whatever fields were already set (best-effort, never null).
	 */
	private static IrcsActiveIp toIrcsActiveIp(Row row, Broadcast<Reader> broadcast) {
		IrcsActiveIp ircsIp = new IrcsActiveIp();
		try {
			String ircsid = row.getAs("ircsid");
			String ip = row.getAs("ip");
			String firsttimestr = row.getAs("firsttimestr");
			String lasttimestr = row.getAs("lasttimestr");
			String block = CommonUtils.valueOf(row.getAs("block"));
			String liveness = CommonUtils.valueOf(row.getAs("liveness"));
			String isinipseg = CommonUtils.valueOf(row.getAs("isinipseg"));
			String port = CommonUtils.valueOf(row.getAs("port"));
			String protocol = CommonUtils.valueOf(row.getAs("protocol"));
			// find(ip, "CN")[1] — presumably the region/province field of the
			// ipdb lookup result; verify against the Reader API.
			String provinceid = CommonUtils.getProvinceId(broadcast.getValue().find(ip, "CN")[1]);

			ircsIp.setIrcsid(ircsid);
			ircsIp.setIp(ip);
			ircsIp.setFirsttimestr(firsttimestr);
			ircsIp.setLasttimestr(lasttimestr);
			ircsIp.setBlock(Integer.parseInt(block));
			ircsIp.setLiveness(Long.parseLong(liveness));
			ircsIp.setIsinipseg(Integer.parseInt(isinipseg));
			ircsIp.setPort(Long.parseLong(port));
			ircsIp.setProtocol(Integer.parseInt(protocol));
			ircsIp.setProvinceid(provinceid);
		} catch (Exception e) {
			// Best-effort mapping: keep the partially populated bean, record the cause.
			log.error(e.getMessage(), e);
		}
		return ircsIp;
	}
}
