package com.red.wood.task;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import com.red.wood.util.CommonUtils;
import com.red.wood.util.Reader;
import com.red.wood.util.SysProps;

public class IrcsProvinceParse {

	// BUGFIX: logger was bound to DwdTask.class, misattributing all log output;
	// bind it to this class instead.
	private static final Log log = LogFactory.getLog(IrcsProvinceParse.class);

	/**
	 * Reads one day's partition of the ODS active-IP table, enriches each row with
	 * a province id resolved from its IP via the broadcast {@link Reader}, writes
	 * the rows as tab-separated text to HDFS, and loads them into the DWD
	 * active-IP table's matching partition.
	 *
	 * @param spark     active Spark session used to run the Hive queries
	 * @param broadcast broadcast IP-geolocation reader shared across executors
	 * @param day       partition value (ds) to read from ODS and write to DWD
	 * @param sys       property source supplying the Hive database name and HDFS path
	 */
	public static void ircsActiveIp(SparkSession spark, Broadcast<Reader> broadcast, String day, SysProps sys) {

		String hiveDatabase = sys.getValue("hive.database");

		// NOTE(review): database/day are concatenated into SQL; both come from
		// configuration here, but parameterize if they can ever be user-supplied.
		Dataset<Row> dataset = spark.sql("select ircsid,ip,firsttimestr,lasttimestr,block,liveness,isinipseg,port,protocol from "
				+ hiveDatabase + ".t_ods_ircs_activeip where ds='" + day + "'");

		JavaRDD<String> rdd = dataset.toJavaRDD().map(new Function<Row, String>() {

			private static final long serialVersionUID = 1L;

			@Override
			public String call(Row row) {
				// StringBuilder: purely local, no synchronization needed (was StringBuffer).
				StringBuilder buffer = new StringBuilder();
				try {
					String ircsid = row.getAs("ircsid");
					String ip = row.getAs("ip");
					String firsttimestr = row.getAs("firsttimestr");
					String lasttimestr = row.getAs("lasttimestr");
					String block = CommonUtils.valueOf(row.getAs("block"));
					String liveness = CommonUtils.valueOf(row.getAs("liveness"));
					String isinipseg = CommonUtils.valueOf(row.getAs("isinipseg"));
					String port = CommonUtils.valueOf(row.getAs("port"));
					String protocol = CommonUtils.valueOf(row.getAs("protocol"));
					String provinceid = resolveProvince(broadcast, ip);
					appendField(buffer, ircsid);
					appendField(buffer, ip);
					appendField(buffer, firsttimestr);
					appendField(buffer, lasttimestr);
					appendField(buffer, block);
					appendField(buffer, liveness);
					appendField(buffer, isinipseg);
					appendField(buffer, port);
					appendField(buffer, protocol);
					appendField(buffer, provinceid);
				} catch (Exception e) {
					// Best-effort per-row handling preserved: log the failure and
					// return whatever was built so far (possibly empty/partial).
					if (log.isErrorEnabled()) {
						log.error(e.getMessage(), e);
					}
				}
				return buffer.toString();
			}
		});
		String dwdPath = sys.getValue("ircs_activeip_hdfs");
		String sql = "load data inpath '" + dwdPath + "/*' overwrite into table "
				+ hiveDatabase + ".t_dwd_ircs_activeip partition (ds='" + day + "')";
		CommonUtils.loadData(rdd, spark, dwdPath, sql);
	}

	/**
	 * Reads one day's partition of the ODS active-domain table, enriches each row
	 * with a province id resolved from its associated IP via the broadcast
	 * {@link Reader}, writes the rows as tab-separated text to HDFS, and loads
	 * them into the DWD active-domain table's matching partition.
	 *
	 * @param spark     active Spark session used to run the Hive queries
	 * @param broadcast broadcast IP-geolocation reader shared across executors
	 * @param day       partition value (ds) to read from ODS and write to DWD
	 * @param sys       property source supplying the Hive database name and HDFS path
	 */
	public static void ircsActiveDomain(SparkSession spark, final Broadcast<Reader> broadcast, String day, SysProps sys) {

		String hiveDatabase = sys.getValue("hive.database");

		Dataset<Row> dataset = spark.sql("select ircsid,domain,firsttimestr,lasttimestr,block,liveness,topdomainflag,topdomain,ip from "
				+ hiveDatabase + ".t_ods_ircs_activedomain where ds='" + day + "'");

		JavaRDD<String> rdd = dataset.toJavaRDD().map(new Function<Row, String>() {

			private static final long serialVersionUID = 1L;

			@Override
			public String call(Row row) {
				// StringBuilder: purely local, no synchronization needed (was StringBuffer).
				StringBuilder buffer = new StringBuilder();
				try {
					String ircsid = row.getAs("ircsid");
					String domain = row.getAs("domain");
					String firsttimestr = row.getAs("firsttimestr");
					String lasttimestr = row.getAs("lasttimestr");
					String block = CommonUtils.valueOf(row.getAs("block"));
					String liveness = CommonUtils.valueOf(row.getAs("liveness"));
					String topdomainflag = CommonUtils.valueOf(row.getAs("topdomainflag"));
					String topdomain = row.getAs("topdomain");
					String ip = row.getAs("ip");
					String provinceid = resolveProvince(broadcast, ip);
					appendField(buffer, ircsid);
					appendField(buffer, domain);
					appendField(buffer, firsttimestr);
					appendField(buffer, lasttimestr);
					appendField(buffer, block);
					appendField(buffer, liveness);
					appendField(buffer, topdomainflag);
					appendField(buffer, topdomain);
					appendField(buffer, ip);
					appendField(buffer, provinceid);
				} catch (Exception e) {
					// Best-effort per-row handling preserved: log the failure and
					// return whatever was built so far (possibly empty/partial).
					if (log.isErrorEnabled()) {
						log.error(e.getMessage(), e);
					}
				}
				return buffer.toString();
			}
		});
		String dwdPath = sys.getValue("ircs_domain_hdfs");
		String sql = "load data inpath '" + dwdPath + "/*' overwrite into table "
				+ hiveDatabase + ".t_dwd_ircs_activedomain partition (ds='" + day + "')";
		CommonUtils.loadData(rdd, spark, dwdPath, sql);
	}

	/**
	 * Resolves the province id for {@code ip} via the broadcast geolocation
	 * reader, or returns "" when {@code ip} is not a valid IP (matches the
	 * original inline lookup, including its default of an empty string).
	 */
	private static String resolveProvince(Broadcast<Reader> broadcast, String ip) {
		if (CommonUtils.isIp(ip)) {
			return CommonUtils.getProvinceId(broadcast.getValue().find(ip, "CN")[1]);
		}
		return "";
	}

	/**
	 * Appends {@code value} followed by a tab separator. A null value renders as
	 * the literal "null", exactly as the original {@code value + "\t"} concat did.
	 */
	private static void appendField(StringBuilder buffer, String value) {
		buffer.append(value).append('\t');
	}
}