package com.aotain.coeus.spark;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;

import scala.Tuple2;

import com.aotain.hbase.HBaseDao;

/**
 * Spark job that loads port-scan threat records into the SDS_IP_TI HBase table.
 *
 * @ClassName: SdsIpTiSpark
 * @Description: Reads comma-separated scan records from HDFS and writes one
 *               threat-description column per scanned port for each source IP.
 * @author 程彬
 * @date 2016年5月13日 上午9:47:05
 *
 */
public class SdsIpTiSpark {

	/**
	 * Job entry point.
	 *
	 * <p>Expected input lines are comma-separated: {@code ip,port,scanCount,scannedIpCount}.
	 * For each recognized port a description is written to column family {@code cf}
	 * of table {@code SDS_IP_TI}, row key {@code <ip>_<date>}.
	 *
	 * @param args [0] HDFS input path, [1] business date (yyyyMMdd), [2] ZooKeeper quorum
	 * @throws IOException declared for Hadoop API compatibility
	 */
	public static void main(String[] args) throws IOException {

		if (args.length != 3) {
			System.err.println("Usage: <Input> <Date> <Zookeeper>");
			System.exit(1);
		}

		String inpath = args[0];
		final String date = args[1];
		String zooserver = args[2];

		SparkConf conf = new SparkConf()
				.setAppName("SdsIpTi")
				.set("spark.shuffle.consolidateFiles", "true");

		JavaSparkContext ctx = new JavaSparkContext(conf);
		// Broadcast small read-only values once per executor instead of
		// serializing them into every task closure.
		final Broadcast<String> bcZooServer = ctx.broadcast(zooserver);
		final Broadcast<String> bcDate = ctx.broadcast(date);

		Configuration config = new Configuration();
		config.addResource("/etc/hadoop/conf");

		JavaPairRDD<LongWritable, Text> line = ctx.newAPIHadoopFile(
				inpath, TextInputFormat.class, LongWritable.class, Text.class, config);

		// Keep only the record text; the pair shape is retained for the
		// downstream foreach signature.
		JavaPairRDD<String, String> ss = line.mapToPair(
				new PairFunction<Tuple2<LongWritable, Text>, String, String>() {

			private static final long serialVersionUID = 6584386367142393259L;

			@Override
			public Tuple2<String, String> call(Tuple2<LongWritable, Text> v1)
					throws Exception {
				return new Tuple2<String, String>(v1._2.toString(), "");
			}

		});

		ss.foreach(new VoidFunction<Tuple2<String, String>>() {

			private static final long serialVersionUID = 1L;

			private final String tableName = "SDS_IP_TI";

			@Override
			public void call(Tuple2<String, String> tuple) throws Exception {
				// HBaseDao.getInstance is presumably a per-JVM singleton so the
				// connection is reused across records — TODO confirm.
				HBaseDao dao = HBaseDao.getInstance(bcZooServer.getValue());
				String date = bcDate.getValue();

				String[] splits = tuple._1.split(",", -1);
				if (splits.length < 4) {
					return; // malformed record, skip
				}

				String rowkey = splits[0] + "_" + date;
				Put put = new Put(rowkey.getBytes(StandardCharsets.UTF_8));
				// Bulk-load style: skip the WAL for throughput.
				put.setDurability(Durability.SKIP_WAL);

				// BUG FIX: column/desc used to be instance fields shared across
				// call() invocations, so a record whose port matched no case
				// silently reused the previous record's column and description.
				// They are now locals, reset for every record.
				String column = "";
				String desc = "";
				switch (splits[1]) {
				case "22" :
					column = "A";
					desc = "此IP在["+ date +"]发起" + splits[1] + "端口扫描["+ splits[2] +"]次，存在远程登陆攻击风险，当日扫描IP数[" + splits[3] + "]";
					break;
				case "3389" :
					column = "B";
					desc = "此IP在["+ date +"]发起" + splits[1] + "端口扫描["+ splits[2] +"]次，存在远程登陆攻击风险，当日扫描IP数[" + splits[3] + "]";
					break;
				case "1433" :
					column = "C";
					desc = "此IP在["+ date +"]发起" + splits[1] + "端口扫描["+ splits[2] +"]次，对SQL SERVER数据存在攻击风险，当日扫描IP数[" + splits[3] + "]";
					break;
				case "53" :
					column = "D";
					desc = "此IP在["+ date +"]发起" + splits[1] + "端口扫描["+ splits[2] +"]次，对DNS服务器存在攻击风险，当日扫描IP数[" + splits[3] + "]";
					break;
				case "25" :
					column = "E";
					desc = "此IP在["+ date +"]发起" + splits[1] + "端口扫描["+ splits[2] +"]次，对邮件服务器存在攻击风险，当日扫描IP数[" + splits[3] + "]";
					break;
				case "21" :
					column = "F";
					desc = "此IP在["+ date +"]发起" + splits[1] + "端口扫描["+ splits[2] +"]次，对FTP服务器存在攻击风险，当日扫描IP数[" + splits[3] + "]";
					break;
				case "135" :
					column = "G";
					desc = "此IP在["+ date +"]发起" + splits[1] + "端口扫描["+ splits[2] +"]次，存在蠕虫攻击风险，当日扫描IP数[" + splits[3] + "]";
					break;
				case "445" :
					column = "H";
					desc = "此IP在["+ date +"]发起" + splits[1] + "端口扫描["+ splits[2] +"]次，存在蠕虫攻击风险，当日扫描IP数[" + splits[3] + "]";
					break;
				}
				if (!"".equals(column)) {
					// Explicit UTF-8 so row keys/values are identical on every
					// executor regardless of the JVM's platform charset.
					put.add("cf".getBytes(StandardCharsets.UTF_8),
							column.getBytes(StandardCharsets.UTF_8),
							desc.getBytes(StandardCharsets.UTF_8));
					dao.save(put, tableName);
				}
			}

		});
		ctx.stop();
	}

}
