package com.aotain.coeus.spark;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;

import scala.Tuple2;

import com.aotain.apollo.ApolloConfig;
import com.aotain.apollo.IPDatabase;
import com.aotain.apollo.IPSearch;

/**
 * Source-IP blacklist analysis: Spark job that reads source-IP records from
 * HDFS and maintains the SDS_IP_BLACKLIST table in HBase (first-seen time,
 * latest discovery time per tag, and IP area).
 * @author Administrator
 *
 */
public class SourceIPBlackListAly {

	/**
	 * Entry point.
	 * Required arguments:
	 *   args[0] SourcePath - HDFS path of the input records
	 *   args[1] ZOOSERVER  - ZooKeeper quorum used to reach HBase
	 *   args[2] TAG        - attribute tag, used as the HBase column qualifier
	 *   args[3] DBSERVER   - Oracle host for the Apollo / IP-location database
	 */
	public static void main(String[] args) {

		if (args.length != 4) {
			System.err.printf("Usage: <SourcePath> <ZOOSERVER> <TAG> <DBSERVER>");
			System.exit(1);
		}

		// Exit code: 0 on success, 1 on any failure (see SparkFunc).
		int nexit = SparkFunc(args);
		System.exit(nexit);
	}

	/**
	 * Runs the Spark job: parses comma-separated records of the form
	 * {@code sourceip,destport,n,rank} and, for each source IP, writes or
	 * updates a row in the HBase table SDS_IP_BLACKLIST with the IP itself,
	 * the latest discovery time under the given tag, a first-seen timestamp
	 * for rows not previously present, and the IP's area name.
	 *
	 * @param args see {@link #main(String[])}
	 * @return 0 on success, 1 on any failure
	 */
	private static int SparkFunc(String[] args)
	{
		try
		 {
		 SparkConf sparkConf = new SparkConf().setAppName("SourceIPBlackAly");
		 Configuration config = new Configuration();
		 JavaSparkContext ctx = new JavaSparkContext(sparkConf);

		 String sourcePath = args[0];
		 final String zooServer = args[1];
		 final String tag = args[2];  // attribute tag -> HBase column qualifier

		 String dbhost = args[3];
		 // NOTE(review): database credentials are hard-coded in source; they
		 // should be moved to a protected configuration store.
		 String DBJSON="{\"driverClassName\":\"oracle.jdbc.driver.OracleDriver\","
					+ "\"maxActive\":\"200\",\"maxIdle\":\"50\",\"maxWait\":\"10000\","
					+ "\"name\":\"sds\",\"password\":\"h16aug8v3w\",\"queryTimeout\":\"60\","
					+ "\"type\":\"javax.sql.DataSource\",\"url\":\"jdbc:oracle:thin:@"+ dbhost +":1521:bsmp\","
					+ "\"userName\":\"sds\",\"validationQuery\":\"select sysdate from dual\"}";

		 // Presumably loads domain-registration data as a constructor side
		 // effect; the reference itself is never used. TODO confirm.
		 ApolloConfig ap = new ApolloConfig(DBJSON);

		 System.out.println("sourcePath: " + sourcePath);

		 // IP -> location lookup; captured by the executor closure below, so
		 // it must be serializable.
		 final IPSearch ipsearch = new IPSearch(DBJSON);

		 JavaPairRDD<LongWritable,Text> tt = ctx.newAPIHadoopFile(sourcePath,TextInputFormat.class,
				 LongWritable.class, Text.class,config);

		 // Map each input line to (sourceip, sourceip). Malformed lines
		 // (field count != 4) map to null and are skipped downstream.
		 JavaPairRDD<String, String> s = tt.mapToPair(new PairFunction<Tuple2<LongWritable,Text>,
				 String,String>() {

			@Override
			public Tuple2<String, String> call(Tuple2<LongWritable, Text> arg0)
					throws Exception {
				// One hive record: sourceip,destport,n,rank
				String[] items = arg0._2().toString().split(",",-1);
				if(items.length!=4)
					return null;
				String sourceip = items[0];
				return new Tuple2<String, String>(sourceip,sourceip);
			}
		 });

		 // One HBase connection per PARTITION. The previous implementation
		 // opened (and closed) a Connection -- a full ZooKeeper session --
		 // for every single record, and also dereferenced the null tuples
		 // produced above for malformed lines.
		 s.foreachPartition(new VoidFunction<Iterator<Tuple2<String, String>>>(){

			String tableName = "SDS_IP_BLACKLIST";

			@Override
			public void call(Iterator<Tuple2<String, String>> records) throws Exception {

				Configuration hConfig = HBaseConfiguration.create();
		        hConfig.set("hbase.zookeeper.quorum", zooServer);
		        hConfig.set("hbase.zookeeper.property.clientPort","2181");

		        Connection connection = ConnectionFactory.createConnection(hConfig);
		        try {
					TableName TABLE_NAME = TableName.valueOf(tableName);
					HTable hTable = (HTable) connection.getTable(TABLE_NAME);
					try {
						// Latest discovery time, formatted per row below.
						// SimpleDateFormat is not thread-safe, but this
						// instance is confined to the current task.
						SimpleDateFormat df = new SimpleDateFormat("yyyyMMddHHmmss");
						byte[] cf = Bytes.toBytes("cf");

						while (records.hasNext()) {
							Tuple2<String, String> rec = records.next();
							if (rec == null)
								continue; // malformed input line, see mapToPair above

							String sourceip = rec._1;
							String strDate = df.format(new Date());

							Put put = new Put(Bytes.toBytes(sourceip));

							// Table.get never returns null; an absent "IP" cell
							// means this row is new, so record a first-seen time.
							Get g = new Get(Bytes.toBytes(sourceip));
							Result rs = hTable.get(g);
							Cell sourceIp = rs.getColumnLatestCell(cf, Bytes.toBytes("IP"));
							if (sourceIp == null)
							{
								put.addColumn(cf, Bytes.toBytes(tag + "_FIRST"),
										Bytes.toBytes(strDate)); // first discovery time
							}

							// Guard against a missing lookup result as well as a
							// missing city name; store "" in either case.
							IPDatabase ipinfo = ipsearch.getStartIP(sourceip);
							String area = (ipinfo == null || ipinfo.getCityName() == null)
									? "" : ipinfo.getCityName();

							put.addColumn(cf, Bytes.toBytes("IP"),
									Bytes.toBytes(sourceip));
							put.addColumn(cf, Bytes.toBytes(tag),
									Bytes.toBytes(strDate)); // update time
							put.addColumn(cf, Bytes.toBytes("AREA"),
									Bytes.toBytes(area));

							hTable.put(put);
						}
					} finally {
						hTable.close();
					}
		        } finally {
					connection.close();
		        }
			}});

		ctx.stop();
		return 0;
		 }
		 catch(Exception ex)
		 {
			 // Any failure (bad path, HBase/Oracle unreachable, ...) is
			 // reported via the exit code; the trace goes to the driver log.
			 ex.printStackTrace();
			 return 1;
		 }

	}

}
