package com.aotain.coeus.spark;

import java.io.IOException;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.broadcast.Broadcast;

import scala.Tuple2;

import com.aotain.hbase.HBaseDao;
import com.hadoop.mapreduce.LzoTextInputFormat;

/**
 * Spark job that populates the SDS_HTTP_STAT_H dataset: reads LZO-compressed,
 * pipe-delimited HTTP access logs, counts requests per (root domain, report
 * hour, client IP) and writes the aggregated rows as CSV text to HDFS.
 * (The direct HBase write path is currently disabled.)
 * @ClassName: SdsHttpStatH
 * @author 程彬 (Cheng Bin)
 * @date 2016-05-13 09:47:05
 */
public class SdsHttpStatH {

	/**
	 * Job entry point.
	 *
	 * @param args {@code <Input> <Output> <Hour> <Zookeeper>} where Input is the
	 *             LZO log path, Output the HDFS result path (deleted first if it
	 *             exists), Hour the two-digit report hour, and Zookeeper the
	 *             quorum address (broadcast for a future HBase write path).
	 * @throws IOException declared for Hadoop/Spark calls made by the pipeline
	 */
	public static void main(String[] args) throws IOException {

		if (args.length != 4) {
			// Fixed: old message used printf with no newline and no spacing.
			System.err.println("Usage: <Input> <Output> <Hour> <Zookeeper>");
			System.exit(1);
		}

		String inpath = args[0];
		String outpath = args[1];
		String hour = args[2];
		final String zooserver = args[3];

		// REPORTTIME component of the row key: yesterday's date plus the
		// requested hour, formatted "yyyyMMddHH0000".
		Calendar cal = Calendar.getInstance();
		cal.setTime(new Date());
		cal.add(Calendar.DAY_OF_MONTH, -1);
		SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
		final String date = String.format("%s%s0000", sdf.format(cal.getTime()), hour);

		SparkConf conf = new SparkConf()
				.setAppName("SdsHttpStatH")
				.set("spark.shuffle.consolidateFiles", "true");

		JavaSparkContext ctx = new JavaSparkContext(conf);
		// Retained for the (currently disabled) HBase write path.
		final Broadcast<String> bcZooServer = ctx.broadcast(zooserver);

		Configuration config = new Configuration();
		config.addResource("/etc/hadoop/conf");

		deleteIfExists(config, outpath);

		JavaPairRDD<LongWritable, Text> line = ctx.newAPIHadoopFile(
				inpath, LzoTextInputFormat.class, LongWritable.class, Text.class, config);

		// (rowkey, 1) per valid record -> filter out sentinels -> sum counts per rowkey.
		JavaPairRDD<String, Integer> ss = line
				.mapToPair(new PairFunction<Tuple2<LongWritable, Text>, String, Integer>() {

					private static final long serialVersionUID = 6584386367142393259L;

					@Override
					public Tuple2<String, Integer> call(Tuple2<LongWritable, Text> v1)
							throws Exception {
						String[] splits = v1._2.toString().split("\\|", -1);
						// Pipe-separated record; field 2 is presumably the client IP
						// and field 6 the host name — TODO confirm with the log producer.
						if (splits.length > 7
								&& splits[2] != null && !"".equals(splits[2])
								&& splits[6] != null && !"".equals(splits[6])
								&& splits[7] != null && !"".equals(splits[7])) {
							String rootDomain = extractRootDomain(splits[6]);
							if (!"".equals(rootDomain)) {
								String rowkey = String.format("%s_%s_%s", rootDomain, date, splits[2]);
								return new Tuple2<String, Integer>(rowkey, 1);
							}
						}
						// Sentinel for unusable records; dropped by the filter below.
						return new Tuple2<String, Integer>("", 0);
					}

				})
				.filter(new Function<Tuple2<String, Integer>, Boolean>() {

					private static final long serialVersionUID = 8751925169950963422L;

					@Override
					public Boolean call(Tuple2<String, Integer> v1) throws Exception {
						return !v1._1.isEmpty();
					}

				})
				.reduceByKey(new Function2<Integer, Integer, Integer>() {

					private static final long serialVersionUID = 1L;

					@Override
					public Integer call(Integer v1, Integer v2) throws Exception {
						return v1 + v2;
					}

				});

		// Flatten "domain_time_ip" keys into "rowkey,domain,time,ip,count" CSV lines.
		JavaPairRDD<String, String> result = ss.mapToPair(
				new PairFunction<Tuple2<String, Integer>, String, String>() {

					private static final long serialVersionUID = 1L;

					@Override
					public Tuple2<String, String> call(Tuple2<String, Integer> value)
							throws Exception {
						String str = value._1;
						String num = String.valueOf(value._2.intValue());
						// NOTE(review): assumes neither domain nor IP contains '_',
						// otherwise the positional fields below shift.
						String[] splits = str.split("_", -1);
						String row = String.format("%s,%s,%s,%s,%s",
								str, splits[0], splits[1], splits[2], num);
						return new Tuple2<String, String>(row, "");
					}

				});

		result.saveAsHadoopFile(outpath, Text.class, Text.class, TextOutputFormat.class);
		ctx.stop();
	}

	/**
	 * Best-effort recursive delete of {@code outpath} so the job can overwrite it.
	 * Failures are logged and ignored on purpose: if the path is still in the
	 * way, the subsequent save will surface the real error.
	 */
	private static void deleteIfExists(Configuration config, String outpath) {
		try {
			FileSystem fsTarget = FileSystem.get(URI.create(outpath), config);
			Path pathTarget = new Path(outpath);
			if (fsTarget.exists(pathTarget)) {
				fsTarget.delete(pathTarget, true);
				System.out.println("Delete path " + outpath);
			}
		} catch (IllegalArgumentException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Reduces a full host name to its registrable "root" domain, e.g.
	 * "www.example.com.cn" -> "example.com.cn" and "www.example.com" -> "example.com".
	 * Returns "" when the host matches neither suffix rule. Any trailing ":port"
	 * is stripped from the result.
	 */
	private static String extractRootDomain(String domainname) {
		String[] strArr = domainname.split("\\.", -1);
		String rootDomain = "";
		if (IsSpecialRoot(domainname) && strArr.length >= 3) {
			// Three-label suffix such as .com.cn / .com.hk: keep the last three labels.
			rootDomain = String.format("%s.%s.%s", strArr[strArr.length - 3],
					strArr[strArr.length - 2], strArr[strArr.length - 1]);
		} else if (IsSpecialRoot2Level(domainname) && strArr.length > 1) {
			// Ordinary two-label suffix: keep the last two labels.
			rootDomain = String.format("%s.%s",
					strArr[strArr.length - 2], strArr[strArr.length - 1]);
		}
		if (!"".equals(rootDomain) && rootDomain.contains(":")) {
			rootDomain = rootDomain.substring(0, rootDomain.lastIndexOf(":"));
		}
		return rootDomain;
	}

	/**
	 * True when {@code domain} appears to use a three-label public suffix such
	 * as ".com.cn" or ".gov.hk", so the registrable domain keeps three labels.
	 * NOTE(review): substring matching is approximate — "gd.cn" has no leading
	 * dot and ".cn.com" overlaps the two-level rule; a real public-suffix list
	 * lookup would be more accurate.
	 */
	private static boolean IsSpecialRoot(String domain) {
		return domain.contains(".com.")
				|| domain.contains(".co.")
				|| domain.contains("gd.cn")
				|| domain.contains(".cn.com")
				|| domain.contains(".gov.")
				|| domain.contains(".net.")
				|| domain.contains(".edu.")
				|| domain.contains(".org.")
				|| domain.contains(".ac.");
	}

	/**
	 * True when {@code domain} appears to use an ordinary two-label suffix
	 * (.com / .cn / .net / .org).
	 * BUG FIX: the previous check used {@code contains("cn")} without the dot,
	 * which matched any host containing the letters "cn" anywhere
	 * (e.g. "picnic.info"); it now requires ".cn" like its siblings.
	 */
	private static boolean IsSpecialRoot2Level(String domain) {
		return domain.contains(".com")
				|| domain.contains(".cn")
				|| domain.contains(".net")
				|| domain.contains(".org");
	}
}
