package org.paychina.demo;

import java.util.List;
import java.util.regex.Pattern;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

/**
 * 转换器
 * @author Warmsheep
 *
 */
/**
 * Spark demo: loads a log file, filters it for WARN and ERROR lines,
 * counts each category, unions them, and prints the merged results.
 *
 * Run locally via {@code main}; uses a single-core local master.
 *
 * @author Warmsheep
 */
public class FilterActor {

	/** Path of the log file to analyze (relative to the project root). */
	public static final String ANALYZE = "src/main/resources/FilterActor.txt";

	public static void main(String[] args) {
		// Spark configuration
		SparkConf sparkConf = new SparkConf();
		sparkConf.set("spark.app.name", "FilterActor");
		sparkConf.set("spark.master", "local[1]");
		sparkConf.set("spark.ui.port", "36000"); // override the default Spark UI port

		JavaSparkContext ctx = new JavaSparkContext(sparkConf);
		try {
			// Load the data to analyze (single partition).
			JavaRDD<String> inputRDD = ctx.textFile(ANALYZE, 1);

			// Lines containing warnings.
			JavaRDD<String> warnRDD = inputRDD.filter(
				new Function<String, Boolean>() {
					public Boolean call(String x) {
						return x.contains("WARN");
					}
				}
			);

			System.out.println("日志中包含" + warnRDD.count() + "警告信息");

			// Lines containing errors.
			JavaRDD<String> errorRDD = inputRDD.filter(
				new Function<String, Boolean>() {
					public Boolean call(String x) {
						return x.contains("ERROR");
					}
				}
			);

			System.out.println("日志中包含" + errorRDD.count() + "异常信息");

			// Merge the warning and error lines into one RDD.
			JavaRDD<String> allData = warnRDD.union(errorRDD);

			// collect() pulls the whole RDD to the driver — only safe here
			// because this is a small demo dataset.
			List<String> allList = allData.collect();
			// take(5) fetches just the first five elements.
			List<String> partList = allData.take(5);

			// Print all merged warning/error lines.
			for (String s : allList) {
				System.out.println(s);
			}
			// Print the first five merged lines.
			for (String s : partList) {
				System.out.println(s);
			}
		} finally {
			// Always release the Spark context (executor threads, UI port,
			// temp dirs) even if the job above throws.
			ctx.stop();
		}
	}
}
