package org.paychina.demo;

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.paychina.demo.bean.LogBean;

/**
 * Spark demo: reads a log file, keeps only lines containing "WARN",
 * converts each warning line into a {@link LogBean} (timestamp + message)
 * and prints the count and contents to stdout.
 *
 * <p>NOTE(review): despite the class name, this program uses filter+map,
 * not flatMap, and registers itself under the app name "FilterActor".
 */
public class FlatMapActor {

	/** Path of the log file to analyze. */
	public static final String ANALYZE = "src/main/resources/FilterActor.txt";
	/** Whitespace splitter; currently unused but kept for compatibility. */
	private static final Pattern SPACE = Pattern.compile(" ");

	/**
	 * Entry point: configures a local Spark context, filters warning lines,
	 * assembles them into beans and prints the results.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		// Spark configuration
		SparkConf sparkConf = new SparkConf();
		sparkConf.set("spark.app.name", "FilterActor");
		sparkConf.set("spark.master", "local[1]");
		sparkConf.set("spark.ui.port", "36000"); // override the default UI port

		// try-with-resources: JavaSparkContext implements Closeable; the
		// original leaked the context by never closing/stopping it
		try (JavaSparkContext ctx = new JavaSparkContext(sparkConf)) {
			// load the data to analyze
			JavaRDD<String> inputRDD = ctx.textFile(ANALYZE, 1);

			// keep only warning lines
			JavaRDD<String> warnRDD = inputRDD.filter(
				new Function<String, Boolean>() {
					@Override
					public Boolean call(String x) {
						return x.contains("WARN");
					}
				}
			);
			// warnRDD feeds two actions below (count() and collect() via the
			// mapped RDD); cache it so the file is not re-read and re-filtered
			warnRDD.cache();

			// assemble beans; every element here already contains "WARN",
			// so the original's redundant contains() check (and its empty-bean
			// fallback, which produced null time/content) is removed
			JavaRDD<LogBean> warnRDDBean = warnRDD.map(new Function<String, LogBean>() {
				@Override
				public LogBean call(String t) throws Exception {
					LogBean log = new LogBean();
					// first 19 chars are assumed to be the timestamp, e.g.
					// "2017-01-01 00:00:00" — TODO confirm log format; guard
					// against short lines instead of throwing
					log.setTime(t.length() >= 19 ? t.substring(0, 19) : t);
					// skip "WARN " (level + separator char) to get the message
					// body; guard against a line ending right at "WARN"
					int start = t.indexOf("WARN") + 5;
					log.setContent(start <= t.length() ? t.substring(start) : "");
					return log;
				}
			});

			System.out.println("日志中包含" + warnRDD.count() + "警告信息");

			int i = 0;
			for (LogBean l : warnRDDBean.collect()) {
				i++;
				System.out.println("第" + i + "条警告信息内容：");
				System.out.println("时间:" + l.getTime());
				System.out.println("内容:" + l.getContent());
			}
		}
	}
}
