package com.yd.spark.demo;

import org.apache.commons.lang.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DispatchInfoETLWithSpark {
	private static final Logger logger = LoggerFactory.getLogger(DispatchInfoETLWithSpark.class);

	/** Default input glob, used when no CLI argument is supplied. */
	private static final String DEFAULT_INPUT = "file:///home/zhaoxiang/data/dispatch_info-20161108/*.txt";
	/** Default output directory, used when no CLI argument is supplied. */
	private static final String DEFAULT_OUTPUT = "file:///home/zhaoxiang/data/output/dispatch_info-20161108/";

	/**
	 * Reads raw dispatch-info text lines, converts each line to JSON via
	 * {@code DispatchInfoHandlerImpl.parseToJson}, drops lines that fail to
	 * parse (they are logged and skipped), and writes the surviving JSON
	 * lines back out as text files.
	 *
	 * @param args optional overrides: {@code args[0]} = input path/glob,
	 *             {@code args[1]} = output directory. Falls back to the
	 *             original hard-coded paths when absent, so existing
	 *             invocations keep working unchanged.
	 */
	public static void main(String[] args) {
		String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
		String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;

		SparkConf conf = new SparkConf()
				.setAppName(DispatchInfoETLWithSpark.class.getSimpleName())
				.set("spark.network.timeout", "300");

		// JavaSparkContext implements Closeable; try-with-resources guarantees
		// the context is shut down even when a stage throws (the original
		// leaked the context on any failure before reaching sc.close()).
		try (JavaSparkContext sc = new JavaSparkContext(conf)) {
			JavaRDD<String> textFile = sc.textFile(inputPath);

			JavaRDD<String> rsRDD = textFile.map(new Function<String, String>() {
				private static final long serialVersionUID = 1L;

				@Override
				public String call(String line) throws Exception {
					try {
						return DispatchInfoHandlerImpl.parseToJson(line);
					} catch (Exception e) {
						// Best-effort ETL: log the bad record and skip it
						// (null is removed by the filter below) rather than
						// failing the whole job on one malformed line.
						logger.error("parse error: \n{}", line, e);
						return null;
					}
				}
			}).filter(new Function<String, Boolean>() {
				private static final long serialVersionUID = 1L;

				@Override
				public Boolean call(String line) throws Exception {
					return StringUtils.isNotBlank(line);
				}
			});

			// Cache the parsed RDD so count() and saveAsTextFile() below do
			// not each re-read and re-parse the entire input (the original
			// pipeline was recomputed from scratch for every action).
			rsRDD.cache();

			logger.info("initial lines: {}", textFile.count());
			logger.info("parsed lines: {}", rsRDD.count());

			rsRDD.saveAsTextFile(outputPath);
			logger.info("Finished");
		}
	}
}
