package com.sheep.spark.job.impl;

import com.sheep.spark.job.NetworkStreamJob;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;

import java.util.Arrays;
import java.util.regex.Pattern;

/**
 * Streaming job that tokenizes incoming network log lines.
 * <p>
 * Created in IntelliJ IDEA by Administrator on 2016-06-02 at 11:13 AM.
 */
public class ErrorLogAnalysisJob extends NetworkStreamJob {
    private static final Logger logger = Logger.getLogger(ErrorLogAnalysisJob.class);

    /** Tokenizer for log lines; compiled once as a constant to avoid per-record regex compilation. */
    private static final Pattern SPACE = Pattern.compile(" ");

    public ErrorLogAnalysisJob() {
        super();
    }

    /**
     * Builds the streaming pipeline: splits each received line into
     * space-separated tokens.
     *
     * @param lines the receiver-backed input stream of raw log lines
     */
    @Override
    public void run(JavaReceiverInputDStream<String> lines) {
        try {
            JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
                @Override
                public Iterable<String> call(String line) throws Exception {
                    return Arrays.asList(SPACE.split(line));
                }
            });
            // NOTE(review): `words` is never consumed. Spark Streaming requires at
            // least one output operation (print/foreachRDD/save*) on a DStream,
            // otherwise no work is registered and this job is effectively a no-op.
            // Attach the intended analysis/output here (e.g. words.print()).
        } catch (Exception e) {
            // DStream construction is lazy, so only setup-time errors land here.
            // Log with a message AND the throwable so the stack trace is preserved
            // (error(Object) alone would log only e.toString()).
            logger.error("Failed to build error-log analysis stream", e);
        }
    }
}
