package com.qing;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.net.URI;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import spark.Spark;

/**
 * Parses syslog-style "messages" log lines (e.g. grep output of the form
 * {@code "messages:Aug 13 04:16:02 host nanny[400645]: READ to 1.2.3.4:3478 timed out"})
 * into their component fields via a regular expression.
 */
public class Server {

    /**
     * Matches one "messages" log line. Capture groups:
     * 1 = timestamp ("MMM d HH:mm:ss"), 2 = host, 3 = process name,
     * 4 = pid, 5 = free-text message.
     * Compiled once and cached — Pattern compilation is expensive and the
     * instance is thread-safe.
     */
    private static final Pattern LOG_PATTERN = Pattern.compile(
            "messages.*:(\\w{3} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}) {1,2}"
                    + "([\\s\\S]+?) {1,2}([\\s\\S]+)\\[([0-9]+)\\]: {1,2}([\\s\\S]+)");

    /**
     * Extracts all regex groups from one log line.
     *
     * @param line a raw "messages" log line; must not be {@code null}
     * @return an immutable-by-convention list of {@code groupCount() + 1}
     *         strings — index 0 is the full match, indices 1..5 the capture
     *         groups described on {@link #LOG_PATTERN} — or an empty list
     *         when the line does not match
     */
    public static List<String> extractGroups(String line) {
        Matcher m = LOG_PATTERN.matcher(line);
        if (!m.find()) {
            return Collections.emptyList();
        }
        List<String> groups = new ArrayList<>(m.groupCount() + 1);
        // group(0) is the entire match; 1..groupCount() are the captures.
        for (int i = 0; i <= m.groupCount(); i++) {
            groups.add(m.group(i));
        }
        return groups;
    }

    /**
     * Demo entry point: parses a sample log line and prints each group on its
     * own line, or "NO MATCH" if the pattern does not apply.
     */
    public static void main(String[] args) {
        String line = "messages:Aug 13 04:16:02 BestS-LVS nanny[400645]: "
                + "READ to 172.16.22.173:3478 timed out";

        List<String> groups = extractGroups(line);
        if (groups.isEmpty()) {
            System.out.println("NO MATCH");
        } else {
            for (String group : groups) {
                System.out.println(group);
            }
        }
    }
}
