package com.qing;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.net.URI;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import spark.Spark;

public class Server {

    /** Month abbreviations in syslog order; index+1 is the month number. */
    private static final String[] MONTHS = {"Jan", "Feb", "Mar", "Apr",
                                            "May", "Jun", "Jul", "Aug",
                                            "Sep", "Oct", "Nov", "Dec"};

    /** Syslog lines carry no year; assume this one. TODO: derive from log source. */
    private static final String YEAR = "2017";

    /**
     * Matches a syslog-style nanny line, e.g.
     * "messages:Aug 13 04:16:02 BestS-LVS nanny[400645]: READ to 172.16.22.173:3478 timed out".
     * Group 1: "Mon dd HH:mm:ss" timestamp; group 3: full reason text;
     * group 4: "ip:port". Dots in the IP are escaped — an unescaped '.'
     * would match any character, not just a literal dot.
     * Compiled once: Pattern compilation is expensive and Pattern is thread-safe.
     */
    private static final Pattern LOG_PATTERN = Pattern.compile(
            "messages[\\s\\S]*:(\\w{3} \\d{2} \\d{2}:\\d{2}:\\d{2})[\\s\\S]+?"
            + "( nanny\\[[0-9]+\\]: )"
            + "([\\s\\S]+?(\\d+\\.\\d+\\.\\d+\\.\\d+:\\d+)[\\s\\S]+)");

    public static void main(String[] args) {

        String str = "messages:Aug 13 04:16:02 BestS-LVS nanny[400645]: READ to 172.16.22.173:3478 timed out";

        Map<String, String> f = parseMessage(str);
        if (f.isEmpty()) {
            System.out.println("NO MATCH");
        } else {
            System.out.println("time..." + f.get("date") + "..." + f.get("month") + "..."
                    + f.get("day") + "..." + f.get("time") + "..." + f.get("timestamp"));
            System.out.println("reason..." + f.get("reason"));
            System.out.println("ip..." + f.get("ip"));
            System.out.println("port..." + f.get("port"));
        }
    }

    /**
     * Parses a syslog-style nanny message into its components.
     *
     * @param line the raw log line (must start with "messages" and contain a
     *             "nanny[pid]:" marker followed by text with an "ip:port" token)
     * @return a map with keys {@code date} ("Mon dd HH:mm:ss"), {@code month}
     *         (1-12, unpadded), {@code day}, {@code time} ("HHmmss"),
     *         {@code timestamp} ("yyyyMMddHHmmss"), {@code reason} (message text
     *         with the " ip:port" token removed), {@code ip}, and {@code port};
     *         empty map when the line does not match
     */
    static Map<String, String> parseMessage(String line) {
        Matcher m = LOG_PATTERN.matcher(line);
        if (!m.find()) {
            return Collections.emptyMap();
        }

        String date = m.group(1);       // e.g. "Aug 13 04:16:02"
        String reason = m.group(3);     // e.g. "READ to 172.16.22.173:3478 timed out"
        String ipAndPort = m.group(4);  // e.g. "172.16.22.173:3478"

        int month = Arrays.asList(MONTHS).indexOf(date.substring(0, 3)) + 1;
        String day = date.substring(4, 6);
        // Literal replace is sufficient; no regex needed to strip colons.
        String time = date.substring(7).replace(":", "");
        String timestamp = YEAR + String.format("%02d", month) + day + time;

        String[] ipPort = ipAndPort.split(":");

        Map<String, String> out = new LinkedHashMap<>();
        out.put("date", date);
        out.put("month", String.valueOf(month));
        out.put("day", day);
        out.put("time", time);
        out.put("timestamp", timestamp);
        // replace (literal), NOT replaceAll (regex): the IP's dots are regex
        // metacharacters and must not be interpreted as a pattern.
        out.put("reason", reason.replace(" " + ipAndPort, ""));
        out.put("ip", ipPort[0]);
        out.put("port", ipPort[1]);
        return out;
    }
}
