package com.lhz.collector.parse.util;

import com.lhz.collector.common.Constant;
import com.lhz.collector.common.SpringUtil;
import com.lhz.collector.entity.Collector;
import com.lhz.collector.entity.ParseRule;
import com.lhz.collector.mapper.CollectorMapper;
import com.lhz.collector.parse.lexer.BaseLexer;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.lang.StringUtils;
import org.apache.flume.Event;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Created by LHZ on 2016/10/27.
 */
public class ParseUtil {
    /** Shared rule compiler; assumed stateless/thread-safe — used from both the refresh thread and callers of {@link #parse}. */
    private static final ParseJsonRule PARSE_JSON_RULE = new ParseJsonRule();

    /**
     * ip -&gt; Collector lookup cache. Rebuilt from scratch by {@link #loadAllCollector()}
     * and swapped in with a single volatile write, so readers always see a complete,
     * consistent snapshot (never a half-built map).
     */
    private static volatile Map<String, Collector> collectorMap = new ConcurrentHashMap<>();

    /**
     * Single-thread scheduler that refreshes the collector cache every 10 seconds.
     * Kept in a field (rather than discarded, as before) so it can be shut down,
     * and built on a daemon thread so it never blocks JVM shutdown.
     */
    private static final ScheduledExecutorService REFRESH_SCHEDULER =
            Executors.newScheduledThreadPool(1, r -> {
                Thread t = new Thread(r, "collector-cache-refresh");
                t.setDaemon(true);
                return t;
            });

    static {
        try {
            loadAllCollector();
        } catch (Exception e) {
            // An exception here would become ExceptionInInitializerError and make this
            // class permanently unloadable. Start with an empty cache instead; the
            // scheduled refresh below will retry in 10 seconds.
            e.printStackTrace(); // TODO(review): route through the project's logger
        }
        REFRESH_SCHEDULER.scheduleWithFixedDelay(() -> {
            try {
                loadAllCollector();
            } catch (Exception e) {
                // scheduleWithFixedDelay silently cancels ALL future executions if the
                // task throws, so a single failed refresh must never propagate.
                e.printStackTrace(); // TODO(review): route through the project's logger
            }
        }, 10, 10, TimeUnit.SECONDS);
    }

    /** Static utility class; not instantiable. */
    private ParseUtil() {
    }

    /**
     * Reloads every collector and compiles its parse rules into lexers, then atomically
     * replaces the cache. Collectors without any parse rules are skipped entirely
     * (they cannot contribute to {@link #parse}).
     */
    public static void loadAllCollector() {
        Map<String, Collector> rebuilt = new ConcurrentHashMap<>();
        CollectorMapper collectorMapper = SpringUtil.getBean(CollectorMapper.class);
        List<Collector> collectors = collectorMapper.findAllList(new Collector());
        for (Collector collector : collectors) {
            List<ParseRule> parseRules = collector.getParseRules();
            // A collector may have no parse rules configured; skip those.
            if (parseRules != null && !parseRules.isEmpty()) {
                List<BaseLexer> baseLexers = new ArrayList<>(parseRules.size());
                for (ParseRule parseRule : parseRules) {
                    // parseRule may carry a null/invalid rule body; parseRule() signals
                    // that by returning null, which we filter out here.
                    BaseLexer baseLexer = PARSE_JSON_RULE.parseRule(parseRule.getRulebody());
                    if (baseLexer != null) {
                        baseLexers.add(baseLexer);
                    }
                }
                collector.setBaseLexers(baseLexers);
                rebuilt.put(collector.getIp(), collector);
            }
        }
        // Single volatile write publishes the fully built snapshot to all readers.
        collectorMap = rebuilt;
    }

    /**
     * Parses a Flume event into a field map using the lexers registered for the
     * event's source IP (read from the {@code Constant.IP} header).
     *
     * @param event the Flume event; may be {@code null}
     * @return {@code null} if {@code event} is null; an empty map if the source IP is
     *         unknown; otherwise the first non-empty lexer result (or an otherwise
     *         empty map), always including the raw line under {@code Constant.ORIGINAL_LOG}
     */
    public static Map<String, Object> parse(Event event) {
        if (event == null) {
            return null;
        }
        Map<String, Object> result = new HashMap<>();
        String ip = event.getHeaders().get(Constant.IP);
        // Single lookup instead of containsKey + get (the snapshot may be swapped between calls).
        Collector collector = ip == null ? null : collectorMap.get(ip);
        if (collector != null) {
            String charsetName = collector.getCharset();
            // Charset.forName(null/"") throws; fall back to the platform default for
            // collectors with no charset configured. TODO(review): confirm the intended default.
            Charset charset = StringUtils.isBlank(charsetName)
                    ? Charset.defaultCharset()
                    : Charset.forName(charsetName);
            String line = new String(event.getBody(), charset);
            List<BaseLexer> baseLexers = collector.getBaseLexers();
            if (baseLexers != null) {
                for (BaseLexer baseLexer : baseLexers) {
                    Map<String, Object> parsed = baseLexer.parse(line);
                    // First lexer that produces any fields wins; also guards against a
                    // lexer returning null, which previously would have thrown NPE.
                    if (parsed != null && !parsed.isEmpty()) {
                        result = parsed;
                        break;
                    }
                }
            }
            result.put(Constant.ORIGINAL_LOG, line); // always keep the raw log line
        }
        return result;
    }
}
