package fnic.prehand.esagent.ap_measure_new2;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.apache.log4j.Logger;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;

import fnic.prehand.common.AgentConfig;
import fnic.prehand.common.JacksonFormatService;
import fnic.prehand.esagent.EsAgent;
 
/**
 * Processes AP / station flow and login records: parses the incoming JSON,
 * accumulates per-station traffic against fixed limits, indexes the raw
 * record and pushes incremental updates into Elasticsearch.
 *
 * NOTE(review): this class is not thread-safe ({@code staflowtbl} is a plain
 * HashMap) — confirm each instance is confined to a single worker thread.
 */
public class RecordDeal implements Runnable{
	private static final AgentConfig config = AgentConfig.getInstance();
	private static final Logger logger = Logger.getLogger(RecordDeal.class);
	private static final JacksonFormatService jsoninstance = new JacksonFormatService();

	// JSON field names expected in incoming records. AP and STA records share
	// the same "up"/"down" key names; separate constants are kept so each
	// record layout reads explicitly at its use site.
	private static final String apmac_field = "ap_mac";
	private static final String stamac_field = "sta_mac";
	private static final String apup_field = "up";
	private static final String apdown_field = "down";
	private static final String staup_field = "up";
	private static final String stadown_field = "down";
	private static final String time_field = "time";

	// Per-station traffic limits in bytes: 1 GiB uplink, 2 GiB downlink.
	// (The trailing L operand promotes the whole product to long.)
	private static final long UPLIMIT = 1024*1024*1024L;
	private static final long DOWNLIMIT = 2*1024*1024*1024L;

	private static final String indexName = config.getAp_indexName();
	private static final String apflow_typeName = config.getAptype_apflow();
	private static final String staflow_typeName = config.getAptype_staflow();

	EsUpdate esupdater;
	APFlowIndexAgent staindex;
	APFlowIndexAgent apindex;
	// Accumulated up/down byte counts per station MAC.
	// NOTE(review): entries are never evicted, so this table grows without
	// bound over the process lifetime — consider periodic cleanup.
	Map<String, UpDownFlow> staflowtbl = new HashMap<String, UpDownFlow>();

	/** Wires up the ES updater and index agents, and ensures the index exists. */
	public RecordDeal(){
		esupdater = new EsUpdate();
		staindex = new APFlowIndexAgent(indexName, staflow_typeName);
		apindex = new APFlowIndexAgent(indexName, apflow_typeName);
		createIndex();
	}

	/**
	 * Ensures the target index exists, creating it if necessary.
	 *
	 * @return true if the index exists or was created and acknowledged,
	 *         false if creation was not acknowledged by the cluster
	 */
	private boolean createIndex(){
		IndicesExistsResponse existRsp = EsAgent.getClient().admin().indices().prepareExists(indexName).execute().actionGet();
		if(existRsp.isExists()){
			return true;
		}
		CreateIndexResponse crtRsp = EsAgent.getClient().admin().indices().prepareCreate(indexName).execute().actionGet();
		if(!crtRsp.isAcknowledged()){
			logger.error("create index["+ indexName +"] error");
			return false;
		}
		return true;
	}

	/**
	 * Handles one station flow record: validates it, accumulates the
	 * station's traffic (logging when a limit is first crossed), indexes
	 * the raw record and pushes the deltas to ES.
	 *
	 * @param record JSON object with ap_mac, sta_mac, up and down fields
	 * @return true if handled (including the up==down==0 no-op case),
	 *         false if the record is missing fields or has non-numeric counters
	 */
	@SuppressWarnings("unchecked") // String2Obj returns an untyped Map
	public boolean dealStaFlow(String record){
		Map<String, Object> headmap =
				(Map<String, Object>) jsoninstance.String2Obj(record, "java.util.Map");

		if(headmap == null ||
				!headmap.containsKey(apmac_field) ||
				!headmap.containsKey(stamac_field) ||
				!headmap.containsKey(staup_field) ||
				!headmap.containsKey(stadown_field) ){
			return false;
		}

		String sta = (String) headmap.get(stamac_field);
		long up;
		long down;
		try {
			up = Long.parseLong(headmap.get(staup_field).toString());
			down = Long.parseLong(headmap.get(stadown_field).toString());
		} catch (NumberFormatException e) {
			// Malformed counters: reject the record instead of crashing the caller.
			logger.error("sta["+sta+"] flow record has non-numeric up/down: "+e.getMessage());
			return false;
		}

		if(up==0 && down==0){
			// Nothing to account for; treat as successfully handled.
			return true;
		}

		UpDownFlow flow = staflowtbl.get(sta); // single lookup instead of containsKey+get
		if(flow != null){
			// Flow limiting can be hooked in here.
			if(flow.incr_flow(up, down) == 0){
				// The station just crossed a limit; trigger the over-quota action.
				logger.info("sta["+sta+"] flow exceed limits!, up:"+up+", down:"+down);
			}
		}else{
			staflowtbl.put(sta, new UpDownFlow(up, down));
		}

		staindex.atomProcess(record);

		return esupdater.updateStaFlow((String) headmap.get(apmac_field), sta, up, down);
	}

	/**
	 * Handles one AP flow record: validates it, indexes the raw record and
	 * pushes the deltas to ES. No per-AP limit tracking is performed.
	 *
	 * @param record JSON object with ap_mac, up and down fields
	 * @return true if handled (including the up==down==0 no-op case),
	 *         false if the record is missing fields or has non-numeric counters
	 */
	@SuppressWarnings("unchecked") // String2Obj returns an untyped Map
	public boolean dealApFlow(String record){
		Map<String, Object> headmap =
				(Map<String, Object>) jsoninstance.String2Obj(record, "java.util.Map");

		if(headmap == null ||
				!headmap.containsKey(apmac_field) ||
				!headmap.containsKey(apup_field) ||
				!headmap.containsKey(apdown_field) ){
			return false;
		}

		String ap = (String) headmap.get(apmac_field);
		long up;
		long down;
		try {
			up = Long.parseLong(headmap.get(apup_field).toString());
			down = Long.parseLong(headmap.get(apdown_field).toString());
		} catch (NumberFormatException e) {
			// Malformed counters: reject the record instead of crashing the caller.
			logger.error("ap["+ap+"] flow record has non-numeric up/down: "+e.getMessage());
			return false;
		}

		if(up==0 && down==0){
			// Nothing to account for; treat as successfully handled.
			return true;
		}

		apindex.atomProcess(record);

		return esupdater.updateApFlow(ap, up, down);
	}

	/**
	 * Handles one station login/logout record by upserting or updating the
	 * corresponding session document in ES.
	 *
	 * @param record JSON object with ap_mac, sta_mac and time fields
	 * @param login  true for a login event, false for a logout event
	 * @return true if the ES update succeeded, false if the record is
	 *         malformed or its timestamp cannot be parsed
	 */
	@SuppressWarnings("unchecked") // String2Obj returns an untyped Map
	public boolean dealStaLog(String record, boolean login){
		Map<String, Object> headmap =
				(Map<String, Object>) jsoninstance.String2Obj(record, "java.util.Map");

		if(headmap == null ||
				!headmap.containsKey(apmac_field) ||
				!headmap.containsKey(stamac_field) ||
				!headmap.containsKey(time_field) ){
			return false;
		}

		String time = dateFormatTrans((String) headmap.get(time_field));
		if(time == null){
			// Unparseable timestamp: reject the record rather than pushing
			// a null time into the ES update (previously propagated null).
			return false;
		}

		String ap = (String) headmap.get(apmac_field);
		String sta = (String) headmap.get(stamac_field);
		return login
				? esupdater.upsertLogin(ap, sta, time)
				: esupdater.updateLogout(ap, sta, time);
	}

	/** Flushes any pending station-flow bulk requests. */
	public void doStaBulk(){
		staindex.processBulkIfNeeded(true);
	}

	/** Flushes any pending AP-flow bulk requests. */
	public void doApBulk(){
		apindex.processBulkIfNeeded(true);
	}

	/** Intentionally empty: Runnable hook not yet implemented. */
	public void run() {
	}

	/** Placeholder for a periodic flush of {@link #staflowtbl}; not implemented. */
	private void scheduleUpdateFlow(){
		if(staflowtbl.size()>0){
			// TODO: push accumulated station flows to ES on a schedule.
		}
	}

	/**
	 * Converts an AP timestamp ("yyyy-MM-dd'T'HH:mm:ss") into the index
	 * format ("yyyy-MM-dd HH:mm:ss").
	 *
	 * SimpleDateFormat is not thread-safe, so both formatters are created
	 * per call. NOTE(review): parsing is lenient — out-of-range fields roll
	 * over instead of failing; confirm that is acceptable for AP input.
	 *
	 * @param datestr timestamp in AP format
	 * @return reformatted timestamp, or null if datestr cannot be parsed
	 */
	private String dateFormatTrans(String datestr){
		DateFormat apformat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
		DateFormat indexformat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
		try {
			return indexformat.format(apformat.parse(datestr));
		} catch (ParseException e) {
			logger.error("date format trans error:"+e.getMessage());
			return null;
		}
	}

	/** Mutable per-station up/down byte accumulator checked against the limits. */
	class UpDownFlow{
		long down = 0;
		long up = 0;

		public UpDownFlow(long u, long d){
			down = d;
			up = u;
		}
		public long getDown() {
			return down;
		}
		public long getUp() {
			return up;
		}
		/**
		 * Adds the given deltas and reports the limit state:
		 *   0  -> just exceeded a limit with this increment
		 *  -1  -> already over a limit before this increment (not accumulated)
		 *   1  -> still under both limits
		 */
		public int incr_flow(long u, long d){
			if(down >= DOWNLIMIT || up >= UPLIMIT){
				return -1;
			}
			down+=d;
			up+=u;
			if(down<DOWNLIMIT && up<UPLIMIT){
				return 1;
			}else{
				return 0;
			}
		}
	}

	public static void main(String[] args){
		new RecordDeal();
	}
}
