package mr.fileToDb;

import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import utils.DateUtils;
import utils.DbConn;
import utils.HdfsUtils;
import utils.IReadFile;
import utils.JarsLoader;
import utils.LangUtils;
import utils.Threshold;

/**
 *
 * @author 李岩飞
 * @email eliyanfei@126.com
 * 2017年7月31日 下午5:30:56
 *
 */
/**
 * Reducer that aggregates per-grid MR KPIs into per-scene KPIs.
 * <p>
 * In {@link #setup} it preloads the total grid count per scene from Oracle and the
 * RSRP bucket thresholds from {@code rsrp.conf} on HDFS. In {@link #reduce} it first
 * merges records per grid, emits one {@code GridKpiBean} per grid, then rolls all
 * grids up into a single {@code SceneKpiBean} for the scene.
 */
public class SceneKpiReducer extends Reducer<Text, EsbMRBean, CollectorWritable, CollectorWritable> {
	private static final Logger logger = LoggerFactory.getLogger(SceneKpiReducer.class);
	// sceneId -> total grid count, loaded once from nsn_scene_grid in setup().
	private final Map<Integer, Integer> dataMap = new HashMap<Integer, Integer>();
	// RSRP [min, max) buckets parsed from rsrp.conf; list order defines the bucket index.
	private final List<Threshold> list = new ArrayList<Threshold>();

	/**
	 * Loads per-scene grid counts from the database and the RSRP threshold
	 * configuration from HDFS before any reduce() call runs.
	 *
	 * @param context job context providing "confpath" and "libpath" settings
	 */
	@Override
	protected void setup(Context context) throws IOException, InterruptedException {
		String confpath = context.getConfiguration().get("confpath");
		String libpath = context.getConfiguration().get("libpath");
		FileSystem fs = FileSystem.get(context.getConfiguration());
		try {
			JarsLoader.loadJars(libpath);
			Connection conn = null;
			Statement st = null;
			ResultSet rs = null;
			try {
				Map<String, String> map = HdfsUtils.readPath(fs, confpath, "oracle.conf");
				DbConn dbConn = new DbConn(map);
				conn = dbConn.getConnection();
				st = conn.createStatement();
				rs = st.executeQuery("select sceneid,count(*) from nsn_scene_grid t group by sceneid");
				while (rs.next()) {
					dataMap.put(rs.getInt(1), rs.getInt(2));
				}
			} catch (Exception e) {
				// Log to the task log instead of printStackTrace so failures are visible;
				// the reducer proceeds best-effort with an empty dataMap (totalGrids = 0).
				logger.error("Failed to load scene grid counts from nsn_scene_grid", e);
			} finally {
				DbConn.closeAll(rs, st, conn);
			}

			// Each line of rsrp.conf contributes one [min, max) RSRP bucket.
			HdfsUtils.readPath(fs, confpath, "rsrp.conf", new IReadFile() {
				public void paserLine(String[] datas) {
					Threshold threshold = new Threshold("", LangUtils.toDouble(datas[0], 0), LangUtils.toDouble(datas[1], 0));
					list.add(threshold);
				}
			});
		} catch (Exception e) {
			logger.error("Reducer setup failed", e);
		} finally {
			// NOTE(review): FileSystem.get() returns a JVM-cached, shared instance;
			// closing it here invalidates it for any later HDFS access in this task.
			// Safe only if nothing else uses HDFS afterwards -- confirm, or use
			// FileSystem.newInstance(...) instead.
			fs.close();
		}
	}

	/**
	 * Increments the bucket of {@code datas} whose [min, max) range contains
	 * {@code value}. Values matching no range are counted in the last bucket.
	 *
	 * @param list  ordered RSRP buckets (bucket i covers [list[i].min, list[i].max))
	 * @param value the RSRP sample to classify
	 * @param datas per-bucket counters, parallel to {@code list}
	 */
	private void initThreshold(List<Threshold> list, float value, int[] datas) {
		// Guard: with no configured thresholds the original fallthrough would do
		// datas[-1]++ and throw ArrayIndexOutOfBoundsException.
		if (list.isEmpty()) {
			return;
		}
		int idx = 0;
		for (Threshold threshold : list) {
			if (value >= threshold.min && value < threshold.max) {
				datas[idx]++;
				return;
			}
			idx++;
		}
		// Out-of-range samples fall into the last bucket.
		datas[list.size() - 1]++;
	}

	@Override
	public void reduce(Text key, Iterable<EsbMRBean> values, Context context) throws IOException, InterruptedException {
		Iterator<EsbMRBean> it = values.iterator();
		Map<Integer, GridKpiBean> gridMap = new HashMap<Integer, GridKpiBean>();
		EsbMRBean mrBean = null;
		// Phase 1: merge all records of this key (sceneId + gridId) per grid.
		while (it.hasNext()) {
			mrBean = it.next();
			GridKpiBean kpi = gridMap.get(mrBean.gridId);
			if (kpi == null) {
				kpi = new GridKpiBean();
				gridMap.put(mrBean.gridId, kpi);
			}
			kpi.sceneId = mrBean.sceneId;
			kpi.gridId = mrBean.gridId;
			if (kpi.time == 0) {
				// Day-level timestamp taken from the first record seen for this grid.
				kpi.time = DateUtils.toDateNum(new Date(mrBean.time), "yyyyMMdd");
			}
			mrBean.initGridKpi(kpi);
			initThreshold(list, mrBean.rsrp, kpi.rsrp);
		}

		// Phase 2: emit each grid KPI, then roll everything up to the scene level.
		SceneKpiBean value = new SceneKpiBean();
		for (GridKpiBean kpi : gridMap.values()) {
			try {
				CollectorWritable cw = new CollectorWritable(kpi);
				context.write(cw, null);
			} catch (Exception e) {
				logger.error("Failed to write grid KPI for gridId={}", kpi.gridId, e);
			}

			// NOTE(review): if weakCoverageMR and avgrsrp_n are integral fields this is
			// integer division (rate is 0 or >=1), and avgrsrp_n == 0 divides by zero --
			// confirm the field types and consider an explicit (double) cast / zero guard.
			double rate = kpi.weakCoverageMR / kpi.avgrsrp_n;
			if (rate > 0.2) {
				kpi.weakCoverage = true;
			}
			value.avgrsrp_n += kpi.avgrsrp_n;
			value.avgrsrp_v += kpi.avgrsrp_v;
			value.totalmrs += kpi.totalmrs;
			value.weakCoverageMR += kpi.weakCoverageMR;

			// sceneId is the same for every grid of this key; gridId/time keep the
			// last grid's values (behavior preserved from the original).
			value.sceneId = kpi.sceneId;
			value.gridId = kpi.gridId;
			value.time = kpi.time;
			for (int i = 0; i < kpi.rsrp.length; i++) {
				value.rsrp[i] += kpi.rsrp[i];
			}

			value.validGrids++;
			if (kpi.weakCoverage) {
				value.weakCoverageGrid++;
			}
		}
		// Total grid count comes from the DB snapshot; 0 when the scene is unknown.
		Integer v = dataMap.get(value.sceneId);
		value.totalGrids = (v == null ? 0 : v);
		CollectorWritable cw = new CollectorWritable(value);
		context.write(cw, null);
	}

	@Override
	protected void cleanup(Context context) throws IOException, InterruptedException {
		super.cleanup(context);
		logger.info("清空reduce任务完成");
	}
}
