package com.lkinga.www.Util;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Reducer;



import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.HashMap;
import java.util.Iterator;
import java.util.TreeMap;
import java.util.regex.Pattern;

/**
 * Created by xiaohei on 16/2/23.
 * General-purpose Hadoop helper utilities.
 */
public class HadoopUtil {

    /**
     * Field-separator pattern: matches a tab or a comma.
     * Usage: {@code SPARATOR.split(line)}.
     */
    public static final Pattern SPARATOR = Pattern.compile("[\t,]");

    /**
     * Root URI of the HDFS namenode.
     */
    public static final String HDFS = "hdfs://192.168.29.3:9000";

    /**
     * Computes the dwell time at each position from a time-ordered trace.
     *
     * <p>Consecutive timestamps are paired; the gap between a timestamp and its
     * successor is credited to the position recorded at the earlier timestamp.
     * Gaps longer than 60 minutes are treated as session breaks and discarded.
     *
     * @param sortDatas map of unix timestamp (seconds) to position, iterated in
     *                  ascending key order; may be {@code null} or empty
     * @return map of position to accumulated dwell time in minutes; empty when
     *         the input has fewer than two entries
     */
    public static HashMap<String, Float> calcStayTime(TreeMap<Long, String> sortDatas) {
        HashMap<String, Float> resMap = new HashMap<String, Float>();
        // Guard: the original unconditional iter.next() threw
        // NoSuchElementException on an empty (or null) input.
        if (sortDatas == null || sortDatas.isEmpty()) {
            return resMap;
        }
        Iterator<Long> iter = sortDatas.keySet().iterator();
        Long currentTimeflag = iter.next();
        // Walk consecutive timestamp pairs in ascending order.
        while (iter.hasNext()) {
            Long nextTimeflag = iter.next();
            float diff = (nextTimeflag - currentTimeflag) / 60.0f;
            // Gaps over 60 minutes are considered session breaks and skipped.
            if (diff <= 60.0f) {
                // Accumulate dwell time for the position held at the earlier timestamp.
                resMap.merge(sortDatas.get(currentTimeflag), diff, Float::sum);
            }
            currentTimeflag = nextTimeflag;
        }
        return resMap;
    }


}
