package algri;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;


public class AnaData {

	/**
	 * Backend analysis for the energy pie chart.
	 *
	 * Counts how many records fall into each order-of-magnitude bucket of the
	 * "nengliang" (energy) field: >=10^9, [10^8,10^9), [10^7,10^8),
	 * [10^6,10^7), [10^5,10^6) and [10^4,10^5). Energies below 10^4 are not
	 * charted and are intentionally ignored.
	 *
	 * @param list database rows; each row must contain a numeric "nengliang" string
	 * @return bucket name ("above_nine", "nine_and_eight", ...) -> count as a decimal string
	 * @throws NumberFormatException if a "nengliang" value is not a valid integer string
	 */
	public HashMap<String, String> ana_energy(List<HashMap<String, String>> list) {
		HashMap<String, String> map = new HashMap<String, String>();
		int above_nine = 0;
		int nine_and_eight = 0;
		int eight_and_seven = 0;
		int seven_and_six = 0;
		int six_and_five = 0;
		int five_and_four = 0;
		for (int i = 0; i < list.size(); i++) {
			// Parse as long: the top bucket is "energy above 10^9", so values
			// beyond Integer.MAX_VALUE (~2.1e9) are plausible and would have
			// overflowed the previous Integer.parseInt.
			long energy = Long.parseLong(list.get(i).get("nengliang").trim());
			// else-if chain with inclusive lower bounds: previously an energy
			// exactly equal to a power of ten (10^9, 10^8, ...) matched no
			// bucket at all and was silently dropped from the chart.
			if (energy >= 1000000000L) {
				above_nine++;
			} else if (energy >= 100000000L) {
				nine_and_eight++;
			} else if (energy >= 10000000L) {
				eight_and_seven++;
			} else if (energy >= 1000000L) {
				seven_and_six++;
			} else if (energy >= 100000L) {
				six_and_five++;
			} else if (energy >= 10000L) {
				five_and_four++;
			}
			// energy < 10^4: intentionally uncounted, as before
		}
		map.put("above_nine", String.valueOf(above_nine));
		map.put("nine_and_eight", String.valueOf(nine_and_eight));
		map.put("eight_and_seven", String.valueOf(eight_and_seven));
		map.put("seven_and_six", String.valueOf(seven_and_six));
		map.put("six_and_five", String.valueOf(six_and_five));
		map.put("five_and_four", String.valueOf(five_and_four));
		return map;
	}

	/**
	 * Backend analysis for the "events per day" line chart.
	 *
	 * Groups all records by calendar day (the first 10 characters of
	 * "quackTime", i.e. "yyyy-MM-dd") and returns one {"time": day,
	 * "happen_time": count} map per distinct day, in first-occurrence order.
	 *
	 * Replaces the previous fixed String[200] buffer, which overflowed with
	 * more than 200 records, leaked a padding null into the date set, and
	 * dropped the last real date when there were exactly 200 records.
	 *
	 * @param list database rows; each row must contain a "quackTime" string
	 *             of at least 10 characters starting with the date
	 * @return per-day counts in first-seen order
	 */
	public List<HashMap<String, String>> ana_happen_line(List<HashMap<String, String>> list) {
		// Distinct days in first-seen order (LinkedHashSet preserves insertion order).
		Collection<String> orderedDays = new LinkedHashSet<String>();
		// Occurrences per day.
		Map<String, Integer> counts = new HashMap<String, Integer>();
		for (int i = 0; i < list.size(); i++) {
			String day = list.get(i).get("quackTime").substring(0, 10);
			orderedDays.add(day);
			Integer seen = counts.get(day);
			counts.put(day, seen == null ? Integer.valueOf(1) : Integer.valueOf(seen.intValue() + 1));
		}
		List<HashMap<String, String>> returnList = new ArrayList<HashMap<String, String>>();
		for (String day : orderedDays) {
			HashMap<String, String> entry = new HashMap<String, String>();
			entry.put("time", day);
			entry.put("happen_time", String.valueOf(counts.get(day)));
			returnList.add(entry);
		}
		return returnList;
	}

	/**
	 * Backend analysis for the energy line chart.
	 *
	 * For each record, extracts the sub-string of "quackTime" from index 5 to
	 * 23 (month-to-millisecond portion) and rescales the raw integer energy
	 * string by inserting a decimal point four digits from the end, i.e.
	 * divides by 10^4.
	 *
	 * @param list database rows with "quackTime" (>= 23 chars — TODO confirm)
	 *             and "nengliang" (> 4 digits — TODO confirm) fields
	 * @return one {"time", "energy"} map per input row, in input order
	 */
	public List<HashMap<String, String>> ana_energy_line(List<HashMap<String, String>> list) {
		List<HashMap<String, String>> returnList = new ArrayList<HashMap<String, String>>();
		for (int i = 0; i < list.size(); i++) {
			HashMap<String, String> map = new HashMap<String, String>();
			String timeString = list.get(i).get("quackTime").substring(5, 23);
			String rawEnergy = list.get(i).get("nengliang");
			// Split directly around the decimal point position instead of the
			// previous double StringBuffer-reverse dance; the result is
			// identical: everything but the last four digits, a dot, then the
			// last four digits.
			int cut = rawEnergy.length() - 4;
			String energyString = rawEnergy.substring(0, cut) + "." + rawEnergy.substring(cut);
			map.put("time", timeString);
			map.put("energy", energyString);
			returnList.add(map);
		}
		return returnList;
	}

	/**
	 * Backend analysis for the moment-tensor pie chart (failure type).
	 *
	 * Classification per spec: tensor < 0.4 -> tensile ("below_zpf"),
	 * 0.4 to 0.6 -> mixed ("bt_zpf_and_zps"), above 0.6 -> shear ("above_zps").
	 *
	 * @param list database rows; each row must contain a numeric "tensor" string
	 * @return category name -> count as a decimal string
	 * @throws NumberFormatException if a "tensor" value is not a valid double
	 */
	public HashMap<String, String> ana_tensor(List<HashMap<String, String>> list) {
		HashMap<String, String> rest_map = new HashMap<String, String>();
		int above_zps = 0;       // shear:   tensor >= 0.6
		int bt_zpf_and_zps = 0;  // mixed:   0.4 <= tensor < 0.6
		int below_zpf = 0;       // tensile: tensor < 0.4
		for (int i = 0; i < list.size(); i++) {
			double tensor = Double.parseDouble(list.get(i).get("tensor").trim());
			if (tensor < 0.4) {
				below_zpf++;
			} else if (tensor < 0.6) {
				// A value of exactly 0.4 previously fell through to the shear
				// (else) bucket; it now counts as mixed per the 40%-60% spec.
				bt_zpf_and_zps++;
			} else {
				above_zps++;
			}
		}
		rest_map.put("above_zps", String.valueOf(above_zps));
		rest_map.put("bt_zpf_and_zps", String.valueOf(bt_zpf_and_zps));
		rest_map.put("below_zpf", String.valueOf(below_zpf));
		return rest_map;
	}

	/**
	 * Damage-severity pie chart: classifies records by their "b_value" field.
	 *
	 * Severe is encoded as "1", normal as "0.1175"; anything else (nominally
	 * "0.0353") is counted as minor.
	 *
	 * @param list database rows; each row must contain a "b_value" string
	 * @return {"severety", "nomal", "venial"} -> count as a decimal string
	 *         (key spellings are part of the existing API and kept as-is)
	 */
	public HashMap<String, String> ana_bvalue(List<HashMap<String, String>> list) {
		HashMap<String, String> map = new HashMap<String, String>();
		int severety = 0; // severe:  b_value == "1"
		int nomal = 0;    // normal:  b_value == "0.1175"
		int venial = 0;   // minor:   everything else
		for (int i = 0; i < list.size(); i++) {
			String b_valueString = list.get(i).get("b_value").trim();
			if (b_valueString.equals("1")) {
				severety++;
			} else if (b_valueString.equals("0.1175")) {
				nomal++;
			} else {
				venial++;
			}
		}
		map.put("severety", String.valueOf(severety));
		map.put("nomal", String.valueOf(nomal));
		map.put("venial", String.valueOf(venial));
		return map;
	}

	/**
	 * Station working-condition analysis: maps every station row to a warning
	 * image URL. Offline stations always get the level-one (red) warning
	 * image; online stations are graded by {@code AnaLevelWarning} from their
	 * disk usage ("used") and network speed ("netspeed").
	 *
	 * @param list station rows with "status", "used" and "netspeed" fields
	 * @return one {"img": url} map per station, in input order
	 */
	public List<HashMap<String, String>> ana_station_status(List<HashMap<String, String>> list) {
		AnaLevelWarning ana_Level_Warning = new AnaLevelWarning();
		List<HashMap<String, String>> returnList = new ArrayList<HashMap<String, String>>();
		for (int i = 0; i < list.size(); i++) {
			HashMap<String, String> map = new HashMap<String, String>();
			String net_statuString = list.get(i).get("status").trim();
			if (net_statuString.equals("online")) {
				// Online: derive the warning level image from usage and speed.
				String level_warningString = ana_Level_Warning.what_level(
						list.get(i).get("used").trim(), list.get(i).get("netspeed").trim());
				map.put("img", "http://www.cdmw-lnu.cn/images/levelImages/" + level_warningString);
			} else {
				// Offline: fixed level-one warning image.
				// NOTE(review): the double slash in this path is preserved
				// byte-for-byte — confirm the server serves it, or fix upstream.
				map.put("img", "http://www.cdmw-lnu.cn/images/levelImages//level_one_warning.png");
			}
			returnList.add(map);
		}
		return returnList;
	}

	/**
	 * Station working-condition detail: returns the concrete metrics of the
	 * FIRST station row, plus storage-capacity and network-status grades
	 * derived via {@code AnaLevelWarning}.
	 *
	 * assumes {@code list} is non-empty — an empty result set throws
	 * IndexOutOfBoundsException (TODO: confirm callers guarantee this).
	 *
	 * @param list station rows; only row 0 is read
	 * @return map of raw fields plus derived "stoCapcity" and "netStatus"
	 */
	public HashMap<String, String> ana_status_bypanfu(List<HashMap<String, String>> list) {
		HashMap<String, String> restMap = new HashMap<String, String>();
		AnaLevelWarning ana_Level_Warning = new AnaLevelWarning();
		HashMap<String, String> row = list.get(0);
		String stoCapcity = ana_Level_Warning.ana_stoCapcity(row.get("used").trim(), row.get("total").trim());
		String netStatus = ana_Level_Warning.ana_netspeedCap(row.get("netspeed").trim());
		restMap.put("panfu", row.get("panfu"));
		restMap.put("location", row.get("location"));
		restMap.put("status", row.get("status"));
		restMap.put("unused", row.get("unused"));
		restMap.put("used", row.get("used"));
		restMap.put("total", row.get("total"));
		restMap.put("netspeed", row.get("netspeed"));
		restMap.put("stoCapcity", stoCapcity);
		restMap.put("netStatus", netStatus);
		return restMap;
	}

}
