package com.eastcom.providor.dao.impl;

import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.hibernate.Hibernate;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.springframework.dao.DataAccessResourceFailureException;

import com.eastcom.bm.atom.Atom_DataSetMonthStatis;
import com.eastcom.bm.atom.Atom_DatasetDefine;
import com.eastcom.bm.atom.Atom_DatasetNe;
import com.eastcom.bm.atom.Atom_KpiDefine;
import com.eastcom.bm.atom.Atom_NeType;
import com.eastcom.bm.atom.DataAbnormal;
import com.eastcom.bm.atom.DataSetStatiBean;
import com.eastcom.bm.atom.RawData;
import com.eastcom.canicula.stope.utils.DateUtil;
import com.eastcom.client.DataAbnormalBean;
import com.eastcom.client.DataSetStateBean;
import com.eastcom.client.KpiStateBean;
import com.eastcom.client.NeTypeDataSetBean;
import com.eastcom.client.NeTypeDataSetStatiBean;
import com.eastcom.client.TreeNode;
import com.eastcom.common.util.CharliDate;
import com.eastcom.common.util.UUIDHexGenerator;
import com.eastcom.providor.dao.BaseDao;
import com.eastcom.providor.dao.IDataSetDao;
import com.eastcom.providor.service.extview.DatasetDefineUtil;
import com.eastcom.providor.util.DataSetKit;
import com.eastcom.providor.util.PMComparator;
import com.eastcom.providor.util.TimeKit;
import com.eastcom.providor.util.TimeStampCalculator;
import com.eastcom.providor.util.TimeSwap;

public class DataSetDao extends BaseDao implements IDataSetDao {

	/**
	 * Reads a single KPI value for the given data set / KPI at the given time
	 * stamp directly from the source database.
	 *
	 * @param dataSetId id of the data-set definition
	 * @param kpiId     business id of the KPI
	 * @param dimension unused here; kept for interface compatibility
	 * @param dataTime  time stamp of the row to read
	 * @return the value as a String, or null when the data set, KPI, row or value is missing
	 * @throws Exception on any data-access failure
	 */
	public String selectDataValueByParamsFromSource(String dataSetId, String kpiId, String dimension, Date dataTime) throws Exception {
		Atom_DatasetDefine datasetDefine=getObject(Atom_DatasetDefine.class, dataSetId);
		if(datasetDefine==null)
			return null;

		List<Atom_KpiDefine> kpiDefines=getObjects("from Atom_KpiDefine where kpiId='"+kpiId+"' and datasetDefine.id='"+dataSetId+"'");
		if(kpiDefines==null||kpiDefines.isEmpty())
			return null;

		Atom_KpiDefine kpiDefine=kpiDefines.get(0);
		String strTime=DateUtil.DateToString(datePattern, dataTime);
		// Oracle needs an explicit to_date(); other DBs accept the string literal.
		String time="oracle".equalsIgnoreCase(datasetDefine.getDbType())?"to_date('"+strTime+"','yyyy-mm-dd hh24:mi:ss')":"'"+strTime+"'";
		// NOTE(review): SQL built by concatenation; values come from configuration
		// tables, but parameterized statements would be safer.
		String sql="select "+kpiDefine.getColumnName()+" from "+datasetDefine.getTableName()+" where "+datasetDefine.getTimeStampColumn()+"="+time;
		List results=getFieldsBySQLFromSource(sql, new String[]{kpiDefine.getColumnName()}, datasetDefine);
		// BUG FIX: the original indexed results.get(0) without an isEmpty() check,
		// throwing IndexOutOfBoundsException when no row matched.
		if(results==null||results.isEmpty())
			return null;
		Object value=((Map)results.get(0)).get(kpiDefine.getColumnName());
		return value==null?null:value.toString();
	}

	/**
	 * Loads the integrity-abnormal description (abnormal_type='4') recorded for a
	 * data set at the given time stamp.
	 *
	 * @return a map with keys "abnormal_desc" and "data_time", or null when no record exists
	 * @throws Exception on any data-access failure
	 */
	public Map<String,Object> findDataSetIntegerDescByParam(String dataSetId, Date timeStamp) throws Exception {
		String dataTime=DateUtil.DateToString(datePattern, timeStamp);
		// NOTE(review): SQL built by concatenation; parameterized statements would be safer.
		String sql="select abnormal_desc,data_time from qa_data_abnormal where dataset_id='"+dataSetId+"' and " +
				"time_stamp=to_date('"+dataTime+"','yyyy-mm-dd hh24:mi:ss') and abnormal_type='4'";
		List<Map> descs=getFieldsBySQL(sql, new String[]{"abnormal_desc","data_time"});
		return descs==null||descs.isEmpty()?null:descs.get(0);
	}
	
	/**
	 * Reads a week of history for one KPI from the source database: every row in
	 * the 7-day window ending at dataTime whose time-of-day equals dataTime's,
	 * optionally restricted to one NE dimension.
	 *
	 * @param dataSetId id of the data-set definition
	 * @param kpiId     business id of the KPI
	 * @param dimension NE dimension value; null means all dimensions
	 * @param dataTime  reference time stamp (fixes both window end and time-of-day)
	 * @return list of maps keyed by the time-stamp column and the KPI column, or
	 *         null when the data set or KPI is undefined
	 * @throws Exception on any data-access failure
	 */
	public List selectHistoryDataValueByParamsFromSource(String dataSetId, String kpiId, String dimension, Date dataTime) throws Exception {
		Atom_DatasetDefine datasetDefine=getObject(Atom_DatasetDefine.class, dataSetId);
		if(datasetDefine==null)
			return null;

		List<Atom_KpiDefine> kpiDefines=getObjects("from Atom_KpiDefine where kpiId='"+kpiId+"' and datasetDefine.id='"+dataSetId+"'");
		if(kpiDefines==null||kpiDefines.isEmpty())
			return null;

		Atom_KpiDefine kpiDefine=kpiDefines.get(0);
		// Window start: midnight six days before dataTime (7 calendar days in total).
		String startTime=DateUtil.DateToString(datePattern,CharliDate.getStartTimeOfDaysApartCurrentDay(dataTime, -6));
		String endTime=DateUtil.DateToString(datePattern, dataTime);
		String hmsTime=DateUtil.DateToString("HH:mm:ss", dataTime);

		StringBuffer sql=new StringBuffer("select "+datasetDefine.getTimeStampColumn()+","+kpiDefine.getColumnName()+" from "+datasetDefine.getTableName());
		// Oracle needs to_date(); other DBs take the literal.
		String appendTimeExpress="oracle".equalsIgnoreCase(datasetDefine.getDbType())?"to_date('"+startTime+"','yyyy-mm-dd hh24:mi:ss')":"'"+startTime+"'";
		appeandExpressWhenValueNotNull(sql, datasetDefine.getTimeStampColumn(), startTime, appendTimeExpress, ">=", true);

		appendTimeExpress="oracle".equalsIgnoreCase(datasetDefine.getDbType())?"to_date('"+endTime+"','yyyy-mm-dd hh24:mi:ss')":"'"+endTime+"'";
		appeandExpressWhenValueNotNull(sql, datasetDefine.getTimeStampColumn(), endTime, appendTimeExpress, "<=", false);

		// Restrict to rows whose time-of-day matches dataTime's (hh24:mi:ss).
		String column="oracle".equalsIgnoreCase(datasetDefine.getDbType())?"to_char("+datasetDefine.getTimeStampColumn()+",'hh24:mi:ss')":
			"convert(char(8),"+datasetDefine.getTimeStampColumn()+",108)";
		appeandNotNullParam(sql, column, dataTime, hmsTime, "=", false);
		appeandNotNullParam(sql, datasetDefine.getPublicNeColumn(), dimension, dimension, "=", false);

		return getFieldsBySQLFromSource(sql.toString(), new String[]{datasetDefine.getTimeStampColumn(),kpiDefine.getColumnName()}, datasetDefine);
	}
	
	/**
	 * Counts source-table rows per time stamp in the [start, end] window.
	 * Either bound may be null for an open-ended range.
	 *
	 * @return map keyed by the time-stamp column value; each value map holds "num"
	 * @throws Exception on any data-access failure
	 */
	public Map<Object,Map> selectDimensionNumByParamsFromSource(String dataSetId, Date start,Date end) throws Exception {
		Atom_DatasetDefine datasetDefine=getObject(Atom_DatasetDefine.class, dataSetId);
		if(datasetDefine==null)
			return null;

		String strTime=start==null?null:DateUtil.DateToString(datePattern, start);
		String endTime=end==null?null:DateUtil.DateToString(datePattern, end);

		// Oracle needs to_date(); other DBs take the literal.
		String startTimeExp="oracle".equalsIgnoreCase(datasetDefine.getDbType())?"to_date('"+strTime+"','yyyy-mm-dd hh24:mi:ss')":"'"+strTime+"'";
		String endTimeExp="oracle".equalsIgnoreCase(datasetDefine.getDbType())?"to_date('"+endTime+"','yyyy-mm-dd hh24:mi:ss')":"'"+endTime+"'";
		StringBuffer sql=new StringBuffer("select "+datasetDefine.getTimeStampColumn().trim()+",count(*) as num from "+datasetDefine.getTableName());
		// The helpers skip a condition when its value is null, so open ranges work.
		boolean firstCondition=appeandExpressWhenValueNotNull(sql, datasetDefine.getTimeStampColumn(), strTime, startTimeExp, ">=", true);
		appeandExpressWhenValueNotNull(sql, datasetDefine.getTimeStampColumn(), endTime, endTimeExp, "<=", firstCondition);
		sql.append(" group by "+datasetDefine.getTimeStampColumn());
		return getFieldMapWithKeyFildBySQLFromSource(sql.toString(), datasetDefine.getTimeStampColumn().trim(), new String[]{"num"}, datasetDefine);
	}

	/**
	 * Counts distinct NE dimensions present in the source table at one time stamp.
	 *
	 * @return the number of distinct NE values, or 0 when the data set is undefined
	 *         or the query yields nothing
	 * @throws Exception on any data-access failure
	 */
	public int selectDimensionNumByParamsFromSource(String dataSetId, Date dataTime) throws Exception {
		Atom_DatasetDefine datasetDefine=getObject(Atom_DatasetDefine.class, dataSetId);
		if(datasetDefine==null)
			return 0;

		String strTime=DateUtil.DateToString(datePattern, dataTime);
		// Oracle needs to_date(); other DBs take the literal.
		String time="oracle".equalsIgnoreCase(datasetDefine.getDbType())?"to_date('"+strTime+"','yyyy-mm-dd hh24:mi:ss')":"'"+strTime+"'";
		String sql="select distinct("+datasetDefine.getPublicNeColumn()+") from "+
				datasetDefine.getTableName()+" where "+datasetDefine.getTimeStampColumn()+"="+time;

		List results=getFieldsBySQLFromSource(sql, new String[]{datasetDefine.getPublicNeColumn()}, datasetDefine);
		return results==null?0:results.size();
	}
	
	/**
	 * Counts distinct NE dimensions per time stamp, for an explicit list of
	 * time stamps, directly from the source table.
	 *
	 * @return map keyed by the time-stamp column value; each value map holds "num";
	 *         null when the input list is empty or the data set is undefined
	 * @throws Exception on any data-access failure
	 */
	public Map<Object,Map> selectDimensionNumByParamsFromSource(String dataSetId, List<Date> dataTimes) throws Exception {
		if(dataTimes==null||dataTimes.isEmpty())
			return null;

		Atom_DatasetDefine datasetDefine=getObject(Atom_DatasetDefine.class, dataSetId);
		if(datasetDefine==null)
			return null;

		// Single loop instead of the original duplicated oracle / non-oracle loops:
		// only the literal format differs per DB type.
		boolean oracle="oracle".equalsIgnoreCase(datasetDefine.getDbType());
		List<String> strTimes=new ArrayList<String>(dataTimes.size());
		for(Date dataTime:dataTimes){
			String strTime=DateUtil.DateToString(datePattern, dataTime);
			strTimes.add(oracle?"to_date('"+strTime+"','yyyy-mm-dd hh24:mi:ss')":"'"+strTime+"'");
		}

		StringBuffer sql=new StringBuffer("select "+datasetDefine.getTimeStampColumn()+"," +
				"count(distinct("+datasetDefine.getPublicNeColumn()+")) as num from "+
				datasetDefine.getTableName());

		// Builds the IN-style condition over the requested time stamps.
		mergeSQLbyUnionFieldLst(sql, datasetDefine.getTimeStampColumn(), strTimes, true);
		sql.append(" group by "+datasetDefine.getTimeStampColumn());
		return getFieldMapWithKeyFildBySQLFromSource(sql.toString(),
				datasetDefine.getTimeStampColumn(),new String[]{"num"}, datasetDefine);
	}
	
	/**
	 * Loads every data-set definition.
	 *
	 * @throws Exception on any data-access failure
	 */
	public List<Atom_DatasetDefine> getAllDataSetDefine() throws Exception {
		// The original catch-and-rethrow added nothing.
		return this.getObjects(Atom_DatasetDefine.class);
	}

	/**
	 * Builds one non-leaf client tree node per data-set definition; node ids are
	 * suffixed with "@#1" (apparently a node-type marker for the client tree —
	 * TODO confirm against the consumer).
	 *
	 * @param id unused; kept for interface compatibility
	 * @throws Exception on any data-access failure
	 */
	public List<TreeNode> getAllDatasetNode(String id) throws Exception {
		List<TreeNode> results=new ArrayList<TreeNode>();
		List<Atom_DatasetDefine> list=this.getObjects(Atom_DatasetDefine.class);
		for(Atom_DatasetDefine at:list){
			TreeNode dataSetNode=new TreeNode();
			dataSetNode.setId(at.getId()+"@#1");
			dataSetNode.setText(at.getCn_name());
			dataSetNode.setLeaf(false);
			results.add(dataSetNode);
		}
		return results;
	}

	/** Loads every data-set definition via HQL. The unused local map the original declared is removed. */
	public List<Atom_DatasetDefine> getAllDatasetDefine(){
		return this.getObjects("from Atom_DatasetDefine");
	}
	
	/**
	 * Loads a data-set definition by id.
	 *
	 * @param dataSetId   id of the definition
	 * @param initialKpis when true, force-initializes the lazy KPI and NE
	 *                    collections while the session is open; when false,
	 *                    returns detached copies built by DatasetDefineUtil
	 * @return the definition, or null when not found
	 * @throws Exception on any data-access failure
	 */
	public Atom_DatasetDefine findById(String dataSetId,boolean initialKpis)throws Exception{
		List<Atom_DatasetDefine> dataSets=null;
		Session session=null;

		try{
			session=this.getSession();
			dataSets=session.createQuery("from Atom_DatasetDefine where id='"+dataSetId+"'").list();

			if(initialKpis&&dataSets!=null&&!dataSets.isEmpty()){
				// Touch the lazy collections before the session is released.
				for(Atom_DatasetDefine dataSet:dataSets){
					if(!Hibernate.isInitialized(dataSet.getKpiDefines()))
						Hibernate.initialize(dataSet.getKpiDefines());

					if(!Hibernate.isInitialized(dataSet.getDatasetNes()))
						Hibernate.initialize(dataSet.getDatasetNes());
				}
			}
			else if(!initialKpis&&dataSets!=null&&!dataSets.isEmpty())
				// Detached copies without the lazy collections.
				dataSets=DatasetDefineUtil.createDatasetDefine(dataSets);
		}
		finally{
			this.releaseSession(session);
		}

		return dataSets==null||dataSets.isEmpty()?null:dataSets.get(0);
	}
	
	/**
	 * Loads all data-set ids via plain SQL and wraps each into a skeleton
	 * Atom_DatasetDefine carrying only the id.
	 *
	 * @return never null; empty when the table has no rows
	 * @throws Exception on any data-access failure
	 */
	public List<Atom_DatasetDefine> selectAllDatasetDefine() throws Exception {
		Map<Object,Map> results=getFieldMapWithKeyFildBySQL("select id,name_cn from qa_dataset_define","id", new String[]{"name_cn"});
		// BUG FIX: the original printed results.size() before the null check,
		// which would NPE on a null result; the debug printlns are removed.
		List<Atom_DatasetDefine> datasetDefines=new ArrayList<Atom_DatasetDefine>();
		if(results!=null&&!results.isEmpty()){
			for(Object id:results.keySet()){
				Atom_DatasetDefine datasetDefine=new Atom_DatasetDefine();
				datasetDefine.setId((String)id);
				datasetDefines.add(datasetDefine);
			}
		}
		return datasetDefines;
	}
	
	/**
	 * Queries data-set definitions filtered by NE type (exact) and Chinese name
	 * (substring match); either filter may be null.
	 *
	 * @param initialKpis when true, force-initializes the lazy KPI and NE
	 *                    collections; when false, returns detached copies
	 * @throws Exception on any data-access failure
	 */
	public List<Atom_DatasetDefine> selectDatasetDefineByNeTypeAndCName(String name,String neType,boolean initialKpis) throws Exception {
		StringBuffer sql=new StringBuffer("from Atom_DatasetDefine");
		// Helpers skip a condition when its value is null.
		boolean firstCondition=this.appeandNotNullParam(sql, "neType", neType, neType, "=", true);
		this.appeandNotNullParam(sql, "cn_name", name, "%"+name+"%", "like", firstCondition);

		List<Atom_DatasetDefine> dataSets=null;
		Session session=null;

		try{
			session=this.getSession();
			dataSets=session.createQuery(sql.toString()).list();
			// Debug println removed; the pointless catch-and-rethrow is gone too.

			if(initialKpis&&dataSets!=null&&!dataSets.isEmpty()){
				// Touch the lazy collections before the session is released.
				for(Atom_DatasetDefine dataSet:dataSets){
					if(!Hibernate.isInitialized(dataSet.getKpiDefines()))
						Hibernate.initialize(dataSet.getKpiDefines());

					if(!Hibernate.isInitialized(dataSet.getDatasetNes()))
						Hibernate.initialize(dataSet.getDatasetNes());
				}
			}
			else
				// Note: also taken when initialKpis is true but the list is empty,
				// matching the original behavior.
				return DatasetDefineUtil.createDatasetDefine(dataSets);
		}
		finally{
			this.releaseSession(session);
		}

		return dataSets;
	}

	/** Returns true when a data-set definition with the given id exists. */
	public boolean isDatasetExist(String dataset_id) {
		Atom_DatasetDefine found=this.getObject(Atom_DatasetDefine.class, dataset_id);
		return found!=null;
	}

	/**
	 * Persists a new data-set definition, regenerating its timer cron expression
	 * first so the stored schedule matches the definition.
	 *
	 * @throws Exception on any data-access failure
	 */
	public void saveDatasetDefine(Atom_DatasetDefine datasetDefine) throws Exception {
		// Pointless catch-and-rethrow and empty finally removed.
		datasetDefine.generateTimerCronta();
		this.saveObject(datasetDefine);
	}

	/**
	 * Updates an existing data-set definition, regenerating its timer cron
	 * expression first so the stored schedule matches the definition.
	 *
	 * @throws Exception on any data-access failure
	 */
	public void updateDatasetDefine(Atom_DatasetDefine datasetDefine) throws Exception {
		// Pointless catch-and-rethrow and empty finally removed.
		datasetDefine.generateTimerCronta();
		this.updateObject(datasetDefine);
	}
	
	/**
	 * Deletes the data-set definition with the given id.
	 *
	 * @throws Exception on any data-access failure
	 */
	public void delDatasetByID(String datasetID) throws Exception {
		// Pointless catch-and-rethrow and empty finally removed.
		this.deleteObject(Atom_DatasetDefine.class, datasetID);
	}

	/**
	 * Deletes the data-set definitions whose ids appear in the list.
	 *
	 * @param datasetIDs ids to delete; null or empty is a no-op
	 * @throws Exception on any data-access failure
	 */
	public void delDatasetByDatasetIDs(List<String> datasetIDs) throws Exception {
		// Guard: with no ids the generated statement could lack a WHERE clause
		// and delete every definition.
		if(datasetIDs==null||datasetIDs.isEmpty())
			return;

		StringBuffer sql=new StringBuffer("delete Atom_DatasetDefine ");
		this.mergeSQLbyFieldLst(sql, "id", datasetIDs, true);

		this.deleteObjects(sql.toString(),null);
	}
	
	/**
	 * Returns every data-set definition keyed by its id.
	 *
	 * @throws Exception on any data-access failure
	 */
	public Map<String, Atom_DatasetDefine> getAllDataSetDefineMap() throws Exception {
		Map<String, Atom_DatasetDefine> byId=new HashMap<String, Atom_DatasetDefine>();
		for(Atom_DatasetDefine def:getAllDataSetDefine())
			byId.put(def.getId(), def);
		return byId;
	}

	/**
	 * Builds a state bean per data set of the given attention type from the
	 * latest integrity record (ABNORMAL_TYPE='4'), joined with the count of open
	 * value abnormalities (ABNORMAL_TYPE='2', STATE='1') at that same time stamp,
	 * the data-set definition, its NE type and the latest collection start time;
	 * then groups the beans by NE type.
	 *
	 * Oracle-only SQL: uses (+) outer joins, Nvl() and multi-column IN.
	 *
	 * @param attentionType filter on QA_DATASET_DEFINE.ATTENTION_TYPE
	 * @return one NeTypeDataSetBean per NE type, each carrying its DataSetStateBeans
	 * @throws Exception on any data-access failure
	 */
	public List<NeTypeDataSetBean> selectDataSetStateWithLastTimeStampDistinguishByNeType(String attentionType) throws Exception {
		// t1: latest integrity record per data set; t2: open value-abnormal count
		// at that time stamp; t3/t4: definition + NE type; t5: latest task start.
		String sql = "SELECT t1.DATASET_ID,"+
		 "        t1.TIME_STAMP,"+
		 "        t1.INTEGRITY_RATIO,"+
		 "        t1.ABNORMAL_DESC,"+
		 "        Nvl(t2.num,0) abnormalKpiNum,"+
		 "        t3.NAME_CN,"+
		 "        t3.NE_TYPE,"+
		 "        t4.NAME,"+
		 "        t3.TIME_UNIT granularity,"+
		 "        t3.BACK_COLLECTNUM,"+
		 "        t5.max_time"+
		 " FROM   (SELECT DATASET_ID,"+
		 "                TIME_STAMP,"+
		 "                INTEGRITY_RATIO,"+
		 "                ABNORMAL_DESC"+
		 "         FROM   qa_data_abnormal"+
		 "         WHERE  ABNORMAL_TYPE = '4'"+
		 "                AND (DATASET_ID,"+
		 "                     TIME_STAMP) IN (SELECT   DATASET_ID,"+
		 "                                              Max(time_stamp)"+
		 "                                     FROM     qa_data_abnormal"+
		 "                                     GROUP BY DATASET_ID)) t1,"+
		 "        (SELECT   DATASET_ID,"+
		 "                  TIME_STAMP,"+
		 "                  Count(* ) num"+
		 "         FROM     QA_DATA_ABNORMAL"+
		 "         WHERE    ABNORMAL_TYPE = '2'"+
		 "                  AND STATE = '1'"+
		 "                  AND (DATASET_ID,"+
		 "                       TIME_STAMP) IN (SELECT   DATASET_ID,"+
		 "                                                Max(time_stamp)"+
		 "                                       FROM     qa_data_abnormal"+
		 "                                       GROUP BY DATASET_ID)"+
		 "         GROUP BY DATASET_ID,"+
		 "                  TIME_STAMP) t2,"+
		 "        QA_DATASET_DEFINE t3,"+
		 "        QA_NETYPE t4,"+
		 "        (SELECT   dataset_id,"+
		 "                  Max(starttime) max_time"+
		 "         FROM     qa_task_statis"+
		 "         GROUP BY dataset_id) t5"+
		 " WHERE  t1.DATASET_ID = t2.DATASET_ID (+) "+
		 "        AND t1.TIME_STAMP = t2.TIME_STAMP (+) "+
		 "        AND t1.DATASET_ID = t3.ID"+
		 "        AND t3.NE_TYPE = t4.ID"+
		 "        AND t3.ATTENTION_TYPE = '"+attentionType+"'"+
		 "        AND t1.dataset_id = t5.dataset_id (+) ";

		String keyField="DATASET_ID";
		String[] valueFields=new String[]{"TIME_STAMP","INTEGRITY_RATIO","ABNORMALKPINUM","NAME_CN","NE_TYPE","NAME","GRANULARITY","BACK_COLLECTNUM","ABNORMAL_DESC","MAX_TIME"};
		Map<Object,Map> result=this.getFieldMapWithKeyFildBySQL(sql, keyField, valueFields);

		List<DataSetStateBean> allBean=new ArrayList<DataSetStateBean>();
		for(Iterator it=result.keySet().iterator();it.hasNext();){
			String dataset_id=(String)it.next();
			Map valueMap=result.get(dataset_id);
			Date timeStamp=(Date)valueMap.get("TIME_STAMP");
			// TIME_UNIT is stored in seconds; convert to milliseconds.
			long granularity=Long.valueOf((String)valueMap.get("GRANULARITY"))*1000;
			long backNum=Long.valueOf((String)valueMap.get("BACK_COLLECTNUM"));

			// A data set counts as real-time when its latest time stamp is no older
			// than BACK_COLLECTNUM granularity periods before "now" (aligned to the
			// granularity boundary).
			long referNow=TimeKit.getReferTime(new Date().getTime(), granularity);
			long retractionTime=backNum*granularity;
			long retractionEnd=referNow-retractionTime;
			Date backTime=new Date(retractionEnd);

			DataSetStateBean bean=new DataSetStateBean();
			bean.setDataSetId(dataset_id);

			bean.setConsistent(true);
			bean.setDatsSetName((String)valueMap.get("NAME_CN"));
			// NOTE(review): assumes INTEGRITY_RATIO and ABNORMALKPINUM are never
			// null here — a null would NPE; confirm against the SQL contract.
			bean.setInteger(((java.math.BigDecimal)valueMap.get("INTEGRITY_RATIO")).doubleValue()>=100?true:false);
			bean.setIntegerRatio(((java.math.BigDecimal)valueMap.get("INTEGRITY_RATIO")).doubleValue());
			bean.setNeTypeId((String)valueMap.get("NE_TYPE"));
			bean.setNeTypeName((String)valueMap.get("NAME"));
			bean.setValueCorrect(((java.math.BigDecimal)valueMap.get("ABNORMALKPINUM")).longValue()<=0);
			bean.setTimeStamp(timeStamp);
			bean.setAbnormalDesc((String)valueMap.get("ABNORMAL_DESC"));
			bean.setMaxTime((Date)valueMap.get("MAX_TIME"));

			if(timeStamp.getTime()>=backTime.getTime()){
				bean.setRealTime(true);
			}else{	// UI will display "no real-time data"
				bean.setRealTime(false);
			}
			allBean.add(bean);
		}
		// Group the beans by NE type.
		List<NeTypeDataSetBean> ret=new ArrayList<NeTypeDataSetBean>();
		Map<String,NeTypeDataSetBean> regMap=new HashMap<String,NeTypeDataSetBean>();
		for(DataSetStateBean stateBean:allBean){
			String neType=stateBean.getNeTypeId();
			String neTypeName=stateBean.getNeTypeName();
			NeTypeDataSetBean neTypeDataSetBean=regMap.get(neType);
			if(neTypeDataSetBean==null){
				neTypeDataSetBean=new NeTypeDataSetBean();
				neTypeDataSetBean.setNeTypeId(neType);
				neTypeDataSetBean.setTitle(neTypeName);
				regMap.put(neType, neTypeDataSetBean);
			}
			neTypeDataSetBean.getSgsn().add(stateBean);
		}
		ret.addAll(regMap.values());
		return ret;
	}

	/**
	 * Legacy variant: builds one DataSetStateBean per distinct raw-data time
	 * stamp in [startTime, endTime], flagging integrity (abnormal_type='4')
	 * and value-correctness (any other open abnormal record) per time stamp.
	 * Superseded by selectDataSetStateByTimeStamp, which also fills gaps.
	 *
	 * @param startTime window start in epoch millis; 0 means unbounded
	 * @param endTime   window end in epoch millis; 0 means unbounded
	 * @return beans sorted by time stamp, or null when no raw data exists
	 * @throws Exception on any data-access failure
	 */
	public List<DataSetStateBean> selectDataSetStateByTimeStamp_old(String dataSetId,long startTime, long endTime) throws Exception {
        try{
        	String start=startTime==0l?null:DateUtil.LongToString(datePattern, startTime);
        	String end=endTime==0l?null:DateUtil.LongToString(datePattern, endTime);
        	
        	// Raw data rows grouped by time stamp (the values themselves are unused
        	// below; only the key set of time stamps matters).
        	StringBuffer sql=new StringBuffer("select a.dataset_id,a.ne_id,a.kpi_id,a.time_stamp, " +
        			"a.collector_date,a.is_normal from qa_raw_data a ") ;
        	boolean firstCondition=this.appendNotNullDateAfterCondition(sql, "a.time_stamp", start, true);
        	firstCondition=this.appendNotNullDateBeforeCondition(sql, "a.time_stamp", end, firstCondition);
        	this.appeandNotNullParam(sql, "a.dataset_id", dataSetId, dataSetId, "=", firstCondition);
        	
        	Map<Object,List<Map>> timeStampKpis=this.getFieldMapLstWithKeyFildBySQL(sql.toString(), "time_stamp", new String[]{"ne_id","kpi_id","collector_date","is_normal"});
			if(timeStampKpis==null||timeStampKpis.isEmpty())
				return null;
			
			// Open abnormal records (state='1') in the same window, grouped by time stamp.
			sql=new StringBuffer("select a.time_stamp,a.dataset_id,a.integrity_ratio,a.abnormal_type from qa_data_abnormal a ");
			firstCondition=this.appeandNotNullParam(sql, "a.dataset_id", dataSetId, dataSetId, "=", true);
			firstCondition=this.appendNotNullDateAfterCondition(sql, "a.time_stamp", start, firstCondition);
			firstCondition=this.appendNotNullDateBeforeCondition(sql, "a.time_stamp", end, firstCondition);
			//this.appeandNotNullParam(sql, "a.abnormal_type", "4", "4", "=", firstCondition);
			appeandNotNullParam(sql, "a.state", "1", "1", "=", firstCondition);
			//Map<Object,Map> timeStampInteger=getFieldMapWithKeyFildBySQL(sql.toString(), "time_stamp", new String[]{"integrity_ratio"});
			Map<Object,List<Map>> timeStampAbnormal=getFieldMapLstWithKeyFildBySQL(sql.toString(), "time_stamp", new String[]{"integrity_ratio","abnormal_type"});
			
			Map<Date,DataSetStateBean> dataSetStateBeanCache=new HashMap<Date,DataSetStateBean>();
			List<DataSetStateBean> results=new ArrayList<DataSetStateBean>();
			for(Object timeStamp:timeStampKpis.keySet()){
				DataSetStateBean setStateBean=new DataSetStateBean();
				setStateBean.setTimeStamp((Date)timeStamp);
				//setStateBean.setValueCorrect(valueCorrect);
				
				List<Map> abnormalMapLst=timeStampAbnormal==null?null:timeStampAbnormal.get(timeStamp);
				// Defaults when no abnormal record exists for this time stamp.
				boolean isNormal=true;
				boolean isInteger=true;
				Double integerRatio=100.0;
				
				if(abnormalMapLst!=null&&!abnormalMapLst.isEmpty()){
					for(Map abnormalMap:abnormalMapLst){
						// Type '4' = integrity abnormality; anything else marks a value abnormality.
						if("4".equals(abnormalMap.get("abnormal_type"))){
							integerRatio=((BigDecimal)abnormalMap.get("integrity_ratio")).doubleValue();
							isInteger=integerRatio<100?false:true;
						}
						else
							isNormal=false;
					}
				}
				
				/*
				List<Map> kpiStates=timeStampKpis.get(timeStamp);
				for(Map kpiState:kpiStates){
					if("0".equals(kpiState.get("is_normal"))){
						isNormal=false;
						break;
					}
				}
				*/
				
				//Map integerMap=timeStampInteger==null?null:timeStampInteger.get(timeStamp);
				//Double integerRatio=integerMap==null?100:((BigDecimal)integerMap.get("integrity_ratio")).doubleValue();
				//boolean isInteger=integerRatio<100?false:true;
				setStateBean.setValueCorrect(isNormal);
				setStateBean.setInteger(isInteger);
				setStateBean.setIntegerRatio(Double.parseDouble(format.format(integerRatio)));
				dataSetStateBeanCache.put((Date)timeStamp, setStateBean);
			}
			
			// Emit the beans in chronological order.
			if(!dataSetStateBeanCache.isEmpty()){
				List<Date> timeStampLst=new ArrayList<Date> ();
				timeStampLst.addAll(dataSetStateBeanCache.keySet());
				Collections.sort(timeStampLst);
				
				for(Date timeStamp:timeStampLst)
					results.add(dataSetStateBeanCache.get(timeStamp));
			}
			
			return results;
        }
        catch(Exception e){
        	throw e;
        }
		
	}

	/**
	 * Builds one DataSetStateBean per time stamp in [startTime, endTime): actual
	 * raw-data time stamps get their integrity / value-correctness flags from
	 * qa_data_abnormal; missing time stamps (per the data set's granularity) are
	 * filled with placeholder beans (probe=false), and trailing future
	 * placeholders are trimmed via removeNotProbe.
	 *
	 * @param startTime window start in epoch millis; 0 leaves the abnormal query unbounded
	 * @param endTime   window end in epoch millis; 0 leaves the abnormal query unbounded
	 * @return beans sorted by PMComparator, or null when no raw data exists
	 * @throws Exception on any data-access failure
	 */
	public List<DataSetStateBean> selectDataSetStateByTimeStamp(String dataSetId,long startTime, long endTime) throws Exception {
		String start=startTime==0l?null:DateUtil.LongToString(datePattern, startTime);
		String end=endTime==0l?null:DateUtil.LongToString(datePattern, endTime);
		Date dStart=DateUtil.LongToDate(startTime);
		Date dEnd=DateUtil.LongToDate(endTime);
		String hql="select distinct(timeStamp) from RawData where datasetId=? and timeStamp>=? and timeStamp<?";
		List timeStampListOfRawData=this.getObjects(hql, new Object[]{dataSetId,dStart,dEnd});
		// BUG FIX: the original used && here, a condition that can never be true
		// and that throws a NullPointerException when the query returns null.
		if(timeStampListOfRawData==null||timeStampListOfRawData.isEmpty())
			return null;

		// Open abnormal records in the window, grouped by time stamp.
		StringBuffer sql=new StringBuffer("select a.time_stamp,a.dataset_id,a.integrity_ratio,a.abnormal_type from qa_data_abnormal a ");
		boolean firstCondition=this.appeandNotNullParam(sql, "a.dataset_id", dataSetId, dataSetId, "=", true);
		firstCondition=this.appendNotNullDateAfterCondition(sql, "a.time_stamp", start, firstCondition);
		firstCondition=this.appendNotNullDateBeforeCondition(sql, "a.time_stamp", end, firstCondition);
		Map<Object,List<Map>> timeStampAbnormal=getFieldMapLstWithKeyFildBySQL(sql.toString(), "time_stamp", new String[]{"integrity_ratio","abnormal_type"});

		Map<Date,DataSetStateBean> dataSetStateBeanCache=new HashMap<Date,DataSetStateBean>();
		List<DataSetStateBean> results=new ArrayList<DataSetStateBean>();
		Map<Long,DataSetStateBean> tempResult=new HashMap<Long,DataSetStateBean>();
		for(int i=0;i<timeStampListOfRawData.size();i++){
			Object timeStamp=timeStampListOfRawData.get(i);
			DataSetStateBean setStateBean=new DataSetStateBean();
			setStateBean.setTimeStamp((Date)timeStamp);
			// All abnormalities at this time stamp: integrity and accuracy.
			List<Map> abnormalMapLst=timeStampAbnormal==null?null:timeStampAbnormal.get(timeStamp);
			boolean isNormal=true;
			boolean isInteger=true;
			Double integerRatio=100.0;

			if(abnormalMapLst!=null&&!abnormalMapLst.isEmpty()){
				for(Map abnormalMap:abnormalMapLst){
					// Type '4' = integrity abnormality; anything else marks a value abnormality.
					if("4".equals(abnormalMap.get("abnormal_type"))){
						integerRatio=((BigDecimal)abnormalMap.get("integrity_ratio")).doubleValue();
						isInteger=integerRatio<100?false:true;
					}
					else
						isNormal=false;
				}
			}

			setStateBean.setValueCorrect(isNormal);	// false when any KPI value is abnormal at this time
			setStateBean.setInteger(isInteger);	// false when the dimension (NE) count is incomplete
			setStateBean.setIntegerRatio(Double.parseDouble(format.format(integerRatio)));
			dataSetStateBeanCache.put((Date)timeStamp, setStateBean);
		}

		if(!dataSetStateBeanCache.isEmpty()){
			List<Date> timeStampLst=new ArrayList<Date> ();
			timeStampLst.addAll(dataSetStateBeanCache.keySet());
			Collections.sort(timeStampLst);

			for(Date timeStamp:timeStampLst)	// queried time stamps; possibly with gaps
				results.add(dataSetStateBeanCache.get(timeStamp));

			// Generate every expected time stamp in the window as a placeholder,
			// then overwrite with the real beans where data exists.
			Atom_DatasetDefine dataset=this.findById(dataSetId, false);
			long granularity=Long.valueOf(dataset.getTimeUnit());
			TimeStampCalculator cal=new TimeStampCalculator(new Date(startTime),new Date(endTime),granularity*1000);
			long count=cal.getTimeStampCount();

			for(long i=0;i<count;i++)
			{
				Date tempDate=cal.getNextTimeStamp();
				DataSetStateBean tempBean=new DataSetStateBean();
				tempBean.setProbe(false);
				tempBean.setTimeStamp(tempDate);
				tempResult.put(tempDate.getTime(), tempBean);
			}

			for(int j=0;j<results.size();j++)
			{
				DataSetStateBean bean=results.get(j);
				tempResult.put(bean.getTimeStamp().getTime(), bean);
			}
		}
		Collection<DataSetStateBean> dataSetStateBeans=tempResult.values();
		List<DataSetStateBean> listDataSetStateBean=new ArrayList<DataSetStateBean>();
		listDataSetStateBean.addAll(dataSetStateBeans);
		Collections.sort(listDataSetStateBean, new PMComparator());
		// Drop trailing future placeholders so the UI does not show empty slots.
		listDataSetStateBean=removeNotProbe(listDataSetStateBean);
		return listDataSetStateBean;
	}
	
	/**
	 * Trims trailing placeholder beans (isProbe()==false) whose time stamp lies
	 * after the start of the current day, stopping at the first bean from the
	 * tail that does not match. Index 0 is never removed, so at least one bean
	 * always survives. Returns a new list; the argument is left untouched.
	 */
	private List<DataSetStateBean> removeNotProbe(
			List<DataSetStateBean> listDataSetStateBean) {
		Date todayStart=CharliDate.getStartTimeOfCurrentDay(new Date());
		List<DataSetStateBean> trimmed=new ArrayList<DataSetStateBean>(listDataSetStateBean.size());
		trimmed.addAll(listDataSetStateBean);
		int idx=trimmed.size()-1;
		while(idx>0)
		{
			DataSetStateBean tail=trimmed.get(idx);
			if(tail.isProbe()||!tail.getTimeStamp().after(todayStart))
			{
				break;
			}
			trimmed.remove(idx);
			idx--;
		}
		return trimmed;
	}
	
	/**
	 * Loads the per-KPI abnormality records (ABNORMAL_TYPE='2') for a data set at
	 * one time stamp and maps them to KpiStateBean. A record is "not normal" when
	 * STATE is '1' or missing.
	 *
	 * @throws Exception on any data-access failure
	 */
	public List<KpiStateBean> selectKpiStateByDataSetIdAndTimeStamp(String dataSetId, Date timeStamp) throws Exception {
		String sql="SELECT KPI_ID, NE_ID, DATASET_ID, TIME_STAMP, ABNORMAL_DESC, STATE,VALUE,KPI_CN FROM QA_DATA_ABNORMAL where ABNORMAL_TYPE='2' and DATASET_ID='"+dataSetId+"' and TIME_STAMP="+TimeSwap.swap(timeStamp, TimeSwap.ORACLE_TYPE);
		List<Map> results=this.getFieldsBySQL(sql, new String[]{"KPI_ID","NE_ID","DATASET_ID","TIME_STAMP","ABNORMAL_DESC","STATE","VALUE","KPI_CN"});
		List<KpiStateBean> ret=new ArrayList<KpiStateBean>();
		for(Map abnormalMap:results){
			KpiStateBean stateBean=new KpiStateBean();
			stateBean.setNeId((String)abnormalMap.get("NE_ID"));
			stateBean.setKpiId((String)abnormalMap.get("KPI_ID"));
			stateBean.setKpiName((String)abnormalMap.get("KPI_CN"));
			// BUG FIX: the original called equals() on STATE *before* its null
			// check, so a null STATE threw a NullPointerException. Null-safe now.
			Object state=abnormalMap.get("STATE");
			stateBean.setNormal(!(state==null||"1".equals(state)));
			stateBean.setDesc((String)abnormalMap.get("ABNORMAL_DESC"));
			stateBean.setTimeStamp((Date)abnormalMap.get("TIME_STAMP"));
			// Guard the VALUE cast too; a null value previously caused an NPE.
			BigDecimal value=(BigDecimal)abnormalMap.get("VALUE");
			if(value!=null)
				stateBean.setValue(value.doubleValue());
			ret.add(stateBean);
		}
		return ret;
	}

	/**
	 * Builds, for every NE (network-element) type, a summary bean describing the
	 * most recent collection time point of its datasets: the type's dataset
	 * count, and how many datasets are abnormal / incomplete at that point.
	 *
	 * Steps:
	 *  1. find each dataset's latest raw-data time stamp since yesterday 00:00;
	 *  2. map each dataset to its NE type via qa_dataset_define / qa_netype;
	 *  3. per type, take the EARLIEST of its datasets' latest time stamps and,
	 *     at that time point, count datasets with a confirmed non-integrity
	 *     abnormality and datasets whose integrity ratio is below 100%.
	 *
	 * @return one bean per NE type, or null when no raw data or no dataset
	 *         definitions exist in the window
	 * @throws Exception propagated from the underlying query helpers
	 */
	public List<NeTypeDataSetStatiBean> selectAllDataSetStatiBean() throws Exception {
		try{
			// Window start: 00:00 of the previous day, formatted with BaseDao's datePattern.
			String startDate=DateUtil.DateToString(datePattern, CharliDate.getStartTimeOfDaysApartCurrentDay(new Date(), -1));
			//String endDate=DateUtil.DateToString("yyyy-MM-dd HH:00:00", CharliDate.getDateOfHoursApartCurrentDate(new Date(), -1));
			
			// Latest raw-data time stamp per dataset within the window.
			String sql=" select a.dataset_id,max(a.time_stamp) as time_stamp from qa_raw_data a where "+
                       "a.time_stamp>=to_date('"+startDate+"','yyyy-mm-dd hh24:mi:ss') group by a.dataset_id";
	
	        Map<Object,Map> maxTimeStampMap=this.getFieldMapWithKeyFildBySQL(sql, "dataset_id", new String[]{"time_stamp"});
	        if(maxTimeStampMap==null||maxTimeStampMap.isEmpty())// latest collection time point of each dataset
		        return null;
			
	        // Dataset -> (display name, NE type id, NE type name); (+) outer join keeps datasets without a type.
	        sql="select d.id as datasetid,d.NAME_CN as setName,n.id as typeid,n.cname as typeName from qa_dataset_define d,qa_netype n " +
			" where d.ne_type=n.id(+)";
			Map<Object,Map> dataSetTypeMap=this.getFieldMapWithKeyFildBySQL(sql, "datasetid", new String[]{"setName","typeid","typeName"});
			if(dataSetTypeMap==null||dataSetTypeMap.isEmpty())
				return null;
			
			Map<String,Date> minTypeTimeStamp=new HashMap<String,Date>();	// type id -> earliest of its datasets' latest time stamps
			Map<String,List<String>> typeDataSetMap=new HashMap<String,List<String>>();// type id -> datasets belonging to that NE type
			Map<String,String> typeInfo=new HashMap<String,String>();	// type id -> type display name
			
	        for(Object setId:maxTimeStampMap.keySet()){
	        	Map typeMap=dataSetTypeMap==null?null:dataSetTypeMap.get(setId);
	        	Date maxTimeStamp=(Date) maxTimeStampMap.get(setId).get("time_stamp");// dataset's latest collection time stamp
	        	String typeId=typeMap==null?null:(String)typeMap.get("typeid");// NE type id of the dataset (may be null, used as a key)
	            String typeName=typeMap==null?null:(String)typeMap.get("typeName");
	            typeInfo.put(typeId, typeName);
	            
                // Track the earliest "latest time stamp" among this type's datasets.
	        	Date minTimeStamp=minTypeTimeStamp.get(typeId);
	        	if(minTimeStamp==null||maxTimeStamp.getTime()<minTimeStamp.getTime())
	        		minTypeTimeStamp.put(typeId, maxTimeStamp);
	        	
	        	List<String> dataSetIds=typeDataSetMap.get(typeId);
	        	if(dataSetIds==null){
	        		dataSetIds=new ArrayList<String>();
	        		typeDataSetMap.put(typeId, dataSetIds);
	        	}
	        	
	        	dataSetIds.add((String)setId);
	        }
	        
	        List<NeTypeDataSetStatiBean> result=new ArrayList<NeTypeDataSetStatiBean>();
	        for(String typeId:minTypeTimeStamp.keySet()){
	        	List<String> dataSetIds=typeDataSetMap.get(typeId);
	        	String timeStamp=DateUtil.DateToString(datePattern, minTypeTimeStamp.get(typeId));
	        	
                // (Superseded approach) read per-dataset KPI values at the time point.
	        	/*
	        	StringBuffer sb=new StringBuffer("select a.dataset_id,a.ne_id,a.kpi_id," +
	        			"a.collector_date,a.is_normal from qa_raw_data a where a.time_stamp=to_date('"+timeStamp+"','yyyy-mm-dd hh24:mi:ss') ");
	        	this.mergeSQLbyFieldLst(sb, "a.dataset_id", dataSetIds, false);
	        	Map<Object,List<Map>> dataSet_Kpis=this.getFieldMapLstWithKeyFildBySQL(sb.toString(), "dataset_id", new String[]{"ne_id","kpi_id","collector_date","is_normal"});
	        	
	        	sb=new StringBuffer("select a.dataset_id,a.integrity_ratio from qa_data_abnormal a " +
	        			"where a.time_stamp=to_date('"+timeStamp+"','yyyy-mm-dd hh24:mi:ss') ");
	        	this.mergeSQLbyFieldLst(sb, "a.dataset_id", dataSetIds, false);
	        	Map<Object,Map> dataSet_Integers=this.getFieldMapWithKeyFildBySQL(sb.toString(), "dataset_id", new String[]{"integrity_ratio"});
	        	*/
	        	
	        	// Active (state='1') abnormality rows for this type's datasets at the chosen time point.
	        	StringBuffer sb=new StringBuffer("select a.dataset_id,a.abnormal_type,a.integrity_ratio from qa_data_abnormal a " +
	        			"where a.time_stamp=to_date('"+timeStamp+"','yyyy-mm-dd hh24:mi:ss') ");
	        	this.mergeSQLbyFieldLst(sb, "a.dataset_id", dataSetIds, false);
	        	sb.append(" and a.state='1'");
	        	Map<Object,List<Map>> dataSet_Kpi_Integers=this.getFieldMapLstWithKeyFildBySQL(sb.toString(), "dataset_id", new String[]{"integrity_ratio","abnormal_type"});
	        	
	        	NeTypeDataSetStatiBean neTypeDataSetStatiBean=new NeTypeDataSetStatiBean();
	        	result.add(neTypeDataSetStatiBean);
	        	
	        	neTypeDataSetStatiBean.setTypeId(typeId);
	        	neTypeDataSetStatiBean.setTypeName(typeInfo.get(typeId));
	        	neTypeDataSetStatiBean.setDataSetSum(dataSetIds.size());
	        	neTypeDataSetStatiBean.setTimeStamp(minTypeTimeStamp.get(typeId));
	        	
	        	if(dataSet_Kpi_Integers==null||dataSet_Kpi_Integers.isEmpty())
	        		continue;

	        	int unNormalSum=0;	// datasets with at least one non-integrity abnormality
	        	int unIntegerSum=0;	// datasets whose integrity ratio is below 100%
	        	for(List<Map> kpis:dataSet_Kpi_Integers.values()){
	        		boolean firstKpi=true;	// false once this dataset was counted as abnormal
        			boolean readInteger=false;	// true once an integrity (type '4') row was seen
        			
	        		for(Map kpi:kpis){
	        			if("4".equals(kpi.get("abnormal_type"))){
	        				readInteger=true;
	        				// A null ratio is treated as fully complete (100%).
	        				Double integrity_ratio=kpi.get("integrity_ratio")==null?100:((BigDecimal)kpi.get("integrity_ratio")).doubleValue();
	        				if(integrity_ratio<100)
	        					unIntegerSum++;
	        			}
	        			else{
	        				if(firstKpi){
	        					unNormalSum++;
	        					firstKpi=false;
	        				}
	        			}
	        			// Early exit once both categories were decided for this dataset.
	        			if(readInteger&&!firstKpi)
	        				break;
	        		}
	        	}
	        	
	        	neTypeDataSetStatiBean.setUnNormalSum(unNormalSum);
	        	neTypeDataSetStatiBean.setUnIntegerSum(unIntegerSum);
	        }
	        
	        return result;
		}
		catch(Exception e){
			throw e;
		}
		
	}

	/**
	 * Builds a time series of quality statistics for one NE type: for every
	 * abnormal time stamp found in qa_data_abnormal within the look-back
	 * window, how many of the type's datasets are abnormal / incomplete, as
	 * counts and as percentages of the type's total dataset count.
	 *
	 * @param neType   NE type id to report on
	 * @param span     number of hours or days to look back
	 * @param spanType "h" for an hourly window; any other value means days
	 * @return beans sorted chronologically by time stamp, or null when no
	 *         abnormal rows exist in the window
	 * @throws Exception propagated from the underlying query helpers
	 */
	public List<NeTypeDataSetStatiBean> selectDataSetStatiBeanByNeType(String neType, int span,String spanType) throws Exception {
		try{
			//int apart="w".equals(spanType)?7:1;
			// Window start: span hours back, or 00:00 of the day (span-1) days back.
			String startDate="h".equals(spanType)?CharliDate.getStrDayHourTimeWithHoursApartCurrentTime(new Date(), -span):
				DateUtil.DateToString(datePattern, CharliDate.getStartTimeOfDaysApartCurrentDay(new Date(), -span+1));
			
			// All abnormal rows for this NE type since the window start, keyed by time stamp.
			String sql=" select a.dataset_id,a.abnormal_type,a.integrity_ratio,a.time_stamp,a.state from qa_data_abnormal a "+
                       "where a.time_stamp>=to_date('"+startDate+"','yyyy-mm-dd hh24:mi:ss') and a.ne_type='"+neType+"'";

			Map<Object,List<Map>> time_dataSetKpis=this.getFieldMapLstWithKeyFildBySQL(sql, "time_stamp", new String[]{"dataset_id","abnormal_type","integrity_ratio","state"});
			if(time_dataSetKpis==null||time_dataSetKpis.isEmpty())
				return null;
			
			// Total datasets defined for this NE type (denominator of the ratios).
	        sql="select d.ne_type,count(*) as setsum from qa_dataset_define d " +
			    "where d.ne_type='"+neType+"' group by d.ne_type";
			Map<Object,Map> typeSum=this.getFieldMapWithKeyFildBySQL(sql, "ne_type", new String[]{"setsum"});
			int setSum=typeSum==null?0:typeSum.get(neType)==null?0:((BigDecimal)typeSum.get(neType).get("setsum")).intValue();
			
			List<Date> AllTimeStamps=new ArrayList<Date>();
			Map<Date,NeTypeDataSetStatiBean> neTypeDataSetStatiBeanCache=new HashMap<Date,NeTypeDataSetStatiBean>();
			List<NeTypeDataSetStatiBean> results=new ArrayList<NeTypeDataSetStatiBean>();
			for(Object timeStamp:time_dataSetKpis.keySet()){
				AllTimeStamps.add((Date)timeStamp);
				
				NeTypeDataSetStatiBean neTypeDataSetStatiBean=new NeTypeDataSetStatiBean();
				neTypeDataSetStatiBean.setTimeStamp((Date)timeStamp);
				neTypeDataSetStatiBeanCache.put((Date)timeStamp, neTypeDataSetStatiBean);
				
				//int unNormalSum=0,unIntegerSum=0;
				// Distinct dataset ids so each dataset is counted at most once per time stamp.
				Set<String> unIntegerDataSets=new HashSet<String>();
				Set<String> unNormalDataSets=new HashSet<String>();
				
				List<Map> abnormalRows=time_dataSetKpis.get(timeStamp);
				for(Map abnormalRow:abnormalRows){
					if("4".equals(abnormalRow.get("abnormal_type"))){
						// Integrity row: state '0' or a null ratio is treated as complete (100%).
						Double integrity_ratio="0".equals(abnormalRow.get("state"))||abnormalRow.get("integrity_ratio")==null?100:((BigDecimal)abnormalRow.get("integrity_ratio")).doubleValue();
						if(integrity_ratio<100)
							unIntegerDataSets.add((String)abnormalRow.get("dataset_id"));
					}
					else{
						// Any other row with state '1' marks the dataset abnormal.
						if("1".equals(abnormalRow.get("state"))&&!unNormalDataSets.contains((String)abnormalRow.get("dataset_id")))
							unNormalDataSets.add((String)abnormalRow.get("dataset_id"));
					}
				}
				
				neTypeDataSetStatiBean.setDataSetSum(setSum);
				neTypeDataSetStatiBean.setUnIntegerSum(unIntegerDataSets.size());
				neTypeDataSetStatiBean.setUnNormalSum(unNormalDataSets.size());
				
				// Percentages of the type's dataset total; format is BaseDao's shared formatter.
				Double unIntegerRatio=setSum==0?0:((double)unIntegerDataSets.size()/setSum)*100;
				neTypeDataSetStatiBean.setUnIntegerRatio(Double.parseDouble(format.format(unIntegerRatio)));
				
				Double unNormalRatio=setSum==0?0:((double)unNormalDataSets.size()/setSum)*100;
				neTypeDataSetStatiBean.setUnNormalRatio(Double.parseDouble(format.format(unNormalRatio)));
				
			}
			
			// Return the beans in chronological order.
			Collections.sort(AllTimeStamps);
			for(Date timeStamp:AllTimeStamps)
				results.add(neTypeDataSetStatiBeanCache.get(timeStamp));
			
			return results;
		}
		catch(Exception e){
			throw e;
		}
	}

	/**
	 * Generates the raw-data collection SQL for the given dataset definition:
	 * selects the time-stamp column, the public-NE column and every KPI column
	 * from the dataset's source table.
	 *
	 * NOTE(review): the original also computed the first start/end time strings
	 * and the public-NE id list, but the WHERE-clause appenders consuming them
	 * were commented out, so that dead computation has been removed — the
	 * generated SQL is unchanged.
	 *
	 * @param dataSetDefine dataset definition carrying table/column metadata
	 * @return the SELECT statement, or null when no KPIs are defined
	 */
	public String generateCollectSql(Atom_DatasetDefine dataSetDefine) {
		if(dataSetDefine.getKpiDefines()==null)
			return null;
		
		StringBuffer collectSql=new StringBuffer("select "+dataSetDefine.getTimeStampColumn()+","+dataSetDefine.getPublicNeColumn());
		
		// One select item per configured KPI column.
		List<String> params=new ArrayList<String>();
		for(Atom_KpiDefine kpi:dataSetDefine.getKpiDefines())
			params.add(kpi.getColumnName());
		
		this.mergeSelectParam(collectSql, params, false);
		collectSql.append(" from "+dataSetDefine.getTableName());
		
		return collectSql.toString();
	}

	/**
	 * Aggregates the day-granularity statistics stored in qa_dataset_daystati
	 * over [startDate, endDate) into one DataSetStatiBean per dataset: counters
	 * are summed, ratios averaged — the aggregation is pushed down into SQL
	 * rather than computed bean-by-bean.
	 *
	 * NOTE(review): a null/empty-result guard was added; the original called
	 * results.keySet() unconditionally and NPE'd when the query helper returned
	 * null (the other methods in this DAO all null-check that helper).
	 *
	 * @param startDate inclusive window start
	 * @param endDate   exclusive window end
	 * @return aggregated beans; an empty list when the window has no rows
	 * @throws Exception propagated from the underlying query helper
	 */
	public List<DataSetStatiBean> selectDataSetStatiBean(Date startDate, Date endDate) throws Exception {
		List<DataSetStatiBean> ret=new ArrayList<DataSetStatiBean>();
		String sql = "SELECT   datasetname,"+
		 "          datasetid,"+
		 "          netype,"+
		 "          Round(Avg(UNINTEGER_RATIO),2) UNINTEGER_RATIO,"+
		 "          Sum(DIMENSION_NUM) DIMENSION_NUM,"+
		 "          Sum(KPIABNORMAL_NUM) KPIABNORMAL_NUM,"+
		 "          Round(Avg(KPIABNORMAL_RATIO),2) KPIABNORMAL_RATIO,"+
		 "          Sum(INTEGER_SUM) INTEGER_SUM,"+
		 "          Sum(KPI_SUM) KPI_SUM,"+
		 "          Round(Avg(TIMEINTEGER_RATIO),2) TIMEINTEGER_RATIO"+
		 " FROM     qa_dataset_daystati"+
		 " WHERE    time_stamp >= to_date('"+DateUtil.DateToString(startDate)+"','yyyy-mm-dd hh24:mi:ss')"+
		 " and      time_stamp < to_date('"+DateUtil.DateToString(endDate)+"','yyyy-mm-dd hh24:mi:ss')"+	
		 " GROUP BY datasetname,"+
		 "          datasetid,"+
		 "          netype";

		String[] fields=new String[]{"DATASETNAME","NETYPE","UNINTEGER_RATIO","DIMENSION_NUM","KPIABNORMAL_NUM","KPIABNORMAL_RATIO","INTEGER_SUM","KPI_SUM","TIMEINTEGER_RATIO"};
		Map<Object,Map> results=this.getFieldMapWithKeyFildBySQL(sql, "datasetid", fields);
		if(results==null||results.isEmpty())	// no rows in the window — nothing to aggregate
			return ret;
		
		for(Object key:results.keySet()){
			String datasetid=(String)key;
			Map map=results.get(datasetid);
			String datasetName=map.get("DATASETNAME").toString();
			String neType=map.get("NETYPE").toString();
			String unIntegerRatio=map.get("UNINTEGER_RATIO").toString();
			String dimensionNum=map.get("DIMENSION_NUM").toString();
			String kpiAbnormalNum=map.get("KPIABNORMAL_NUM").toString();
			String kpiAbnormalRatio=map.get("KPIABNORMAL_RATIO").toString();
			String integerSum=map.get("INTEGER_SUM").toString();
			String kpiSum=map.get("KPI_SUM").toString();
			// TIMEINTEGER_RATIO may be null in legacy rows; default it to "0".
			String timeIntegerRatio=map.get("TIMEINTEGER_RATIO")==null?"0":map.get("TIMEINTEGER_RATIO").toString();
			
			DataSetStatiBean bean=new DataSetStatiBean();
			bean.setDataSetId(datasetid);
			bean.setDataSetName(datasetName);
			bean.setNeType(neType);
			bean.setUnIntegerRatio(unIntegerRatio);
			bean.setDimensionNum(dimensionNum);
			bean.setKpiAbnormalNum(kpiAbnormalNum);
			bean.setKpiAbnormalRatio(kpiAbnormalRatio);
			bean.setIntegerSum(integerSum);
			bean.setKpiSum(kpiSum);
			bean.setTimeIntegerRatio(timeIntegerRatio);
			ret.add(bean);
		}
		return ret;
	}
	
	/**
	 * Computes per-dataset quality statistics over [startDate, endDate):
	 * abnormal-KPI counts, missing-dimension totals, and the derived ratios
	 * against the theoretical totals implied by the dataset's collection
	 * granularity (time_unit) and expected NE count (base_integer_value).
	 *
	 * NOTE(review): null guards were added around kpiAbnormalMap and
	 * integerAbnormalMap — the query helper can return null when no rows match,
	 * and the original dereferenced both unconditionally (NPE).
	 *
	 * @param startDate inclusive window start
	 * @param endDate   exclusive window end
	 * @param dayCount  number of days in the window, used to scale the totals
	 * @return one bean per defined dataset, or null when no datasets are defined
	 * @throws Exception propagated from the underlying query helpers
	 */
	public List<DataSetStatiBean> selectDataSetStatiBean(Date startDate, Date endDate,int dayCount) throws Exception {
		String startTime=DateUtil.DateToString(datePattern, startDate);
		String endTime=DateUtil.DateToString(datePattern, endDate);
		// Count of confirmed (state='1') non-integrity abnormalities per dataset.
		String sql="select dataset_id,count(*) as num from qa_data_abnormal" +
				" where time_stamp>=to_date('"+startTime+"','yyyy-mm-dd hh24:mi:ss') " +
				"and time_stamp<to_date('"+endTime+"','yyyy-mm-dd hh24:mi:ss') " +
				"and state='1' and abnormal_type<>'4' group by dataset_id" ;
		
		Map<Object,Map> kpiAbnormalMap=getFieldMapWithKeyFildBySQL(sql, "dataset_id", new String[]{"num"});
		
		sql="select id,name_cn,ne_type,base_integer_value,time_unit from qa_dataset_define";
		Map<Object,Map> dataSetMap=getFieldMapWithKeyFildBySQL(sql, "id", 
				new String[]{"name_cn","ne_type","base_integer_value","time_unit"});
		if(dataSetMap==null||dataSetMap.isEmpty())
			return null;
		
		// KPI count per dataset.
		sql="select dataset_id,count(*) as num from qa_kpi_define group by dataset_id";
		Map<Object,Map> kpiNumMap=getFieldMapWithKeyFildBySQL(sql, "dataset_id", 
				new String[]{"num"});
		
		// Integrity ('4') abnormality rows per dataset within the window.
		sql="select dataset_id,state,dimension_num,integrity_ratio from qa_data_abnormal" +
		   " where time_stamp>=to_date('"+startTime+"','yyyy-mm-dd hh24:mi:ss') " +
		   "and time_stamp<to_date('"+endTime+"','yyyy-mm-dd hh24:mi:ss') " +
		   " and abnormal_type='4'" ;
		Map<Object,List<Map>> integerAbnormalMap=getFieldMapLstWithKeyFildBySQL(sql, "dataset_id", 
				new String[]{"state","dimension_num","integrity_ratio"});
		
		List<DataSetStatiBean> dataSetStatiBeans=new ArrayList<DataSetStatiBean>();
		for(Object setId:dataSetMap.keySet()){
			DataSetStatiBean dataSetStatiBean=new DataSetStatiBean();
			dataSetStatiBeans.add(dataSetStatiBean);
			
			Map dataSetInfo=dataSetMap.get(setId);
			
			// Expected NE count per collection time point; 0 when undefined.
			int baseValue=dataSetInfo.get("base_integer_value")==null?0:((BigDecimal)dataSetInfo.get("base_integer_value")).intValue();
			dataSetStatiBean.setDataSetId((String)setId);
			dataSetStatiBean.setDataSetName((String)dataSetInfo.get("name_cn"));
			dataSetStatiBean.setNeType((String)dataSetInfo.get("ne_type"));
			
			// Accumulate the missing-dimension count across the dataset's integrity rows:
			// confirmed incomplete rows contribute their recorded dimension_num, all
			// others a full baseValue worth of dimensions.
			int dimensionNum=0;
			List<Map> integerAbnormals=integerAbnormalMap==null?null:integerAbnormalMap.get(setId);
			if(integerAbnormals!=null){
				for(Map map:integerAbnormals){
					double integrityRatio=((BigDecimal)map.get("integrity_ratio")).doubleValue();
					dimensionNum+=integrityRatio<100&&"1".equals(map.get("state"))?
							map.get("dimension_num")==null?0:((BigDecimal)map.get("dimension_num")).intValue():baseValue;
				}
			}
			
			// time_unit appears to be the collection period in seconds
			// (3600*24/timeUnit points per day) — TODO confirm against schema docs.
			Integer timeUnit=dataSetInfo.get("time_unit")==null?0:
				Integer.parseInt((String)dataSetInfo.get("time_unit"));
			Integer dimensionSum=timeUnit==0?0:baseValue*(3600*24/timeUnit)*dayCount;
			
			Integer kpiNum=kpiNumMap==null||kpiNumMap.get(setId)==null?0:((BigDecimal)kpiNumMap.get(setId).get("num")).intValue();
			Integer kpiSum=timeUnit==0?0:kpiNum*baseValue*(3600*24/timeUnit)*dayCount;
			
			Map numMap=kpiAbnormalMap==null?null:kpiAbnormalMap.get(setId);
			Integer kpiAbnormalNum=numMap==null?0:((BigDecimal)numMap.get("num")).intValue();
			dataSetStatiBean.setKpiAbnormalNum(kpiAbnormalNum.toString());
			dataSetStatiBean.setDimensionNum(String.valueOf(dimensionNum));
			dataSetStatiBean.setKpiSum(kpiSum.toString());
			dataSetStatiBean.setIntegerSum(String.valueOf(dimensionSum));
			
			// Ratios are capped at 100.00 and formatted with the shared ratioFormat.
			double unIntegerRatio=dimensionSum==0?0:(double)((double)dimensionNum/(double)dimensionSum*100);
			dataSetStatiBean.setUnIntegerRatio(unIntegerRatio>100?"100.00":ratioFormat.format(unIntegerRatio));
			double kpiAbnormalRatio=kpiSum==0?0:(double)((double)kpiAbnormalNum/(double)kpiSum*100);
			dataSetStatiBean.setKpiAbnormalRatio(kpiAbnormalRatio>100?"100.00":ratioFormat.format(kpiAbnormalRatio));
		}
		
		return dataSetStatiBeans;
	}
	
	/**
	 * Builds day-granularity quality statistics for a single dataset over
	 * [startDate, endDate): one bean per day that has integrity ('4') abnormal
	 * rows, carrying abnormal-KPI counts, missing-dimension totals and the
	 * ratios against the dataset's theoretical daily totals.
	 *
	 * @param datasetId id of the dataset to report on
	 * @param startDate inclusive window start
	 * @param endDate   exclusive window end
	 * @return beans sorted by day, or null when the dataset is not defined
	 * @throws Exception propagated from the underlying query helpers
	 */
	public List<DataSetStatiBean> selectDataSetStatiBeanWithDayGranu(String datasetId,Date startDate, Date endDate) throws Exception {
		try{
			String startTime=DateUtil.DateToString(datePattern, startDate);
			String endTime=DateUtil.DateToString(datePattern, endDate);
			// Per-day (TRUNC to 'dd') count of confirmed non-integrity abnormalities.
			String sql="select count(*) as num,TRUNC(time_stamp,'dd') as time_stamp from qa_data_abnormal" +
					" where dataset_id='"+datasetId+"' and time_stamp>=to_date('"+startTime+"','yyyy-mm-dd hh24:mi:ss') " +
					"and time_stamp<to_date('"+endTime+"','yyyy-mm-dd hh24:mi:ss') " +
					"and state='1' and abnormal_type<>'4' group by (dataset_id,TRUNC(time_stamp,'dd'))" ;
			
			Map<Object,Map> kpiAbnormalNumMap=getFieldMapWithKeyFildBySQL(sql, "time_stamp", new String[]{"num"});
			
			sql="select id,name_cn,ne_type,base_integer_value,time_unit from qa_dataset_define where id='"+datasetId+"'";
			Map<Object,Map> dataSetMap=getFieldMapWithKeyFildBySQL(sql, "id", 
					new String[]{"name_cn","ne_type","base_integer_value","time_unit"});
			if(dataSetMap==null||dataSetMap.isEmpty())
				return null;
			
			// KPI count of this dataset.
			sql="select dataset_id,count(*) as num from qa_kpi_define " +
					"where dataset_id='"+datasetId+"' group by dataset_id";
			Map<Object,Map> kpiNumMap=getFieldMapWithKeyFildBySQL(sql, "dataset_id", 
					new String[]{"num"});
			
			// Integrity ('4') abnormality rows within the window, tagged with their day.
			sql="select state,dimension_num,integrity_ratio,TRUNC(time_stamp,'dd') as time_stamp" +
			   " from qa_data_abnormal" +
			   " where dataset_id='"+datasetId+"' and time_stamp>=to_date('"+startTime+"','yyyy-mm-dd hh24:mi:ss') " +
			   "and time_stamp<to_date('"+endTime+"','yyyy-mm-dd hh24:mi:ss') " +
			   " and abnormal_type='4'" ;
			List<Map> integerAbnormalList=this.getFieldsBySQL(sql, 
					new String[]{"state","dimension_num","integrity_ratio","time_stamp"});
			
			List<DataSetStatiBean> dataSetStatiBeans=new ArrayList<DataSetStatiBean>();
			Map<Date,DataSetStatiBean> statiBeanMap=new HashMap<Date,DataSetStatiBean>();	// day -> bean
			Map<Date,Integer> dimensionNumMap=new HashMap<Date,Integer>();	// day -> accumulated dimension count
			
			Map dataSetInfo=dataSetMap.get(datasetId);
			// Expected NE count per collection time point; 0 when undefined.
			int baseValue=dataSetInfo.get("base_integer_value")==null?0:((BigDecimal)dataSetInfo.get("base_integer_value")).intValue();
			String dataSetName=(String)dataSetInfo.get("name_cn");
			String neType=(String)dataSetInfo.get("ne_type");
			// time_unit appears to be the collection period in seconds
			// (3600*24/timeUnit points per day) — TODO confirm against schema docs.
			Integer timeUnit=dataSetInfo.get("time_unit")==null?0:
				Integer.parseInt((String)dataSetInfo.get("time_unit"));
			Integer kpiNum=kpiNumMap==null||kpiNumMap.get(datasetId)==null?0:((BigDecimal)kpiNumMap.get(datasetId).get("num")).intValue();
			Integer kpiSum=timeUnit==0?0:kpiNum*baseValue*(3600*24/timeUnit);	// theoretical KPI values per day
			Integer dimensionSum=timeUnit==0?0:baseValue*(3600*24/timeUnit);	// theoretical dimensions per day
			
			for(Map map:integerAbnormalList){
				Date timeStamp=(Date)map.get("time_stamp");
				DataSetStatiBean dataSetStatiBean=statiBeanMap.get(timeStamp);
				if(dataSetStatiBean==null){
					// First integrity row of this day: create the bean and fill the KPI stats.
					dataSetStatiBean=new DataSetStatiBean();
					statiBeanMap.put(timeStamp, dataSetStatiBean);
					
					dataSetStatiBean.setDataSetId(datasetId);
					dataSetStatiBean.setTimeStamp(timeStamp);
					dataSetStatiBean.setDataSetName(dataSetName);
					dataSetStatiBean.setNeType(neType);
					dataSetStatiBean.setIntegerSum(dimensionSum.toString());	// expected number of dimensions for the day
					
					Map kpiMap=kpiAbnormalNumMap==null?null:kpiAbnormalNumMap.get(timeStamp);
					Integer kpiAbnormalNum=kpiMap==null||kpiMap.get("num")==null?0:((BigDecimal)kpiMap.get("num")).intValue();
					dataSetStatiBean.setKpiAbnormalNum(kpiAbnormalNum.toString());
					dataSetStatiBean.setKpiSum(kpiSum.toString());
					double kpiAbnormalRatio=kpiSum==0?0:(double)((double)kpiAbnormalNum/(double)kpiSum*100);
					dataSetStatiBean.setKpiAbnormalRatio(kpiAbnormalRatio>100?"100.00":ratioFormat.format(kpiAbnormalRatio));
				}
				
				// Accumulate the day's dimension count: confirmed incomplete rows contribute
				// their recorded dimension_num, all other rows a full baseValue.
				Integer dimensionNum=dimensionNumMap.get(timeStamp)==null?0:dimensionNumMap.get(timeStamp);
				double integrityRatio=((BigDecimal)map.get("integrity_ratio")).doubleValue();
				Integer timeDimensionNum=map.get("dimension_num")==null?0:((BigDecimal)map.get("dimension_num")).intValue();
				dimensionNum+=integrityRatio<100&&"1".equals(map.get("state"))?timeDimensionNum:baseValue;
				dimensionNumMap.put(timeStamp, dimensionNum);
			}
			
			// Emit the beans in chronological (day) order.
			List<Date> timeStampLst=new ArrayList<Date>();
			timeStampLst.addAll(statiBeanMap.keySet());
			Collections.sort(timeStampLst);
			
			for(Date time:timeStampLst){
				DataSetStatiBean dataSetStatiBean=statiBeanMap.get(time);
				
				Integer dimensionNum=dimensionNumMap.get(dataSetStatiBean.getTimeStamp());	// total dimension count for the day
				double unIntegerRatio=dimensionSum==0?0:(double)((double)dimensionNum/(double)dimensionSum*100);
				dataSetStatiBean.setUnIntegerRatio(unIntegerRatio>100?"100.00":ratioFormat.format(unIntegerRatio));
				dataSetStatiBean.setDimensionNum(dimensionNum.toString());
				dataSetStatiBeans.add(dataSetStatiBean);
			}
			
			return dataSetStatiBeans;
			
		}
		catch(Exception e){
			throw e;
		}
	}
	
	/**
	 * Loads the pre-aggregated day-granularity dataset statistics whose time
	 * stamp falls in [startDate, endDate), ordered chronologically.
	 *
	 * @param startDate inclusive window start
	 * @param endDate   exclusive window end
	 * @return matching beans sorted by time stamp
	 * @throws Exception propagated from the HQL query helper
	 */
	public List<DataSetStatiBean> selectAllDataSetStatiBeanWithDayGranuByTable(Date startDate, Date endDate) throws Exception {
		final String hql="from DataSetStatiBean t where t.timeStamp>=? and t.timeStamp<? order by t.timeStamp";
		return this.getObjects(hql, new Object[]{startDate,endDate});
	}
	
	/**
	 * Deletes the monthly dataset statistics recorded for the given year and
	 * month from qa_dataset_monthstatis.
	 *
	 * @param year  calendar year of the rows to remove
	 * @param month calendar month of the rows to remove
	 * @throws Exception propagated from the SQL update helper
	 */
	public void removeDataSetMonthStatisByParam(int year,int month) throws Exception{
		StringBuilder sql=new StringBuilder("delete from qa_dataset_monthstatis where ");
		sql.append("year=").append(year).append(" and month=").append(month);
		this.executeSqlUpdate(sql.toString(), null);
	}
	
	/**
	 * Persists a batch of monthly dataset statistics through the Hibernate
	 * session.
	 *
	 * @param dataSetMonthStatises entities to save
	 * @throws Exception propagated from the persistence helper
	 */
	public void saveDataSetMonthStatis(List<Atom_DataSetMonthStatis> dataSetMonthStatises) throws Exception{
		this.saveObjects(dataSetMonthStatises);
	}

	/**
	 * Deletes the day-granularity statistics of one dataset whose time stamp
	 * falls in [startDate, endDate) from qa_dataset_daystati.
	 *
	 * @param dataSetId id of the dataset whose rows are removed
	 * @param startDate inclusive window start
	 * @param endDate   exclusive window end
	 * @throws Exception propagated from the SQL update helper
	 */
	public void removeDataSetStatisByParam(String dataSetId,Date startDate,Date endDate) throws Exception{
		String from=DateUtil.DateToString(datePattern, startDate);
		String to=DateUtil.DateToString(datePattern, endDate);
		String deleteSql="delete from qa_dataset_daystati where " +
				"datasetid='"+dataSetId+"' and time_stamp>=to_date('"+from+"','yyyy-mm-dd hh24:mi:ss') " +
				"and time_stamp<to_date('"+to+"','yyyy-mm-dd hh24:mi:ss')";
		executeSqlUpdate(deleteSql, null);
	}
	
	/**
	 * Persists a batch of day-granularity dataset statistics through the
	 * Hibernate session.
	 *
	 * @param dataSetStatises beans to save
	 * @throws Exception propagated from the persistence helper
	 */
	public void saveDataSetStatis(List<DataSetStatiBean> dataSetStatises) throws Exception{
		this.saveObjects(dataSetStatises);
	}
	
	/**
	 * Bulk-inserts day-granularity dataset statistics into qa_dataset_daystati
	 * using one JDBC batch on the Hibernate session's connection. Commits on
	 * success, rolls back on failure, and always closes the statement and
	 * releases the session.
	 *
	 * NOTE(review): time_stamp is bound via java.sql.Date, so any time-of-day
	 * component of the bean's time stamp is dropped — confirm this is intended.
	 *
	 * @param dataSetStatises beans to persist; a null or empty list is a no-op
	 * @throws Exception the original failure, after a rollback attempt
	 */
	public void saveDataSetStatisByJdbc(List<DataSetStatiBean> dataSetStatises) throws Exception{
		if(dataSetStatises==null||dataSetStatises.isEmpty())
			return;
		
		Session session=null;
		PreparedStatement psta=null;
		UUIDHexGenerator uuid=new UUIDHexGenerator();	// generates the primary-key ids
		
		try{
			String sql="insert into qa_dataset_daystati(id,datasetid,datasetname," +
					"integer_sum,kpi_sum,uninteger_ratio,netype,kpiabnormal_ratio," +
					"kpiabnormal_num,dimension_num,time_stamp) values(" +
					"?,?,?,?,?,?,?,?,?,?,?)";
			
			session=getSession();
			psta=session.connection().prepareStatement(sql);
			System.out.println(new Date()+" psta initialized");
			
			// One batch entry per bean; parameter order matches the column list above.
			for(DataSetStatiBean dataSetStatiBean:dataSetStatises){
				psta.setString(1, uuid.generate().toString());
				psta.setString(2, dataSetStatiBean.getDataSetId());
				psta.setString(3, dataSetStatiBean.getDataSetName());
				psta.setString(4, dataSetStatiBean.getIntegerSum());
				psta.setString(5, dataSetStatiBean.getKpiSum());
				psta.setString(6, dataSetStatiBean.getUnIntegerRatio());
				psta.setString(7, dataSetStatiBean.getNeType());
				psta.setString(8, dataSetStatiBean.getKpiAbnormalRatio());
				psta.setString(9, dataSetStatiBean.getKpiAbnormalNum());
				psta.setString(10,dataSetStatiBean.getDimensionNum());
				psta.setDate(11,new java.sql.Date(dataSetStatiBean.getTimeStamp().getTime()));
				
				psta.addBatch();
			}

			psta.executeBatch();
			session.connection().commit();
		}
		catch(Exception e){
			e.printStackTrace();
			if(session!=null)
			    session.connection().rollback();
			
			throw e;
		}
		finally{
			doClose(null, psta, null);
			releaseSession(session);
		}
	}

	/**
	 * Returns the ids of all datasets defined in qa_dataset_define.
	 *
	 * NOTE(review): removed the stdout debug print on the error path (the
	 * exception is rethrown unchanged) and added a null guard on the query
	 * helper's result.
	 *
	 * @return list of dataset ids; empty (never null) when none are defined
	 * @throws Exception propagated from the underlying query helper
	 */
	public List<String> selectAllDatasetId() throws Exception {
		String sql="select distinct(id) as datasetid from qa_dataset_define";
		List<String> ret=new ArrayList<String>();
		List<Map> result=this.getFieldsBySQL(sql, new String[]{"datasetid"});
		if(result==null)	// defensive: treat a null helper result as "no datasets"
			return ret;
		for(Map map:result)
		{
			ret.add((String)map.get("datasetid"));
		}
		return ret;
	}
	
	/**
	 * Collects raw data for one dataset at the given time points from the
	 * source-side table, persists it into QA_RAW_DATA, then derives and
	 * persists the corresponding abnormality records.
	 *
	 * NOTE(review): removed the unused local {@code integerNum} (dead
	 * computation of getBaseIntegerValue) and the no-op catch/rethrow.
	 *
	 * @param dataset_id id of the dataset to collect
	 * @param dates      collection time points to fetch
	 * @return number of dimension rows inserted into QA_RAW_DATA
	 * @throws Exception propagated from the collection helpers
	 */
	public int collectData(String dataset_id, List<Date> dates) throws Exception
	{
		// 1. Load the dataset definition (qa_dataset_define) and its KPI definitions.
		Atom_DatasetDefine dataset_define=this.findById(dataset_id, true);
		List<Atom_KpiDefine> kpis_define=dataset_define.getKpiDefines();
		
		// 2. Pull raw rows from the source database into QA_RAW_DATA. Generated SQL shape:
		// select EQUI_NAME,STAT_TIME,... from <table> where STAT_TIME in (to_date(...),to_date(...))
		int ret=insertRawData(dataset_id, dates, dataset_define, kpis_define);
		
		// 3. Compute and store the abnormality records for the same time points.
		insertDataAbnormal(dataset_id, dates, dataset_define);
		return ret;
	}

	/**
	 * 从数据网管侧的报表对应的数据表中查找出记录，入到数据质量端的QA_RAW_DATA表中
	 * */
	/**
	 * Pulls raw records for the given time points from the source-side table
	 * (via DataSetKit) and bulk-inserts them into the quality-side QA_RAW_DATA
	 * table.
	 *
	 * NOTE(review): removed the unused local ResultSet, and guarded the final
	 * division so a missing/empty KPI list no longer throws
	 * ArithmeticException/NullPointerException.
	 *
	 * @return number of dimension rows inserted (total rows / KPI count)
	 */
	private int insertRawData(String dataset_id, List<Date> dates,
			Atom_DatasetDefine dataset_define, List<Atom_KpiDefine> kpis_define)
			throws Exception {
		String allSql=DataSetKit.generateAllDataSql(dataset_define, dates);
		List<RawData> saved;
		try {
			saved=DataSetKit.queryRawDataFromSource(allSql, dataset_define, kpis_define);
		} catch ( Exception e) {
			e.printStackTrace();	// surface the stack trace immediately
			throw e;
		}
		this.saveRawDataByJdbc(saved);
		return kpis_define==null||kpis_define.isEmpty()?0:saved.size()/kpis_define.size();
	}
    @Deprecated
	private ResultSet executeSqlQueryFromSource(String sql,Atom_DatasetDefine dataset_define) throws Exception
	{
		Connection conn=null;
		Statement stmt=null;
		ResultSet rs=null;
		try {
			conn=DataSetKit.getTempConnFromSource(dataset_define);
			stmt=conn.createStatement();
			stmt.executeQuery(sql);
			return rs;
		} catch (Exception e) {
			System.out.println("出错的sql:"+sql);
			System.out.println(dataset_define.getDbType()+" "+dataset_define.getHost());
			e.printStackTrace();
			throw e;
		}finally
		{
			if(stmt!=null)
			{
				stmt.close();
			}
		}
	}
	
	/**
	 * Computes abnormality records for the dataset at the given time points and
	 * bulk-inserts them into QA_DATA_ABNORMAL.
	 *
	 * Only time points that actually produced rows in QA_RAW_DATA are evaluated:
	 * if the KPI configuration is wrong, raw-data insertion yields nothing, and
	 * generating abnormal rows anyway would make the daily report show records
	 * while the history is missing them.
	 *
	 * NOTE(review): removed the unused local {@code integerNum}; besides being
	 * dead code it unboxed getBaseIntegerValue() and would NPE when that value
	 * is null (other call sites null-check it).
	 */
	private void insertDataAbnormal(String dataset_id, List<Date> dates,
			Atom_DatasetDefine dataset_define)
			throws Exception {
		Collections.sort(dates);
		// Restrict to the time points that really have raw data.
		String hql="select distinct(timeStamp) from RawData where datasetId=? and timeStamp>=? and timeStamp<=?";
		List<Date> filterDates=this.getObjects(hql, new Object[]{dataset_id,dates.get(0),dates.get(dates.size()-1)});
		if(filterDates.isEmpty())
			return;
		String calSql=DataSetKit.generateCalSql(dataset_define, filterDates);
		List<DataAbnormal> dataAbnormals;
		try{
			dataAbnormals=DataSetKit.queryDataAbnormal(calSql, dataset_define, dataset_id);
		}catch(Exception e)
		{
			e.printStackTrace();	// surface the stack trace immediately
			throw e;
		}
		this.saveAbnormalDataByJdbc(dataAbnormals);
	}
	
	/**
	 * Bulk-inserts raw KPI rows into QA_RAW_DATA with a single JDBC batch on
	 * the Hibernate session's connection. Commits on success, rolls back on
	 * failure, and always closes the statement and releases the session.
	 *
	 * NOTE(review): removed the unused UUIDHexGenerator (QA_RAW_DATA takes no
	 * generated id here), and guarded the rollback so a rollback failure can
	 * no longer mask the original exception.
	 *
	 * @param rawDatas rows to persist; a null or empty list is a no-op
	 * @throws Exception the original failure, after a rollback attempt
	 */
	public void saveRawDataByJdbc(List<RawData> rawDatas) throws Exception{
		if(rawDatas==null||rawDatas.isEmpty())
			return;
		
		Session session=null;
		PreparedStatement psta=null;
		
		try{
			String sql="insert into QA_RAW_DATA(TIME_STAMP,DATASET_ID,NE_ID,KPI_ID,COLLECTOR_DATE,VALUE,BASELINE_UP,BASELINE_DOWN,VALIDATION_TYPE,IS_NORMAL)" +
					"values(?,?,?,?,?,?,?,?,?,?)";
			
			session=getSession();
			psta=session.connection().prepareStatement(sql);
			System.out.println(new Date()+" psta initialized");
			
			// One batch entry per row; parameter order matches the column list above.
			for(RawData rawData:rawDatas){
				psta.setTimestamp(1, new Timestamp(rawData.getTimeStamp().getTime()));
				psta.setString(2, rawData.getDatasetId());
				psta.setString(3, rawData.getNeId());
				psta.setString(4, rawData.getKpiId());
				psta.setLong(5,	rawData.getCollectorDate());
				psta.setDouble(6, rawData.getValue());
				// Null baselines are stored as 0.
				psta.setDouble(7, rawData.getBaselineUp()==null?0:rawData.getBaselineUp());
				psta.setDouble(8, rawData.getBaselineDown()==null?0:rawData.getBaselineDown());
				psta.setString(9, rawData.getValidationType());
				psta.setString(10, rawData.getIsNormal());
				psta.addBatch();
			}

			psta.executeBatch();
			session.connection().commit();
		}
		catch(Exception e){
			e.printStackTrace();
			if(session!=null){
				try{
					session.connection().rollback();
				}
				catch(Exception rollbackFailure){
					rollbackFailure.printStackTrace();	// keep the original exception as the one thrown
				}
			}
			throw e;
		}
		finally{
			doClose(null, psta, null);
			releaseSession(session);
		}
	}
	
	/**
	 * Bulk-inserts abnormality records into QA_DATA_ABNORMAL with a single JDBC
	 * batch on the Hibernate session's connection. Commits on success, rolls
	 * back on failure, and always closes the statement and releases the session.
	 *
	 * NOTE(review): removed the unused UUIDHexGenerator (no generated id column
	 * in this insert), and guarded the rollback so a rollback failure can no
	 * longer mask the original exception.
	 *
	 * @param abnormalDatas records to persist; a null or empty list is a no-op
	 * @throws Exception the original failure, after a rollback attempt
	 */
	public void saveAbnormalDataByJdbc(List<DataAbnormal> abnormalDatas) throws Exception{
		if(abnormalDatas==null||abnormalDatas.isEmpty())
			return;
		
		Session session=null;
		PreparedStatement psta=null;
		
		try{
			String sql="insert into QA_DATA_ABNORMAL(DATASET_ID,DATA_TIME,TIME_STAMP,DIMENSION_NUM,INTEGRITY_RATIO,ABNORMAL_TYPE,NE_TYPE,STATE,ABNORMAL_DESC)"+
                       "values(?,?,?,?,?,?,?,?,?)";
			
			session=getSession();
			psta=session.connection().prepareStatement(sql);
			System.out.println(new Date()+" psta initialized");
			
			// One batch entry per record; parameter order matches the column list above.
			for(DataAbnormal abnormalData:abnormalDatas){
				psta.setString(1, abnormalData.getDatasetId());
				psta.setTimestamp(2, new java.sql.Timestamp(abnormalData.getDataTime().getTime()));
				psta.setTimestamp(3, new java.sql.Timestamp(abnormalData.getTimeStamp().getTime()));
				psta.setDouble(4, abnormalData.getDimensionNum());
				psta.setDouble(5, abnormalData.getIntegrityRatio());
				psta.setString(6, abnormalData.getAbnormalType());
				psta.setString(7, abnormalData.getNeType());
				psta.setString(8, abnormalData.getState());
				psta.setString(9, abnormalData.getAbnormalDesc());
				psta.addBatch();
			}

			psta.executeBatch();
			session.connection().commit();
		}
		catch(Exception e){
			e.printStackTrace();
			if(session!=null){
				try{
					session.connection().rollback();
				}
				catch(Exception rollbackFailure){
					rollbackFailure.printStackTrace();	// keep the original exception as the one thrown
				}
			}
			throw e;
		}
		finally{
			doClose(null, psta, null);
			releaseSession(session);
		}
	}

	/**
	 * Fetches the distinct dimension values present in the dataset's source
	 * table at the given time point.
	 *
	 * @param dataset_id id of the dataset whose source table is queried
	 * @param date       time point to inspect
	 * @return the dimension values found at that time point
	 * @throws Exception propagated from the source-side query helper
	 */
	public List<String> selectDimensionsFromSource(String dataset_id, Date date)
			throws Exception {
		Atom_DatasetDefine define=this.findById(dataset_id, true);
		String query=DataSetKit.generateDimensionsQuery(define, date);
		List<Map> rows=this.getFieldsBySQLFromSource(query, new String[]{"dimensions"},define);
		List<String> dimensions=new ArrayList<String>();
		for(Map row:rows){
			dimensions.add((String)row.get("dimensions"));
		}
		return dimensions;
	}
	
	/**
	 * Counts how many dimensions (NEs) have raw data for the dataset at the
	 * given time point: total QA_RAW_DATA rows divided by the dataset's KPI
	 * count. Best-effort: any failure is logged and 0 is returned.
	 *
	 * @param dataset_id id of the dataset to inspect
	 * @param time       time point to inspect
	 * @return dimension count, or 0 on error
	 */
	public int currentDimensionNum(String dataset_id,Date time)
	{
		try {
			Atom_DatasetDefine define=this.findById(dataset_id, true);
			int kpiCount=define.getKpiDefines().size();
			String countSql="select count(*) num from QA_RAW_DATA where dataset_id='"+dataset_id+"' and time_stamp="+TimeSwap.swap(time, TimeSwap.ORACLE_TYPE);
			List<Map> rows=this.getFieldsBySQL(countSql, new String[]{"num"});
			int rowCount=((BigDecimal)(rows.get(0).get("num"))).intValue();
			return rowCount/kpiCount;
		} catch (Exception e) {
			e.printStackTrace();
			return 0;
		}
	}
	
	/**
	 * Lists the distinct NE ids that have raw data for the dataset at the given
	 * time point. Best-effort: any failure is logged and null is returned.
	 *
	 * @param dataset_id id of the dataset to inspect
	 * @param time       time point to inspect
	 * @return distinct NE ids, or null on error
	 */
	public List<String> currentDimensionList(String dataset_id,Date time)
	{
		final String hql="select distinct(neId) from RawData where datasetId=? and timeStamp=?";
		try{
			return this.getObjects(hql, new Object[]{dataset_id,time});
		}catch(Exception e){
			e.printStackTrace();
			return null;
		}
	}
}
