package com.red.wood.module.task;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.red.wood.common.util.CommonUtils;
import com.red.wood.common.util.DateUtil;
import com.red.wood.common.util.JdbcUtil;
import com.red.wood.common.util.SqlParser;
import com.red.wood.common.util.SysProps;
import com.red.wood.module.model.SqlInfo;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.DomDriver;

/**
 * Backup variant of the daily source-statistics batch task.
 *
 * <p>For one target day it collects, per data source, the record count
 * (IDC/DNS/IRCS/CDN from the MySQL metadata tables, ICP/备案 from Oracle)
 * and the storage size, then logs the aggregated result. Persisting the
 * summary into {@code t_st_source_statistics} is intentionally disabled in
 * this backup class (see the NOTE in {@link #main(String[])}).
 */
public class StatisticsTaskBak {
	
	private static final Log log = LogFactory.getLog(StatisticsTaskBak.class);
	
	/**
	 * Entry point.
	 *
	 * @param args optional; {@code args[0]} is the target day in
	 *             {@code yyyyMMdd} form and defaults to yesterday when
	 *             absent. Exits with status 1 when the argument is not a
	 *             valid date.
	 */
	public static void main(String[] args) {
		Connection isdmsOracleConn = null;
		Connection isdmsMysqlConn = null;
		SimpleDateFormat yyyyMMdd = new SimpleDateFormat("yyyyMMdd");
		try {
			String day;
			if(args.length > 0) {
				day = args[0];
				if(!isValidDate(day)) {
					// Bug fix: an invalid argument previously exited with
					// status 0 (success); report the failure to the caller.
					System.exit(1);
				}
			}else {
				day = yyyyMMdd.format(DateUtil.getYesterday());
			}
			log.info("=========数据汇总开始==========");
			log.info("入库时间："+day);
			SqlInfo sqlInfo = getSqlInfo();
			
			isdmsMysqlConn = JdbcUtil.getConnection("isdmsMysql");
			List<Map<String,Object>> resList = getList(isdmsMysqlConn,sqlInfo.getSqlBasicResSt());
			log.info("基础资源数量："+resList);
			
			// Per-source record counts from the MySQL metadata tables.
			String idcResCount = getCount(isdmsMysqlConn,getSql("IDC"));
			String dnsResCount = getCount(isdmsMysqlConn,getSql("DNS"));
			String ircsResCount = getCount(isdmsMysqlConn,getSql("IRCS"));
			String cdnResCount = getCount(isdmsMysqlConn,getSql("CDN"));
			
			log.info("IDC资源数据量："+idcResCount);
			log.info("DNS资源数据量："+dnsResCount);
			log.info("IRCS资源数据量："+ircsResCount);
			log.info("CDN资源数据量："+cdnResCount);
			
			// The ICP (备案) count lives in Oracle, not MySQL.
			isdmsOracleConn = JdbcUtil.getConnection("isdmsOracle");
			String icpResCount = getCount(isdmsOracleConn,sqlInfo.getSqlIcpCount());
			log.info("备案资源数据量："+icpResCount);
			
			// getSizeIcpSql filters by the yyyy-MM-dd form of the day.
			SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
			String query_day = sdf.format(yyyyMMdd.parse(day));
			
			// Single union-all query fetching the storage size of every source.
			StringBuilder sql = new StringBuilder();
			sql.append(getSizeSql("IDC","idc","t_ods_idc_%",day,query_day));
			sql.append(" union all ");
			sql.append(getSizeSql("DNS","dns","t_ods_dns_%",day,query_day));
			sql.append(" union all ");
			sql.append(getSizeSql("IRCS","ircs","t_ods_ircs_%",day,query_day));
			sql.append(" union all ");
			sql.append(getSizeSql("CDN","cdn","t_ods_cdn_%",day,query_day));
			sql.append(" union all ");
			sql.append(getSizeIcpSql(day,query_day));
			List<Map<String,Object>> list = getList(isdmsMysqlConn,sql.toString());
			log.info("存储量信息："+list);
			
			// Attach each source's record count to its size row and default a
			// missing file_size to 0.
			for(Map<String,Object> map : list) {
				String source_type = CommonUtils.valueOf(map.get("source_type"));
				String file_size = CommonUtils.valueOf(map.get("file_size"));
				if(StringUtils.isBlank(file_size)) {
					map.put("file_size", 0);
				}
				if(StringUtils.equals(source_type, "IDC")) {
					map.put("count",idcResCount);
				}else if(StringUtils.equals(source_type, "DNS")) {
					map.put("count",dnsResCount);
				}else if(StringUtils.equals(source_type, "IRCS")) {
					map.put("count",ircsResCount);
				}else if(StringUtils.equals(source_type, "CDN")) {
					map.put("count",cdnResCount);
				}else if(StringUtils.equals(source_type, "ICP")) {
					map.put("count",icpResCount);
				}
			}
			
			// NOTE: the Hive statistics (industrial-internet / net-safety via
			// getHiveCount) and the persistence step (delete + batch insert
			// into t_st_source_statistics) are intentionally disabled in this
			// backup class; see the non-backup task for the live version.
			
			log.info("=========数据汇总完成==========");
		}catch(Exception e) {
			// Failure banner now logged at error level, consistent with the
			// stack trace below (previously info).
			log.error("=========数据汇总异常==========");
			if(log.isErrorEnabled()) {
				log.error(e.getMessage(),e);
			}
		}finally {
			JdbcUtil.close(isdmsOracleConn);
			JdbcUtil.close(isdmsMysqlConn);
		}
	}
	
	/**
	 * Runs a single-value count query and returns its COUNT column as a
	 * string. Delegates to {@link #getList(Connection, String)} so the JDBC
	 * boilerplate lives in one place.
	 *
	 * @param conn open JDBC connection; not closed here
	 * @param sql  query whose first row exposes a column aliased COUNT
	 * @return the COUNT value, or "0" when the query fails or yields nothing
	 */
	public static String getCount(Connection conn,String sql) {
		List<Map<String,Object>> list = getList(conn, sql);
		if(list != null && !list.isEmpty()) {
			String countStr = CommonUtils.valueOf(list.get(0).get("COUNT"));
			if(StringUtils.isNotBlank(countStr)) {
				return countStr;
			}
		}
		return "0";
	}
	
	/**
	 * Executes a query and materializes the full result set as a list of
	 * column-name -> value maps.
	 *
	 * @param conn open JDBC connection; not closed here
	 * @param sql  query to execute
	 * @return result rows, or an empty list when the query fails (the
	 *         SQLException is logged, not propagated)
	 */
	public static List<Map<String,Object>> getList(Connection conn,String sql) {
		List<Map<String,Object>> list = new ArrayList<Map<String, Object>>();
		PreparedStatement ps = null;
		ResultSet rs = null;
		try {
			ps = conn.prepareStatement(sql);
			rs = ps.executeQuery();
			
			list = JdbcUtil.resultSetToList(rs);
		} catch (SQLException e) {
			if(log.isErrorEnabled()) {
				log.error(e.getMessage(),e);
			}
		}finally {
			JdbcUtil.close(rs);
			JdbcUtil.close(ps);
		}
		return list;
	}
	
	/**
	 * Builds the MySQL metadata query that sums num_rows over the latest
	 * partition date for one source's ODS tables.
	 *
	 * <p>The source type is concatenated into the SQL; callers only pass
	 * internal constants ("IDC"/"DNS"/"IRCS"/"CDN"), never user input.
	 *
	 * @param source_type internal source-type constant
	 * @return the count SQL, aliasing the sum as COUNT
	 */
	public static String getSql(String source_type) {
		StringBuilder sql = new StringBuilder();
		sql.append("select sum(num_rows) as COUNT");
		sql.append("  from (select t2.num_rows");
		sql.append("          from meta_tables t1");
		sql.append("         inner join meta_partitions t2");
		sql.append("            on t1.table_name = t2.table_name");
		sql.append("           and t1.source_type = '"+source_type+"'");
		sql.append("           and t1.level = 'ODS'");
		sql.append("           and t2.start_date like");
		sql.append("               (select concat(max(date_format(start_date, '%Y-%m-%d')), '%')");
		sql.append("                  from meta_partitions");
		sql.append("                 where start_date < date_format(now(), '%Y-%m-%d'))) t");
		return sql.toString();
	}
	
	/**
	 * Builds the query returning one storage-size row (MB, 2 decimals) for
	 * the given source, summed over the latest partition date before today.
	 *
	 * @param source_type source type literal echoed into the result row
	 * @param source_name source name literal echoed into the result row
	 * @param table_name  LIKE pattern selecting the source's ODS tables
	 * @param day         day literal (yyyyMMdd) echoed into the result row
	 * @param query_day   unused here (the latest partition date is selected
	 *                    via subquery instead); kept for signature symmetry
	 *                    with getSizeIcpSql
	 * @return one-row size SQL suitable for union-all composition
	 */
	public static String getSizeSql(String source_type,String source_name,String table_name,String day,String query_day) {
		StringBuilder sql = new StringBuilder();
		sql.append("select '"+source_type+"' as source_type,'"+source_name+"' as source_name,round(sum(Raw_size)/1024/1024, 2) as file_size,'"+day+"' as day");
		sql.append("  from meta_partitions");
		sql.append(" where table_name like '"+table_name+"'");
		sql.append("and start_date like ");
		sql.append("       (select max(date_format(start_date, '%Y-%m-%d'))");
		sql.append("          from meta_partitions");
		sql.append("         where start_date < date_format(now(), '%Y-%m-%d'))");
		return sql.toString();
	}
	
	/**
	 * Builds the query returning the ICP (备案) storage-size row from
	 * meta_source_file, filtered to the requested day.
	 *
	 * @param day       day literal (yyyyMMdd) echoed into the result row
	 * @param query_day the same day in yyyy-MM-dd form, used as the
	 *                  data_day LIKE prefix
	 * @return one-row size SQL suitable for union-all composition
	 */
	public static String getSizeIcpSql(String day,String query_day) {
		StringBuilder sql = new StringBuilder();
		sql.append("select 'ICP' as source_type,'icp' as source_name,round(file_size/1024/1024, 2) as file_size,'"+day+"' as day");
		sql.append("  from meta_source_file");
		sql.append(" where source_type='beian'");
		sql.append(" and data_day like '"+query_day+"%'");
		return sql.toString();
	}
	
	/**
	 * Loads the SQL configuration from the classpath resource
	 * sqlconfig/sqlInfo.xml via XStream.
	 *
	 * @return the deserialized SqlInfo, or null when loading fails (the
	 *         exception is logged, not propagated)
	 */
	public static SqlInfo getSqlInfo() {
		SqlInfo sql = null;
		try {
			XStream xstream = new XStream(new DomDriver());
			xstream.processAnnotations(SqlInfo.class);
			ClassLoader classLoader = StatisticsTaskBak.class.getClassLoader();  
			String xml = IOUtils.toString(classLoader.getResourceAsStream("sqlconfig/sqlInfo.xml"),"UTF-8"); 
			sql = (SqlInfo) xstream.fromXML(xml);
		}catch(Exception e) {
			if(log.isErrorEnabled()) {
				log.error(e.getMessage(),e);
			}
		}
		return sql;
	}
	
	/**
	 * Runs a Hive count query and reads the "count" column of its first row.
	 * Kept although the Hive statistics are currently disabled in main.
	 *
	 * @param conn open Hive JDBC connection; not closed here
	 * @param sql  query whose first row exposes a column named count
	 * @return the count value, or "0" when the query fails or yields nothing
	 */
	public static String getHiveCount(Connection conn,String sql) {
		String count = "0";
		PreparedStatement ppst = null;
		ResultSet rs = null;
		try {
			ppst = conn.prepareStatement(sql);
			rs = ppst.executeQuery();
			if(rs.next()) {
				count = rs.getString("count");
			}
		} catch (Exception e) {
			if(log.isErrorEnabled()) {
				log.error(e.getMessage(),e);
			}
		}finally {
			JdbcUtil.close(rs);
			JdbcUtil.close(ppst);
		}
		return count;
	}
	
	/**
	 * Validates that a string is a real calendar date in strict yyyyMMdd
	 * form (e.g. rejects 20190230).
	 *
	 * @param date candidate date string; surrounding whitespace is trimmed
	 * @return true when the string parses strictly, false otherwise
	 */
	public static boolean isValidDate(String date){
		SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
		try{
			sdf.setLenient(false); // strict parsing: reject impossible dates
			sdf.parse(StringUtils.trim(date));
			return true;
		}catch(Exception e){
			// Both error lines behind the same guard (previously the first
			// one bypassed it).
			if(log.isErrorEnabled()) {
				log.error("=========传入的日期错误,正确格式为【yyyyMMdd】且是有效日期=========");
				log.error(e.getMessage(),e);
			}
			return false;
		}
	}
}