package com.surfilter.massdata.spark.task.statistics;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.storage.StorageLevel;

import com.surfilter.massdata.spark.bean.StatDomain;
import com.surfilter.massdata.spark.bean.StatIp;
import com.surfilter.massdata.spark.util.ConfigUtil;
import com.surfilter.massdata.spark.util.DateUtil;
import com.surfilter.massdata.spark.util.JdbcUtil;

public class IpCountStatisticsTask {

	/**
	 * Entry point for the daily client-IP statistics job.
	 *
	 * Reads the distinct client-IP rows for the target day from
	 * {hiveDB}.dwb_domain_cip, registers them as temp table t_day_cip
	 * (shared by every insert*Ip step below), then rolls the daily set
	 * into the week/month/quarter/year accumulators, timing each step.
	 *
	 * @param args optional: args[0] = statistics date as yyyyMMdd
	 *             (defaults to yesterday when absent)
	 */
	public static void main(String[] args) {
		int exitCode = 0;
		JavaSparkContext context = null;
		try{
			long start=System.currentTimeMillis();
			long temp=start;
			long temp_end=start;
			double exectime=0.0;
			
			String hiveDB = ConfigUtil.get("hiveDB", "myhive");
			SparkConf conf = new SparkConf().setAppName("IpCountStatisticsTask");
			context = new JavaSparkContext(conf);
			HiveContext hiveContext = new HiveContext(context);
			
			String dayStr = ""; 
			if(args.length > 0){
				// Incoming date format: yyyyMMdd, e.g. 20170703
				dayStr = args[0];
			}
			//yyyyMMdd -> java.util.Date for the day being processed
			Date date = DateUtil.getExecDate1(DateUtil.getYesterday(), dayStr);
			String day = DateUtil.getCurrentymd(date);//yyyy-MM-dd, e.g. 2017-04-12
			System.out.println("统计数据时间："+DateUtil.getCurrentymd(date));
			
			String day_sql="select cip,city,operators_id,user_type  from "
					+ hiveDB + "." + "dwb_domain_cip"
					+ " where ds='" + day +"' group by cip,city,operators_id,user_type ";
			System.out.println("day_sql:"+day_sql);
			
			JavaRDD<StatIp> day_rdd=hiveContext.sql(day_sql).toJavaRDD().map(new StatIpMap());
			DataFrame day_ipDF = hiveContext.createDataFrame(day_rdd,	StatIp.class);
			day_ipDF.registerTempTable("t_day_cip");
			// Cached because t_day_cip is re-scanned by all four steps below.
			day_ipDF.persist(StorageLevel.MEMORY_AND_DISK_SER());
			
			insertWeekIp(hiveContext,hiveDB,date);
			
			temp_end=System.currentTimeMillis();
            exectime=(temp_end-temp)*1.0/(1000*60);
            temp=temp_end;
			System.out.println("insertWeekIp exectime: "+exectime+" min........................");
			
			insertMonthIp(hiveContext,hiveDB,date);
			temp_end=System.currentTimeMillis();
            exectime=(temp_end-temp)*1.0/(1000*60);
            temp=temp_end;
			System.out.println("insertMonthIp exectime: "+exectime+" min........................");
			
			insertQuarterIp(hiveContext,hiveDB,date);
			temp_end=System.currentTimeMillis();
            exectime=(temp_end-temp)*1.0/(1000*60);
            temp=temp_end;
			System.out.println("insertQuarterIp exectime: "+exectime+" min........................");
			
			insertYearIp(hiveContext,hiveDB,date);
			temp_end=System.currentTimeMillis();
            exectime=(temp_end-temp)*1.0/(1000*60);
            temp=temp_end;
			System.out.println("insertYearIp exectime: "+exectime+" min........................");
			
			long end=System.currentTimeMillis();
			exectime=(end-start)*1.0/(1000*60);
			System.out.println("IpCountStatisticsTask exectime: "+exectime+" min........................");
			
		}catch(Exception e){
			e.printStackTrace();
			// FIX: failures previously exited with status 0, so the scheduler
			// treated failed runs as successful; report a non-zero exit code.
			exitCode = 1;
		}finally{
			// FIX: the SparkContext was never stopped; release cluster resources.
			if(context != null){
				context.stop();
			}
		}
		if(exitCode != 0){
			System.exit(exitCode);
		}
	}
	
	/**
	 * Rolls the daily distinct client-IP set (temp table t_day_cip, registered
	 * by main) into the year-to-date set and writes yearly aggregates.
	 *
	 * On the first day of a new year, today's IPs seed the Hive partition
	 * {hiveDB}.dws_cip_year directly; otherwise yesterday's partition is
	 * unioned with today's IPs and re-written under today's ds. The total,
	 * per-operator, per-city and per-user-type distinct IP counts are then
	 * pushed to the RDBMS table SA3100.
	 *
	 * @param hiveContext shared HiveContext with t_day_cip already registered
	 * @param hiveDB      Hive database holding dws_cip_year
	 * @param date        statistics date (the day being processed)
	 */
	private static void insertYearIp(HiveContext hiveContext, String hiveDB,
			Date date) {
		Connection conn = null;
		try{
			// Calendar components embedded as literal columns in the aggregate SQL.
			int year_i=DateUtil.getCurrentYear(date);//year
			int half_year_i=DateUtil.getHalfYear(date);//half_year
			int quarter_i=DateUtil.getQuarter(date);//quarter
			int month_i=DateUtil.getCurrentMonth(date);//month
			int week_i=DateUtil.getCurrentWeek(date);//week
			int day_i=DateUtil.getCurrentDay(date);//day
			
			String year = DateUtil.getCurrentYear_s(date);
			String day = DateUtil.getCurrentymd(date);//yyyy-MM-dd
			// Previous day. NOTE(review): fixed 24h subtraction assumes no DST
			// shift between the two days — confirm the cluster timezone.
			Date yesterday = new Date(date.getTime()-24*60*60*1000);
			String year_last = DateUtil.getCurrentYear_s(yesterday);
			String day_last = DateUtil.getCurrentymd(yesterday);
			// Is this the first day of a new year?
			if(!year_last.equals(year)){
				System.out.println("新年份开始");
				// Seed the yearly partition with today's daily set.
				String year_init_sql="insert into table "+ hiveDB + ".dws_cip_year partition (ds='"+day+"')"
						+ " select cip,city,operators_id,user_type,"+year+" from t_day_cip ";
				System.out.println("year_init_sql:"+year_init_sql);
				hiveContext.sql(year_init_sql);
				// Total distinct client IPs (province-wide active user IP count).
				long cip_count=0l;
				String cip_num_sql="select count(distinct cip) as count_value  from t_day_cip";
				List<Row> list_cip=hiveContext.sql(cip_num_sql).toJavaRDD().collect();
				
				if(list_cip.size()>0){
					cip_count=Long.valueOf(list_cip.get(0).getAs("count_value").toString());
				}
				conn = JdbcUtil.getConnection();
				cipInsertYear(conn,cip_count,date,"SA3100");
				
				// Distinct IP counts per operator.
				String operators_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_OPERATORS' buss_type,operators_id buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by operators_id ";
				System.out.println("operators_sql:"+operators_sql);
				List<Row> list_cip_operators=hiveContext.sql(operators_sql).toJavaRDD().collect();
				cipYearInsert(conn,list_cip_operators,date,"WEBSITE_COUNT_OPERATORS");

				// Distinct IP counts per city.
				String city_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_AREA' buss_type,city buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by city ";
				List<Row> list_cip_city=hiveContext.sql(city_sql).toJavaRDD().collect();
				cipYearInsert(conn,list_cip_city,date,"WEBSITE_COUNT_AREA");
				
				// Distinct IP counts per user type.
				String user_type_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_USE_UNIT_TYPE' buss_type,user_type buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by user_type ";
				System.out.println("user_type_sql:"+user_type_sql);
				List<Row> list_cip_user_type=hiveContext.sql(user_type_sql).toJavaRDD().collect();
				cipYearInsert(conn,list_cip_user_type,date,"WEBSITE_COUNT_USE_UNIT_TYPE");
				
			}else{
				// Union today's IPs with the IPs seen earlier this year.
				System.out.println("今天发现的ip与年内发现的历史ip union");
				String year_sql="select cip,city,operators_id,user_type  from "
						+ hiveDB + "." + "dws_cip_year"
						+ " where ds='" + day_last +"'";
				System.out.println("year_sql:"+year_sql);
				JavaRDD<Row> year_rdd=hiveContext.sql(year_sql).toJavaRDD();
				
				JavaRDD<StatIp> year_cip_rdd =year_rdd.map(new StatIpMap());
				DataFrame CipDF = hiveContext.createDataFrame(year_cip_rdd,	StatIp.class);
				CipDF.registerTempTable("t_cip_year");
				// Union yesterday's dws_cip_year with today's dwb_domain_cip rows.
				String sql_union="select cip,city,operators_id,user_type from ( select cip,city,operators_id,user_type from t_cip_year union select cip,city,operators_id,user_type from t_day_cip) t group by cip,city,operators_id,user_type ";
				JavaRDD<StatIp> union_rdd=hiveContext.sql(sql_union).toJavaRDD().map(new StatIpMap());
				DataFrame UnionDF = hiveContext.createDataFrame(union_rdd,	StatIp.class);
				UnionDF.registerTempTable("t_cip_year_union");
				
				String year_insert_sql="insert into table "+ hiveDB + ".dws_cip_year partition (ds='"+day+"')"
						+ " select cip,city,operators_id,user_type,"+year+" from t_cip_year_union ";
				System.out.println("year_insert_sql:"+year_insert_sql);
				hiveContext.sql(year_insert_sql);
				
				// Total distinct client IPs (province-wide active user IP count).
				long cip_count=0l;
				String cip_num_sql="select count(distinct cip) as count_value  from t_cip_year_union";
				List<Row> list_cip=hiveContext.sql(cip_num_sql).toJavaRDD().collect();
				
				if(list_cip.size()>0){
					cip_count=Long.valueOf(list_cip.get(0).getAs("count_value").toString());
				}
				// Open the RDBMS connection (closed in finally).
				conn = JdbcUtil.getConnection();
				cipInsertYear(conn,cip_count,date,"SA3100");
				
				// Distinct IP counts per operator.
				String operators_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_OPERATORS' buss_type,operators_id buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_year_union group by operators_id ";
				System.out.println("operators_sql:"+operators_sql);
				List<Row> list_cip_operators=hiveContext.sql(operators_sql).toJavaRDD().collect();
				cipYearInsert(conn,list_cip_operators,date,"WEBSITE_COUNT_OPERATORS");
				
				// Distinct IP counts per city.
				String city_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_AREA' buss_type,city buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_year_union group by city ";
				List<Row> list_cip_city=hiveContext.sql(city_sql).toJavaRDD().collect();
				cipYearInsert(conn,list_cip_city,date,"WEBSITE_COUNT_AREA");
				// Distinct IP counts per user type.
				String user_type_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_USE_UNIT_TYPE' buss_type,user_type buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_year_union group by user_type ";
				System.out.println("user_type_sql:"+user_type_sql);
				List<Row> list_cip_user_type=hiveContext.sql(user_type_sql).toJavaRDD().collect();
				System.out.println("list_cip_user_type.size:"+list_cip_user_type.size());
				cipYearInsert(conn,list_cip_user_type,date,"WEBSITE_COUNT_USE_UNIT_TYPE");
			}
			
		}catch(Exception e){
			// FIX: the connection was previously closed here AND in finally,
			// double-closing it on the failure path; finally alone suffices.
			e.printStackTrace();
		}finally{
			if(conn!=null){
				JdbcUtil.close(conn);
			}
		}
	}

	/**
	 * Rolls the daily distinct client-IP set (temp table t_day_cip, registered
	 * by main) into the quarter-to-date set and writes quarterly aggregates.
	 *
	 * On the first day of a new quarter, today's IPs seed the Hive partition
	 * {hiveDB}.dws_cip_quarter directly; otherwise yesterday's partition is
	 * unioned with today's IPs and re-written under today's ds. The total,
	 * per-operator, per-city and per-user-type distinct IP counts are then
	 * pushed to the RDBMS table SA3300.
	 *
	 * @param hiveContext shared HiveContext with t_day_cip already registered
	 * @param hiveDB      Hive database holding dws_cip_quarter
	 * @param date        statistics date (the day being processed)
	 */
	private static void insertQuarterIp(HiveContext hiveContext, String hiveDB,
			Date date) {

		Connection conn = null;
		try{
			// Calendar components embedded as literal columns in the aggregate SQL.
			int year_i=DateUtil.getCurrentYear(date);//year
			int half_year_i=DateUtil.getHalfYear(date);//half_year
			int quarter_i=DateUtil.getQuarter(date);//quarter
			int month_i=DateUtil.getCurrentMonth(date);//month
			int week_i=DateUtil.getCurrentWeek(date);//week
			int day_i=DateUtil.getCurrentDay(date);//day
			
			String year = DateUtil.getCurrentYear_s(date);
			int quarter = DateUtil.getQuarter(date);
			String day = DateUtil.getCurrentymd(date);//yyyy-MM-dd
			// Previous day. NOTE(review): fixed 24h subtraction assumes no DST
			// shift between the two days — confirm the cluster timezone.
			Date yesterday = new Date(date.getTime()-24*60*60*1000);
			int quarter_last = DateUtil.getQuarter(yesterday);
			String day_last = DateUtil.getCurrentymd(yesterday);
			// Is this the first day of a new quarter?
			if(quarter!=quarter_last){
				System.out.println("新季节开始");
				// Seed the quarterly partition with today's daily set.
				String quarter_init_sql="insert into table "+ hiveDB + ".dws_cip_quarter partition (ds='"+day+"')"
						+ " select cip,city,operators_id,user_type,"+year+","+quarter+" from t_day_cip ";
				System.out.println("quarter_init_sql:"+quarter_init_sql);
				hiveContext.sql(quarter_init_sql);
				// Total distinct client IPs (province-wide active user IP count).
				long cip_count=0l;
				String cip_num_sql="select count(distinct cip) as count_value  from t_day_cip";
				List<Row> list_cip=hiveContext.sql(cip_num_sql).toJavaRDD().collect();
				
				if(list_cip.size()>0){
					cip_count=Long.valueOf(list_cip.get(0).getAs("count_value").toString());
				}
				conn = JdbcUtil.getConnection();
				cipInsertQuarter(conn,cip_count,date,"SA3300");
				
				// Distinct IP counts per operator.
				String operators_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_OPERATORS' buss_type,operators_id buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by operators_id ";
				System.out.println("operators_sql:"+operators_sql);
				List<Row> list_cip_operators=hiveContext.sql(operators_sql).toJavaRDD().collect();
				cipQuarterInsert(conn,list_cip_operators,date,"WEBSITE_COUNT_OPERATORS");

				// Distinct IP counts per city.
				String city_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_AREA' buss_type,city buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by city ";
				List<Row> list_cip_city=hiveContext.sql(city_sql).toJavaRDD().collect();
				cipQuarterInsert(conn,list_cip_city,date,"WEBSITE_COUNT_AREA");
				
				// Distinct IP counts per user type.
				String user_type_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_USE_UNIT_TYPE' buss_type,user_type buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by user_type ";
				System.out.println("user_type_sql:"+user_type_sql);
				List<Row> list_cip_user_type=hiveContext.sql(user_type_sql).toJavaRDD().collect();
				cipQuarterInsert(conn,list_cip_user_type,date,"WEBSITE_COUNT_USE_UNIT_TYPE");
				
			}else{
				// Union today's IPs with the IPs seen earlier this quarter.
				System.out.println("今天发现的ip与季度内发现的历史ip union");
				String quarter_sql="select cip,city,operators_id,user_type  from "
						+ hiveDB + "." + "dws_cip_quarter"
						+ " where ds='" + day_last +"'";
				System.out.println("quarter_sql:"+quarter_sql);
				JavaRDD<Row> quarter_rdd=hiveContext.sql(quarter_sql).toJavaRDD();
				
				JavaRDD<StatIp> quarter_cip_rdd =quarter_rdd.map(new StatIpMap());
				DataFrame CipDF = hiveContext.createDataFrame(quarter_cip_rdd,	StatIp.class);
				CipDF.registerTempTable("t_cip_quarter");
				// Union yesterday's dws_cip_quarter with today's dwb_domain_cip rows.
				String sql_union="select cip,city,operators_id,user_type from ( select cip,city,operators_id,user_type from t_cip_quarter union select cip,city,operators_id,user_type from t_day_cip) t group by cip,city,operators_id,user_type ";
				JavaRDD<StatIp> union_rdd=hiveContext.sql(sql_union).toJavaRDD().map(new StatIpMap());
				DataFrame UnionDF = hiveContext.createDataFrame(union_rdd,	StatIp.class);
				UnionDF.registerTempTable("t_cip_quarter_union");
				
				String quarter_insert_sql="insert into table "+ hiveDB + ".dws_cip_quarter partition (ds='"+day+"')"
						+ " select cip,city,operators_id,user_type,"+year+","+quarter+" from t_cip_quarter_union ";
				System.out.println("quarter_insert_sql:"+quarter_insert_sql);
				hiveContext.sql(quarter_insert_sql);
				
				// Total distinct client IPs (province-wide active user IP count).
				long cip_count=0l;
				String cip_num_sql="select count(distinct cip) as count_value  from t_cip_quarter_union";
				List<Row> list_cip=hiveContext.sql(cip_num_sql).toJavaRDD().collect();
				
				if(list_cip.size()>0){
					cip_count=Long.valueOf(list_cip.get(0).getAs("count_value").toString());
				}
				// Open the RDBMS connection (closed in finally).
				conn = JdbcUtil.getConnection();
				cipInsertQuarter(conn,cip_count,date,"SA3300");
				
				// Distinct IP counts per operator.
				String operators_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_OPERATORS' buss_type,operators_id buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_quarter_union group by operators_id ";
				System.out.println("operators_sql:"+operators_sql);
				List<Row> list_cip_operators=hiveContext.sql(operators_sql).toJavaRDD().collect();
				cipQuarterInsert(conn,list_cip_operators,date,"WEBSITE_COUNT_OPERATORS");
				
				// Distinct IP counts per city.
				String city_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_AREA' buss_type,city buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_quarter_union group by city ";
				List<Row> list_cip_city=hiveContext.sql(city_sql).toJavaRDD().collect();
				cipQuarterInsert(conn,list_cip_city,date,"WEBSITE_COUNT_AREA");
				// Distinct IP counts per user type.
				String user_type_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_USE_UNIT_TYPE' buss_type,user_type buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_quarter_union group by user_type ";
				System.out.println("user_type_sql:"+user_type_sql);
				List<Row> list_cip_user_type=hiveContext.sql(user_type_sql).toJavaRDD().collect();
				System.out.println("list_cip_user_type.size:"+list_cip_user_type.size());
				cipQuarterInsert(conn,list_cip_user_type,date,"WEBSITE_COUNT_USE_UNIT_TYPE");
			}
			
		}catch(Exception e){
			// FIX: the connection was previously closed here AND in finally,
			// double-closing it on the failure path; finally alone suffices.
			e.printStackTrace();
		}finally{
			if(conn!=null){
				JdbcUtil.close(conn);
			}
		}
	}

	/**
	 * Rolls the daily distinct client-IP set (temp table t_day_cip, registered
	 * by main) into the month-to-date set and writes monthly aggregates.
	 *
	 * On the first day of a new month, today's IPs seed the Hive partition
	 * {hiveDB}.dws_cip_month directly; otherwise yesterday's partition is
	 * unioned with today's IPs and re-written under today's ds. The total,
	 * per-operator, per-city and per-user-type distinct IP counts are then
	 * pushed to the RDBMS table SA3400.
	 *
	 * @param hiveContext shared HiveContext with t_day_cip already registered
	 * @param hiveDB      Hive database holding dws_cip_month
	 * @param date        statistics date (the day being processed)
	 */
	private static void insertMonthIp(HiveContext hiveContext, String hiveDB,
			Date date) {

		Connection conn = null;
		try{
			// Calendar components embedded as literal columns in the aggregate SQL.
			int year_i=DateUtil.getCurrentYear(date);//year
			int half_year_i=DateUtil.getHalfYear(date);//half_year
			int quarter_i=DateUtil.getQuarter(date);//quarter
			int month_i=DateUtil.getCurrentMonth(date);//month
			int week_i=DateUtil.getCurrentWeek(date);//week
			int day_i=DateUtil.getCurrentDay(date);//day
			
			String year = DateUtil.getCurrentYear_s(date);
			String month = DateUtil.getCurrentMonth_s(date);
			String day = DateUtil.getCurrentymd(date);//yyyy-MM-dd
			// Previous day. NOTE(review): fixed 24h subtraction assumes no DST
			// shift between the two days — confirm the cluster timezone.
			Date yesterday = new Date(date.getTime()-24*60*60*1000);
			String month_last = DateUtil.getCurrentMonth_s(yesterday);
			String day_last = DateUtil.getCurrentymd(yesterday);
			// Is this the first day of a new month?
			if(!month_last.equals(month)){
				System.out.println("新月份开始");
				// Seed the monthly partition with today's daily set.
				String month_init_sql="insert into table "+ hiveDB + ".dws_cip_month partition (ds='"+day+"')"
						+ " select cip,city,operators_id,user_type,"+year+","+month+" from t_day_cip ";
				System.out.println("month_init_sql:"+month_init_sql);
				hiveContext.sql(month_init_sql);
				// Total distinct client IPs (province-wide active user IP count).
				long cip_count=0l;
				String cip_num_sql="select count(distinct cip) as count_value  from t_day_cip";
				List<Row> list_cip=hiveContext.sql(cip_num_sql).toJavaRDD().collect();
				
				if(list_cip.size()>0){
					cip_count=Long.valueOf(list_cip.get(0).getAs("count_value").toString());
				}
				conn = JdbcUtil.getConnection();
				cipInsertMonth(conn,cip_count,date,"SA3400");
				
				// Distinct IP counts per operator.
				String operators_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_OPERATORS' buss_type,operators_id buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by operators_id ";
				System.out.println("operators_sql:"+operators_sql);
				List<Row> list_cip_operators=hiveContext.sql(operators_sql).toJavaRDD().collect();
				cipMonthInsert(conn,list_cip_operators,date,"WEBSITE_COUNT_OPERATORS");

				// Distinct IP counts per city.
				String city_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_AREA' buss_type,city buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by city ";
				List<Row> list_cip_city=hiveContext.sql(city_sql).toJavaRDD().collect();
				cipMonthInsert(conn,list_cip_city,date,"WEBSITE_COUNT_AREA");
				
				// Distinct IP counts per user type.
				String user_type_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_USE_UNIT_TYPE' buss_type,user_type buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by user_type ";
				System.out.println("user_type_sql:"+user_type_sql);
				List<Row> list_cip_user_type=hiveContext.sql(user_type_sql).toJavaRDD().collect();
				cipMonthInsert(conn,list_cip_user_type,date,"WEBSITE_COUNT_USE_UNIT_TYPE");
				
			}else{
				// Union today's IPs with the IPs seen earlier this month.
				System.out.println("今天发现的ip与月内发现的历史ip union");
				String month_sql="select cip,city,operators_id,user_type  from "
						+ hiveDB + "." + "dws_cip_month"
						+ " where ds='" + day_last +"'";
				System.out.println("month_sql:"+month_sql);
				JavaRDD<Row> month_rdd=hiveContext.sql(month_sql).toJavaRDD();
				
				JavaRDD<StatIp> month_cip_rdd =month_rdd.map(new StatIpMap());
				DataFrame CipDF = hiveContext.createDataFrame(month_cip_rdd,	StatIp.class);
				CipDF.registerTempTable("t_cip_month");
				// Union yesterday's dws_cip_month with today's dwb_domain_cip rows.
				String sql_union="select cip,city,operators_id,user_type from ( select cip,city,operators_id,user_type from t_cip_month union select cip,city,operators_id,user_type from t_day_cip) t group by cip,city,operators_id,user_type ";
				JavaRDD<StatIp> union_rdd=hiveContext.sql(sql_union).toJavaRDD().map(new StatIpMap());
				DataFrame UnionDF = hiveContext.createDataFrame(union_rdd,	StatIp.class);
				UnionDF.registerTempTable("t_cip_month_union");
				
				String month_insert_sql="insert into table "+ hiveDB + ".dws_cip_month partition (ds='"+day+"')"
						+ " select cip,city,operators_id,user_type,"+year+","+month+" from t_cip_month_union ";
				System.out.println("month_insert_sql:"+month_insert_sql);
				hiveContext.sql(month_insert_sql);
				
				// Total distinct client IPs (province-wide active user IP count).
				long cip_count=0l;
				String cip_num_sql="select count(distinct cip) as count_value  from t_cip_month_union";
				List<Row> list_cip=hiveContext.sql(cip_num_sql).toJavaRDD().collect();
				
				if(list_cip.size()>0){
					cip_count=Long.valueOf(list_cip.get(0).getAs("count_value").toString());
				}
				// Open the RDBMS connection (closed in finally).
				conn = JdbcUtil.getConnection();
				cipInsertMonth(conn,cip_count,date,"SA3400");
				
				// Distinct IP counts per operator.
				String operators_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_OPERATORS' buss_type,operators_id buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_month_union group by operators_id ";
				System.out.println("operators_sql:"+operators_sql);
				List<Row> list_cip_operators=hiveContext.sql(operators_sql).toJavaRDD().collect();
				cipMonthInsert(conn,list_cip_operators,date,"WEBSITE_COUNT_OPERATORS");
				
				// Distinct IP counts per city.
				String city_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_AREA' buss_type,city buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_month_union group by city ";
				List<Row> list_cip_city=hiveContext.sql(city_sql).toJavaRDD().collect();
				cipMonthInsert(conn,list_cip_city,date,"WEBSITE_COUNT_AREA");
				// Distinct IP counts per user type.
				String user_type_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_USE_UNIT_TYPE' buss_type,user_type buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_month_union group by user_type ";
				System.out.println("user_type_sql:"+user_type_sql);
				List<Row> list_cip_user_type=hiveContext.sql(user_type_sql).toJavaRDD().collect();
				System.out.println("list_cip_user_type.size:"+list_cip_user_type.size());
				cipMonthInsert(conn,list_cip_user_type,date,"WEBSITE_COUNT_USE_UNIT_TYPE");
			}
			
		}catch(Exception e){
			// FIX: the connection was previously closed here AND in finally,
			// double-closing it on the failure path; finally alone suffices.
			e.printStackTrace();
		}finally{
			if(conn!=null){
				JdbcUtil.close(conn);
			}
		}
	}

	/**
	 * Rolls the daily distinct client-IP set (temp table t_day_cip, registered
	 * by main) into the week-to-date set and writes weekly aggregates.
	 *
	 * On the first day of a new week, today's IPs seed the Hive partition
	 * {hiveDB}.dws_cip_week directly; otherwise yesterday's partition is
	 * unioned with today's IPs and re-written under today's ds. The total,
	 * per-operator, per-city and per-user-type distinct IP counts are then
	 * pushed to the RDBMS table SA3501.
	 *
	 * @param hiveContext shared HiveContext with t_day_cip already registered
	 * @param hiveDB      Hive database holding dws_cip_week
	 * @param date        statistics date (the day being processed)
	 */
	private static void insertWeekIp(HiveContext hiveContext,String hiveDB,Date date){
		Connection conn = null;
		try{
			// Calendar components embedded as literal columns in the aggregate SQL.
			int year_i=DateUtil.getCurrentYear(date);//year
			int half_year_i=DateUtil.getHalfYear(date);//half_year
			int quarter_i=DateUtil.getQuarter(date);//quarter
			int month_i=DateUtil.getCurrentMonth(date);//month
			int week_i=DateUtil.getCurrentWeek(date);//week
			int day_i=DateUtil.getCurrentDay(date);//day
			
			String year = DateUtil.getCurrentYear_s(date);
			String week = DateUtil.getCurrentWeek_s(date);
			String day = DateUtil.getCurrentymd(date);//yyyy-MM-dd
			// Previous day. NOTE(review): fixed 24h subtraction assumes no DST
			// shift between the two days — confirm the cluster timezone.
			Date yesterday = new Date(date.getTime()-24*60*60*1000);
			String week_last = DateUtil.getCurrentWeek_s(yesterday);
			String day_last = DateUtil.getCurrentymd(yesterday);
			// Is this the first day of a new week?
			if(!week_last.equals(week)){
				System.out.println("新一周开始");
				// Seed the weekly partition with today's daily set.
				String week_init_sql="insert into table "+ hiveDB + ".dws_cip_week partition (ds='"+day+"')"
						+ " select cip,city,operators_id,user_type,"+year+","+week+" from t_day_cip ";
				System.out.println("week_init_sql:"+week_init_sql);
				hiveContext.sql(week_init_sql);
				// Total distinct client IPs (province-wide active user IP count).
				long cip_count=0l;
				String cip_num_sql="select count(distinct cip) as count_value  from t_day_cip";
				List<Row> list_cip=hiveContext.sql(cip_num_sql).toJavaRDD().collect();
				
				if(list_cip.size()>0){
					cip_count=Long.valueOf(list_cip.get(0).getAs("count_value").toString());
				}
				conn = JdbcUtil.getConnection();
				cipInsertWeek(conn,cip_count,date,"SA3501");
				
				// Distinct IP counts per operator.
				String operators_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_OPERATORS' buss_type,operators_id buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by operators_id ";
				System.out.println("operators_sql:"+operators_sql);
				List<Row> list_cip_operators=hiveContext.sql(operators_sql).toJavaRDD().collect();
				cipWeekInsert(conn,list_cip_operators,date,"WEBSITE_COUNT_OPERATORS");

				// Distinct IP counts per city.
				String city_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_AREA' buss_type,city buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by city ";
				List<Row> list_cip_city=hiveContext.sql(city_sql).toJavaRDD().collect();
				cipWeekInsert(conn,list_cip_city,date,"WEBSITE_COUNT_AREA");
				
				// Distinct IP counts per user type.
				String user_type_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_USE_UNIT_TYPE' buss_type,user_type buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_day_cip group by user_type ";
				System.out.println("user_type_sql:"+user_type_sql);
				List<Row> list_cip_user_type=hiveContext.sql(user_type_sql).toJavaRDD().collect();
				System.out.println("list_cip_user_type.size:"+list_cip_user_type.size());
				cipWeekInsert(conn,list_cip_user_type,date,"WEBSITE_COUNT_USE_UNIT_TYPE");
				
			}else{
				// Union today's IPs with the IPs seen earlier this week.
				System.out.println("今天发现的ip与周内发现的历史ip union");
				String week_sql="select cip,city,operators_id,user_type  from "
						+ hiveDB + "." + "dws_cip_week"
						+ " where ds='" + day_last +"'";
				System.out.println("week_sql:"+week_sql);
				JavaRDD<Row> week_rdd=hiveContext.sql(week_sql).toJavaRDD();
				
				JavaRDD<StatIp> week_cip_rdd =week_rdd.map(new StatIpMap());
				DataFrame CipDF = hiveContext.createDataFrame(week_cip_rdd,	StatIp.class);
				CipDF.registerTempTable("t_cip_week");
				// Union yesterday's dws_cip_week with today's dwb_domain_cip rows.
				String sql_union="select cip,city,operators_id,user_type from ( select cip,city,operators_id,user_type from t_cip_week union select cip,city,operators_id,user_type from t_day_cip) t group by cip,city,operators_id,user_type ";
				JavaRDD<StatIp> union_rdd=hiveContext.sql(sql_union).toJavaRDD().map(new StatIpMap());
				DataFrame UnionDF = hiveContext.createDataFrame(union_rdd,	StatIp.class);
				UnionDF.registerTempTable("t_cip_week_union");
				
				String week_insert_sql="insert into table "+ hiveDB + ".dws_cip_week partition (ds='"+day+"')"
						+ " select cip,city,operators_id,user_type,"+year+","+week+" from t_cip_week_union ";
				System.out.println("week_insert_sql:"+week_insert_sql);
				hiveContext.sql(week_insert_sql);
				
				// Total distinct client IPs (province-wide active user IP count).
				long cip_count=0l;
				String cip_num_sql="select count(distinct cip) as count_value  from t_cip_week_union";
				List<Row> list_cip=hiveContext.sql(cip_num_sql).toJavaRDD().collect();
				
				if(list_cip.size()>0){
					cip_count=Long.valueOf(list_cip.get(0).getAs("count_value").toString());
				}
				// Open the RDBMS connection (closed in finally).
				conn = JdbcUtil.getConnection();
				cipInsertWeek(conn,cip_count,date,"SA3501");
				
				// Distinct IP counts per operator.
				String operators_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_OPERATORS' buss_type,operators_id buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_week_union group by operators_id ";
				System.out.println("operators_sql:"+operators_sql);
				List<Row> list_cip_operators=hiveContext.sql(operators_sql).toJavaRDD().collect();
				cipWeekInsert(conn,list_cip_operators,date,"WEBSITE_COUNT_OPERATORS");
				
				// Distinct IP counts per city.
				String city_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_AREA' buss_type,city buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_week_union group by city ";
				List<Row> list_cip_city=hiveContext.sql(city_sql).toJavaRDD().collect();
				cipWeekInsert(conn,list_cip_city,date,"WEBSITE_COUNT_AREA");
				// Distinct IP counts per user type.
				String user_type_sql="select "+year_i+" year,"+half_year_i+" half_year,"+
						quarter_i+" quarter,"+month_i+" month,"+
						week_i+" week,"+day_i+" day,'WEBSITE_COUNT_USE_UNIT_TYPE' buss_type,user_type buss_value,"+
				" count(distinct cip) as count_value,0 sta_range  from t_cip_week_union group by user_type ";
				System.out.println("user_type_sql:"+user_type_sql);
				List<Row> list_cip_user_type=hiveContext.sql(user_type_sql).toJavaRDD().collect();
				System.out.println("list_cip_user_type.size:"+list_cip_user_type.size());
				cipWeekInsert(conn,list_cip_user_type,date,"WEBSITE_COUNT_USE_UNIT_TYPE");
			}
			
		}catch(Exception e){
			// FIX: the connection was previously closed here AND in finally,
			// double-closing it on the failure path; finally alone suffices.
			e.printStackTrace();
		}finally{
			if(conn!=null){
				JdbcUtil.close(conn);
			}
		}
	}
	/**
	 * Batch-inserts weekly per-dimension IP count rows into table SA3501,
	 * deleting any rows already written for this date/buss_type first so
	 * re-runs for the same day stay idempotent.
	 *
	 * @param conn               open RDBMS connection (owned by the caller)
	 * @param list_cip_operators aggregate rows (one per dimension value)
	 * @param date               statistics date used for the delete filter
	 * @param buss_type          dimension tag, e.g. WEBSITE_COUNT_OPERATORS
	 */
	private static void cipWeekInsert(Connection conn,
			List<Row> list_cip_operators, Date date,String buss_type) {
		if(list_cip_operators.isEmpty()){
			return; // nothing to write for this dimension
		}
		try {
			String columns="id,year,half_year,quarter,month,week,day,buss_type,buss_value,count_value,sta_range";
			String values="values(SA3501_SE.nextval,?,?,?,?,?,?,?,?,?,?)";
			JdbcUtil.deleteWeekTable(date,conn,"SA3501",buss_type,"",0);
			JdbcUtil.insertTable(conn,list_cip_operators,"SA3501",columns,values);
		} catch (SQLException e) {
			// Swallowed deliberately so the remaining dimensions still get
			// written; log enough context to locate the failing batch.
			System.err.println("cipWeekInsert failed for buss_type=" + buss_type);
			e.printStackTrace();
		}
	}
	/**
	 * Batch-inserts monthly per-dimension IP count rows into table SA3400,
	 * deleting any rows already written for this date/buss_type first so
	 * re-runs for the same day stay idempotent.
	 *
	 * @param conn               open RDBMS connection (owned by the caller)
	 * @param list_cip_operators aggregate rows (one per dimension value)
	 * @param date               statistics date used for the delete filter
	 * @param buss_type          dimension tag, e.g. WEBSITE_COUNT_AREA
	 */
	private static void cipMonthInsert(Connection conn,
			List<Row> list_cip_operators, Date date,String buss_type) {
		if(list_cip_operators.isEmpty()){
			return; // nothing to write for this dimension
		}
		try {
			String columns="id,year,half_year,quarter,month,week,day,buss_type,buss_value,count_value,sta_range";
			String values="values(SA3400_SE.nextval,?,?,?,?,?,?,?,?,?,?)";
			JdbcUtil.deleteMonthTable(date,conn,"SA3400",buss_type,"",0);
			JdbcUtil.insertTable(conn,list_cip_operators,"SA3400",columns,values);
		} catch (SQLException e) {
			// Swallowed deliberately so the remaining dimensions still get
			// written; log enough context to locate the failing batch.
			System.err.println("cipMonthInsert failed for buss_type=" + buss_type);
			e.printStackTrace();
		}
	}
	/**
	 * Persists quarterly client-IP statistic rows into table SA3300.
	 * Existing rows for the same quarter and business type are deleted first,
	 * keeping the load idempotent per period.
	 *
	 * @param conn               open JDBC connection, owned by the caller
	 * @param list_cip_operators aggregated rows collected from the Spark SQL result
	 * @param date               statistics date used to derive the quarter key
	 * @param buss_type          business-type discriminator for the delete filter
	 */
	private static void cipQuarterInsert(Connection conn,
			List<Row> list_cip_operators, Date date,String buss_type) {
		try {
			// Nothing to persist — skip both delete and insert.
			if (list_cip_operators.isEmpty()) {
				return;
			}
			final String columns = "id,year,half_year,quarter,month,week,day,buss_type,buss_value,count_value,sta_range";
			final String values = "values(SA3300_SE.nextval,?,?,?,?,?,?,?,?,?,?)";
			JdbcUtil.deleteQuarterTable(date, conn, "SA3300", buss_type, "", 0);
			JdbcUtil.insertTable(conn, list_cip_operators, "SA3300", columns, values);
		} catch (SQLException e) {
			// Logged and swallowed so one failed table does not abort the
			// rest of the batch — consistent with the other cip*Insert helpers.
			e.printStackTrace();
		}
	}
	/**
	 * Persists yearly client-IP statistic rows into table SA3100.
	 * Existing rows for the same year and business type are deleted first,
	 * keeping the load idempotent per period.
	 *
	 * @param conn               open JDBC connection, owned by the caller
	 * @param list_cip_operators aggregated rows collected from the Spark SQL result
	 * @param date               statistics date used to derive the year key
	 * @param buss_type          business-type discriminator for the delete filter
	 */
	private static void cipYearInsert(Connection conn,
			List<Row> list_cip_operators, Date date,String buss_type) {
		try {
			// Nothing to persist — skip both delete and insert.
			if (list_cip_operators.isEmpty()) {
				return;
			}
			final String columns = "id,year,half_year,quarter,month,week,day,buss_type,buss_value,count_value,sta_range";
			final String values = "values(SA3100_SE.nextval,?,?,?,?,?,?,?,?,?,?)";
			JdbcUtil.deleteYearTable(date, conn, "SA3100", buss_type, "", 0);
			JdbcUtil.insertTable(conn, list_cip_operators, "SA3100", columns, values);
		} catch (SQLException e) {
			// Logged and swallowed so one failed table does not abort the
			// rest of the batch — consistent with the other cip*Insert helpers.
			e.printStackTrace();
		}
	}
	/**
	 * Writes the single weekly total row (buss_type WEBSITE_COUNT_SUM,
	 * buss_value PROVINCE_COUNT) into the given table, replacing any
	 * existing row for the same week.
	 *
	 * @param conn      open JDBC connection, owned by the caller
	 * @param cip_count distinct client-IP count to store as count_value
	 * @param date      statistics date used to derive all period columns
	 * @param table     target table name; also prefixes the Oracle sequence ({@code <table>_SE})
	 */
	private static void cipInsertWeek(Connection conn, long cip_count,Date date,String table){
		try {
			final String columns = "id,year,half_year,quarter,month,week,day,buss_type,buss_value,count_value,sta_range";
			final String values = "values(" + table + "_SE.nextval,?,?,?,?,?,?,?,?,?,?)";
			// JDBC positional parameters in placeholder order (1-based).
			Object[] params = {
					DateUtil.getCurrentYear(date),   // 1: year
					DateUtil.getHalfYear(date),      // 2: half_year
					DateUtil.getQuarter(date),       // 3: quarter
					DateUtil.getCurrentMonth(date),  // 4: month
					DateUtil.getCurrentWeek(date),   // 5: week
					DateUtil.getCurrentDay(date),    // 6: day
					"WEBSITE_COUNT_SUM",             // 7: buss_type
					"PROVINCE_COUNT",                // 8: buss_value
					cip_count,                       // 9: count_value
					0                                // 10: sta_range
			};
			Map<Integer, Object> row = new HashMap<Integer, Object>();
			for (int i = 0; i < params.length; i++) {
				row.put(i + 1, params[i]);
			}
			// Delete-then-insert keeps the weekly total idempotent per period.
			JdbcUtil.deleteWeekTable(date, conn, table, "WEBSITE_COUNT_SUM", "PROVINCE_COUNT", 0);
			JdbcUtil.insertTable_oneline(conn, row, table, columns, values);
		} catch (SQLException e) {
			// Swallowed to match the other insert helpers in this task.
			e.printStackTrace();
		}

	}
	/**
	 * Writes the single monthly total row (buss_type WEBSITE_COUNT_SUM,
	 * buss_value PROVINCE_COUNT) into the given table, replacing any
	 * existing row for the same month.
	 *
	 * @param conn      open JDBC connection, owned by the caller
	 * @param cip_count distinct client-IP count to store as count_value
	 * @param date      statistics date used to derive all period columns
	 * @param table     target table name; also prefixes the Oracle sequence ({@code <table>_SE})
	 */
	private static void cipInsertMonth(Connection conn, long cip_count,Date date,String table){
		try {
			final String columns = "id,year,half_year,quarter,month,week,day,buss_type,buss_value,count_value,sta_range";
			final String values = "values(" + table + "_SE.nextval,?,?,?,?,?,?,?,?,?,?)";
			// JDBC positional parameters in placeholder order (1-based).
			Object[] params = {
					DateUtil.getCurrentYear(date),   // 1: year
					DateUtil.getHalfYear(date),      // 2: half_year
					DateUtil.getQuarter(date),       // 3: quarter
					DateUtil.getCurrentMonth(date),  // 4: month
					DateUtil.getCurrentWeek(date),   // 5: week
					DateUtil.getCurrentDay(date),    // 6: day
					"WEBSITE_COUNT_SUM",             // 7: buss_type
					"PROVINCE_COUNT",                // 8: buss_value
					cip_count,                       // 9: count_value
					0                                // 10: sta_range
			};
			Map<Integer, Object> row = new HashMap<Integer, Object>();
			for (int i = 0; i < params.length; i++) {
				row.put(i + 1, params[i]);
			}
			// Delete-then-insert keeps the monthly total idempotent per period.
			JdbcUtil.deleteMonthTable(date, conn, table, "WEBSITE_COUNT_SUM", "PROVINCE_COUNT", 0);
			JdbcUtil.insertTable_oneline(conn, row, table, columns, values);
		} catch (SQLException e) {
			// Swallowed to match the other insert helpers in this task.
			e.printStackTrace();
		}

	}
	/**
	 * Writes the single quarterly total row (buss_type WEBSITE_COUNT_SUM,
	 * buss_value PROVINCE_COUNT) into the given table, replacing any
	 * existing row for the same quarter.
	 *
	 * @param conn      open JDBC connection, owned by the caller
	 * @param cip_count distinct client-IP count to store as count_value
	 * @param date      statistics date used to derive all period columns
	 * @param table     target table name; also prefixes the Oracle sequence ({@code <table>_SE})
	 */
	private static void cipInsertQuarter(Connection conn, long cip_count,Date date,String table){
		try {
			final String columns = "id,year,half_year,quarter,month,week,day,buss_type,buss_value,count_value,sta_range";
			final String values = "values(" + table + "_SE.nextval,?,?,?,?,?,?,?,?,?,?)";
			// JDBC positional parameters in placeholder order (1-based).
			Object[] params = {
					DateUtil.getCurrentYear(date),   // 1: year
					DateUtil.getHalfYear(date),      // 2: half_year
					DateUtil.getQuarter(date),       // 3: quarter
					DateUtil.getCurrentMonth(date),  // 4: month
					DateUtil.getCurrentWeek(date),   // 5: week
					DateUtil.getCurrentDay(date),    // 6: day
					"WEBSITE_COUNT_SUM",             // 7: buss_type
					"PROVINCE_COUNT",                // 8: buss_value
					cip_count,                       // 9: count_value
					0                                // 10: sta_range
			};
			Map<Integer, Object> row = new HashMap<Integer, Object>();
			for (int i = 0; i < params.length; i++) {
				row.put(i + 1, params[i]);
			}
			// Delete-then-insert keeps the quarterly total idempotent per period.
			JdbcUtil.deleteQuarterTable(date, conn, table, "WEBSITE_COUNT_SUM", "PROVINCE_COUNT", 0);
			JdbcUtil.insertTable_oneline(conn, row, table, columns, values);
		} catch (SQLException e) {
			// Swallowed to match the other insert helpers in this task.
			e.printStackTrace();
		}

	}
	/**
	 * Writes the single yearly total row (buss_type WEBSITE_COUNT_SUM,
	 * buss_value PROVINCE_COUNT) into the given table, replacing any
	 * existing row for the same year.
	 *
	 * @param conn      open JDBC connection, owned by the caller
	 * @param cip_count distinct client-IP count to store as count_value
	 * @param date      statistics date used to derive all period columns
	 * @param table     target table name; also prefixes the Oracle sequence ({@code <table>_SE})
	 */
	private static void cipInsertYear(Connection conn, long cip_count,Date date,String table){
		try {
			final String columns = "id,year,half_year,quarter,month,week,day,buss_type,buss_value,count_value,sta_range";
			final String values = "values(" + table + "_SE.nextval,?,?,?,?,?,?,?,?,?,?)";
			// JDBC positional parameters in placeholder order (1-based).
			Object[] params = {
					DateUtil.getCurrentYear(date),   // 1: year
					DateUtil.getHalfYear(date),      // 2: half_year
					DateUtil.getQuarter(date),       // 3: quarter
					DateUtil.getCurrentMonth(date),  // 4: month
					DateUtil.getCurrentWeek(date),   // 5: week
					DateUtil.getCurrentDay(date),    // 6: day
					"WEBSITE_COUNT_SUM",             // 7: buss_type
					"PROVINCE_COUNT",                // 8: buss_value
					cip_count,                       // 9: count_value
					0                                // 10: sta_range
			};
			Map<Integer, Object> row = new HashMap<Integer, Object>();
			for (int i = 0; i < params.length; i++) {
				row.put(i + 1, params[i]);
			}
			// Delete-then-insert keeps the yearly total idempotent per period.
			JdbcUtil.deleteYearTable(date, conn, table, "WEBSITE_COUNT_SUM", "PROVINCE_COUNT", 0);
			JdbcUtil.insertTable_oneline(conn, row, table, columns, values);
		} catch (SQLException e) {
			// Swallowed to match the other insert helpers in this task.
			e.printStackTrace();
		}

	}
}
/**
 * Spark map function that converts one Hive result {@link Row}
 * (columns cip, city, operators_id, user_type) into a {@link StatIp}
 * bean so the driver can build a DataFrame from it.
 */
class StatIpMap implements Function<Row, StatIp>{

	@Override
	public StatIp call(Row row) throws Exception {
		StatIp stat = new StatIp();
		// Row.getAs returns Object; calling toString() preserves the
		// original behavior of failing fast (NPE) on a null column.
		stat.setCip(row.getAs("cip").toString());
		stat.setCity(row.getAs("city").toString());
		stat.setOperators_id(row.getAs("operators_id").toString());
		stat.setUser_type(row.getAs("user_type").toString());
		return stat;
	}

}