package com.surfilter.massdata.spark.task;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;

import scala.Tuple2;

import com.act.sparkanalyz.service.impl.SparkService.OutQueueEntity;
import com.act.sparkanalyz.task.ISparkTask;
import com.surfilter.massdata.spark.bean.DnsCount;
import com.surfilter.massdata.spark.bean.SAN041;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.DateUtil;
import com.surfilter.massdata.spark.util.URLUtil;

public class WebSiteVisitsClassifyTask implements ISparkTask {

	/**
	 * Website-visit classification statistics: totals website visit counts
	 * per industry category. Per the original spec, the scope is websites
	 * hosted in-province (i.e. whose dip exists in BR2002).
	 *
	 * @author weimin
	 * @Date 20170420
	 */
	private static final Log log = LogFactory
			.getLog(WebSiteVisitsClassifyTask.class);

	private static final long serialVersionUID = 1L;
	// Output queue name and the table purged before each run. Never assigned
	// in this file — presumably injected externally (reflection/config);
	// TODO confirm against the task-loading framework.
	private static String outName;
	private static String deleteTable;

	/**
	 * Task entry point: resolves the statistics date, purges that day's
	 * previous results, recomputes the classification counts and queues the
	 * resulting frame for output.
	 *
	 * @param dataFrames named input frames; reads "dip_frame"
	 * @param commandMap command-line options; "-d" optionally overrides the
	 *                   statistics date (defaults to yesterday)
	 * @return a single-element output list, or an empty list on failure
	 *         (errors are logged, never propagated)
	 */
	@Override
	public List<OutQueueEntity> execute(Map<String, DataFrame> dataFrames,
			Map<String, String> commandMap) {
		// FIX: message previously named "DomainAnalyzTask" (copy/paste).
		log.info("==============WebSiteVisitsClassifyTask begin====================");
		List<OutQueueEntity> outList = new ArrayList<OutQueueEntity>();

		try {
			String dayStr = commandMap.get("-d");
			// Fall back to yesterday when no explicit "-d" date is given.
			Date date = DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);
			DataFrame data_webTraffic = dataFrames.get("dip_frame");

			// Delete the current day's rows before inserting so that a
			// rerun of the task does not produce duplicates.
			CommonUtils.deleteTaskTableData(deleteTable, date, "WEBSITE_COUNT_BUSS", 0, "day");

			DataFrame df_webTraffic = webSiteVisitsClassify(data_webTraffic, date);
			outList.add(new OutQueueEntity(outName, df_webTraffic));
		} catch (Exception e) {
			// Best-effort task: log and return what we have (possibly empty).
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
		}
		return outList;
	}

	/**
	 * Builds the SAN041 result frame: per-top-level-domain visit counts are
	 * joined against the industry-classification dimension table
	 * (br2005_temp — expected to be registered by the caller/framework;
	 * TODO confirm) and summed per parent industry type.
	 *
	 * @param dataSource source frame holding "domain" and "dns_visit_count"
	 *                   columns
	 * @param date       statistics date stamped onto every output row
	 * @return a DataFrame of SAN041 beans, one row per industry type
	 */
	public static DataFrame webSiteVisitsClassify(DataFrame dataSource, final Date date) {

		// Aggregate visit counts per top-level domain.
		JavaPairRDD<String, Long> domainCounts = getDomainCounts(dataSource);

		JavaRDD<DnsCount> dns = domainCounts.mapPartitions(new FlatMapFunction<Iterator<Tuple2<String, Long>>, DnsCount>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Iterable<DnsCount> call(Iterator<Tuple2<String, Long>> it) {
				List<DnsCount> list_dc = new ArrayList<DnsCount>();
				while (it.hasNext()) {
					Tuple2<String, Long> tuple = it.next();
					String domain = tuple._1;
					// Skip blank domains; this also drops the ("", 0)
					// fallback pairs emitted on parse failure in
					// getDomainCounts.
					if (StringUtils.isNotBlank(domain)) {
						DnsCount dc = new DnsCount();
						dc.setTop_domain(domain);
						dc.setCount(tuple._2);
						list_dc.add(dc);
					}
				}
				return list_dc;
			}
		});

		DataFrame df_dc = dataSource.sqlContext().createDataFrame(dns, DnsCount.class);
		df_dc.registerTempTable("Topdomain_TEMP");
		// Left outer join keeps domains without a classification; their
		// industry type comes back NULL and is remapped to "221" below.
		String sql = "select ZZ.PARENT_INDUSTRY_TYPE as id,sum(e.count)as sum from Topdomain_TEMP e left outer join br2005_temp zz on zz.TOP_DOMAIN=e.top_domain  GROUP BY zz.PARENT_INDUSTRY_TYPE";
		DataFrame result_domain = dataSource.sqlContext().sql(sql);

		// Project each grouped row into a plain id/sum map for the next stage.
		JavaRDD<Map<String, Object>> rdd = result_domain.toJavaRDD().map(
				new Function<Row, Map<String, Object>>() {
					private static final long serialVersionUID = 1L;

					@Override
					public Map<String, Object> call(Row row) throws Exception {
						Map<String, Object> map = new HashMap<String, Object>();
						map.put("id_type", row.getAs("id"));
						map.put("sum", row.getAs("sum"));
						return map;
					}
				});

		JavaRDD<SAN041> javaRdd_SAN041 = rdd.mapPartitions(new FlatMapFunction<Iterator<Map<String, Object>>, SAN041>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Iterable<SAN041> call(Iterator<Map<String, Object>> it) {
				List<SAN041> list_bean = new ArrayList<SAN041>();
				try {
					while (it.hasNext()) {
						Map<String, Object> map = it.next();

						SAN041 san = new SAN041();
						String id_type = String.valueOf(map.get("id_type"));
						long sum = Long.parseLong(String.valueOf(map.get("sum")));
						// FIX: original tested id_type.equals(null), which
						// is always false by the equals() contract.
						// String.valueOf turns a null id into the literal
						// "null"; unclassified domains get default type 221.
						if (id_type == null || "null".equals(id_type)) {
							id_type = "221";
						}

						// Stamp every date dimension for the statistics day.
						san.setYear(DateUtil.getCurrentYear(date));
						san.setHalf_year(DateUtil.getHalfYear(date));
						san.setQuarter(DateUtil.getQuarter(date));
						san.setMonth(DateUtil.getCurrentMonth(date));
						san.setWeek(DateUtil.getCurrentWeek(date));
						san.setDay(DateUtil.getCurrentDay(date));
						san.setBuss_type("WEBSITE_COUNT_BUSS");
						san.setBuss_value(id_type);
						san.setCount_value(sum);
						san.setSta_range(0L);
						list_bean.add(san);
					}
				} catch (Exception e) {
					// Log and return the beans built so far for this
					// partition rather than failing the whole stage.
					if (log.isErrorEnabled()) {
						log.error(e.getMessage(), e);
					}
				}
				return list_bean;
			}
		});

		DataFrame df = dataSource.sqlContext().createDataFrame(javaRdd_SAN041, SAN041.class);
		// The temp table is only needed for the join above; drop it so
		// repeated invocations in the same SQLContext do not collide.
		df_dc.sqlContext().dropTempTable("Topdomain_TEMP");
		return df;
	}

	/**
	 * Extracts the top-level domain for every source row and sums the
	 * per-domain visit counts.
	 *
	 * @param dataSource frame with "domain" and "dns_visit_count" columns
	 * @return pairs of (top-level domain, total visit count)
	 */
	public static JavaPairRDD<String, Long> getDomainCounts(DataFrame dataSource) {

		// Encode (topDomain, count) as "topDomain@count" so a plain String
		// RDD can carry both values to the next stage.
		JavaRDD<String> domainSuffix = dataSource.javaRDD().map(new Function<Row, String>() {

			private static final long serialVersionUID = 1L;

			@Override
			public String call(Row row) throws Exception {
				String domain = row.getAs("domain");
				// Prefix a scheme — URLUtil.getDomainName presumably
				// expects a full URL; TODO confirm its contract.
				String topDomain = URLUtil.getDomainName("http://" + domain);
				return topDomain + "@" + row.getAs("dns_visit_count");
			}
		});
		JavaPairRDD<String, Long> domainPair = domainSuffix.mapToPair(new PairFunction<String, String, Long>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, Long> call(String value) throws Exception {
				try {
					String[] valueStr = value.split("@");
					String domain = valueStr[0];
					long count = Long.parseLong(valueStr[1]);
					return new Tuple2<String, Long>(domain, count);
				} catch (Exception e) {
					if (log.isErrorEnabled()) {
						log.error(e.getMessage(), e);
					}
				}
				// Malformed records collapse into a ("", 0) pair that is
				// filtered out downstream by the isNotBlank check.
				return new Tuple2<String, Long>("", 0L);
			}
		});
		// Sum the counts per top-level domain.
		JavaPairRDD<String, Long> domainCount = domainPair.reduceByKey(new Function2<Long, Long, Long>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Long call(Long x, Long y) throws Exception {
				return x + y;
			}
		});

		return domainCount;
	}
}
