package com.surfilter.massdata.spark.task;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;

import scala.Tuple2;

import com.act.sparkanalyz.service.impl.SparkService.OutQueueEntity;
import com.act.sparkanalyz.task.ISparkTask;
import com.surfilter.massdata.spark.bean.Count_Operator;
import com.surfilter.massdata.spark.bean.SA3601;
import com.surfilter.massdata.spark.model.DnsStatConsts;
import com.surfilter.massdata.spark.model.StructIP;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.DateUtil;
import com.surfilter.massdata.spark.util.IPDatabase;

public class ActiveUserOperatorTask implements ISparkTask {

	/**
	 * Counts distinct client IPs (CIP) per telecom operator for this province's
	 * active users ("active user" = a CIP that exists in BR2002), producing
	 * "WEBSITE_COUNT_OPERATORS" statistic rows (SA3601 beans).
	 *
	 * @author weimin
	 * @Date 20170420
	 */
	private static final Log log = LogFactory.getLog(ActiveUserOperatorTask.class);

	private static final long serialVersionUID = 1L;
	// Output queue name and the table purged of same-day rows before insert.
	// Neither is assigned in this class — presumably injected externally
	// (reflection/config); TODO confirm against the task framework.
	private static String outName;
	private static String deleteTable;

	@Override
	public List<OutQueueEntity> execute(Map<String, DataFrame> dataFrames,
			Map<String, String> commandMap) {
		// Fixed: original message said "DomainAnalyzTask" (copy/paste from a sibling task).
		log.info("==============ActiveUserOperatorTask begin====================");
		List<OutQueueEntity> outList = new ArrayList<OutQueueEntity>();
		try {
			// "-d" optionally overrides the partition date; defaults to yesterday.
			String dayStr = commandMap.get("-d");
			Date date = DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);

			DataFrame data_webTraffic = dataFrames.get("cip_frame");
			DataFrame df_webTraffic = activeUserOperator(data_webTraffic, date);
			outList.add(new OutQueueEntity(outName, df_webTraffic));

			// Purge the current day's rows before the new result set is written.
			CommonUtils.deleteTaskTableData(deleteTable, date, "WEBSITE_COUNT_OPERATORS", 0, "day");
		} catch (Exception e) {
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
		}
		return outList;
	}

	/**
	 * Maps every CIP row to its operator via the broadcast IP database, then
	 * aggregates count(distinct cip) per operator and wraps each aggregate in an
	 * SA3601 statistics bean stamped with the supplied date.
	 *
	 * @param dataSource DataFrame that must contain a "cip" column
	 * @param date       partition date used to fill the SA3601 time columns
	 * @return DataFrame of SA3601 rows with buss_type "WEBSITE_COUNT_OPERATORS"
	 */
	@SuppressWarnings("resource")
	public static DataFrame activeUserOperator(DataFrame dataSource, final Date date) {
		// Wraps the existing SparkContext; intentionally not closed (shared context).
		JavaSparkContext sc = new JavaSparkContext(dataSource.sqlContext().sparkContext());
		final Broadcast<IPDatabase> ipBroadcast = sc.broadcast(IPDatabase.getInstance());

		JavaRDD<Count_Operator> domainRDD = dataSource.toJavaRDD().mapPartitions(
				new FlatMapFunction<Iterator<Row>, Count_Operator>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Iterable<Count_Operator> call(Iterator<Row> it) {
				List<Count_Operator> list_oc = new ArrayList<Count_Operator>();
				while (it.hasNext()) {
					// Fixed: try/catch moved inside the loop — previously a single bad
					// row (e.g. a CIP absent from the IP database made stcip null and
					// threw NPE) silently dropped every remaining row of the partition.
					try {
						Row row = it.next();
						String cip = row.getAs("cip");
						StructIP stcip = ipBroadcast.value().get(cip);
						if (stcip == null) {
							continue; // CIP not in the IP database — skip this row.
						}
						String op_id = stcip.get(DnsStatConsts.IPExField.OPERATORS_ID);
						// Fixed: isNotBlank — isNoneBlank is the varargs variant.
						if (StringUtils.isNotBlank(op_id)) {
							Count_Operator oc = new Count_Operator();
							oc.setOp_id(op_id);
							oc.setCip(cip);
							list_oc.add(oc);
						}
					} catch (Exception e) {
						if (log.isErrorEnabled()) {
							log.error(e.getMessage(), e);
						}
					}
				}
				return list_oc;
			}
		});

		// Distinct-CIP count per operator via a temp table.
		DataFrame df_oc = dataSource.sqlContext().createDataFrame(domainRDD, Count_Operator.class);
		df_oc.registerTempTable("OperatorCount_TEMP");
		String sql = "select op_id,count(distinct cip)as count from OperatorCount_TEMP GROUP BY op_id ";
		DataFrame result_count = dataSource.sqlContext().sql(sql);

		// Build the SA3601 beans directly from the aggregated rows. (The original
		// version first mapped each Row into a HashMap<String,Object> and then into
		// SA3601; the intermediate map carried no extra information.)
		JavaRDD<SA3601> JavaRdd_SA3601 = result_count.toJavaRDD().mapPartitions(
				new FlatMapFunction<Iterator<Row>, SA3601>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Iterable<SA3601> call(Iterator<Row> it) {
				List<SA3601> list_bean = new ArrayList<SA3601>();
				while (it.hasNext()) {
					// Per-row catch: one malformed row must not discard the partition.
					try {
						Row row = it.next();
						long count = Long.parseLong(String.valueOf(row.getAs("count")));
						String op_id = String.valueOf(row.getAs("op_id"));
						SA3601 san = new SA3601();
						// Fill every time-granularity column from the partition date.
						san.setYear(DateUtil.getCurrentYear(date));
						san.setHalf_year(DateUtil.getHalfYear(date));
						san.setQuarter(DateUtil.getQuarter(date));
						san.setMonth(DateUtil.getCurrentMonth(date));
						san.setWeek(DateUtil.getCurrentWeek(date));
						san.setDay(DateUtil.getCurrentDay(date));
						san.setBuss_type("WEBSITE_COUNT_OPERATORS");
						san.setBuss_value(op_id);
						san.setCount_value(count);
						// NOTE(review): result discarded — possibly setSta_range(...)
						// was intended; kept as-is in case the getter has side effects.
						san.getSta_range();
						list_bean.add(san);
					} catch (Exception e) {
						if (log.isErrorEnabled()) {
							log.error(e.getMessage(), e);
						}
					}
				}
				return list_bean;
			}
		});

		DataFrame df = dataSource.sqlContext().createDataFrame(JavaRdd_SA3601, SA3601.class);
		df_oc.sqlContext().dropTempTable("OperatorCount_TEMP");
		return df;
	}

}
