package com.surfilter.massdata.spark.task;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;

import scala.Tuple2;

import com.act.sparkanalyz.service.impl.SparkService.OutQueueEntity;
import com.act.sparkanalyz.task.ISparkTask;
import com.surfilter.massdata.spark.bean.SAN041;
import com.surfilter.massdata.spark.model.DnsStatConsts;
import com.surfilter.massdata.spark.model.StructIP;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.DateUtil;
import com.surfilter.massdata.spark.util.IPDatabase;

/**
 * 运营商网站访问量 — website visit volume per telecom operator.
 *
 * 各运营商访问量接入网站访问量变化统计（要求是省内接入网站，即dip在BR2002中存在）
 *
 * Sums DNS visit counts per operator id over rows of the
 * "d_dns_domain_hour" DataFrame whose destination IP (dip) resolves in the
 * broadcast {@link IPDatabase} (i.e. province-internal access sites), and
 * publishes the aggregate as SAN041 rows with business type
 * "WEBSITE_COUNT_OPERATORS".
 */
public class WebSiteVisitsOperatorTask implements ISparkTask{
	
	private static final Log log = LogFactory.getLog(WebSiteVisitsOperatorTask.class);
	private static final long serialVersionUID = 1L;
	// Output queue name the resulting DataFrame is published under
	// (presumably injected by the task framework — TODO confirm).
	private String outName;
	
	/**
	 * Runs the task: keeps rows whose dip is province-internal, aggregates
	 * visit counts per operator, and wraps the totals as SAN041 rows.
	 *
	 * @param dataFrames named source DataFrames; expects key "d_dns_domain_hour"
	 * @param commandMap command options; "-d" optionally overrides the stat date
	 * @return a single-element list with the output entity, or an empty list
	 *         when the source is missing or an error occurred (never null)
	 */
	@Override
	public List<OutQueueEntity> execute(Map<String, DataFrame> dataFrames,Map<String, String> commandMap) {
		log.info("==============WebSiteVisitsOperatorTask begin====================");
		List<OutQueueEntity> outList = new ArrayList<OutQueueEntity>();
		try{
			DataFrame dataSource = dataFrames.get("d_dns_domain_hour");
			if(dataSource == null){
				// Fail fast with a clear message instead of an NPE that would
				// otherwise be swallowed by the broad catch below.
				log.error("DataFrame 'd_dns_domain_hour' is missing; task skipped");
				return outList;
			}

			JavaRDD<Map<String, Object>> operatorsRDD = getDipFilterRDD(dataSource);
			
			JavaPairRDD<String, Long> pairRDD = getPairRDD(operatorsRDD);
	        
			// "-d" overrides the statistics day; default is yesterday.
			String dayStr = commandMap.get("-d");
	        Date date = DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);
	        
	        JavaRDD<SAN041> javaRDD = createRdd(date, pairRDD);
	    	DataFrame df = dataSource.sqlContext().createDataFrame(javaRDD, SAN041.class);
	    	
			outList.add(new OutQueueEntity(outName, df));
			
			// Clear previous results for this day before the new ones land.
			// NOTE(review): this runs after the entity is queued — assumes the
			// queue consumer writes later than this call; confirm ordering
			// against SparkService's output handling.
			CommonUtils.deleteTaskTableData("SAN041", date, "WEBSITE_COUNT_OPERATORS", 0, "day");
		}catch(Exception e){
			if(log.isErrorEnabled()){
				log.error(e.getMessage(),e);
			}
		}
		return outList;
	}

	/**
	 * Sums dns_visit_count per operators_id. Rows without a usable operator id
	 * (blank id, empty map from the dip filter, or unparsable count) are
	 * dropped BEFORE the shuffle instead of being collapsed into a dummy
	 * ("", 0) key and filtered only in {@link #createRdd}.
	 */
	@SuppressWarnings("serial")
	private static JavaPairRDD<String, Long> getPairRDD(JavaRDD<Map<String, Object>> operatorsRDD) {
		return operatorsRDD.mapToPair(new PairFunction<Map<String,Object>,String,Long>() {

			@Override
			public Tuple2<String,Long> call(Map<String,Object> map){
				try{
					String operatorsId = CommonUtils.valueOf(map.get("operators_id"));
					if(StringUtils.isNotBlank(operatorsId)){
						long visitCount = Long.parseLong(CommonUtils.valueOf(map.get("dns_visit_count")));
						return new Tuple2<String,Long>(operatorsId, visitCount);
					}
				}catch(Exception e){
					if(log.isErrorEnabled()){
						log.error(e.getMessage(),e);
					}
				}
				// Sentinel for invalid rows; removed by the filter below so the
				// empty key never reaches reduceByKey.
				return new Tuple2<String,Long>("",0L);
			}
		}).filter(new Function<Tuple2<String,Long>, Boolean>() {

			@Override
			public Boolean call(Tuple2<String,Long> tuple){
				return StringUtils.isNotBlank(tuple._1);
			}
		}).reduceByKey(new Function2<Long, Long, Long>() {
			
			@Override
			public Long call(Long v1, Long v2){
				return v1 + v2;
			}
		});
	}
	
	/**
	 * Maps each source row to {dns_visit_count, operators_id}, keeping only
	 * rows whose destination IP (dip) is found in the broadcast IP database.
	 * Non-matching rows yield an empty map and are discarded downstream in
	 * {@link #getPairRDD}.
	 */
	@SuppressWarnings({ "serial", "resource" })
	private static JavaRDD<Map<String, Object>> getDipFilterRDD(DataFrame dataSource) {
		// Wrap the existing SparkContext via the factory method rather than
		// constructing a new JavaSparkContext, so no separate context
		// lifecycle (or close() obligation) is implied.
		JavaSparkContext sc = JavaSparkContext.fromSparkContext(dataSource.sqlContext().sparkContext());
		final Broadcast<IPDatabase> ipBroadcast = sc.broadcast(IPDatabase.getInstance());
		
		return dataSource.toJavaRDD().map(new Function<Row, Map<String, Object>>() {

			@Override
			public Map<String, Object> call(Row row) throws Exception {
				Map<String,Object> map = new HashMap<String,Object>();
				String dip = row.getAs("dip");
				StructIP structDip = ipBroadcast.value().get(dip);
				if(structDip != null){
					map.put("dns_visit_count",row.getAs("dns_visit_count"));
					map.put("operators_id", structDip.get(DnsStatConsts.IPExField.OPERATORS_ID));
				}
				return map;
			}
		});
	}
	
	/**
	 * Converts the (operators_id, total) pairs into SAN041 statistic rows for
	 * the given day under business type "WEBSITE_COUNT_OPERATORS"
	 * (sta_range 0, all calendar fields derived from {@code date}).
	 */
	@SuppressWarnings("serial")
	private JavaRDD<SAN041> createRdd(final Date date,JavaPairRDD<String, Long> pairRDD) {
		return pairRDD.mapPartitions(new FlatMapFunction<Iterator<Tuple2<String,Long>>,SAN041>() {

			@Override
			public Iterable<SAN041> call(Iterator<Tuple2<String, Long>> it){
				List<SAN041> list = new ArrayList<SAN041>();
				try{
					while(it.hasNext()){
						Tuple2<String, Long> tuple = it.next();
						String operatorsId = tuple._1;
						// Defensive re-check; upstream already filters blanks.
						if(StringUtils.isNotBlank(operatorsId)){
							SAN041 sa = new SAN041();
							sa.setYear(DateUtil.getCurrentYear(date));
							sa.setHalf_year(DateUtil.getHalfYear(date));
							sa.setQuarter(DateUtil.getQuarter(date));
							sa.setMonth(DateUtil.getCurrentMonth(date));
							sa.setWeek(DateUtil.getCurrentWeek(date));
							sa.setDay(DateUtil.getCurrentDay(date));
							sa.setBuss_type("WEBSITE_COUNT_OPERATORS");
							sa.setBuss_value(operatorsId);
							sa.setCount_value(tuple._2);
							sa.setSta_range(0);
							list.add(sa);
						}
					}
				}catch(Exception e){
					if(log.isErrorEnabled()){
						log.error(e.getMessage(),e);
					}
				}
				return list;
			}
		});
	}
}
