package com.surfilter.massdata.spark.task.hourstat2;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;

import com.act.sparkanalyz.service.impl.SparkService.OutQueueEntity;
import com.act.sparkanalyz.task.ISparkTask;
import com.surfilter.massdata.spark.bean.SA3701;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.DateUtil;

public class ActiveUserHourTask_New implements ISparkTask{

	/**
	 * Netizen online-time analysis: counts the distinct active user IPs of
	 * our province for a single hour and emits the result as a one-row
	 * SA3701 DataFrame (buss_type "WEBSITE_COUNT_SUM").
	 *
	 * @author weimin
	 * @Date 20170420
	 */
	private static final Log log = LogFactory.getLog(ActiveUserHourTask_New.class);

	private static final long serialVersionUID = 1L;
	// NOTE(review): outName/deleteTable are never assigned anywhere in this
	// file -- presumably injected elsewhere (reflection/config); verify,
	// otherwise outName is null when the OutQueueEntity is built below.
	private static String outName;
	private static String deleteTable;

	/**
	 * Task entry point. Resolves the target hour, computes the active-user
	 * count from the "dwb_domain_cip" DataFrame, queues the result under
	 * {@code outName}, and pre-deletes the same hour's rows so a re-run does
	 * not duplicate data. Exceptions are logged and an empty/partial list is
	 * returned rather than failing the whole job.
	 *
	 * @param dataFrames registered source DataFrames, keyed by table name
	 * @param commandMap command-line options; "-d" optionally pins the day
	 * @return output queue entries to be persisted by the framework
	 */
	@Override
	public List<OutQueueEntity> execute(Map<String, DataFrame> dataFrames,
			Map<String, String> commandMap) {
		long start = System.currentTimeMillis();
		// FIX: previously logged "DomainAnalyzTask begin" -- copy-paste
		// leftover from another task that made log grepping misleading.
		log.info("==============ActiveUserHourTask_New begin====================");
		List<OutQueueEntity> outList = new ArrayList<OutQueueEntity>();
		try {
			// "-d" pins the run to a given day; without it the previous hour
			// of "now" is used (see DateUtil.getExecDate).
			String dayStr = commandMap.get("-d");
			Date date = DateUtil.getExecDate(DateUtil.getPreviousDayHour(new Date()),dayStr);

			// Active-user hourly statistics over the dwb_domain_cip source.
			DataFrame data_webTraffic = dataFrames.get("dwb_domain_cip");
			DataFrame df_webTraffic = activeUserHour(data_webTraffic, date);
			outList.add(new OutQueueEntity(outName, df_webTraffic));

			// Delete this hour's existing rows before the new ones are
			// inserted, so re-running the task is idempotent.
			CommonUtils.deleteTaskTableData(deleteTable, date, "WEBSITE_COUNT_SUM", 0, "hour");
		} catch (Exception e) {
			if(log.isErrorEnabled()){
				log.error(e.getMessage(),e);
			}
		}

		long end = System.currentTimeMillis();
		double min = (end - start) * 1.0 / (1000 * 60);
		// FIX: was System.out.println -- route the timing line through the
		// task logger like every other message from this class.
		log.info("ActiveUserHourTask_New:exectime: " + min + " min............");

		return outList;
	}

	/**
	 * Counts distinct client IPs (active users) in the temp table
	 * {@code dwb_domain_cip_temp} and wraps the single count into one SA3701
	 * bean stamped with the year/half-year/quarter/month/week/day/hour
	 * derived from {@code date}.
	 *
	 * @param dataSource source DataFrame; only its SQLContext is used to run
	 *                   the aggregate query
	 * @param date statistics timestamp -- when a date was supplied it is the
	 *             hour before that date, otherwise the hour before "now"
	 * @return a one-row DataFrame of SA3701 beans
	 */
	public static DataFrame activeUserHour(DataFrame dataSource, final Date date) {
		String sql = "select count(distinct cip)as count from dwb_domain_cip_temp ";
		DataFrame result_count = dataSource.sqlContext().sql(sql);

		// Project each (single) result row into a plain Map so the bean
		// assembly below is independent of the Row schema.
		JavaRDD<Map<String, Object>> rdd = result_count.toJavaRDD().map(new Function<Row, Map<String, Object>>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Map<String, Object> call(Row row) throws Exception {
				Map<String, Object> map = new HashMap<String, Object>();
				map.put("count", row.getAs("count"));
				return map;
			}
		});

		// Build the SA3701 output beans partition-by-partition. Failures are
		// logged and the partition yields whatever was built so far
		// (best-effort, matching the task-level error handling).
		JavaRDD<SA3701> javaRdd_SA3701 = rdd.mapPartitions(new FlatMapFunction<Iterator<Map<String,Object>>, SA3701>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Iterable<SA3701> call(Iterator<Map<String, Object>> it)  {
				List<SA3701> list_bean = new ArrayList<SA3701>();
				try{
					while(it.hasNext()){
						Map<String,Object> map = it.next();
						SA3701 san = new SA3701();
						long count = Long.parseLong(String.valueOf(map.get("count")));
						// Populate every time-dimension column required by
						// the SA3701 statistics row.
						san.setYear(DateUtil.getCurrentYear(date));
						san.setHalf_year(DateUtil.getHalfYear(date));
						san.setQuarter(DateUtil.getQuarter(date));
						san.setMonth(DateUtil.getCurrentMonth(date));
						san.setWeek(DateUtil.getCurrentWeek(date));
						san.setDay(DateUtil.getCurrentDay(date));
						// With "-d" this is the hour before the given date;
						// without it, the hour before the current time.
						san.setHour(DateUtil.getCurrentHour(date));
						san.setBuss_type("WEBSITE_COUNT_SUM");
						san.setBuss_value("1");
						san.setCount_value(count);
						san.setSta_range(0L);
						list_bean.add(san);
					}
				}catch(Exception e){
					if(log.isErrorEnabled()){
						log.error(e.getMessage(),e);
					}
				}
				return list_bean;
			}
		});

		DataFrame df = dataSource.sqlContext().createDataFrame(javaRdd_SA3701,SA3701.class);
		return df;
	}

}
