package com.surfilter.massdata.spark.task;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.act.sparkanalyz.service.impl.SparkService.OutQueueEntity;
import com.act.sparkanalyz.task.ISparkTask;
import com.surfilter.massdata.spark.bean.SA6000;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.DateUtil;

/**
 * Spark task that runs the configured SQL against the "domain_history" table
 * and emits the aggregated totals as SA6000 rows (buss_type "WEBSITE_COUNT_SUM").
 */
public class DomainCount implements ISparkTask{
	// Fix: logger previously pointed at ActiveUserTrafficTask.class (copy-paste
	// error), so this task's log output was attributed to the wrong class.
	private static final Log log = LogFactory.getLog(DomainCount.class);
	private static final long serialVersionUID = 1L;
	// Output queue name and SQL text; no setter is visible in this file —
	// presumably injected by the task framework. TODO confirm how these are set.
	private static String outName;
	private String sql;

	/**
	 * Executes the domain-count query, converts the result into SA6000 count
	 * rows for the execution date, queues them for output, and deletes any
	 * previously written WEBSITE_COUNT_SUM rows for that day.
	 *
	 * @param dataFrames source DataFrames keyed by table name; must contain "domain_history"
	 * @param commandMap command options; "-d" selects the partition day (defaults to yesterday)
	 * @return output entities to flush; empty if any step failed (errors are logged, not rethrown)
	 */
	@Override
	public List<OutQueueEntity> execute(Map<String, DataFrame> dataFrames, Map<String, String> commandMap) {
		log.info("==============DomainCountTask begin====================");
		List<OutQueueEntity> outList = new ArrayList<OutQueueEntity>();
		try {
			String dayStr = commandMap.get("-d");
			// Execution day: explicit "-d" value when supplied, otherwise yesterday.
			Date date = DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);

			// Run the configured SQL against the domain_history frame.
			DataFrame tableFrame = dataFrames.get("domain_history");
			DataFrame result_webTraffic = tableFrame.sqlContext().sql(sql);
			DataFrame df_CountTraffic = activeUserTraffic(result_webTraffic, date);
			outList.add(new OutQueueEntity(outName, df_CountTraffic));
			// Remove any earlier results for this day so a re-run is idempotent.
			CommonUtils.deleteTaskTableData("SA6000", date, "WEBSITE_COUNT_SUM", 0, "day");
		} catch (Exception e) {
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
		}
		return outList;
	}

	/**
	 * Converts a DataFrame exposing a "count_value" column into a DataFrame of
	 * SA6000 beans stamped with every time bucket (year / half-year / quarter /
	 * month / week / day) derived from {@code date}.
	 *
	 * @param dataSource query result; each row must expose a "count_value" column
	 * @param date       the statistics date used to fill the time-bucket fields
	 * @return a DataFrame of SA6000 rows (empty when the source has no rows)
	 */
	public static DataFrame activeUserTraffic(DataFrame dataSource, Date date) {
		JavaRDD<Map<String, Object>> rdd = dataSource.toJavaRDD().map(
				new Function<Row, Map<String, Object>>() {
					private static final long serialVersionUID = 1L;

					@Override
					public Map<String, Object> call(Row row) throws Exception {
						Map<String, Object> map = new HashMap<String, Object>();
						map.put("count_value", row.getAs("count_value"));
						return map;
					}
				});
		// NOTE(review): collect() pulls every row to the driver; acceptable only
		// while the aggregated result set stays small.
		List<Map<String, Object>> list = rdd.collect();
		List<SA6000> list_bean = new ArrayList<SA6000>();

		if (!list.isEmpty()) {
			// Was System.out.println — route through the task logger instead.
			log.info("开始填充数据......");
			for (Map<String, Object> map : list) {
				SA6000 san = new SA6000();
				int sum = Integer.parseInt(String.valueOf(map.get("count_value")));

				san.setYear(DateUtil.getCurrentYear(date));
				san.setHalf_year(DateUtil.getHalfYear(date));
				san.setQuarter(DateUtil.getQuarter(date));
				san.setMonth(DateUtil.getCurrentMonth(date));
				san.setWeek(DateUtil.getCurrentWeek(date));
				san.setDay(DateUtil.getCurrentDay(date));
				san.setBuss_type("WEBSITE_COUNT_SUM");
				san.setBuss_value("1");
				san.setCount_value(sum);
				san.setSta_range(0);
				list_bean.add(san);
			}
		}

		// Wraps the existing SparkContext (does not start a new one), then
		// builds the result DataFrame from the bean list.
		JavaSparkContext ctx = new JavaSparkContext(dataSource.sqlContext()
				.sparkContext());
		JavaRDD<SA6000> javaRdd = ctx.parallelize(list_bean);
		DataFrame df = dataSource.sqlContext().createDataFrame(javaRdd,
				SA6000.class);

		return df;
	}

}
