package com.surfilter.massdata.spark.task.daystat2;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;

import com.act.sparkanalyz.service.impl.SparkService.OutQueueEntity;
import com.act.sparkanalyz.task.ISparkTask;
import com.surfilter.massdata.spark.bean.SA5601;
import com.surfilter.massdata.spark.bean.SA6601;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.DateUtil;

/**
 * Daily domain-statistics Spark task.
 *
 * <p>Reads the "day_domain" DataFrame (expected to be registered as the
 * temp table {@code day_domain_temp} by an upstream step — TODO confirm),
 * produces two single-row result DataFrames and queues them for output:
 * <ul>
 *   <li>SA5601 — count of distinct top-level domains ({@code top_domain});</li>
 *   <li>SA6601 — count of distinct full domains ({@code domain}).</li>
 * </ul>
 * Previously persisted rows for the same day are deleted so the task can
 * be re-run safely for a given date.
 */
public class DomainAnalyzTask_SA5601_SA6601_New implements ISparkTask {

	private static final Log log = LogFactory.getLog(DomainAnalyzTask_SA5601_SA6601_New.class);
	private static final long serialVersionUID = 1L;

	/**
	 * Runs the SA5601/SA6601 daily domain statistics.
	 *
	 * @param dataFrames input DataFrames keyed by name; must contain "day_domain"
	 * @param commandMap command-line options; "-d" optionally overrides the
	 *                   statistics date (defaults to yesterday)
	 * @return output queue entries for SA5601_TEMP and SA6601_TEMP; empty on failure
	 */
	@Override
	public List<OutQueueEntity> execute(Map<String, DataFrame> dataFrames, Map<String, String> commandMap) {
		long start = System.currentTimeMillis();
		log.info("==============DomainAnalyzTask_SA5601_SA6601 begin====================");
		List<OutQueueEntity> outList = new ArrayList<OutQueueEntity>();
		try {
			DataFrame dataSource = dataFrames.get("day_domain");
			String dayStr = commandMap.get("-d");
			Date date = DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);

			DataFrame df_SA5601 = topDomainCount(dataSource, date);
			DataFrame df_SA6601 = domainCount(dataSource, date);
			outList.add(new OutQueueEntity("SA5601_TEMP", df_SA5601));
			outList.add(new OutQueueEntity("SA6601_TEMP", df_SA6601));

			// Remove any rows already persisted for this day so a re-run
			// does not duplicate the WEBSITE_COUNT_SUM statistics.
			CommonUtils.deleteTaskTableData("SA5601", date, "WEBSITE_COUNT_SUM", 0, "day");
			CommonUtils.deleteTaskTableData("SA6601", date, "WEBSITE_COUNT_SUM", 0, "day");
		} catch (Exception e) {
			// Best-effort task boundary: log the failure and return whatever
			// (possibly nothing) was queued so far.
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
		}
		long end = System.currentTimeMillis();
		double min = (end - start) * 1.0 / (1000 * 60);
		log.info("DomainAnalyzTask_SA5601_SA6601_New:exectime: " + min + " min............");
		return outList;
	}

	/**
	 * Counts distinct full domains in {@code day_domain_temp} and wraps the
	 * result in a single-row SA6601 DataFrame stamped with the given date.
	 *
	 * @param dataSource any DataFrame bound to the target SQLContext (used only
	 *                   to reach the SQLContext; the query reads day_domain_temp)
	 * @param date       the statistics date used to fill the period columns
	 * @return a DataFrame with zero or one SA6601 row
	 */
	public static DataFrame domainCount(DataFrame dataSource, final Date date) {
		SQLContext sqlContext = dataSource.sqlContext();
		// Number of distinct (sub)domains observed during the day.
		String sql = "select count( distinct domain) as  count from  day_domain_temp";
		DataFrame result_count = sqlContext.sql(sql);
		List<Row> domain_count = result_count.toJavaRDD().collect();
		List<SA6601> list_SA6601 = new ArrayList<SA6601>();
		if (!domain_count.isEmpty()) {
			long count = domain_count.get(0).getAs("count");

			SA6601 sa6601 = new SA6601();
			sa6601.setYear(DateUtil.getCurrentYear(date));
			sa6601.setHalf_year(DateUtil.getHalfYear(date));
			sa6601.setQuarter(DateUtil.getQuarter(date));
			sa6601.setMonth(DateUtil.getCurrentMonth(date));
			sa6601.setWeek(DateUtil.getCurrentWeek(date));
			sa6601.setDay(DateUtil.getCurrentDay(date));
			sa6601.setBuss_type("WEBSITE_COUNT_SUM");
			sa6601.setBuss_value("");
			sa6601.setCount_value(count);
			sa6601.setSta_range(0);
			list_SA6601.add(sa6601);
		}
		// Build the result directly from the bean list; the previous
		// JavaSparkContext/parallelize detour produced an unused RDD.
		return sqlContext.createDataFrame(list_SA6601, SA6601.class);
	}

	/**
	 * Counts distinct top-level domains in {@code day_domain_temp} and wraps
	 * the result in a single-row SA5601 DataFrame stamped with the given date.
	 *
	 * @param dataSource any DataFrame bound to the target SQLContext (used only
	 *                   to reach the SQLContext; the query reads day_domain_temp)
	 * @param date       the statistics date used to fill the period columns
	 * @return a DataFrame with zero or one SA5601 row
	 */
	public static DataFrame topDomainCount(DataFrame dataSource, final Date date) {
		SQLContext sqlContext = dataSource.sqlContext();
		// An ungrouped COUNT(DISTINCT ...) always yields exactly one row, so
		// the former "limit 1000" clause was a no-op and has been dropped.
		String sql = "select count( distinct top_domain) as  count from  day_domain_temp";
		DataFrame result_count = sqlContext.sql(sql);
		List<Row> domain_count = result_count.toJavaRDD().collect();
		List<SA5601> list_SA5601 = new ArrayList<SA5601>();
		if (!domain_count.isEmpty()) {
			long count = domain_count.get(0).getAs("count");

			SA5601 sa = new SA5601();
			sa.setYear(DateUtil.getCurrentYear(date));
			sa.setHalf_year(DateUtil.getHalfYear(date));
			sa.setQuarter(DateUtil.getQuarter(date));
			sa.setMonth(DateUtil.getCurrentMonth(date));
			sa.setWeek(DateUtil.getCurrentWeek(date));
			sa.setDay(DateUtil.getCurrentDay(date));
			sa.setBuss_type("WEBSITE_COUNT_SUM");
			sa.setBuss_value("1");
			sa.setCount_value(count);
			sa.setSta_range(0);
			list_SA5601.add(sa);
		}
		// Build the result directly from the bean list; the previous
		// JavaSparkContext/parallelize detour produced an unused RDD.
		return sqlContext.createDataFrame(list_SA5601, SA5601.class);
	}
}
