package com.surfilter.massdata.spark.task;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import com.act.sparkanalyz.service.impl.SparkService.OutQueueEntity;
import com.act.sparkanalyz.task.ISparkTask;
import com.surfilter.massdata.spark.bean.BR2005;
import com.surfilter.massdata.spark.bean.SA5000;
import com.surfilter.massdata.spark.bean.SA6000;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.DateUtil;
/**
 * Spark task that counts web-domain accesses: runs the configured {@link #sql}
 * against the "br2005" DataFrame, converts the aggregated {@code top_count}
 * rows into {@link SA5000} statistics beans for the execution date, and queues
 * the resulting DataFrame for output into {@link #table}.
 *
 * <p>{@code table} and {@code sql} are expected to be injected externally
 * (no setters are visible in this file — presumably via the task framework;
 * TODO confirm).</p>
 */
public class WebDomainCountTask implements ISparkTask {

	/** Target output table name (injected). */
	private String table;
	/** Aggregation SQL executed against the "br2005" DataFrame (injected). */
	private String sql;

	// Fixed copy-paste bug: logger was created with WebSiteAccessTask.class,
	// so this task's log lines were attributed to the wrong class.
	private static final Log log = LogFactory.getLog(WebDomainCountTask.class);
	private static final long serialVersionUID = 1L;

	/**
	 * Executes the domain-count job.
	 *
	 * @param dataFrames registered DataFrames; must contain the key "br2005"
	 * @param commandMap command-line options; "-d" selects the statistics day
	 *                   (defaults to yesterday via {@link DateUtil#getExecDate})
	 * @return output queue entries — empty when the job fails (the error is
	 *         logged and swallowed at this task boundary, not rethrown)
	 */
	@Override
	public List<OutQueueEntity> execute(Map<String, DataFrame> dataFrames, Map<String, String> commandMap) {
		log.info("==============WebDomainCountTask begin====================");
		List<OutQueueEntity> outlist = new ArrayList<OutQueueEntity>();
		try {
			DataFrame dataFrame = dataFrames.get("br2005");
			DataFrame resultTopFrame = dataFrame.sqlContext().sql(sql);
			String dayStr = commandMap.get("-d");
			Date date = DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);
			DataFrame dfCountTraffic = topCountTraffic(resultTopFrame, date);

			outlist.add(new OutQueueEntity(table, dfCountTraffic));
			// Purge any previous day-level WEBSITE_COUNT_SUM rows so that
			// re-running the task for the same day does not duplicate data.
			CommonUtils.deleteTaskTableData("SA5000", date, "WEBSITE_COUNT_SUM", 0, "day");
		} catch (Exception e) {
			// Task boundary: log and return the (possibly empty) list rather
			// than failing the whole batch run.
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
		}
		return outlist;
	}

	/**
	 * Collects the aggregated {@code top_count} values from the query result
	 * (expected to be a tiny aggregate — it is pulled to the driver) and
	 * re-packages them as an {@link SA5000} DataFrame tagged with the given
	 * statistics date and the business type "WEBSITE_COUNT_SUM".
	 *
	 * @param resultTopFrame result of {@link #sql}; must expose a "top_count" column
	 * @param date           statistics date used to fill the time-dimension fields
	 * @return a DataFrame of SA5000 beans, one per input row
	 */
	private DataFrame topCountTraffic(DataFrame resultTopFrame, Date date) {
		JavaRDD<Map<String, Object>> rdd = resultTopFrame.toJavaRDD().map(new Function<Row, Map<String, Object>>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Map<String, Object> call(Row row) throws Exception {
				Map<String, Object> map = new HashMap<String, Object>();
				map.put("top_count", row.getAs("top_count"));
				return map;
			}
		});

		// NOTE(review): collect() is only safe because the SQL is an aggregate
		// producing very few rows — confirm before reusing this pattern.
		List<Map<String, Object>> rows = rdd.collect();
		List<SA5000> beans = new ArrayList<SA5000>(rows.size());
		if (!rows.isEmpty()) {
			// Was System.out.println — route through the task logger instead.
			log.info("开始填充数据......");
			for (Map<String, Object> row : rows) {
				SA5000 sa = new SA5000();
				int sum = Integer.parseInt(String.valueOf(row.get("top_count")));
				sa.setYear(DateUtil.getCurrentYear(date));
				sa.setHalf_year(DateUtil.getHalfYear(date));
				sa.setQuarter(DateUtil.getQuarter(date));
				sa.setMonth(DateUtil.getCurrentMonth(date));
				sa.setWeek(DateUtil.getCurrentWeek(date));
				sa.setDay(DateUtil.getCurrentDay(date));
				sa.setBuss_type("WEBSITE_COUNT_SUM");
				sa.setBuss_value("1");
				sa.setCount_value(sum);
				sa.setSta_range(0);
				beans.add(sa);
			}
		}
		JavaSparkContext ctx = new JavaSparkContext(resultTopFrame.sqlContext().sparkContext());
		JavaRDD<SA5000> javaRDD = ctx.parallelize(beans);
		return resultTopFrame.sqlContext().createDataFrame(javaRDD, SA5000.class);
	}
}
