package com.surfilter.massdata.spark.task.daystat2;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;

import scala.Tuple2;

import com.act.sparkanalyz.service.impl.SparkService.OutQueueEntity;
import com.act.sparkanalyz.task.ISparkTask;
import com.surfilter.massdata.spark.bean.SAN041;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.DateUtil;

public class WebSiteVisitsClassifyTask_dayNew implements ISparkTask {
	/**
	 * Website-visit classification statistics: per-industry-category website
	 * visit counts, aggregated per day. Per the original spec this covers
	 * websites hosted in-province (i.e. the dip exists in BR2002).
	 */
	private static final Log log = LogFactory
			.getLog(WebSiteVisitsClassifyTask_dayNew.class);

	private static final long serialVersionUID = 1L;

	// Name of the output queue the result DataFrame is published under.
	// NOTE(review): never assigned within this file — presumably injected
	// elsewhere (reflection/config); verify before relying on it.
	private static String outName;

	/**
	 * Task entry point: reads the hourly SAN041 aggregates, rolls them up to
	 * day granularity per website category, deletes any previously written
	 * rows for the day (idempotent re-run) and queues the result.
	 *
	 * @param dataFrames source frames keyed by table name; reads "dws_hour_san041"
	 * @param commandMap command-line options; "-d" optionally overrides the
	 *                   statistics date (defaults to yesterday)
	 * @return a single-element list with the day-level DataFrame, or an empty
	 *         list if processing failed (errors are logged, not rethrown)
	 */
	@Override
	public List<OutQueueEntity> execute(Map<String, DataFrame> dataFrames,
			Map<String, String> commandMap) {
		long start = System.currentTimeMillis();
		System.out.println("==============DomainAnalyzTask begin====================");
		List<OutQueueEntity> outList = new ArrayList<OutQueueEntity>();

		try {
			String dayStr = commandMap.get("-d");
			Date date = DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);
			DataFrame data_webTraffic = dataFrames.get("dws_hour_san041");
			// Sum the hourly website counts per buss_value (category list).
			String sql = "select buss_value,sum(count_value) as count from dws_hour_san041_temp where buss_type='WEBSITE_COUNT_BUSS'  and sta_range=0  GROUP BY buss_value";
			DataFrame dataSource = data_webTraffic.sqlContext().sql(sql);
			DataFrame df_webTraffic = webSiteVisitsClassify(dataSource, date);
			// Delete the current day's rows before inserting, so a re-run of
			// this task does not duplicate statistics.
			CommonUtils.deleteTaskTableData("SAN041", date, "WEBSITE_COUNT_BUSS", 0, "day");
			outList.add(new OutQueueEntity(outName, df_webTraffic));
		} catch (Exception e) {
			// Best-effort task: log and return an empty result instead of
			// failing the whole job (matches the surrounding task framework).
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
		}

		long end = System.currentTimeMillis();
		double min = (end - start) * 1.0 / (1000 * 60);
		System.out.println("WebSiteVisitsClassifyTask_New行业分类网站访问量:exectime: " + min + " min............");

		return outList;
	}

	/**
	 * Rolls the hourly aggregates up to one SAN041 row per website category.
	 *
	 * <p>Each source row carries a comma-separated list of category ids in
	 * {@code buss_value} plus a pre-summed {@code count}; the list is fanned
	 * out to (categoryId, count) pairs, summed per category, and converted to
	 * day-level SAN041 beans.
	 *
	 * @param dataSource rows with columns {@code buss_value} and {@code count}
	 * @param date       the statistics date stamped onto every output row
	 * @return a DataFrame of day-level SAN041 rows
	 */
	public static DataFrame webSiteVisitsClassify(DataFrame dataSource, final Date date) {
		// NOTE: the original also built an unused map() RDD here that returned
		// Tuple2(null, null); it was never materialized and has been removed.

		// Fan each row out to one (categoryId, count) pair per category id.
		JavaPairRDD<String, String> webRdd = dataSource.toJavaRDD().flatMapToPair(
				new PairFlatMapFunction<Row, String, String>() {

					@Override
					public Iterable<Tuple2<String, String>> call(Row row)
							throws Exception {
						List<Tuple2<String, String>> list = new ArrayList<Tuple2<String, String>>();
						String buss_value = row.getAs("buss_value").toString();
						String count = row.getAs("count").toString();
						String[] line = StringUtils.split(buss_value, ",");
						for (String t : line) {
							list.add(new Tuple2<String, String>(t, count));
						}
						return list;
					}
				});

		// Sum the counts per category id; counts travel as decimal strings.
		JavaPairRDD<String, String> webParRDD = webRdd.reduceByKey(
				new Function2<String, String, String>() {

					@Override
					public String call(String v1, String v2) throws Exception {
						return String.valueOf(Long.valueOf(v1) + Long.valueOf(v2));
					}
				});

		// Convert each aggregated pair into a day-level SAN041 bean.
		JavaRDD<SAN041> rdd = webParRDD.map(new Function<Tuple2<String, String>, SAN041>() {

			@Override
			public SAN041 call(Tuple2<String, String> v1) throws Exception {
				SAN041 san = new SAN041();
				String id_type = v1._1();
				long count = Long.parseLong(v1._2());
				// BUGFIX: the original tested id_type.equals(null), which is
				// always false for a non-null receiver (and NPEs otherwise).
				// "221" is the fallback category for unclassified sites.
				if (id_type == null || id_type.equalsIgnoreCase("null")) {
					id_type = "221";
				}
				san.setYear(DateUtil.getCurrentYear(date));
				san.setHalf_year(DateUtil.getHalfYear(date));
				san.setQuarter(DateUtil.getQuarter(date));
				san.setMonth(DateUtil.getCurrentMonth(date));
				san.setWeek(DateUtil.getCurrentWeek(date));
				san.setDay(DateUtil.getCurrentDay(date));
				san.setHour(null); // day-level row: no hour component
				san.setBuss_type("WEBSITE_COUNT_BUSS");
				san.setBuss_value(id_type);
				san.setCount_value(count);
				san.setSta_range(0L);
				return san;
			}
		});

		DataFrame df_webTraffic = dataSource.sqlContext().createDataFrame(rdd, SAN041.class);

		return df_webTraffic;
	}

}