package com.surfilter.massdata.spark.input;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.storage.StorageLevel;

import com.act.sparkanalyz.input.ISparkInput;
import com.act.sparkanalyz.service.impl.SparkService.ServiceContext;
import com.surfilter.massdata.spark.bean.DnsRegionHour;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.DateUtil;

/**
 * Spark input implementation that reads a source table via HiveContext SQL.
 *
 * <p>Executes the configured {@code sql} (restricted to the partition for the
 * execution date, and hour when {@code statisticalTime} is {@code "hour"}),
 * registers the result as a temp table named {@code tableName + "_temp"},
 * persists it (memory, spilling serialized to disk), and returns it keyed by
 * {@code tableName}.
 *
 * <p>The {@code tableName}, {@code statisticalTime} and {@code sql} fields are
 * expected to be injected by the surrounding framework before {@link #read} is
 * called.
 */
public class HiveSparkInput_New implements ISparkInput {
    private static final Log log = LogFactory.getLog(HiveSparkInput_New.class);
    private static final long serialVersionUID = 1L;
    // Injected configuration: target table, granularity ("hour" or daily), base SELECT.
    private String tableName;
    private String statisticalTime;
    private String sql;

    /**
     * Reads the source DataFrame from Hive.
     *
     * <p>Best-effort: any failure is logged and an <em>empty</em> map is
     * returned rather than propagating the exception, so downstream stages see
     * "no data" instead of an error.
     *
     * @param serviceContext framework context providing the SQLContext (must be a HiveContext)
     * @param commandMap     command-line options; {@code -d} may override the execution date
     * @return map from {@code tableName} to the persisted DataFrame; empty on failure
     */
    @Override
    public Map<String, DataFrame> read(ServiceContext serviceContext,
            Map<String, String> commandMap) throws Exception {
        log.info("============HiveSparkInput begin==============");
        Map<String, DataFrame> resultMap = new HashMap<String, DataFrame>();
        try {
            SQLContext sqlContext = serviceContext.getSqlCtx();
            // The framework is expected to supply a HiveContext here; a plain
            // SQLContext would fail this cast and be logged below.
            HiveContext hiveCtx = (HiveContext) sqlContext;

            DataFrame dataFrame = hiveCtx.sql(getSourceSql(commandMap));

            dataFrame.registerTempTable(tableName + "_temp");
            // Persist in memory; partitions that do not fit are spilled to
            // disk in serialized form.
            dataFrame.persist(StorageLevel.MEMORY_AND_DISK_SER());
            resultMap.put(tableName, dataFrame);
        } catch (Exception e) {
            // Deliberate swallow: log and fall through to return an empty map.
            if (log.isErrorEnabled()) {
                log.error(e.getMessage(), e);
            }
        }
        return resultMap;
    }

    /**
     * Builds the partition-filtered source SQL.
     *
     * <p>Appends {@code where ds='<yyyyMMdd>'} to the configured base query,
     * plus {@code and hour='<HH>'} (zero-padded) when {@code statisticalTime}
     * is {@code "hour"}. The execution date comes from {@link DateUtil},
     * optionally shifted by the {@code -d} command option.
     *
     * <p>NOTE(review): partition values are concatenated into the SQL string.
     * They originate from DateUtil (not raw user input), so this looks safe,
     * but confirm {@code -d} cannot inject arbitrary text via getExecDate.
     *
     * @param commandMap command options; {@code -d} may shift the execution date
     * @return the full SQL statement with partition predicates
     */
    protected String getSourceSql(Map<String, String> commandMap) {
        String dayStr = commandMap.get("-d");
        Date date = null;
        if (StringUtils.equals(statisticalTime, "hour")) {
            date = DateUtil.getExecDate(DateUtil.getPreviousDayHour(new Date()), dayStr);
        } else {
            date = DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);
        }
        String time = DateUtil.getCurrentymd(date);
        int hour = DateUtil.getCurrentHour(date);

        // No concurrent access here, so StringBuilder suffices (was StringBuffer).
        StringBuilder sqlBuffer = new StringBuilder();
        sqlBuffer.append(sql);
        sqlBuffer.append(" where ds='" + time + "'");
        if (StringUtils.equals(statisticalTime, "hour")) {
            // Zero-pad the hour to two digits to match the partition format.
            sqlBuffer.append(" and hour='" + String.format("%02d", hour) + "'");
        }
        return sqlBuffer.toString();
    }
}
