package com.surfilter.massdata.spark.output;

import java.util.Date;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.sql.DataFrame;

import com.act.sparkanalyz.output.impl.HiveOutput;
import com.surfilter.massdata.spark.util.DateUtil;

/**
 * Hive output step for SMCS: builds an {@code INSERT INTO ... PARTITION(ds, hour_)}
 * statement that copies rows from a temp table into the target Hive table, and
 * executes it through the DataFrame's SQLContext.
 *
 * <p>The target database/table/columns and the statistical granularity are injected
 * as bean properties by the surrounding framework (fields are set externally —
 * presumably via reflection/config; no setters are visible here).
 */
public class HiveOutputforSmcs extends HiveOutput {

	private static final Log log = LogFactory.getLog(HiveOutputforSmcs.class);
	private static final long serialVersionUID = 1L;
	// "hour" selects hourly granularity; anything else falls back to daily.
	private String statisticalTime;
	// Target Hive table name (unqualified).
	private String table;
	// Target Hive database name.
	private String database;
	// Source temp table registered in the SQLContext.
	private String tableTemp;
	// Comma-separated column projection for the SELECT clause.
	private String columns;

	/**
	 * Executes the generated INSERT statement against the DataFrame's SQLContext.
	 * Errors are logged and swallowed deliberately so one failed output does not
	 * abort the surrounding job.
	 *
	 * @param result     DataFrame whose SQLContext hosts the temp table
	 * @param commandMap job arguments; "-d" may carry an explicit run date
	 */
	@Override
	public void write(DataFrame result, Map<String, String> commandMap) {
		log.info("===========HiveOutput  begin============");
		try {
			String sql = getOutputSql(commandMap);
			if (StringUtils.isNotBlank(sql)) {
				// sql() is executed for its side effect (the INSERT); the
				// returned DataFrame is intentionally discarded.
				result.sqlContext().sql(sql);
			}
		} catch (Exception e) {
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
		}
	}

	/**
	 * Builds the partitioned INSERT statement. The partition timestamp is derived
	 * from "now" (previous hour for hourly mode, yesterday for daily mode) unless
	 * overridden by the "-d" command argument, as interpreted by DateUtil.
	 *
	 * @param commandMap job arguments; "-d" may carry an explicit run date
	 * @return the INSERT INTO ... PARTITION ... SELECT statement
	 */
	protected String getOutputSql(Map<String, String> commandMap) {
		String dayStr = commandMap.get("-d");
		Date date;
		if (StringUtils.equals(statisticalTime, "hour")) {
			date = DateUtil.getExecDate(DateUtil.getPreviousDayHour(new Date()), dayStr);
		} else {
			date = DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);
		}
		String time = DateUtil.getCurrentymd(date);
		// Zero-pad the hour to two digits for the hour_ partition value.
		String hourStr = String.format("%02d", DateUtil.getCurrentHour(date));
		// StringBuilder: no shared mutation here, so no need for StringBuffer's locking.
		StringBuilder sql = new StringBuilder();
		sql.append("insert into table ");
		sql.append(database).append(".").append(table);
		sql.append(" partition(ds='").append(time).append("',hour_='").append(hourStr).append("')");
		sql.append(" select ").append(columns).append(" from ").append(tableTemp);
		return sql.toString();
	}
}
