package com.surfilter.massdata.spark.output;

import java.util.Date;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;

import com.act.sparkanalyz.output.impl.HiveOutput;
import com.surfilter.massdata.spark.util.CommonUtils;
import com.surfilter.massdata.spark.util.ConfigUtil;
import com.surfilter.massdata.spark.util.DateUtil;

/**
 * Differs from HiveOutputforSmcs2 in that the data is first written to HDFS
 * and only then loaded into Hive. Requires the following entry in
 * /smcs-mass-spark/src/main/resources/config.properties:
 * dnstmp_path=/user/rzx/dns_tmp/
 */
public class HiveOutputforSmcs2_hdfs extends HiveOutput{

	private static final Log log = LogFactory.getLog(HiveOutputforSmcs2_hdfs.class);
	private static final long serialVersionUID = 1L;
	// "hour" selects hourly statistics; any other value means daily.
	private String statisticalTime;
	// Target Hive table name; also selects the row-to-text column layout.
	private String table;
	// Target Hive database name.
	private String database;
	// Spark temp-table name referenced by getOutputSql().
	private String tableTemp;
	// Column projection referenced by getOutputSql().
	private String columns;

	/**
	 * Serializes {@code result} as tab-separated text into a temporary HDFS
	 * directory ({dnstmp_path}/{table}/{yyyyMMdd}/{HH}), then issues a Hive
	 * {@code LOAD DATA INPATH} into partition (ds, hour) of
	 * {database}.{table}.
	 *
	 * @param result     rows to persist
	 * @param commandMap CLI options; "-d" may override the execution date
	 *                   (interpreted by DateUtil.getExecDate)
	 */
	@Override
	public void write(DataFrame result, Map<String, String> commandMap) {
		log.info("===========HiveOutput  begin============");
		try{
			// Base HDFS directory for the intermediate text files,
			// e.g. dnstmp_path=/user/rzx/dns_tmp/
			String path = ConfigUtil.get("dnstmp_path", "");

			Date date = resolveExecDate(commandMap);
			String time = DateUtil.getCurrentymd(date);
			String hourStr = formatHour(DateUtil.getCurrentHour(date));
			String tmppath = buildTmpPath(path, time, hourStr);

			// Remove any leftover output from a previous (failed) run so
			// saveAsTextFile does not fail on an existing directory.
			boolean delflag = CommonUtils.delPath(tmppath);
			log.info("delflag:" + delflag + " del tmppath:" + tmppath);

			JavaRDD<String> resultRDD = result.toJavaRDD().map(new Function<Row, String>(){

				@Override
				public String call(Row v1) throws Exception {
					String key = "";
					if(table.equalsIgnoreCase("d_dns_domain_hour")){
						// NOTE(review): "domain" is emitted twice — looks
						// intentional, but confirm against the table schema.
						key = v1.getAs("domain") + "\t" + v1.getAs("domain") + "\t"
								+ v1.getAs("cip") + "\t" + v1.getAs("dip") + "\t"
								+ v1.getAs("dns_visit_count");
					}else if(table.equalsIgnoreCase("day_domain")){
						key = v1.getAs("domain") + "\t" + v1.getAs("dip") + "\t"
								+ v1.getAs("dns_visit_count") + "\t" + v1.getAs("findtime");
					}
					return key;
				}

			});
			// A single output file keeps the subsequent LOAD DATA simple.
			resultRDD.coalesce(1).saveAsTextFile(tmppath);

			String sql1 = "load data  inpath '" + tmppath + "' into table "
					+ database + "." + table
					+ " partition (ds='" + time + "',hour='" + hourStr + "')";

			JavaSparkContext jsc = JavaSparkContext.fromSparkContext(result.sqlContext().sparkContext());
			HiveContext hive = new HiveContext(jsc);

			try{
				log.info("insert data sql1:" + sql1);
				hive.sql(sql1);
			}catch(Exception e){
				// Keep the original best-effort behaviour (do not abort the
				// job), but record the real failure instead of swallowing it.
				log.error("load data into " + database + "." + table + " failed, sql: " + sql1, e);
			}
		}catch(Exception e){
			if(log.isErrorEnabled()){
				log.error(e.getMessage(),e);
			}
		}
	}

	/**
	 * Builds an {@code INSERT INTO ... PARTITION ... SELECT} statement for
	 * the temp table. Not used by {@link #write}; kept for subclass/caller
	 * compatibility with the parent HiveOutput contract.
	 *
	 * @param commandMap CLI options; "-d" may override the execution date
	 * @return the generated HiveQL insert statement
	 */
	protected String getOutputSql(Map<String, String> commandMap) {
		Date date = resolveExecDate(commandMap);
		String time = DateUtil.getCurrentymd(date);
		String hourStr = formatHour(DateUtil.getCurrentHour(date));
		// StringBuilder: no shared state, no need for StringBuffer's locking.
		StringBuilder sql = new StringBuilder();
		sql.append("insert into table ");
		sql.append(database).append(".").append(table);
		sql.append(" partition ( ds='").append(time).append("',hour='").append(hourStr).append("')");
		sql.append(" select ").append(columns).append(" from ").append(tableTemp);
		return sql.toString();
	}

	/**
	 * Resolves the execution date shared by write() and getOutputSql():
	 * hourly mode starts from the previous hour, daily mode from yesterday;
	 * the "-d" option (if given) is applied by DateUtil.getExecDate.
	 */
	private Date resolveExecDate(Map<String, String> commandMap) {
		String dayStr = commandMap.get("-d");
		if(StringUtils.equals(statisticalTime, "hour")){
			return DateUtil.getExecDate(DateUtil.getPreviousDayHour(new Date()), dayStr);
		}
		return DateUtil.getExecDate(DateUtil.getYesterday(), dayStr);
	}

	/** Zero-pads an hour value to two digits, e.g. 7 -> "07". */
	private static String formatHour(int hour) {
		return hour < 10 ? "0" + hour : String.valueOf(hour);
	}

	/**
	 * Joins base path, table, day and hour into the temp HDFS directory.
	 * Returns "" when no base path is configured (matching the original
	 * behaviour of passing an empty path downstream).
	 */
	private String buildTmpPath(String path, String time, String hourStr) {
		if(path == null || path.equals("")){
			return "";
		}
		String base = path.endsWith("/") ? path : path + "/";
		return base + table + "/" + time + "/" + hourStr;
	}
}
