package com.lvmama.java.rhino.etl.main;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;

import com.lvmama.java.rhino.spark.tables.LogDetail;
import com.lvmama.java.rhino.spark.utils.Constants;
import com.lvmama.java.rhino.spark.utils.SaveToHdfsUtils;

/**
 * Ad-hoc driver that writes one sample {@link LogDetail} row to HDFS via
 * {@link SaveToHdfsUtils} and then reads back and prints the stored
 * parquet tables for a given business date.
 */
public class TestMain {
	/**
	 * Entry point.
	 *
	 * @param args optional; args[0] may supply the business date as
	 *             {@code yyyy-MM-dd} (defaults to 2016-07-16, the value the
	 *             original hard-coded).
	 */
	public static void main(String[] args) {
		// Build the Spark configuration and contexts.
		SparkConf sparkConf = getSparkConf("lalalal");
		JavaSparkContext sc = new JavaSparkContext(sparkConf);
		try {
			SQLContext sqlContext = new SQLContext(sc);

			// Persist one sample LogDetail record.
			List<LogDetail> logDetailList = new ArrayList<LogDetail>();
			LogDetail logDetail = new LogDetail();
			logDetail.setContent("日志日志日志");
			logDetail.setThreadName("线程号线程号");
			logDetailList.add(logDetail);
			SaveToHdfsUtils.save(sqlContext, SaveToHdfsUtils.TABLE_NAME.LOG_DETAIL, logDetailList);

			// Resolve the business date; fail fast on a bad argument instead of
			// continuing with a null date (the original swallowed ParseException
			// and then passed null to getFileUri).
			String dateStr = (args != null && args.length > 0) ? args[0] : "2016-07-16";
			Date date;
			try {
				date = new SimpleDateFormat("yyyy-MM-dd").parse(dateStr);
			} catch (ParseException e) {
				throw new IllegalArgumentException(
						"Invalid date, expected yyyy-MM-dd: " + dateStr, e);
			}

			// File URIs for the tables we read back below.
			String logDetailFileUri = SaveToHdfsUtils.getFileUri(SaveToHdfsUtils.TABLE_NAME.LOG_DETAIL, date);
			String apiRequestTimeDetailFileUri = SaveToHdfsUtils.getFileUri(SaveToHdfsUtils.TABLE_NAME.API_REQUEST_TIME_DETAIL, date);
			String userAccessPathFileUri = SaveToHdfsUtils.getFileUri(SaveToHdfsUtils.TABLE_NAME.USER_ACCESS_PATH, date);

			// Load each table and print its contents and row count.
			DataFrame logDetailDf = sqlContext.read().load(logDetailFileUri);
			DataFrame requestTimeDf = sqlContext.read().load(apiRequestTimeDetailFileUri);
			DataFrame accessPathDf = sqlContext.read().load(userAccessPathFileUri);
			logDetailDf.show();
			System.out.println("logDetailDf : " + logDetailDf.count());
			requestTimeDf.show();
			System.out.println("requestTimeDf : " + requestTimeDf.count());
			accessPathDf.show();
			System.out.println("accessPathDf : " + accessPathDf.count());
		} finally {
			// Always release the Spark context (the original never stopped it).
			sc.stop();
		}
	}

	/**
	 * Builds the Spark configuration for this job.
	 *
	 * @param appName the Spark application name
	 * @return a SparkConf; runs with {@code local[2]} as master when the
	 *         {@code local.debug.run} property is "true"
	 */
	protected static SparkConf getSparkConf(String appName) {
		SparkConf sparkConf = new SparkConf().setAppName(appName).
				set("spark.driver.allowMultipleContexts", "true").
				set("spark.testing.memory", "2147480000");
		// local.debug.run=true in the configuration enables local debug mode.
		// Boolean.parseBoolean is null-safe and case-insensitive, covering the
		// former isNotBlank + equalsIgnoreCase("true") check.
		if (Boolean.parseBoolean(Constants.getInstance().getValue("local.debug.run"))) {
			sparkConf.setMaster("local[2]");
		}
		return sparkConf;
	}
}
